blob: 7d4c4d3fb285ae95a2d6f15a6fdcffd0250691f9 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017 The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ipd9f9fa62016-09-09 13:42:32 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040020#include <linux/sort.h>
Clarence Ip8f7366c2016-07-05 12:15:26 -040021#include <linux/debugfs.h>
Clarence Ipcae1bb62016-07-07 12:07:13 -040022#include <linux/ktime.h>
Clarence Ip4c1d9772016-06-26 09:35:38 -040023#include <uapi/drm/sde_drm.h>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070024#include <drm/drm_mode.h>
25#include <drm/drm_crtc.h>
26#include <drm/drm_crtc_helper.h>
27#include <drm/drm_flip_work.h>
28
29#include "sde_kms.h"
30#include "sde_hw_lm.h"
Clarence Ipc475b082016-06-26 09:27:23 -040031#include "sde_hw_ctl.h"
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040032#include "sde_crtc.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040033#include "sde_plane.h"
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -070034#include "sde_color_processing.h"
Alan Kwong83285fb2016-10-21 20:51:17 -040035#include "sde_encoder.h"
36#include "sde_connector.h"
Clarence Ip980405d2017-08-08 18:33:44 -040037#include "sde_vbif.h"
Alan Kwong67a3f792016-11-01 23:16:53 -040038#include "sde_power_handle.h"
Alan Kwong9aa061c2016-11-06 21:17:12 -050039#include "sde_core_perf.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070040#include "sde_trace.h"
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -070041#include <soc/qcom/scm.h>
42#include "soc/qcom/secure_buffer.h"
43
44/* defines for secure channel call */
45#define SEC_SID_CNT 2
46#define SEC_SID_MASK_0 0x80881
47#define SEC_SID_MASK_1 0x80C81
48#define MEM_PROTECT_SD_CTRL_SWITCH 0x18
49#define MDP_DEVICE_ID 0x1A
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040050
/**
 * struct sde_crtc_custom_events - map of custom event to its enable handler
 * @event: DRM_EVENT_* identifier of the custom event
 * @func: callback invoked to enable (en=true) or disable (en=false) the
 *	interrupt/notification backing the event on the given crtc
 */
struct sde_crtc_custom_events {
	u32 event;
	int (*func)(struct drm_crtc *crtc, bool en,
			struct sde_irq_callback *irq);
};
56
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070057static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
58 bool en, struct sde_irq_callback *ad_irq);
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +053059static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
60 bool en, struct sde_irq_callback *idle_irq);
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -070061
/* table of supported custom events and their enable/disable handlers */
static struct sde_crtc_custom_events custom_events[] = {
	{DRM_EVENT_AD_BACKLIGHT, sde_cp_ad_interrupt},
	{DRM_EVENT_CRTC_POWER, sde_crtc_power_interrupt_handler},
	{DRM_EVENT_IDLE_NOTIFY, sde_crtc_idle_interrupt_handler},
	{DRM_EVENT_HISTOGRAM, sde_cp_hist_interrupt},
};
68
Clarence Ipcae1bb62016-07-07 12:07:13 -040069/* default input fence timeout, in ms */
Dhaval Patelb9850c02017-08-07 22:55:47 -070070#define SDE_CRTC_INPUT_FENCE_TIMEOUT 10000
Clarence Ipcae1bb62016-07-07 12:07:13 -040071
/*
 * The default input fence timeout is 10 seconds, which is also the max
 * allowed value. Any value above 10 seconds adds glitches beyond
 * tolerance limit.
 */
77#define SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT 10000
78
Dhaval Patel48c76022016-09-01 17:51:23 -070079/* layer mixer index on sde_crtc */
80#define LEFT_MIXER 0
81#define RIGHT_MIXER 1
82
Dhaval Patelf9245d62017-03-28 16:24:00 -070083#define MISR_BUFF_SIZE 256
84
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -040085static inline struct sde_kms *_sde_crtc_get_kms(struct drm_crtc *crtc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040086{
Clarence Ip7f70ce42017-03-20 06:53:46 -070087 struct msm_drm_private *priv;
88
89 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
90 SDE_ERROR("invalid crtc\n");
91 return NULL;
92 }
93 priv = crtc->dev->dev_private;
94 if (!priv || !priv->kms) {
95 SDE_ERROR("invalid kms\n");
96 return NULL;
97 }
Abhijit Kulkarni40e38162016-06-26 22:12:09 -040098
Ben Chan78647cd2016-06-26 22:02:47 -040099 return to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400100}
101
Dhaval Patelf9245d62017-03-28 16:24:00 -0700102static inline int _sde_crtc_power_enable(struct sde_crtc *sde_crtc, bool enable)
103{
104 struct drm_crtc *crtc;
105 struct msm_drm_private *priv;
106 struct sde_kms *sde_kms;
107
108 if (!sde_crtc) {
109 SDE_ERROR("invalid sde crtc\n");
110 return -EINVAL;
111 }
112
113 crtc = &sde_crtc->base;
114 if (!crtc->dev || !crtc->dev->dev_private) {
115 SDE_ERROR("invalid drm device\n");
116 return -EINVAL;
117 }
118
119 priv = crtc->dev->dev_private;
120 if (!priv->kms) {
121 SDE_ERROR("invalid kms\n");
122 return -EINVAL;
123 }
124
125 sde_kms = to_sde_kms(priv->kms);
126
127 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
128 enable);
129}
130
Alan Kwongcdb2f282017-03-18 13:42:06 -0700131/**
132 * _sde_crtc_rp_to_crtc - get crtc from resource pool object
133 * @rp: Pointer to resource pool
134 * return: Pointer to drm crtc if success; null otherwise
135 */
136static struct drm_crtc *_sde_crtc_rp_to_crtc(struct sde_crtc_respool *rp)
137{
138 if (!rp)
139 return NULL;
140
141 return container_of(rp, struct sde_crtc_state, rp)->base.crtc;
142}
143
/**
 * _sde_crtc_rp_reclaim - reclaim unused, or all if forced, resources in pool
 * @rp: Pointer to resource pool
 * @force: True to reclaim all resources; otherwise, reclaim only unused ones
 * return: None
 *
 * Caller is expected to hold rp->rp_lock (see callers such as
 * _sde_crtc_rp_free_unused and _sde_crtc_rp_destroy).
 */
static void _sde_crtc_rp_reclaim(struct sde_crtc_respool *rp, bool force)
{
	struct sde_crtc_res *res, *next;
	struct drm_crtc *crtc;

	crtc = _sde_crtc_rp_to_crtc(rp);
	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	SDE_DEBUG("crtc%d.%u %s\n", crtc->base.id, rp->sequence_id,
			force ? "destroy" : "free_unused");

	list_for_each_entry_safe(res, next, &rp->res_list, list) {
		/* skip in-use entries unless the whole pool is being torn down */
		if (!force && !(res->flags & SDE_CRTC_RES_FLAG_FREE))
			continue;
		SDE_DEBUG("crtc%d.%u reclaim res:0x%x/0x%llx/%pK/%d\n",
				crtc->base.id, rp->sequence_id,
				res->type, res->tag, res->val,
				atomic_read(&res->refcount));
		list_del(&res->list);
		/* release the underlying handle via its put callback, if any */
		if (res->ops.put)
			res->ops.put(res->val);
		kfree(res);
	}
}
177
/**
 * _sde_crtc_rp_free_unused - free unused resource in pool
 * @rp: Pointer to resource pool
 * return: none
 */
static void _sde_crtc_rp_free_unused(struct sde_crtc_respool *rp)
{
	/* reclaim only entries already marked SDE_CRTC_RES_FLAG_FREE */
	mutex_lock(rp->rp_lock);
	_sde_crtc_rp_reclaim(rp, false);
	mutex_unlock(rp->rp_lock);
}
189
/**
 * _sde_crtc_rp_destroy - destroy resource pool
 * @rp: Pointer to resource pool
 * return: None
 */
static void _sde_crtc_rp_destroy(struct sde_crtc_respool *rp)
{
	mutex_lock(rp->rp_lock);
	/* detach this pool from the crtc-wide pool list */
	list_del_init(&rp->rp_list);
	/* force-reclaim every resource, in use or not */
	_sde_crtc_rp_reclaim(rp, true);
	mutex_unlock(rp->rp_lock);
}
202
/**
 * _sde_crtc_hw_blk_get - get callback for hardware block
 * @val: Resource handle
 * @type: Resource type
 * @tag: Search tag for given resource
 * return: Resource handle
 *
 * Adapts sde_hw_blk_get() to the respool ops.get signature; installed as
 * the default get callback by _sde_crtc_rp_reset().
 */
static void *_sde_crtc_hw_blk_get(void *val, u32 type, u64 tag)
{
	SDE_DEBUG("res:%d/0x%llx/%pK\n", type, tag, val);
	return sde_hw_blk_get(val, type, tag);
}
215
/**
 * _sde_crtc_hw_blk_put - put callback for hardware block
 * @val: Resource handle
 * return: None
 *
 * Adapts sde_hw_blk_put() to the respool ops.put signature; installed as
 * the default put callback by _sde_crtc_rp_reset().
 */
static void _sde_crtc_hw_blk_put(void *val)
{
	SDE_DEBUG("res://%pK\n", val);
	sde_hw_blk_put(val);
}
226
227/**
228 * _sde_crtc_rp_duplicate - duplicate resource pool and reset reference count
229 * @rp: Pointer to original resource pool
230 * @dup_rp: Pointer to duplicated resource pool
231 * return: None
232 */
233static void _sde_crtc_rp_duplicate(struct sde_crtc_respool *rp,
234 struct sde_crtc_respool *dup_rp)
235{
236 struct sde_crtc_res *res, *dup_res;
237 struct drm_crtc *crtc;
238
Alan Kwong310e9b02017-08-03 02:04:07 -0400239 if (!rp || !dup_rp || !rp->rp_head) {
Alan Kwongcdb2f282017-03-18 13:42:06 -0700240 SDE_ERROR("invalid resource pool\n");
241 return;
242 }
243
244 crtc = _sde_crtc_rp_to_crtc(rp);
245 if (!crtc) {
246 SDE_ERROR("invalid crtc\n");
247 return;
248 }
249
250 SDE_DEBUG("crtc%d.%u duplicate\n", crtc->base.id, rp->sequence_id);
251
Alan Kwong310e9b02017-08-03 02:04:07 -0400252 mutex_lock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700253 dup_rp->sequence_id = rp->sequence_id + 1;
254 INIT_LIST_HEAD(&dup_rp->res_list);
255 dup_rp->ops = rp->ops;
256 list_for_each_entry(res, &rp->res_list, list) {
257 dup_res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
Alan Kwong310e9b02017-08-03 02:04:07 -0400258 if (!dup_res) {
259 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700260 return;
Alan Kwong310e9b02017-08-03 02:04:07 -0400261 }
Alan Kwongcdb2f282017-03-18 13:42:06 -0700262 INIT_LIST_HEAD(&dup_res->list);
263 atomic_set(&dup_res->refcount, 0);
264 dup_res->type = res->type;
265 dup_res->tag = res->tag;
266 dup_res->val = res->val;
267 dup_res->ops = res->ops;
268 dup_res->flags = SDE_CRTC_RES_FLAG_FREE;
269 SDE_DEBUG("crtc%d.%u dup res:0x%x/0x%llx/%pK/%d\n",
270 crtc->base.id, dup_rp->sequence_id,
271 dup_res->type, dup_res->tag, dup_res->val,
272 atomic_read(&dup_res->refcount));
273 list_add_tail(&dup_res->list, &dup_rp->res_list);
274 if (dup_res->ops.get)
275 dup_res->ops.get(dup_res->val, 0, -1);
276 }
Alan Kwong310e9b02017-08-03 02:04:07 -0400277
278 dup_rp->rp_lock = rp->rp_lock;
279 dup_rp->rp_head = rp->rp_head;
280 INIT_LIST_HEAD(&dup_rp->rp_list);
281 list_add_tail(&dup_rp->rp_list, rp->rp_head);
282 mutex_unlock(rp->rp_lock);
Alan Kwongcdb2f282017-03-18 13:42:06 -0700283}
284
/**
 * _sde_crtc_rp_reset - reset resource pool after allocation
 * @rp: Pointer to original resource pool
 * @rp_lock: Pointer to serialization resource pool lock
 * @rp_head: Pointer to crtc resource pool head
 * return: None
 */
static void _sde_crtc_rp_reset(struct sde_crtc_respool *rp,
		struct mutex *rp_lock, struct list_head *rp_head)
{
	if (!rp || !rp_lock || !rp_head) {
		SDE_ERROR("invalid resource pool\n");
		return;
	}

	mutex_lock(rp_lock);
	rp->rp_lock = rp_lock;
	rp->rp_head = rp_head;
	INIT_LIST_HEAD(&rp->rp_list);
	/* first state of this pool; duplicates increment sequence_id */
	rp->sequence_id = 0;
	INIT_LIST_HEAD(&rp->res_list);
	/* default get/put route to the global hw block pool */
	rp->ops.get = _sde_crtc_hw_blk_get;
	rp->ops.put = _sde_crtc_hw_blk_put;
	list_add_tail(&rp->rp_list, rp->rp_head);
	mutex_unlock(rp_lock);
}
311
/**
 * _sde_crtc_rp_add_no_lock - add given resource to resource pool without lock
 * @rp: Pointer to original resource pool
 * @type: Resource type
 * @tag: Search tag for given resource
 * @val: Resource handle
 * @ops: Resource callback operations
 * return: 0 if success; error code otherwise
 *
 * Caller must hold rp->rp_lock (see _sde_crtc_rp_add / _sde_crtc_rp_get).
 */
static int _sde_crtc_rp_add_no_lock(struct sde_crtc_respool *rp, u32 type,
		u64 tag, void *val, struct sde_crtc_res_ops *ops)
{
	struct sde_crtc_res *res;
	struct drm_crtc *crtc;

	if (!rp || !ops) {
		SDE_ERROR("invalid resource pool/ops\n");
		return -EINVAL;
	}

	crtc = _sde_crtc_rp_to_crtc(rp);
	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return -EINVAL;
	}

	/* reject duplicate type/tag entries */
	list_for_each_entry(res, &rp->res_list, list) {
		if (res->type != type || res->tag != tag)
			continue;
		SDE_ERROR("crtc%d.%u already exist res:0x%x/0x%llx/%pK/%d\n",
				crtc->base.id, rp->sequence_id,
				res->type, res->tag, res->val,
				atomic_read(&res->refcount));
		return -EEXIST;
	}
	res = kzalloc(sizeof(struct sde_crtc_res), GFP_KERNEL);
	if (!res)
		return -ENOMEM;
	INIT_LIST_HEAD(&res->list);
	/* new entry starts out referenced by the caller */
	atomic_set(&res->refcount, 1);
	res->type = type;
	res->tag = tag;
	res->val = val;
	res->ops = *ops;
	list_add_tail(&res->list, &rp->res_list);
	SDE_DEBUG("crtc%d.%u added res:0x%x/0x%llx\n",
			crtc->base.id, rp->sequence_id, type, tag);
	return 0;
}
361
362/**
Alan Kwong310e9b02017-08-03 02:04:07 -0400363 * _sde_crtc_rp_add - add given resource to resource pool
364 * @rp: Pointer to original resource pool
365 * @type: Resource type
366 * @tag: Search tag for given resource
367 * @val: Resource handle
368 * @ops: Resource callback operations
369 * return: 0 if success; error code otherwise
370 */
371static int _sde_crtc_rp_add(struct sde_crtc_respool *rp, u32 type, u64 tag,
372 void *val, struct sde_crtc_res_ops *ops)
373{
374 int rc;
375
376 if (!rp) {
377 SDE_ERROR("invalid resource pool\n");
378 return -EINVAL;
379 }
380
381 mutex_lock(rp->rp_lock);
382 rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, ops);
383 mutex_unlock(rp->rp_lock);
384 return rc;
385}
386
/**
 * _sde_crtc_rp_get - lookup the resource from given resource pool and obtain
 *	if available; otherwise, obtain resource from global pool
 * @rp: Pointer to original resource pool
 * @type: Resource type
 * @tag: Search tag for given resource
 * return: Resource handle if success; pointer error or null otherwise
 *
 * Lookup order: (1) exact type+tag match in this pool, (2) free entry of
 * matching type in this pool (re-tagged), (3) global pool via ops.get,
 * (4) entries of matching type in older pools of the same crtc.
 */
static void *_sde_crtc_rp_get(struct sde_crtc_respool *rp, u32 type, u64 tag)
{
	struct sde_crtc_respool *old_rp;
	struct sde_crtc_res *res;
	void *val = NULL;
	int rc;
	struct drm_crtc *crtc;

	if (!rp) {
		SDE_ERROR("invalid resource pool\n");
		return NULL;
	}

	crtc = _sde_crtc_rp_to_crtc(rp);
	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return NULL;
	}

	mutex_lock(rp->rp_lock);
	/* (1) exact match on type and tag: take another reference */
	list_for_each_entry(res, &rp->res_list, list) {
		if (res->type != type || res->tag != tag)
			continue;
		SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
				crtc->base.id, rp->sequence_id,
				res->type, res->tag, res->val,
				atomic_read(&res->refcount));
		atomic_inc(&res->refcount);
		res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
		mutex_unlock(rp->rp_lock);
		return res->val;
	}
	/* (2) free entry of matching type: re-tag and reuse it */
	list_for_each_entry(res, &rp->res_list, list) {
		if (res->type != type || !(res->flags & SDE_CRTC_RES_FLAG_FREE))
			continue;
		SDE_DEBUG("crtc%d.%u retag res:0x%x/0x%llx/%pK/%d\n",
				crtc->base.id, rp->sequence_id,
				res->type, res->tag, res->val,
				atomic_read(&res->refcount));
		atomic_inc(&res->refcount);
		res->tag = tag;
		res->flags &= ~SDE_CRTC_RES_FLAG_FREE;
		mutex_unlock(rp->rp_lock);
		return res->val;
	}
	/* (3) not in this rp, try to grab from global pool */
	if (rp->ops.get)
		val = rp->ops.get(NULL, type, -1);
	if (!IS_ERR_OR_NULL(val))
		goto add_res;
	/*
	 * (4) Search older resource pools for hw blk with matching type,
	 * necessary when resource is being used by this object,
	 * but in previous states not yet cleaned up.
	 *
	 * This enables searching of all resources currently owned
	 * by this crtc even though the resource might not be used
	 * in the current atomic state. This allows those resources
	 * to be re-acquired by the new atomic state immediately
	 * without waiting for the resources to be fully released.
	 */
	else if (IS_ERR_OR_NULL(val) && (type < SDE_HW_BLK_MAX)) {
		list_for_each_entry(old_rp, rp->rp_head, rp_list) {
			if (old_rp == rp)
				continue;

			list_for_each_entry(res, &old_rp->res_list, list) {
				if (res->type != type)
					continue;
				SDE_DEBUG(
					"crtc%d.%u found res:0x%x//%pK/ in crtc%d.%d\n",
						crtc->base.id,
						rp->sequence_id,
						res->type, res->val,
						crtc->base.id,
						old_rp->sequence_id);
				SDE_EVT32_VERBOSE(crtc->base.id,
						rp->sequence_id,
						res->type, res->val,
						crtc->base.id,
						old_rp->sequence_id);
				if (res->ops.get)
					res->ops.get(res->val, 0, -1);
				val = res->val;
				break;
			}

			if (!IS_ERR_OR_NULL(val))
				break;
		}
	}
	if (IS_ERR_OR_NULL(val)) {
		SDE_DEBUG("crtc%d.%u failed to get res:0x%x//\n",
				crtc->base.id, rp->sequence_id, type);
		mutex_unlock(rp->rp_lock);
		return NULL;
	}
add_res:
	/* record the handle in this pool so later lookups hit case (1) */
	rc = _sde_crtc_rp_add_no_lock(rp, type, tag, val, &rp->ops);
	if (rc) {
		SDE_ERROR("crtc%d.%u failed to add res:0x%x/0x%llx\n",
				crtc->base.id, rp->sequence_id, type, tag);
		/* release the handle we just obtained; report failure */
		if (rp->ops.put)
			rp->ops.put(val);
		val = NULL;
	}
	mutex_unlock(rp->rp_lock);
	return val;
}
504
/**
 * _sde_crtc_rp_put - return given resource to resource pool
 * @rp: Pointer to original resource pool
 * @type: Resource type
 * @tag: Search tag for given resource
 * return: None
 */
static void _sde_crtc_rp_put(struct sde_crtc_respool *rp, u32 type, u64 tag)
{
	struct sde_crtc_res *res, *next;
	struct drm_crtc *crtc;

	if (!rp) {
		SDE_ERROR("invalid resource pool\n");
		return;
	}

	crtc = _sde_crtc_rp_to_crtc(rp);
	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	mutex_lock(rp->rp_lock);
	list_for_each_entry_safe(res, next, &rp->res_list, list) {
		if (res->type != type || res->tag != tag)
			continue;
		SDE_DEBUG("crtc%d.%u found res:0x%x/0x%llx/%pK/%d\n",
				crtc->base.id, rp->sequence_id,
				res->type, res->tag, res->val,
				atomic_read(&res->refcount));
		/* double-put: entry is already marked free */
		if (res->flags & SDE_CRTC_RES_FLAG_FREE)
			SDE_ERROR(
				"crtc%d.%u already free res:0x%x/0x%llx/%pK/%d\n",
					crtc->base.id, rp->sequence_id,
					res->type, res->tag, res->val,
					atomic_read(&res->refcount));
		/* last reference dropped: mark reusable/reclaimable */
		else if (atomic_dec_return(&res->refcount) == 0)
			res->flags |= SDE_CRTC_RES_FLAG_FREE;

		mutex_unlock(rp->rp_lock);
		return;
	}
	SDE_ERROR("crtc%d.%u not found res:0x%x/0x%llx\n",
			crtc->base.id, rp->sequence_id, type, tag);
	mutex_unlock(rp->rp_lock);
}
552
553int sde_crtc_res_add(struct drm_crtc_state *state, u32 type, u64 tag,
554 void *val, struct sde_crtc_res_ops *ops)
555{
556 struct sde_crtc_respool *rp;
557
558 if (!state) {
559 SDE_ERROR("invalid parameters\n");
560 return -EINVAL;
561 }
562
563 rp = &to_sde_crtc_state(state)->rp;
564 return _sde_crtc_rp_add(rp, type, tag, val, ops);
565}
566
567void *sde_crtc_res_get(struct drm_crtc_state *state, u32 type, u64 tag)
568{
569 struct sde_crtc_respool *rp;
570 void *val;
571
572 if (!state) {
573 SDE_ERROR("invalid parameters\n");
574 return NULL;
575 }
576
577 rp = &to_sde_crtc_state(state)->rp;
578 val = _sde_crtc_rp_get(rp, type, tag);
579 if (IS_ERR(val)) {
580 SDE_ERROR("failed to get res type:0x%x:0x%llx\n",
581 type, tag);
582 return NULL;
583 }
584
585 return val;
586}
587
588void sde_crtc_res_put(struct drm_crtc_state *state, u32 type, u64 tag)
589{
590 struct sde_crtc_respool *rp;
591
592 if (!state) {
593 SDE_ERROR("invalid parameters\n");
594 return;
595 }
596
597 rp = &to_sde_crtc_state(state)->rp;
598 _sde_crtc_rp_put(rp, type, tag);
599}
600
/* event deinitialization hook; currently a no-op beyond argument check */
static void _sde_crtc_deinit_events(struct sde_crtc *sde_crtc)
{
	if (!sde_crtc)
		return;
}
606
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530607/**
608 * sde_crtc_destroy_dest_scaler - free memory allocated for scaler lut
609 * @sde_crtc: Pointer to sde crtc
610 */
611static void _sde_crtc_destroy_dest_scaler(struct sde_crtc *sde_crtc)
612{
613 if (!sde_crtc)
614 return;
615
616 kfree(sde_crtc->scl3_lut_cfg);
617}
618
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700619static void sde_crtc_destroy(struct drm_crtc *crtc)
620{
621 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
622
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400623 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -0400624
625 if (!crtc)
626 return;
627
Dhaval Patele4a5dda2016-10-13 19:29:30 -0700628 if (sde_crtc->blob_info)
629 drm_property_unreference_blob(sde_crtc->blob_info);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400630 msm_property_destroy(&sde_crtc->property_info);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -0700631 sde_cp_crtc_destroy_properties(crtc);
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +0530632 _sde_crtc_destroy_dest_scaler(sde_crtc);
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -0700633
Clarence Ip24f80662016-06-13 19:05:32 -0400634 sde_fence_deinit(&sde_crtc->output_fence);
Clarence Ipa18d4832017-03-13 12:35:44 -0700635 _sde_crtc_deinit_events(sde_crtc);
Clarence Ip7a753bb2016-07-07 11:47:44 -0400636
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700637 drm_crtc_cleanup(crtc);
Clarence Ip7f70ce42017-03-20 06:53:46 -0700638 mutex_destroy(&sde_crtc->crtc_lock);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700639 kfree(sde_crtc);
640}
641
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700642static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
643 const struct drm_display_mode *mode,
644 struct drm_display_mode *adjusted_mode)
645{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400646 SDE_DEBUG("\n");
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400647
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530648 if ((msm_is_mode_seamless(adjusted_mode) ||
649 msm_is_mode_seamless_vrr(adjusted_mode)) &&
650 (!crtc->enabled)) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -0400651 SDE_ERROR("crtc state prevents seamless transition\n");
652 return false;
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400653 }
654
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700655 return true;
656}
657
/**
 * _sde_crtc_setup_blend_cfg - configure mixer blend stage for a plane
 * @mixer: Pointer to crtc mixer containing the target hw mixer
 * @pstate: Pointer to plane state providing alpha and blend-op properties
 * @format: Pointer to the plane's sde format (alpha_enable consulted)
 */
static void _sde_crtc_setup_blend_cfg(struct sde_crtc_mixer *mixer,
	struct sde_plane_state *pstate, struct sde_format *format)
{
	uint32_t blend_op, fg_alpha, bg_alpha;
	uint32_t blend_type;
	struct sde_hw_mixer *lm = mixer->hw_lm;

	/* default to opaque blending */
	fg_alpha = sde_plane_get_property(pstate, PLANE_PROP_ALPHA);
	bg_alpha = 0xFF - fg_alpha;
	blend_op = SDE_BLEND_FG_ALPHA_FG_CONST | SDE_BLEND_BG_ALPHA_BG_CONST;
	blend_type = sde_plane_get_property(pstate, PLANE_PROP_BLEND_OP);

	SDE_DEBUG("blend type:0x%x blend alpha:0x%x\n", blend_type, fg_alpha);

	switch (blend_type) {

	case SDE_DRM_BLEND_OP_OPAQUE:
		/* constant fg/bg alpha; same flags as the default above */
		blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
			SDE_BLEND_BG_ALPHA_BG_CONST;
		break;

	case SDE_DRM_BLEND_OP_PREMULTIPLIED:
		/* per-pixel bg alpha only applies when the format has alpha */
		if (format->alpha_enable) {
			blend_op = SDE_BLEND_FG_ALPHA_FG_CONST |
				SDE_BLEND_BG_ALPHA_FG_PIXEL;
			if (fg_alpha != 0xff) {
				/* additionally modulate bg by constant alpha */
				bg_alpha = fg_alpha;
				blend_op |= SDE_BLEND_BG_MOD_ALPHA |
					SDE_BLEND_BG_INV_MOD_ALPHA;
			} else {
				blend_op |= SDE_BLEND_BG_INV_ALPHA;
			}
		}
		break;

	case SDE_DRM_BLEND_OP_COVERAGE:
		/* per-pixel fg alpha blending when the format has alpha */
		if (format->alpha_enable) {
			blend_op = SDE_BLEND_FG_ALPHA_FG_PIXEL |
				SDE_BLEND_BG_ALPHA_FG_PIXEL;
			if (fg_alpha != 0xff) {
				bg_alpha = fg_alpha;
				blend_op |= SDE_BLEND_FG_MOD_ALPHA |
					SDE_BLEND_FG_INV_MOD_ALPHA |
					SDE_BLEND_BG_MOD_ALPHA |
					SDE_BLEND_BG_INV_MOD_ALPHA;
			} else {
				blend_op |= SDE_BLEND_BG_INV_ALPHA;
			}
		}
		break;
	default:
		/* do nothing: keep the opaque default */
		break;
	}

	lm->ops.setup_blend_config(lm, pstate->stage, fg_alpha,
						bg_alpha, blend_op);
	SDE_DEBUG(
		"format: %4.4s, alpha_enable %u fg alpha:0x%x bg alpha:0x%x blend_op:0x%x\n",
		(char *) &format->base.pixel_format,
		format->alpha_enable, fg_alpha, bg_alpha, blend_op);
}
721
/**
 * _sde_crtc_setup_dim_layer_cfg - split a dim layer across the crtc mixers
 * @crtc: Pointer to drm crtc
 * @sde_crtc: Pointer to sde crtc
 * @mixer: Pointer to array of crtc mixers
 * @dim_layer: Pointer to the dim layer configuration to program
 */
static void _sde_crtc_setup_dim_layer_cfg(struct drm_crtc *crtc,
		struct sde_crtc *sde_crtc, struct sde_crtc_mixer *mixer,
		struct sde_hw_dim_layer *dim_layer)
{
	struct sde_crtc_state *cstate;
	struct sde_hw_mixer *lm;
	struct sde_hw_dim_layer split_dim_layer;
	int i;

	/* zero-sized dim layer: nothing to program */
	if (!dim_layer->rect.w || !dim_layer->rect.h) {
		SDE_DEBUG("empty dim_layer\n");
		return;
	}

	cstate = to_sde_crtc_state(crtc->state);

	SDE_DEBUG("dim_layer - flags:%d, stage:%d\n",
			dim_layer->flags, dim_layer->stage);

	split_dim_layer.stage = dim_layer->stage;
	split_dim_layer.color_fill = dim_layer->color_fill;

	/*
	 * traverse through the layer mixers attached to crtc and find the
	 * intersecting dim layer rect in each LM and program accordingly.
	 */
	for (i = 0; i < sde_crtc->num_mixers; i++) {
		split_dim_layer.flags = dim_layer->flags;

		sde_kms_rect_intersect(&cstate->lm_bounds[i], &dim_layer->rect,
					&split_dim_layer.rect);
		if (sde_kms_rect_is_null(&split_dim_layer.rect)) {
			/*
			 * no extra programming required for non-intersecting
			 * layer mixers with INCLUSIVE dim layer
			 */
			if (split_dim_layer.flags & SDE_DRM_DIM_LAYER_INCLUSIVE)
				continue;

			/*
			 * program the other non-intersecting layer mixers with
			 * INCLUSIVE dim layer of full size for uniformity
			 * with EXCLUSIVE dim layer config.
			 */
			split_dim_layer.flags &= ~SDE_DRM_DIM_LAYER_EXCLUSIVE;
			split_dim_layer.flags |= SDE_DRM_DIM_LAYER_INCLUSIVE;
			memcpy(&split_dim_layer.rect, &cstate->lm_bounds[i],
					sizeof(split_dim_layer.rect));

		} else {
			/*
			 * convert to LM-local coordinates; only the horizontal
			 * offset is removed here. NOTE(review): this assumes
			 * mixers are laid out side by side (no vertical
			 * offset) — confirm against lm_bounds setup.
			 */
			split_dim_layer.rect.x =
					split_dim_layer.rect.x -
						cstate->lm_bounds[i].x;
		}

		SDE_DEBUG("split_dim_layer - LM:%d, rect:{%d,%d,%d,%d}}\n",
			i, split_dim_layer.rect.x, split_dim_layer.rect.y,
			split_dim_layer.rect.w, split_dim_layer.rect.h);

		lm = mixer[i].hw_lm;
		mixer[i].mixer_op_mode |= 1 << split_dim_layer.stage;
		lm->ops.setup_dim_layer(lm, &split_dim_layer);
	}
}
786
Lloyd Atkinson8ba47032017-03-22 17:13:32 -0400787void sde_crtc_get_crtc_roi(struct drm_crtc_state *state,
788 const struct sde_rect **crtc_roi)
789{
790 struct sde_crtc_state *crtc_state;
791
792 if (!state || !crtc_roi)
793 return;
794
795 crtc_state = to_sde_crtc_state(state);
796 *crtc_roi = &crtc_state->crtc_roi;
797}
798
/**
 * _sde_crtc_set_roi_v1 - copy and validate a user-provided roi_v1 list
 * @state: Pointer to drm crtc state
 * @usr_ptr: Userspace pointer to struct sde_drm_roi_v1; NULL clears rois
 * return: 0 if success; error code otherwise
 */
static int _sde_crtc_set_roi_v1(struct drm_crtc_state *state,
		void __user *usr_ptr)
{
	struct drm_crtc *crtc;
	struct sde_crtc_state *cstate;
	struct sde_drm_roi_v1 roi_v1;
	int i;

	if (!state) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}

	cstate = to_sde_crtc_state(state);
	crtc = cstate->base.crtc;

	/* start from a clean list; previous rois do not carry over */
	memset(&cstate->user_roi_list, 0, sizeof(cstate->user_roi_list));

	if (!usr_ptr) {
		SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
		return 0;
	}

	if (copy_from_user(&roi_v1, usr_ptr, sizeof(roi_v1))) {
		SDE_ERROR("crtc%d: failed to copy roi_v1 data\n", DRMID(crtc));
		return -EINVAL;
	}

	SDE_DEBUG("crtc%d: num_rects %d\n", DRMID(crtc), roi_v1.num_rects);

	if (roi_v1.num_rects == 0) {
		SDE_DEBUG("crtc%d: rois cleared\n", DRMID(crtc));
		return 0;
	}

	/* bound the untrusted count before using the rect array */
	if (roi_v1.num_rects > SDE_MAX_ROI_V1) {
		SDE_ERROR("crtc%d: too many rects specified: %d\n", DRMID(crtc),
				roi_v1.num_rects);
		return -EINVAL;
	}

	cstate->user_roi_list.num_rects = roi_v1.num_rects;
	for (i = 0; i < roi_v1.num_rects; ++i) {
		cstate->user_roi_list.roi[i] = roi_v1.roi[i];
		SDE_DEBUG("crtc%d: roi%d: roi (%d,%d) (%d,%d)\n",
				DRMID(crtc), i,
				cstate->user_roi_list.roi[i].x1,
				cstate->user_roi_list.roi[i].y1,
				cstate->user_roi_list.roi[i].x2,
				cstate->user_roi_list.roi[i].y2);
	}

	return 0;
}
853
Ingrid Gallardo83532222017-06-02 16:48:51 -0700854static bool _sde_crtc_setup_is_3dmux_dsc(struct drm_crtc_state *state)
855{
856 int i;
857 struct sde_crtc_state *cstate;
858 bool is_3dmux_dsc = false;
859
860 cstate = to_sde_crtc_state(state);
861
862 for (i = 0; i < cstate->num_connectors; i++) {
863 struct drm_connector *conn = cstate->connectors[i];
864
865 if (sde_connector_get_topology_name(conn) ==
866 SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC)
867 is_3dmux_dsc = true;
868 }
869
870 return is_3dmux_dsc;
871}
872
/**
 * _sde_crtc_set_crtc_roi - derive the crtc-level ROI from the user ROI list
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 *
 * Validates that at most one connector with ROIs is attached and that the
 * connector ROIs match the crtc ROIs exactly (no scaling), then merges the
 * user rectangles into the single bounding crtc_roi.
 *
 * Return: 0 on success, -EINVAL on invalid args or unsupported configuration.
 */
static int _sde_crtc_set_crtc_roi(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct drm_connector *conn;
	struct drm_connector_state *conn_state;
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	struct sde_rect *crtc_roi;
	int i, num_attached_conns = 0;

	if (!crtc || !state)
		return -EINVAL;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);
	crtc_roi = &crtc_state->crtc_roi;

	for_each_connector_in_state(state->state, conn, conn_state, i) {
		struct sde_connector_state *sde_conn_state;

		/* only consider connectors driven by this crtc */
		if (!conn_state || conn_state->crtc != crtc)
			continue;

		/* ROI handling assumes a single attached connector */
		if (num_attached_conns) {
			SDE_ERROR(
				"crtc%d: unsupported: roi on crtc w/ >1 connectors\n",
					DRMID(crtc));
			return -EINVAL;
		}
		++num_attached_conns;

		sde_conn_state = to_sde_connector_state(conn_state);

		/*
		 * current driver only supports same connector and crtc size,
		 * but if support for different sizes is added, driver needs
		 * to check the connector roi here to make sure is full screen
		 * for dsc 3d-mux topology that doesn't support partial update.
		 */
		if (memcmp(&sde_conn_state->rois, &crtc_state->user_roi_list,
				sizeof(crtc_state->user_roi_list))) {
			SDE_ERROR("%s: crtc -> conn roi scaling unsupported\n",
					sde_crtc->name);
			return -EINVAL;
		}
	}

	/* collapse the per-rect user list into one bounding rectangle */
	sde_kms_rect_merge_rectangles(&crtc_state->user_roi_list, crtc_roi);

	SDE_DEBUG("%s: crtc roi (%d,%d,%d,%d)\n", sde_crtc->name,
			crtc_roi->x, crtc_roi->y, crtc_roi->w, crtc_roi->h);

	return 0;
}
927
/**
 * _sde_crtc_check_autorefresh - reject partial update combined with autorefresh
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 *
 * Partial update (non-null crtc_roi) and connector autorefresh are mutually
 * exclusive; fail the atomic check if both are requested.
 *
 * Return: 0 if compatible (or no ROI active), -EINVAL otherwise.
 */
static int _sde_crtc_check_autorefresh(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	struct drm_connector *conn;
	struct drm_connector_state *conn_state;
	int i;

	if (!crtc || !state)
		return -EINVAL;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);

	/* full-frame update: autorefresh is fine, nothing to check */
	if (sde_kms_rect_is_null(&crtc_state->crtc_roi))
		return 0;

	/* partial update active, check if autorefresh is also requested */
	for_each_connector_in_state(state->state, conn, conn_state, i) {
		uint64_t autorefresh;

		if (!conn_state || conn_state->crtc != crtc)
			continue;

		autorefresh = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_AUTOREFRESH);
		if (autorefresh) {
			SDE_ERROR(
				"%s: autorefresh & partial crtc roi incompatible %llu\n",
					sde_crtc->name, autorefresh);
			return -EINVAL;
		}
	}

	return 0;
}
965
/**
 * _sde_crtc_set_lm_roi - compute one layer mixer's ROI from the crtc ROI
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 * @lm_idx: Index of the layer mixer to program, bounded by lm_bounds size
 *
 * The LM ROI is either the full mixer bounds (no partial update) or the
 * intersection of the crtc ROI with this mixer's bounds.
 *
 * Return: 0 on success, -EINVAL on invalid args or unsupported partial
 * update with dest scaler / 3d-mux DSC topologies.
 */
static int _sde_crtc_set_lm_roi(struct drm_crtc *crtc,
		struct drm_crtc_state *state, int lm_idx)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	const struct sde_rect *crtc_roi;
	const struct sde_rect *lm_bounds;
	struct sde_rect *lm_roi;

	/*
	 * ARRAY_SIZE here is a compile-time sizeof on the pointed-to type;
	 * crtc_state is not dereferenced before assignment below.
	 */
	if (!crtc || !state || lm_idx >= ARRAY_SIZE(crtc_state->lm_bounds))
		return -EINVAL;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);
	crtc_roi = &crtc_state->crtc_roi;
	lm_bounds = &crtc_state->lm_bounds[lm_idx];
	lm_roi = &crtc_state->lm_roi[lm_idx];

	/* null crtc ROI means full-frame update: use the whole mixer */
	if (sde_kms_rect_is_null(crtc_roi))
		memcpy(lm_roi, lm_bounds, sizeof(*lm_roi));
	else
		sde_kms_rect_intersect(crtc_roi, lm_bounds, lm_roi);

	SDE_DEBUG("%s: lm%d roi (%d,%d,%d,%d)\n", sde_crtc->name, lm_idx,
			lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);

	/*
	 * partial update is not supported with 3dmux dsc or dest scaler.
	 * hence, crtc roi must match the mixer dimensions.
	 */
	if (crtc_state->num_ds_enabled ||
		_sde_crtc_setup_is_3dmux_dsc(state)) {
		if (memcmp(lm_roi, lm_bounds, sizeof(struct sde_rect))) {
			SDE_ERROR("Unsupported: Dest scaler/3d mux DSC + PU\n");
			return -EINVAL;
		}
	}

	/* if any dimension is zero, clear all dimensions for clarity */
	if (sde_kms_rect_is_null(lm_roi))
		memset(lm_roi, 0, sizeof(*lm_roi));

	return 0;
}
1010
/**
 * _sde_crtc_get_displays_affected - bitmask of physical displays being updated
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 *
 * For pingpong split (one LM feeding two displays) the single ROI's horizontal
 * position against the split line determines left/right/both. Otherwise each
 * non-null LM ROI maps 1:1 to a display bit.
 *
 * Return: bitmask with BIT(i) set for each affected display.
 */
static u32 _sde_crtc_get_displays_affected(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	u32 disp_bitmask = 0;
	int i;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);

	/* pingpong split: one ROI, one LM, two physical displays */
	if (crtc_state->is_ppsplit) {
		u32 lm_split_width = crtc_state->lm_bounds[0].w / 2;
		struct sde_rect *roi = &crtc_state->lm_roi[0];

		if (sde_kms_rect_is_null(roi))
			disp_bitmask = 0;
		else if ((u32)roi->x + (u32)roi->w <= lm_split_width)
			disp_bitmask = BIT(0); /* left only */
		else if (roi->x >= lm_split_width)
			disp_bitmask = BIT(1); /* right only */
		else
			disp_bitmask = BIT(0) | BIT(1); /* left and right */
	} else {
		/* non-ppsplit: one display per mixer with a non-null ROI */
		for (i = 0; i < sde_crtc->num_mixers; i++) {
			if (!sde_kms_rect_is_null(&crtc_state->lm_roi[i]))
				disp_bitmask |= BIT(i);
		}
	}

	SDE_DEBUG("affected displays 0x%x\n", disp_bitmask);

	return disp_bitmask;
}
1046
/**
 * _sde_crtc_check_rois_centered_and_symmetric - validate ROI split constraints
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 *
 * Enforces two hardware constraints:
 * - pingpong split: an ROI spanning both displays must be centered on the
 *   panel split boundary (equal width on each side).
 * - dual LM: when both LM ROIs are active they must be equal width and
 *   exactly adjacent (ROI0 ends where ROI1 begins).
 *
 * Return: 0 if the ROIs are acceptable, -EINVAL otherwise.
 */
static int _sde_crtc_check_rois_centered_and_symmetric(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	const struct sde_rect *roi[CRTC_DUAL_MIXERS];

	if (!crtc || !state)
		return -EINVAL;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);

	if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
		SDE_ERROR("%s: unsupported number of mixers: %d\n",
				sde_crtc->name, sde_crtc->num_mixers);
		return -EINVAL;
	}

	/*
	 * If using pingpong split: one ROI, one LM, two physical displays
	 * then the ROI must be centered on the panel split boundary and
	 * be of equal width across the split.
	 */
	if (crtc_state->is_ppsplit) {
		u16 panel_split_width;
		u32 display_mask;

		roi[0] = &crtc_state->lm_roi[0];

		if (sde_kms_rect_is_null(roi[0]))
			return 0;

		/* centering only matters when both displays are updated */
		display_mask = _sde_crtc_get_displays_affected(crtc, state);
		if (display_mask != (BIT(0) | BIT(1)))
			return 0;

		panel_split_width = crtc_state->lm_bounds[0].w / 2;
		if (roi[0]->x + roi[0]->w / 2 != panel_split_width) {
			SDE_ERROR("%s: roi x %d w %d split %d\n",
					sde_crtc->name, roi[0]->x, roi[0]->w,
					panel_split_width);
			return -EINVAL;
		}

		return 0;
	}

	/*
	 * On certain HW, if using 2 LM, ROIs must be split evenly between the
	 * LMs and be of equal width.
	 */
	if (sde_crtc->num_mixers < 2)
		return 0;

	roi[0] = &crtc_state->lm_roi[0];
	roi[1] = &crtc_state->lm_roi[1];

	/* if one of the roi is null it's a left/right-only update */
	if (sde_kms_rect_is_null(roi[0]) || sde_kms_rect_is_null(roi[1]))
		return 0;

	/* check lm rois are equal width & first roi ends at 2nd roi */
	if (roi[0]->x + roi[0]->w != roi[1]->x || roi[0]->w != roi[1]->w) {
		SDE_ERROR(
			"%s: rois not centered and symmetric: roi0 x %d w %d roi1 x %d w %d\n",
				sde_crtc->name, roi[0]->x, roi[0]->w,
				roi[1]->x, roi[1]->w);
		return -EINVAL;
	}

	return 0;
}
1120
/**
 * _sde_crtc_check_planes_within_crtc_roi - ensure planes fit in the crtc ROI
 * @crtc: Pointer to drm crtc
 * @state: Pointer to drm crtc state
 *
 * Return: 0 if all plane destinations are inside the crtc ROI (or no ROI is
 * set), -E2BIG if a plane sticks out, negative errno on plane state errors.
 */
static int _sde_crtc_check_planes_within_crtc_roi(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	const struct sde_rect *crtc_roi;
	const struct drm_plane_state *pstate;
	struct drm_plane *plane;

	if (!crtc || !state)
		return -EINVAL;

	/*
	 * Reject commit if a Plane CRTC destination coordinates fall outside
	 * the partial CRTC ROI. LM output is determined via connector ROIs,
	 * if they are specified, not Plane CRTC ROIs.
	 */

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(state);
	crtc_roi = &crtc_state->crtc_roi;

	/* no partial update: planes are bounded by the full crtc instead */
	if (sde_kms_rect_is_null(crtc_roi))
		return 0;

	drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
		struct sde_rect plane_roi, intersection;

		if (IS_ERR_OR_NULL(pstate)) {
			int rc = PTR_ERR(pstate);

			SDE_ERROR("%s: failed to get plane%d state, %d\n",
					sde_crtc->name, plane->base.id, rc);
			return rc;
		}

		plane_roi.x = pstate->crtc_x;
		plane_roi.y = pstate->crtc_y;
		plane_roi.w = pstate->crtc_w;
		plane_roi.h = pstate->crtc_h;
		/* plane is inside the ROI iff intersecting changes nothing */
		sde_kms_rect_intersect(crtc_roi, &plane_roi, &intersection);
		if (!sde_kms_rect_is_equal(&plane_roi, &intersection)) {
			SDE_ERROR(
				"%s: plane%d crtc roi (%d,%d,%d,%d) outside crtc roi (%d,%d,%d,%d)\n",
					sde_crtc->name, plane->base.id,
					plane_roi.x, plane_roi.y,
					plane_roi.w, plane_roi.h,
					crtc_roi->x, crtc_roi->y,
					crtc_roi->w, crtc_roi->h);
			return -E2BIG;
		}
	}

	return 0;
}
1176
1177static int _sde_crtc_check_rois(struct drm_crtc *crtc,
1178 struct drm_crtc_state *state)
1179{
1180 struct sde_crtc *sde_crtc;
1181 int lm_idx;
1182 int rc;
1183
1184 if (!crtc || !state)
1185 return -EINVAL;
1186
1187 sde_crtc = to_sde_crtc(crtc);
1188
1189 rc = _sde_crtc_set_crtc_roi(crtc, state);
1190 if (rc)
1191 return rc;
1192
Lloyd Atkinson77382202017-02-01 14:59:43 -05001193 rc = _sde_crtc_check_autorefresh(crtc, state);
1194 if (rc)
1195 return rc;
1196
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04001197 for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
1198 rc = _sde_crtc_set_lm_roi(crtc, state, lm_idx);
1199 if (rc)
1200 return rc;
1201 }
1202
1203 rc = _sde_crtc_check_rois_centered_and_symmetric(crtc, state);
1204 if (rc)
1205 return rc;
1206
1207 rc = _sde_crtc_check_planes_within_crtc_roi(crtc, state);
1208 if (rc)
1209 return rc;
1210
1211 return 0;
1212}
1213
/**
 * _sde_crtc_program_lm_output_roi - push per-mixer output ROIs to hardware
 * @crtc: Pointer to drm crtc
 *
 * Skips mixers with a null ROI; lm_horiz_position only advances for
 * programmed mixers so the first active mixer is always "left".
 */
static void _sde_crtc_program_lm_output_roi(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *crtc_state;
	const struct sde_rect *lm_roi;
	struct sde_hw_mixer *hw_lm;
	int lm_idx, lm_horiz_position;

	if (!crtc)
		return;

	sde_crtc = to_sde_crtc(crtc);
	crtc_state = to_sde_crtc_state(crtc->state);

	lm_horiz_position = 0;
	for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
		struct sde_hw_mixer_cfg cfg;

		lm_roi = &crtc_state->lm_roi[lm_idx];
		hw_lm = sde_crtc->mixers[lm_idx].hw_lm;

		SDE_EVT32(DRMID(crtc_state->base.crtc), lm_idx,
			lm_roi->x, lm_roi->y, lm_roi->w, lm_roi->h);

		/* mixer not active this frame: leave HW untouched */
		if (sde_kms_rect_is_null(lm_roi))
			continue;

		/* cache the programmed config on the hw_lm object as well */
		hw_lm->cfg.out_width = lm_roi->w;
		hw_lm->cfg.out_height = lm_roi->h;
		hw_lm->cfg.right_mixer = lm_horiz_position;

		cfg.out_width = lm_roi->w;
		cfg.out_height = lm_roi->h;
		cfg.right_mixer = lm_horiz_position++;
		cfg.flags = 0;
		hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
	}
}
1252
/**
 * _sde_crtc_blend_setup_mixer - stage planes and blend config on the mixers
 * @crtc: Pointer to drm crtc
 * @sde_crtc: Pointer to sde crtc (must match @crtc)
 * @mixer: Array of crtc mixer structures to populate
 *
 * Walks every plane on the crtc, accumulating the stream-buffer prefill and
 * flush masks, filling stage_cfg per blend stage/z-order, and applying blend
 * and dim-layer configuration. Finishes by programming LM output ROIs.
 */
static void _sde_crtc_blend_setup_mixer(struct drm_crtc *crtc,
	struct sde_crtc *sde_crtc, struct sde_crtc_mixer *mixer)
{
	struct drm_plane *plane;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct sde_crtc_state *cstate;
	struct sde_plane_state *pstate = NULL;
	struct sde_format *format;
	struct sde_hw_ctl *ctl;
	struct sde_hw_mixer *lm;
	struct sde_hw_stage_cfg *stage_cfg;
	struct sde_rect plane_crtc_roi;

	u32 flush_mask, flush_sbuf;
	uint32_t stage_idx, lm_idx;
	/* per-stage z-order counter: slot within each blend stage */
	int zpos_cnt[SDE_STAGE_MAX + 1] = { 0 };
	int i;
	bool bg_alpha_enable = false;
	u32 prefill = 0;

	if (!sde_crtc || !mixer) {
		SDE_ERROR("invalid sde_crtc or mixer\n");
		return;
	}

	ctl = mixer->hw_ctl;
	lm = mixer->hw_lm;
	stage_cfg = &sde_crtc->stage_cfg;
	cstate = to_sde_crtc_state(crtc->state);

	/* reset per-frame accumulators before scanning planes */
	cstate->sbuf_prefill_line = 0;
	sde_crtc->sbuf_flush_mask = 0x0;

	drm_atomic_crtc_for_each_plane(plane, crtc) {
		state = plane->state;
		if (!state)
			continue;

		plane_crtc_roi.x = state->crtc_x;
		plane_crtc_roi.y = state->crtc_y;
		plane_crtc_roi.w = state->crtc_w;
		plane_crtc_roi.h = state->crtc_h;

		pstate = to_sde_plane_state(state);
		fb = state->fb;

		/* track worst-case rotator prefill across all planes */
		prefill = sde_plane_rot_calc_prefill(plane);
		if (prefill > cstate->sbuf_prefill_line)
			cstate->sbuf_prefill_line = prefill;

		sde_plane_get_ctl_flush(plane, ctl, &flush_mask, &flush_sbuf);

		/* save sbuf flush value for later */
		sde_crtc->sbuf_flush_mask |= flush_sbuf;

		SDE_DEBUG("crtc %d stage:%d - plane %d sspp %d fb %d\n",
				crtc->base.id,
				pstate->stage,
				plane->base.id,
				sde_plane_pipe(plane) - SSPP_VIG0,
				state->fb ? state->fb->base.id : -1);

		format = to_sde_format(msm_framebuffer_format(pstate->base.fb));
		if (!format) {
			SDE_ERROR("invalid format\n");
			return;
		}

		/* an alpha-capable base layer changes op-mode handling below */
		if (pstate->stage == SDE_STAGE_BASE && format->alpha_enable)
			bg_alpha_enable = true;

		SDE_EVT32(DRMID(crtc), DRMID(plane),
				state->fb ? state->fb->base.id : -1,
				state->src_x >> 16, state->src_y >> 16,
				state->src_w >> 16, state->src_h >> 16,
				state->crtc_x, state->crtc_y,
				state->crtc_w, state->crtc_h,
				flush_sbuf != 0);

		/* assign the next free slot within this plane's blend stage */
		stage_idx = zpos_cnt[pstate->stage]++;
		stage_cfg->stage[pstate->stage][stage_idx] =
					sde_plane_pipe(plane);
		stage_cfg->multirect_index[pstate->stage][stage_idx] =
					pstate->multirect_index;

		SDE_EVT32(DRMID(crtc), DRMID(plane), stage_idx,
			sde_plane_pipe(plane) - SSPP_VIG0, pstate->stage,
			pstate->multirect_index, pstate->multirect_mode,
			format->base.pixel_format, fb ? fb->modifier[0] : 0);

		/* blend config update */
		for (lm_idx = 0; lm_idx < sde_crtc->num_mixers; lm_idx++) {
			_sde_crtc_setup_blend_cfg(mixer + lm_idx, pstate,
								format);
			mixer[lm_idx].flush_mask |= flush_mask;

			if (bg_alpha_enable && !format->alpha_enable)
				mixer[lm_idx].mixer_op_mode = 0;
			else
				mixer[lm_idx].mixer_op_mode |=
						1 << pstate->stage;
		}
	}

	/* apply any dim layers configured in the crtc state */
	if (lm && lm->ops.setup_dim_layer) {
		cstate = to_sde_crtc_state(crtc->state);
		for (i = 0; i < cstate->num_dim_layers; i++)
			_sde_crtc_setup_dim_layer_cfg(crtc, sde_crtc,
					mixer, &cstate->dim_layer[i]);
	}

	_sde_crtc_program_lm_output_roi(crtc);
}
1367
/**
 * _sde_crtc_swap_mixers_for_right_partial_update - LM swap for DSC merge
 * @crtc: Pointer to drm crtc
 *
 * Only applies to dual-mixer crtcs driven by a DSC-merge encoder; see the
 * block comment below for why the swap (rather than re-mastering PP) is used.
 */
static void _sde_crtc_swap_mixers_for_right_partial_update(
		struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;
	struct drm_encoder *drm_enc;
	bool is_right_only;
	bool encoder_in_dsc_merge = false;

	if (!crtc || !crtc->state)
		return;

	sde_crtc = to_sde_crtc(crtc);
	cstate = to_sde_crtc_state(crtc->state);

	/* swap logic is only meaningful with exactly two mixers */
	if (sde_crtc->num_mixers != CRTC_DUAL_MIXERS)
		return;

	drm_for_each_encoder(drm_enc, crtc->dev) {
		if (drm_enc->crtc == crtc &&
				sde_encoder_is_dsc_merge(drm_enc)) {
			encoder_in_dsc_merge = true;
			break;
		}
	}

	/**
	 * For right-only partial update with DSC merge, we swap LM0 & LM1.
	 * This is due to two reasons:
	 * - On 8996, there is a DSC HW requirement that in DSC Merge Mode,
	 *   the left DSC must be used, right DSC cannot be used alone.
	 *   For right-only partial update, this means swap layer mixers to map
	 *   Left LM to Right INTF. On later HW this was relaxed.
	 * - In DSC Merge mode, the physical encoder has already registered
	 *   PP0 as the master, to switch to right-only we would have to
	 *   reprogram to be driven by PP1 instead.
	 * To support both cases, we prefer to support the mixer swap solution.
	 */
	if (!encoder_in_dsc_merge)
		return;

	/* right-only means LM0 has a null ROI while LM1 does not */
	is_right_only = sde_kms_rect_is_null(&cstate->lm_roi[0]) &&
			!sde_kms_rect_is_null(&cstate->lm_roi[1]);

	if (is_right_only && !sde_crtc->mixers_swapped) {
		/* right-only update swap mixers */
		swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
		sde_crtc->mixers_swapped = true;
	} else if (!is_right_only && sde_crtc->mixers_swapped) {
		/* left-only or full update, swap back */
		swap(sde_crtc->mixers[0], sde_crtc->mixers[1]);
		sde_crtc->mixers_swapped = false;
	}

	SDE_DEBUG("%s: right_only %d swapped %d, mix0->lm%d, mix1->lm%d\n",
			sde_crtc->name, is_right_only, sde_crtc->mixers_swapped,
			sde_crtc->mixers[0].hw_lm->idx - LM_0,
			sde_crtc->mixers[1].hw_lm->idx - LM_0);
	SDE_EVT32(DRMID(crtc), is_right_only, sde_crtc->mixers_swapped,
			sde_crtc->mixers[0].hw_lm->idx - LM_0,
			sde_crtc->mixers[1].hw_lm->idx - LM_0);
}
1430
/**
 * _sde_crtc_blend_setup - configure crtc mixers
 * @crtc: Pointer to drm crtc
 *
 * Clears previous blend/dim-layer state, performs the DSC-merge mixer swap
 * if needed, stages all planes, then programs alpha-out, flush masks and
 * blend stages per mixer, skipping mixers with a null ROI.
 */
static void _sde_crtc_blend_setup(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *sde_crtc_state;
	struct sde_crtc_mixer *mixer;
	struct sde_hw_ctl *ctl;
	struct sde_hw_mixer *lm;

	int i;

	if (!crtc)
		return;

	sde_crtc = to_sde_crtc(crtc);
	sde_crtc_state = to_sde_crtc_state(crtc->state);
	mixer = sde_crtc->mixers;

	SDE_DEBUG("%s\n", sde_crtc->name);

	if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
		SDE_ERROR("invalid number mixers: %d\n", sde_crtc->num_mixers);
		return;
	}

	/* reset every mixer's soft state and HW blend stages */
	for (i = 0; i < sde_crtc->num_mixers; i++) {
		if (!mixer[i].hw_lm || !mixer[i].hw_ctl) {
			SDE_ERROR("invalid lm or ctl assigned to mixer\n");
			return;
		}
		mixer[i].mixer_op_mode = 0;
		mixer[i].flush_mask = 0;
		if (mixer[i].hw_ctl->ops.clear_all_blendstages)
			mixer[i].hw_ctl->ops.clear_all_blendstages(
					mixer[i].hw_ctl);

		/* clear dim_layer settings */
		lm = mixer[i].hw_lm;
		if (lm->ops.clear_dim_layer)
			lm->ops.clear_dim_layer(lm);
	}

	/* must run before staging so planes land on the swapped mixers */
	_sde_crtc_swap_mixers_for_right_partial_update(crtc);

	/* initialize stage cfg */
	memset(&sde_crtc->stage_cfg, 0, sizeof(struct sde_hw_stage_cfg));

	_sde_crtc_blend_setup_mixer(crtc, sde_crtc, mixer);

	for (i = 0; i < sde_crtc->num_mixers; i++) {
		const struct sde_rect *lm_roi = &sde_crtc_state->lm_roi[i];

		ctl = mixer[i].hw_ctl;
		lm = mixer[i].hw_lm;

		/* null ROI: keep this mixer's flush mask at zero */
		if (sde_kms_rect_is_null(lm_roi)) {
			SDE_DEBUG(
				"%s: lm%d leave ctl%d mask 0 since null roi\n",
					sde_crtc->name, lm->idx - LM_0,
					ctl->idx - CTL_0);
			continue;
		}

		lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);

		/* remember plane-only flush bits before adding the LM bit */
		mixer[i].pipe_mask = mixer[i].flush_mask;
		mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
			mixer[i].hw_lm->idx);

		/* stage config flush mask */
		ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);

		SDE_DEBUG("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
			mixer[i].hw_lm->idx - LM_0,
			mixer[i].mixer_op_mode,
			ctl->idx - CTL_0,
			mixer[i].flush_mask);

		ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
			&sde_crtc->stage_cfg);
	}

	_sde_crtc_program_lm_output_roi(crtc);
}
1518
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001519static int _sde_crtc_find_plane_fb_modes(struct drm_crtc_state *state,
1520 uint32_t *fb_ns,
1521 uint32_t *fb_sec,
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001522 uint32_t *fb_sec_dir)
1523{
1524 struct drm_plane *plane;
1525 const struct drm_plane_state *pstate;
1526 struct sde_plane_state *sde_pstate;
1527 uint32_t mode = 0;
1528 int rc;
1529
1530 if (!state) {
1531 SDE_ERROR("invalid state\n");
1532 return -EINVAL;
1533 }
1534
1535 *fb_ns = 0;
1536 *fb_sec = 0;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001537 *fb_sec_dir = 0;
1538 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
1539 if (IS_ERR_OR_NULL(pstate)) {
1540 rc = PTR_ERR(pstate);
1541 SDE_ERROR("crtc%d failed to get plane%d state%d\n",
1542 state->crtc->base.id,
1543 plane->base.id, rc);
1544 return rc;
1545 }
1546 sde_pstate = to_sde_plane_state(pstate);
1547 mode = sde_plane_get_property(sde_pstate,
1548 PLANE_PROP_FB_TRANSLATION_MODE);
1549 switch (mode) {
1550 case SDE_DRM_FB_NON_SEC:
1551 (*fb_ns)++;
1552 break;
1553 case SDE_DRM_FB_SEC:
1554 (*fb_sec)++;
1555 break;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001556 case SDE_DRM_FB_SEC_DIR_TRANS:
1557 (*fb_sec_dir)++;
1558 break;
1559 default:
1560 SDE_ERROR("Error: Plane[%d], fb_trans_mode:%d",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001561 plane->base.id, mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001562 return -EINVAL;
1563 }
1564 }
1565 return 0;
1566}
1567
1568/**
1569 * sde_crtc_get_secure_transition_ops - determines the operations that
1570 * need to be performed before transitioning to secure state
1571 * This function should be called after swapping the new state
1572 * @crtc: Pointer to drm crtc structure
1573 * Returns the bitmask of operations need to be performed, -Error in
1574 * case of error cases
1575 */
1576int sde_crtc_get_secure_transition_ops(struct drm_crtc *crtc,
1577 struct drm_crtc_state *old_crtc_state,
1578 bool old_valid_fb)
1579{
1580 struct drm_plane *plane;
1581 struct drm_encoder *encoder;
1582 struct sde_crtc *sde_crtc;
1583 struct sde_crtc_state *cstate;
1584 struct sde_crtc_smmu_state_data *smmu_state;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001585 uint32_t translation_mode = 0, secure_level;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001586 int ops = 0;
1587 bool post_commit = false;
1588
1589 if (!crtc || !crtc->state) {
1590 SDE_ERROR("invalid crtc\n");
1591 return -EINVAL;
1592 }
1593
1594 sde_crtc = to_sde_crtc(crtc);
1595 cstate = to_sde_crtc_state(crtc->state);
1596 smmu_state = &sde_crtc->smmu_state;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001597 secure_level = sde_crtc_get_secure_level(crtc, crtc->state);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001598
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001599 SDE_DEBUG("crtc%d, secure_level%d old_valid_fb%d\n",
1600 crtc->base.id, secure_level, old_valid_fb);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001601
1602 /**
1603 * SMMU operations need to be delayed in case of
1604 * video mode panels when switching back to non_secure
1605 * mode
1606 */
1607 drm_for_each_encoder(encoder, crtc->dev) {
1608 if (encoder->crtc != crtc)
1609 continue;
1610
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05301611 post_commit &= sde_encoder_check_mode(encoder,
1612 MSM_DISPLAY_CAP_VID_MODE);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001613 }
1614
1615 drm_atomic_crtc_for_each_plane(plane, crtc) {
1616 if (!plane->state)
1617 continue;
1618
1619 translation_mode = sde_plane_get_property(
1620 to_sde_plane_state(plane->state),
1621 PLANE_PROP_FB_TRANSLATION_MODE);
1622 if (translation_mode > SDE_DRM_FB_SEC_DIR_TRANS) {
1623 SDE_ERROR("crtc%d, invalid translation_mode%d\n",
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001624 crtc->base.id, translation_mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001625 return -EINVAL;
1626 }
1627
1628 /**
1629 * we can break if we find sec_fir or non_sec_dir
1630 * plane
1631 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001632 if (translation_mode == SDE_DRM_FB_SEC_DIR_TRANS)
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001633 break;
1634 }
1635
1636 switch (translation_mode) {
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001637 case SDE_DRM_FB_SEC_DIR_TRANS:
1638 /* secure display usecase */
1639 if ((smmu_state->state == ATTACHED) &&
1640 (secure_level == SDE_DRM_SEC_ONLY)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001641 smmu_state->state = DETACH_ALL_REQ;
1642 smmu_state->transition_type = PRE_COMMIT;
1643 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
1644 if (old_valid_fb) {
1645 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1646 SDE_KMS_OPS_CLEANUP_PLANE_FB);
1647 }
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001648 /* secure camera usecase */
1649 } else if (smmu_state->state == ATTACHED) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001650 smmu_state->state = DETACH_SEC_REQ;
1651 smmu_state->transition_type = PRE_COMMIT;
1652 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
1653 }
1654 break;
1655 case SDE_DRM_FB_SEC:
1656 case SDE_DRM_FB_NON_SEC:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001657 if ((smmu_state->state == DETACHED_SEC) ||
1658 (smmu_state->state == DETACH_SEC_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001659 smmu_state->state = ATTACH_SEC_REQ;
1660 smmu_state->transition_type = post_commit ?
1661 POST_COMMIT : PRE_COMMIT;
1662 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001663 if (old_valid_fb)
1664 ops |= SDE_KMS_OPS_WAIT_FOR_TX_DONE;
1665 } else if ((smmu_state->state == DETACHED) ||
1666 (smmu_state->state == DETACH_ALL_REQ)) {
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001667 smmu_state->state = ATTACH_ALL_REQ;
1668 smmu_state->transition_type = post_commit ?
1669 POST_COMMIT : PRE_COMMIT;
Veera Sundaram Sankaran47e718f2017-09-13 16:47:23 -07001670 ops |= SDE_KMS_OPS_CRTC_SECURE_STATE_CHANGE;
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001671 if (old_valid_fb)
1672 ops |= (SDE_KMS_OPS_WAIT_FOR_TX_DONE |
1673 SDE_KMS_OPS_CLEANUP_PLANE_FB);
1674 }
1675 break;
1676 default:
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001677 SDE_ERROR("invalid plane fb_mode:%d\n", translation_mode);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001678 ops = 0;
1679 return -EINVAL;
1680 }
1681
1682 SDE_DEBUG("SMMU State:%d, type:%d ops:%x\n", smmu_state->state,
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07001683 smmu_state->transition_type, ops);
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07001684 return ops;
1685}
1686
/**
 * _sde_crtc_scm_call - makes secure channel call to switch the VMIDs
 * @vmid: switch the stage 2 translation to this VMID.
 *
 * Builds a kernel-allocated table of secure SID masks, flushes it out of
 * the CPU caches so the secure world sees coherent data, and issues the
 * SCM call that re-assigns stage-2 translation of the MDP device.
 *
 * Return: 0 on success, -ENOMEM if the SID table cannot be allocated,
 * otherwise the error returned by scm_call2().
 */
static int _sde_crtc_scm_call(int vmid)
{
	struct scm_desc desc = {0};
	uint32_t num_sids;
	uint32_t *sec_sid;
	uint32_t mem_protect_sd_ctrl_id = MEM_PROTECT_SD_CTRL_SWITCH;
	int ret = 0;

	/* This info should be queried from catalog */
	num_sids = SEC_SID_CNT;
	sec_sid = kcalloc(num_sids, sizeof(uint32_t), GFP_KERNEL);
	if (!sec_sid)
		return -ENOMEM;

	/**
	 * derive this info from device tree/catalog, this is combination of
	 * smr mask and SID for secure
	 */
	/* NOTE(review): assumes SEC_SID_CNT >= 2 since exactly two entries
	 * are populated here — confirm against the catalog definition.
	 */
	sec_sid[0] = SEC_SID_MASK_0;
	sec_sid[1] = SEC_SID_MASK_1;
	/* flush before the call so the secure world reads current data */
	dmac_flush_range(sec_sid, sec_sid + num_sids);

	SDE_DEBUG("calling scm_call for vmid %d", vmid);

	desc.arginfo = SCM_ARGS(4, SCM_VAL, SCM_RW, SCM_VAL, SCM_VAL);
	desc.args[0] = MDP_DEVICE_ID;
	desc.args[1] = SCM_BUFFER_PHYS(sec_sid);
	desc.args[2] = sizeof(uint32_t) * num_sids;
	desc.args[3] = vmid;

	ret = scm_call2(SCM_SIP_FNID(SCM_SVC_MP,
			mem_protect_sd_ctrl_id), &desc);
	if (ret) {
		SDE_ERROR("Error:scm_call2, vmid (%lld): ret%d\n",
				desc.args[3], ret);
	}

	kfree(sec_sid);
	return ret;
}
1731
1732/**
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301733 * _sde_crtc_setup_scaler3_lut - Set up scaler lut
1734 * LUTs are configured only once during boot
1735 * @sde_crtc: Pointer to sde crtc
1736 * @cstate: Pointer to sde crtc state
1737 */
1738static int _sde_crtc_set_dest_scaler_lut(struct sde_crtc *sde_crtc,
1739 struct sde_crtc_state *cstate, uint32_t lut_idx)
1740{
1741 struct sde_hw_scaler3_lut_cfg *cfg;
1742 u32 *lut_data = NULL;
1743 size_t len = 0;
1744 int ret = 0;
1745
1746 if (!sde_crtc || !cstate || !sde_crtc->scl3_lut_cfg) {
1747 SDE_ERROR("invalid args\n");
1748 return -EINVAL;
1749 }
1750
1751 if (sde_crtc->scl3_lut_cfg->is_configured) {
1752 SDE_DEBUG("lut already configured\n");
1753 return 0;
1754 }
1755
1756 lut_data = msm_property_get_blob(&sde_crtc->property_info,
1757 &cstate->property_state, &len, lut_idx);
1758 if (!lut_data || !len) {
1759 SDE_ERROR("lut(%d): no data, len(%zu)\n", lut_idx, len);
1760 return -ENODATA;
1761 }
1762
1763 cfg = sde_crtc->scl3_lut_cfg;
1764
1765 switch (lut_idx) {
1766 case CRTC_PROP_DEST_SCALER_LUT_ED:
1767 cfg->dir_lut = lut_data;
1768 cfg->dir_len = len;
1769 break;
1770 case CRTC_PROP_DEST_SCALER_LUT_CIR:
1771 cfg->cir_lut = lut_data;
1772 cfg->cir_len = len;
1773 break;
1774 case CRTC_PROP_DEST_SCALER_LUT_SEP:
1775 cfg->sep_lut = lut_data;
1776 cfg->sep_len = len;
1777 break;
1778 default:
1779 ret = -EINVAL;
1780 SDE_ERROR("invalid LUT index = %d", lut_idx);
1781 break;
1782 }
1783
1784 if (cfg->dir_lut && cfg->cir_lut && cfg->sep_lut)
1785 cfg->is_configured = true;
1786
1787 return ret;
1788}
1789
/**
 * sde_crtc_secure_ctrl - Initiates the operations to switch between secure
 *			and non-secure mode
 * @crtc: Pointer to crtc
 * @post_commit: if this operation is triggered after commit
 *
 * Drives the SMMU state machine for secure display / secure camera
 * transitions: detaches or re-attaches IOMMU contexts and issues the
 * corresponding SCM calls to move stage-2 translation between VMIDs.
 * Runs either pre- or post-commit depending on the transition_type set
 * up during atomic check.
 *
 * Return: 0 on success or when no transition is pending, negative error
 * code if a detach/attach or SCM call fails (transition_error is set).
 */
int sde_crtc_secure_ctrl(struct drm_crtc *crtc, bool post_commit)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;
	struct sde_kms *sde_kms;
	struct sde_crtc_smmu_state_data *smmu_state;
	int ret = 0;
	int old_smmu_state;

	if (!crtc || !crtc->state) {
		SDE_ERROR("invalid crtc\n");
		return -EINVAL;
	}

	sde_kms = _sde_crtc_get_kms(crtc);
	if (!sde_kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_crtc = to_sde_crtc(crtc);
	cstate = to_sde_crtc_state(crtc->state);
	smmu_state = &sde_crtc->smmu_state;
	old_smmu_state = smmu_state->state;

	/*
	 * Nothing to do when no transition is pending, or when a
	 * POST_COMMIT transition is requested but we are in pre-commit.
	 */
	if ((!smmu_state->transition_type) ||
		((smmu_state->transition_type == POST_COMMIT) && !post_commit))
		/* Bail out */
		return 0;

	/* Secure UI use case enable */
	switch (smmu_state->state) {
	case DETACH_ALL_REQ:
		/* detach_all_contexts */
		ret = sde_kms_mmu_detach(sde_kms, false);
		if (ret) {
			SDE_ERROR("crtc: %d, failed to detach %d\n",
					crtc->base.id, ret);
			goto error;
		}

		ret = _sde_crtc_scm_call(VMID_CP_SEC_DISPLAY);
		if (ret)
			goto error;

		smmu_state->state = DETACHED;
		break;
	/* Secure UI use case disable */
	case ATTACH_ALL_REQ:
		ret = _sde_crtc_scm_call(VMID_CP_PIXEL);
		if (ret)
			goto error;

		/* attach_all_contexts */
		ret = sde_kms_mmu_attach(sde_kms, false);
		if (ret) {
			SDE_ERROR("crtc: %d, failed to attach %d\n",
					crtc->base.id,
					ret);
			goto error;
		}

		smmu_state->state = ATTACHED;

		break;
	/* Secure preview enable */
	case DETACH_SEC_REQ:
		/* detach secure_context */
		ret = sde_kms_mmu_detach(sde_kms, true);
		if (ret) {
			SDE_ERROR("crtc: %d, failed to detach %d\n",
					crtc->base.id,
					ret);
			goto error;
		}

		/*
		 * NOTE(review): state moves to DETACHED_SEC before the SCM
		 * call here, so an SCM failure leaves state DETACHED_SEC —
		 * unlike DETACH_ALL_REQ which updates state only on success.
		 * Confirm this asymmetry is intentional.
		 */
		smmu_state->state = DETACHED_SEC;
		ret = _sde_crtc_scm_call(VMID_CP_CAMERA_PREVIEW);
		if (ret)
			goto error;

		break;

	/* Secure preview disable */
	case ATTACH_SEC_REQ:
		ret = _sde_crtc_scm_call(VMID_CP_PIXEL);
		if (ret)
			goto error;

		ret = sde_kms_mmu_attach(sde_kms, true);
		if (ret) {
			SDE_ERROR("crtc: %d, failed to attach %d\n",
					crtc->base.id,
					ret);
			goto error;
		}
		smmu_state->state = ATTACHED;
		break;
	default:
		break;
	}

	SDE_DEBUG("crtc: %d, old_state %d new_state %d\n", crtc->base.id,
			old_smmu_state,
			smmu_state->state);
	smmu_state->transition_type = NONE;

error:
	/* success path falls through: transition_error cleared when ret==0 */
	smmu_state->transition_error = ret ? true : false;
	return ret;
}
1907
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05301908/**
1909 * _sde_crtc_dest_scaler_setup - Set up dest scaler block
1910 * @crtc: Pointer to drm crtc
1911 */
1912static void _sde_crtc_dest_scaler_setup(struct drm_crtc *crtc)
1913{
1914 struct sde_crtc *sde_crtc;
1915 struct sde_crtc_state *cstate;
1916 struct sde_hw_mixer *hw_lm;
1917 struct sde_hw_ctl *hw_ctl;
1918 struct sde_hw_ds *hw_ds;
1919 struct sde_hw_ds_cfg *cfg;
1920 struct sde_kms *kms;
1921 u32 flush_mask = 0, op_mode = 0;
1922 u32 lm_idx = 0, num_mixers = 0;
1923 int i, count = 0;
1924
1925 if (!crtc)
1926 return;
1927
1928 sde_crtc = to_sde_crtc(crtc);
1929 cstate = to_sde_crtc_state(crtc->state);
1930 kms = _sde_crtc_get_kms(crtc);
1931 num_mixers = sde_crtc->num_mixers;
1932
1933 SDE_DEBUG("crtc%d\n", crtc->base.id);
1934
1935 if (!cstate->ds_dirty) {
1936 SDE_DEBUG("no change in settings, skip commit\n");
1937 } else if (!kms || !kms->catalog) {
1938 SDE_ERROR("invalid parameters\n");
1939 } else if (!kms->catalog->mdp[0].has_dest_scaler) {
1940 SDE_DEBUG("dest scaler feature not supported\n");
1941 } else if (num_mixers > CRTC_DUAL_MIXERS) {
1942 SDE_ERROR("invalid number mixers: %d\n", num_mixers);
1943 } else if (!sde_crtc->scl3_lut_cfg->is_configured) {
1944 SDE_DEBUG("no LUT data available\n");
1945 } else {
1946 count = cstate->num_ds_enabled ? cstate->num_ds : num_mixers;
1947
1948 for (i = 0; i < count; i++) {
1949 cfg = &cstate->ds_cfg[i];
1950
1951 if (!cfg->flags)
1952 continue;
1953
1954 lm_idx = cfg->ndx;
1955 hw_lm = sde_crtc->mixers[lm_idx].hw_lm;
1956 hw_ctl = sde_crtc->mixers[lm_idx].hw_ctl;
1957 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
1958
1959 /* Setup op mode - Dual/single */
1960 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
1961 op_mode |= BIT(hw_ds->idx - DS_0);
1962
1963 if ((i == count-1) && hw_ds->ops.setup_opmode) {
1964 op_mode |= (cstate->num_ds_enabled ==
1965 CRTC_DUAL_MIXERS) ?
1966 SDE_DS_OP_MODE_DUAL : 0;
1967 hw_ds->ops.setup_opmode(hw_ds, op_mode);
1968 SDE_EVT32(DRMID(crtc), op_mode);
1969 }
1970
1971 /* Setup scaler */
1972 if ((cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE) ||
1973 (cfg->flags &
1974 SDE_DRM_DESTSCALER_ENHANCER_UPDATE)) {
1975 if (hw_ds->ops.setup_scaler)
1976 hw_ds->ops.setup_scaler(hw_ds,
1977 cfg->scl3_cfg,
1978 sde_crtc->scl3_lut_cfg);
1979
1980 /**
1981 * Clear the flags as the block doesn't have to
1982 * be programmed in each commit if no updates
1983 */
1984 cfg->flags &= ~SDE_DRM_DESTSCALER_SCALE_UPDATE;
1985 cfg->flags &=
1986 ~SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
1987 }
1988
1989 /*
1990 * Dest scaler shares the flush bit of the LM in control
1991 */
1992 if (cfg->set_lm_flush && hw_lm && hw_ctl &&
1993 hw_ctl->ops.get_bitmask_mixer) {
1994 flush_mask = hw_ctl->ops.get_bitmask_mixer(
1995 hw_ctl, hw_lm->idx);
1996 SDE_DEBUG("Set lm[%d] flush = %d",
1997 hw_lm->idx, flush_mask);
1998 hw_ctl->ops.update_pending_flush(hw_ctl,
1999 flush_mask);
2000 }
2001 cfg->set_lm_flush = false;
2002 }
2003 cstate->ds_dirty = false;
2004 }
2005}
2006
/**
 * sde_crtc_prepare_commit - pre-commit bookkeeping for a crtc
 * @crtc: Pointer to drm crtc
 * @old_state: Previous crtc state (unused here)
 *
 * Collects the connectors currently attached to this crtc into the new
 * crtc state, prepares their retire fences, queues a retire event slot
 * for the upcoming frame, and prepares the crtc output (release) fence.
 */
void sde_crtc_prepare_commit(struct drm_crtc *crtc,
		struct drm_crtc_state *old_state)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_state *cstate;
	struct drm_connector *conn;
	struct sde_crtc_retire_event *retire_event = NULL;
	unsigned long flags;
	int i;

	if (!crtc || !crtc->state) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);
	cstate = to_sde_crtc_state(crtc->state);
	SDE_EVT32_VERBOSE(DRMID(crtc));

	/* identify connectors attached to this crtc */
	cstate->num_connectors = 0;

	drm_for_each_connector(conn, crtc->dev)
		if (conn->state && conn->state->crtc == crtc &&
				cstate->num_connectors < MAX_CONNECTORS) {
			cstate->connectors[cstate->num_connectors++] = conn;
			sde_connector_prepare_fence(conn);
		}

	/*
	 * Find a free retire-event slot: a slot is free when its list node
	 * is not linked into retire_event_list.
	 * NOTE(review): list_empty() is read here without sde_crtc->spin_lock
	 * — presumably safe because only the commit thread claims slots;
	 * verify against _sde_crtc_retire_event() which unlinks under lock.
	 */
	for (i = 0; i < SDE_CRTC_FRAME_EVENT_SIZE; i++) {
		retire_event = &sde_crtc->retire_events[i];
		if (list_empty(&retire_event->list))
			break;
		retire_event = NULL;
	}

	if (retire_event) {
		/* snapshot the connector set for the retire-fence signal */
		retire_event->num_connectors = cstate->num_connectors;
		for (i = 0; i < cstate->num_connectors; i++)
			retire_event->connectors[i] = cstate->connectors[i];

		spin_lock_irqsave(&sde_crtc->spin_lock, flags);
		list_add_tail(&retire_event->list,
						&sde_crtc->retire_event_list);
		spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
	} else {
		SDE_ERROR("crtc%d retire event overflow\n", crtc->base.id);
		SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
	}

	/* prepare main output fence */
	sde_fence_prepare(&sde_crtc->output_fence);
}
2060
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002061/**
2062 * _sde_crtc_complete_flip - signal pending page_flip events
2063 * Any pending vblank events are added to the vblank_event_list
2064 * so that the next vblank interrupt shall signal them.
2065 * However PAGE_FLIP events are not handled through the vblank_event_list.
2066 * This API signals any pending PAGE_FLIP events requested through
2067 * DRM_IOCTL_MODE_PAGE_FLIP and are cached in the sde_crtc->event.
2068 * if file!=NULL, this is preclose potential cancel-flip path
2069 * @crtc: Pointer to drm crtc structure
2070 * @file: Pointer to drm file
2071 */
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002072static void _sde_crtc_complete_flip(struct drm_crtc *crtc,
2073 struct drm_file *file)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002074{
2075 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2076 struct drm_device *dev = crtc->dev;
2077 struct drm_pending_vblank_event *event;
2078 unsigned long flags;
2079
2080 spin_lock_irqsave(&dev->event_lock, flags);
2081 event = sde_crtc->event;
2082 if (event) {
2083 /* if regular vblank case (!file) or if cancel-flip from
2084 * preclose on file that requested flip, then send the
2085 * event:
2086 */
2087 if (!file || (event->base.file_priv == file)) {
2088 sde_crtc->event = NULL;
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002089 DRM_DEBUG_VBL("%s: send event: %pK\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07002090 sde_crtc->name, event);
Dhaval Patela5f75952017-07-25 11:17:41 -07002091 SDE_EVT32_VERBOSE(DRMID(crtc));
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002092 drm_crtc_send_vblank_event(crtc, event);
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002093 }
2094 }
2095 spin_unlock_irqrestore(&dev->event_lock, flags);
2096}
2097
Alan Kwong3e985f02017-02-12 15:08:44 -08002098enum sde_intf_mode sde_crtc_get_intf_mode(struct drm_crtc *crtc)
2099{
2100 struct drm_encoder *encoder;
2101
2102 if (!crtc || !crtc->dev) {
2103 SDE_ERROR("invalid crtc\n");
2104 return INTF_MODE_NONE;
2105 }
2106
2107 drm_for_each_encoder(encoder, crtc->dev)
2108 if (encoder->crtc == crtc)
2109 return sde_encoder_get_intf_mode(encoder);
2110
2111 return INTF_MODE_NONE;
2112}
2113
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002114static void sde_crtc_vblank_cb(void *data)
2115{
2116 struct drm_crtc *crtc = (struct drm_crtc *)data;
Alan Kwong07da0982016-11-04 12:57:45 -04002117 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2118
2119 /* keep statistics on vblank callback - with auto reset via debugfs */
2120 if (ktime_equal(sde_crtc->vblank_cb_time, ktime_set(0, 0)))
2121 sde_crtc->vblank_cb_time = ktime_get();
2122 else
2123 sde_crtc->vblank_cb_count++;
Abhinav Kumarf2e94b52017-02-09 20:27:24 -08002124 _sde_crtc_complete_flip(crtc, NULL);
Lloyd Atkinsonac933642016-09-14 11:52:00 -04002125 drm_crtc_handle_vblank(crtc);
Lloyd Atkinson9eabe7a2016-09-14 13:39:15 -04002126 DRM_DEBUG_VBL("crtc%d\n", crtc->base.id);
Dhaval Patel6c666622017-03-21 23:02:59 -07002127 SDE_EVT32_VERBOSE(DRMID(crtc));
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002128}
2129
Dhaval Patel5023c3c2017-08-22 12:40:11 -07002130static void _sde_crtc_retire_event(struct drm_crtc *crtc, ktime_t ts)
2131{
2132 struct sde_crtc_retire_event *retire_event;
2133 struct sde_crtc *sde_crtc;
2134 unsigned long flags;
2135 int i;
2136
2137 if (!crtc) {
2138 SDE_ERROR("invalid param\n");
2139 return;
2140 }
2141
2142 sde_crtc = to_sde_crtc(crtc);
2143 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
2144 retire_event = list_first_entry_or_null(&sde_crtc->retire_event_list,
2145 struct sde_crtc_retire_event, list);
2146 if (retire_event)
2147 list_del_init(&retire_event->list);
2148 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
2149
2150 if (!retire_event) {
2151 SDE_ERROR("crtc%d retire event without kickoff\n",
2152 crtc->base.id);
2153 SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);
2154 return;
2155 }
2156
2157 SDE_ATRACE_BEGIN("signal_retire_fence");
2158 for (i = 0; (i < retire_event->num_connectors) &&
2159 retire_event->connectors[i]; ++i)
2160 sde_connector_complete_commit(
2161 retire_event->connectors[i], ts);
2162 SDE_ATRACE_END("signal_retire_fence");
2163}
2164
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002165/* _sde_crtc_idle_notify - signal idle timeout to client */
2166static void _sde_crtc_idle_notify(struct sde_crtc *sde_crtc)
2167{
2168 struct drm_crtc *crtc;
2169 struct drm_event event;
2170 int ret = 0;
2171
2172 if (!sde_crtc) {
2173 SDE_ERROR("invalid sde crtc\n");
2174 return;
2175 }
2176
2177 crtc = &sde_crtc->base;
2178 event.type = DRM_EVENT_IDLE_NOTIFY;
2179 event.length = sizeof(u32);
2180 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
2181 (u8 *)&ret);
2182
2183 SDE_DEBUG("crtc:%d idle timeout notified\n", crtc->base.id);
2184}
2185
2186/*
2187 * sde_crtc_handle_event - crtc frame event handle.
2188 * This API must manage only non-IRQ context events.
2189 */
2190static bool _sde_crtc_handle_event(struct sde_crtc *sde_crtc, u32 event)
2191{
2192 bool event_processed = false;
2193
2194 /**
2195 * idle events are originated from commit thread and can be processed
2196 * in same context
2197 */
2198 if (event & SDE_ENCODER_FRAME_EVENT_IDLE) {
2199 _sde_crtc_idle_notify(sde_crtc);
2200 event_processed = true;
2201 }
2202
2203 return event_processed;
2204}
2205
/**
 * sde_crtc_frame_event_work - deferred handler for crtc frame events
 * @work: kthread work embedded in a struct sde_crtc_frame_event
 *
 * Runs on the per-crtc event thread. Depending on the event flags it
 * accounts the pending-frame count (releasing bandwidth when the last
 * frame retires), signals release/retire fences, reports panel-dead
 * events, completes the frame_done completion, and finally returns the
 * event slot to the free list.
 */
static void sde_crtc_frame_event_work(struct kthread_work *work)
{
	struct msm_drm_private *priv;
	struct sde_crtc_frame_event *fevent;
	struct drm_crtc *crtc;
	struct sde_crtc *sde_crtc;
	struct sde_kms *sde_kms;
	unsigned long flags;
	bool frame_done = false;

	if (!work) {
		SDE_ERROR("invalid work handle\n");
		return;
	}

	fevent = container_of(work, struct sde_crtc_frame_event, work);
	if (!fevent->crtc || !fevent->crtc->state) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	crtc = fevent->crtc;
	sde_crtc = to_sde_crtc(crtc);

	sde_kms = _sde_crtc_get_kms(crtc);
	if (!sde_kms) {
		SDE_ERROR("invalid kms handle\n");
		return;
	}
	priv = sde_kms->dev->dev_private;
	SDE_ATRACE_BEGIN("crtc_frame_event");

	SDE_DEBUG("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
			ktime_to_ns(fevent->ts));

	SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event, SDE_EVTLOG_FUNC_ENTRY);

	/* frame-lifecycle events adjust the pending-frame accounting */
	if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
				| SDE_ENCODER_FRAME_EVENT_ERROR
				| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (atomic_read(&sde_crtc->frame_pending) < 1) {
			/* this should not happen */
			SDE_ERROR("crtc%d ts:%lld invalid frame_pending:%d\n",
					crtc->base.id,
					ktime_to_ns(fevent->ts),
					atomic_read(&sde_crtc->frame_pending));
			SDE_EVT32(DRMID(crtc), fevent->event,
							SDE_EVTLOG_FUNC_CASE1);
		} else if (atomic_dec_return(&sde_crtc->frame_pending) == 0) {
			/* release bandwidth and other resources */
			SDE_DEBUG("crtc%d ts:%lld last pending\n",
					crtc->base.id,
					ktime_to_ns(fevent->ts));
			SDE_EVT32(DRMID(crtc), fevent->event,
							SDE_EVTLOG_FUNC_CASE2);
			sde_core_perf_crtc_release_bw(crtc);
		} else {
			SDE_EVT32_VERBOSE(DRMID(crtc), fevent->event,
							SDE_EVTLOG_FUNC_CASE3);
		}

		if (fevent->event & SDE_ENCODER_FRAME_EVENT_DONE)
			sde_core_perf_crtc_update(crtc, 0, false);

		if (fevent->event & (SDE_ENCODER_FRAME_EVENT_DONE
					| SDE_ENCODER_FRAME_EVENT_ERROR))
			frame_done = true;
	}

	if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE) {
		SDE_ATRACE_BEGIN("signal_release_fence");
		sde_fence_signal(&sde_crtc->output_fence, fevent->ts, false);
		SDE_ATRACE_END("signal_release_fence");
	}

	if (fevent->event & SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE)
		/* this api should be called without spin_lock */
		_sde_crtc_retire_event(crtc, fevent->ts);

	if (fevent->event & SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)
		SDE_ERROR("crtc%d ts:%lld received panel dead event\n",
				crtc->base.id, ktime_to_ns(fevent->ts));

	if (frame_done)
		complete_all(&sde_crtc->frame_done_comp);

	/* return the event slot to the free list for reuse */
	spin_lock_irqsave(&sde_crtc->spin_lock, flags);
	list_add_tail(&fevent->list, &sde_crtc->frame_event_list);
	spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
	SDE_ATRACE_END("crtc_frame_event");
}
2298
/*
 * sde_crtc_frame_event_cb - crtc frame event callback API. CRTC module
 * registers this API to encoder for all frame event callbacks like
 * release_fence, retire_fence, frame_error, frame_done, idle_timeout,
 * etc. Encoder may call different events from different context - IRQ,
 * user thread, commit_thread, etc. Each event should be carefully
 * reviewed and should be processed in proper task context to avoid scheduling
 * delay or properly manage the irq context's bottom half processing.
 */
static void sde_crtc_frame_event_cb(void *data, u32 event)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct sde_crtc *sde_crtc;
	struct msm_drm_private *priv;
	struct sde_crtc_frame_event *fevent;
	unsigned long flags;
	u32 crtc_id;
	bool event_processed = false;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}
	sde_crtc = to_sde_crtc(crtc);
	priv = crtc->dev->dev_private;
	crtc_id = drm_crtc_index(crtc);

	SDE_DEBUG("crtc%d\n", crtc->base.id);
	SDE_EVT32_VERBOSE(DRMID(crtc), event);

	/* try to process the event in caller context */
	event_processed = _sde_crtc_handle_event(sde_crtc, event);
	if (event_processed)
		return;

	/* grab a free event slot from the free list under the lock */
	spin_lock_irqsave(&sde_crtc->spin_lock, flags);
	fevent = list_first_entry_or_null(&sde_crtc->frame_event_list,
			struct sde_crtc_frame_event, list);
	if (fevent)
		list_del_init(&fevent->list);
	spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);

	if (!fevent) {
		SDE_ERROR("crtc%d event %d overflow\n",
				crtc->base.id, event);
		SDE_EVT32(DRMID(crtc), event);
		return;
	}

	/* defer the heavy processing to the per-crtc event thread */
	fevent->event = event;
	fevent->crtc = crtc;
	fevent->ts = ktime_get();
	kthread_queue_work(&priv->event_thread[crtc_id].worker, &fevent->work);
}
2353
Abhijit Kulkarni1b3340c2017-06-22 12:39:37 -07002354void sde_crtc_complete_commit(struct drm_crtc *crtc,
2355 struct drm_crtc_state *old_state)
2356{
2357 struct sde_crtc *sde_crtc;
2358 struct sde_crtc_smmu_state_data *smmu_state;
2359
2360 if (!crtc || !crtc->state) {
2361 SDE_ERROR("invalid crtc\n");
2362 return;
2363 }
2364
2365 sde_crtc = to_sde_crtc(crtc);
2366 SDE_EVT32_VERBOSE(DRMID(crtc));
2367 smmu_state = &sde_crtc->smmu_state;
2368
2369 /* complete secure transitions if any */
2370 if (smmu_state->transition_type == POST_COMMIT)
2371 sde_crtc_secure_ctrl(crtc, true);
2372}
2373
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002374/* _sde_crtc_set_idle_timeout - update idle timeout wait duration */
2375static void _sde_crtc_set_idle_timeout(struct drm_crtc *crtc, u64 val)
2376{
2377 struct drm_encoder *encoder;
2378
2379 if (!crtc) {
2380 SDE_ERROR("invalid crtc\n");
2381 return;
2382 }
2383
2384 drm_for_each_encoder(encoder, crtc->dev) {
2385 if (encoder->crtc != crtc)
2386 continue;
2387
2388 sde_encoder_set_idle_timeout(encoder, (u32) val);
2389 }
2390}
2391
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002392/**
Clarence Ipcae1bb62016-07-07 12:07:13 -04002393 * _sde_crtc_set_input_fence_timeout - update ns version of in fence timeout
2394 * @cstate: Pointer to sde crtc state
2395 */
2396static void _sde_crtc_set_input_fence_timeout(struct sde_crtc_state *cstate)
2397{
2398 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002399 SDE_ERROR("invalid cstate\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002400 return;
2401 }
2402 cstate->input_fence_timeout_ns =
2403 sde_crtc_get_property(cstate, CRTC_PROP_INPUT_FENCE_TIMEOUT);
2404 cstate->input_fence_timeout_ns *= NSEC_PER_MSEC;
2405}
2406
2407/**
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002408 * _sde_crtc_set_dim_layer_v1 - copy dim layer settings from userspace
2409 * @cstate: Pointer to sde crtc state
2410 * @user_ptr: User ptr for sde_drm_dim_layer_v1 struct
2411 */
2412static void _sde_crtc_set_dim_layer_v1(struct sde_crtc_state *cstate,
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302413 void __user *usr_ptr)
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002414{
2415 struct sde_drm_dim_layer_v1 dim_layer_v1;
2416 struct sde_drm_dim_layer_cfg *user_cfg;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002417 struct sde_hw_dim_layer *dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002418 u32 count, i;
2419
2420 if (!cstate) {
2421 SDE_ERROR("invalid cstate\n");
2422 return;
2423 }
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002424 dim_layer = cstate->dim_layer;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002425
2426 if (!usr_ptr) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002427 SDE_DEBUG("dim_layer data removed\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002428 return;
2429 }
2430
2431 if (copy_from_user(&dim_layer_v1, usr_ptr, sizeof(dim_layer_v1))) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002432 SDE_ERROR("failed to copy dim_layer data\n");
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002433 return;
2434 }
2435
2436 count = dim_layer_v1.num_layers;
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002437 if (count > SDE_MAX_DIM_LAYERS) {
2438 SDE_ERROR("invalid number of dim_layers:%d", count);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002439 return;
2440 }
2441
2442 /* populate from user space */
2443 cstate->num_dim_layers = count;
2444 for (i = 0; i < count; i++) {
2445 user_cfg = &dim_layer_v1.layer_cfg[i];
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002446
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002447 dim_layer[i].flags = user_cfg->flags;
2448 dim_layer[i].stage = user_cfg->stage + SDE_STAGE_0;
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002449
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002450 dim_layer[i].rect.x = user_cfg->rect.x1;
2451 dim_layer[i].rect.y = user_cfg->rect.y1;
2452 dim_layer[i].rect.w = user_cfg->rect.x2 - user_cfg->rect.x1;
2453 dim_layer[i].rect.h = user_cfg->rect.y2 - user_cfg->rect.y1;
2454
2455 dim_layer[i].color_fill = (struct sde_mdss_color) {
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002456 user_cfg->color_fill.color_0,
2457 user_cfg->color_fill.color_1,
2458 user_cfg->color_fill.color_2,
2459 user_cfg->color_fill.color_3,
2460 };
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07002461
2462 SDE_DEBUG("dim_layer[%d] - flags:%d, stage:%d\n",
2463 i, dim_layer[i].flags, dim_layer[i].stage);
2464 SDE_DEBUG(" rect:{%d,%d,%d,%d}, color:{%d,%d,%d,%d}\n",
2465 dim_layer[i].rect.x, dim_layer[i].rect.y,
2466 dim_layer[i].rect.w, dim_layer[i].rect.h,
2467 dim_layer[i].color_fill.color_0,
2468 dim_layer[i].color_fill.color_1,
2469 dim_layer[i].color_fill.color_2,
2470 dim_layer[i].color_fill.color_3);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08002471 }
2472}
2473
2474/**
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302475 * _sde_crtc_dest_scaler_init - allocate memory for scaler lut
2476 * @sde_crtc : Pointer to sde crtc
2477 * @catalog : Pointer to mdss catalog info
2478 */
2479static void _sde_crtc_dest_scaler_init(struct sde_crtc *sde_crtc,
2480 struct sde_mdss_cfg *catalog)
2481{
2482 if (!sde_crtc || !catalog)
2483 return;
2484
2485 if (!catalog->mdp[0].has_dest_scaler) {
2486 SDE_DEBUG("dest scaler feature not supported\n");
2487 return;
2488 }
2489
2490 sde_crtc->scl3_lut_cfg = kzalloc(sizeof(struct sde_hw_scaler3_lut_cfg),
2491 GFP_KERNEL);
2492 if (!sde_crtc->scl3_lut_cfg)
2493 SDE_ERROR("failed to create scale LUT for dest scaler");
2494}
2495
2496/**
2497 * _sde_crtc_set_dest_scaler - copy dest scaler settings from userspace
2498 * @sde_crtc : Pointer to sde crtc
2499 * @cstate : Pointer to sde crtc state
2500 * @usr_ptr: User ptr for sde_drm_dest_scaler_data struct
2501 */
2502static int _sde_crtc_set_dest_scaler(struct sde_crtc *sde_crtc,
2503 struct sde_crtc_state *cstate,
2504 void __user *usr_ptr)
2505{
2506 struct sde_drm_dest_scaler_data ds_data;
2507 struct sde_drm_dest_scaler_cfg *ds_cfg_usr;
2508 struct sde_drm_scaler_v2 scaler_v2;
2509 void __user *scaler_v2_usr;
2510 int i, count, ret = 0;
2511
2512 if (!sde_crtc || !cstate) {
2513 SDE_ERROR("invalid sde_crtc/state\n");
2514 return -EINVAL;
2515 }
2516
2517 SDE_DEBUG("crtc %s\n", sde_crtc->name);
2518
2519 cstate->num_ds = 0;
2520 cstate->ds_dirty = false;
2521 if (!usr_ptr) {
2522 SDE_DEBUG("ds data removed\n");
2523 return 0;
2524 }
2525
2526 if (copy_from_user(&ds_data, usr_ptr, sizeof(ds_data))) {
2527 SDE_ERROR("failed to copy dest scaler data from user\n");
2528 return -EINVAL;
2529 }
2530
2531 count = ds_data.num_dest_scaler;
2532 if (!sde_crtc->num_mixers || count > sde_crtc->num_mixers ||
2533 (count && (count != sde_crtc->num_mixers) &&
2534 !(ds_data.ds_cfg[0].flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2535 SDE_ERROR("invalid config:num ds(%d), mixers(%d),flags(%d)\n",
2536 count, sde_crtc->num_mixers, ds_data.ds_cfg[0].flags);
2537 return -EINVAL;
2538 }
2539
2540 /* Populate from user space */
2541 for (i = 0; i < count; i++) {
2542 ds_cfg_usr = &ds_data.ds_cfg[i];
2543
2544 cstate->ds_cfg[i].ndx = ds_cfg_usr->index;
2545 cstate->ds_cfg[i].flags = ds_cfg_usr->flags;
2546 cstate->ds_cfg[i].lm_width = ds_cfg_usr->lm_width;
2547 cstate->ds_cfg[i].lm_height = ds_cfg_usr->lm_height;
2548 cstate->ds_cfg[i].scl3_cfg = NULL;
2549
2550 if (ds_cfg_usr->scaler_cfg) {
2551 scaler_v2_usr =
2552 (void __user *)((uintptr_t)ds_cfg_usr->scaler_cfg);
2553
2554 memset(&scaler_v2, 0, sizeof(scaler_v2));
2555
2556 cstate->ds_cfg[i].scl3_cfg =
2557 kzalloc(sizeof(struct sde_hw_scaler3_cfg),
2558 GFP_KERNEL);
2559
2560 if (!cstate->ds_cfg[i].scl3_cfg) {
2561 ret = -ENOMEM;
2562 goto err;
2563 }
2564
2565 if (copy_from_user(&scaler_v2, scaler_v2_usr,
2566 sizeof(scaler_v2))) {
2567 SDE_ERROR("scale data:copy from user failed\n");
2568 ret = -EINVAL;
2569 goto err;
2570 }
2571
2572 sde_set_scaler_v2(cstate->ds_cfg[i].scl3_cfg,
2573 &scaler_v2);
2574
2575 SDE_DEBUG("en(%d)dir(%d)de(%d) src(%dx%d) dst(%dx%d)\n",
2576 scaler_v2.enable, scaler_v2.dir_en,
2577 scaler_v2.de.enable, scaler_v2.src_width[0],
2578 scaler_v2.src_height[0], scaler_v2.dst_width,
2579 scaler_v2.dst_height);
2580 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base),
2581 scaler_v2.enable, scaler_v2.dir_en,
2582 scaler_v2.de.enable, scaler_v2.src_width[0],
2583 scaler_v2.src_height[0], scaler_v2.dst_width,
2584 scaler_v2.dst_height);
2585 }
2586
2587 SDE_DEBUG("ds cfg[%d]-ndx(%d) flags(%d) lm(%dx%d)\n",
2588 i, ds_cfg_usr->index, ds_cfg_usr->flags,
2589 ds_cfg_usr->lm_width, ds_cfg_usr->lm_height);
2590 SDE_EVT32_VERBOSE(DRMID(&sde_crtc->base), i, ds_cfg_usr->index,
2591 ds_cfg_usr->flags, ds_cfg_usr->lm_width,
2592 ds_cfg_usr->lm_height);
2593 }
2594
2595 cstate->num_ds = count;
2596 cstate->ds_dirty = true;
2597 return 0;
2598
2599err:
2600 for (; i >= 0; i--)
2601 kfree(cstate->ds_cfg[i].scl3_cfg);
2602
2603 return ret;
2604}
2605
2606/**
2607 * _sde_crtc_check_dest_scaler_data - validate the dest scaler data
2608 * @crtc : Pointer to drm crtc
2609 * @state : Pointer to drm crtc state
2610 */
2611static int _sde_crtc_check_dest_scaler_data(struct drm_crtc *crtc,
2612 struct drm_crtc_state *state)
2613{
2614 struct sde_crtc *sde_crtc;
2615 struct sde_crtc_state *cstate;
2616 struct drm_display_mode *mode;
2617 struct sde_kms *kms;
2618 struct sde_hw_ds *hw_ds;
2619 struct sde_hw_ds_cfg *cfg;
2620 u32 i, ret = 0, lm_idx;
2621 u32 num_ds_enable = 0;
2622 u32 max_in_width = 0, max_out_width = 0;
2623 u32 prev_lm_width = 0, prev_lm_height = 0;
2624
2625 if (!crtc || !state)
2626 return -EINVAL;
2627
2628 sde_crtc = to_sde_crtc(crtc);
2629 cstate = to_sde_crtc_state(state);
2630 kms = _sde_crtc_get_kms(crtc);
2631 mode = &state->adjusted_mode;
2632
2633 SDE_DEBUG("crtc%d\n", crtc->base.id);
2634
2635 if (!cstate->ds_dirty && !cstate->num_ds_enabled) {
2636 SDE_DEBUG("dest scaler property not set, skip validation\n");
2637 return 0;
2638 }
2639
2640 if (!kms || !kms->catalog) {
2641 SDE_ERROR("invalid parameters\n");
2642 return -EINVAL;
2643 }
2644
2645 if (!kms->catalog->mdp[0].has_dest_scaler) {
2646 SDE_DEBUG("dest scaler feature not supported\n");
2647 return 0;
2648 }
2649
2650 if (!sde_crtc->num_mixers) {
2651 SDE_ERROR("mixers not allocated\n");
2652 return -EINVAL;
2653 }
2654
2655 /**
2656 * Check if sufficient hw resources are
2657 * available as per target caps & topology
2658 */
2659 if (sde_crtc->num_mixers > CRTC_DUAL_MIXERS) {
2660 SDE_ERROR("invalid config: mixers(%d) max(%d)\n",
2661 sde_crtc->num_mixers, CRTC_DUAL_MIXERS);
2662 ret = -EINVAL;
2663 goto err;
2664 }
2665
2666 for (i = 0; i < sde_crtc->num_mixers; i++) {
2667 if (!sde_crtc->mixers[i].hw_lm || !sde_crtc->mixers[i].hw_ds) {
2668 SDE_ERROR("insufficient HW resources allocated\n");
2669 ret = -EINVAL;
2670 goto err;
2671 }
2672 }
2673
2674 /**
2675 * Check if DS needs to be enabled or disabled
2676 * In case of enable, validate the data
2677 */
2678 if (!cstate->ds_dirty || !cstate->num_ds ||
2679 !(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_ENABLE)) {
2680 SDE_DEBUG("disable dest scaler,dirty(%d)num(%d)flags(%d)\n",
2681 cstate->ds_dirty, cstate->num_ds,
2682 cstate->ds_cfg[0].flags);
2683 goto disable;
2684 }
2685
2686 /**
2687 * No of dest scalers shouldn't exceed hw ds block count and
2688 * also, match the num of mixers unless it is partial update
2689 * left only/right only use case - currently PU + DS is not supported
2690 */
2691 if (cstate->num_ds > kms->catalog->ds_count ||
2692 ((cstate->num_ds != sde_crtc->num_mixers) &&
2693 !(cstate->ds_cfg[0].flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2694 SDE_ERROR("invalid cfg: num_ds(%d), hw_ds_cnt(%d) flags(%d)\n",
2695 cstate->num_ds, kms->catalog->ds_count,
2696 cstate->ds_cfg[0].flags);
2697 ret = -EINVAL;
2698 goto err;
2699 }
2700
2701 /* Validate the DS data */
2702 for (i = 0; i < cstate->num_ds; i++) {
2703 cfg = &cstate->ds_cfg[i];
2704 lm_idx = cfg->ndx;
2705
2706 /**
2707 * Validate against topology
2708 * No of dest scalers should match the num of mixers
2709 * unless it is partial update left only/right only use case
2710 */
2711 if (lm_idx >= sde_crtc->num_mixers || (i != lm_idx &&
2712 !(cfg->flags & SDE_DRM_DESTSCALER_PU_ENABLE))) {
2713 SDE_ERROR("invalid user data(%d):idx(%d), flags(%d)\n",
2714 i, lm_idx, cfg->flags);
2715 ret = -EINVAL;
2716 goto err;
2717 }
2718
2719 hw_ds = sde_crtc->mixers[lm_idx].hw_ds;
2720
2721 if (!max_in_width && !max_out_width) {
2722 max_in_width = hw_ds->scl->top->maxinputwidth;
2723 max_out_width = hw_ds->scl->top->maxoutputwidth;
2724
2725 if (cstate->num_ds == CRTC_DUAL_MIXERS)
2726 max_in_width -= SDE_DS_OVERFETCH_SIZE;
2727
2728 SDE_DEBUG("max DS width [%d,%d] for num_ds = %d\n",
2729 max_in_width, max_out_width, cstate->num_ds);
2730 }
2731
2732 /* Check LM width and height */
2733 if (cfg->lm_width > (mode->hdisplay/sde_crtc->num_mixers) ||
2734 cfg->lm_height > mode->vdisplay ||
2735 !cfg->lm_width || !cfg->lm_height) {
2736 SDE_ERROR("invalid lm size[%d,%d] display [%d,%d]\n",
2737 cfg->lm_width,
2738 cfg->lm_height,
2739 mode->hdisplay/sde_crtc->num_mixers,
2740 mode->vdisplay);
2741 ret = -E2BIG;
2742 goto err;
2743 }
2744
2745 if (!prev_lm_width && !prev_lm_height) {
2746 prev_lm_width = cfg->lm_width;
2747 prev_lm_height = cfg->lm_height;
2748 } else {
2749 if (cfg->lm_width != prev_lm_width ||
2750 cfg->lm_height != prev_lm_height) {
2751 SDE_ERROR("lm size:left[%d,%d], right[%d %d]\n",
2752 cfg->lm_width, cfg->lm_height,
2753 prev_lm_width, prev_lm_height);
2754 ret = -EINVAL;
2755 goto err;
2756 }
2757 }
2758
2759 /* Check scaler data */
2760 if (cfg->flags & SDE_DRM_DESTSCALER_SCALE_UPDATE ||
2761 cfg->flags & SDE_DRM_DESTSCALER_ENHANCER_UPDATE) {
2762 if (!cfg->scl3_cfg) {
2763 ret = -EINVAL;
2764 SDE_ERROR("null scale data\n");
2765 goto err;
2766 }
2767 if (cfg->scl3_cfg->src_width[0] > max_in_width ||
2768 cfg->scl3_cfg->dst_width > max_out_width ||
2769 !cfg->scl3_cfg->src_width[0] ||
2770 !cfg->scl3_cfg->dst_width) {
2771 SDE_ERROR("scale width(%d %d) for ds-%d:\n",
2772 cfg->scl3_cfg->src_width[0],
2773 cfg->scl3_cfg->dst_width,
2774 hw_ds->idx - DS_0);
2775 SDE_ERROR("scale_en = %d, DE_en =%d\n",
2776 cfg->scl3_cfg->enable,
2777 cfg->scl3_cfg->de.enable);
2778
2779 cfg->flags &=
2780 ~SDE_DRM_DESTSCALER_SCALE_UPDATE;
2781 cfg->flags &=
2782 ~SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
2783
2784 ret = -EINVAL;
2785 goto err;
2786 }
2787 }
2788
2789 if (cfg->flags & SDE_DRM_DESTSCALER_ENABLE)
2790 num_ds_enable++;
2791
2792 /**
2793 * Validation successful, indicator for flush to be issued
2794 */
2795 cfg->set_lm_flush = true;
2796
2797 SDE_DEBUG("ds[%d]: flags = 0x%X\n",
2798 hw_ds->idx - DS_0, cfg->flags);
2799 }
2800
2801disable:
2802 SDE_DEBUG("dest scaler enable status, old = %d, new = %d",
2803 cstate->num_ds_enabled, num_ds_enable);
2804 SDE_EVT32(DRMID(crtc), cstate->num_ds_enabled, num_ds_enable,
2805 cstate->ds_dirty);
2806
2807 if (cstate->num_ds_enabled != num_ds_enable) {
2808 /* Disabling destination scaler */
2809 if (!num_ds_enable) {
2810 for (i = 0; i < sde_crtc->num_mixers; i++) {
2811 cfg = &cstate->ds_cfg[i];
2812 cfg->ndx = i;
2813 /* Update scaler settings in disable case */
2814 cfg->flags = SDE_DRM_DESTSCALER_SCALE_UPDATE;
2815 cfg->scl3_cfg->enable = 0;
2816 cfg->scl3_cfg->de.enable = 0;
2817 cfg->set_lm_flush = true;
2818 }
2819 }
2820 cstate->num_ds_enabled = num_ds_enable;
2821 cstate->ds_dirty = true;
2822 }
2823
2824 return 0;
2825
2826err:
2827 cstate->ds_dirty = false;
2828 return ret;
2829}
2830
2831/**
Clarence Ipcae1bb62016-07-07 12:07:13 -04002832 * _sde_crtc_wait_for_fences - wait for incoming framebuffer sync fences
2833 * @crtc: Pointer to CRTC object
2834 */
2835static void _sde_crtc_wait_for_fences(struct drm_crtc *crtc)
2836{
2837 struct drm_plane *plane = NULL;
2838 uint32_t wait_ms = 1;
Clarence Ip8dedc232016-09-09 16:41:00 -04002839 ktime_t kt_end, kt_wait;
Dhaval Patel39323d42017-03-01 23:48:24 -08002840 int rc = 0;
Clarence Ipcae1bb62016-07-07 12:07:13 -04002841
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04002842 SDE_DEBUG("\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002843
2844 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07002845 SDE_ERROR("invalid crtc/state %pK\n", crtc);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002846 return;
2847 }
2848
2849 /* use monotonic timer to limit total fence wait time */
Clarence Ip8dedc232016-09-09 16:41:00 -04002850 kt_end = ktime_add_ns(ktime_get(),
2851 to_sde_crtc_state(crtc->state)->input_fence_timeout_ns);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002852
2853 /*
2854 * Wait for fences sequentially, as all of them need to be signalled
2855 * before we can proceed.
2856 *
2857 * Limit total wait time to INPUT_FENCE_TIMEOUT, but still call
2858 * sde_plane_wait_input_fence with wait_ms == 0 after the timeout so
2859 * that each plane can check its fence status and react appropriately
Dhaval Patel39323d42017-03-01 23:48:24 -08002860 * if its fence has timed out. Call input fence wait multiple times if
2861 * fence wait is interrupted due to interrupt call.
Clarence Ipcae1bb62016-07-07 12:07:13 -04002862 */
Narendra Muppalla77b32932017-05-10 13:53:11 -07002863 SDE_ATRACE_BEGIN("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002864 drm_atomic_crtc_for_each_plane(plane, crtc) {
Dhaval Patel39323d42017-03-01 23:48:24 -08002865 do {
Clarence Ip8dedc232016-09-09 16:41:00 -04002866 kt_wait = ktime_sub(kt_end, ktime_get());
2867 if (ktime_compare(kt_wait, ktime_set(0, 0)) >= 0)
2868 wait_ms = ktime_to_ms(kt_wait);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002869 else
2870 wait_ms = 0;
Dhaval Patel39323d42017-03-01 23:48:24 -08002871
2872 rc = sde_plane_wait_input_fence(plane, wait_ms);
2873 } while (wait_ms && rc == -ERESTARTSYS);
Clarence Ipcae1bb62016-07-07 12:07:13 -04002874 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07002875 SDE_ATRACE_END("plane_wait_input_fence");
Clarence Ipcae1bb62016-07-07 12:07:13 -04002876}
2877
/**
 * _sde_crtc_setup_mixer_for_encoder - populate crtc mixers from RM reservation
 * @crtc: Pointer to drm crtc
 * @enc: Pointer to the drm encoder whose reserved hw blocks are consumed
 *
 * Walks the resource manager's LM/CTL/DSPP/DS reservations made for @enc
 * and appends one sde_crtc_mixer entry per reserved layer mixer, bumping
 * sde_crtc->num_mixers. The four iterators advance in lock-step, so the
 * Nth LM is paired with the Nth CTL/DSPP/DS of the reservation.
 */
static void _sde_crtc_setup_mixer_for_encoder(
		struct drm_crtc *crtc,
		struct drm_encoder *enc)
{
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_kms *sde_kms = _sde_crtc_get_kms(crtc);
	struct sde_rm *rm = &sde_kms->rm;
	struct sde_crtc_mixer *mixer;
	struct sde_hw_ctl *last_valid_ctl = NULL;
	int i;
	struct sde_rm_hw_iter lm_iter, ctl_iter, dspp_iter, ds_iter;

	sde_rm_init_hw_iter(&lm_iter, enc->base.id, SDE_HW_BLK_LM);
	sde_rm_init_hw_iter(&ctl_iter, enc->base.id, SDE_HW_BLK_CTL);
	sde_rm_init_hw_iter(&dspp_iter, enc->base.id, SDE_HW_BLK_DSPP);
	sde_rm_init_hw_iter(&ds_iter, enc->base.id, SDE_HW_BLK_DS);

	/* Set up all the mixers and ctls reserved by this encoder */
	for (i = sde_crtc->num_mixers; i < ARRAY_SIZE(sde_crtc->mixers); i++) {
		mixer = &sde_crtc->mixers[i];

		/* the LM reservation bounds the loop; stop when exhausted */
		if (!sde_rm_get_hw(rm, &lm_iter))
			break;
		mixer->hw_lm = (struct sde_hw_mixer *)lm_iter.hw;

		/* CTL may be <= LMs, if <, multiple LMs controlled by 1 CTL */
		if (!sde_rm_get_hw(rm, &ctl_iter)) {
			SDE_DEBUG("no ctl assigned to lm %d, using previous\n",
					mixer->hw_lm->idx - LM_0);
			mixer->hw_ctl = last_valid_ctl;
		} else {
			mixer->hw_ctl = (struct sde_hw_ctl *)ctl_iter.hw;
			last_valid_ctl = mixer->hw_ctl;
		}

		/* Shouldn't happen, mixers are always >= ctls */
		if (!mixer->hw_ctl) {
			SDE_ERROR("no valid ctls found for lm %d\n",
					mixer->hw_lm->idx - LM_0);
			return;
		}

		/* Dspp may be null */
		(void) sde_rm_get_hw(rm, &dspp_iter);
		mixer->hw_dspp = (struct sde_hw_dspp *)dspp_iter.hw;

		/* DS may be null */
		(void) sde_rm_get_hw(rm, &ds_iter);
		mixer->hw_ds = (struct sde_hw_ds *)ds_iter.hw;

		mixer->encoder = enc;

		sde_crtc->num_mixers++;
		SDE_DEBUG("setup mixer %d: lm %d\n",
				i, mixer->hw_lm->idx - LM_0);
		SDE_DEBUG("setup mixer %d: ctl %d\n",
				i, mixer->hw_ctl->idx - CTL_0);
		if (mixer->hw_ds)
			SDE_DEBUG("setup mixer %d: ds %d\n",
				i, mixer->hw_ds->idx - DS_0);
	}
}
2940
2941static void _sde_crtc_setup_mixers(struct drm_crtc *crtc)
2942{
2943 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
2944 struct drm_encoder *enc;
2945
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002946 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04002947 sde_crtc->mixers_swapped = false;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002948 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
2949
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002950 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002951 /* Check for mixers on all encoders attached to this crtc */
2952 list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
2953 if (enc->crtc != crtc)
2954 continue;
2955
2956 _sde_crtc_setup_mixer_for_encoder(crtc, enc);
2957 }
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002958
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07002959 mutex_unlock(&sde_crtc->crtc_lock);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002960}
2961
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002962static void _sde_crtc_setup_is_ppsplit(struct drm_crtc_state *state)
2963{
2964 int i;
2965 struct sde_crtc_state *cstate;
2966
2967 cstate = to_sde_crtc_state(state);
2968
2969 cstate->is_ppsplit = false;
2970 for (i = 0; i < cstate->num_connectors; i++) {
2971 struct drm_connector *conn = cstate->connectors[i];
2972
2973 if (sde_connector_get_topology_name(conn) ==
2974 SDE_RM_TOPOLOGY_PPSPLIT)
2975 cstate->is_ppsplit = true;
2976 }
2977}
2978
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002979static void _sde_crtc_setup_lm_bounds(struct drm_crtc *crtc,
2980 struct drm_crtc_state *state)
2981{
2982 struct sde_crtc *sde_crtc;
2983 struct sde_crtc_state *cstate;
2984 struct drm_display_mode *adj_mode;
2985 u32 crtc_split_width;
2986 int i;
2987
2988 if (!crtc || !state) {
2989 SDE_ERROR("invalid args\n");
2990 return;
2991 }
2992
2993 sde_crtc = to_sde_crtc(crtc);
2994 cstate = to_sde_crtc_state(state);
2995
2996 adj_mode = &state->adjusted_mode;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05302997 crtc_split_width = sde_crtc_get_mixer_width(sde_crtc, cstate, adj_mode);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05002998
2999 for (i = 0; i < sde_crtc->num_mixers; i++) {
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003000 cstate->lm_bounds[i].x = crtc_split_width * i;
3001 cstate->lm_bounds[i].y = 0;
3002 cstate->lm_bounds[i].w = crtc_split_width;
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05303003 cstate->lm_bounds[i].h =
3004 sde_crtc_get_mixer_height(sde_crtc, cstate, adj_mode);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003005 memcpy(&cstate->lm_roi[i], &cstate->lm_bounds[i],
3006 sizeof(cstate->lm_roi[i]));
Dhaval Patela5f75952017-07-25 11:17:41 -07003007 SDE_EVT32_VERBOSE(DRMID(crtc), i,
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04003008 cstate->lm_bounds[i].x, cstate->lm_bounds[i].y,
3009 cstate->lm_bounds[i].w, cstate->lm_bounds[i].h);
3010 SDE_DEBUG("%s: lm%d bnd&roi (%d,%d,%d,%d)\n", sde_crtc->name, i,
3011 cstate->lm_roi[i].x, cstate->lm_roi[i].y,
3012 cstate->lm_roi[i].w, cstate->lm_roi[i].h);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05003013 }
3014
3015 drm_mode_debug_printmodeline(adj_mode);
3016}
3017
/**
 * sde_crtc_atomic_begin - drm crtc atomic_begin hook
 * @crtc: Pointer to drm crtc
 * @old_state: Pointer to the previous drm crtc state
 *
 * Prepares the crtc for the atomic commit: lazily allocates mixers on the
 * first commit after enable, latches the pageflip event under the event
 * lock, lets each attached encoder arm its pending-kickoff mask, then
 * programs blend and dest scaler state. Color processing properties are
 * applied only while the SMMU context bank is attached, since CP blocks
 * program hardware through AXI buffers.
 */
static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
		struct drm_crtc_state *old_state)
{
	struct sde_crtc *sde_crtc;
	struct drm_encoder *encoder;
	struct drm_device *dev;
	unsigned long flags;
	struct sde_crtc_smmu_state_data *smmu_state;

	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	if (!crtc->state->enable) {
		SDE_DEBUG("crtc%d -> enable %d, skip atomic_begin\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	SDE_DEBUG("crtc%d\n", crtc->base.id);

	sde_crtc = to_sde_crtc(crtc);
	dev = crtc->dev;
	smmu_state = &sde_crtc->smmu_state;

	/* first commit after enable: gather hw blocks and derive bounds */
	if (!sde_crtc->num_mixers) {
		_sde_crtc_setup_mixers(crtc);
		_sde_crtc_setup_is_ppsplit(crtc->state);
		_sde_crtc_setup_lm_bounds(crtc, crtc->state);
	}

	/* a still-pending event here indicates a missed completion */
	if (sde_crtc->event) {
		WARN_ON(sde_crtc->event);
	} else {
		spin_lock_irqsave(&dev->event_lock, flags);
		sde_crtc->event = crtc->state->event;
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc != crtc)
			continue;

		/* encoder will trigger pending mask now */
		sde_encoder_trigger_kickoff_pending(encoder);
	}

	/*
	 * If no mixers have been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_mixers))
		return;

	_sde_crtc_blend_setup(crtc);
	_sde_crtc_dest_scaler_setup(crtc);

	/*
	 * Since CP properties use AXI buffer to program the
	 * HW, check if context bank is in attached
	 * state,
	 * apply color processing properties only if
	 * smmu state is attached,
	 */
	if ((smmu_state->state != DETACHED) &&
			(smmu_state->state != DETACH_ALL_REQ))
		sde_cp_crtc_apply_properties(crtc);

	/*
	 * PP_DONE irq is only used by command mode for now.
	 * It is better to request pending before FLUSH and START trigger
	 * to make sure no pp_done irq missed.
	 * This is safe because no pp_done will happen before SW trigger
	 * in command mode.
	 */
}
3096
/**
 * sde_crtc_atomic_flush - drm crtc atomic_flush hook
 * @crtc: Pointer to drm crtc
 * @old_crtc_state: Pointer to the previous drm crtc state
 *
 * Completes the per-frame programming ahead of kickoff: latches the
 * pageflip event if atomic_begin did not, restores power-collapsed
 * planes, waits for input fences, caches the RSC client, updates
 * bandwidth/perf votes, and flushes every plane. The actual ctl kickoff
 * is scheduled by the outer commit layer.
 */
static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
		struct drm_crtc_state *old_crtc_state)
{
	struct drm_encoder *encoder;
	struct sde_crtc *sde_crtc;
	struct drm_device *dev;
	struct drm_plane *plane;
	struct msm_drm_private *priv;
	struct msm_drm_thread *event_thread;
	unsigned long flags;
	struct sde_crtc_state *cstate;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	if (!crtc->state->enable) {
		SDE_DEBUG("crtc%d -> enable %d, skip atomic_flush\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	SDE_DEBUG("crtc%d\n", crtc->base.id);

	sde_crtc = to_sde_crtc(crtc);
	cstate = to_sde_crtc_state(crtc->state);
	dev = crtc->dev;
	priv = dev->dev_private;

	if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		SDE_ERROR("invalid crtc index[%d]\n", crtc->index);
		return;
	}

	/* NOTE(review): event_thread is looked up but unused below;
	 * the lookup still validates crtc->index - confirm intent.
	 */
	event_thread = &priv->event_thread[crtc->index];

	if (sde_crtc->event) {
		SDE_DEBUG("already received sde_crtc->event\n");
	} else {
		spin_lock_irqsave(&dev->event_lock, flags);
		sde_crtc->event = crtc->state->event;
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/*
	 * If no mixers has been allocated in sde_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!sde_crtc->num_mixers))
		return;

	/*
	 * For planes without commit update, drm framework will not add
	 * those planes to current state since hardware update is not
	 * required. However, if those planes were power collapsed since
	 * last commit cycle, driver has to restore the hardware state
	 * of those planes explicitly here prior to plane flush.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc)
		sde_plane_restore(plane);

	/* wait for acquire fences before anything else is done */
	_sde_crtc_wait_for_fences(crtc);

	/* cache the RSC client from an attached encoder once per state */
	if (!cstate->rsc_update) {
		drm_for_each_encoder(encoder, dev) {
			if (encoder->crtc != crtc)
				continue;

			cstate->rsc_client =
				sde_encoder_get_rsc_client(encoder);
		}
		cstate->rsc_update = true;
	}

	/* update performance setting before crtc kickoff */
	sde_core_perf_crtc_update(crtc, 1, false);

	/*
	 * Final plane updates: Give each plane a chance to complete all
	 * required writes/flushing before crtc's "flush
	 * everything" call below.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		/* propagate smmu transition failures as plane errors */
		if (sde_crtc->smmu_state.transition_error)
			sde_plane_set_error(plane, true);
		sde_plane_flush(plane);
	}

	/* Kickoff will be scheduled by outer layer */
}
3190
Clarence Ip7a753bb2016-07-07 11:47:44 -04003191/**
3192 * sde_crtc_destroy_state - state destroy hook
3193 * @crtc: drm CRTC
3194 * @state: CRTC state object to release
3195 */
3196static void sde_crtc_destroy_state(struct drm_crtc *crtc,
3197 struct drm_crtc_state *state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003198{
Clarence Ip7a753bb2016-07-07 11:47:44 -04003199 struct sde_crtc *sde_crtc;
3200 struct sde_crtc_state *cstate;
3201
3202 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003203 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003204 return;
3205 }
3206
3207 sde_crtc = to_sde_crtc(crtc);
3208 cstate = to_sde_crtc_state(state);
3209
Alan Kwong163d2612016-11-03 00:56:56 -04003210 SDE_DEBUG("crtc%d\n", crtc->base.id);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003211
Alan Kwongcdb2f282017-03-18 13:42:06 -07003212 _sde_crtc_rp_destroy(&cstate->rp);
3213
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003214 __drm_atomic_helper_crtc_destroy_state(state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003215
3216 /* destroy value helper */
3217 msm_property_destroy_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003218 &cstate->property_state);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003219}
3220
/**
 * _sde_crtc_wait_for_frame_done - wait for all pending frames to complete
 * @crtc: Pointer to drm crtc
 *
 * Flushes in-flight frame-event work so encoder FRAME_EVENTS reach the
 * crtc, then waits (bounded by SDE_FRAME_DONE_TIMEOUT) for the frame
 * done completion. Returns 0 on success or when no frames are pending,
 * -EINVAL on bad input, -ETIMEDOUT if the completion never fires.
 */
static int _sde_crtc_wait_for_frame_done(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	int ret, rc = 0, i;

	if (!crtc) {
		SDE_ERROR("invalid argument\n");
		return -EINVAL;
	}
	sde_crtc = to_sde_crtc(crtc);

	if (!atomic_read(&sde_crtc->frame_pending)) {
		SDE_DEBUG("no frames pending\n");
		return 0;
	}

	SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FUNC_ENTRY);

	/*
	 * flush all the event thread work to make sure all the
	 * FRAME_EVENTS from encoder are propagated to crtc
	 *
	 * NOTE(review): only entries whose list node is empty are flushed -
	 * presumably an empty node means the event is off the free list and
	 * in flight on the worker; confirm against the frame_events lifecycle.
	 */
	for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
		if (list_empty(&sde_crtc->frame_events[i].list))
			kthread_flush_work(&sde_crtc->frame_events[i].work);
	}

	ret = wait_for_completion_timeout(&sde_crtc->frame_done_comp,
			msecs_to_jiffies(SDE_FRAME_DONE_TIMEOUT));
	if (!ret) {
		SDE_ERROR("frame done completion wait timed out, ret:%d\n",
				ret);
		SDE_EVT32(DRMID(crtc), SDE_EVTLOG_FATAL);
		rc = -ETIMEDOUT;
	}
	SDE_EVT32_VERBOSE(DRMID(crtc), SDE_EVTLOG_FUNC_EXIT);

	return rc;
}
3260
/**
 * _sde_crtc_commit_kickoff_rot - program sbuf/inline rotator state at kickoff
 * @crtc: Pointer to drm crtc
 * @cstate: Pointer to sde crtc state for this commit
 *
 * Kicks off inline rotation for each plane when the commit is not in
 * offline rotator mode, then programs the sbuf configuration and pending
 * flush mask on the master (lowest-index) ctl. On a plane rotation error
 * the commit is demoted to offline mode. Returns 0 on success or the
 * first plane kickoff error; -EINVAL on bad arguments.
 */
static int _sde_crtc_commit_kickoff_rot(struct drm_crtc *crtc,
		struct sde_crtc_state *cstate)
{
	struct drm_plane *plane;
	struct sde_crtc *sde_crtc;
	struct sde_hw_ctl *ctl, *master_ctl;
	u32 flush_mask;
	int i, rc = 0;

	if (!crtc || !cstate)
		return -EINVAL;

	sde_crtc = to_sde_crtc(crtc);

	/*
	 * Update sbuf configuration and flush bits if a flush
	 * mask has been defined for either the current or
	 * previous commit.
	 *
	 * Updates are also required for the first commit after
	 * sbuf_flush_mask becomes 0x0, to properly transition
	 * the hardware out of sbuf mode.
	 */
	if (!sde_crtc->sbuf_flush_mask_old && !sde_crtc->sbuf_flush_mask)
		return 0;

	/* combine old and new masks, then age the current mask */
	flush_mask = sde_crtc->sbuf_flush_mask_old | sde_crtc->sbuf_flush_mask;
	sde_crtc->sbuf_flush_mask_old = sde_crtc->sbuf_flush_mask;

	SDE_ATRACE_BEGIN("crtc_kickoff_rot");

	if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_OFFLINE) {
		drm_atomic_crtc_for_each_plane(plane, crtc) {
			rc = sde_plane_kickoff_rot(plane);
			if (rc) {
				SDE_ERROR("crtc%d cancelling inline rotation\n",
						crtc->base.id);
				SDE_EVT32(DRMID(crtc), SDE_EVTLOG_ERROR);

				/* revert to offline on errors */
				cstate->sbuf_cfg.rot_op_mode =
					SDE_CTL_ROT_OP_MODE_OFFLINE;
				break;
			}
		}
	}

	/* master ctl is the allocated ctl with the lowest hw index */
	master_ctl = NULL;
	for (i = 0; i < sde_crtc->num_mixers; i++) {
		ctl = sde_crtc->mixers[i].hw_ctl;
		if (!ctl)
			continue;

		if (!master_ctl || master_ctl->idx > ctl->idx)
			master_ctl = ctl;
	}

	/* only update sbuf_cfg and flush for master ctl */
	if (master_ctl && master_ctl->ops.setup_sbuf_cfg &&
			master_ctl->ops.update_pending_flush) {
		master_ctl->ops.setup_sbuf_cfg(master_ctl, &cstate->sbuf_cfg);
		master_ctl->ops.update_pending_flush(master_ctl, flush_mask);

		/* explicitly trigger rotator for async modes */
		if (cstate->sbuf_cfg.rot_op_mode ==
				SDE_CTL_ROT_OP_MODE_INLINE_ASYNC &&
				master_ctl->ops.trigger_rot_start) {
			master_ctl->ops.trigger_rot_start(master_ctl);
			SDE_EVT32(DRMID(crtc), master_ctl->idx - CTL_0);
		}
	}

	SDE_ATRACE_END("crtc_kickoff_rot");
	return rc;
}
3336
Clarence Ip662698e2017-09-12 18:34:16 -04003337/**
3338 * _sde_crtc_remove_pipe_flush - remove staged pipes from flush mask
3339 * @sde_crtc: Pointer to sde crtc structure
3340 */
3341static void _sde_crtc_remove_pipe_flush(struct sde_crtc *sde_crtc)
3342{
3343 struct sde_crtc_mixer *mixer;
3344 struct sde_hw_ctl *ctl;
3345 u32 i, flush_mask;
3346
3347 if (!sde_crtc)
3348 return;
3349
3350 mixer = sde_crtc->mixers;
3351 for (i = 0; i < sde_crtc->num_mixers; i++) {
3352 ctl = mixer[i].hw_ctl;
3353 if (!ctl || !ctl->ops.get_pending_flush ||
3354 !ctl->ops.clear_pending_flush ||
3355 !ctl->ops.update_pending_flush)
3356 continue;
3357
3358 flush_mask = ctl->ops.get_pending_flush(ctl);
3359 flush_mask &= ~mixer[i].pipe_mask;
3360 ctl->ops.clear_pending_flush(ctl);
3361 ctl->ops.update_pending_flush(ctl, flush_mask);
3362 }
3363}
3364
Clarence Ip95f530b2017-09-06 17:31:41 -04003365void sde_crtc_commit_kickoff(struct drm_crtc *crtc)
3366{
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003367 struct drm_encoder *encoder;
3368 struct drm_device *dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003369 struct sde_crtc *sde_crtc;
Alan Kwong67a3f792016-11-01 23:16:53 -04003370 struct msm_drm_private *priv;
3371 struct sde_kms *sde_kms;
Alan Kwong4aacd532017-02-04 18:51:33 -08003372 struct sde_crtc_state *cstate;
Clarence Ip662698e2017-09-12 18:34:16 -04003373 bool is_error;
Clarence Ip95f530b2017-09-06 17:31:41 -04003374 int ret;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003375
3376 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003377 SDE_ERROR("invalid argument\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003378 return;
3379 }
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003380 dev = crtc->dev;
Alan Kwong628d19e2016-10-31 13:50:13 -04003381 sde_crtc = to_sde_crtc(crtc);
Alan Kwong67a3f792016-11-01 23:16:53 -04003382 sde_kms = _sde_crtc_get_kms(crtc);
Clarence Ip662698e2017-09-12 18:34:16 -04003383 is_error = false;
Narendra Muppallaec11a0a2017-06-15 15:35:17 -07003384
3385 if (!sde_kms || !sde_kms->dev || !sde_kms->dev->dev_private) {
3386 SDE_ERROR("invalid argument\n");
3387 return;
3388 }
3389
Alan Kwong67a3f792016-11-01 23:16:53 -04003390 priv = sde_kms->dev->dev_private;
Alan Kwong4aacd532017-02-04 18:51:33 -08003391 cstate = to_sde_crtc_state(crtc->state);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003392
Clarence Ip90b282d2017-05-04 10:00:32 -07003393 /*
3394 * If no mixers has been allocated in sde_crtc_atomic_check(),
3395 * it means we are trying to start a CRTC whose state is disabled:
3396 * nothing else needs to be done.
3397 */
3398 if (unlikely(!sde_crtc->num_mixers))
3399 return;
3400
Narendra Muppalla77b32932017-05-10 13:53:11 -07003401 SDE_ATRACE_BEGIN("crtc_commit");
Clarence Ip95f530b2017-09-06 17:31:41 -04003402
3403 /* default to ASYNC mode for inline rotation */
Clarence Ipb776b532017-09-12 18:30:06 -04003404 cstate->sbuf_cfg.rot_op_mode = sde_crtc->sbuf_flush_mask ?
Clarence Ip95f530b2017-09-06 17:31:41 -04003405 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC : SDE_CTL_ROT_OP_MODE_OFFLINE;
3406
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003407 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
Alan Kwong4aacd532017-02-04 18:51:33 -08003408 struct sde_encoder_kickoff_params params = { 0 };
3409
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003410 if (encoder->crtc != crtc)
3411 continue;
3412
3413 /*
3414 * Encoder will flush/start now, unless it has a tx pending.
3415 * If so, it may delay and flush at an irq event (e.g. ppdone)
3416 */
Alan Kwong4aacd532017-02-04 18:51:33 -08003417 params.inline_rotate_prefill = cstate->sbuf_prefill_line;
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003418 params.affected_displays = _sde_crtc_get_displays_affected(crtc,
3419 crtc->state);
Alan Kwong4aacd532017-02-04 18:51:33 -08003420 sde_encoder_prepare_for_kickoff(encoder, &params);
Clarence Ip95f530b2017-09-06 17:31:41 -04003421
3422 /*
3423 * For inline ASYNC modes, the flush bits are not written
3424 * to hardware atomically, so avoid using it if a video
3425 * mode encoder is active on this CRTC.
3426 */
3427 if (cstate->sbuf_cfg.rot_op_mode ==
3428 SDE_CTL_ROT_OP_MODE_INLINE_ASYNC &&
3429 sde_encoder_get_intf_mode(encoder) ==
3430 INTF_MODE_VIDEO)
3431 cstate->sbuf_cfg.rot_op_mode =
3432 SDE_CTL_ROT_OP_MODE_INLINE_SYNC;
Alan Kwong628d19e2016-10-31 13:50:13 -04003433 }
3434
Clarence Ip95f530b2017-09-06 17:31:41 -04003435 /*
3436 * For ASYNC inline modes, kick off the rotator now so that the H/W
3437 * can start as soon as it's ready.
3438 */
3439 if (cstate->sbuf_cfg.rot_op_mode == SDE_CTL_ROT_OP_MODE_INLINE_ASYNC)
Clarence Ip8ee49952017-09-20 11:10:50 -04003440 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3441 is_error = true;
Clarence Ip95f530b2017-09-06 17:31:41 -04003442
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003443 /* wait for frame_event_done completion */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003444 SDE_ATRACE_BEGIN("wait_for_frame_done_event");
3445 ret = _sde_crtc_wait_for_frame_done(crtc);
3446 SDE_ATRACE_END("wait_for_frame_done_event");
3447 if (ret) {
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003448 SDE_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
3449 crtc->base.id,
3450 atomic_read(&sde_crtc->frame_pending));
Clarence Ip662698e2017-09-12 18:34:16 -04003451
3452 is_error = true;
3453
3454 /* force offline rotation mode since the commit has no pipes */
3455 cstate->sbuf_cfg.rot_op_mode = SDE_CTL_ROT_OP_MODE_OFFLINE;
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003456 }
3457
3458 if (atomic_inc_return(&sde_crtc->frame_pending) == 1) {
Alan Kwong628d19e2016-10-31 13:50:13 -04003459 /* acquire bandwidth and other resources */
3460 SDE_DEBUG("crtc%d first commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003461 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3462 SDE_EVTLOG_FUNC_CASE1);
Alan Kwong628d19e2016-10-31 13:50:13 -04003463 } else {
3464 SDE_DEBUG("crtc%d commit\n", crtc->base.id);
Clarence Ip95f530b2017-09-06 17:31:41 -04003465 SDE_EVT32(DRMID(crtc), cstate->sbuf_cfg.rot_op_mode,
3466 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong628d19e2016-10-31 13:50:13 -04003467 }
Dhaval Pateld67cf4a2017-06-14 18:08:32 -07003468 sde_crtc->play_count++;
Alan Kwong628d19e2016-10-31 13:50:13 -04003469
Clarence Ip95f530b2017-09-06 17:31:41 -04003470 /*
3471 * For SYNC inline modes, delay the kick off until after the
3472 * wait for frame done in case the wait times out.
Clarence Ipb776b532017-09-12 18:30:06 -04003473 *
3474 * Also perform a final kickoff when transitioning back to
3475 * offline mode.
Clarence Ip95f530b2017-09-06 17:31:41 -04003476 */
Clarence Ipb776b532017-09-12 18:30:06 -04003477 if (cstate->sbuf_cfg.rot_op_mode != SDE_CTL_ROT_OP_MODE_INLINE_ASYNC)
Clarence Ip8ee49952017-09-20 11:10:50 -04003478 if (_sde_crtc_commit_kickoff_rot(crtc, cstate))
3479 is_error = true;
Clarence Ipf6b530a2017-08-21 19:39:18 -04003480
Clarence Ip980405d2017-08-08 18:33:44 -04003481 sde_vbif_clear_errors(sde_kms);
3482
Clarence Ip662698e2017-09-12 18:34:16 -04003483 if (is_error)
3484 _sde_crtc_remove_pipe_flush(sde_crtc);
3485
Alan Kwong628d19e2016-10-31 13:50:13 -04003486 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3487 if (encoder->crtc != crtc)
3488 continue;
3489
Clarence Ip662698e2017-09-12 18:34:16 -04003490 sde_encoder_kickoff(encoder, is_error);
Lloyd Atkinsone7bcdd22016-08-11 10:53:37 -04003491 }
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003492
Dhaval Patelb9850c02017-08-07 22:55:47 -07003493 reinit_completion(&sde_crtc->frame_done_comp);
Narendra Muppalla77b32932017-05-10 13:53:11 -07003494 SDE_ATRACE_END("crtc_commit");
3495 return;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003496}
3497
Clarence Ip7a753bb2016-07-07 11:47:44 -04003498/**
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003499 * _sde_crtc_vblank_enable_no_lock - update power resource and vblank request
Clarence Ip7f70ce42017-03-20 06:53:46 -07003500 * @sde_crtc: Pointer to sde crtc structure
3501 * @enable: Whether to enable/disable vblanks
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003502 *
3503 * @Return: error code
Clarence Ip7f70ce42017-03-20 06:53:46 -07003504 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003505static int _sde_crtc_vblank_enable_no_lock(
Clarence Ip7f70ce42017-03-20 06:53:46 -07003506 struct sde_crtc *sde_crtc, bool enable)
3507{
3508 struct drm_device *dev;
3509 struct drm_crtc *crtc;
3510 struct drm_encoder *enc;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003511
3512 if (!sde_crtc) {
3513 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003514 return -EINVAL;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003515 }
3516
3517 crtc = &sde_crtc->base;
3518 dev = crtc->dev;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003519
3520 if (enable) {
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003521 int ret;
3522
3523 /* drop lock since power crtc cb may try to re-acquire lock */
3524 mutex_unlock(&sde_crtc->crtc_lock);
3525 ret = _sde_crtc_power_enable(sde_crtc, true);
3526 mutex_lock(&sde_crtc->crtc_lock);
3527 if (ret)
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003528 return ret;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003529
Clarence Ip7f70ce42017-03-20 06:53:46 -07003530 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3531 if (enc->crtc != crtc)
3532 continue;
3533
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003534 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3535 sde_crtc->enabled,
3536 sde_crtc->suspend,
3537 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003538
3539 sde_encoder_register_vblank_callback(enc,
3540 sde_crtc_vblank_cb, (void *)crtc);
3541 }
3542 } else {
3543 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
3544 if (enc->crtc != crtc)
3545 continue;
3546
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003547 SDE_EVT32(DRMID(&sde_crtc->base), DRMID(enc), enable,
3548 sde_crtc->enabled,
3549 sde_crtc->suspend,
3550 sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003551
3552 sde_encoder_register_vblank_callback(enc, NULL, NULL);
3553 }
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003554
3555 /* drop lock since power crtc cb may try to re-acquire lock */
3556 mutex_unlock(&sde_crtc->crtc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003557 _sde_crtc_power_enable(sde_crtc, false);
Lloyd Atkinson2c554eb2017-05-24 16:22:39 -04003558 mutex_lock(&sde_crtc->crtc_lock);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003559 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003560
3561 return 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003562}
3563
3564/**
3565 * _sde_crtc_set_suspend - notify crtc of suspend enable/disable
3566 * @crtc: Pointer to drm crtc object
3567 * @enable: true to enable suspend, false to indicate resume
3568 */
3569static void _sde_crtc_set_suspend(struct drm_crtc *crtc, bool enable)
3570{
3571 struct sde_crtc *sde_crtc;
3572 struct msm_drm_private *priv;
3573 struct sde_kms *sde_kms;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003574 int ret = 0;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003575
3576 if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
3577 SDE_ERROR("invalid crtc\n");
3578 return;
3579 }
3580 sde_crtc = to_sde_crtc(crtc);
3581 priv = crtc->dev->dev_private;
3582
3583 if (!priv->kms) {
3584 SDE_ERROR("invalid crtc kms\n");
3585 return;
3586 }
3587 sde_kms = to_sde_kms(priv->kms);
3588
3589 SDE_DEBUG("crtc%d suspend = %d\n", crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003590 SDE_EVT32_VERBOSE(DRMID(crtc), enable);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003591
3592 mutex_lock(&sde_crtc->crtc_lock);
3593
Clarence Ip2f9beeb2017-03-16 11:04:53 -04003594 /*
Lloyd Atkinsonb2be0c42017-07-17 16:41:00 -04003595 * If the vblank is enabled, release a power reference on suspend
3596 * and take it back during resume (if it is still enabled).
Clarence Ip7f70ce42017-03-20 06:53:46 -07003597 */
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003598 SDE_EVT32(DRMID(&sde_crtc->base), enable, sde_crtc->enabled,
3599 sde_crtc->suspend, sde_crtc->vblank_requested);
Clarence Ip7f70ce42017-03-20 06:53:46 -07003600 if (sde_crtc->suspend == enable)
3601 SDE_DEBUG("crtc%d suspend already set to %d, ignoring update\n",
3602 crtc->base.id, enable);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003603 else if (sde_crtc->enabled && sde_crtc->vblank_requested) {
3604 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, !enable);
3605 if (ret)
3606 SDE_ERROR("%s vblank enable failed: %d\n",
3607 sde_crtc->name, ret);
3608 }
Clarence Ip7f70ce42017-03-20 06:53:46 -07003609
3610 sde_crtc->suspend = enable;
Clarence Ip7f70ce42017-03-20 06:53:46 -07003611 mutex_unlock(&sde_crtc->crtc_lock);
3612}
3613
3614/**
Clarence Ip7a753bb2016-07-07 11:47:44 -04003615 * sde_crtc_duplicate_state - state duplicate hook
3616 * @crtc: Pointer to drm crtc structure
3617 * @Returns: Pointer to new drm_crtc_state structure
3618 */
3619static struct drm_crtc_state *sde_crtc_duplicate_state(struct drm_crtc *crtc)
3620{
3621 struct sde_crtc *sde_crtc;
3622 struct sde_crtc_state *cstate, *old_cstate;
3623
3624 if (!crtc || !crtc->state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003625 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003626 return NULL;
3627 }
3628
3629 sde_crtc = to_sde_crtc(crtc);
3630 old_cstate = to_sde_crtc_state(crtc->state);
3631 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3632 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003633 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003634 return NULL;
3635 }
3636
3637 /* duplicate value helper */
3638 msm_property_duplicate_state(&sde_crtc->property_info,
3639 old_cstate, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003640 &cstate->property_state, cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003641
3642 /* duplicate base helper */
3643 __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
3644
Alan Kwongcdb2f282017-03-18 13:42:06 -07003645 _sde_crtc_rp_duplicate(&old_cstate->rp, &cstate->rp);
3646
Clarence Ip7a753bb2016-07-07 11:47:44 -04003647 return &cstate->base;
3648}
3649
3650/**
3651 * sde_crtc_reset - reset hook for CRTCs
3652 * Resets the atomic state for @crtc by freeing the state pointer (which might
3653 * be NULL, e.g. at driver load time) and allocating a new empty state object.
3654 * @crtc: Pointer to drm crtc structure
3655 */
3656static void sde_crtc_reset(struct drm_crtc *crtc)
3657{
3658 struct sde_crtc *sde_crtc;
3659 struct sde_crtc_state *cstate;
3660
3661 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003662 SDE_ERROR("invalid crtc\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003663 return;
3664 }
3665
Clarence Ip7f70ce42017-03-20 06:53:46 -07003666 /* revert suspend actions, if necessary */
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003667 if (sde_kms_is_suspend_state(crtc->dev)) {
Clarence Ip7f70ce42017-03-20 06:53:46 -07003668 _sde_crtc_set_suspend(crtc, false);
3669
Veera Sundaram Sankarandb43e282017-09-19 18:32:52 -07003670 if (!sde_crtc_is_reset_required(crtc)) {
3671 SDE_DEBUG("avoiding reset for crtc:%d\n",
3672 crtc->base.id);
3673 return;
3674 }
3675 }
3676
Clarence Ip7a753bb2016-07-07 11:47:44 -04003677 /* remove previous state, if present */
3678 if (crtc->state) {
3679 sde_crtc_destroy_state(crtc, crtc->state);
3680 crtc->state = 0;
3681 }
3682
3683 sde_crtc = to_sde_crtc(crtc);
3684 cstate = msm_property_alloc_state(&sde_crtc->property_info);
3685 if (!cstate) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07003686 SDE_ERROR("failed to allocate state\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04003687 return;
3688 }
3689
3690 /* reset value helper */
3691 msm_property_reset_state(&sde_crtc->property_info, cstate,
Clarence Ip4a2955d2017-07-04 18:04:33 -04003692 &cstate->property_state,
3693 cstate->property_values);
Clarence Ip7a753bb2016-07-07 11:47:44 -04003694
Clarence Ipcae1bb62016-07-07 12:07:13 -04003695 _sde_crtc_set_input_fence_timeout(cstate);
3696
Alan Kwong310e9b02017-08-03 02:04:07 -04003697 _sde_crtc_rp_reset(&cstate->rp, &sde_crtc->rp_lock,
3698 &sde_crtc->rp_head);
Alan Kwongcdb2f282017-03-18 13:42:06 -07003699
Clarence Ip7a753bb2016-07-07 11:47:44 -04003700 cstate->base.crtc = crtc;
3701 crtc->state = &cstate->base;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003702}
3703
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003704static void sde_crtc_handle_power_event(u32 event_type, void *arg)
3705{
3706 struct drm_crtc *crtc = arg;
3707 struct sde_crtc *sde_crtc;
Dhaval Patel010f5172017-08-01 22:40:09 -07003708 struct drm_plane *plane;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003709 struct drm_encoder *encoder;
Dhaval Patel010f5172017-08-01 22:40:09 -07003710 struct sde_crtc_mixer *m;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003711 u32 i, misr_status;
Ping Licc868fc2017-08-11 16:56:44 -07003712 unsigned long flags;
3713 struct sde_crtc_irq_info *node = NULL;
3714 int ret = 0;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003715
3716 if (!crtc) {
3717 SDE_ERROR("invalid crtc\n");
3718 return;
3719 }
3720 sde_crtc = to_sde_crtc(crtc);
3721
3722 mutex_lock(&sde_crtc->crtc_lock);
3723
3724 SDE_EVT32(DRMID(crtc), event_type);
3725
Dhaval Patel010f5172017-08-01 22:40:09 -07003726 switch (event_type) {
3727 case SDE_POWER_EVENT_POST_ENABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003728 /* restore encoder; crtc will be programmed during commit */
3729 drm_for_each_encoder(encoder, crtc->dev) {
3730 if (encoder->crtc != crtc)
3731 continue;
3732
3733 sde_encoder_virt_restore(encoder);
3734 }
Ping Licc868fc2017-08-11 16:56:44 -07003735
3736 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3737 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3738 ret = 0;
3739 if (node->func)
3740 ret = node->func(crtc, true, &node->irq);
3741 if (ret)
3742 SDE_ERROR("%s failed to enable event %x\n",
3743 sde_crtc->name, node->event);
3744 }
3745 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3746
Ping Lie505f3b2017-06-19 14:19:08 -07003747 sde_cp_crtc_post_ipc(crtc);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003748
Dhaval Patel010f5172017-08-01 22:40:09 -07003749 for (i = 0; i < sde_crtc->num_mixers; ++i) {
3750 m = &sde_crtc->mixers[i];
3751 if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
3752 !sde_crtc->misr_enable)
3753 continue;
3754
3755 m->hw_lm->ops.setup_misr(m->hw_lm, true,
3756 sde_crtc->misr_frame_count);
3757 }
3758 break;
3759 case SDE_POWER_EVENT_PRE_DISABLE:
3760 for (i = 0; i < sde_crtc->num_mixers; ++i) {
3761 m = &sde_crtc->mixers[i];
3762 if (!m->hw_lm || !m->hw_lm->ops.collect_misr ||
3763 !sde_crtc->misr_enable)
3764 continue;
3765
3766 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
3767 sde_crtc->misr_data[i] = misr_status ? misr_status :
3768 sde_crtc->misr_data[i];
3769 }
Ping Licc868fc2017-08-11 16:56:44 -07003770
3771 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3772 node = NULL;
3773 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3774 ret = 0;
3775 if (node->func)
3776 ret = node->func(crtc, false, &node->irq);
3777 if (ret)
3778 SDE_ERROR("%s failed to disable event %x\n",
3779 sde_crtc->name, node->event);
3780 }
3781 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3782
Dhaval Patel010f5172017-08-01 22:40:09 -07003783 sde_cp_crtc_pre_ipc(crtc);
3784 break;
3785 case SDE_POWER_EVENT_POST_DISABLE:
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003786 /*
3787 * set revalidate flag in planes, so it will be re-programmed
3788 * in the next frame update
3789 */
3790 drm_atomic_crtc_for_each_plane(plane, crtc)
3791 sde_plane_set_revalidate(plane, true);
Alan Kwong8a9b38a2017-06-22 11:30:52 -04003792
Gopikrishnaiah Anandandb90fa12017-05-09 17:56:08 -07003793 sde_cp_crtc_suspend(crtc);
Dhaval Patel010f5172017-08-01 22:40:09 -07003794 break;
3795 default:
3796 SDE_DEBUG("event:%d not handled\n", event_type);
3797 break;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003798 }
3799
3800 mutex_unlock(&sde_crtc->crtc_lock);
3801}
3802
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003803static void sde_crtc_disable(struct drm_crtc *crtc)
3804{
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003805 struct sde_crtc *sde_crtc;
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003806 struct sde_crtc_state *cstate;
Alan Kwong07da0982016-11-04 12:57:45 -04003807 struct drm_encoder *encoder;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003808 struct msm_drm_private *priv;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07003809 unsigned long flags;
3810 struct sde_crtc_irq_info *node = NULL;
Ping Lic5c2e0b2017-08-02 15:17:59 -07003811 struct drm_event event;
3812 u32 power_on;
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003813 int ret, i;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003814
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003815 if (!crtc || !crtc->dev || !crtc->dev->dev_private || !crtc->state) {
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04003816 SDE_ERROR("invalid crtc\n");
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003817 return;
3818 }
3819 sde_crtc = to_sde_crtc(crtc);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003820 cstate = to_sde_crtc_state(crtc->state);
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003821 priv = crtc->dev->dev_private;
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003822
Alan Kwong163d2612016-11-03 00:56:56 -04003823 SDE_DEBUG("crtc%d\n", crtc->base.id);
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003824
Sandeep Panda98d6ab22017-09-05 08:03:16 +05303825 for (i = 0; i < cstate->num_connectors; i++)
3826 sde_connector_schedule_status_work(cstate->connectors[i],
3827 false);
3828
Clarence Ipd86f6e42017-08-08 18:31:00 -04003829 if (sde_kms_is_suspend_state(crtc->dev))
Clarence Ip7f70ce42017-03-20 06:53:46 -07003830 _sde_crtc_set_suspend(crtc, true);
3831
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07003832 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003833 SDE_EVT32_VERBOSE(DRMID(crtc));
Alan Kwong628d19e2016-10-31 13:50:13 -04003834
Ping Lic5c2e0b2017-08-02 15:17:59 -07003835 /* update color processing on suspend */
3836 event.type = DRM_EVENT_CRTC_POWER;
3837 event.length = sizeof(u32);
3838 sde_cp_crtc_suspend(crtc);
3839 power_on = 0;
3840 msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
3841 (u8 *)&power_on);
3842
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003843 /* wait for frame_event_done completion */
3844 if (_sde_crtc_wait_for_frame_done(crtc))
3845 SDE_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
3846 crtc->base.id,
3847 atomic_read(&sde_crtc->frame_pending));
3848
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003849 SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
3850 sde_crtc->vblank_requested);
3851 if (sde_crtc->enabled && !sde_crtc->suspend &&
3852 sde_crtc->vblank_requested) {
3853 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, false);
3854 if (ret)
3855 SDE_ERROR("%s vblank enable failed: %d\n",
3856 sde_crtc->name, ret);
Alan Kwong07da0982016-11-04 12:57:45 -04003857 }
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04003858 sde_crtc->enabled = false;
Alan Kwong07da0982016-11-04 12:57:45 -04003859
Alan Kwong628d19e2016-10-31 13:50:13 -04003860 if (atomic_read(&sde_crtc->frame_pending)) {
Dhaval Patel6c666622017-03-21 23:02:59 -07003861 SDE_EVT32(DRMID(crtc), atomic_read(&sde_crtc->frame_pending),
3862 SDE_EVTLOG_FUNC_CASE2);
Alan Kwong9aa061c2016-11-06 21:17:12 -05003863 sde_core_perf_crtc_release_bw(crtc);
Alan Kwong628d19e2016-10-31 13:50:13 -04003864 atomic_set(&sde_crtc->frame_pending, 0);
3865 }
3866
Ping Li6d5bf542017-06-27 11:40:28 -07003867 spin_lock_irqsave(&sde_crtc->spin_lock, flags);
3868 list_for_each_entry(node, &sde_crtc->user_event_list, list) {
3869 ret = 0;
3870 if (node->func)
3871 ret = node->func(crtc, false, &node->irq);
3872 if (ret)
3873 SDE_ERROR("%s failed to disable event %x\n",
3874 sde_crtc->name, node->event);
3875 }
3876 spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);
3877
Alan Kwong9aa061c2016-11-06 21:17:12 -05003878 sde_core_perf_crtc_update(crtc, 0, true);
3879
Alan Kwong628d19e2016-10-31 13:50:13 -04003880 drm_for_each_encoder(encoder, crtc->dev) {
3881 if (encoder->crtc != crtc)
3882 continue;
3883 sde_encoder_register_frame_event_callback(encoder, NULL, NULL);
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003884 cstate->rsc_client = NULL;
3885 cstate->rsc_update = false;
Alan Kwong628d19e2016-10-31 13:50:13 -04003886 }
3887
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07003888 if (sde_crtc->power_event)
3889 sde_power_handle_unregister_event(&priv->phandle,
3890 sde_crtc->power_event);
3891
Dhaval Patelfd8f7742017-08-10 13:11:22 -07003892 /**
3893 * All callbacks are unregistered and frame done waits are complete
3894 * at this point. No buffers are accessed by hardware.
3895 * reset the fence timeline if there is any issue.
3896 */
3897 sde_fence_signal(&sde_crtc->output_fence, ktime_get(), true);
3898 for (i = 0; i < cstate->num_connectors; ++i)
3899 sde_connector_commit_reset(cstate->connectors[i], ktime_get());
3900
Lloyd Atkinsonc44a52e2016-08-16 16:40:17 -04003901 memset(sde_crtc->mixers, 0, sizeof(sde_crtc->mixers));
3902 sde_crtc->num_mixers = 0;
Lloyd Atkinson94710bc2017-09-14 14:10:09 -04003903 sde_crtc->mixers_swapped = false;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07003904
Alan Kwong8411a9112017-06-06 19:29:01 -04003905 /* disable clk & bw control until clk & bw properties are set */
3906 cstate->bw_control = false;
Alan Kwong0230a102017-05-16 11:36:44 -07003907 cstate->bw_split_vote = false;
Alan Kwong8411a9112017-06-06 19:29:01 -04003908
Dhaval Patel3fbe6bf2016-10-20 20:00:41 -07003909 mutex_unlock(&sde_crtc->crtc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003910}
3911
/**
 * sde_crtc_enable - crtc enable hook
 * @crtc: Pointer to drm crtc structure
 *
 * Registers frame-event callbacks on attached encoders, re-requests vblank
 * if it was requested while disabled, notifies userspace of crtc power-on,
 * re-arms registered user events, registers for power resource events, and
 * restarts connector status work.
 */
static void sde_crtc_enable(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct drm_encoder *encoder;
	struct msm_drm_private *priv;
	unsigned long flags;
	struct sde_crtc_irq_info *node = NULL;
	struct drm_event event;
	u32 power_on;
	int ret, i;
	struct sde_crtc_state *cstate;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	priv = crtc->dev->dev_private;
	cstate = to_sde_crtc_state(crtc->state);

	SDE_DEBUG("crtc%d\n", crtc->base.id);
	SDE_EVT32_VERBOSE(DRMID(crtc));
	sde_crtc = to_sde_crtc(crtc);

	/* hook frame-done notifications from each attached encoder */
	drm_for_each_encoder(encoder, crtc->dev) {
		if (encoder->crtc != crtc)
			continue;
		sde_encoder_register_frame_event_callback(encoder,
				sde_crtc_frame_event_cb, (void *)crtc);
	}

	/* re-enable vblank if it was requested while the crtc was off */
	mutex_lock(&sde_crtc->crtc_lock);
	SDE_EVT32(DRMID(crtc), sde_crtc->enabled, sde_crtc->suspend,
			sde_crtc->vblank_requested);
	if (!sde_crtc->enabled && !sde_crtc->suspend &&
			sde_crtc->vblank_requested) {
		ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, true);
		if (ret)
			SDE_ERROR("%s vblank enable failed: %d\n",
					sde_crtc->name, ret);
	}
	sde_crtc->enabled = true;

	/* update color processing on resume */
	event.type = DRM_EVENT_CRTC_POWER;
	event.length = sizeof(u32);
	sde_cp_crtc_resume(crtc);
	power_on = 1;
	msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
			(u8 *)&power_on);

	mutex_unlock(&sde_crtc->crtc_lock);

	/* re-enable all registered user events under the spinlock */
	spin_lock_irqsave(&sde_crtc->spin_lock, flags);
	list_for_each_entry(node, &sde_crtc->user_event_list, list) {
		ret = 0;
		if (node->func)
			ret = node->func(crtc, true, &node->irq);
		if (ret)
			SDE_ERROR("%s failed to enable event %x\n",
					sde_crtc->name, node->event);
	}
	spin_unlock_irqrestore(&sde_crtc->spin_lock, flags);

	/* handled by sde_crtc_handle_power_event() */
	sde_crtc->power_event = sde_power_handle_register_event(
			&priv->phandle,
			SDE_POWER_EVENT_POST_ENABLE | SDE_POWER_EVENT_POST_DISABLE |
			SDE_POWER_EVENT_PRE_DISABLE,
			sde_crtc_handle_power_event, crtc, sde_crtc->name);

	for (i = 0; i < cstate->num_connectors; i++)
		sde_connector_schedule_status_work(cstate->connectors[i], true);
}
3984
/*
 * plane_state - local helper pairing a plane's drm/sde state with its
 * blend stage and pipe id; sorted by pstate_cmp (zpos, then crtc_x).
 */
struct plane_state {
	struct sde_plane_state *sde_pstate;	/* sde-specific plane state */
	const struct drm_plane_state *drm_pstate;	/* base drm plane state */
	int stage;	/* blend stage / z-order of the plane */
	u32 pipe_id;	/* hw pipe id — presumably SSPP index; verify at callers */
};
3991
Clarence Ipc47a0692016-10-11 10:54:17 -04003992static int pstate_cmp(const void *a, const void *b)
3993{
3994 struct plane_state *pa = (struct plane_state *)a;
3995 struct plane_state *pb = (struct plane_state *)b;
3996 int rc = 0;
3997 int pa_zpos, pb_zpos;
3998
3999 pa_zpos = sde_plane_get_property(pa->sde_pstate, PLANE_PROP_ZPOS);
4000 pb_zpos = sde_plane_get_property(pb->sde_pstate, PLANE_PROP_ZPOS);
4001
4002 if (pa_zpos != pb_zpos)
4003 rc = pa_zpos - pb_zpos;
4004 else
4005 rc = pa->drm_pstate->crtc_x - pb->drm_pstate->crtc_x;
4006
4007 return rc;
4008}
4009
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004010static int _sde_crtc_excl_rect_overlap_check(struct plane_state pstates[],
4011 int cnt, int curr_cnt, struct sde_rect *excl_rect, int z_pos)
4012{
4013 struct sde_rect dst_rect, intersect;
4014 int i, rc = -EINVAL;
4015 const struct drm_plane_state *pstate;
4016
4017 /* start checking from next plane */
4018 for (i = curr_cnt; i < cnt; i++) {
4019 pstate = pstates[i].drm_pstate;
4020 POPULATE_RECT(&dst_rect, pstate->crtc_x, pstate->crtc_y,
Veera Sundaram Sankaran9d9ff912017-06-20 10:41:21 -07004021 pstate->crtc_w, pstate->crtc_h, false);
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004022 sde_kms_rect_intersect(&dst_rect, excl_rect, &intersect);
4023
4024 if (intersect.w == excl_rect->w && intersect.h == excl_rect->h
4025 /* next plane may be on same z-order */
4026 && z_pos != pstates[i].stage) {
4027 rc = 0;
4028 goto end;
4029 }
4030 }
4031
4032 SDE_ERROR("excl rect does not find top overlapping rect\n");
4033end:
4034 return rc;
4035}
4036
4037/* no input validation - caller API has all the checks */
4038static int _sde_crtc_excl_dim_layer_check(struct drm_crtc_state *state,
4039 struct plane_state pstates[], int cnt)
4040{
4041 struct sde_crtc_state *cstate = to_sde_crtc_state(state);
4042 struct drm_display_mode *mode = &state->adjusted_mode;
4043 const struct drm_plane_state *pstate;
4044 struct sde_plane_state *sde_pstate;
4045 int rc = 0, i;
4046
4047 /* Check dim layer rect bounds and stage */
4048 for (i = 0; i < cstate->num_dim_layers; i++) {
4049 if ((CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.y,
4050 cstate->dim_layer[i].rect.h, mode->vdisplay)) ||
4051 (CHECK_LAYER_BOUNDS(cstate->dim_layer[i].rect.x,
4052 cstate->dim_layer[i].rect.w, mode->hdisplay)) ||
4053 (cstate->dim_layer[i].stage >= SDE_STAGE_MAX) ||
4054 (!cstate->dim_layer[i].rect.w) ||
4055 (!cstate->dim_layer[i].rect.h)) {
4056 SDE_ERROR("invalid dim_layer:{%d,%d,%d,%d}, stage:%d\n",
4057 cstate->dim_layer[i].rect.x,
4058 cstate->dim_layer[i].rect.y,
4059 cstate->dim_layer[i].rect.w,
4060 cstate->dim_layer[i].rect.h,
4061 cstate->dim_layer[i].stage);
4062 SDE_ERROR("display: %dx%d\n", mode->hdisplay,
4063 mode->vdisplay);
4064 rc = -E2BIG;
4065 goto end;
4066 }
4067 }
4068
4069 /* this is traversing on sorted z-order pstates */
4070 for (i = 0; i < cnt; i++) {
4071 pstate = pstates[i].drm_pstate;
4072 sde_pstate = to_sde_plane_state(pstate);
4073 if (sde_pstate->excl_rect.w && sde_pstate->excl_rect.h) {
4074 /* check overlap on all top z-order */
4075 rc = _sde_crtc_excl_rect_overlap_check(pstates, cnt,
4076 i + 1, &sde_pstate->excl_rect, pstates[i].stage);
4077 if (rc)
4078 goto end;
4079 }
4080 }
4081
4082end:
4083 return rc;
4084}
4085
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004086static int _sde_crtc_check_secure_state(struct drm_crtc *crtc,
4087 struct drm_crtc_state *state)
4088{
4089 struct drm_encoder *encoder;
4090 struct sde_crtc_state *cstate;
4091 uint32_t secure;
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004092 uint32_t fb_ns = 0, fb_sec = 0, fb_sec_dir = 0;
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004093 int encoder_cnt = 0;
4094 int rc;
4095
4096 if (!crtc || !state) {
4097 SDE_ERROR("invalid arguments\n");
4098 return -EINVAL;
4099 }
4100
4101 cstate = to_sde_crtc_state(state);
4102
4103 secure = sde_crtc_get_property(cstate,
4104 CRTC_PROP_SECURITY_LEVEL);
4105
4106 rc = _sde_crtc_find_plane_fb_modes(state,
4107 &fb_ns,
4108 &fb_sec,
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004109 &fb_sec_dir);
4110 if (rc)
4111 return rc;
4112
4113 /**
4114 * validate planes
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004115 * fb_sec_dir is for secure camera preview and secure display use case,
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004116 * fb_sec is for secure video playback,
4117 * fb_ns is for normal non secure use cases.
4118 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004119 if ((secure == SDE_DRM_SEC_ONLY) &&
4120 (fb_ns || fb_sec || (fb_sec && fb_sec_dir))) {
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004121 SDE_ERROR(
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004122 "crtc%d: invalid planes fb_modes Sec:%d, NS:%d, Sec_Dir:%d\n",
4123 crtc->base.id, fb_sec, fb_ns, fb_sec_dir);
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004124 return -EINVAL;
4125 }
4126
4127 /**
4128 * secure_crtc is not allowed in a shared toppolgy
4129 * across different encoders.
4130 */
Veera Sundaram Sankaranc5507b72017-08-25 15:25:31 -07004131 if (fb_sec_dir) {
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004132 drm_for_each_encoder(encoder, crtc->dev)
4133 if (encoder->crtc == crtc)
4134 encoder_cnt++;
4135
4136 if (encoder_cnt >
4137 MAX_ALLOWED_ENCODER_CNT_PER_SECURE_CRTC) {
4138 SDE_ERROR(
4139 "crtc%d, invalid virtual encoder crtc%d\n",
4140 crtc->base.id,
4141 encoder_cnt);
4142 return -EINVAL;
4143
4144 }
4145 }
4146 SDE_DEBUG("crtc:%d Secure validation successful\n", crtc->base.id);
4147 return 0;
4148}
4149
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004150static int sde_crtc_atomic_check(struct drm_crtc *crtc,
4151 struct drm_crtc_state *state)
4152{
Clarence Ipcae1bb62016-07-07 12:07:13 -04004153 struct sde_crtc *sde_crtc;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004154 struct plane_state pstates[SDE_STAGE_MAX * 4];
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004155 struct sde_crtc_state *cstate;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004156
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004157 const struct drm_plane_state *pstate;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004158 struct drm_plane *plane;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004159 struct drm_display_mode *mode;
4160
4161 int cnt = 0, rc = 0, mixer_width, i, z_pos;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004162
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004163 struct sde_multirect_plane_states multirect_plane[SDE_STAGE_MAX * 2];
4164 int multirect_count = 0;
4165 const struct drm_plane_state *pipe_staged[SSPP_MAX];
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004166 int left_zpos_cnt = 0, right_zpos_cnt = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004167
Clarence Ipcae1bb62016-07-07 12:07:13 -04004168 if (!crtc) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004169 SDE_ERROR("invalid crtc\n");
Clarence Ipcae1bb62016-07-07 12:07:13 -04004170 return -EINVAL;
4171 }
4172
Alan Kwongcdb2f282017-03-18 13:42:06 -07004173 sde_crtc = to_sde_crtc(crtc);
4174 cstate = to_sde_crtc_state(state);
4175
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004176 if (!state->enable || !state->active) {
4177 SDE_DEBUG("crtc%d -> enable %d, active %d, skip atomic_check\n",
4178 crtc->base.id, state->enable, state->active);
Alan Kwongcdb2f282017-03-18 13:42:06 -07004179 goto end;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004180 }
4181
Dhaval Patelec10fad2016-08-22 14:40:48 -07004182 mode = &state->adjusted_mode;
4183 SDE_DEBUG("%s: check", sde_crtc->name);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004184
Clarence Ip90b282d2017-05-04 10:00:32 -07004185 /* force a full mode set if active state changed */
4186 if (state->active_changed)
4187 state->mode_changed = true;
4188
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004189 memset(pipe_staged, 0, sizeof(pipe_staged));
4190
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304191 rc = _sde_crtc_check_dest_scaler_data(crtc, state);
4192 if (rc) {
4193 SDE_ERROR("crtc%d failed dest scaler check %d\n",
4194 crtc->base.id, rc);
4195 goto end;
4196 }
4197
4198 mixer_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004199
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05004200 _sde_crtc_setup_is_ppsplit(state);
Lloyd Atkinsona9d7e752017-01-17 16:31:43 -05004201 _sde_crtc_setup_lm_bounds(crtc, state);
4202
Abhijit Kulkarni7444a7d2017-06-21 18:53:36 -07004203 rc = _sde_crtc_check_secure_state(crtc, state);
4204 if (rc)
4205 return rc;
4206
Dhaval Patelec10fad2016-08-22 14:40:48 -07004207 /* get plane state for all drm planes associated with crtc state */
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004208 drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004209 if (IS_ERR_OR_NULL(pstate)) {
4210 rc = PTR_ERR(pstate);
4211 SDE_ERROR("%s: failed to get plane%d state, %d\n",
4212 sde_crtc->name, plane->base.id, rc);
Alan Kwong85767282016-10-03 18:03:37 -04004213 goto end;
4214 }
Clarence Ipc47a0692016-10-11 10:54:17 -04004215 if (cnt >= ARRAY_SIZE(pstates))
4216 continue;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004217
Dhaval Patelec10fad2016-08-22 14:40:48 -07004218 pstates[cnt].sde_pstate = to_sde_plane_state(pstate);
4219 pstates[cnt].drm_pstate = pstate;
Clarence Ipc47a0692016-10-11 10:54:17 -04004220 pstates[cnt].stage = sde_plane_get_property(
4221 pstates[cnt].sde_pstate, PLANE_PROP_ZPOS);
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004222 pstates[cnt].pipe_id = sde_plane_pipe(plane);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004223
4224 /* check dim layer stage with every plane */
4225 for (i = 0; i < cstate->num_dim_layers; i++) {
Veera Sundaram Sankaranb9ed6bd2017-07-11 19:18:03 -07004226 if (cstate->dim_layer[i].stage
4227 == (pstates[cnt].stage + SDE_STAGE_0)) {
Veera Sundaram Sankaran2cb064f2017-05-05 14:12:17 -07004228 SDE_ERROR(
4229 "plane:%d/dim_layer:%i-same stage:%d\n",
4230 plane->base.id, i,
4231 cstate->dim_layer[i].stage);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004232 rc = -EINVAL;
4233 goto end;
4234 }
4235 }
4236
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004237 if (pipe_staged[pstates[cnt].pipe_id]) {
4238 multirect_plane[multirect_count].r0 =
4239 pipe_staged[pstates[cnt].pipe_id];
4240 multirect_plane[multirect_count].r1 = pstate;
4241 multirect_count++;
4242
4243 pipe_staged[pstates[cnt].pipe_id] = NULL;
4244 } else {
4245 pipe_staged[pstates[cnt].pipe_id] = pstate;
4246 }
4247
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004248 cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004249
4250 if (CHECK_LAYER_BOUNDS(pstate->crtc_y, pstate->crtc_h,
4251 mode->vdisplay) ||
4252 CHECK_LAYER_BOUNDS(pstate->crtc_x, pstate->crtc_w,
4253 mode->hdisplay)) {
4254 SDE_ERROR("invalid vertical/horizontal destination\n");
4255 SDE_ERROR("y:%d h:%d vdisp:%d x:%d w:%d hdisp:%d\n",
4256 pstate->crtc_y, pstate->crtc_h, mode->vdisplay,
4257 pstate->crtc_x, pstate->crtc_w, mode->hdisplay);
4258 rc = -E2BIG;
4259 goto end;
4260 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004261 }
4262
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004263 for (i = 1; i < SSPP_MAX; i++) {
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004264 if (pipe_staged[i]) {
4265 sde_plane_clear_multirect(pipe_staged[i]);
4266
4267 if (is_sde_plane_virtual(pipe_staged[i]->plane)) {
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004268 SDE_ERROR(
4269 "r1 only virt plane:%d not supported\n",
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004270 pipe_staged[i]->plane->base.id);
Veera Sundaram Sankaran372596d2017-06-21 17:57:25 -07004271 rc = -EINVAL;
Jeykumar Sankarane964dc72017-05-10 19:26:43 -07004272 goto end;
4273 }
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004274 }
4275 }
4276
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004277 /* assign mixer stages based on sorted zpos property */
4278 sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);
4279
Dhaval Patela8d6bc62017-05-10 17:40:18 -07004280 rc = _sde_crtc_excl_dim_layer_check(state, pstates, cnt);
4281 if (rc)
4282 goto end;
4283
Clarence Ipc47a0692016-10-11 10:54:17 -04004284 if (!sde_is_custom_client()) {
4285 int stage_old = pstates[0].stage;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004286
Clarence Ipc47a0692016-10-11 10:54:17 -04004287 z_pos = 0;
4288 for (i = 0; i < cnt; i++) {
4289 if (stage_old != pstates[i].stage)
4290 ++z_pos;
4291 stage_old = pstates[i].stage;
4292 pstates[i].stage = z_pos;
4293 }
4294 }
4295
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004296 z_pos = -1;
Clarence Ipc47a0692016-10-11 10:54:17 -04004297 for (i = 0; i < cnt; i++) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004298 /* reset counts at every new blend stage */
4299 if (pstates[i].stage != z_pos) {
4300 left_zpos_cnt = 0;
4301 right_zpos_cnt = 0;
4302 z_pos = pstates[i].stage;
4303 }
Clarence Ipc47a0692016-10-11 10:54:17 -04004304
4305 /* verify z_pos setting before using it */
Clarence Ip649989a2016-10-21 14:28:34 -04004306 if (z_pos >= SDE_STAGE_MAX - SDE_STAGE_0) {
Clarence Ipc47a0692016-10-11 10:54:17 -04004307 SDE_ERROR("> %d plane stages assigned\n",
4308 SDE_STAGE_MAX - SDE_STAGE_0);
4309 rc = -EINVAL;
4310 goto end;
4311 } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004312 if (left_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004313 SDE_ERROR("> 2 planes @ stage %d on left\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004314 z_pos);
4315 rc = -EINVAL;
4316 goto end;
4317 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004318 left_zpos_cnt++;
4319
Dhaval Patelec10fad2016-08-22 14:40:48 -07004320 } else {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004321 if (right_zpos_cnt == 2) {
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004322 SDE_ERROR("> 2 planes @ stage %d on right\n",
Dhaval Patelec10fad2016-08-22 14:40:48 -07004323 z_pos);
4324 rc = -EINVAL;
4325 goto end;
4326 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004327 right_zpos_cnt++;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004328 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004329
Clarence Ipc47a0692016-10-11 10:54:17 -04004330 pstates[i].sde_pstate->stage = z_pos + SDE_STAGE_0;
Dhaval Patelec10fad2016-08-22 14:40:48 -07004331 SDE_DEBUG("%s: zpos %d", sde_crtc->name, z_pos);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004332 }
4333
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004334 for (i = 0; i < multirect_count; i++) {
4335 if (sde_plane_validate_multirect_v2(&multirect_plane[i])) {
4336 SDE_ERROR(
4337 "multirect validation failed for planes (%d - %d)\n",
4338 multirect_plane[i].r0->plane->base.id,
4339 multirect_plane[i].r1->plane->base.id);
4340 rc = -EINVAL;
Alan Kwong9aa061c2016-11-06 21:17:12 -05004341 goto end;
Jeykumar Sankaran2e655032017-02-04 14:05:45 -08004342 }
4343 }
4344
Alan Kwong9aa061c2016-11-06 21:17:12 -05004345 rc = sde_core_perf_crtc_check(crtc, state);
4346 if (rc) {
4347 SDE_ERROR("crtc%d failed performance check %d\n",
4348 crtc->base.id, rc);
4349 goto end;
4350 }
4351
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004352 /* validate source split:
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004353 * use pstates sorted by stage to check planes on same stage
4354 * we assume that all pipes are in source split so its valid to compare
4355 * without taking into account left/right mixer placement
4356 */
4357 for (i = 1; i < cnt; i++) {
4358 struct plane_state *prv_pstate, *cur_pstate;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004359 struct sde_rect left_rect, right_rect;
4360 int32_t left_pid, right_pid;
4361 int32_t stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004362
4363 prv_pstate = &pstates[i - 1];
4364 cur_pstate = &pstates[i];
4365 if (prv_pstate->stage != cur_pstate->stage)
4366 continue;
4367
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004368 stage = cur_pstate->stage;
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004369
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004370 left_pid = prv_pstate->sde_pstate->base.plane->base.id;
4371 POPULATE_RECT(&left_rect, prv_pstate->drm_pstate->crtc_x,
4372 prv_pstate->drm_pstate->crtc_y,
4373 prv_pstate->drm_pstate->crtc_w,
4374 prv_pstate->drm_pstate->crtc_h, false);
4375
4376 right_pid = cur_pstate->sde_pstate->base.plane->base.id;
4377 POPULATE_RECT(&right_rect, cur_pstate->drm_pstate->crtc_x,
4378 cur_pstate->drm_pstate->crtc_y,
4379 cur_pstate->drm_pstate->crtc_w,
4380 cur_pstate->drm_pstate->crtc_h, false);
4381
4382 if (right_rect.x < left_rect.x) {
4383 swap(left_pid, right_pid);
4384 swap(left_rect, right_rect);
4385 }
4386
4387 /**
4388 * - planes are enumerated in pipe-priority order such that
4389 * planes with lower drm_id must be left-most in a shared
4390 * blend-stage when using source split.
4391 * - planes in source split must be contiguous in width
4392 * - planes in source split must have same dest yoff and height
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004393 */
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004394 if (right_pid < left_pid) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004395 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004396 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
4397 stage, left_pid, right_pid);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004398 rc = -EINVAL;
4399 goto end;
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004400 } else if (right_rect.x != (left_rect.x + left_rect.w)) {
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004401 SDE_ERROR(
Jeykumar Sankaranaaaa0712017-06-12 17:59:16 -07004402 "non-contiguous coordinates for src split. stage: %d left: %d - %d right: %d - %d\n",
4403 stage, left_rect.x, left_rect.w,
4404 right_rect.x, right_rect.w);
4405 rc = -EINVAL;
4406 goto end;
4407 } else if ((left_rect.y != right_rect.y) ||
4408 (left_rect.h != right_rect.h)) {
4409 SDE_ERROR(
4410 "source split at stage: %d. invalid yoff/height: l_y: %d r_y: %d l_h: %d r_h: %d\n",
4411 stage, left_rect.y, right_rect.y,
4412 left_rect.h, right_rect.h);
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004413 rc = -EINVAL;
4414 goto end;
4415 }
4416 }
4417
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004418 rc = _sde_crtc_check_rois(crtc, state);
4419 if (rc) {
4420 SDE_ERROR("crtc%d failed roi check %d\n", crtc->base.id, rc);
4421 goto end;
4422 }
Lloyd Atkinson629ce1f2016-10-27 16:50:26 -04004423
Dhaval Patelec10fad2016-08-22 14:40:48 -07004424end:
Alan Kwongcdb2f282017-03-18 13:42:06 -07004425 _sde_crtc_rp_free_unused(&cstate->rp);
Dhaval Patelec10fad2016-08-22 14:40:48 -07004426 return rc;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004427}
4428
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004429int sde_crtc_vblank(struct drm_crtc *crtc, bool en)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004430{
Clarence Ip7f70ce42017-03-20 06:53:46 -07004431 struct sde_crtc *sde_crtc;
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004432 int ret;
Abhijit Kulkarni7acb3262016-07-05 15:27:25 -04004433
Clarence Ip7f70ce42017-03-20 06:53:46 -07004434 if (!crtc) {
4435 SDE_ERROR("invalid crtc\n");
4436 return -EINVAL;
4437 }
4438 sde_crtc = to_sde_crtc(crtc);
4439
4440 mutex_lock(&sde_crtc->crtc_lock);
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004441 SDE_EVT32(DRMID(&sde_crtc->base), en, sde_crtc->enabled,
4442 sde_crtc->suspend, sde_crtc->vblank_requested);
4443 if (sde_crtc->enabled && !sde_crtc->suspend) {
4444 ret = _sde_crtc_vblank_enable_no_lock(sde_crtc, en);
4445 if (ret)
4446 SDE_ERROR("%s vblank enable failed: %d\n",
4447 sde_crtc->name, ret);
4448 }
4449 sde_crtc->vblank_requested = en;
Clarence Ip7f70ce42017-03-20 06:53:46 -07004450 mutex_unlock(&sde_crtc->crtc_lock);
Clarence Ip9728a1d2017-04-18 22:22:13 -04004451
Lloyd Atkinsondcb1c4a2017-07-27 10:52:09 -04004452 return 0;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004453}
4454
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004455void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
4456{
4457 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
4458
Alan Kwong163d2612016-11-03 00:56:56 -04004459 SDE_DEBUG("%s: cancel: %p\n", sde_crtc->name, file);
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004460 _sde_crtc_complete_flip(crtc, file);
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04004461}
4462
Clarence Ip7a753bb2016-07-07 11:47:44 -04004463/**
4464 * sde_crtc_install_properties - install all drm properties for crtc
4465 * @crtc: Pointer to drm crtc structure
4466 */
static void sde_crtc_install_properties(struct drm_crtc *crtc,
		struct sde_mdss_cfg *catalog)
{
	struct sde_crtc *sde_crtc;
	struct drm_device *dev;
	struct sde_kms_info *info;
	struct sde_kms *sde_kms;
	/* user-visible values for the "security_level" enum property */
	static const struct drm_prop_enum_list e_secure_level[] = {
		{SDE_DRM_SEC_NON_SEC, "sec_and_non_sec"},
		{SDE_DRM_SEC_ONLY, "sec_only"},
	};

	SDE_DEBUG("\n");

	if (!crtc || !catalog) {
		SDE_ERROR("invalid crtc or catalog\n");
		return;
	}

	sde_crtc = to_sde_crtc(crtc);
	dev = crtc->dev;
	sde_kms = _sde_crtc_get_kms(crtc);

	if (!sde_kms) {
		SDE_ERROR("invalid argument\n");
		return;
	}

	/* scratch buffer for building the "capabilities" info blob below */
	info = kzalloc(sizeof(struct sde_kms_info), GFP_KERNEL);
	if (!info) {
		SDE_ERROR("failed to allocate info memory\n");
		return;
	}

	/* range properties */
	msm_property_install_range(&sde_crtc->property_info,
		"input_fence_timeout", 0x0, 0, SDE_CRTC_MAX_INPUT_FENCE_TIMEOUT,
		SDE_CRTC_INPUT_FENCE_TIMEOUT, CRTC_PROP_INPUT_FENCE_TIMEOUT);

	msm_property_install_range(&sde_crtc->property_info, "output_fence",
			0x0, 0, INR_OPEN_MAX, 0x0, CRTC_PROP_OUTPUT_FENCE);

	msm_property_install_range(&sde_crtc->property_info,
			"output_fence_offset", 0x0, 0, 1, 0,
			CRTC_PROP_OUTPUT_FENCE_OFFSET);

	/*
	 * bandwidth/clock vote properties; defaults come from the catalog
	 * perf limits (bus values are converted from kB to bytes via
	 * the * 1000ULL factor used throughout this file)
	 */
	msm_property_install_range(&sde_crtc->property_info,
			"core_clk", 0x0, 0, U64_MAX,
			sde_kms->perf.max_core_clk_rate,
			CRTC_PROP_CORE_CLK);
	msm_property_install_range(&sde_crtc->property_info,
			"core_ab", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_CORE_AB);
	msm_property_install_range(&sde_crtc->property_info,
			"core_ib", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_CORE_IB);
	msm_property_install_range(&sde_crtc->property_info,
			"llcc_ab", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_LLCC_AB);
	msm_property_install_range(&sde_crtc->property_info,
			"llcc_ib", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_LLCC_IB);
	msm_property_install_range(&sde_crtc->property_info,
			"dram_ab", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_DRAM_AB);
	msm_property_install_range(&sde_crtc->property_info,
			"dram_ib", 0x0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_DRAM_IB);
	msm_property_install_range(&sde_crtc->property_info,
			"rot_prefill_bw", 0, 0, U64_MAX,
			catalog->perf.max_bw_high * 1000ULL,
			CRTC_PROP_ROT_PREFILL_BW);
	msm_property_install_range(&sde_crtc->property_info,
			"rot_clk", 0, 0, U64_MAX,
			sde_kms->perf.max_core_clk_rate,
			CRTC_PROP_ROT_CLK);

	msm_property_install_range(&sde_crtc->property_info,
		"idle_time", IDLE_TIMEOUT, 0, U64_MAX, 0,
		CRTC_PROP_IDLE_TIMEOUT);

	/* read-only blob advertising hardware capabilities to userspace */
	msm_property_install_blob(&sde_crtc->property_info, "capabilities",
		DRM_MODE_PROP_IMMUTABLE, CRTC_PROP_INFO);

	msm_property_install_volatile_range(&sde_crtc->property_info,
		"sde_drm_roi_v1", 0x0, 0, ~0, 0, CRTC_PROP_ROI_V1);

	msm_property_install_enum(&sde_crtc->property_info, "security_level",
			0x0, 0, e_secure_level,
			ARRAY_SIZE(e_secure_level),
			CRTC_PROP_SECURITY_LEVEL);

	sde_kms_info_reset(info);

	/* dim layer support is optional per target */
	if (catalog->has_dim_layer) {
		msm_property_install_volatile_range(&sde_crtc->property_info,
			"dim_layer_v1", 0x0, 0, ~0, 0, CRTC_PROP_DIM_LAYER_V1);
		sde_kms_info_add_keyint(info, "dim_layer_v1_max_layers",
				SDE_MAX_DIM_LAYERS);
	}

	sde_kms_info_add_keyint(info, "hw_version", catalog->hwversion);
	sde_kms_info_add_keyint(info, "max_linewidth",
			catalog->max_mixer_width);
	sde_kms_info_add_keyint(info, "max_blendstages",
			catalog->max_mixer_blendstages);
	if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED2)
		sde_kms_info_add_keystr(info, "qseed_type", "qseed2");
	if (catalog->qseed_type == SDE_SSPP_SCALER_QSEED3)
		sde_kms_info_add_keystr(info, "qseed_type", "qseed3");

	/* smart-dma revision is only exposed to custom clients */
	if (sde_is_custom_client()) {
		if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V1)
			sde_kms_info_add_keystr(info,
					"smart_dma_rev", "smart_dma_v1");
		if (catalog->smart_dma_rev == SDE_SSPP_SMART_DMA_V2)
			sde_kms_info_add_keystr(info,
					"smart_dma_rev", "smart_dma_v2");
	}

	if (catalog->mdp[0].has_dest_scaler) {
		sde_kms_info_add_keyint(info, "has_dest_scaler",
				catalog->mdp[0].has_dest_scaler);
		sde_kms_info_add_keyint(info, "dest_scaler_count",
					catalog->ds_count);

		if (catalog->ds[0].top) {
			sde_kms_info_add_keyint(info,
					"max_dest_scaler_input_width",
					catalog->ds[0].top->maxinputwidth);
			/*
			 * NOTE(review): output width is reported from
			 * maxinputwidth as well - confirm whether the
			 * catalog provides a separate max output width
			 * that should be used here instead.
			 */
			sde_kms_info_add_keyint(info,
					"max_dest_scaler_output_width",
					catalog->ds[0].top->maxinputwidth);
			sde_kms_info_add_keyint(info, "max_dest_scale_up",
					catalog->ds[0].top->maxupscale);
		}

		if (catalog->ds[0].features & BIT(SDE_SSPP_SCALER_QSEED3)) {
			msm_property_install_volatile_range(
					&sde_crtc->property_info, "dest_scaler",
					0x0, 0, ~0, 0, CRTC_PROP_DEST_SCALER);
			msm_property_install_blob(&sde_crtc->property_info,
					"ds_lut_ed", 0,
					CRTC_PROP_DEST_SCALER_LUT_ED);
			msm_property_install_blob(&sde_crtc->property_info,
					"ds_lut_cir", 0,
					CRTC_PROP_DEST_SCALER_LUT_CIR);
			msm_property_install_blob(&sde_crtc->property_info,
					"ds_lut_sep", 0,
					CRTC_PROP_DEST_SCALER_LUT_SEP);
		}
	}

	sde_kms_info_add_keyint(info, "has_src_split", catalog->has_src_split);
	if (catalog->perf.max_bw_low)
		sde_kms_info_add_keyint(info, "max_bandwidth_low",
				catalog->perf.max_bw_low * 1000LL);
	if (catalog->perf.max_bw_high)
		sde_kms_info_add_keyint(info, "max_bandwidth_high",
				catalog->perf.max_bw_high * 1000LL);
	if (catalog->perf.min_core_ib)
		sde_kms_info_add_keyint(info, "min_core_ib",
				catalog->perf.min_core_ib * 1000LL);
	if (catalog->perf.min_llcc_ib)
		sde_kms_info_add_keyint(info, "min_llcc_ib",
				catalog->perf.min_llcc_ib * 1000LL);
	if (catalog->perf.min_dram_ib)
		sde_kms_info_add_keyint(info, "min_dram_ib",
				catalog->perf.min_dram_ib * 1000LL);
	if (sde_kms->perf.max_core_clk_rate)
		sde_kms_info_add_keyint(info, "max_mdp_clk",
				sde_kms->perf.max_core_clk_rate);
	/* fudge factors and prefill-line parameters for userspace perf calc */
	sde_kms_info_add_keystr(info, "core_ib_ff",
			catalog->perf.core_ib_ff);
	sde_kms_info_add_keystr(info, "core_clk_ff",
			catalog->perf.core_clk_ff);
	sde_kms_info_add_keystr(info, "comp_ratio_rt",
			catalog->perf.comp_ratio_rt);
	sde_kms_info_add_keystr(info, "comp_ratio_nrt",
			catalog->perf.comp_ratio_nrt);
	sde_kms_info_add_keyint(info, "dest_scale_prefill_lines",
			catalog->perf.dest_scale_prefill_lines);
	sde_kms_info_add_keyint(info, "undersized_prefill_lines",
			catalog->perf.undersized_prefill_lines);
	sde_kms_info_add_keyint(info, "macrotile_prefill_lines",
			catalog->perf.macrotile_prefill_lines);
	sde_kms_info_add_keyint(info, "yuv_nv12_prefill_lines",
			catalog->perf.yuv_nv12_prefill_lines);
	sde_kms_info_add_keyint(info, "linear_prefill_lines",
			catalog->perf.linear_prefill_lines);
	sde_kms_info_add_keyint(info, "downscaling_prefill_lines",
			catalog->perf.downscaling_prefill_lines);
	sde_kms_info_add_keyint(info, "xtra_prefill_lines",
			catalog->perf.xtra_prefill_lines);
	sde_kms_info_add_keyint(info, "amortizable_threshold",
			catalog->perf.amortizable_threshold);
	sde_kms_info_add_keyint(info, "min_prefill_lines",
			catalog->perf.min_prefill_lines);

	/* publish the accumulated info as the immutable capabilities blob */
	msm_property_set_blob(&sde_crtc->property_info, &sde_crtc->blob_info,
			info->data, SDE_KMS_INFO_DATALEN(info), CRTC_PROP_INFO);

	kfree(info);
}
4677
4678/**
4679 * sde_crtc_atomic_set_property - atomically set a crtc drm property
4680 * @crtc: Pointer to drm crtc structure
4681 * @state: Pointer to drm crtc state structure
4682 * @property: Pointer to targeted drm property
4683 * @val: Updated property value
4684 * @Returns: Zero on success
4685 */
4686static int sde_crtc_atomic_set_property(struct drm_crtc *crtc,
4687 struct drm_crtc_state *state,
4688 struct drm_property *property,
4689 uint64_t val)
4690{
4691 struct sde_crtc *sde_crtc;
4692 struct sde_crtc_state *cstate;
Clarence Ipcae1bb62016-07-07 12:07:13 -04004693 int idx, ret = -EINVAL;
Clarence Ip7a753bb2016-07-07 11:47:44 -04004694
4695 if (!crtc || !state || !property) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004696 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004697 } else {
4698 sde_crtc = to_sde_crtc(crtc);
4699 cstate = to_sde_crtc_state(state);
4700 ret = msm_property_atomic_set(&sde_crtc->property_info,
Clarence Ip4a2955d2017-07-04 18:04:33 -04004701 &cstate->property_state, property, val);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004702 if (!ret) {
4703 idx = msm_property_index(&sde_crtc->property_info,
4704 property);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004705 switch (idx) {
4706 case CRTC_PROP_INPUT_FENCE_TIMEOUT:
Clarence Ipcae1bb62016-07-07 12:07:13 -04004707 _sde_crtc_set_input_fence_timeout(cstate);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004708 break;
4709 case CRTC_PROP_DIM_LAYER_V1:
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304710 _sde_crtc_set_dim_layer_v1(cstate,
4711 (void __user *)val);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004712 break;
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004713 case CRTC_PROP_ROI_V1:
Sravanthi Kollukuduruc7bcde92017-06-16 12:44:39 +05304714 ret = _sde_crtc_set_roi_v1(state,
4715 (void __user *)val);
4716 break;
4717 case CRTC_PROP_DEST_SCALER:
4718 ret = _sde_crtc_set_dest_scaler(sde_crtc,
4719 cstate, (void __user *)val);
4720 break;
4721 case CRTC_PROP_DEST_SCALER_LUT_ED:
4722 case CRTC_PROP_DEST_SCALER_LUT_CIR:
4723 case CRTC_PROP_DEST_SCALER_LUT_SEP:
4724 ret = _sde_crtc_set_dest_scaler_lut(sde_crtc,
4725 cstate, idx);
Lloyd Atkinson8ba47032017-03-22 17:13:32 -04004726 break;
Alan Kwong8411a9112017-06-06 19:29:01 -04004727 case CRTC_PROP_CORE_CLK:
Alan Kwongff30f4a2017-05-23 12:02:00 -07004728 case CRTC_PROP_CORE_AB:
4729 case CRTC_PROP_CORE_IB:
Alan Kwongff30f4a2017-05-23 12:02:00 -07004730 cstate->bw_control = true;
4731 break;
Alan Kwong0230a102017-05-16 11:36:44 -07004732 case CRTC_PROP_LLCC_AB:
4733 case CRTC_PROP_LLCC_IB:
4734 case CRTC_PROP_DRAM_AB:
4735 case CRTC_PROP_DRAM_IB:
4736 cstate->bw_control = true;
4737 cstate->bw_split_vote = true;
4738 break;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004739 case CRTC_PROP_IDLE_TIMEOUT:
4740 _sde_crtc_set_idle_timeout(crtc, val);
Veera Sundaram Sankaran3171ff82017-01-04 14:34:47 -08004741 default:
4742 /* nothing to do */
4743 break;
4744 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004745 } else {
4746 ret = sde_cp_crtc_set_property(crtc,
4747 property, val);
Clarence Ipcae1bb62016-07-07 12:07:13 -04004748 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004749 if (ret)
4750 DRM_ERROR("failed to set the property\n");
Alan Kwongcdb2f282017-03-18 13:42:06 -07004751
4752 SDE_DEBUG("crtc%d %s[%d] <= 0x%llx ret=%d\n", crtc->base.id,
4753 property->name, property->base.id, val, ret);
Clarence Ip7a753bb2016-07-07 11:47:44 -04004754 }
4755
4756 return ret;
4757}
4758
4759/**
4760 * sde_crtc_set_property - set a crtc drm property
4761 * @crtc: Pointer to drm crtc structure
4762 * @property: Pointer to targeted drm property
4763 * @val: Updated property value
4764 * @Returns: Zero on success
4765 */
4766static int sde_crtc_set_property(struct drm_crtc *crtc,
4767 struct drm_property *property, uint64_t val)
4768{
Lloyd Atkinson4f1c8692016-09-14 14:04:25 -04004769 SDE_DEBUG("\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004770
4771 return sde_crtc_atomic_set_property(crtc, crtc->state, property, val);
4772}
4773
4774/**
4775 * sde_crtc_atomic_get_property - retrieve a crtc drm property
4776 * @crtc: Pointer to drm crtc structure
4777 * @state: Pointer to drm crtc state structure
4778 * @property: Pointer to targeted drm property
4779 * @val: Pointer to variable for receiving property value
4780 * @Returns: Zero on success
4781 */
4782static int sde_crtc_atomic_get_property(struct drm_crtc *crtc,
4783 const struct drm_crtc_state *state,
4784 struct drm_property *property,
4785 uint64_t *val)
4786{
4787 struct sde_crtc *sde_crtc;
4788 struct sde_crtc_state *cstate;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004789 struct drm_encoder *encoder;
Clarence Ip24f80662016-06-13 19:05:32 -04004790 int i, ret = -EINVAL;
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004791 bool conn_offset = 0;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004792 bool is_cmd = true;
Clarence Ip7a753bb2016-07-07 11:47:44 -04004793
4794 if (!crtc || !state) {
Dhaval Patelec10fad2016-08-22 14:40:48 -07004795 SDE_ERROR("invalid argument(s)\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004796 } else {
4797 sde_crtc = to_sde_crtc(crtc);
4798 cstate = to_sde_crtc_state(state);
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004799
4800 for (i = 0; i < cstate->num_connectors; ++i) {
4801 conn_offset = sde_connector_needs_offset(
4802 cstate->connectors[i]);
4803 if (conn_offset)
4804 break;
4805 }
4806
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004807 /**
4808 * set the cmd flag only when all the encoders attached
4809 * to the crtc are in cmd mode. Consider all other cases
4810 * as video mode.
4811 */
4812 drm_for_each_encoder(encoder, crtc->dev) {
4813 if (encoder->crtc == crtc)
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05304814 is_cmd = sde_encoder_check_mode(encoder,
4815 MSM_DISPLAY_CAP_CMD_MODE);
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004816 }
4817
Clarence Ip24f80662016-06-13 19:05:32 -04004818 i = msm_property_index(&sde_crtc->property_info, property);
4819 if (i == CRTC_PROP_OUTPUT_FENCE) {
Dhaval Patel39323d42017-03-01 23:48:24 -08004820 uint32_t offset = sde_crtc_get_property(cstate,
Clarence Ip1d9728b2016-09-01 11:10:54 -04004821 CRTC_PROP_OUTPUT_FENCE_OFFSET);
4822
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004823 /**
4824 * set the offset to 0 only for cmd mode panels, so
4825 * the release fence for the current frame can be
4826 * triggered right after PP_DONE interrupt.
4827 */
4828 offset = is_cmd ? 0 : (offset + conn_offset);
4829
Dhaval Patel5cb59be2017-04-20 20:00:56 -07004830 ret = sde_fence_create(&sde_crtc->output_fence, val,
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07004831 offset);
Clarence Ip1d9728b2016-09-01 11:10:54 -04004832 if (ret)
4833 SDE_ERROR("fence create failed\n");
Clarence Ip24f80662016-06-13 19:05:32 -04004834 } else {
4835 ret = msm_property_atomic_get(&sde_crtc->property_info,
Clarence Ip4a2955d2017-07-04 18:04:33 -04004836 &cstate->property_state,
4837 property, val);
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004838 if (ret)
4839 ret = sde_cp_crtc_get_property(crtc,
4840 property, val);
Clarence Ip24f80662016-06-13 19:05:32 -04004841 }
Gopikrishnaiah Anandane0e5e0c2016-05-25 11:05:33 -07004842 if (ret)
4843 DRM_ERROR("get property failed\n");
Clarence Ip7a753bb2016-07-07 11:47:44 -04004844 }
Clarence Ip7a753bb2016-07-07 11:47:44 -04004845 return ret;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004846}
4847
Alan Kwong67a3f792016-11-01 23:16:53 -04004848#ifdef CONFIG_DEBUG_FS
/*
 * _sde_debugfs_status_show - debugfs "status" dump for a crtc
 *
 * Prints, under crtc_lock: mode dimensions, per-mixer lm/ctl assignment,
 * dim layers, per-plane state (fb, src/dst rects, multirect, excl rect)
 * and a vblank fps estimate.  The fps counters are reset on each read so
 * every read measures the interval since the previous one.
 */
static int _sde_debugfs_status_show(struct seq_file *s, void *data)
{
	struct sde_crtc *sde_crtc;
	struct sde_plane_state *pstate = NULL;
	struct sde_crtc_mixer *m;

	struct drm_crtc *crtc;
	struct drm_plane *plane;
	struct drm_display_mode *mode;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct sde_crtc_state *cstate;

	int i, out_width;

	if (!s || !s->private)
		return -EINVAL;

	sde_crtc = s->private;
	crtc = &sde_crtc->base;
	cstate = to_sde_crtc_state(crtc->state);

	/* hold crtc_lock so mixers/planes don't change under the dump */
	mutex_lock(&sde_crtc->crtc_lock);
	mode = &crtc->state->adjusted_mode;
	out_width = sde_crtc_get_mixer_width(sde_crtc, cstate, mode);

	seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
				mode->hdisplay, mode->vdisplay);

	seq_puts(s, "\n");

	for (i = 0; i < sde_crtc->num_mixers; ++i) {
		m = &sde_crtc->mixers[i];
		if (!m->hw_lm)
			seq_printf(s, "\tmixer[%d] has no lm\n", i);
		else if (!m->hw_ctl)
			seq_printf(s, "\tmixer[%d] has no ctl\n", i);
		else
			seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
				m->hw_lm->idx - LM_0, m->hw_ctl->idx - CTL_0,
				out_width, mode->vdisplay);
	}

	seq_puts(s, "\n");

	for (i = 0; i < cstate->num_dim_layers; i++) {
		struct sde_hw_dim_layer *dim_layer = &cstate->dim_layer[i];

		/* NOTE(review): stray ']' in this format string looks like a
		 * typo ("dim_layer:%d]") — confirm before changing, callers
		 * may parse the current output
		 */
		seq_printf(s, "\tdim_layer:%d] stage:%d flags:%d\n",
				i, dim_layer->stage, dim_layer->flags);
		seq_printf(s, "\tdst_x:%d dst_y:%d dst_w:%d dst_h:%d\n",
				dim_layer->rect.x, dim_layer->rect.y,
				dim_layer->rect.w, dim_layer->rect.h);
		seq_printf(s,
			"\tcolor_0:%d color_1:%d color_2:%d color_3:%d\n",
				dim_layer->color_fill.color_0,
				dim_layer->color_fill.color_1,
				dim_layer->color_fill.color_2,
				dim_layer->color_fill.color_3);
		seq_puts(s, "\n");
	}

	drm_atomic_crtc_for_each_plane(plane, crtc) {
		pstate = to_sde_plane_state(plane->state);
		state = plane->state;

		if (!pstate || !state)
			continue;

		seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
			pstate->stage);

		if (plane->state->fb) {
			fb = plane->state->fb;

			seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u bpp:%d\n",
					fb->base.id, (char *) &fb->pixel_format,
					fb->width, fb->height, fb->bits_per_pixel);

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->modifier); i++)
				seq_printf(s, "modifier[%d]:%8llu ", i,
							fb->modifier[i]);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
				seq_printf(s, "pitches[%d]:%8u ", i,
							fb->pitches[i]);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
				seq_printf(s, "offsets[%d]:%8u ", i,
							fb->offsets[i]);
			seq_puts(s, "\n");
		}

		seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
			state->src_x, state->src_y, state->src_w, state->src_h);

		seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
			state->crtc_x, state->crtc_y, state->crtc_w,
			state->crtc_h);
		seq_printf(s, "\tmultirect: mode: %d index: %d\n",
			pstate->multirect_mode, pstate->multirect_index);

		seq_printf(s, "\texcl_rect: x:%4d y:%4d w:%4d h:%4d\n",
			pstate->excl_rect.x, pstate->excl_rect.y,
			pstate->excl_rect.w, pstate->excl_rect.h);

		seq_puts(s, "\n");
	}

	if (sde_crtc->vblank_cb_count) {
		ktime_t diff = ktime_sub(ktime_get(), sde_crtc->vblank_cb_time);
		s64 diff_ms = ktime_to_ms(diff);
		s64 fps = diff_ms ? DIV_ROUND_CLOSEST(
				sde_crtc->vblank_cb_count * 1000, diff_ms) : 0;

		seq_printf(s,
			"vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
				fps, sde_crtc->vblank_cb_count,
				ktime_to_ms(diff), sde_crtc->play_count);

		/* reset time & count for next measurement */
		sde_crtc->vblank_cb_count = 0;
		sde_crtc->vblank_cb_time = ktime_set(0, 0);
	}

	seq_printf(s, "vblank_enable:%d\n", sde_crtc->vblank_requested);

	mutex_unlock(&sde_crtc->crtc_lock);

	return 0;
}
4985
/* seq_file open hook for the "status" debugfs node; passes the sde_crtc
 * stored in i_private through to _sde_debugfs_status_show()
 */
static int _sde_debugfs_status_open(struct inode *inode, struct file *file)
{
	return single_open(file, _sde_debugfs_status_show, inode->i_private);
}
4990
/*
 * _sde_crtc_misr_setup - debugfs write handler for "misr_data"
 *
 * Parses "<enable> <frame_count>" from user space and programs MISR
 * collection on every mixer that supports it.  Cached misr_data values
 * are cleared on each (re)configuration.  Power is enabled around the
 * register writes and the hardware is programmed under crtc_lock.
 */
static ssize_t _sde_crtc_misr_setup(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_crtc *sde_crtc;
	struct sde_crtc_mixer *m;
	int i = 0, rc;
	char buf[MISR_BUFF_SIZE + 1];
	u32 frame_count, enable;
	size_t buff_copy;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_crtc = file->private_data;
	/* clamp to buffer size; buf has one extra byte for the terminator */
	buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
	if (copy_from_user(buf, user_buf, buff_copy)) {
		SDE_ERROR("buffer copy failed\n");
		return -EINVAL;
	}

	buf[buff_copy] = 0; /* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EINVAL;

	/* registers are only accessible while power is on */
	rc = _sde_crtc_power_enable(sde_crtc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_crtc->crtc_lock);
	sde_crtc->misr_enable = enable;
	sde_crtc->misr_frame_count = frame_count;
	for (i = 0; i < sde_crtc->num_mixers; ++i) {
		sde_crtc->misr_data[i] = 0;
		m = &sde_crtc->mixers[i];
		if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
			continue;

		m->hw_lm->ops.setup_misr(m->hw_lm, enable, frame_count);
	}
	mutex_unlock(&sde_crtc->crtc_lock);
	_sde_crtc_power_enable(sde_crtc, false);

	return count;
}
5036
5037static ssize_t _sde_crtc_misr_read(struct file *file,
5038 char __user *user_buff, size_t count, loff_t *ppos)
5039{
5040 struct sde_crtc *sde_crtc;
5041 struct sde_crtc_mixer *m;
5042 int i = 0, rc;
Dhaval Patel010f5172017-08-01 22:40:09 -07005043 u32 misr_status;
Dhaval Patelf9245d62017-03-28 16:24:00 -07005044 ssize_t len = 0;
5045 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
5046
5047 if (*ppos)
5048 return 0;
5049
5050 if (!file || !file->private_data)
5051 return -EINVAL;
5052
5053 sde_crtc = file->private_data;
5054 rc = _sde_crtc_power_enable(sde_crtc, true);
5055 if (rc)
5056 return rc;
5057
5058 mutex_lock(&sde_crtc->crtc_lock);
5059 if (!sde_crtc->misr_enable) {
5060 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
5061 "disabled\n");
5062 goto buff_check;
5063 }
5064
5065 for (i = 0; i < sde_crtc->num_mixers; ++i) {
5066 m = &sde_crtc->mixers[i];
Dhaval Patel010f5172017-08-01 22:40:09 -07005067 if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
Dhaval Patelf9245d62017-03-28 16:24:00 -07005068 continue;
5069
Dhaval Patel010f5172017-08-01 22:40:09 -07005070 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
5071 sde_crtc->misr_data[i] = misr_status ? misr_status :
5072 sde_crtc->misr_data[i];
Dhaval Patelf9245d62017-03-28 16:24:00 -07005073 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "lm idx:%d\n",
5074 m->hw_lm->idx - LM_0);
5075 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
Dhaval Patel010f5172017-08-01 22:40:09 -07005076 sde_crtc->misr_data[i]);
Dhaval Patelf9245d62017-03-28 16:24:00 -07005077 }
5078
5079buff_check:
5080 if (count <= len) {
5081 len = 0;
5082 goto end;
5083 }
5084
5085 if (copy_to_user(user_buff, buf, len)) {
5086 len = -EFAULT;
5087 goto end;
5088 }
5089
5090 *ppos += len; /* increase offset */
5091
5092end:
5093 mutex_unlock(&sde_crtc->crtc_lock);
5094 _sde_crtc_power_enable(sde_crtc, false);
5095 return len;
5096}
5097
/*
 * DEFINE_SDE_DEBUGFS_SEQ_FOPS - generate seq_file boilerplate
 * @__prefix: base name; expects a __prefix##_show() function to exist
 *
 * Expands to a single_open() based __prefix##_open() helper plus a
 * static __prefix##_fops file_operations table wired for seq_file reads.
 */
#define DEFINE_SDE_DEBUGFS_SEQ_FOPS(__prefix)				\
static int __prefix ## _open(struct inode *inode, struct file *file)	\
{									\
	return single_open(file, __prefix ## _show, inode->i_private);	\
}									\
static const struct file_operations __prefix ## _fops = {		\
	.owner = THIS_MODULE,						\
	.open = __prefix ## _open,					\
	.release = single_release,					\
	.read = seq_read,						\
	.llseek = seq_lseek,						\
}
5110
/*
 * sde_crtc_debugfs_state_show - debugfs "state" dump for a crtc
 *
 * Prints connector count, client type, interface mode, the current
 * performance votes (core clock and per-data-bus bandwidth/ib), and the
 * contents of each resource pool (under rp_lock).
 */
static int sde_crtc_debugfs_state_show(struct seq_file *s, void *v)
{
	struct drm_crtc *crtc = (struct drm_crtc *) s->private;
	struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
	struct sde_crtc_state *cstate = to_sde_crtc_state(crtc->state);
	struct sde_crtc_res *res;
	struct sde_crtc_respool *rp;
	int i;

	seq_printf(s, "num_connectors: %d\n", cstate->num_connectors);
	seq_printf(s, "client type: %d\n", sde_crtc_get_client_type(crtc));
	seq_printf(s, "intf_mode: %d\n", sde_crtc_get_intf_mode(crtc));
	seq_printf(s, "core_clk_rate: %llu\n",
			sde_crtc->cur_perf.core_clk_rate);
	for (i = SDE_POWER_HANDLE_DBUS_ID_MNOC;
			i < SDE_POWER_HANDLE_DBUS_ID_MAX; i++) {
		seq_printf(s, "bw_ctl[%s]: %llu\n",
				sde_power_handle_get_dbus_name(i),
				sde_crtc->cur_perf.bw_ctl[i]);
		seq_printf(s, "max_per_pipe_ib[%s]: %llu\n",
				sde_power_handle_get_dbus_name(i),
				sde_crtc->cur_perf.max_per_pipe_ib[i]);
	}

	/* rp_lock guards the resource pool lists against concurrent updates */
	mutex_lock(&sde_crtc->rp_lock);
	list_for_each_entry(rp, &sde_crtc->rp_head, rp_list) {
		seq_printf(s, "rp.%d: ", rp->sequence_id);
		list_for_each_entry(res, &rp->res_list, list)
			seq_printf(s, "0x%x/0x%llx/%pK/%d ",
					res->type, res->tag, res->val,
					atomic_read(&res->refcount));
		seq_puts(s, "\n");
	}
	mutex_unlock(&sde_crtc->rp_lock);

	return 0;
}
DEFINE_SDE_DEBUGFS_SEQ_FOPS(sde_crtc_debugfs_state);
5149
/*
 * _sde_crtc_init_debugfs - create the per-crtc debugfs directory and nodes
 * @crtc: Pointer to drm crtc structure
 *
 * Creates <debugfs>/<crtc name>/{status,state,misr_data}.  Only the
 * directory creation is error-checked; the individual file creations are
 * deliberately best-effort.  Returns 0 on success, negative errno on
 * invalid input or directory creation failure.
 */
static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;
	struct sde_kms *sde_kms;

	static const struct file_operations debugfs_status_fops = {
		.open =		_sde_debugfs_status_open,
		.read =		seq_read,
		.llseek =	seq_lseek,
		.release =	single_release,
	};
	static const struct file_operations debugfs_misr_fops = {
		.open =		simple_open,
		.read =		_sde_crtc_misr_read,
		.write =	_sde_crtc_misr_setup,
	};

	if (!crtc)
		return -EINVAL;
	sde_crtc = to_sde_crtc(crtc);

	sde_kms = _sde_crtc_get_kms(crtc);
	if (!sde_kms)
		return -EINVAL;

	sde_crtc->debugfs_root = debugfs_create_dir(sde_crtc->name,
			crtc->dev->primary->debugfs_root);
	if (!sde_crtc->debugfs_root)
		return -ENOMEM;

	/* don't error check these */
	debugfs_create_file("status", 0400,
			sde_crtc->debugfs_root,
			sde_crtc, &debugfs_status_fops);
	debugfs_create_file("state", 0600,
			sde_crtc->debugfs_root,
			&sde_crtc->base,
			&sde_crtc_debugfs_state_fops);
	debugfs_create_file("misr_data", 0600, sde_crtc->debugfs_root,
					sde_crtc, &debugfs_misr_fops);

	return 0;
}
5193
5194static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
5195{
5196 struct sde_crtc *sde_crtc;
5197
5198 if (!crtc)
5199 return;
5200 sde_crtc = to_sde_crtc(crtc);
5201 debugfs_remove_recursive(sde_crtc->debugfs_root);
Clarence Ip8f7366c2016-07-05 12:15:26 -04005202}
Alan Kwong67a3f792016-11-01 23:16:53 -04005203#else
/* no-op stub when CONFIG_DEBUG_FS is disabled */
static int _sde_crtc_init_debugfs(struct drm_crtc *crtc)
{
	return 0;
}
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07005208
/* no-op stub when CONFIG_DEBUG_FS is disabled */
static void _sde_crtc_destroy_debugfs(struct drm_crtc *crtc)
{
}
5212#endif /* CONFIG_DEBUG_FS */
5213
/* drm_crtc_funcs.late_register hook: set up debugfs after crtc registration */
static int sde_crtc_late_register(struct drm_crtc *crtc)
{
	return _sde_crtc_init_debugfs(crtc);
}
5218
/* drm_crtc_funcs.early_unregister hook: remove debugfs before teardown */
static void sde_crtc_early_unregister(struct drm_crtc *crtc)
{
	_sde_crtc_destroy_debugfs(crtc);
}
5223
/* crtc vtable: atomic helpers for legacy entry points, sde-specific
 * property, state and debugfs-registration hooks
 */
static const struct drm_crtc_funcs sde_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = sde_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = sde_crtc_set_property,
	.atomic_set_property = sde_crtc_atomic_set_property,
	.atomic_get_property = sde_crtc_atomic_get_property,
	.reset = sde_crtc_reset,
	.atomic_duplicate_state = sde_crtc_duplicate_state,
	.atomic_destroy_state = sde_crtc_destroy_state,
	.late_register = sde_crtc_late_register,
	.early_unregister = sde_crtc_early_unregister,
};
5237
/* atomic modeset helper hooks for this crtc */
static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
	.mode_fixup = sde_crtc_mode_fixup,
	.disable = sde_crtc_disable,
	.enable = sde_crtc_enable,
	.atomic_check = sde_crtc_atomic_check,
	.atomic_begin = sde_crtc_atomic_begin,
	.atomic_flush = sde_crtc_atomic_flush,
};
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005246
/*
 * _sde_crtc_event_cb - kthread work handler for queued crtc events
 * @work: embedded kthread_work of a struct sde_crtc_event
 *
 * Invokes the event's callback, then returns the event node to the
 * crtc's free list under event_lock so it can be reused by
 * sde_crtc_event_queue().
 */
static void _sde_crtc_event_cb(struct kthread_work *work)
{
	struct sde_crtc_event *event;
	struct sde_crtc *sde_crtc;
	unsigned long irq_flags;

	if (!work) {
		SDE_ERROR("invalid work item\n");
		return;
	}

	event = container_of(work, struct sde_crtc_event, kt_work);

	/* set sde_crtc to NULL for static work structures */
	sde_crtc = event->sde_crtc;
	if (!sde_crtc)
		return;

	if (event->cb_func)
		event->cb_func(&sde_crtc->base, event->usr);

	/* recycle the event node into the private free-list cache */
	spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
	list_add_tail(&event->list, &sde_crtc->event_free_list);
	spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);
}
5272
/*
 * sde_crtc_event_queue - schedule a callback on the crtc's event thread
 * @crtc: Pointer to drm crtc structure
 * @func: callback to run from the per-crtc event kthread
 * @usr: opaque user pointer passed through to @func
 *
 * May be called from ISR context: event nodes come from a pre-allocated
 * per-crtc cache rather than the allocator.  Returns 0 on success,
 * -EINVAL on bad arguments, -ENOMEM when the cache is exhausted.
 */
int sde_crtc_event_queue(struct drm_crtc *crtc,
		void (*func)(struct drm_crtc *crtc, void *usr), void *usr)
{
	unsigned long irq_flags;
	struct sde_crtc *sde_crtc;
	struct msm_drm_private *priv;
	struct sde_crtc_event *event = NULL;
	u32 crtc_id;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private || !func) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	sde_crtc = to_sde_crtc(crtc);
	priv = crtc->dev->dev_private;
	crtc_id = drm_crtc_index(crtc);

	/*
	 * Obtain an event struct from the private cache. This event
	 * queue may be called from ISR contexts, so use a private
	 * cache to avoid calling any memory allocation functions.
	 */
	spin_lock_irqsave(&sde_crtc->event_lock, irq_flags);
	if (!list_empty(&sde_crtc->event_free_list)) {
		event = list_first_entry(&sde_crtc->event_free_list,
				struct sde_crtc_event, list);
		list_del_init(&event->list);
	}
	spin_unlock_irqrestore(&sde_crtc->event_lock, irq_flags);

	if (!event)
		return -ENOMEM;

	/* populate event node */
	event->sde_crtc = sde_crtc;
	event->cb_func = func;
	event->usr = usr;

	/* queue new event request */
	kthread_init_work(&event->kt_work, _sde_crtc_event_cb);
	kthread_queue_work(&priv->event_thread[crtc_id].worker,
			&event->kt_work);

	return 0;
}
5318
5319static int _sde_crtc_init_events(struct sde_crtc *sde_crtc)
5320{
5321 int i, rc = 0;
5322
5323 if (!sde_crtc) {
5324 SDE_ERROR("invalid crtc\n");
5325 return -EINVAL;
5326 }
5327
5328 spin_lock_init(&sde_crtc->event_lock);
5329
5330 INIT_LIST_HEAD(&sde_crtc->event_free_list);
5331 for (i = 0; i < SDE_CRTC_MAX_EVENT_COUNT; ++i)
5332 list_add_tail(&sde_crtc->event_cache[i].list,
5333 &sde_crtc->event_free_list);
5334
Dhaval Patel5023c3c2017-08-22 12:40:11 -07005335 INIT_LIST_HEAD(&sde_crtc->retire_event_list);
5336 for (i = 0; i < ARRAY_SIZE(sde_crtc->retire_events); i++)
5337 INIT_LIST_HEAD(&sde_crtc->retire_events[i].list);
5338
Clarence Ipa18d4832017-03-13 12:35:44 -07005339 return rc;
5340}
5341
/*
 * sde_crtc_init - allocate and initialize a sde crtc
 * @dev: Pointer to drm device
 * @plane: primary plane to bind to the new crtc
 *
 * Allocates the sde_crtc, initializes its locks/lists/work items,
 * registers it with the DRM core, then installs fence support, generic
 * msm properties, destination scaler state and color processing
 * properties.  Returns the drm_crtc on success or an ERR_PTR on failure.
 */
/* initialize crtc */
struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
{
	struct drm_crtc *crtc = NULL;
	struct sde_crtc *sde_crtc = NULL;
	struct msm_drm_private *priv = NULL;
	struct sde_kms *kms = NULL;
	int i, rc;

	priv = dev->dev_private;
	kms = to_sde_kms(priv->kms);

	sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
	if (!sde_crtc)
		return ERR_PTR(-ENOMEM);

	crtc = &sde_crtc->base;
	crtc->dev = dev;

	mutex_init(&sde_crtc->crtc_lock);
	spin_lock_init(&sde_crtc->spin_lock);
	atomic_set(&sde_crtc->frame_pending, 0);

	mutex_init(&sde_crtc->rp_lock);
	INIT_LIST_HEAD(&sde_crtc->rp_head);

	init_completion(&sde_crtc->frame_done_comp);

	/* seed the frame-event free list with the static work items */
	INIT_LIST_HEAD(&sde_crtc->frame_event_list);
	INIT_LIST_HEAD(&sde_crtc->user_event_list);
	for (i = 0; i < ARRAY_SIZE(sde_crtc->frame_events); i++) {
		INIT_LIST_HEAD(&sde_crtc->frame_events[i].list);
		list_add(&sde_crtc->frame_events[i].list,
				&sde_crtc->frame_event_list);
		kthread_init_work(&sde_crtc->frame_events[i].work,
				sde_crtc_frame_event_work);
	}

	drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs,
				NULL);

	drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
	plane->crtc = crtc;

	/* save user friendly CRTC name for later */
	snprintf(sde_crtc->name, SDE_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);

	/* initialize event handling */
	rc = _sde_crtc_init_events(sde_crtc);
	if (rc) {
		drm_crtc_cleanup(crtc);
		kfree(sde_crtc);
		return ERR_PTR(rc);
	}

	/* initialize output fence support */
	sde_fence_init(&sde_crtc->output_fence, sde_crtc->name, crtc->base.id);

	/* create CRTC properties */
	msm_property_init(&sde_crtc->property_info, &crtc->base, dev,
			priv->crtc_property, sde_crtc->property_data,
			CRTC_PROP_COUNT, CRTC_PROP_BLOBCOUNT,
			sizeof(struct sde_crtc_state));

	sde_crtc_install_properties(crtc, kms->catalog);

	/* Init dest scaler */
	_sde_crtc_dest_scaler_init(sde_crtc, kms->catalog);

	/* Install color processing properties */
	sde_cp_crtc_init(crtc);
	sde_cp_crtc_install_properties(crtc);

	SDE_DEBUG("%s: successfully initialized crtc\n", sde_crtc->name);
	return crtc;
}
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005418
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005419static int _sde_crtc_event_enable(struct sde_kms *kms,
5420 struct drm_crtc *crtc_drm, u32 event)
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005421{
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005422 struct sde_crtc *crtc = NULL;
5423 struct sde_crtc_irq_info *node;
5424 struct msm_drm_private *priv;
5425 unsigned long flags;
5426 bool found = false;
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005427 int ret, i = 0;
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005428
5429 crtc = to_sde_crtc(crtc_drm);
5430 spin_lock_irqsave(&crtc->spin_lock, flags);
5431 list_for_each_entry(node, &crtc->user_event_list, list) {
5432 if (node->event == event) {
5433 found = true;
5434 break;
5435 }
5436 }
5437 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5438
5439 /* event already enabled */
5440 if (found)
5441 return 0;
5442
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005443 node = NULL;
5444 for (i = 0; i < ARRAY_SIZE(custom_events); i++) {
5445 if (custom_events[i].event == event &&
5446 custom_events[i].func) {
5447 node = kzalloc(sizeof(*node), GFP_KERNEL);
5448 if (!node)
5449 return -ENOMEM;
5450 node->event = event;
5451 INIT_LIST_HEAD(&node->list);
5452 node->func = custom_events[i].func;
5453 node->event = event;
5454 break;
5455 }
5456 }
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005457
Gopikrishnaiah Anandanb6b401f2017-03-14 16:39:49 -07005458 if (!node) {
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005459 SDE_ERROR("unsupported event %x\n", event);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005460 return -EINVAL;
5461 }
5462
5463 priv = kms->dev->dev_private;
5464 ret = 0;
5465 if (crtc_drm->enabled) {
5466 sde_power_resource_enable(&priv->phandle, kms->core_client,
5467 true);
Xu Yang37752282017-08-21 13:50:23 +08005468 INIT_LIST_HEAD(&node->irq.list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005469 ret = node->func(crtc_drm, true, &node->irq);
5470 sde_power_resource_enable(&priv->phandle, kms->core_client,
5471 false);
5472 }
5473
5474 if (!ret) {
5475 spin_lock_irqsave(&crtc->spin_lock, flags);
Xu Yang5e53c2e2017-07-11 16:46:28 +08005476 /* irq is regiestered and enabled and set the state */
5477 node->state = IRQ_ENABLED;
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005478 list_add_tail(&node->list, &crtc->user_event_list);
5479 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5480 } else {
5481 kfree(node);
5482 }
5483
5484 return ret;
5485}
5486
5487static int _sde_crtc_event_disable(struct sde_kms *kms,
5488 struct drm_crtc *crtc_drm, u32 event)
5489{
5490 struct sde_crtc *crtc = NULL;
5491 struct sde_crtc_irq_info *node = NULL;
5492 struct msm_drm_private *priv;
5493 unsigned long flags;
5494 bool found = false;
5495 int ret;
5496
5497 crtc = to_sde_crtc(crtc_drm);
5498 spin_lock_irqsave(&crtc->spin_lock, flags);
5499 list_for_each_entry(node, &crtc->user_event_list, list) {
5500 if (node->event == event) {
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005501 found = true;
5502 break;
5503 }
5504 }
5505 spin_unlock_irqrestore(&crtc->spin_lock, flags);
5506
5507 /* event already disabled */
5508 if (!found)
5509 return 0;
5510
5511 /**
5512 * crtc is disabled interrupts are cleared remove from the list,
5513 * no need to disable/de-register.
5514 */
5515 if (!crtc_drm->enabled) {
Xu Yang5e53c2e2017-07-11 16:46:28 +08005516 list_del(&node->list);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005517 kfree(node);
5518 return 0;
5519 }
5520 priv = kms->dev->dev_private;
5521 sde_power_resource_enable(&priv->phandle, kms->core_client, true);
5522 ret = node->func(crtc_drm, false, &node->irq);
Xu Yang5e53c2e2017-07-11 16:46:28 +08005523 list_del(&node->list);
5524 kfree(node);
Gopikrishnaiah Anandan5154c712017-02-27 17:48:24 -08005525 sde_power_resource_enable(&priv->phandle, kms->core_client, false);
5526 return ret;
5527}
5528
5529int sde_crtc_register_custom_event(struct sde_kms *kms,
5530 struct drm_crtc *crtc_drm, u32 event, bool en)
5531{
5532 struct sde_crtc *crtc = NULL;
5533 int ret;
5534
5535 crtc = to_sde_crtc(crtc_drm);
5536 if (!crtc || !kms || !kms->dev) {
5537 DRM_ERROR("invalid sde_crtc %pK kms %pK dev %pK\n", crtc,
5538 kms, ((kms) ? (kms->dev) : NULL));
5539 return -EINVAL;
5540 }
5541
5542 if (en)
5543 ret = _sde_crtc_event_enable(kms, crtc_drm, event);
5544 else
5545 ret = _sde_crtc_event_disable(kms, crtc_drm, event);
5546
5547 return ret;
Gopikrishnaiah Anandande2c81b2017-03-15 12:41:29 -07005548}
Gopikrishnaiah Anandan84b4f672017-04-26 10:28:51 -07005549
/**
 * sde_crtc_power_interrupt_handler - stub handler for the power event
 * @crtc_drm: Pointer to the drm crtc (unused)
 * @en: enable/disable request (unused)
 * @irq: Pointer to the irq callback structure (unused)
 *
 * Placeholder registered in the custom_events table; no hardware
 * interrupt registration is needed for this event, so it always
 * reports success.
 *
 * Return: always 0
 */
static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
	bool en, struct sde_irq_callback *irq)
{
	return 0;
}
Benjamin Chan90139102017-06-21 16:00:39 -04005555
/**
 * sde_crtc_idle_interrupt_handler - stub handler for the idle event
 * @crtc_drm: Pointer to the drm crtc (unused)
 * @en: enable/disable request (unused)
 * @irq: Pointer to the irq callback structure (unused)
 *
 * Placeholder registered in the custom_events table; no hardware
 * interrupt registration is needed for this event, so it always
 * reports success.
 *
 * Return: always 0
 */
static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
	bool en, struct sde_irq_callback *irq)
{
	return 0;
}