blob: 92ab66991ea1746e26ef950a8d8e508441e8ca8e [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Jayant Shekhar98e78a82018-01-12 17:50:55 +05302 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
/*
 * Virtual-encoder logging helpers: prefix each message with the encoder's
 * DRM object id ("enc%d"); a NULL encoder logs as id -1.
 */
#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/*
 * Physical-encoder logging helpers: prefix with the parent encoder id, the
 * interface index and the pingpong index; every field falls back to -1 when
 * the corresponding pointer is NULL.
 */
#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)
59
/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * plan is to create all possible physical encoder types, and switch between
 * them at runtime
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

/* max pingpong / DSC hw blocks tracked per virtual encoder */
#define MAX_CHANNELS_PER_ENC 2

/* size of the debugfs misr input buffer, in bytes */
#define MISR_BUFF_SIZE 256

#define IDLE_SHORT_TIMEOUT	1

/* fault-tolerance margins (ms) used around vsync waits */
#define FAULT_TOLERENCE_DELTA_IN_MS 2

#define FAULT_TOLERENCE_WAIT_IN_MS 5

/* Maximum number of VSYNC wait attempts for RSC state transition */
#define MAX_RSC_WAIT 5
82
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, sets RSC to IDLE and
 *	leaves the RC STATE in the PRE_OFF state.
 *	It should be followed by the STOP event as part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @SDE_ENC_RC_EVENT_PRE_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that a seamless mode switch is in progress. A client
 *	needs to turn off only the irq - leave clocks ON to reduce the mode
 *	switch latency.
 * @SDE_ENC_RC_EVENT_POST_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that the seamless mode switch is complete and resources
 *	are acquired. Clients want to turn on the irq again and update the rsc
 *	with the new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for
 *	IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core
 *	clocks and request RSC with IDLE state and change the resource state
 *	to IDLE.
 * @SDE_ENC_RC_EVENT_EARLY_WAKEUP:
 *	This event is triggered from the input event thread when a touch event
 *	is received from the input device. On receiving this event,
 *	- If the device is in SDE_ENC_RC_STATE_IDLE state, it turns ON the
 *	  clocks and enables RSC.
 *	- If the device is in SDE_ENC_RC_STATE_ON state, it resets the delayed
 *	  off work since a new commit is imminent.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE,
	SDE_ENC_RC_EVENT_EARLY_WAKEUP,
};
142
/*
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
158
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb: callback handler for frame event
 * @crtc_frame_event_cb_data: callback handler private data
 * @vsync_event_timer:	vsync timer
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @input_event_work:	worker to handle input device touch events
 * @input_handler:	handler for input device events
 * @topology:		topology of the display
 * @vblank_enabled:	boolean to track userspace vblank vote
 * @rsc_config:		rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @crtc:		pointer to drm_crtc
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct kthread_work input_event_work;
	struct input_handler *input_handler;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	struct drm_crtc *crtc;
};

#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700263
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500264static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
265{
266 struct msm_drm_private *priv;
267 struct sde_kms *sde_kms;
268 struct pm_qos_request *req;
269 u32 cpu_mask;
270 u32 cpu_dma_latency;
271 int cpu;
272
273 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
274 SDE_ERROR("drm device invalid\n");
275 return;
276 }
277
278 priv = drm_enc->dev->dev_private;
279 if (!priv->kms) {
280 SDE_ERROR("invalid kms\n");
281 return;
282 }
283
284 sde_kms = to_sde_kms(priv->kms);
285 if (!sde_kms || !sde_kms->catalog)
286 return;
287
288 cpu_mask = sde_kms->catalog->perf.cpu_mask;
289 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
290 if (!cpu_mask)
291 return;
292
293 req = &sde_kms->pm_qos_cpu_req;
294 req->type = PM_QOS_REQ_AFFINE_CORES;
295 cpumask_empty(&req->cpus_affine);
296 for_each_possible_cpu(cpu) {
297 if ((1 << cpu) & cpu_mask)
298 cpumask_set_cpu(cpu, &req->cpus_affine);
299 }
300 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
301
302 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
303}
304
305static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
306{
307 struct msm_drm_private *priv;
308 struct sde_kms *sde_kms;
309
310 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
311 SDE_ERROR("drm device invalid\n");
312 return;
313 }
314
315 priv = drm_enc->dev->dev_private;
316 if (!priv->kms) {
317 SDE_ERROR("invalid kms\n");
318 return;
319 }
320
321 sde_kms = to_sde_kms(priv->kms);
322 if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
323 return;
324
325 pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
326}
327
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700328static struct drm_connector_state *_sde_encoder_get_conn_state(
329 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800330{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700331 struct msm_drm_private *priv;
332 struct sde_kms *sde_kms;
333 struct list_head *connector_list;
334 struct drm_connector *conn_iter;
335
336 if (!drm_enc) {
337 SDE_ERROR("invalid argument\n");
338 return NULL;
339 }
340
341 priv = drm_enc->dev->dev_private;
342 sde_kms = to_sde_kms(priv->kms);
343 connector_list = &sde_kms->dev->mode_config.connector_list;
344
345 list_for_each_entry(conn_iter, connector_list, head)
346 if (conn_iter->encoder == drm_enc)
347 return conn_iter->state;
348
349 return NULL;
350}
351
352static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
353 struct msm_mode_info *mode_info)
354{
355 struct drm_connector_state *conn_state;
356
357 if (!drm_enc || !mode_info) {
358 SDE_ERROR("invalid arguments\n");
359 return -EINVAL;
360 }
361
362 conn_state = _sde_encoder_get_conn_state(drm_enc);
363 if (!conn_state) {
364 SDE_ERROR("invalid connector state for the encoder: %d\n",
365 drm_enc->base.id);
366 return -EINVAL;
367 }
368
369 return sde_connector_get_mode_info(conn_state, mode_info);
370}
371
372static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
373{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400374 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700375 struct msm_mode_info mode_info;
376 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400377
378 if (!drm_enc)
379 return false;
380
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700381 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
382 if (rc) {
383 SDE_ERROR("failed to get mode info, enc: %d\n",
384 drm_enc->base.id);
385 return false;
386 }
387
388 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800389
390 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
391}
392
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400393bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
394{
395 enum sde_rm_topology_name topology;
396 struct sde_encoder_virt *sde_enc;
397 struct drm_connector *drm_conn;
398
399 if (!drm_enc)
400 return false;
401
402 sde_enc = to_sde_encoder_virt(drm_enc);
403 if (!sde_enc->cur_master)
404 return false;
405
406 drm_conn = sde_enc->cur_master->connector;
407 if (!drm_conn)
408 return false;
409
410 topology = sde_connector_get_topology_name(drm_conn);
411 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
412 return true;
413
414 return false;
415}
416
Dhaval Patelf9245d62017-03-28 16:24:00 -0700417static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
418 bool enable)
419{
420 struct drm_encoder *drm_enc;
421 struct msm_drm_private *priv;
422 struct sde_kms *sde_kms;
423
424 if (!sde_enc) {
425 SDE_ERROR("invalid sde enc\n");
426 return -EINVAL;
427 }
428
429 drm_enc = &sde_enc->base;
430 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
431 SDE_ERROR("drm device invalid\n");
432 return -EINVAL;
433 }
434
435 priv = drm_enc->dev->dev_private;
436 if (!priv->kms) {
437 SDE_ERROR("invalid kms\n");
438 return -EINVAL;
439 }
440
441 sde_kms = to_sde_kms(priv->kms);
442
443 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
444 enable);
445}
446
/**
 * sde_encoder_helper_report_irq_timeout - log an irq wait timeout and notify
 *	the parent encoder via the frame-done callback so the frame event
 *	state machine can recover.
 * @phys_enc: physical encoder whose irq timed out; callers guarantee
 *	phys_enc, parent and hw_pp are valid (dereferenced unchecked here)
 * @intr_idx: logical interrupt index that timed out
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	/* propagate the error up to the crtc frame-event handler, if wired */
	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
461
/**
 * sde_encoder_helper_wait_for_irq - wait, with timeout, for the interrupt
 *	registered at @intr_idx to fire. If the wait times out but the hw
 *	status shows the interrupt actually latched, the missed callback is
 *	invoked by hand and the wait is treated as a success.
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 * @wait_info: wait parameters, including the pending-count atomic
 * Return: 0 on success (irq fired or was recovered), -ETIMEDOUT when the
 *	irq never latched, -EWOULDBLOCK when the encoder is disabled,
 *	-EINVAL on bad arguments
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered/enabled: nothing to wait on, report success */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* timed out: check whether the hw latched the irq anyway */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/* run the missed callback by hand with irqs masked */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt), irq_status,
					SDE_EVTLOG_ERROR);
		}
	} else {
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
543
/**
 * sde_encoder_helper_register_irq - look up, register a callback for, and
 *	enable the hw interrupt behind @intr_idx. Idempotent: an entry with
 *	a valid irq_idx is left untouched.
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 * Return: 0 on success or if already registered, negative errno on failure
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* a non-negative irq_idx means this entry is already registered */
	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		/* -EINVAL marks the slot free; see the irq_idx >= 0 check */
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration before bailing out */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
603
/**
 * sde_encoder_helper_unregister_irq - disable and unregister the hw
 *	interrupt behind @intr_idx. Teardown is best-effort: individual
 *	failures are logged to the event log but do not abort, and the
 *	table slot is always invalidated at the end.
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 * Return: 0, or -EINVAL when @phys_enc is NULL
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
			DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark the slot free so a future register call can reuse it */
	irq->irq_idx = -EINVAL;

	return 0;
}
645
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400646void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400647 struct sde_encoder_hw_resources *hw_res,
648 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400649{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400650 struct sde_encoder_virt *sde_enc = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700651 struct msm_mode_info mode_info;
652 int rc, i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400653
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400654 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400655 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
656 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400657 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400658 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400659
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400660 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400661 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400662
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400663 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400664 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400665 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
666
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400667 for (i = 0; i < sde_enc->num_phys_encs; i++) {
668 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
669
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400670 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400671 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400672 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700673
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700674 /**
675 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
676 * called from atomic_check phase. Use the below API to get mode
677 * information of the temporary conn_state passed.
678 */
679 rc = sde_connector_get_mode_info(conn_state, &mode_info);
680 if (rc) {
681 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
682 return;
683 }
684
685 hw_res->topology = mode_info.topology;
Jeykumar Sankaran6f215d42017-09-12 16:15:23 -0700686 hw_res->is_primary = sde_enc->disp_info.is_primary;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400687}
688
Clarence Ip3649f8b2016-10-31 09:59:44 -0400689void sde_encoder_destroy(struct drm_encoder *drm_enc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400690{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400691 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400692 int i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400693
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400694 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -0400695 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400696 return;
697 }
698
699 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400700 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400701
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700702 mutex_lock(&sde_enc->enc_lock);
Dhaval Patel020f7e122016-11-15 14:39:18 -0800703 sde_rsc_client_destroy(sde_enc->rsc_client);
704
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700705 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400706 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
707
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400708 if (phys && phys->ops.destroy) {
709 phys->ops.destroy(phys);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400710 --sde_enc->num_phys_encs;
711 sde_enc->phys_encs[i] = NULL;
712 }
713 }
714
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700715 if (sde_enc->num_phys_encs)
Clarence Ip19af1362016-09-23 14:57:51 -0400716 SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400717 sde_enc->num_phys_encs);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700718 sde_enc->num_phys_encs = 0;
719 mutex_unlock(&sde_enc->enc_lock);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400720
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400721 drm_encoder_cleanup(drm_enc);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700722 mutex_destroy(&sde_enc->enc_lock);
723
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800724 if (sde_enc->input_handler) {
725 input_unregister_handler(sde_enc->input_handler);
726 kfree(sde_enc->input_handler);
727 }
728
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400729 kfree(sde_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700730}
731
/**
 * sde_encoder_helper_split_config - program the MDP_TOP split-pipe and
 *	pp-split registers for this physical encoder's role. DSI-only; all
 *	other connector types return without touching hardware.
 * @phys_enc: physical encoder being configured (must have hw_mdptop/parent)
 * @interface: interface index to program as the split/slave intf
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split modes are only meaningful for DSI panels */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/*
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update. cfg is still zero-initialized here, so both writes below
	 * program the disabled state.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	/* single-flush: slave's flush is triggered by the master's */
	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* pp-split drives one intf from a single pingpong; else disabled */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
801
/**
 * sde_encoder_virt_atomic_check - validate a proposed encoder state during
 *	the atomic_check phase: delegates to each physical encoder, validates
 *	ROIs against the mode on modeset, records the previous topology,
 *	reserves RM resources, and publishes topology/blob data on the
 *	connector state.
 * @drm_enc: encoder being checked
 * @crtc_state: proposed crtc state (mode, adjusted_mode, ROI list)
 * @conn_state: proposed connector state (ROIs, mode info, properties)
 *
 * Return: 0 on success, negative errno if the configuration is rejected.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* atomic_check takes precedence over legacy mode_fixup */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/*
	 * On a modeset, partial-update ROIs are not allowed: any merged ROI
	 * supplied on the connector or crtc must exactly equal the full
	 * adjusted mode.
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
					roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
					roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/*
		 * record topology in previous atomic state to be able to
		 * handle topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top  = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		/* refresh mode info from the connector for the new mode */
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/*
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	/* validate the user-supplied ROI v1 data against connector caps */
	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
967
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800968static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
969 int pic_width, int pic_height)
970{
971 if (!dsc || !pic_width || !pic_height) {
972 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
973 pic_width, pic_height);
974 return -EINVAL;
975 }
976
977 if ((pic_width % dsc->slice_width) ||
978 (pic_height % dsc->slice_height)) {
979 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
980 pic_width, pic_height,
981 dsc->slice_width, dsc->slice_height);
982 return -EINVAL;
983 }
984
985 dsc->pic_width = pic_width;
986 dsc->pic_height = pic_height;
987
988 return 0;
989}
990
991static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
992 int intf_width)
993{
994 int slice_per_pkt, slice_per_intf;
995 int bytes_in_slice, total_bytes_per_intf;
996
997 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
998 (intf_width < dsc->slice_width)) {
999 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
1000 intf_width, dsc ? dsc->slice_width : -1);
1001 return;
1002 }
1003
1004 slice_per_pkt = dsc->slice_per_pkt;
1005 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
1006
1007 /*
1008 * If slice_per_pkt is greater than slice_per_intf then default to 1.
1009 * This can happen during partial update.
1010 */
1011 if (slice_per_pkt > slice_per_intf)
1012 slice_per_pkt = 1;
1013
1014 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
1015 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
1016
1017 dsc->eol_byte_num = total_bytes_per_intf % 3;
1018 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1019 dsc->bytes_in_slice = bytes_in_slice;
1020 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1021 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1022}
1023
1024static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1025 int enc_ip_width)
1026{
1027 int ssm_delay, total_pixels, soft_slice_per_enc;
1028
1029 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1030
1031 /*
1032 * minimum number of initial line pixels is a sum of:
1033 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1034 * 91 for 10 bpc) * 3
1035 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1036 * 3. the initial xmit delay
1037 * 4. total pipeline delay through the "lock step" of encoder (47)
1038 * 5. 6 additional pixels as the output of the rate buffer is
1039 * 48 bits wide
1040 */
1041 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1042 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1043 if (soft_slice_per_enc > 1)
1044 total_pixels += (ssm_delay * 3);
1045 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1046 return 0;
1047}
1048
1049static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1050 struct msm_display_dsc_info *dsc)
1051{
1052 /*
1053 * As per the DSC spec, ICH_RESET can be either end of the slice line
1054 * or at the end of the slice. HW internally generates ich_reset at
1055 * end of the slice line if DSC_MERGE is used or encoder has two
1056 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1057 * is not used then it will generate ich_reset at the end of slice.
1058 *
1059 * Now as per the spec, during one PPS session, position where
1060 * ich_reset is generated should not change. Now if full-screen frame
1061 * has more than 1 soft slice then HW will automatically generate
1062 * ich_reset at the end of slice_line. But for the same panel, if
1063 * partial frame is enabled and only 1 encoder is used with 1 slice,
1064 * then HW will generate ich_reset at end of the slice. This is a
1065 * mismatch. Prevent this by overriding HW's decision.
1066 */
1067 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1068 (dsc->slice_width == dsc->pic_width);
1069}
1070
/*
 * Program one DSC/pingpong pair. When @enable is false, only the
 * pingpong's DSC path is disabled and nothing else is touched. When
 * enabling, the DSC core is configured before the pingpong is set up
 * and enabled, preserving the required programming order. All hw ops
 * are optional and skipped when not provided by the hardware catalog.
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	/* core DSC parameters, including the ich_reset override */
	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	/* rate-control threshold tables */
	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1093
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001094static void _sde_encoder_get_connector_roi(
1095 struct sde_encoder_virt *sde_enc,
1096 struct sde_rect *merged_conn_roi)
1097{
1098 struct drm_connector *drm_conn;
1099 struct sde_connector_state *c_state;
1100
1101 if (!sde_enc || !merged_conn_roi)
1102 return;
1103
1104 drm_conn = sde_enc->phys_encs[0]->connector;
1105
1106 if (!drm_conn || !drm_conn->state)
1107 return;
1108
1109 c_state = to_sde_connector_state(drm_conn->state);
1110 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1111}
1112
/*
 * Configure DSC for topologies with N layer mixers feeding a single DSC
 * encoder and a single interface (SINGLEPIPE_DSC, DUALPIPE_3DMERGE_DSC).
 * Derives the DSC parameters from the current connector ROI and programs
 * the first DSC/pingpong pair. Returns 0 on success, -EINVAL on failure.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* picture dimensions come from the (possibly partial) ROI */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	/* single interface carries the full frame's worth of slices */
	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* no dsc merge: encoder input width equals interface input width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* single encoder path: never a half-panel partial update */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001163
/*
 * Configure DSC for the DUALPIPE_DSC topology: two layer mixers, two DSC
 * encoders, two interfaces, no DSC merge. Each interface/DSC pair gets
 * its own copy of the DSC parameters; on a half-panel partial update only
 * the affected pair is enabled and the other is disabled.
 * Returns 0 on success, -EINVAL on failure.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one display affected means left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full updates split the slices evenly across both interfaces */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	/* enable only the pairs whose display bit is set in this kickoff */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1257
/*
 * Configure DSC for the DUALPIPE_DSCMERGE topology: two layer mixers and
 * two DSC encoders merged onto a single interface. Both DSC cores share
 * one parameter set; on a half-panel partial update the second core is
 * disabled and multiplex/split-panel modes are dropped.
 * Returns 0 on success, -EINVAL on failure.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one display affected means left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	/* picture dimensions come from the (possibly partial) ROI */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* first core always runs; second only when the full panel updates */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1329
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001330static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1331{
1332 struct sde_encoder_virt *sde_enc;
1333 struct drm_connector *drm_conn;
1334 struct drm_display_mode *adj_mode;
1335 struct sde_rect roi;
1336
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001337 if (!drm_enc) {
1338 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001339 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001340 }
1341
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001342 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001343 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1344 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001345 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001346 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001347
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001348 if (!sde_enc->cur_master) {
1349 SDE_ERROR("invalid cur_master parameter\n");
1350 return -EINVAL;
1351 }
1352
1353 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001354 drm_conn = sde_enc->cur_master->connector;
1355
1356 _sde_encoder_get_connector_roi(sde_enc, &roi);
1357 if (sde_kms_rect_is_null(&roi)) {
1358 roi.w = adj_mode->hdisplay;
1359 roi.h = adj_mode->vdisplay;
1360 }
1361
1362 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1363 sizeof(sde_enc->prv_conn_roi));
1364 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1365
1366 return 0;
1367}
1368
1369static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1370 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001371{
1372 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001373 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001374 int ret = 0;
1375
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001376 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1377 !sde_enc->phys_encs[0]->connector)
1378 return -EINVAL;
1379
1380 drm_conn = sde_enc->phys_encs[0]->connector;
1381
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001382 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001383 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001384 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1385 return -EINVAL;
1386 }
1387
Ingrid Gallardo83532222017-06-02 16:48:51 -07001388 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001389 SDE_EVT32(DRMID(&sde_enc->base), topology,
1390 sde_enc->cur_conn_roi.x,
1391 sde_enc->cur_conn_roi.y,
1392 sde_enc->cur_conn_roi.w,
1393 sde_enc->cur_conn_roi.h,
1394 sde_enc->prv_conn_roi.x,
1395 sde_enc->prv_conn_roi.y,
1396 sde_enc->prv_conn_roi.w,
1397 sde_enc->prv_conn_roi.h,
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001398 sde_enc->cur_master->cached_mode.hdisplay,
1399 sde_enc->cur_master->cached_mode.vdisplay);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001400
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001401 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1402 &sde_enc->prv_conn_roi))
1403 return ret;
1404
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001405 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001406 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001407 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1408 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001409 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001410 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001411 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001412 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001413 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001414 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001415 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001416 default:
1417 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1418 topology);
1419 return -EINVAL;
1420 };
1421
1422 return ret;
1423}
1424
/*
 * Select and program the vsync (TE) source for all pingpongs of a
 * command-mode display: a watchdog timer when requested (or when a dummy
 * source is needed), otherwise the panel TE GPIO. No-op for displays
 * without command-mode capability or when the hw op is absent.
 */
static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
		struct msm_display_info *disp_info, bool is_dummy)
{
	struct sde_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	int i, rc = 0;

	if (!sde_enc || !disp_info) {
		SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
					sde_enc != NULL, disp_info != NULL);
		return;
	} else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
		SDE_ERROR("invalid num phys enc %d/%d\n",
				sde_enc->num_phys_encs,
				(int) ARRAY_SIZE(sde_enc->hw_pp));
		return;
	}

	drm_enc = &sde_enc->base;
	/* these pointers are validated in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	hw_mdptop = sde_kms->hw_mdp;
	if (!hw_mdptop) {
		SDE_ERROR("invalid mdptop\n");
		return;
	}

	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		/* every pingpong of this encoder shares the vsync source */
		for (i = 0; i < sde_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = sde_enc->num_phys_encs;
		vsync_cfg.frame_rate = mode_info.frame_rate;
		/* dummy source takes priority, then the watchdog fallback */
		if (is_dummy)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
		else if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
		vsync_cfg.is_dummy = is_dummy;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
1487
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001488static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1489{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001490 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001491 struct sde_hw_pingpong *hw_pp = NULL;
1492 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001493
1494 if (!sde_enc || !sde_enc->phys_encs[0] ||
1495 !sde_enc->phys_encs[0]->connector) {
1496 SDE_ERROR("invalid params %d %d\n",
1497 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1498 return -EINVAL;
1499 }
1500
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001501 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001502 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1503 hw_pp = sde_enc->hw_pp[i];
1504 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001505
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001506 if (hw_pp && hw_pp->ops.disable_dsc)
1507 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001508
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001509 if (hw_dsc && hw_dsc->ops.dsc_disable)
1510 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001511 }
1512
1513 return ret;
1514}
1515
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001516static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001517 struct drm_encoder *drm_enc,
1518 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001519{
1520 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001521 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001522 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001523 struct sde_rsc_cmd_config *rsc_config;
1524 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001525 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001526 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001527 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1528 int wait_count = 0;
1529 struct drm_crtc *primary_crtc;
1530 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001531 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001532 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001533
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001534 if (!drm_enc || !drm_enc->dev) {
1535 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001536 return -EINVAL;
1537 }
1538
1539 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001540 crtc = sde_enc->crtc;
1541
1542 if (!sde_enc->crtc) {
1543 SDE_ERROR("invalid crtc parameter\n");
1544 return -EINVAL;
1545 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001546 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001547 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001548
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001549 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001550 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001551 return 0;
1552 }
1553
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001554 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1555 if (rc) {
1556 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1557 return 0;
1558 }
1559
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001560 /**
1561 * only primary command mode panel can request CMD state.
1562 * all other panels/displays can request for VID state including
1563 * secondary command mode panel.
1564 */
1565 rsc_state = enable ?
1566 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
1567 disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1568 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001569 prefill_lines = config ? mode_info.prefill_lines +
1570 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001571
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001572 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001573 if ((rsc_config->fps != mode_info.frame_rate) ||
1574 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001575 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001576 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1577 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1578 rsc_config->fps = mode_info.frame_rate;
1579 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001580 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001581 rsc_config->jitter_numer = mode_info.jitter_numer;
1582 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001583 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001584 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001585
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001586 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001587 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001588 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001589 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001590
1591 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001592 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001593 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001594 } else {
1595 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001596 rsc_state, NULL, crtc->base.id,
1597 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001598 }
1599
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001600 /**
1601 * if RSC performed a state change that requires a VBLANK wait, it will
1602 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1603 *
1604 * if we are the primary display, we will need to enable and wait
1605 * locally since we hold the commit thread
1606 *
1607 * if we are an external display, we must send a signal to the primary
1608 * to enable its VBLANK and wait one, since the RSC hardware is driven
1609 * by the primary panel's VBLANK signals
1610 */
1611 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1612 if (ret) {
1613 SDE_ERROR_ENC(sde_enc,
1614 "sde rsc client update failed ret:%d\n", ret);
1615 return ret;
1616 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1617 return ret;
1618 }
1619
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001620 if (wait_vblank_crtc_id)
1621 wait_refcount =
1622 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1623 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1624 SDE_EVTLOG_FUNC_ENTRY);
1625
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001626 if (crtc->base.id != wait_vblank_crtc_id) {
1627 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1628 if (!primary_crtc) {
1629 SDE_ERROR_ENC(sde_enc,
1630 "failed to find primary crtc id %d\n",
1631 wait_vblank_crtc_id);
1632 return -EINVAL;
1633 }
1634 pipe = drm_crtc_index(primary_crtc);
1635 }
1636
1637 /**
1638 * note: VBLANK is expected to be enabled at this point in
1639 * resource control state machine if on primary CRTC
1640 */
1641 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1642 if (sde_rsc_client_is_state_update_complete(
1643 sde_enc->rsc_client))
1644 break;
1645
1646 if (crtc->base.id == wait_vblank_crtc_id)
1647 ret = sde_encoder_wait_for_event(drm_enc,
1648 MSM_ENC_VBLANK);
1649 else
1650 drm_wait_one_vblank(drm_enc->dev, pipe);
1651
1652 if (ret) {
1653 SDE_ERROR_ENC(sde_enc,
1654 "wait for vblank failed ret:%d\n", ret);
1655 break;
1656 }
1657 }
1658
1659 if (wait_count >= MAX_RSC_WAIT)
1660 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1661 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001662
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001663 if (wait_refcount)
1664 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1665 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1666 SDE_EVTLOG_FUNC_EXIT);
1667
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001668 return ret;
1669}
1670
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001671static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1672{
1673 struct sde_encoder_virt *sde_enc;
1674 int i;
1675
1676 if (!drm_enc) {
1677 SDE_ERROR("invalid encoder\n");
1678 return;
1679 }
1680
1681 sde_enc = to_sde_encoder_virt(drm_enc);
1682
1683 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1684 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1685 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1686
1687 if (phys && phys->ops.irq_control)
1688 phys->ops.irq_control(phys, enable);
1689 }
1690
1691}
1692
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001693/* keep track of the userspace vblank during modeset */
1694static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1695 u32 sw_event)
1696{
1697 struct sde_encoder_virt *sde_enc;
1698 bool enable;
1699 int i;
1700
1701 if (!drm_enc) {
1702 SDE_ERROR("invalid encoder\n");
1703 return;
1704 }
1705
1706 sde_enc = to_sde_encoder_virt(drm_enc);
1707 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1708 sw_event, sde_enc->vblank_enabled);
1709
1710 /* nothing to do if vblank not enabled by userspace */
1711 if (!sde_enc->vblank_enabled)
1712 return;
1713
1714 /* disable vblank on pre_modeset */
1715 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1716 enable = false;
1717 /* enable vblank on post_modeset */
1718 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1719 enable = true;
1720 else
1721 return;
1722
1723 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1724 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1725
1726 if (phys && phys->ops.control_vblank_irq)
1727 phys->ops.control_vblank_irq(phys, enable);
1728 }
1729}
1730
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001731struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1732{
1733 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001734
1735 if (!drm_enc)
1736 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001737 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001738 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001739}
1740
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001741static void _sde_encoder_resource_control_rsc_update(
1742 struct drm_encoder *drm_enc, bool enable)
1743{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001744 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001745 struct sde_encoder_virt *sde_enc;
1746
1747 if (!drm_enc) {
1748 SDE_ERROR("invalid encoder argument\n");
1749 return;
1750 }
1751 sde_enc = to_sde_encoder_virt(drm_enc);
1752 if (!sde_enc->crtc) {
1753 SDE_ERROR("invalid crtc\n");
1754 return;
1755 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001756
1757 if (enable) {
1758 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001759 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001760
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001761 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1762 } else {
1763 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001764 }
1765}
1766
Alan Kwong1124f1f2017-11-10 18:14:39 -05001767static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001768 bool enable)
1769{
1770 struct msm_drm_private *priv;
1771 struct sde_kms *sde_kms;
1772 struct sde_encoder_virt *sde_enc;
Alan Kwong1124f1f2017-11-10 18:14:39 -05001773 int rc;
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001774 bool is_cmd_mode, is_primary;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001775
1776 sde_enc = to_sde_encoder_virt(drm_enc);
1777 priv = drm_enc->dev->dev_private;
1778 sde_kms = to_sde_kms(priv->kms);
1779
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001780 is_cmd_mode = sde_enc->disp_info.capabilities &
1781 MSM_DISPLAY_CAP_CMD_MODE;
1782 is_primary = sde_enc->disp_info.is_primary;
1783
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001784 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1785 SDE_EVT32(DRMID(drm_enc), enable);
1786
1787 if (!sde_enc->cur_master) {
1788 SDE_ERROR("encoder master not set\n");
Alan Kwong1124f1f2017-11-10 18:14:39 -05001789 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001790 }
1791
1792 if (enable) {
1793 /* enable SDE core clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001794 rc = sde_power_resource_enable(&priv->phandle,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001795 sde_kms->core_client, true);
Alan Kwong1124f1f2017-11-10 18:14:39 -05001796 if (rc) {
1797 SDE_ERROR("failed to enable power resource %d\n", rc);
1798 SDE_EVT32(rc, SDE_EVTLOG_ERROR);
1799 return rc;
1800 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001801
1802 /* enable DSI clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001803 rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
1804 true);
1805 if (rc) {
1806 SDE_ERROR("failed to enable clk control %d\n", rc);
1807 sde_power_resource_enable(&priv->phandle,
1808 sde_kms->core_client, false);
1809 return rc;
1810 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001811
1812 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001813 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001814
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001815 if (is_cmd_mode && is_primary)
1816 _sde_encoder_pm_qos_add_request(drm_enc);
1817
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001818 } else {
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001819 if (is_cmd_mode && is_primary)
1820 _sde_encoder_pm_qos_remove_request(drm_enc);
1821
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001822 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001823 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001824
1825 /* disable DSI clks */
1826 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
1827
1828 /* disable SDE core clks */
1829 sde_power_resource_enable(&priv->phandle,
1830 sde_kms->core_client, false);
1831 }
1832
Alan Kwong1124f1f2017-11-10 18:14:39 -05001833 return 0;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001834}
1835
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001836static void sde_encoder_input_event_handler(struct input_handle *handle,
1837 unsigned int type, unsigned int code, int value)
1838{
1839 struct drm_encoder *drm_enc = NULL;
1840 struct sde_encoder_virt *sde_enc = NULL;
1841 struct msm_drm_thread *disp_thread = NULL;
1842 struct msm_drm_private *priv = NULL;
1843
1844 if (!handle || !handle->handler || !handle->handler->private) {
1845 SDE_ERROR("invalid encoder for the input event\n");
1846 return;
1847 }
1848
1849 drm_enc = (struct drm_encoder *)handle->handler->private;
1850 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
1851 SDE_ERROR("invalid parameters\n");
1852 return;
1853 }
1854
1855 priv = drm_enc->dev->dev_private;
1856 sde_enc = to_sde_encoder_virt(drm_enc);
1857 if (!sde_enc->crtc || (sde_enc->crtc->index
1858 >= ARRAY_SIZE(priv->disp_thread))) {
1859 SDE_DEBUG_ENC(sde_enc,
1860 "invalid cached CRTC: %d or crtc index: %d\n",
1861 sde_enc->crtc == NULL,
1862 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
1863 return;
1864 }
1865
1866 SDE_EVT32_VERBOSE(DRMID(drm_enc));
1867
1868 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
1869
1870 kthread_queue_work(&disp_thread->worker,
1871 &sde_enc->input_event_work);
1872}
1873
1874
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001875static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
1876 u32 sw_event)
1877{
Dhaval Patel99412a52017-07-24 19:16:45 -07001878 bool autorefresh_enabled = false;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07001879 unsigned int lp, idle_pc_duration;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001880 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001881 struct msm_drm_private *priv;
1882 struct msm_drm_thread *disp_thread;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001883 int ret;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001884 bool is_vid_mode = false;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001885
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001886 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
1887 SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
1888 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001889 return -EINVAL;
1890 }
1891 sde_enc = to_sde_encoder_virt(drm_enc);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001892 priv = drm_enc->dev->dev_private;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001893 is_vid_mode = sde_enc->disp_info.capabilities &
1894 MSM_DISPLAY_CAP_VID_MODE;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001895
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001896 /*
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001897 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001898 * events and return early for other events (ie wb display).
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001899 */
1900 if (!sde_enc->idle_pc_supported &&
1901 (sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001902 sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
1903 sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
1904 sw_event != SDE_ENC_RC_EVENT_STOP &&
1905 sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001906 return 0;
1907
1908 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
1909 sde_enc->idle_pc_supported);
Dhaval Patela5f75952017-07-25 11:17:41 -07001910 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001911 sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);
1912
1913 switch (sw_event) {
1914 case SDE_ENC_RC_EVENT_KICKOFF:
1915 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001916 if (kthread_cancel_delayed_work_sync(
1917 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001918 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
1919 sw_event);
1920
1921 mutex_lock(&sde_enc->rc_lock);
1922
1923 /* return if the resource control is already in ON state */
1924 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
1925 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
1926 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001927 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1928 SDE_EVTLOG_FUNC_CASE1);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001929 mutex_unlock(&sde_enc->rc_lock);
1930 return 0;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001931 } else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
1932 sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
1933 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
1934 sw_event, sde_enc->rc_state);
1935 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1936 SDE_EVTLOG_ERROR);
1937 mutex_unlock(&sde_enc->rc_lock);
1938 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001939 }
1940
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001941 if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
1942 _sde_encoder_irq_control(drm_enc, true);
1943 } else {
1944 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001945 ret = _sde_encoder_resource_control_helper(drm_enc,
1946 true);
1947 if (ret) {
1948 SDE_ERROR_ENC(sde_enc,
1949 "sw_event:%d, rc in state %d\n",
1950 sw_event, sde_enc->rc_state);
1951 SDE_EVT32(DRMID(drm_enc), sw_event,
1952 sde_enc->rc_state,
1953 SDE_EVTLOG_ERROR);
1954 mutex_unlock(&sde_enc->rc_lock);
1955 return ret;
1956 }
1957
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001958 _sde_encoder_resource_control_rsc_update(drm_enc, true);
1959 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001960
1961 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1962 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
1963 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
1964
1965 mutex_unlock(&sde_enc->rc_lock);
1966 break;
1967
1968 case SDE_ENC_RC_EVENT_FRAME_DONE:
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001969 if (!sde_enc->crtc) {
1970 SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
1971 return -EINVAL;
1972 }
1973
1974 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
1975 SDE_ERROR("invalid crtc index :%u\n",
1976 sde_enc->crtc->index);
1977 return -EINVAL;
1978 }
1979 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
1980
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001981 /*
1982 * mutex lock is not used as this event happens at interrupt
1983 * context. And locking is not required as, the other events
1984 * like KICKOFF and STOP does a wait-for-idle before executing
1985 * the resource_control
1986 */
1987 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
1988 SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
1989 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001990 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1991 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001992 return -EINVAL;
1993 }
1994
1995 /*
1996 * schedule off work item only when there are no
1997 * frames pending
1998 */
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001999 if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002000 SDE_DEBUG_ENC(sde_enc, "skip schedule work");
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002001 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2002 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002003 return 0;
2004 }
2005
Dhaval Patel99412a52017-07-24 19:16:45 -07002006 /* schedule delayed off work if autorefresh is disabled */
2007 if (sde_enc->cur_master &&
2008 sde_enc->cur_master->ops.is_autorefresh_enabled)
2009 autorefresh_enabled =
2010 sde_enc->cur_master->ops.is_autorefresh_enabled(
2011 sde_enc->cur_master);
2012
Clarence Ip89628132017-07-27 13:33:51 -04002013 /* set idle timeout based on master connector's lp value */
2014 if (sde_enc->cur_master)
2015 lp = sde_connector_get_lp(
2016 sde_enc->cur_master->connector);
2017 else
2018 lp = SDE_MODE_DPMS_ON;
2019
2020 if (lp == SDE_MODE_DPMS_LP2)
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002021 idle_pc_duration = IDLE_SHORT_TIMEOUT;
Clarence Ip89628132017-07-27 13:33:51 -04002022 else
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002023 idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;
Clarence Ip89628132017-07-27 13:33:51 -04002024
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002025 if (!autorefresh_enabled)
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002026 kthread_mod_delayed_work(
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002027 &disp_thread->worker,
2028 &sde_enc->delayed_off_work,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002029 msecs_to_jiffies(idle_pc_duration));
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002030 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Clarence Ip89628132017-07-27 13:33:51 -04002031 autorefresh_enabled,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002032 idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002033 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
2034 sw_event);
2035 break;
2036
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002037 case SDE_ENC_RC_EVENT_PRE_STOP:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002038 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002039 if (kthread_cancel_delayed_work_sync(
2040 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002041 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2042 sw_event);
2043
2044 mutex_lock(&sde_enc->rc_lock);
2045
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002046 if (is_vid_mode &&
2047 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2048 _sde_encoder_irq_control(drm_enc, true);
2049 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002050 /* skip if is already OFF or IDLE, resources are off already */
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002051 else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002052 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2053 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
2054 sw_event, sde_enc->rc_state);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002055 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2056 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002057 mutex_unlock(&sde_enc->rc_lock);
2058 return 0;
2059 }
2060
2061 /**
2062 * IRQs are still enabled currently, which allows wait for
2063 * VBLANK which RSC may require to correctly transition to OFF
2064 */
2065 _sde_encoder_resource_control_rsc_update(drm_enc, false);
2066
2067 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2068 SDE_ENC_RC_STATE_PRE_OFF,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002069 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002070
2071 sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;
2072
2073 mutex_unlock(&sde_enc->rc_lock);
2074 break;
2075
2076 case SDE_ENC_RC_EVENT_STOP:
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002077 /* cancel vsync event work and timer */
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302078 kthread_cancel_work_sync(&sde_enc->vsync_event_work);
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002079 del_timer_sync(&sde_enc->vsync_event_timer);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002080
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302081 mutex_lock(&sde_enc->rc_lock);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002082 /* return if the resource control is already in OFF state */
2083 if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
2084 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
2085 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002086 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2087 SDE_EVTLOG_FUNC_CASE4);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002088 mutex_unlock(&sde_enc->rc_lock);
2089 return 0;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002090 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
2091 sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002092 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2093 sw_event, sde_enc->rc_state);
2094 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2095 SDE_EVTLOG_ERROR);
2096 mutex_unlock(&sde_enc->rc_lock);
2097 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002098 }
2099
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002100 /**
2101 * expect to arrive here only if in either idle state or pre-off
2102 * and in IDLE state the resources are already disabled
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002103 */
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002104 if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002105 _sde_encoder_resource_control_helper(drm_enc, false);
2106
2107 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002108 SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002109
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002110 sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;
2111
2112 mutex_unlock(&sde_enc->rc_lock);
2113 break;
2114
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002115 case SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002116 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002117 if (kthread_cancel_delayed_work_sync(
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002118 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002119 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2120 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002121
2122 mutex_lock(&sde_enc->rc_lock);
2123
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002124 /* return if the resource control is already in ON state */
2125 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2126 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002127 ret = _sde_encoder_resource_control_helper(drm_enc,
2128 true);
2129 if (ret) {
2130 SDE_ERROR_ENC(sde_enc,
2131 "sw_event:%d, rc in state %d\n",
2132 sw_event, sde_enc->rc_state);
2133 SDE_EVT32(DRMID(drm_enc), sw_event,
2134 sde_enc->rc_state,
2135 SDE_EVTLOG_ERROR);
2136 mutex_unlock(&sde_enc->rc_lock);
2137 return ret;
2138 }
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002139
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002140 _sde_encoder_resource_control_rsc_update(drm_enc, true);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002141
2142 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2143 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002144 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002145 }
2146
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002147 ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2148 if (ret && ret != -EWOULDBLOCK) {
2149 SDE_ERROR_ENC(sde_enc,
2150 "wait for commit done returned %d\n",
2151 ret);
2152 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2153 ret, SDE_EVTLOG_ERROR);
2154 mutex_unlock(&sde_enc->rc_lock);
2155 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002156 }
2157
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002158 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002159 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002160
2161 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2162 SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);
2163
2164 sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
2165 mutex_unlock(&sde_enc->rc_lock);
2166 break;
2167
2168 case SDE_ENC_RC_EVENT_POST_MODESET:
2169 mutex_lock(&sde_enc->rc_lock);
2170
2171 /* return if the resource control is already in ON state */
2172 if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
2173 SDE_ERROR_ENC(sde_enc,
2174 "sw_event:%d, rc:%d !MODESET state\n",
2175 sw_event, sde_enc->rc_state);
2176 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2177 SDE_EVTLOG_ERROR);
2178 mutex_unlock(&sde_enc->rc_lock);
2179 return -EINVAL;
2180 }
2181
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002182 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002183 _sde_encoder_irq_control(drm_enc, true);
2184
2185 _sde_encoder_update_rsc_client(drm_enc, NULL, true);
2186
2187 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2188 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);
2189
2190 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2191
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002192 mutex_unlock(&sde_enc->rc_lock);
2193 break;
2194
2195 case SDE_ENC_RC_EVENT_ENTER_IDLE:
2196 mutex_lock(&sde_enc->rc_lock);
2197
2198 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002199 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002200 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002201 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2202 SDE_EVTLOG_ERROR);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002203 mutex_unlock(&sde_enc->rc_lock);
2204 return 0;
2205 }
2206
2207 /*
2208 * if we are in ON but a frame was just kicked off,
2209 * ignore the IDLE event, it's probably a stale timer event
2210 */
2211 if (sde_enc->frame_busy_mask[0]) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002212 SDE_ERROR_ENC(sde_enc,
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002213 "sw_event:%d, rc:%d frame pending\n",
2214 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002215 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2216 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002217 mutex_unlock(&sde_enc->rc_lock);
2218 return 0;
2219 }
2220
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002221 if (is_vid_mode) {
2222 _sde_encoder_irq_control(drm_enc, false);
2223 } else {
2224 /* disable all the clks and resources */
2225 _sde_encoder_resource_control_rsc_update(drm_enc,
2226 false);
2227 _sde_encoder_resource_control_helper(drm_enc, false);
2228 }
2229
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002230 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002231 SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002232 sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
2233
2234 mutex_unlock(&sde_enc->rc_lock);
2235 break;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002236 case SDE_ENC_RC_EVENT_EARLY_WAKEUP:
2237 if (!sde_enc->crtc ||
2238 sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2239 SDE_DEBUG_ENC(sde_enc,
2240 "invalid crtc:%d or crtc index:%d , sw_event:%u\n",
2241 sde_enc->crtc == NULL,
2242 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL,
2243 sw_event);
2244 return -EINVAL;
2245 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002246
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002247 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2248
2249 mutex_lock(&sde_enc->rc_lock);
2250
2251 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2252 if (sde_enc->cur_master &&
2253 sde_enc->cur_master->ops.is_autorefresh_enabled)
2254 autorefresh_enabled =
2255 sde_enc->cur_master->ops.is_autorefresh_enabled(
2256 sde_enc->cur_master);
2257 if (autorefresh_enabled) {
2258 SDE_DEBUG_ENC(sde_enc,
2259 "not handling early wakeup since auto refresh is enabled\n");
2260 mutex_lock(&sde_enc->rc_lock);
2261 return 0;
2262 }
2263
2264 if (!sde_crtc_frame_pending(sde_enc->crtc))
2265 kthread_mod_delayed_work(&disp_thread->worker,
2266 &sde_enc->delayed_off_work,
2267 msecs_to_jiffies(
2268 IDLE_POWERCOLLAPSE_DURATION));
2269 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2270 /* enable all the clks and resources */
2271 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2272 _sde_encoder_resource_control_helper(drm_enc, true);
2273
2274 kthread_mod_delayed_work(&disp_thread->worker,
2275 &sde_enc->delayed_off_work,
2276 msecs_to_jiffies(
2277 IDLE_POWERCOLLAPSE_DURATION));
2278
2279 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2280 }
2281
2282 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2283 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE8);
2284
2285 mutex_unlock(&sde_enc->rc_lock);
2286 break;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002287 default:
Dhaval Patela5f75952017-07-25 11:17:41 -07002288 SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002289 SDE_ERROR("unexpected sw_event: %d\n", sw_event);
2290 break;
2291 }
2292
Dhaval Patela5f75952017-07-25 11:17:41 -07002293 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002294 sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
2295 return 0;
2296}
2297
/**
 * sde_encoder_virt_mode_set - drm encoder mode_set callback for the virtual
 *	encoder; reserves HW resources and propagates the mode to each
 *	physical encoder.
 * @drm_enc:	base drm encoder object
 * @mode:	requested display mode
 * @adj_mode:	adjusted mode actually programmed to the hardware
 *
 * For a seamless dynamic mode switch (DMS), resources are restored via
 * SDE_ENC_RC_EVENT_PRE_MODESET before release and re-acquired via
 * SDE_ENC_RC_EVENT_POST_MODESET after the new reservation.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	/* mode_set programs registers; bail if power rails are down */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/* find the connector currently attached to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* cache per-mode info (topology, compression, etc.) on conn state */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the pingpong blocks the RM just assigned to this encoder */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect any DSC blocks assigned for compressed modes */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each physical encoder its pingpong and forward the mode_set */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2422
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002423void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2424{
2425 struct sde_encoder_virt *sde_enc;
2426 struct sde_encoder_phys *phys;
2427 int i;
2428
2429 if (!drm_enc) {
2430 SDE_ERROR("invalid parameters\n");
2431 return;
2432 }
2433
2434 sde_enc = to_sde_encoder_virt(drm_enc);
2435 if (!sde_enc) {
2436 SDE_ERROR("invalid sde encoder\n");
2437 return;
2438 }
2439
2440 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2441 phys = sde_enc->phys_encs[i];
2442 if (phys && phys->ops.control_te)
2443 phys->ops.control_te(phys, enable);
2444 }
2445}
2446
/*
 * _sde_encoder_virt_enable_helper - common post-enable programming shared by
 * the enable and restore paths: selects DP audio interface, resets UBWC,
 * reprograms the vsync source, enables tearcheck and clears the cached
 * connector ROIs.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the DP interface when driving a DisplayPort sink */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
			sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
					sde_enc->cur_master->hw_mdptop);

	/* reset UBWC configuration through the MDP TOP block, if supported */
	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* forget stale ROIs from any previous use case */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2489
2490void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2491{
2492 struct sde_encoder_virt *sde_enc = NULL;
2493 int i;
2494
2495 if (!drm_enc) {
2496 SDE_ERROR("invalid encoder\n");
2497 return;
2498 }
2499 sde_enc = to_sde_encoder_virt(drm_enc);
2500
2501 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2502 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2503
2504 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2505 phys->ops.restore(phys);
2506 }
2507
2508 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2509 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2510
2511 _sde_encoder_virt_enable_helper(drm_enc);
2512}
2513
/*
 * sde_encoder_virt_enable - drm encoder enable callback. Elects the master
 * physical encoder, brings up clocks/resources through the RC state machine,
 * then enables (or, on a seamless DMS switch, restores) each physical
 * encoder: slaves first, master last.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	comp_info = &mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* elect the master: first phys encoder whose is_master() says so */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* power up clocks/resources before touching the hardware */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* bring up the slave physical encoders; master handled after loop */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* MISR capture only applies to video-mode capable displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* master last: restore on seamless DMS, otherwise full enable */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
2612
2613static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2614{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002615 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002616 struct msm_drm_private *priv;
2617 struct sde_kms *sde_kms;
Sandeep Panda318cff12017-10-20 13:16:03 +05302618 struct drm_connector *drm_conn = NULL;
Clarence Iped3327b2017-11-01 13:13:58 -04002619 enum sde_intf_mode intf_mode;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002620 int i = 0;
2621
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002622 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002623 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002624 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002625 } else if (!drm_enc->dev) {
2626 SDE_ERROR("invalid dev\n");
2627 return;
2628 } else if (!drm_enc->dev->dev_private) {
2629 SDE_ERROR("invalid dev_private\n");
2630 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002631 }
2632
Alan Kwong1124f1f2017-11-10 18:14:39 -05002633 if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
2634 SDE_ERROR("power resource is not enabled\n");
2635 return;
2636 }
2637
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002638 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002639 SDE_DEBUG_ENC(sde_enc, "\n");
2640
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002641 priv = drm_enc->dev->dev_private;
2642 sde_kms = to_sde_kms(priv->kms);
Clarence Iped3327b2017-11-01 13:13:58 -04002643 intf_mode = sde_encoder_get_intf_mode(drm_enc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002644
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002645 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002646
Sandeep Panda318cff12017-10-20 13:16:03 +05302647 /* Disable ESD thread */
2648 drm_conn = sde_enc->cur_master->connector;
2649 sde_connector_schedule_status_work(drm_conn, false);
2650
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002651 /* wait for idle */
2652 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2653
Clarence Iped3327b2017-11-01 13:13:58 -04002654 /*
2655 * For primary command mode encoders, execute the resource control
2656 * pre-stop operations before the physical encoders are disabled, to
2657 * allow the rsc to transition its states properly.
2658 *
2659 * For other encoder types, rsc should not be enabled until after
2660 * they have been fully disabled, so delay the pre-stop operations
2661 * until after the physical disable calls have returned.
2662 */
2663 if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
2664 sde_encoder_resource_control(drm_enc,
2665 SDE_ENC_RC_EVENT_PRE_STOP);
2666 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2667 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002668
Clarence Iped3327b2017-11-01 13:13:58 -04002669 if (phys && phys->ops.disable)
2670 phys->ops.disable(phys);
2671 }
2672 } else {
2673 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2674 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002675
Clarence Iped3327b2017-11-01 13:13:58 -04002676 if (phys && phys->ops.disable)
2677 phys->ops.disable(phys);
2678 }
2679 sde_encoder_resource_control(drm_enc,
2680 SDE_ENC_RC_EVENT_PRE_STOP);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002681 }
2682
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002683 /*
2684 * disable dsc after the transfer is complete (for command mode)
2685 * and after physical encoder is disabled, to make sure timing
2686 * engine is already disabled (for video mode).
2687 */
2688 _sde_encoder_dsc_disable(sde_enc);
2689
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002690 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2691
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002692 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2693 if (sde_enc->phys_encs[i])
2694 sde_enc->phys_encs[i]->connector = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002695 }
2696
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002697 sde_enc->cur_master = NULL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002698 /*
2699 * clear the cached crtc in sde_enc on use case finish, after all the
2700 * outstanding events and timers have been completed
2701 */
2702 sde_enc->crtc = NULL;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002703
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002704 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002705
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002706 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002707}
2708
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002709static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002710 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002711{
2712 int i = 0;
2713
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002714 for (i = 0; i < catalog->intf_count; i++) {
2715 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002716 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002717 return catalog->intf[i].id;
2718 }
2719 }
2720
2721 return INTF_MAX;
2722}
2723
Alan Kwongbb27c092016-07-20 16:41:25 -04002724static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2725 enum sde_intf_type type, u32 controller_id)
2726{
2727 if (controller_id < catalog->wb_count)
2728 return catalog->wb[controller_id].id;
2729
2730 return WB_MAX;
2731}
2732
Dhaval Patel81e87882016-10-19 21:41:56 -07002733static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
2734 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002735{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002736 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002737 unsigned long lock_flags;
2738
Dhaval Patel81e87882016-10-19 21:41:56 -07002739 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002740 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002741
Narendra Muppalla77b32932017-05-10 13:53:11 -07002742 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002743 sde_enc = to_sde_encoder_virt(drm_enc);
2744
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002745 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002746 if (sde_enc->crtc_vblank_cb)
2747 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002748 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07002749
2750 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07002751 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002752}
2753
2754static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
2755 struct sde_encoder_phys *phy_enc)
2756{
2757 if (!phy_enc)
2758 return;
2759
Narendra Muppalla77b32932017-05-10 13:53:11 -07002760 SDE_ATRACE_BEGIN("encoder_underrun_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002761 atomic_inc(&phy_enc->underrun_cnt);
Lloyd Atkinson64b07dd2016-12-12 17:10:57 -05002762 SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
Ingrid Gallardo36ee68d2017-08-30 17:14:33 -07002763
2764 trace_sde_encoder_underrun(DRMID(drm_enc),
2765 atomic_read(&phy_enc->underrun_cnt));
2766
2767 SDE_DBG_CTRL("stop_ftrace");
2768 SDE_DBG_CTRL("panic_underrun");
2769
Narendra Muppalla77b32932017-05-10 13:53:11 -07002770 SDE_ATRACE_END("encoder_underrun_callback");
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002771}
2772
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002773void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2774 void (*vbl_cb)(void *), void *vbl_data)
2775{
2776 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2777 unsigned long lock_flags;
2778 bool enable;
2779 int i;
2780
2781 enable = vbl_cb ? true : false;
2782
Clarence Ip19af1362016-09-23 14:57:51 -04002783 if (!drm_enc) {
2784 SDE_ERROR("invalid encoder\n");
2785 return;
2786 }
2787 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002788 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002789
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002790 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002791 sde_enc->crtc_vblank_cb = vbl_cb;
2792 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002793 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002794
2795 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2796 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2797
2798 if (phys && phys->ops.control_vblank_irq)
2799 phys->ops.control_vblank_irq(phys, enable);
2800 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002801 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002802}
2803
Alan Kwong628d19e2016-10-31 13:50:13 -04002804void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2805 void (*frame_event_cb)(void *, u32 event),
2806 void *frame_event_cb_data)
2807{
2808 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2809 unsigned long lock_flags;
2810 bool enable;
2811
2812 enable = frame_event_cb ? true : false;
2813
2814 if (!drm_enc) {
2815 SDE_ERROR("invalid encoder\n");
2816 return;
2817 }
2818 SDE_DEBUG_ENC(sde_enc, "\n");
2819 SDE_EVT32(DRMID(drm_enc), enable, 0);
2820
2821 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2822 sde_enc->crtc_frame_event_cb = frame_event_cb;
2823 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2824 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2825}
2826
/*
 * sde_encoder_frame_done_callback - frame completion notification from a
 * physical encoder. For done/error/panel-dead events, clears the caller's
 * bit in frame_busy_mask; when the last physical encoder goes idle, pushes
 * FRAME_DONE into the RC state machine and notifies the crtc callback.
 * All other events are forwarded to the crtc callback immediately.
 *
 * NOTE(review): drm_enc is dereferenced without a NULL check — presumably
 * the physical encoders only invoke this with a valid parent; confirm
 * against the phys encoder implementations.
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (SDE_ENCODER_FRAME_EVENT_DONE
			| SDE_ENCODER_FRAME_EVENT_ERROR
			| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!sde_enc->frame_busy_mask[0]) {
			/**
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			if (sde_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, sde_enc->frame_busy_mask);
				SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
			}
		}

		/* last phys encoder finished: release RC and notify crtc */
		if (!sde_enc->frame_busy_mask[0]) {
			sde_encoder_resource_control(drm_enc,
					SDE_ENC_RC_EVENT_FRAME_DONE);

			if (sde_enc->crtc_frame_event_cb)
				sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		/* non-completion events are forwarded without accounting */
		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
2871
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002872int sde_encoder_idle_request(struct drm_encoder *drm_enc)
2873{
2874 struct sde_encoder_virt *sde_enc;
2875
2876 if (!drm_enc) {
2877 SDE_ERROR("invalid drm encoder\n");
2878 return -EINVAL;
2879 }
2880
2881 sde_enc = to_sde_encoder_virt(drm_enc);
2882 sde_encoder_resource_control(&sde_enc->base,
2883 SDE_ENC_RC_EVENT_ENTER_IDLE);
2884
2885 return 0;
2886}
2887
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002888static void sde_encoder_off_work(struct kthread_work *work)
2889{
2890 struct sde_encoder_virt *sde_enc = container_of(work,
2891 struct sde_encoder_virt, delayed_off_work.work);
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002892 struct drm_encoder *drm_enc;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002893
2894 if (!sde_enc) {
2895 SDE_ERROR("invalid sde encoder\n");
2896 return;
2897 }
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002898 drm_enc = &sde_enc->base;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002899
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002900 sde_encoder_idle_request(drm_enc);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002901}
2902
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the phys encoder's pending-kickoff count, optionally ORs
 * extra bits into the CTL's pending flush mask, then invokes the phys
 * encoder's trigger_flush op. Called with enc_spinlock held from
 * _sde_encoder_kickoff_phys.
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	/* SKIP-role encoders do not receive irqs; never flush them */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* retire-fence count is tracked on the master phys only */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	/* log the final pending-flush mask when the CTL can report it */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				pending_kickoff_cnt, ctl->idx - CTL_0,
				ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				ctl->idx - CTL_0, pending_kickoff_cnt);
}
2958
2959/**
2960 * _sde_encoder_trigger_start - trigger start for a physical encoder
2961 * phys: Pointer to physical encoder structure
2962 */
2963static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2964{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002965 struct sde_hw_ctl *ctl;
2966
Clarence Ip110d15c2016-08-16 14:44:41 -04002967 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002968 SDE_ERROR("invalid argument(s)\n");
2969 return;
2970 }
2971
2972 if (!phys->hw_pp) {
2973 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002974 return;
2975 }
2976
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002977 ctl = phys->hw_ctl;
2978 if (phys->split_role == ENC_ROLE_SKIP) {
2979 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2980 "skip start pp%d ctl%d\n",
2981 phys->hw_pp->idx - PINGPONG_0,
2982 ctl->idx - CTL_0);
2983 return;
2984 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002985 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
2986 phys->ops.trigger_start(phys);
2987}
2988
Alan Kwong4212dd42017-09-19 17:22:33 -04002989void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
2990{
2991 struct sde_hw_ctl *ctl;
2992
2993 if (!phys_enc) {
2994 SDE_ERROR("invalid encoder\n");
2995 return;
2996 }
2997
2998 ctl = phys_enc->hw_ctl;
2999 if (ctl && ctl->ops.trigger_flush)
3000 ctl->ops.trigger_flush(ctl);
3001}
3002
Clarence Ip110d15c2016-08-16 14:44:41 -04003003void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
3004{
3005 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04003006
3007 if (!phys_enc) {
3008 SDE_ERROR("invalid encoder\n");
3009 return;
3010 }
3011
3012 ctl = phys_enc->hw_ctl;
3013 if (ctl && ctl->ops.trigger_start) {
3014 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04003015 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04003016 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003017}
3018
/*
 * _sde_encoder_wait_timeout - wait for info->atomic_cnt to reach zero
 * @drm_id: drm object id, used for event logging only
 * @hw_id: hardware block id, used for event logging only
 * @timeout_ms: maximum time to wait, in milliseconds
 * @info: wait parameters (wait queue and atomic counter)
 *
 * Returns the wait_event_timeout() result: 0 on timeout, otherwise the
 * remaining jiffies. Re-arms the wait while the counter is still non-zero,
 * the jiffies-based wait reported timeout (rc == 0), and the ktime
 * deadline has not actually elapsed yet.
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	/* wall-clock deadline, independent of the jiffies timer */
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
3040
/*
 * sde_encoder_helper_wait_event_timeout - wait with timer-irq-delay tolerance
 * @drm_id: drm object id, used for event logging
 * @hw_id: hardware block id, used for event logging
 * @info: wait parameters (wait queue, atomic counter, timeout_ms)
 *
 * Returns 0 on timeout, non-zero (remaining jiffies) on success.
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
	struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/*
	 * Handle the case where irqs were disabled and the timer irq itself
	 * was delayed: if the wait timed out, the counter is still non-zero,
	 * and the timeout was detected late (more than
	 * FAULT_TOLERENCE_DELTA_IN_MS past the expected deadline), allow one
	 * additional wait of FAULT_TOLERENCE_WAIT_IN_MS for the event.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
	    (ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
	    FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
			FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
3062
/*
 * sde_encoder_helper_hw_reset - recover a phys encoder after a hw error
 * @phys_enc: physical encoder to recover
 *
 * For the master phys encoder with a connector attached, requests a
 * connector soft reset of the display and dumps debug state on failure,
 * then marks the encoder enabled again.
 *
 * NOTE(review): ctl->ops.reset is checked as a precondition but is never
 * invoked in this function -- confirm the actual CTL block reset is
 * performed elsewhere before relying on this helper for it.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* only the master drives the display-level soft reset */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
3102
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 *
 * Two passes are made over the phys encoders: a first, non-atomic pass
 * that flushes the LUT reg-DMA (which may block), then a second pass
 * under enc_spinlock that updates frame_busy_mask and triggers the CTL
 * flush/start.
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		bool wait_for_dma = false;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->ops.wait_dma_trigger)
			wait_for_dma = phys->ops.wait_dma_trigger(phys);

		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
					wait_for_dma);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/*
		 * phys encoders needing a single (combined) flush defer it;
		 * their pending bits are accumulated and sent via the master
		 */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
3195
/*
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - swap the two phys
 *	encoders' interface assignment for ppsplit right-only updates
 * @drm_enc: Pointer to drm encoder structure
 * @affected_displays: in/out bitmask of displays touched by this update
 * @num_active_phys: number of bits set in @affected_displays
 *
 * Only acts on the SDE_RM_TOPOLOGY_PPSPLIT topology; restores the
 * original interface assignment again on the next non-right-only update.
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3254
/*
 * _sde_encoder_update_master - reassign split roles for this kickoff
 * @drm_enc: Pointer to drm encoder structure
 * @params: kickoff parameters; affected_displays selects active phys encs
 *
 * Walks the phys encoders and assigns SOLO/MASTER/SLAVE/SKIP roles based
 * on which displays this update touches; the first active phys becomes
 * the master (or SOLO when only one is active) and cur_master is updated
 * accordingly. No-op for single-phys encoders.
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
			num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/* first active phys gets MASTER (or SOLO when alone) */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
3317
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303318bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003319{
3320 struct sde_encoder_virt *sde_enc;
3321 struct msm_display_info *disp_info;
3322
3323 if (!drm_enc) {
3324 SDE_ERROR("invalid encoder\n");
3325 return false;
3326 }
3327
3328 sde_enc = to_sde_encoder_virt(drm_enc);
3329 disp_info = &sde_enc->disp_info;
3330
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303331 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003332}
3333
Dhaval Patel0e558f42017-04-30 00:51:40 -07003334void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3335{
3336 struct sde_encoder_virt *sde_enc;
3337 struct sde_encoder_phys *phys;
3338 unsigned int i;
3339 struct sde_hw_ctl *ctl;
3340 struct msm_display_info *disp_info;
3341
3342 if (!drm_enc) {
3343 SDE_ERROR("invalid encoder\n");
3344 return;
3345 }
3346 sde_enc = to_sde_encoder_virt(drm_enc);
3347 disp_info = &sde_enc->disp_info;
3348
3349 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3350 phys = sde_enc->phys_encs[i];
3351
3352 if (phys && phys->hw_ctl) {
3353 ctl = phys->hw_ctl;
3354 if (ctl->ops.clear_pending_flush)
3355 ctl->ops.clear_pending_flush(ctl);
3356
3357 /* update only for command mode primary ctl */
3358 if ((phys == sde_enc->cur_master) &&
3359 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3360 && ctl->ops.trigger_pending)
3361 ctl->ops.trigger_pending(ctl);
3362 }
3363 }
3364}
3365
Ping Li8430ee12017-02-24 14:14:44 -08003366static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3367{
3368 void *dither_cfg;
3369 int ret = 0;
3370 size_t len = 0;
3371 enum sde_rm_topology_name topology;
3372
3373 if (!phys || !phys->connector || !phys->hw_pp ||
3374 !phys->hw_pp->ops.setup_dither)
3375 return;
3376 topology = sde_connector_get_topology_name(phys->connector);
3377 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3378 (phys->split_role == ENC_ROLE_SLAVE))
3379 return;
3380
3381 ret = sde_connector_get_dither_cfg(phys->connector,
3382 phys->connector->state, &dither_cfg, &len);
3383 if (!ret)
3384 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
3385}
3386
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003387static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3388 struct drm_display_mode *mode)
3389{
3390 u64 pclk_rate;
3391 u32 pclk_period;
3392 u32 line_time;
3393
3394 /*
3395 * For linetime calculation, only operate on master encoder.
3396 */
3397 if (!sde_enc->cur_master)
3398 return 0;
3399
3400 if (!sde_enc->cur_master->ops.get_line_count) {
3401 SDE_ERROR("get_line_count function not defined\n");
3402 return 0;
3403 }
3404
3405 pclk_rate = mode->clock; /* pixel clock in kHz */
3406 if (pclk_rate == 0) {
3407 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3408 return 0;
3409 }
3410
3411 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3412 if (pclk_period == 0) {
3413 SDE_ERROR("pclk period is 0\n");
3414 return 0;
3415 }
3416
3417 /*
3418 * Line time calculation based on Pixel clock and HTOTAL.
3419 * Final unit is in ns.
3420 */
3421 line_time = (pclk_period * mode->htotal) / 1000;
3422 if (line_time == 0) {
3423 SDE_ERROR("line time calculation is 0\n");
3424 return 0;
3425 }
3426
3427 SDE_DEBUG_ENC(sde_enc,
3428 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3429 pclk_rate, pclk_period, line_time);
3430
3431 return line_time;
3432}
3433
/*
 * _sde_encoder_wakeup_time - estimate the ktime of the next vsync
 * @drm_enc: encoder to query; uses cur_master's cached mode and line count
 * @wakeup_time: output, absolute ktime at which the next vsync is expected
 *
 * Return: 0 on success, -EINVAL when the line time or time-to-vsync
 * cannot be computed.
 */
static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct sde_encoder_virt *sde_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	sde_enc = to_sde_encoder_virt(drm_enc);
	mode = &sde_enc->cur_master->cached_mode;

	line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);

	vtotal = mode->vtotal;
	/* at or past vtotal the counter is about to wrap: wait a full frame */
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	SDE_DEBUG_ENC(sde_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}
3475
/*
 * sde_encoder_vsync_event_handler - vsync_event_timer expiry callback
 * @data: drm_encoder pointer smuggled through the timer's unsigned long
 *
 * Validates encoder/crtc state and queues vsync_event_work onto the
 * owning crtc's event-thread worker; the actual work is done in
 * sde_encoder_vsync_event_work_handler.
 */
static void sde_encoder_vsync_event_handler(unsigned long data)
{
	struct drm_encoder *drm_enc = (struct drm_encoder *) data;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *event_thread;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid encoder parameters\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	if (!sde_enc->crtc) {
		SDE_ERROR("invalid crtc");
		return;
	}

	/* crtc index selects the per-crtc event thread; bound-check it */
	if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		SDE_ERROR("invalid crtc index:%u\n",
				sde_enc->crtc->index);
		return;
	}
	event_thread = &priv->event_thread[sde_enc->crtc->index];
	/* address of an array element is never NULL; defensive check only */
	if (!event_thread) {
		SDE_ERROR("event_thread not found for crtc:%d\n",
				sde_enc->crtc->index);
		return;
	}

	kthread_queue_work(&event_thread->worker,
				&sde_enc->vsync_event_work);
}
3510
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08003511static void sde_encoder_input_event_work_handler(struct kthread_work *work)
3512{
3513 struct sde_encoder_virt *sde_enc = container_of(work,
3514 struct sde_encoder_virt, input_event_work);
3515
3516 if (!sde_enc) {
3517 SDE_ERROR("invalid sde encoder\n");
3518 return;
3519 }
3520
3521 sde_encoder_resource_control(&sde_enc->base,
3522 SDE_ENC_RC_EVENT_EARLY_WAKEUP);
3523}
3524
3525static int _sde_encoder_input_connect(struct input_handler *handler,
3526 struct input_dev *dev, const struct input_device_id *id)
3527{
3528 struct input_handle *handle;
3529 int rc = 0;
3530
3531 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
3532 if (!handle)
3533 return -ENOMEM;
3534
3535 handle->dev = dev;
3536 handle->handler = handler;
3537 handle->name = handler->name;
3538
3539 rc = input_register_handle(handle);
3540 if (rc) {
3541 pr_err("failed to register input handle\n");
3542 goto error;
3543 }
3544
3545 rc = input_open_device(handle);
3546 if (rc) {
3547 pr_err("failed to open input device\n");
3548 goto error_unregister;
3549 }
3550
3551 return 0;
3552
3553error_unregister:
3554 input_unregister_handle(handle);
3555
3556error:
3557 kfree(handle);
3558
3559 return rc;
3560}
3561
/*
 * _sde_encoder_input_disconnect - input handler disconnect callback
 * @handle: handle allocated in _sde_encoder_input_connect
 *
 * Teardown order matters: close the device, unregister the handle, then
 * free the allocation made in _sde_encoder_input_connect.
 */
static void _sde_encoder_input_disconnect(struct input_handle *handle)
{
	 input_close_device(handle);
	 input_unregister_handle(handle);
	 kfree(handle);
}
3568
/**
 * Structure for specifying event parameters on which to receive callbacks.
 * This structure will trigger a callback in case of a touch event
 * (specified by EV_ABS) where there is a change in the X or Y multi-touch
 * position coordinates.
 */
static const struct input_device_id sde_input_ids[] = {
	{
		.flags = INPUT_DEVICE_ID_MATCH_EVBIT,
		.evbit = { BIT_MASK(EV_ABS) },
		.absbit = { [BIT_WORD(ABS_MT_POSITION_X)] =
				BIT_MASK(ABS_MT_POSITION_X) |
				BIT_MASK(ABS_MT_POSITION_Y) },
	},
	{ },
};
3584
3585static int _sde_encoder_input_handler(
3586 struct sde_encoder_virt *sde_enc)
3587{
3588 struct input_handler *input_handler = NULL;
3589 int rc = 0;
3590
3591 if (sde_enc->input_handler) {
3592 SDE_ERROR_ENC(sde_enc,
3593 "input_handle is active. unexpected\n");
3594 return -EINVAL;
3595 }
3596
3597 input_handler = kzalloc(sizeof(*sde_enc->input_handler), GFP_KERNEL);
3598 if (!input_handler)
3599 return -ENOMEM;
3600
3601 input_handler->event = sde_encoder_input_event_handler;
3602 input_handler->connect = _sde_encoder_input_connect;
3603 input_handler->disconnect = _sde_encoder_input_disconnect;
3604 input_handler->name = "sde";
3605 input_handler->id_table = sde_input_ids;
3606 input_handler->private = sde_enc;
3607
3608 rc = input_register_handler(input_handler);
3609 if (rc) {
3610 SDE_ERROR_ENC(sde_enc,
3611 "input_register_handler failed, rc= %d\n", rc);
3612 kfree(input_handler);
3613 return rc;
3614 }
3615
3616 sde_enc->input_handler = input_handler;
3617
3618 return rc;
3619}
3620
/*
 * sde_encoder_vsync_event_work_handler - re-arm the vsync event timer
 * @work: kthread work embedded in sde_encoder_virt.vsync_event_work
 *
 * Takes a power vote for the duration of the work. When autorefresh is
 * enabled on the current master, computes the next wakeup (vsync) time
 * and re-arms vsync_event_timer; otherwise just drops the vote.
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* hold a power vote while querying/arming; released via exit path */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
				sde_enc->cur_master);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		goto exit;

	rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
	if (rc)
		goto exit;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));

exit:
	/* always balance the vote taken above */
	_sde_encoder_power_enable(sde_enc, false);
}
3661
/*
 * sde_encoder_poll_line_counts - poll until the line counter wraps
 * @drm_enc: encoder whose master phys provides get_line_count
 *
 * Polls (with short sleeps) the master's line counter until it decreases
 * -- i.e. the display has started a new frame -- or until ~50ms of
 * wall-clock time has elapsed.
 *
 * Return: 0 when a wrap is observed, -EINVAL on invalid state,
 * -ETIMEDOUT when no wrap is seen before the deadline.
 */
int sde_encoder_poll_line_counts(struct drm_encoder *drm_enc)
{
	static const uint64_t timeout_us = 50000;
	static const uint64_t sleep_us = 20;
	struct sde_encoder_virt *sde_enc;
	ktime_t cur_ktime, exp_ktime;
	uint32_t line_count, tmp, i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc->cur_master ||
			!sde_enc->cur_master->ops.get_line_count) {
		SDE_DEBUG_ENC(sde_enc, "can't get master line count\n");
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_ERROR);
		return -EINVAL;
	}

	exp_ktime = ktime_add_ms(ktime_get(), timeout_us / 1000);

	line_count = sde_enc->cur_master->ops.get_line_count(
			sde_enc->cur_master);

	/* iteration cap is 2x the deadline; ktime check below is decisive */
	for (i = 0; i < (timeout_us * 2 / sleep_us); ++i) {
		tmp = line_count;
		line_count = sde_enc->cur_master->ops.get_line_count(
				sde_enc->cur_master);
		/* a decreasing count means the counter wrapped: new frame */
		if (line_count < tmp) {
			SDE_EVT32(DRMID(drm_enc), line_count);
			return 0;
		}

		cur_ktime = ktime_get();
		if (ktime_compare_safe(exp_ktime, cur_ktime) <= 0)
			break;

		usleep_range(sleep_us / 2, sleep_us);
	}

	SDE_EVT32(DRMID(drm_enc), line_count, SDE_EVTLOG_ERROR);
	return -ETIMEDOUT;
}
3706
Clarence Ip85f4f4532017-10-04 12:10:13 -04003707int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
Alan Kwong4aacd532017-02-04 18:51:33 -08003708 struct sde_encoder_kickoff_params *params)
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003709{
3710 struct sde_encoder_virt *sde_enc;
3711 struct sde_encoder_phys *phys;
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003712 struct sde_kms *sde_kms = NULL;
3713 struct msm_drm_private *priv = NULL;
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003714 bool needs_hw_reset = false;
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003715 uint32_t ln_cnt1, ln_cnt2;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003716 unsigned int i;
Clarence Ip85f4f4532017-10-04 12:10:13 -04003717 int rc, ret = 0;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003718
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003719 if (!drm_enc || !params || !drm_enc->dev ||
3720 !drm_enc->dev->dev_private) {
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003721 SDE_ERROR("invalid args\n");
Clarence Ip85f4f4532017-10-04 12:10:13 -04003722 return -EINVAL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003723 }
3724 sde_enc = to_sde_encoder_virt(drm_enc);
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003725 priv = drm_enc->dev->dev_private;
3726 sde_kms = to_sde_kms(priv->kms);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003727
Clarence Ip19af1362016-09-23 14:57:51 -04003728 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003729 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003730
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003731 /* save this for later, in case of errors */
3732 if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
3733 ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
3734 sde_enc->cur_master);
3735 else
3736 ln_cnt1 = -EINVAL;
3737
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003738 /* prepare for next kickoff, may include waiting on previous kickoff */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003739 SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003740 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003741 phys = sde_enc->phys_encs[i];
Jayant Shekhar98e78a82018-01-12 17:50:55 +05303742 params->is_primary = sde_enc->disp_info.is_primary;
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003743 if (phys) {
Clarence Ip85f4f4532017-10-04 12:10:13 -04003744 if (phys->ops.prepare_for_kickoff) {
3745 rc = phys->ops.prepare_for_kickoff(
3746 phys, params);
3747 if (rc)
3748 ret = rc;
3749 }
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003750 if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
3751 needs_hw_reset = true;
Ping Li8430ee12017-02-24 14:14:44 -08003752 _sde_encoder_setup_dither(phys);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003753 }
3754 }
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003755 SDE_ATRACE_END("enc_prepare_for_kickoff");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003756
Alan Kwong1124f1f2017-11-10 18:14:39 -05003757 rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
3758 if (rc) {
3759 SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
3760 return rc;
3761 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003762
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003763 /* if any phys needs reset, reset all phys, in-order */
3764 if (needs_hw_reset) {
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003765 /* query line count before cur_master is updated */
3766 if (sde_enc->cur_master &&
3767 sde_enc->cur_master->ops.get_wr_line_count)
3768 ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
3769 sde_enc->cur_master);
3770 else
3771 ln_cnt2 = -EINVAL;
3772
3773 SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
3774 SDE_EVTLOG_FUNC_CASE1);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003775 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3776 phys = sde_enc->phys_encs[i];
3777 if (phys && phys->ops.hw_reset)
3778 phys->ops.hw_reset(phys);
3779 }
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003780 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003781
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003782 _sde_encoder_update_master(drm_enc, params);
3783
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003784 _sde_encoder_update_roi(drm_enc);
3785
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003786 if (sde_enc->cur_master && sde_enc->cur_master->connector) {
3787 rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003788 if (rc) {
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003789 SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
3790 sde_enc->cur_master->connector->base.id,
3791 rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003792 ret = rc;
3793 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003794 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003795
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003796 if (_sde_encoder_is_dsc_enabled(drm_enc) &&
3797 !sde_kms->splash_data.cont_splash_en) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003798 rc = _sde_encoder_dsc_setup(sde_enc, params);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003799 if (rc) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003800 SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003801 ret = rc;
3802 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003803 }
Clarence Ip85f4f4532017-10-04 12:10:13 -04003804
3805 return ret;
Alan Kwong628d19e2016-10-31 13:50:13 -04003806}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003807
Clarence Ip662698e2017-09-12 18:34:16 -04003808/**
3809 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3810 * with the specified encoder, and unstage all pipes from it
3811 * @encoder: encoder pointer
3812 * Returns: 0 on success
3813 */
3814static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
3815{
3816 struct sde_encoder_virt *sde_enc;
3817 struct sde_encoder_phys *phys;
3818 unsigned int i;
3819 int rc = 0;
3820
3821 if (!drm_enc) {
3822 SDE_ERROR("invalid encoder\n");
3823 return -EINVAL;
3824 }
3825
3826 sde_enc = to_sde_encoder_virt(drm_enc);
3827
3828 SDE_ATRACE_BEGIN("encoder_release_lm");
3829 SDE_DEBUG_ENC(sde_enc, "\n");
3830
3831 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3832 phys = sde_enc->phys_encs[i];
3833 if (!phys)
3834 continue;
3835
3836 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
3837
3838 rc = sde_encoder_helper_reset_mixers(phys, NULL);
3839 if (rc)
3840 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
3841 }
3842
3843 SDE_ATRACE_END("encoder_release_lm");
3844 return rc;
3845}
3846
3847void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
Alan Kwong628d19e2016-10-31 13:50:13 -04003848{
3849 struct sde_encoder_virt *sde_enc;
3850 struct sde_encoder_phys *phys;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003851 ktime_t wakeup_time;
Alan Kwong628d19e2016-10-31 13:50:13 -04003852 unsigned int i;
3853
3854 if (!drm_enc) {
3855 SDE_ERROR("invalid encoder\n");
3856 return;
3857 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07003858 SDE_ATRACE_BEGIN("encoder_kickoff");
Alan Kwong628d19e2016-10-31 13:50:13 -04003859 sde_enc = to_sde_encoder_virt(drm_enc);
3860
3861 SDE_DEBUG_ENC(sde_enc, "\n");
3862
Clarence Ip662698e2017-09-12 18:34:16 -04003863 /* create a 'no pipes' commit to release buffers on errors */
3864 if (is_error)
3865 _sde_encoder_reset_ctl_hw(drm_enc);
3866
Alan Kwong628d19e2016-10-31 13:50:13 -04003867 /* All phys encs are ready to go, trigger the kickoff */
Clarence Ip110d15c2016-08-16 14:44:41 -04003868 _sde_encoder_kickoff_phys(sde_enc);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003869
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003870 /* allow phys encs to handle any post-kickoff business */
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003871 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003872 phys = sde_enc->phys_encs[i];
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003873 if (phys && phys->ops.handle_post_kickoff)
3874 phys->ops.handle_post_kickoff(phys);
3875 }
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003876
3877 if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
3878 !_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
3879 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
3880 mod_timer(&sde_enc->vsync_event_timer,
3881 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
3882 }
3883
Narendra Muppalla77b32932017-05-10 13:53:11 -07003884 SDE_ATRACE_END("encoder_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003885}
3886
Clarence Ip662698e2017-09-12 18:34:16 -04003887int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003888 struct drm_framebuffer *fb)
3889{
3890 struct drm_encoder *drm_enc;
3891 struct sde_hw_mixer_cfg mixer;
3892 struct sde_rm_hw_iter lm_iter;
3893 bool lm_valid = false;
3894
3895 if (!phys_enc || !phys_enc->parent) {
3896 SDE_ERROR("invalid encoder\n");
3897 return -EINVAL;
3898 }
3899
3900 drm_enc = phys_enc->parent;
3901 memset(&mixer, 0, sizeof(mixer));
3902
3903 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003904 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
3905 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
3906
3907 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
3908 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
3909 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
3910
3911 if (!hw_lm)
3912 continue;
3913
3914 /* need to flush LM to remove it */
3915 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
3916 phys_enc->hw_ctl->ops.update_pending_flush)
3917 phys_enc->hw_ctl->ops.update_pending_flush(
3918 phys_enc->hw_ctl,
3919 phys_enc->hw_ctl->ops.get_bitmask_mixer(
3920 phys_enc->hw_ctl, hw_lm->idx));
3921
3922 if (fb) {
3923 /* assume a single LM if targeting a frame buffer */
3924 if (lm_valid)
3925 continue;
3926
3927 mixer.out_height = fb->height;
3928 mixer.out_width = fb->width;
3929
3930 if (hw_lm->ops.setup_mixer_out)
3931 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
3932 }
3933
3934 lm_valid = true;
3935
3936 /* only enable border color on LM */
3937 if (phys_enc->hw_ctl->ops.setup_blendstage)
3938 phys_enc->hw_ctl->ops.setup_blendstage(
Dhaval Patel572cfd22017-06-12 19:33:39 -07003939 phys_enc->hw_ctl, hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003940 }
3941
3942 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04003943 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003944 return -EFAULT;
3945 }
3946 return 0;
3947}
3948
Lloyd Atkinsone123c172017-02-27 13:19:08 -05003949void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
3950{
3951 struct sde_encoder_virt *sde_enc;
3952 struct sde_encoder_phys *phys;
3953 int i;
3954
3955 if (!drm_enc) {
3956 SDE_ERROR("invalid encoder\n");
3957 return;
3958 }
3959 sde_enc = to_sde_encoder_virt(drm_enc);
3960
3961 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3962 phys = sde_enc->phys_encs[i];
3963 if (phys && phys->ops.prepare_commit)
3964 phys->ops.prepare_commit(phys);
3965 }
3966}
3967
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07003968#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003969static int _sde_encoder_status_show(struct seq_file *s, void *data)
3970{
3971 struct sde_encoder_virt *sde_enc;
3972 int i;
3973
3974 if (!s || !s->private)
3975 return -EINVAL;
3976
3977 sde_enc = s->private;
3978
3979 mutex_lock(&sde_enc->enc_lock);
3980 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3981 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3982
3983 if (!phys)
3984 continue;
3985
3986 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
3987 phys->intf_idx - INTF_0,
3988 atomic_read(&phys->vsync_cnt),
3989 atomic_read(&phys->underrun_cnt));
3990
3991 switch (phys->intf_mode) {
3992 case INTF_MODE_VIDEO:
3993 seq_puts(s, "mode: video\n");
3994 break;
3995 case INTF_MODE_CMD:
3996 seq_puts(s, "mode: command\n");
3997 break;
3998 case INTF_MODE_WB_BLOCK:
3999 seq_puts(s, "mode: wb block\n");
4000 break;
4001 case INTF_MODE_WB_LINE:
4002 seq_puts(s, "mode: wb line\n");
4003 break;
4004 default:
4005 seq_puts(s, "mode: ???\n");
4006 break;
4007 }
4008 }
4009 mutex_unlock(&sde_enc->enc_lock);
4010
4011 return 0;
4012}
4013
4014static int _sde_encoder_debugfs_status_open(struct inode *inode,
4015 struct file *file)
4016{
4017 return single_open(file, _sde_encoder_status_show, inode->i_private);
4018}
4019
Dhaval Patelf9245d62017-03-28 16:24:00 -07004020static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304021 const char __user *user_buf, size_t count, loff_t *ppos)
4022{
4023 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07004024 int i = 0, rc;
4025 char buf[MISR_BUFF_SIZE + 1];
4026 size_t buff_copy;
4027 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304028
Dhaval Patelf9245d62017-03-28 16:24:00 -07004029 if (!file || !file->private_data)
4030 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304031
Dhaval Patelf9245d62017-03-28 16:24:00 -07004032 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304033
Dhaval Patelf9245d62017-03-28 16:24:00 -07004034 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
4035 if (copy_from_user(buf, user_buf, buff_copy))
4036 return -EINVAL;
4037
4038 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304039
4040 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07004041 return -EINVAL;
4042
4043 rc = _sde_encoder_power_enable(sde_enc, true);
4044 if (rc)
4045 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304046
4047 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004048 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07004049 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304050 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4051 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4052
Dhaval Patelf9245d62017-03-28 16:24:00 -07004053 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304054 continue;
4055
Dhaval Patelf9245d62017-03-28 16:24:00 -07004056 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304057 }
4058 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004059 _sde_encoder_power_enable(sde_enc, false);
4060
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304061 return count;
4062}
4063
Dhaval Patelf9245d62017-03-28 16:24:00 -07004064static ssize_t _sde_encoder_misr_read(struct file *file,
4065 char __user *user_buff, size_t count, loff_t *ppos)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304066{
4067 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07004068 int i = 0, len = 0;
4069 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
4070 int rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304071
4072 if (*ppos)
4073 return 0;
4074
Dhaval Patelf9245d62017-03-28 16:24:00 -07004075 if (!file || !file->private_data)
4076 return -EINVAL;
4077
4078 sde_enc = file->private_data;
4079
4080 rc = _sde_encoder_power_enable(sde_enc, true);
4081 if (rc)
4082 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304083
4084 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004085 if (!sde_enc->misr_enable) {
4086 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
4087 "disabled\n");
4088 goto buff_check;
4089 } else if (sde_enc->disp_info.capabilities &
4090 ~MSM_DISPLAY_CAP_VID_MODE) {
4091 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
4092 "unsupported\n");
4093 goto buff_check;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304094 }
4095
Dhaval Patelf9245d62017-03-28 16:24:00 -07004096 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4097 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004098
Dhaval Patelf9245d62017-03-28 16:24:00 -07004099 if (!phys || !phys->ops.collect_misr)
4100 continue;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304101
Dhaval Patelf9245d62017-03-28 16:24:00 -07004102 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
4103 "Intf idx:%d\n", phys->intf_idx - INTF_0);
4104 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
4105 phys->ops.collect_misr(phys));
4106 }
4107
4108buff_check:
4109 if (count <= len) {
4110 len = 0;
4111 goto end;
4112 }
4113
4114 if (copy_to_user(user_buff, buf, len)) {
4115 len = -EFAULT;
4116 goto end;
4117 }
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304118
4119 *ppos += len; /* increase offset */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304120
Dhaval Patelf9245d62017-03-28 16:24:00 -07004121end:
4122 mutex_unlock(&sde_enc->enc_lock);
4123 _sde_encoder_power_enable(sde_enc, false);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304124 return len;
4125}
4126
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004127static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004128{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004129 struct sde_encoder_virt *sde_enc;
4130 struct msm_drm_private *priv;
4131 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07004132 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004133
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004134 static const struct file_operations debugfs_status_fops = {
4135 .open = _sde_encoder_debugfs_status_open,
4136 .read = seq_read,
4137 .llseek = seq_lseek,
4138 .release = single_release,
4139 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304140
4141 static const struct file_operations debugfs_misr_fops = {
4142 .open = simple_open,
4143 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004144 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304145 };
4146
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004147 char name[SDE_NAME_SIZE];
4148
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004149 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004150 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004151 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004152 }
4153
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004154 sde_enc = to_sde_encoder_virt(drm_enc);
4155 priv = drm_enc->dev->dev_private;
4156 sde_kms = to_sde_kms(priv->kms);
4157
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004158 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
4159
4160 /* create overall sub-directory for the encoder */
4161 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07004162 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004163 if (!sde_enc->debugfs_root)
4164 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304165
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004166 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004167 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004168 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304169
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004170 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004171 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004172
Alan Kwongf2debb02017-04-05 06:19:29 -07004173 for (i = 0; i < sde_enc->num_phys_encs; i++)
4174 if (sde_enc->phys_encs[i] &&
4175 sde_enc->phys_encs[i]->ops.late_register)
4176 sde_enc->phys_encs[i]->ops.late_register(
4177 sde_enc->phys_encs[i],
4178 sde_enc->debugfs_root);
4179
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004180 return 0;
4181}
4182
4183static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
4184{
4185 struct sde_encoder_virt *sde_enc;
4186
4187 if (!drm_enc)
4188 return;
4189
4190 sde_enc = to_sde_encoder_virt(drm_enc);
4191 debugfs_remove_recursive(sde_enc->debugfs_root);
4192}
4193#else
/* CONFIG_DEBUG_FS disabled: debugfs initialization is a no-op */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
4198
/* CONFIG_DEBUG_FS disabled: debugfs teardown is a no-op */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
4202#endif
4203
/* drm_encoder_funcs .late_register hook: set up the encoder's debugfs */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
4208
/* drm_encoder_funcs .early_unregister hook: tear down the encoder's debugfs */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
4213
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004214static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04004215 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004216 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004217 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004218{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004219 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004220
Clarence Ip19af1362016-09-23 14:57:51 -04004221 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004222
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004223 /*
4224 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
4225 * in this function, check up-front.
4226 */
4227 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
4228 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004229 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004230 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004231 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004232 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004233
Clarence Ipa4039322016-07-15 16:23:59 -04004234 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004235 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004236
4237 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004238 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004239 PTR_ERR(enc));
4240 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4241 }
4242
4243 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4244 ++sde_enc->num_phys_encs;
4245 }
4246
Clarence Ipa4039322016-07-15 16:23:59 -04004247 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004248 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004249
4250 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004251 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004252 PTR_ERR(enc));
4253 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4254 }
4255
4256 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4257 ++sde_enc->num_phys_encs;
4258 }
4259
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004260 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004261}
4262
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004263static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
4264 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04004265{
4266 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04004267
Clarence Ip19af1362016-09-23 14:57:51 -04004268 if (!sde_enc) {
4269 SDE_ERROR("invalid encoder\n");
4270 return -EINVAL;
4271 }
4272
4273 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04004274
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004275 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004276 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004277 sde_enc->num_phys_encs);
4278 return -EINVAL;
4279 }
4280
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004281 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004282
4283 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004284 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004285 PTR_ERR(enc));
4286 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4287 }
4288
4289 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4290 ++sde_enc->num_phys_encs;
4291
4292 return 0;
4293}
4294
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004295static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004296 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04004297 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004298 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004299{
4300 int ret = 0;
4301 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004302 enum sde_intf_type intf_type;
4303 struct sde_encoder_virt_ops parent_ops = {
4304 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07004305 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04004306 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004307 };
4308 struct sde_enc_phys_init_params phys_params;
4309
Clarence Ip19af1362016-09-23 14:57:51 -04004310 if (!sde_enc || !sde_kms) {
4311 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
4312 sde_enc != 0, sde_kms != 0);
4313 return -EINVAL;
4314 }
4315
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004316 memset(&phys_params, 0, sizeof(phys_params));
4317 phys_params.sde_kms = sde_kms;
4318 phys_params.parent = &sde_enc->base;
4319 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004320 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004321
Clarence Ip19af1362016-09-23 14:57:51 -04004322 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004323
Clarence Ipa4039322016-07-15 16:23:59 -04004324 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004325 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
4326 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04004327 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004328 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4329 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07004330 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
4331 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4332 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04004333 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
4334 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
4335 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004336 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04004337 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004338 return -EINVAL;
4339 }
4340
Clarence Ip88270a62016-06-26 10:09:34 -04004341 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004342
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004343 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
4344
Clarence Ip19af1362016-09-23 14:57:51 -04004345 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004346
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004347 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
4348 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004349 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
4350
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004351 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04004352 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004353 /*
4354 * Left-most tile is at index 0, content is controller id
4355 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
4356 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
4357 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004358 u32 controller_id = disp_info->h_tile_instance[i];
4359
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004360 if (disp_info->num_of_h_tiles > 1) {
4361 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004362 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004363 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004364 phys_params.split_role = ENC_ROLE_SLAVE;
4365 } else {
4366 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004367 }
4368
Clarence Ip19af1362016-09-23 14:57:51 -04004369 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004370 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004371
Alan Kwongbb27c092016-07-20 16:41:25 -04004372 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004373 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004374 phys_params.wb_idx = sde_encoder_get_wb(
4375 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04004376 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004377 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004378 SDE_ERROR_ENC(sde_enc,
4379 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004380 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004381 ret = -EINVAL;
4382 }
Alan Kwongbb27c092016-07-20 16:41:25 -04004383 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004384 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004385 phys_params.intf_idx = sde_encoder_get_intf(
4386 sde_kms->catalog, intf_type,
4387 controller_id);
4388 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004389 SDE_ERROR_ENC(sde_enc,
4390 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004391 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004392 ret = -EINVAL;
4393 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004394 }
4395
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004396 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04004397 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004398 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
4399 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004400 else
4401 ret = sde_encoder_virt_add_phys_encs(
4402 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004403 sde_enc,
4404 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004405 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04004406 SDE_ERROR_ENC(sde_enc,
4407 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004408 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004409 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08004410
4411 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4412 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4413
4414 if (phys) {
4415 atomic_set(&phys->vsync_cnt, 0);
4416 atomic_set(&phys->underrun_cnt, 0);
4417 }
4418 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004419 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004420
4421 return ret;
4422}
4423
/* drm_encoder_helper_funcs: modeset pipeline hooks for the virtual encoder */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4430
/* drm_encoder_funcs: destroy plus debugfs register/unregister hooks */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4436
Clarence Ip3649f8b2016-10-31 09:59:44 -04004437struct drm_encoder *sde_encoder_init(
4438 struct drm_device *dev,
4439 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004440{
4441 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04004442 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004443 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004444 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004445 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08004446 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004447 int ret = 0;
4448
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004449 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
4450 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004451 ret = -ENOMEM;
4452 goto fail;
4453 }
4454
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004455 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004456 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
4457 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004458 if (ret)
4459 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004460
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004461 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004462 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004463 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004464 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004465 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004466
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004467 if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
4468 disp_info->is_primary)
4469 setup_timer(&sde_enc->vsync_event_timer,
4470 sde_encoder_vsync_event_handler,
4471 (unsigned long)sde_enc);
4472
Dhaval Patel020f7e122016-11-15 14:39:18 -08004473 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
4474 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004475 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08004476 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07004477 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08004478 PTR_ERR(sde_enc->rsc_client));
4479 sde_enc->rsc_client = NULL;
4480 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004481
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004482 if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
4483 ret = _sde_encoder_input_handler(sde_enc);
4484 if (ret)
4485 SDE_ERROR(
4486 "input handler registration failed, rc = %d\n", ret);
4487 }
4488
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004489 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04004490 kthread_init_delayed_work(&sde_enc->delayed_off_work,
4491 sde_encoder_off_work);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07004492 sde_enc->vblank_enabled = false;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004493
4494 kthread_init_work(&sde_enc->vsync_event_work,
4495 sde_encoder_vsync_event_work_handler);
4496
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004497 kthread_init_work(&sde_enc->input_event_work,
4498 sde_encoder_input_event_work_handler);
4499
Dhaval Patel020f7e122016-11-15 14:39:18 -08004500 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
4501
Clarence Ip19af1362016-09-23 14:57:51 -04004502 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004503
4504 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004505
4506fail:
Clarence Ip19af1362016-09-23 14:57:51 -04004507 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004508 if (drm_enc)
4509 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004510
4511 return ERR_PTR(ret);
4512}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004513
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004514int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4515 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004516{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004517 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004518 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004519 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004520
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004521 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004522 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004523 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004524 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004525 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004526 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004527
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004528 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4529 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004530
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004531 switch (event) {
4532 case MSM_ENC_COMMIT_DONE:
4533 fn_wait = phys->ops.wait_for_commit_done;
4534 break;
4535 case MSM_ENC_TX_COMPLETE:
4536 fn_wait = phys->ops.wait_for_tx_complete;
4537 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004538 case MSM_ENC_VBLANK:
4539 fn_wait = phys->ops.wait_for_vblank;
4540 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304541 case MSM_ENC_ACTIVE_REGION:
4542 fn_wait = phys->ops.wait_for_active;
4543 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004544 default:
4545 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4546 event);
4547 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004548 };
4549
4550 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004551 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004552 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004553 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004554 if (ret)
4555 return ret;
4556 }
4557 }
4558
4559 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004560}
4561
Alan Kwong67a3f792016-11-01 23:16:53 -04004562enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4563{
4564 struct sde_encoder_virt *sde_enc = NULL;
4565 int i;
4566
4567 if (!encoder) {
4568 SDE_ERROR("invalid encoder\n");
4569 return INTF_MODE_NONE;
4570 }
4571 sde_enc = to_sde_encoder_virt(encoder);
4572
4573 if (sde_enc->cur_master)
4574 return sde_enc->cur_master->intf_mode;
4575
4576 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4577 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4578
4579 if (phys)
4580 return phys->intf_mode;
4581 }
4582
4583 return INTF_MODE_NONE;
4584}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004585
4586/**
4587 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
4588 * device bootup when cont_splash is enabled
4589 * @drm_enc: Pointer to drm encoder structure
4590 * @Return: true if successful in updating the encoder structure
4591 */
4592int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
4593{
4594 struct sde_encoder_virt *sde_enc;
4595 struct msm_drm_private *priv;
4596 struct sde_kms *sde_kms;
4597 struct drm_connector *conn = NULL;
4598 struct sde_connector *sde_conn = NULL;
4599 struct sde_connector_state *sde_conn_state = NULL;
4600 struct drm_display_mode *drm_mode = NULL;
4601 struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
4602 int ret = 0, i;
4603
4604 if (!encoder) {
4605 SDE_ERROR("invalid drm enc\n");
4606 return -EINVAL;
4607 }
4608
4609 if (!encoder->dev || !encoder->dev->dev_private) {
4610 SDE_ERROR("drm device invalid\n");
4611 return -EINVAL;
4612 }
4613
4614 priv = encoder->dev->dev_private;
4615 if (!priv->kms) {
4616 SDE_ERROR("invalid kms\n");
4617 return -EINVAL;
4618 }
4619
4620 sde_kms = to_sde_kms(priv->kms);
4621 sde_enc = to_sde_encoder_virt(encoder);
4622 if (!priv->num_connectors) {
4623 SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
4624 return -EINVAL;
4625 }
4626 SDE_DEBUG_ENC(sde_enc,
4627 "num of connectors: %d\n", priv->num_connectors);
4628
4629 for (i = 0; i < priv->num_connectors; i++) {
4630 SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
4631 priv->connectors[i]->base.id);
4632 sde_conn = to_sde_connector(priv->connectors[i]);
4633 if (!sde_conn->encoder) {
4634 SDE_DEBUG_ENC(sde_enc,
4635 "encoder not attached to connector\n");
4636 continue;
4637 }
4638 if (sde_conn->encoder->base.id
4639 == encoder->base.id) {
4640 conn = (priv->connectors[i]);
4641 break;
4642 }
4643 }
4644
4645 if (!conn || !conn->state) {
4646 SDE_ERROR_ENC(sde_enc, "connector not found\n");
4647 return -EINVAL;
4648 }
4649
4650 sde_conn_state = to_sde_connector_state(conn->state);
4651
4652 if (!sde_conn->ops.get_mode_info) {
4653 SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
4654 return -EINVAL;
4655 }
4656
4657 ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
4658 &sde_conn_state->mode_info,
4659 sde_kms->catalog->max_mixer_width,
4660 sde_conn->display);
4661 if (ret) {
4662 SDE_ERROR_ENC(sde_enc,
4663 "conn: ->get_mode_info failed. ret=%d\n", ret);
4664 return ret;
4665 }
4666
4667 ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
4668 conn->state, false);
4669 if (ret) {
4670 SDE_ERROR_ENC(sde_enc,
4671 "failed to reserve hw resources, %d\n", ret);
4672 return ret;
4673 }
4674
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08004675 if (sde_conn->encoder) {
4676 conn->state->best_encoder = sde_conn->encoder;
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004677 SDE_DEBUG_ENC(sde_enc,
4678 "configured cstate->best_encoder to ID = %d\n",
4679 conn->state->best_encoder->base.id);
4680 } else {
4681 SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
4682 conn->base.id);
4683 }
4684
4685 SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
4686 sde_connector_get_topology_name(conn));
4687 drm_mode = &encoder->crtc->state->adjusted_mode;
4688 SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
4689 drm_mode->hdisplay, drm_mode->vdisplay);
4690 drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);
4691
4692 if (encoder->bridge) {
4693 SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
4694 /*
4695 * For cont-splash use case, we update the mode
4696 * configurations manually. This will skip the
4697 * usually mode set call when actual frame is
4698 * pushed from framework. The bridge needs to
4699 * be updated with the current drm mode by
4700 * calling the bridge mode set ops.
4701 */
4702 if (encoder->bridge->funcs) {
4703 SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
4704 encoder->bridge->funcs->mode_set(encoder->bridge,
4705 drm_mode, drm_mode);
4706 }
4707 } else {
4708 SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
4709 }
4710
4711 sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
4712 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
4713 sde_enc->hw_pp[i] = NULL;
4714 if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
4715 break;
4716 sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
4717 }
4718
4719 sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
4720 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
4721 sde_enc->hw_dsc[i] = NULL;
4722 if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
4723 break;
4724 sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
4725 }
4726
4727 sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
4728 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4729 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4730
4731 phys->hw_ctl = NULL;
4732 if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
4733 break;
4734 phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
4735 }
4736
4737 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4738 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4739
4740 if (!phys) {
4741 SDE_ERROR_ENC(sde_enc,
4742 "phys encoders not initialized\n");
4743 return -EINVAL;
4744 }
4745
4746 phys->hw_pp = sde_enc->hw_pp[i];
4747 if (phys->ops.cont_splash_mode_set)
4748 phys->ops.cont_splash_mode_set(phys, drm_mode);
4749
4750 if (phys->ops.is_master && phys->ops.is_master(phys)) {
4751 phys->connector = conn;
4752 sde_enc->cur_master = phys;
4753 }
4754 }
4755
4756 return ret;
4757}