blob: 1614c832326e3ef840e67874b0ac6c76d44b7efa [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Jayant Shekhar98e78a82018-01-12 17:50:55 +05302 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070070#define MAX_CHANNELS_PER_ENC 2
71
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Raviteja Tamatam3eebe962017-10-26 09:55:24 +053076#define FAULT_TOLERENCE_DELTA_IN_MS 2
77
78#define FAULT_TOLERENCE_WAIT_IN_MS 5
79
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040080/* Maximum number of VSYNC wait attempts for RSC state transition */
81#define MAX_RSC_WAIT 5
82
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070083/**
84 * enum sde_enc_rc_events - events for resource control state machine
85 * @SDE_ENC_RC_EVENT_KICKOFF:
86 * This event happens at NORMAL priority.
87 * Event that signals the start of the transfer. When this event is
88 * received, enable MDP/DSI core clocks and request RSC with CMD state.
89 * Regardless of the previous state, the resource should be in ON state
90 * at the end of this event.
91 * @SDE_ENC_RC_EVENT_FRAME_DONE:
92 * This event happens at INTERRUPT level.
93 * Event signals the end of the data transfer after the PP FRAME_DONE
94 * event. At the end of this event, a delayed work is scheduled to go to
Dhaval Patelc9e213b2017-11-02 12:13:12 -070095 * IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040096 * @SDE_ENC_RC_EVENT_PRE_STOP:
97 * This event happens at NORMAL priority.
98 * This event, when received during the ON state, set RSC to IDLE, and
99 * and leave the RC STATE in the PRE_OFF state.
100 * It should be followed by the STOP event as part of encoder disable.
101 * If received during IDLE or OFF states, it will do nothing.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700102 * @SDE_ENC_RC_EVENT_STOP:
103 * This event happens at NORMAL priority.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400104 * When this event is received, disable all the MDP/DSI core clocks, and
105 * disable IRQs. It should be called from the PRE_OFF or IDLE states.
106 * IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
107 * PRE_OFF is expected when PRE_STOP was executed during the ON state.
108 * Resource state should be in OFF at the end of the event.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700109 * @SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700110 * This event happens at NORMAL priority from a work item.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700111 * Event signals that a seamless mode switch is in progress. A
112 * client needs to turn off only the irq - leave clocks ON to reduce the mode
113 * switch latency.
114 * @SDE_ENC_RC_EVENT_POST_MODESET:
115 * This event happens at NORMAL priority from a work item.
116 * Event signals that seamless mode switch is complete and resources are
117 * acquired. Clients want to turn on the irq again and update the rsc
118 * with new vtotal.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700119 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
120 * This event happens at NORMAL priority from a work item.
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700121 * Event signals that there were no frame updates for
122 * IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core clocks
123 * and request RSC with IDLE state and change the resource state to IDLE.
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800124 * @SDE_ENC_RC_EVENT_EARLY_WAKEUP:
125 * This event is triggered from the input event thread when touch event is
126 * received from the input device. On receiving this event,
127 * - If the device is in SDE_ENC_RC_STATE_IDLE state, it turns ON the
128 clocks and enable RSC.
129 * - If the device is in SDE_ENC_RC_STATE_ON state, it resets the delayed
130 * off work since a new commit is imminent.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700131 */
132enum sde_enc_rc_events {
133 SDE_ENC_RC_EVENT_KICKOFF = 1,
134 SDE_ENC_RC_EVENT_FRAME_DONE,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400135 SDE_ENC_RC_EVENT_PRE_STOP,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700136 SDE_ENC_RC_EVENT_STOP,
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700137 SDE_ENC_RC_EVENT_PRE_MODESET,
138 SDE_ENC_RC_EVENT_POST_MODESET,
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800139 SDE_ENC_RC_EVENT_ENTER_IDLE,
140 SDE_ENC_RC_EVENT_EARLY_WAKEUP,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700141};
142
143/*
144 * enum sde_enc_rc_states - states that the resource control maintains
145 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400146 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700147 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700148 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700149 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
150 */
151enum sde_enc_rc_states {
152 SDE_ENC_RC_STATE_OFF,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400153 SDE_ENC_RC_STATE_PRE_OFF,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700154 SDE_ENC_RC_STATE_ON,
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700155 SDE_ENC_RC_STATE_MODESET,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700156 SDE_ENC_RC_STATE_IDLE
157};
158
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400159/**
160 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
161 * encoders. Virtual encoder manages one "logical" display. Physical
162 * encoders manage one intf block, tied to a specific panel/sub-panel.
163 * Virtual encoder defers as much as possible to the physical encoders.
164 * Virtual encoder registers itself with the DRM Framework as the encoder.
165 * @base: drm_encoder base class for registration with DRM
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400166 * @enc_spin_lock: Virtual-Encoder-Wide Spin Lock for IRQ purposes
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400167 * @bus_scaling_client: Client handle to the bus scaling interface
168 * @num_phys_encs: Actual number of physical encoders contained.
169 * @phys_encs: Container of physical encoders managed.
170 * @cur_master: Pointer to the current master in this mode. Optimization
171 * Only valid after enable. Cleared as disable.
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -0700172 * @hw_pp Handle to the pingpong blocks used for the display. No.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500173 * pingpong blocks can be different than num_phys_encs.
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800174 * @hw_dsc: Array of DSC block handles used for the display.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500175 * @intfs_swapped Whether or not the phys_enc interfaces have been swapped
176 * for partial update right-only cases, such as pingpong
177 * split where virtual pingpong does not generate IRQs
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400178 * @crtc_vblank_cb: Callback into the upper layer / CRTC for
179 * notification of the VBLANK
180 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400181 * @crtc_kickoff_cb: Callback into CRTC that will flush & start
182 * all CTL paths
183 * @crtc_kickoff_cb_data: Opaque user data given to crtc_kickoff_cb
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700184 * @debugfs_root: Debug file system root file node
185 * @enc_lock: Lock around physical encoder create/destroy and
186 access.
Alan Kwong628d19e2016-10-31 13:50:13 -0400187 * @frame_busy_mask: Bitmask tracking which phys_enc we are still
188 * busy processing current command.
189 * Bit0 = phys_encs[0] etc.
190 * @crtc_frame_event_cb: callback handler for frame event
191 * @crtc_frame_event_cb_data: callback handler private data
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400192 * @vsync_event_timer: vsync timer
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700193 * @rsc_client: rsc client pointer
194 * @rsc_state_init: boolean to indicate rsc config init
195 * @disp_info: local copy of msm_display_info struct
Dhaval Patelf9245d62017-03-28 16:24:00 -0700196 * @misr_enable: misr enable/disable status
Dhaval Patel010f5172017-08-01 22:40:09 -0700197 * @misr_frame_count: misr frame count before start capturing the data
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700198 * @idle_pc_supported: indicate if idle power collapse is supported
199 * @rc_lock: resource control mutex lock to protect
200 * virt encoder over various state changes
201 * @rc_state: resource controller state
202 * @delayed_off_work: delayed worker to schedule disabling of
203 * clks and resources after IDLE_TIMEOUT time.
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400204 * @vsync_event_work: worker to handle vsync event for autorefresh
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800205 * @input_event_work: worker to handle input device touch events
Dhaval Patel222023e2018-02-27 12:24:07 -0800206 * @esd_trigger_work: worker to handle esd trigger events
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800207 * @input_handler: handler for input device events
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700208 * @topology: topology of the display
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -0700209 * @vblank_enabled: boolean to track userspace vblank vote
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700210 * @rsc_config: rsc configuration for display vtotal, fps, etc.
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400211 * @cur_conn_roi: current connector roi
212 * @prv_conn_roi: previous connector roi to optimize if unchanged
Harsh Sahu1e52ed02017-11-28 14:34:22 -0800213 * @crtc pointer to drm_crtc
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400214 */
215struct sde_encoder_virt {
216 struct drm_encoder base;
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400217 spinlock_t enc_spinlock;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400218 uint32_t bus_scaling_client;
219
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400220 uint32_t display_num_of_h_tiles;
221
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400222 unsigned int num_phys_encs;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400223 struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
224 struct sde_encoder_phys *cur_master;
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -0700225 struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800226 struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400227
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500228 bool intfs_swapped;
229
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400230 void (*crtc_vblank_cb)(void *);
231 void *crtc_vblank_cb_data;
232
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700233 struct dentry *debugfs_root;
234 struct mutex enc_lock;
Alan Kwong628d19e2016-10-31 13:50:13 -0400235 DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
236 void (*crtc_frame_event_cb)(void *, u32 event);
237 void *crtc_frame_event_cb_data;
Alan Kwong628d19e2016-10-31 13:50:13 -0400238
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400239 struct timer_list vsync_event_timer;
Dhaval Patel020f7e122016-11-15 14:39:18 -0800240
241 struct sde_rsc_client *rsc_client;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700242 bool rsc_state_init;
Dhaval Patel020f7e122016-11-15 14:39:18 -0800243 struct msm_display_info disp_info;
Dhaval Patelf9245d62017-03-28 16:24:00 -0700244 bool misr_enable;
Dhaval Patel010f5172017-08-01 22:40:09 -0700245 u32 misr_frame_count;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700246
247 bool idle_pc_supported;
248 struct mutex rc_lock;
249 enum sde_enc_rc_states rc_state;
Lloyd Atkinsona8781382017-07-17 10:20:43 -0400250 struct kthread_delayed_work delayed_off_work;
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400251 struct kthread_work vsync_event_work;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800252 struct kthread_work input_event_work;
Dhaval Patel222023e2018-02-27 12:24:07 -0800253 struct kthread_work esd_trigger_work;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800254 struct input_handler *input_handler;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700255 struct msm_display_topology topology;
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -0700256 bool vblank_enabled;
Alan Kwong56f1a942017-04-04 11:53:42 -0700257
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700258 struct sde_rsc_cmd_config rsc_config;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400259 struct sde_rect cur_conn_roi;
260 struct sde_rect prv_conn_roi;
Harsh Sahu1e52ed02017-11-28 14:34:22 -0800261 struct drm_crtc *crtc;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400262};
263
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400264#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700265
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500266static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
267{
268 struct msm_drm_private *priv;
269 struct sde_kms *sde_kms;
270 struct pm_qos_request *req;
271 u32 cpu_mask;
272 u32 cpu_dma_latency;
273 int cpu;
274
275 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
276 SDE_ERROR("drm device invalid\n");
277 return;
278 }
279
280 priv = drm_enc->dev->dev_private;
281 if (!priv->kms) {
282 SDE_ERROR("invalid kms\n");
283 return;
284 }
285
286 sde_kms = to_sde_kms(priv->kms);
287 if (!sde_kms || !sde_kms->catalog)
288 return;
289
290 cpu_mask = sde_kms->catalog->perf.cpu_mask;
291 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
292 if (!cpu_mask)
293 return;
294
295 req = &sde_kms->pm_qos_cpu_req;
296 req->type = PM_QOS_REQ_AFFINE_CORES;
297 cpumask_empty(&req->cpus_affine);
298 for_each_possible_cpu(cpu) {
299 if ((1 << cpu) & cpu_mask)
300 cpumask_set_cpu(cpu, &req->cpus_affine);
301 }
302 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
303
304 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
305}
306
/**
 * _sde_encoder_pm_qos_remove_request - remove the CPU DMA latency PM QoS vote
 * @drm_enc: Pointer to drm encoder structure
 *
 * Counterpart of _sde_encoder_pm_qos_add_request. No-op when the catalog
 * does not configure a perf cpu_mask, since no request was added then.
 */
static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
{
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return;
	}

	sde_kms = to_sde_kms(priv->kms);
	/* skip removal if no request was ever added (no cpu_mask configured) */
	if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
		return;

	pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
}
329
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700330static struct drm_connector_state *_sde_encoder_get_conn_state(
331 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800332{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700333 struct msm_drm_private *priv;
334 struct sde_kms *sde_kms;
335 struct list_head *connector_list;
336 struct drm_connector *conn_iter;
337
338 if (!drm_enc) {
339 SDE_ERROR("invalid argument\n");
340 return NULL;
341 }
342
343 priv = drm_enc->dev->dev_private;
344 sde_kms = to_sde_kms(priv->kms);
345 connector_list = &sde_kms->dev->mode_config.connector_list;
346
347 list_for_each_entry(conn_iter, connector_list, head)
348 if (conn_iter->encoder == drm_enc)
349 return conn_iter->state;
350
351 return NULL;
352}
353
/**
 * _sde_encoder_get_mode_info - retrieve mode info via the attached connector
 * @drm_enc: Pointer to drm encoder structure
 * @mode_info: Output pointer filled with the connector's mode information
 *
 * Return: 0 on success, -EINVAL on invalid arguments or when no connector
 * state is currently bound to the encoder.
 */
static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
	struct msm_mode_info *mode_info)
{
	struct drm_connector_state *conn_state;

	if (!drm_enc || !mode_info) {
		SDE_ERROR("invalid arguments\n");
		return -EINVAL;
	}

	conn_state = _sde_encoder_get_conn_state(drm_enc);
	if (!conn_state) {
		SDE_ERROR("invalid connector state for the encoder: %d\n",
			drm_enc->base.id);
		return -EINVAL;
	}

	return sde_connector_get_mode_info(conn_state, mode_info);
}
373
374static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
375{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400376 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700377 struct msm_mode_info mode_info;
378 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400379
380 if (!drm_enc)
381 return false;
382
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700383 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
384 if (rc) {
385 SDE_ERROR("failed to get mode info, enc: %d\n",
386 drm_enc->base.id);
387 return false;
388 }
389
390 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800391
392 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
393}
394
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400395bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
396{
397 enum sde_rm_topology_name topology;
398 struct sde_encoder_virt *sde_enc;
399 struct drm_connector *drm_conn;
400
401 if (!drm_enc)
402 return false;
403
404 sde_enc = to_sde_encoder_virt(drm_enc);
405 if (!sde_enc->cur_master)
406 return false;
407
408 drm_conn = sde_enc->cur_master->connector;
409 if (!drm_conn)
410 return false;
411
412 topology = sde_connector_get_topology_name(drm_conn);
413 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
414 return true;
415
416 return false;
417}
418
/**
 * _sde_encoder_power_enable - enable/disable the encoder's power resource
 * @sde_enc: Pointer to virtual encoder structure
 * @enable: true to enable, false to disable the power resource
 *
 * Return: result of sde_power_resource_enable, or -EINVAL on bad state.
 */
static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
	bool enable)
{
	struct drm_encoder *drm_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!sde_enc) {
		SDE_ERROR("invalid sde enc\n");
		return -EINVAL;
	}

	drm_enc = &sde_enc->base;
	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);

	return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
		enable);
}
448
/**
 * sde_encoder_helper_report_irq_timeout - report an irq wait timeout
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Interrupt index that timed out
 *
 * Logs the timeout and, when available, notifies the parent virtual
 * encoder through the frame-done callback with an ERROR event so that
 * frame state can be recovered by the upper layers.
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
463
/**
 * sde_encoder_helper_wait_for_irq - wait for an expected hw interrupt
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index to wait on
 * @wait_info: Wait parameters (atomic pending count, wait queue, timeout)
 *
 * Waits for the irq handler to signal @wait_info->atomic_cnt. On timeout,
 * the raw hw irq status is re-read: if the irq actually fired (missed/late
 * wakeup), the registered callback is invoked inline with local irqs
 * disabled and 0 is returned; otherwise -ETIMEDOUT is returned.
 *
 * Return: 0 on success, -EINVAL on bad params, -EWOULDBLOCK if the encoder
 * is disabled, -ETIMEDOUT on a genuine timeout.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered for this encoder: nothing to wait on */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* timed out: check whether the irq actually fired */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/* irq pending but handler missed: run it inline */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt), irq_status,
					SDE_EVTLOG_ERROR);
		}
	} else {
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
545
/**
 * sde_encoder_helper_register_irq - lookup, register and enable an encoder irq
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index to register
 *
 * Looks up the hw irq index for the interrupt type, registers the callback
 * and enables the irq. On enable failure the callback registration is
 * rolled back and irq_idx is invalidated.
 *
 * Return: 0 on success (or when already registered), negative error code
 * on lookup, registration or enable failure.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* irq_idx >= 0 means a prior registration is still active */
	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration on enable failure */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
605
/**
 * sde_encoder_helper_unregister_irq - disable and unregister an encoder irq
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index to unregister
 *
 * Disables the irq, removes the callback and invalidates irq_idx. An extra
 * unregister (no prior registration) is logged but still returns success;
 * intermediate disable/unregister failures are recorded in the event log
 * and do not abort the teardown.
 *
 * Return: 0 on success (including extra unregister), -EINVAL when
 * @phys_enc is NULL.
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
				DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	irq->irq_idx = -EINVAL;

	return 0;
}
647
/**
 * sde_encoder_get_hw_resources - query hw resources used by the encoder
 * @drm_enc: Pointer to drm encoder structure
 * @hw_res: Output structure, zeroed and then filled in
 * @conn_state: Temporary connector state passed in from atomic_check
 *
 * Aggregates hw resources from all physical encoders (expected to be
 * non-overlapping) and fills in the topology and primary-display info
 * derived from @conn_state.
 */
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_mode_info mode_info;
	int rc, i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}

	/**
	 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
	 * called from atomic_check phase. Use the below API to get mode
	 * information of the temporary conn_state passed.
	 */
	rc = sde_connector_get_mode_info(conn_state, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	hw_res->topology = mode_info.topology;
	hw_res->is_primary = sde_enc->disp_info.is_primary;
}
690
/**
 * sde_encoder_destroy - destroy the virtual encoder and its physical encoders
 * @drm_enc: Pointer to drm encoder structure
 *
 * Under enc_lock, destroys the rsc client and every physical encoder, then
 * cleans up the drm encoder base, the lock, the input handler, and finally
 * frees the virtual encoder itself.
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* all phys encs should have been destroyed in the loop above */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&sde_enc->enc_lock);

	if (sde_enc->input_handler) {
		input_unregister_handler(sde_enc->input_handler);
		kfree(sde_enc->input_handler);
	}

	kfree(sde_enc);
}
733
Clarence Ip8e69ad02016-12-09 09:43:57 -0500734void sde_encoder_helper_split_config(
735 struct sde_encoder_phys *phys_enc,
736 enum sde_intf interface)
737{
738 struct sde_encoder_virt *sde_enc;
739 struct split_pipe_cfg cfg = { 0 };
740 struct sde_hw_mdp *hw_mdptop;
741 enum sde_rm_topology_name topology;
Dhaval Patel5cd59a02017-06-13 16:29:40 -0700742 struct msm_display_info *disp_info;
Clarence Ip8e69ad02016-12-09 09:43:57 -0500743
744 if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
745 SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
746 return;
747 }
748
749 sde_enc = to_sde_encoder_virt(phys_enc->parent);
750 hw_mdptop = phys_enc->hw_mdptop;
Dhaval Patel5cd59a02017-06-13 16:29:40 -0700751 disp_info = &sde_enc->disp_info;
752
753 if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
754 return;
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500755
756 /**
757 * disable split modes since encoder will be operating in as the only
758 * encoder, either for the entire use case in the case of, for example,
759 * single DSI, or for this frame in the case of left/right only partial
760 * update.
761 */
762 if (phys_enc->split_role == ENC_ROLE_SOLO) {
763 if (hw_mdptop->ops.setup_split_pipe)
764 hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
765 if (hw_mdptop->ops.setup_pp_split)
766 hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
767 return;
768 }
769
770 cfg.en = true;
Clarence Ip8e69ad02016-12-09 09:43:57 -0500771 cfg.mode = phys_enc->intf_mode;
772 cfg.intf = interface;
773
774 if (cfg.en && phys_enc->ops.needs_single_flush &&
775 phys_enc->ops.needs_single_flush(phys_enc))
776 cfg.split_flush_en = true;
777
778 topology = sde_connector_get_topology_name(phys_enc->connector);
779 if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
780 cfg.pp_split_slave = cfg.intf;
781 else
782 cfg.pp_split_slave = INTF_MAX;
783
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500784 if (phys_enc->split_role == ENC_ROLE_MASTER) {
Clarence Ip8e69ad02016-12-09 09:43:57 -0500785 SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);
786
787 if (hw_mdptop->ops.setup_split_pipe)
788 hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -0400789 } else if (sde_enc->hw_pp[0]) {
Clarence Ip8e69ad02016-12-09 09:43:57 -0500790 /*
791 * slave encoder
792 * - determine split index from master index,
793 * assume master is first pp
794 */
795 cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
796 SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
797 cfg.pp_split_index);
798
799 if (hw_mdptop->ops.setup_pp_split)
800 hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
801 }
802}
803
/*
 * sde_encoder_virt_atomic_check - validate a proposed encoder state change
 * @drm_enc: encoder being checked
 * @crtc_state: proposed crtc state
 * @conn_state: proposed (temporary) connector state for this check
 *
 * Runs the per-physical-encoder checks, validates partial-update ROIs
 * against modesets, records the outgoing topology, reserves RM resources
 * for the new state, and publishes mode/topology info on the connector
 * state. Returns 0 on success or a negative error code.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* prefer the phys encoder's atomic_check; fall back to
		 * its legacy mode_fixup hook when atomic_check is absent
		 */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/* a modeset must cover the full mode: reject partial-update ROIs
	 * (from either connector or crtc) that differ from the full frame
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/**
		 * record topology in previous atomic state to be able to handle
		 * topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top  = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		/* refresh mode info on the temporary connector state */
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
969
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800970static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
971 int pic_width, int pic_height)
972{
973 if (!dsc || !pic_width || !pic_height) {
974 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
975 pic_width, pic_height);
976 return -EINVAL;
977 }
978
979 if ((pic_width % dsc->slice_width) ||
980 (pic_height % dsc->slice_height)) {
981 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
982 pic_width, pic_height,
983 dsc->slice_width, dsc->slice_height);
984 return -EINVAL;
985 }
986
987 dsc->pic_width = pic_width;
988 dsc->pic_height = pic_height;
989
990 return 0;
991}
992
993static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
994 int intf_width)
995{
996 int slice_per_pkt, slice_per_intf;
997 int bytes_in_slice, total_bytes_per_intf;
998
999 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
1000 (intf_width < dsc->slice_width)) {
1001 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
1002 intf_width, dsc ? dsc->slice_width : -1);
1003 return;
1004 }
1005
1006 slice_per_pkt = dsc->slice_per_pkt;
1007 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
1008
1009 /*
1010 * If slice_per_pkt is greater than slice_per_intf then default to 1.
1011 * This can happen during partial update.
1012 */
1013 if (slice_per_pkt > slice_per_intf)
1014 slice_per_pkt = 1;
1015
1016 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
1017 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
1018
1019 dsc->eol_byte_num = total_bytes_per_intf % 3;
1020 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1021 dsc->bytes_in_slice = bytes_in_slice;
1022 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1023 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1024}
1025
1026static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1027 int enc_ip_width)
1028{
1029 int ssm_delay, total_pixels, soft_slice_per_enc;
1030
1031 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1032
1033 /*
1034 * minimum number of initial line pixels is a sum of:
1035 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1036 * 91 for 10 bpc) * 3
1037 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1038 * 3. the initial xmit delay
1039 * 4. total pipeline delay through the "lock step" of encoder (47)
1040 * 5. 6 additional pixels as the output of the rate buffer is
1041 * 48 bits wide
1042 */
1043 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1044 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1045 if (soft_slice_per_enc > 1)
1046 total_pixels += (ssm_delay * 3);
1047 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1048 return 0;
1049}
1050
1051static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1052 struct msm_display_dsc_info *dsc)
1053{
1054 /*
1055 * As per the DSC spec, ICH_RESET can be either end of the slice line
1056 * or at the end of the slice. HW internally generates ich_reset at
1057 * end of the slice line if DSC_MERGE is used or encoder has two
1058 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1059 * is not used then it will generate ich_reset at the end of slice.
1060 *
1061 * Now as per the spec, during one PPS session, position where
1062 * ich_reset is generated should not change. Now if full-screen frame
1063 * has more than 1 soft slice then HW will automatically generate
1064 * ich_reset at the end of slice_line. But for the same panel, if
1065 * partial frame is enabled and only 1 encoder is used with 1 slice,
1066 * then HW will generate ich_reset at end of the slice. This is a
1067 * mismatch. Prevent this by overriding HW's decision.
1068 */
1069 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1070 (dsc->slice_width == dsc->pic_width);
1071}
1072
1073static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
1074 struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001075 u32 common_mode, bool ich_reset, bool enable)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001076{
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001077 if (!enable) {
1078 if (hw_pp->ops.disable_dsc)
1079 hw_pp->ops.disable_dsc(hw_pp);
1080 return;
1081 }
1082
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001083 if (hw_dsc->ops.dsc_config)
1084 hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);
1085
1086 if (hw_dsc->ops.dsc_config_thresh)
1087 hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);
1088
1089 if (hw_pp->ops.setup_dsc)
1090 hw_pp->ops.setup_dsc(hw_pp);
1091
1092 if (hw_pp->ops.enable_dsc)
1093 hw_pp->ops.enable_dsc(hw_pp);
1094}
1095
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001096static void _sde_encoder_get_connector_roi(
1097 struct sde_encoder_virt *sde_enc,
1098 struct sde_rect *merged_conn_roi)
1099{
1100 struct drm_connector *drm_conn;
1101 struct sde_connector_state *c_state;
1102
1103 if (!sde_enc || !merged_conn_roi)
1104 return;
1105
1106 drm_conn = sde_enc->phys_encs[0]->connector;
1107
1108 if (!drm_conn || !drm_conn->state)
1109 return;
1110
1111 c_state = to_sde_connector_state(drm_conn->state);
1112 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1113}
1114
/*
 * Configure DSC for topologies using N layer mixers, one DSC encoder and
 * one interface (SINGLEPIPE_DSC / DUALPIPE_3DMERGE_DSC): the whole current
 * connector ROI is compressed by a single DSC core on pingpong 0.
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* work on a local copy of the DSC params from the current mode */
	dsc = &mode_info.comp_info.dsc_info;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	/* single interface carries all slices of the frame */
	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* no dsc-merge: encoder input width equals interface input width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en=false: single-enc full-ROI path never needs the override */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001165
/*
 * Configure DSC for the DUALPIPE_DSC topology: two layer mixers, two DSC
 * encoders, two interfaces, each DSC core feeding its own interface.
 * Handles half-panel partial update by enabling only the affected side
 * (per params->affected_displays) and dropping SPLIT_PANEL mode.
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	/* both pingpong/DSC pairs must be present for this topology */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one display affected => left/right-only partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	/* each DSC core gets its own copy of the mode's DSC parameters */
	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full frame: the two interfaces split the width between them */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	/* enable only the channels whose display bit is set this kickoff */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1259
/*
 * Configure DSC for the DUALPIPE_DSCMERGE topology: two layer mixers and
 * two DSC encoders merged into a single interface. Both DSC cores share
 * one parameter set; on half-panel partial update the second core is
 * disabled and SPLIT_PANEL/MULTIPLEX modes are dropped.
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	/* both pingpong/DSC pairs must be present for this topology */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one display affected => left/right-only partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* must run after _sde_encoder_dsc_update_pic_dim (see helper doc) */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/* NOTE(review): i equals MAX_CHANNELS_PER_ENC here (loop exhausted);
	 * logged as-is — confirm this is the intended event payload
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* core 0 always on; core 1 only when the full panel is updating */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1331
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001332static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1333{
1334 struct sde_encoder_virt *sde_enc;
1335 struct drm_connector *drm_conn;
1336 struct drm_display_mode *adj_mode;
1337 struct sde_rect roi;
1338
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001339 if (!drm_enc) {
1340 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001341 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001342 }
1343
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001344 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001345 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1346 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001347 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001348 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001349
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001350 if (!sde_enc->cur_master) {
1351 SDE_ERROR("invalid cur_master parameter\n");
1352 return -EINVAL;
1353 }
1354
1355 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001356 drm_conn = sde_enc->cur_master->connector;
1357
1358 _sde_encoder_get_connector_roi(sde_enc, &roi);
1359 if (sde_kms_rect_is_null(&roi)) {
1360 roi.w = adj_mode->hdisplay;
1361 roi.h = adj_mode->vdisplay;
1362 }
1363
1364 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1365 sizeof(sde_enc->prv_conn_roi));
1366 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1367
1368 return 0;
1369}
1370
1371static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1372 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001373{
1374 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001375 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001376 int ret = 0;
1377
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001378 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1379 !sde_enc->phys_encs[0]->connector)
1380 return -EINVAL;
1381
1382 drm_conn = sde_enc->phys_encs[0]->connector;
1383
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001384 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001385 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001386 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1387 return -EINVAL;
1388 }
1389
Ingrid Gallardo83532222017-06-02 16:48:51 -07001390 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001391 SDE_EVT32(DRMID(&sde_enc->base), topology,
1392 sde_enc->cur_conn_roi.x,
1393 sde_enc->cur_conn_roi.y,
1394 sde_enc->cur_conn_roi.w,
1395 sde_enc->cur_conn_roi.h,
1396 sde_enc->prv_conn_roi.x,
1397 sde_enc->prv_conn_roi.y,
1398 sde_enc->prv_conn_roi.w,
1399 sde_enc->prv_conn_roi.h,
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001400 sde_enc->cur_master->cached_mode.hdisplay,
1401 sde_enc->cur_master->cached_mode.vdisplay);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001402
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001403 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1404 &sde_enc->prv_conn_roi))
1405 return ret;
1406
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001407 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001408 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001409 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1410 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001411 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001412 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001413 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001414 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001415 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001416 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001417 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001418 default:
1419 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1420 topology);
1421 return -EINVAL;
1422 };
1423
1424 return ret;
1425}
1426
/*
 * _sde_encoder_update_vsync_source - select the TE/vsync source for all
 * pingpongs of a command-mode display
 * @sde_enc: virtual encoder
 * @disp_info: display capabilities/flags for this encoder
 * @is_dummy: route vsync to the secondary watchdog timer (WD_TIMER_1)
 *            instead of the panel TE or primary watchdog
 *
 * Only programs hardware when the MDP top block provides
 * setup_vsync_source and the display advertises command mode.
 */
static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
		struct msm_display_info *disp_info, bool is_dummy)
{
	struct sde_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	int i, rc = 0;

	if (!sde_enc || !disp_info) {
		SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
					sde_enc != NULL, disp_info != NULL);
		return;
	} else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
		SDE_ERROR("invalid num phys enc %d/%d\n",
				sde_enc->num_phys_encs,
				(int) ARRAY_SIZE(sde_enc->hw_pp));
		return;
	}

	drm_enc = &sde_enc->base;
	/* these pointers are validated in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	hw_mdptop = sde_kms->hw_mdp;
	if (!hw_mdptop) {
		SDE_ERROR("invalid mdptop\n");
		return;
	}

	/* frame rate for the vsync config comes from the current mode */
	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		/* one vsync source feeds every pingpong of this encoder */
		for (i = 0; i < sde_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = sde_enc->num_phys_encs;
		vsync_cfg.frame_rate = mode_info.frame_rate;
		if (is_dummy)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
		else if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
		vsync_cfg.is_dummy = is_dummy;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
1489
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001490static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1491{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001492 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001493 struct sde_hw_pingpong *hw_pp = NULL;
1494 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001495
1496 if (!sde_enc || !sde_enc->phys_encs[0] ||
1497 !sde_enc->phys_encs[0]->connector) {
1498 SDE_ERROR("invalid params %d %d\n",
1499 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1500 return -EINVAL;
1501 }
1502
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001503 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001504 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1505 hw_pp = sde_enc->hw_pp[i];
1506 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001507
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001508 if (hw_pp && hw_pp->ops.disable_dsc)
1509 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001510
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001511 if (hw_dsc && hw_dsc->ops.dsc_disable)
1512 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001513 }
1514
1515 return ret;
1516}
1517
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001518static int _sde_encoder_switch_to_watchdog_vsync(struct drm_encoder *drm_enc)
1519{
1520 struct sde_encoder_virt *sde_enc;
1521 struct msm_display_info disp_info;
1522
1523 if (!drm_enc) {
1524 pr_err("invalid drm encoder\n");
1525 return -EINVAL;
1526 }
1527
1528 sde_enc = to_sde_encoder_virt(drm_enc);
1529
1530 sde_encoder_control_te(drm_enc, false);
1531
1532 memcpy(&disp_info, &sde_enc->disp_info, sizeof(disp_info));
1533 disp_info.is_te_using_watchdog_timer = true;
1534 _sde_encoder_update_vsync_source(sde_enc, &disp_info, false);
1535
1536 sde_encoder_control_te(drm_enc, true);
1537
1538 return 0;
1539}
1540
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001541static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001542 struct drm_encoder *drm_enc,
1543 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001544{
1545 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001546 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001547 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001548 struct sde_rsc_cmd_config *rsc_config;
1549 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001550 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001551 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001552 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1553 int wait_count = 0;
1554 struct drm_crtc *primary_crtc;
1555 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001556 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001557 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001558
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001559 if (!drm_enc || !drm_enc->dev) {
1560 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001561 return -EINVAL;
1562 }
1563
1564 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001565 crtc = sde_enc->crtc;
1566
1567 if (!sde_enc->crtc) {
1568 SDE_ERROR("invalid crtc parameter\n");
1569 return -EINVAL;
1570 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001571 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001572 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001573
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001574 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001575 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001576 return 0;
1577 }
1578
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001579 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1580 if (rc) {
1581 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1582 return 0;
1583 }
1584
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001585 /**
1586 * only primary command mode panel can request CMD state.
1587 * all other panels/displays can request for VID state including
1588 * secondary command mode panel.
1589 */
1590 rsc_state = enable ?
1591 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
1592 disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1593 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001594 prefill_lines = config ? mode_info.prefill_lines +
1595 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001596
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001597 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001598 if ((rsc_config->fps != mode_info.frame_rate) ||
1599 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001600 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001601 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1602 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1603 rsc_config->fps = mode_info.frame_rate;
1604 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001605 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001606 rsc_config->jitter_numer = mode_info.jitter_numer;
1607 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001608 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001609 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001610
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001611 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001612 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001613 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001614 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001615
1616 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001617 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001618 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001619 } else {
1620 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001621 rsc_state, NULL, crtc->base.id,
1622 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001623 }
1624
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001625 /**
1626 * if RSC performed a state change that requires a VBLANK wait, it will
1627 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1628 *
1629 * if we are the primary display, we will need to enable and wait
1630 * locally since we hold the commit thread
1631 *
1632 * if we are an external display, we must send a signal to the primary
1633 * to enable its VBLANK and wait one, since the RSC hardware is driven
1634 * by the primary panel's VBLANK signals
1635 */
1636 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1637 if (ret) {
1638 SDE_ERROR_ENC(sde_enc,
1639 "sde rsc client update failed ret:%d\n", ret);
1640 return ret;
1641 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1642 return ret;
1643 }
1644
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001645 if (wait_vblank_crtc_id)
1646 wait_refcount =
1647 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1648 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1649 SDE_EVTLOG_FUNC_ENTRY);
1650
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001651 if (crtc->base.id != wait_vblank_crtc_id) {
1652 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1653 if (!primary_crtc) {
1654 SDE_ERROR_ENC(sde_enc,
1655 "failed to find primary crtc id %d\n",
1656 wait_vblank_crtc_id);
1657 return -EINVAL;
1658 }
1659 pipe = drm_crtc_index(primary_crtc);
1660 }
1661
1662 /**
1663 * note: VBLANK is expected to be enabled at this point in
1664 * resource control state machine if on primary CRTC
1665 */
1666 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1667 if (sde_rsc_client_is_state_update_complete(
1668 sde_enc->rsc_client))
1669 break;
1670
1671 if (crtc->base.id == wait_vblank_crtc_id)
1672 ret = sde_encoder_wait_for_event(drm_enc,
1673 MSM_ENC_VBLANK);
1674 else
1675 drm_wait_one_vblank(drm_enc->dev, pipe);
1676
1677 if (ret) {
1678 SDE_ERROR_ENC(sde_enc,
1679 "wait for vblank failed ret:%d\n", ret);
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001680 /**
1681 * rsc hardware may hang without vsync. avoid rsc hang
1682 * by generating the vsync from watchdog timer.
1683 */
1684 if (crtc->base.id == wait_vblank_crtc_id)
1685 _sde_encoder_switch_to_watchdog_vsync(drm_enc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001686 }
1687 }
1688
1689 if (wait_count >= MAX_RSC_WAIT)
1690 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1691 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001692
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001693 if (wait_refcount)
1694 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1695 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1696 SDE_EVTLOG_FUNC_EXIT);
1697
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001698 return ret;
1699}
1700
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001701static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1702{
1703 struct sde_encoder_virt *sde_enc;
1704 int i;
1705
1706 if (!drm_enc) {
1707 SDE_ERROR("invalid encoder\n");
1708 return;
1709 }
1710
1711 sde_enc = to_sde_encoder_virt(drm_enc);
1712
1713 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1714 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1715 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1716
1717 if (phys && phys->ops.irq_control)
1718 phys->ops.irq_control(phys, enable);
1719 }
1720
1721}
1722
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001723/* keep track of the userspace vblank during modeset */
1724static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1725 u32 sw_event)
1726{
1727 struct sde_encoder_virt *sde_enc;
1728 bool enable;
1729 int i;
1730
1731 if (!drm_enc) {
1732 SDE_ERROR("invalid encoder\n");
1733 return;
1734 }
1735
1736 sde_enc = to_sde_encoder_virt(drm_enc);
1737 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1738 sw_event, sde_enc->vblank_enabled);
1739
1740 /* nothing to do if vblank not enabled by userspace */
1741 if (!sde_enc->vblank_enabled)
1742 return;
1743
1744 /* disable vblank on pre_modeset */
1745 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1746 enable = false;
1747 /* enable vblank on post_modeset */
1748 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1749 enable = true;
1750 else
1751 return;
1752
1753 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1754 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1755
1756 if (phys && phys->ops.control_vblank_irq)
1757 phys->ops.control_vblank_irq(phys, enable);
1758 }
1759}
1760
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001761struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1762{
1763 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001764
1765 if (!drm_enc)
1766 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001767 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001768 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001769}
1770
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001771static void _sde_encoder_resource_control_rsc_update(
1772 struct drm_encoder *drm_enc, bool enable)
1773{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001774 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001775 struct sde_encoder_virt *sde_enc;
1776
1777 if (!drm_enc) {
1778 SDE_ERROR("invalid encoder argument\n");
1779 return;
1780 }
1781 sde_enc = to_sde_encoder_virt(drm_enc);
1782 if (!sde_enc->crtc) {
1783 SDE_ERROR("invalid crtc\n");
1784 return;
1785 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001786
1787 if (enable) {
1788 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001789 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001790
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001791 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1792 } else {
1793 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001794 }
1795}
1796
/**
 * _sde_encoder_resource_control_helper - power up/down the encoder resources
 * @drm_enc: base drm encoder
 * @enable:  true to vote for clocks/irqs/QoS, false to release them
 *
 * Enable order: SDE core clocks -> DSI clocks -> irqs -> PM QoS vote.
 * Disable runs the exact mirror image so dependencies are torn down last.
 * The PM QoS vote is only applied for primary command-mode displays.
 *
 * Return: 0 on success, negative error code on failure (on the enable
 * path any vote already taken is rolled back before returning).
 */
static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_encoder_virt *sde_enc;
	int rc;
	bool is_cmd_mode, is_primary;

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	is_cmd_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_CMD_MODE;
	is_primary = sde_enc->disp_info.is_primary;

	SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
	SDE_EVT32(DRMID(drm_enc), enable);

	if (!sde_enc->cur_master) {
		SDE_ERROR("encoder master not set\n");
		return -EINVAL;
	}

	if (enable) {
		/* enable SDE core clks */
		rc = sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, true);
		if (rc) {
			SDE_ERROR("failed to enable power resource %d\n", rc);
			SDE_EVT32(rc, SDE_EVTLOG_ERROR);
			return rc;
		}

		/* enable DSI clks */
		rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
				true);
		if (rc) {
			SDE_ERROR("failed to enable clk control %d\n", rc);
			/* roll back the core clock vote taken above */
			sde_power_resource_enable(&priv->phandle,
					sde_kms->core_client, false);
			return rc;
		}

		/* enable all the irq */
		_sde_encoder_irq_control(drm_enc, true);

		/* QoS vote only for primary command-mode panels */
		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_add_request(drm_enc);

	} else {
		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_remove_request(drm_enc);

		/* disable all the irq */
		_sde_encoder_irq_control(drm_enc, false);

		/* disable DSI clks */
		sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);

		/* disable SDE core clks */
		sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, false);
	}

	return 0;
}
1865
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001866static void sde_encoder_input_event_handler(struct input_handle *handle,
1867 unsigned int type, unsigned int code, int value)
1868{
1869 struct drm_encoder *drm_enc = NULL;
1870 struct sde_encoder_virt *sde_enc = NULL;
1871 struct msm_drm_thread *disp_thread = NULL;
1872 struct msm_drm_private *priv = NULL;
1873
1874 if (!handle || !handle->handler || !handle->handler->private) {
1875 SDE_ERROR("invalid encoder for the input event\n");
1876 return;
1877 }
1878
1879 drm_enc = (struct drm_encoder *)handle->handler->private;
1880 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
1881 SDE_ERROR("invalid parameters\n");
1882 return;
1883 }
1884
1885 priv = drm_enc->dev->dev_private;
1886 sde_enc = to_sde_encoder_virt(drm_enc);
1887 if (!sde_enc->crtc || (sde_enc->crtc->index
1888 >= ARRAY_SIZE(priv->disp_thread))) {
1889 SDE_DEBUG_ENC(sde_enc,
1890 "invalid cached CRTC: %d or crtc index: %d\n",
1891 sde_enc->crtc == NULL,
1892 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
1893 return;
1894 }
1895
1896 SDE_EVT32_VERBOSE(DRMID(drm_enc));
1897
1898 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
1899
1900 kthread_queue_work(&disp_thread->worker,
1901 &sde_enc->input_event_work);
1902}
1903
1904
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001905static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
1906 u32 sw_event)
1907{
Dhaval Patel99412a52017-07-24 19:16:45 -07001908 bool autorefresh_enabled = false;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07001909 unsigned int lp, idle_pc_duration;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001910 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001911 struct msm_drm_private *priv;
1912 struct msm_drm_thread *disp_thread;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001913 int ret;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001914 bool is_vid_mode = false;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001915
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001916 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
1917 SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
1918 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001919 return -EINVAL;
1920 }
1921 sde_enc = to_sde_encoder_virt(drm_enc);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001922 priv = drm_enc->dev->dev_private;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001923 is_vid_mode = sde_enc->disp_info.capabilities &
1924 MSM_DISPLAY_CAP_VID_MODE;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001925
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001926 /*
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001927 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001928 * events and return early for other events (ie wb display).
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001929 */
1930 if (!sde_enc->idle_pc_supported &&
1931 (sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001932 sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
1933 sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
1934 sw_event != SDE_ENC_RC_EVENT_STOP &&
1935 sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001936 return 0;
1937
1938 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
1939 sde_enc->idle_pc_supported);
Dhaval Patela5f75952017-07-25 11:17:41 -07001940 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001941 sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);
1942
1943 switch (sw_event) {
1944 case SDE_ENC_RC_EVENT_KICKOFF:
1945 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001946 if (kthread_cancel_delayed_work_sync(
1947 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001948 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
1949 sw_event);
1950
1951 mutex_lock(&sde_enc->rc_lock);
1952
1953 /* return if the resource control is already in ON state */
1954 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
1955 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
1956 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001957 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1958 SDE_EVTLOG_FUNC_CASE1);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001959 mutex_unlock(&sde_enc->rc_lock);
1960 return 0;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001961 } else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
1962 sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
1963 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
1964 sw_event, sde_enc->rc_state);
1965 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1966 SDE_EVTLOG_ERROR);
1967 mutex_unlock(&sde_enc->rc_lock);
1968 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001969 }
1970
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001971 if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
1972 _sde_encoder_irq_control(drm_enc, true);
1973 } else {
1974 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001975 ret = _sde_encoder_resource_control_helper(drm_enc,
1976 true);
1977 if (ret) {
1978 SDE_ERROR_ENC(sde_enc,
1979 "sw_event:%d, rc in state %d\n",
1980 sw_event, sde_enc->rc_state);
1981 SDE_EVT32(DRMID(drm_enc), sw_event,
1982 sde_enc->rc_state,
1983 SDE_EVTLOG_ERROR);
1984 mutex_unlock(&sde_enc->rc_lock);
1985 return ret;
1986 }
1987
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001988 _sde_encoder_resource_control_rsc_update(drm_enc, true);
1989 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001990
1991 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
1992 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
1993 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
1994
1995 mutex_unlock(&sde_enc->rc_lock);
1996 break;
1997
1998 case SDE_ENC_RC_EVENT_FRAME_DONE:
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001999 if (!sde_enc->crtc) {
2000 SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
2001 return -EINVAL;
2002 }
2003
2004 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2005 SDE_ERROR("invalid crtc index :%u\n",
2006 sde_enc->crtc->index);
2007 return -EINVAL;
2008 }
2009 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2010
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002011 /*
2012 * mutex lock is not used as this event happens at interrupt
2013 * context. And locking is not required as, the other events
2014 * like KICKOFF and STOP does a wait-for-idle before executing
2015 * the resource_control
2016 */
2017 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2018 SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
2019 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002020 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2021 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002022 return -EINVAL;
2023 }
2024
2025 /*
2026 * schedule off work item only when there are no
2027 * frames pending
2028 */
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002029 if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002030 SDE_DEBUG_ENC(sde_enc, "skip schedule work");
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002031 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2032 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002033 return 0;
2034 }
2035
Dhaval Patel99412a52017-07-24 19:16:45 -07002036 /* schedule delayed off work if autorefresh is disabled */
2037 if (sde_enc->cur_master &&
2038 sde_enc->cur_master->ops.is_autorefresh_enabled)
2039 autorefresh_enabled =
2040 sde_enc->cur_master->ops.is_autorefresh_enabled(
2041 sde_enc->cur_master);
2042
Clarence Ip89628132017-07-27 13:33:51 -04002043 /* set idle timeout based on master connector's lp value */
2044 if (sde_enc->cur_master)
2045 lp = sde_connector_get_lp(
2046 sde_enc->cur_master->connector);
2047 else
2048 lp = SDE_MODE_DPMS_ON;
2049
2050 if (lp == SDE_MODE_DPMS_LP2)
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002051 idle_pc_duration = IDLE_SHORT_TIMEOUT;
Clarence Ip89628132017-07-27 13:33:51 -04002052 else
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002053 idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;
Clarence Ip89628132017-07-27 13:33:51 -04002054
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002055 if (!autorefresh_enabled)
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002056 kthread_mod_delayed_work(
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002057 &disp_thread->worker,
2058 &sde_enc->delayed_off_work,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002059 msecs_to_jiffies(idle_pc_duration));
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002060 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Clarence Ip89628132017-07-27 13:33:51 -04002061 autorefresh_enabled,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002062 idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002063 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
2064 sw_event);
2065 break;
2066
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002067 case SDE_ENC_RC_EVENT_PRE_STOP:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002068 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002069 if (kthread_cancel_delayed_work_sync(
2070 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002071 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2072 sw_event);
2073
2074 mutex_lock(&sde_enc->rc_lock);
2075
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002076 if (is_vid_mode &&
2077 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2078 _sde_encoder_irq_control(drm_enc, true);
2079 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002080 /* skip if is already OFF or IDLE, resources are off already */
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002081 else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002082 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2083 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
2084 sw_event, sde_enc->rc_state);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002085 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2086 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002087 mutex_unlock(&sde_enc->rc_lock);
2088 return 0;
2089 }
2090
2091 /**
2092 * IRQs are still enabled currently, which allows wait for
2093 * VBLANK which RSC may require to correctly transition to OFF
2094 */
2095 _sde_encoder_resource_control_rsc_update(drm_enc, false);
2096
2097 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2098 SDE_ENC_RC_STATE_PRE_OFF,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002099 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002100
2101 sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;
2102
2103 mutex_unlock(&sde_enc->rc_lock);
2104 break;
2105
2106 case SDE_ENC_RC_EVENT_STOP:
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002107 /* cancel vsync event work and timer */
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302108 kthread_cancel_work_sync(&sde_enc->vsync_event_work);
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002109 del_timer_sync(&sde_enc->vsync_event_timer);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002110
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302111 mutex_lock(&sde_enc->rc_lock);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002112 /* return if the resource control is already in OFF state */
2113 if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
2114 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
2115 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002116 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2117 SDE_EVTLOG_FUNC_CASE4);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002118 mutex_unlock(&sde_enc->rc_lock);
2119 return 0;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002120 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
2121 sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002122 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2123 sw_event, sde_enc->rc_state);
2124 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2125 SDE_EVTLOG_ERROR);
2126 mutex_unlock(&sde_enc->rc_lock);
2127 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002128 }
2129
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002130 /**
2131 * expect to arrive here only if in either idle state or pre-off
2132 * and in IDLE state the resources are already disabled
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002133 */
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002134 if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002135 _sde_encoder_resource_control_helper(drm_enc, false);
2136
2137 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002138 SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002139
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002140 sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;
2141
2142 mutex_unlock(&sde_enc->rc_lock);
2143 break;
2144
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002145 case SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002146 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002147 if (kthread_cancel_delayed_work_sync(
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002148 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002149 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2150 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002151
2152 mutex_lock(&sde_enc->rc_lock);
2153
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002154 /* return if the resource control is already in ON state */
2155 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2156 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002157 ret = _sde_encoder_resource_control_helper(drm_enc,
2158 true);
2159 if (ret) {
2160 SDE_ERROR_ENC(sde_enc,
2161 "sw_event:%d, rc in state %d\n",
2162 sw_event, sde_enc->rc_state);
2163 SDE_EVT32(DRMID(drm_enc), sw_event,
2164 sde_enc->rc_state,
2165 SDE_EVTLOG_ERROR);
2166 mutex_unlock(&sde_enc->rc_lock);
2167 return ret;
2168 }
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002169
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002170 _sde_encoder_resource_control_rsc_update(drm_enc, true);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002171
2172 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2173 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002174 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002175 }
2176
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002177 ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2178 if (ret && ret != -EWOULDBLOCK) {
2179 SDE_ERROR_ENC(sde_enc,
2180 "wait for commit done returned %d\n",
2181 ret);
2182 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2183 ret, SDE_EVTLOG_ERROR);
2184 mutex_unlock(&sde_enc->rc_lock);
2185 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002186 }
2187
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002188 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002189 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002190
2191 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2192 SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);
2193
2194 sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
2195 mutex_unlock(&sde_enc->rc_lock);
2196 break;
2197
2198 case SDE_ENC_RC_EVENT_POST_MODESET:
2199 mutex_lock(&sde_enc->rc_lock);
2200
2201 /* return if the resource control is already in ON state */
2202 if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
2203 SDE_ERROR_ENC(sde_enc,
2204 "sw_event:%d, rc:%d !MODESET state\n",
2205 sw_event, sde_enc->rc_state);
2206 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2207 SDE_EVTLOG_ERROR);
2208 mutex_unlock(&sde_enc->rc_lock);
2209 return -EINVAL;
2210 }
2211
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002212 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002213 _sde_encoder_irq_control(drm_enc, true);
2214
2215 _sde_encoder_update_rsc_client(drm_enc, NULL, true);
2216
2217 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2218 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);
2219
2220 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2221
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002222 mutex_unlock(&sde_enc->rc_lock);
2223 break;
2224
2225 case SDE_ENC_RC_EVENT_ENTER_IDLE:
2226 mutex_lock(&sde_enc->rc_lock);
2227
2228 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002229 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002230 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002231 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2232 SDE_EVTLOG_ERROR);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002233 mutex_unlock(&sde_enc->rc_lock);
2234 return 0;
2235 }
2236
2237 /*
2238 * if we are in ON but a frame was just kicked off,
2239 * ignore the IDLE event, it's probably a stale timer event
2240 */
2241 if (sde_enc->frame_busy_mask[0]) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002242 SDE_ERROR_ENC(sde_enc,
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002243 "sw_event:%d, rc:%d frame pending\n",
2244 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002245 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2246 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002247 mutex_unlock(&sde_enc->rc_lock);
2248 return 0;
2249 }
2250
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002251 if (is_vid_mode) {
2252 _sde_encoder_irq_control(drm_enc, false);
2253 } else {
2254 /* disable all the clks and resources */
2255 _sde_encoder_resource_control_rsc_update(drm_enc,
2256 false);
2257 _sde_encoder_resource_control_helper(drm_enc, false);
2258 }
2259
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002260 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002261 SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002262 sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
2263
2264 mutex_unlock(&sde_enc->rc_lock);
2265 break;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002266 case SDE_ENC_RC_EVENT_EARLY_WAKEUP:
2267 if (!sde_enc->crtc ||
2268 sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2269 SDE_DEBUG_ENC(sde_enc,
2270 "invalid crtc:%d or crtc index:%d , sw_event:%u\n",
2271 sde_enc->crtc == NULL,
2272 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL,
2273 sw_event);
2274 return -EINVAL;
2275 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002276
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002277 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2278
2279 mutex_lock(&sde_enc->rc_lock);
2280
2281 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2282 if (sde_enc->cur_master &&
2283 sde_enc->cur_master->ops.is_autorefresh_enabled)
2284 autorefresh_enabled =
2285 sde_enc->cur_master->ops.is_autorefresh_enabled(
2286 sde_enc->cur_master);
2287 if (autorefresh_enabled) {
2288 SDE_DEBUG_ENC(sde_enc,
2289 "not handling early wakeup since auto refresh is enabled\n");
Jeykumar Sankaran067b3b92018-01-19 10:35:22 -08002290 mutex_unlock(&sde_enc->rc_lock);
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002291 return 0;
2292 }
2293
2294 if (!sde_crtc_frame_pending(sde_enc->crtc))
2295 kthread_mod_delayed_work(&disp_thread->worker,
2296 &sde_enc->delayed_off_work,
2297 msecs_to_jiffies(
2298 IDLE_POWERCOLLAPSE_DURATION));
2299 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2300 /* enable all the clks and resources */
2301 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2302 _sde_encoder_resource_control_helper(drm_enc, true);
2303
2304 kthread_mod_delayed_work(&disp_thread->worker,
2305 &sde_enc->delayed_off_work,
2306 msecs_to_jiffies(
2307 IDLE_POWERCOLLAPSE_DURATION));
2308
2309 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2310 }
2311
2312 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2313 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE8);
2314
2315 mutex_unlock(&sde_enc->rc_lock);
2316 break;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002317 default:
Dhaval Patela5f75952017-07-25 11:17:41 -07002318 SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002319 SDE_ERROR("unexpected sw_event: %d\n", sw_event);
2320 break;
2321 }
2322
Dhaval Patela5f75952017-07-25 11:17:41 -07002323 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002324 sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
2325 return 0;
2326}
2327
/*
 * sde_encoder_virt_mode_set - drm encoder mode_set callback for the virtual
 *	encoder
 * @drm_enc: Pointer to drm encoder structure
 * @mode: requested display mode
 * @adj_mode: adjusted display mode that will actually be programmed
 *
 * Validates the encoder/connector state, queries the connector for mode
 * info, reserves hardware resources from the resource manager, distributes
 * the reserved pingpong/DSC blocks to the physical encoders and forwards
 * mode_set to each of them. For seamless dynamic mode switch (DMS) requests
 * it additionally brackets the reservation with PRE_MODESET/POST_MODESET
 * resource control events.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	/* mode_set programs hw registers; bail out if power is not up */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	/* find the connector currently attached to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh cached mode info (topology, compression) for adj_mode */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the reserved pingpong blocks; unused slots become NULL */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks, if any were granted */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/*
	 * hand each physical encoder its pingpong block and connector,
	 * then forward the mode_set; a phys encoder without a pingpong
	 * block cannot operate, so treat that as fatal
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2462
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002463void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2464{
2465 struct sde_encoder_virt *sde_enc;
2466 struct sde_encoder_phys *phys;
2467 int i;
2468
2469 if (!drm_enc) {
2470 SDE_ERROR("invalid parameters\n");
2471 return;
2472 }
2473
2474 sde_enc = to_sde_encoder_virt(drm_enc);
2475 if (!sde_enc) {
2476 SDE_ERROR("invalid sde encoder\n");
2477 return;
2478 }
2479
2480 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2481 phys = sde_enc->phys_encs[i];
2482 if (phys && phys->ops.control_te)
2483 phys->ops.control_te(phys, enable);
2484 }
2485}
2486
/*
 * _sde_encoder_virt_enable_helper - common enable-time setup shared by the
 *	enable and restore paths
 * @drm_enc: Pointer to drm encoder structure
 *
 * Performs the mdp-top level programming that must happen once a master
 * physical encoder exists: audio intf selection for DP, UBWC reset, vsync
 * source selection, tearcheck enable, and clearing of the cached ROIs.
 * Requires sde_enc->cur_master to already be assigned by the caller.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the intf only for DisplayPort connectors */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
				sde_enc->cur_master->hw_mdptop);

	/* reset UBWC configuration at mdp top level, if supported */
	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	/* select vsync source, then enable tearcheck on all phys encoders */
	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* forget any ROIs from a previous use case */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2529
2530void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2531{
2532 struct sde_encoder_virt *sde_enc = NULL;
2533 int i;
2534
2535 if (!drm_enc) {
2536 SDE_ERROR("invalid encoder\n");
2537 return;
2538 }
2539 sde_enc = to_sde_encoder_virt(drm_enc);
2540
2541 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2542 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2543
2544 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2545 phys->ops.restore(phys);
2546 }
2547
2548 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2549 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2550
2551 _sde_encoder_virt_enable_helper(drm_enc);
2552}
2553
/*
 * sde_encoder_virt_enable - drm encoder enable callback for the virtual
 *	encoder
 * @drm_enc: Pointer to drm encoder structure
 *
 * Selects the master physical encoder, issues the KICKOFF resource control
 * event to power up clocks/resources, then enables (or, for a seamless DMS
 * switch, restores) each physical encoder — slaves first, master last —
 * and finishes with the common enable-time setup helper.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* enable programs hw registers; bail out if power is not up */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	/* cache the crtc if mode_set did not already do so */
	if (drm_enc->crtc && !sde_enc->crtc)
		sde_enc->crtc = drm_enc->crtc;

	comp_info = &mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* pick the first phys encoder reporting itself as master */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* bring up clocks/resources before touching the phys encoders */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* MISR collection only applies to video-mode displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* master encoder is enabled/restored last */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
2645
/*
 * sde_encoder_virt_disable - drm encoder disable callback for the virtual
 *	encoder
 * @drm_enc: Pointer to drm encoder structure
 *
 * Stops ESD status polling, waits for any in-flight frame to complete,
 * disables the physical encoders and DSC blocks, issues the PRE_STOP/STOP
 * resource control events in the order required by the encoder type, and
 * finally releases the hardware reservation back to the resource manager.
 */
static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct drm_connector *drm_conn = NULL;
	enum sde_intf_mode intf_mode;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	intf_mode = sde_encoder_get_intf_mode(drm_enc);

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * Disable ESD thread
	 * NOTE(review): cur_master is dereferenced here without a NULL
	 * check, unlike the enable path — confirm disable cannot be
	 * reached before a master has been assigned.
	 */
	drm_conn = sde_enc->cur_master->connector;
	sde_connector_schedule_status_work(drm_conn, false);

	/* wait for idle */
	sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	/*
	 * For primary command mode encoders, execute the resource control
	 * pre-stop operations before the physical encoders are disabled, to
	 * allow the rsc to transition its states properly.
	 *
	 * For other encoder types, rsc should not be enabled until after
	 * they have been fully disabled, so delay the pre-stop operations
	 * until after the physical disable calls have returned.
	 */
	if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
		sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_STOP);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

			if (phys && phys->ops.disable)
				phys->ops.disable(phys);
		}
	} else {
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

			if (phys && phys->ops.disable)
				phys->ops.disable(phys);
		}
		sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_STOP);
	}

	/*
	 * disable dsc after the transfer is complete (for command mode)
	 * and after physical encoder is disabled, to make sure timing
	 * engine is already disabled (for video mode).
	 */
	_sde_encoder_dsc_disable(sde_enc);

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);

	/* detach the connector from every physical encoder */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		if (sde_enc->phys_encs[i])
			sde_enc->phys_encs[i]->connector = NULL;
	}

	sde_enc->cur_master = NULL;
	/*
	 * clear the cached crtc in sde_enc on use case finish, after all the
	 * outstanding events and timers have been completed
	 */
	sde_enc->crtc = NULL;

	SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");

	/* return the reserved hw blocks to the resource manager */
	sde_rm_release(&sde_kms->rm, drm_enc);
}
2741
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002742static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002743 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002744{
2745 int i = 0;
2746
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002747 for (i = 0; i < catalog->intf_count; i++) {
2748 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002749 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002750 return catalog->intf[i].id;
2751 }
2752 }
2753
2754 return INTF_MAX;
2755}
2756
Alan Kwongbb27c092016-07-20 16:41:25 -04002757static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2758 enum sde_intf_type type, u32 controller_id)
2759{
2760 if (controller_id < catalog->wb_count)
2761 return catalog->wb[controller_id].id;
2762
2763 return WB_MAX;
2764}
2765
Dhaval Patel81e87882016-10-19 21:41:56 -07002766static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
2767 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002768{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002769 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002770 unsigned long lock_flags;
2771
Dhaval Patel81e87882016-10-19 21:41:56 -07002772 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002773 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002774
Narendra Muppalla77b32932017-05-10 13:53:11 -07002775 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002776 sde_enc = to_sde_encoder_virt(drm_enc);
2777
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002778 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002779 if (sde_enc->crtc_vblank_cb)
2780 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002781 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07002782
2783 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07002784 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002785}
2786
/*
 * sde_encoder_underrun_callback - underrun interrupt notification from a
 *	physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phy_enc: Pointer to the physical encoder that raised the underrun
 *
 * Counts the underrun, records it in the event/trace logs, and triggers
 * the configurable debug actions (ftrace stop, panic) if those debug
 * controls are enabled.
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	/* increment first so the logs below report the new count */
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));

	trace_sde_encoder_underrun(DRMID(drm_enc),
		atomic_read(&phy_enc->underrun_cnt));

	/* debug hooks: only act when enabled via SDE debug control */
	SDE_DBG_CTRL("stop_ftrace");
	SDE_DBG_CTRL("panic_underrun");

	SDE_ATRACE_END("encoder_underrun_callback");
}
2805
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002806void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2807 void (*vbl_cb)(void *), void *vbl_data)
2808{
2809 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2810 unsigned long lock_flags;
2811 bool enable;
2812 int i;
2813
2814 enable = vbl_cb ? true : false;
2815
Clarence Ip19af1362016-09-23 14:57:51 -04002816 if (!drm_enc) {
2817 SDE_ERROR("invalid encoder\n");
2818 return;
2819 }
2820 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002821 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002822
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002823 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002824 sde_enc->crtc_vblank_cb = vbl_cb;
2825 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002826 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002827
2828 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2829 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2830
2831 if (phys && phys->ops.control_vblank_irq)
2832 phys->ops.control_vblank_irq(phys, enable);
2833 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002834 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002835}
2836
Alan Kwong628d19e2016-10-31 13:50:13 -04002837void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2838 void (*frame_event_cb)(void *, u32 event),
2839 void *frame_event_cb_data)
2840{
2841 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2842 unsigned long lock_flags;
2843 bool enable;
2844
2845 enable = frame_event_cb ? true : false;
2846
2847 if (!drm_enc) {
2848 SDE_ERROR("invalid encoder\n");
2849 return;
2850 }
2851 SDE_DEBUG_ENC(sde_enc, "\n");
2852 SDE_EVT32(DRMID(drm_enc), enable, 0);
2853
2854 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2855 sde_enc->crtc_frame_event_cb = frame_event_cb;
2856 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2857 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2858}
2859
/*
 * sde_encoder_frame_done_callback - frame event notification from a
 *	physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @ready_phys: physical encoder reporting the event
 * @event: SDE_ENCODER_FRAME_EVENT_* bitmask
 *
 * For frame-completion style events (done/error/panel-dead), clears the
 * reporting encoder's bit in frame_busy_mask; once all physical encoders
 * have reported, issues the FRAME_DONE resource control event and notifies
 * the crtc. All other events are forwarded to the crtc immediately.
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (SDE_ENCODER_FRAME_EVENT_DONE
			| SDE_ENCODER_FRAME_EVENT_ERROR
			| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!sde_enc->frame_busy_mask[0]) {
			/**
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			if (sde_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, sde_enc->frame_busy_mask);
				SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
			}
		}

		/* all phys encoders idle: frame is fully done */
		if (!sde_enc->frame_busy_mask[0]) {
			sde_encoder_resource_control(drm_enc,
					SDE_ENC_RC_EVENT_FRAME_DONE);

			if (sde_enc->crtc_frame_event_cb)
				sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		/* non-completion events go straight to the crtc */
		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
2904
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002905int sde_encoder_idle_request(struct drm_encoder *drm_enc)
2906{
2907 struct sde_encoder_virt *sde_enc;
2908
2909 if (!drm_enc) {
2910 SDE_ERROR("invalid drm encoder\n");
2911 return -EINVAL;
2912 }
2913
2914 sde_enc = to_sde_encoder_virt(drm_enc);
2915 sde_encoder_resource_control(&sde_enc->base,
2916 SDE_ENC_RC_EVENT_ENTER_IDLE);
2917
2918 return 0;
2919}
2920
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002921static void sde_encoder_off_work(struct kthread_work *work)
2922{
2923 struct sde_encoder_virt *sde_enc = container_of(work,
2924 struct sde_encoder_virt, delayed_off_work.work);
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002925 struct drm_encoder *drm_enc;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002926
2927 if (!sde_enc) {
2928 SDE_ERROR("invalid sde encoder\n");
2929 return;
2930 }
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002931 drm_enc = &sde_enc->base;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002932
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002933 sde_encoder_idle_request(drm_enc);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002934}
2935
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	/* skipped encoders take no part in this frame; nothing to flush */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	/* count this kickoff before touching any flush registers */
	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* only the master signals the retire fence for the frame */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
			pending_kickoff_cnt, ctl->idx - CTL_0,
			ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
			ctl->idx - CTL_0, pending_kickoff_cnt);
}
2991
2992/**
2993 * _sde_encoder_trigger_start - trigger start for a physical encoder
2994 * phys: Pointer to physical encoder structure
2995 */
2996static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2997{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002998 struct sde_hw_ctl *ctl;
2999
Clarence Ip110d15c2016-08-16 14:44:41 -04003000 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003001 SDE_ERROR("invalid argument(s)\n");
3002 return;
3003 }
3004
3005 if (!phys->hw_pp) {
3006 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04003007 return;
3008 }
3009
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003010 ctl = phys->hw_ctl;
3011 if (phys->split_role == ENC_ROLE_SKIP) {
3012 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
3013 "skip start pp%d ctl%d\n",
3014 phys->hw_pp->idx - PINGPONG_0,
3015 ctl->idx - CTL_0);
3016 return;
3017 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003018 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
3019 phys->ops.trigger_start(phys);
3020}
3021
Alan Kwong4212dd42017-09-19 17:22:33 -04003022void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
3023{
3024 struct sde_hw_ctl *ctl;
3025
3026 if (!phys_enc) {
3027 SDE_ERROR("invalid encoder\n");
3028 return;
3029 }
3030
3031 ctl = phys_enc->hw_ctl;
3032 if (ctl && ctl->ops.trigger_flush)
3033 ctl->ops.trigger_flush(ctl);
3034}
3035
Clarence Ip110d15c2016-08-16 14:44:41 -04003036void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
3037{
3038 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04003039
3040 if (!phys_enc) {
3041 SDE_ERROR("invalid encoder\n");
3042 return;
3043 }
3044
3045 ctl = phys_enc->hw_ctl;
3046 if (ctl && ctl->ops.trigger_start) {
3047 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04003048 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04003049 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003050}
3051
/**
 * _sde_encoder_wait_timeout - wait for info->atomic_cnt to reach zero
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @timeout_ms: total time budget in milliseconds
 * @info: wait descriptor holding the wait queue and the atomic counter
 *
 * Re-arms the wait after spurious zero-return wakeups until either the
 * counter drops to zero or the absolute deadline passes. Returns the
 * last wait_event_timeout() result (0 on timeout, >0 remaining jiffies).
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
3073
/**
 * sde_encoder_helper_wait_event_timeout - wait for an encoder event with
 *	tolerance for a late-firing timer irq
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @info: wait descriptor (wait queue, atomic counter, timeout_ms)
 *
 * If the first wait times out but the wall clock shows the timer itself
 * fired late (irqs were disabled), retry once with an extra grace period.
 * Returns the final wait result (0 on timeout, >0 on success).
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
	struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/**
	 * handle disabled irq case where timer irq is also delayed.
	 * wait for additional timeout of FAULT_TOLERENCE_WAIT_IN_MS
	 * if it event_timeout expired late detected.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
	    (ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
	    FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
			FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
3095
/**
 * sde_encoder_helper_hw_reset - recover a physical encoder after a fault
 * @phys_enc: Pointer to physical encoder structure
 *
 * On the master encoder, requests a panel soft reset through the
 * connector; dumps debug state and panics (via SDE_DBG_DUMP "panic") if
 * that fails. Finally marks the encoder enabled again.
 *
 * NOTE(review): this checks for ctl->ops.reset but never invokes it in
 * the visible code — confirm whether the ctl reset call moved elsewhere
 * or is missing.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* only the master owns the panel; ask it for a soft reset */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
3135
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		bool wait_for_dma = false;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->ops.wait_dma_trigger)
			wait_for_dma = phys->ops.wait_dma_trigger(phys);

		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
				wait_for_dma);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/* encoders needing single flush contribute bits instead */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	/* start the master last, after all flushes have been queued */
	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
3228
/**
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - for ppsplit
 *	topologies, swap the two interfaces so the master physical encoder
 *	always drives hardware that generates irqs
 * @drm_enc: Pointer to drm encoder structure
 * @affected_displays: bitmask of displays touched by this update; may be
 *	rewritten to BIT(0) when only one physical encoder is active
 * @num_active_phys: number of bits set in @affected_displays
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3287
/**
 * _sde_encoder_update_master - reassign split roles (solo/master/slave/
 *	skip) to each physical encoder based on which displays the current
 *	update touches, and record the new master in sde_enc->cur_master
 * @drm_enc: Pointer to drm encoder structure
 * @params: kickoff parameters carrying the affected_displays bitmask
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* nothing to rebalance with a single physical encoder */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
			num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/* first active phys becomes master; later ones slaves */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
3350
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303351bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003352{
3353 struct sde_encoder_virt *sde_enc;
3354 struct msm_display_info *disp_info;
3355
3356 if (!drm_enc) {
3357 SDE_ERROR("invalid encoder\n");
3358 return false;
3359 }
3360
3361 sde_enc = to_sde_encoder_virt(drm_enc);
3362 disp_info = &sde_enc->disp_info;
3363
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303364 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003365}
3366
Dhaval Patel0e558f42017-04-30 00:51:40 -07003367void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3368{
3369 struct sde_encoder_virt *sde_enc;
3370 struct sde_encoder_phys *phys;
3371 unsigned int i;
3372 struct sde_hw_ctl *ctl;
3373 struct msm_display_info *disp_info;
3374
3375 if (!drm_enc) {
3376 SDE_ERROR("invalid encoder\n");
3377 return;
3378 }
3379 sde_enc = to_sde_encoder_virt(drm_enc);
3380 disp_info = &sde_enc->disp_info;
3381
3382 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3383 phys = sde_enc->phys_encs[i];
3384
3385 if (phys && phys->hw_ctl) {
3386 ctl = phys->hw_ctl;
3387 if (ctl->ops.clear_pending_flush)
3388 ctl->ops.clear_pending_flush(ctl);
3389
3390 /* update only for command mode primary ctl */
3391 if ((phys == sde_enc->cur_master) &&
3392 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3393 && ctl->ops.trigger_pending)
3394 ctl->ops.trigger_pending(ctl);
3395 }
3396 }
3397}
3398
Ping Li8430ee12017-02-24 14:14:44 -08003399static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3400{
3401 void *dither_cfg;
3402 int ret = 0;
3403 size_t len = 0;
3404 enum sde_rm_topology_name topology;
3405
3406 if (!phys || !phys->connector || !phys->hw_pp ||
3407 !phys->hw_pp->ops.setup_dither)
3408 return;
3409 topology = sde_connector_get_topology_name(phys->connector);
3410 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3411 (phys->split_role == ENC_ROLE_SLAVE))
3412 return;
3413
3414 ret = sde_connector_get_dither_cfg(phys->connector,
3415 phys->connector->state, &dither_cfg, &len);
3416 if (!ret)
3417 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
3418}
3419
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003420static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3421 struct drm_display_mode *mode)
3422{
3423 u64 pclk_rate;
3424 u32 pclk_period;
3425 u32 line_time;
3426
3427 /*
3428 * For linetime calculation, only operate on master encoder.
3429 */
3430 if (!sde_enc->cur_master)
3431 return 0;
3432
3433 if (!sde_enc->cur_master->ops.get_line_count) {
3434 SDE_ERROR("get_line_count function not defined\n");
3435 return 0;
3436 }
3437
3438 pclk_rate = mode->clock; /* pixel clock in kHz */
3439 if (pclk_rate == 0) {
3440 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3441 return 0;
3442 }
3443
3444 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3445 if (pclk_period == 0) {
3446 SDE_ERROR("pclk period is 0\n");
3447 return 0;
3448 }
3449
3450 /*
3451 * Line time calculation based on Pixel clock and HTOTAL.
3452 * Final unit is in ns.
3453 */
3454 line_time = (pclk_period * mode->htotal) / 1000;
3455 if (line_time == 0) {
3456 SDE_ERROR("line time calculation is 0\n");
3457 return 0;
3458 }
3459
3460 SDE_DEBUG_ENC(sde_enc,
3461 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3462 pclk_rate, pclk_period, line_time);
3463
3464 return line_time;
3465}
3466
/**
 * _sde_encoder_wakeup_time - compute the absolute time of the next vsync
 * @drm_enc: Pointer to drm encoder structure
 * @wakeup_time: output; ktime at which the next vsync is expected
 *
 * Uses the master's cached mode and current line counter. Returns 0 on
 * success, -EINVAL if the line time or time-to-vsync cannot be computed.
 */
static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct sde_encoder_virt *sde_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	sde_enc = to_sde_encoder_virt(drm_enc);
	mode = &sde_enc->cur_master->cached_mode;

	line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);

	/* past the last line: assume a full frame until the next vsync */
	vtotal = mode->vtotal;
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	SDE_DEBUG_ENC(sde_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}
3508
3509static void sde_encoder_vsync_event_handler(unsigned long data)
3510{
3511 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3512 struct sde_encoder_virt *sde_enc;
3513 struct msm_drm_private *priv;
3514 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003515
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003516 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3517 SDE_ERROR("invalid encoder parameters\n");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003518 return;
3519 }
3520
3521 sde_enc = to_sde_encoder_virt(drm_enc);
3522 priv = drm_enc->dev->dev_private;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003523 if (!sde_enc->crtc) {
3524 SDE_ERROR("invalid crtc");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003525 return;
3526 }
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003527
3528 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3529 SDE_ERROR("invalid crtc index:%u\n",
3530 sde_enc->crtc->index);
3531 return;
3532 }
3533 event_thread = &priv->event_thread[sde_enc->crtc->index];
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003534 if (!event_thread) {
3535 SDE_ERROR("event_thread not found for crtc:%d\n",
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003536 sde_enc->crtc->index);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003537 return;
3538 }
3539
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303540 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003541 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003542}
3543
Dhaval Patel222023e2018-02-27 12:24:07 -08003544static void sde_encoder_esd_trigger_work_handler(struct kthread_work *work)
3545{
3546 struct sde_encoder_virt *sde_enc = container_of(work,
3547 struct sde_encoder_virt, esd_trigger_work);
3548
3549 if (!sde_enc) {
3550 SDE_ERROR("invalid sde encoder\n");
3551 return;
3552 }
3553
3554 sde_encoder_resource_control(&sde_enc->base,
3555 SDE_ENC_RC_EVENT_KICKOFF);
3556}
3557
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08003558static void sde_encoder_input_event_work_handler(struct kthread_work *work)
3559{
3560 struct sde_encoder_virt *sde_enc = container_of(work,
3561 struct sde_encoder_virt, input_event_work);
3562
3563 if (!sde_enc) {
3564 SDE_ERROR("invalid sde encoder\n");
3565 return;
3566 }
3567
3568 sde_encoder_resource_control(&sde_enc->base,
3569 SDE_ENC_RC_EVENT_EARLY_WAKEUP);
3570}
3571
3572static int _sde_encoder_input_connect(struct input_handler *handler,
3573 struct input_dev *dev, const struct input_device_id *id)
3574{
3575 struct input_handle *handle;
3576 int rc = 0;
3577
3578 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
3579 if (!handle)
3580 return -ENOMEM;
3581
3582 handle->dev = dev;
3583 handle->handler = handler;
3584 handle->name = handler->name;
3585
3586 rc = input_register_handle(handle);
3587 if (rc) {
3588 pr_err("failed to register input handle\n");
3589 goto error;
3590 }
3591
3592 rc = input_open_device(handle);
3593 if (rc) {
3594 pr_err("failed to open input device\n");
3595 goto error_unregister;
3596 }
3597
3598 return 0;
3599
3600error_unregister:
3601 input_unregister_handle(handle);
3602
3603error:
3604 kfree(handle);
3605
3606 return rc;
3607}
3608
/* tear down a handle created by _sde_encoder_input_connect() */
static void _sde_encoder_input_disconnect(struct input_handle *handle)
{
	input_close_device(handle);
	input_unregister_handle(handle);
	kfree(handle);
}
3615
/**
 * Structure for specifying event parameters on which to receive callbacks.
 * This structure will trigger a callback in case of a touch event (specified
 * by EV_ABS) where there is a change in X and Y coordinates, i.e. any
 * multi-touch position report. The terminating empty entry ends the table.
 */
static const struct input_device_id sde_input_ids[] = {
	{
		.flags = INPUT_DEVICE_ID_MATCH_EVBIT,
		.evbit = { BIT_MASK(EV_ABS) },
		.absbit = { [BIT_WORD(ABS_MT_POSITION_X)] =
				BIT_MASK(ABS_MT_POSITION_X) |
				BIT_MASK(ABS_MT_POSITION_Y) },
	},
	{ },
};
3631
3632static int _sde_encoder_input_handler(
3633 struct sde_encoder_virt *sde_enc)
3634{
3635 struct input_handler *input_handler = NULL;
3636 int rc = 0;
3637
3638 if (sde_enc->input_handler) {
3639 SDE_ERROR_ENC(sde_enc,
3640 "input_handle is active. unexpected\n");
3641 return -EINVAL;
3642 }
3643
3644 input_handler = kzalloc(sizeof(*sde_enc->input_handler), GFP_KERNEL);
3645 if (!input_handler)
3646 return -ENOMEM;
3647
3648 input_handler->event = sde_encoder_input_event_handler;
3649 input_handler->connect = _sde_encoder_input_connect;
3650 input_handler->disconnect = _sde_encoder_input_disconnect;
3651 input_handler->name = "sde";
3652 input_handler->id_table = sde_input_ids;
3653 input_handler->private = sde_enc;
3654
3655 rc = input_register_handler(input_handler);
3656 if (rc) {
3657 SDE_ERROR_ENC(sde_enc,
3658 "input_register_handler failed, rc= %d\n", rc);
3659 kfree(input_handler);
3660 return rc;
3661 }
3662
3663 sde_enc->input_handler = input_handler;
3664
3665 return rc;
3666}
3667
/**
 * sde_encoder_vsync_event_work_handler - vsync work; when autorefresh is
 *	enabled, re-arm the vsync event timer for the next expected vsync
 * @work: embedded vsync_event_work in the virtual encoder
 *
 * Takes a power vote while reading hardware state and releases it on all
 * exit paths via the exit label.
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* vote for power before touching autorefresh/line-count registers */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
						sde_enc->cur_master);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		goto exit;

	rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
	if (rc)
		goto exit;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));

exit:
	/* drop the power vote taken above */
	_sde_encoder_power_enable(sde_enc, false);
}
3708
/**
 * sde_encoder_poll_line_counts - busy-poll the master's line counter until
 *	it wraps (a sample lower than the previous one marks the start of a
 *	new frame) or a ~50ms budget expires
 * @drm_enc: Pointer to drm encoder structure
 *
 * Returns 0 once a wrap is observed, -EINVAL if the master or its
 * get_line_count op is missing, -ETIMEDOUT if no wrap is seen in time.
 */
int sde_encoder_poll_line_counts(struct drm_encoder *drm_enc)
{
	static const uint64_t timeout_us = 50000;
	static const uint64_t sleep_us = 20;
	struct sde_encoder_virt *sde_enc;
	ktime_t cur_ktime, exp_ktime;
	uint32_t line_count, tmp, i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc->cur_master ||
			!sde_enc->cur_master->ops.get_line_count) {
		SDE_DEBUG_ENC(sde_enc, "can't get master line count\n");
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_ERROR);
		return -EINVAL;
	}

	exp_ktime = ktime_add_ms(ktime_get(), timeout_us / 1000);

	line_count = sde_enc->cur_master->ops.get_line_count(
			sde_enc->cur_master);

	/* iteration bound is a backstop; the ktime check below is primary */
	for (i = 0; i < (timeout_us * 2 / sleep_us); ++i) {
		tmp = line_count;
		line_count = sde_enc->cur_master->ops.get_line_count(
				sde_enc->cur_master);
		if (line_count < tmp) {
			/* counter moved backwards: new frame has started */
			SDE_EVT32(DRMID(drm_enc), line_count);
			return 0;
		}

		cur_ktime = ktime_get();
		if (ktime_compare_safe(exp_ktime, cur_ktime) <= 0)
			break;

		usleep_range(sleep_us / 2, sleep_us);
	}

	SDE_EVT32(DRMID(drm_enc), line_count, SDE_EVTLOG_ERROR);
	return -ETIMEDOUT;
}
3753
Clarence Ip85f4f4532017-10-04 12:10:13 -04003754int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
Alan Kwong4aacd532017-02-04 18:51:33 -08003755 struct sde_encoder_kickoff_params *params)
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003756{
3757 struct sde_encoder_virt *sde_enc;
3758 struct sde_encoder_phys *phys;
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003759 struct sde_kms *sde_kms = NULL;
3760 struct msm_drm_private *priv = NULL;
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003761 bool needs_hw_reset = false;
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003762 uint32_t ln_cnt1, ln_cnt2;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003763 unsigned int i;
Clarence Ip85f4f4532017-10-04 12:10:13 -04003764 int rc, ret = 0;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003765
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003766 if (!drm_enc || !params || !drm_enc->dev ||
3767 !drm_enc->dev->dev_private) {
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003768 SDE_ERROR("invalid args\n");
Clarence Ip85f4f4532017-10-04 12:10:13 -04003769 return -EINVAL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003770 }
3771 sde_enc = to_sde_encoder_virt(drm_enc);
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003772 priv = drm_enc->dev->dev_private;
3773 sde_kms = to_sde_kms(priv->kms);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003774
Clarence Ip19af1362016-09-23 14:57:51 -04003775 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003776 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003777
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003778 /* save this for later, in case of errors */
3779 if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
3780 ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
3781 sde_enc->cur_master);
3782 else
3783 ln_cnt1 = -EINVAL;
3784
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003785 /* prepare for next kickoff, may include waiting on previous kickoff */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003786 SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003787 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003788 phys = sde_enc->phys_encs[i];
Jayant Shekhar98e78a82018-01-12 17:50:55 +05303789 params->is_primary = sde_enc->disp_info.is_primary;
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003790 if (phys) {
Clarence Ip85f4f4532017-10-04 12:10:13 -04003791 if (phys->ops.prepare_for_kickoff) {
3792 rc = phys->ops.prepare_for_kickoff(
3793 phys, params);
3794 if (rc)
3795 ret = rc;
3796 }
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003797 if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
3798 needs_hw_reset = true;
Ping Li8430ee12017-02-24 14:14:44 -08003799 _sde_encoder_setup_dither(phys);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003800 }
3801 }
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003802 SDE_ATRACE_END("enc_prepare_for_kickoff");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003803
Alan Kwong1124f1f2017-11-10 18:14:39 -05003804 rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
3805 if (rc) {
3806 SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
3807 return rc;
3808 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003809
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003810 /* if any phys needs reset, reset all phys, in-order */
3811 if (needs_hw_reset) {
Clarence Ip5e3df1d2017-11-07 21:28:25 -05003812 /* query line count before cur_master is updated */
3813 if (sde_enc->cur_master &&
3814 sde_enc->cur_master->ops.get_wr_line_count)
3815 ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
3816 sde_enc->cur_master);
3817 else
3818 ln_cnt2 = -EINVAL;
3819
3820 SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
3821 SDE_EVTLOG_FUNC_CASE1);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003822 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3823 phys = sde_enc->phys_encs[i];
3824 if (phys && phys->ops.hw_reset)
3825 phys->ops.hw_reset(phys);
3826 }
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003827 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003828
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003829 _sde_encoder_update_master(drm_enc, params);
3830
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003831 _sde_encoder_update_roi(drm_enc);
3832
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003833 if (sde_enc->cur_master && sde_enc->cur_master->connector) {
3834 rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003835 if (rc) {
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003836 SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
3837 sde_enc->cur_master->connector->base.id,
3838 rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003839 ret = rc;
3840 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003841 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003842
Jeykumar Sankarand920ec72017-11-18 20:01:39 -08003843 if (_sde_encoder_is_dsc_enabled(drm_enc) &&
3844 !sde_kms->splash_data.cont_splash_en) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003845 rc = _sde_encoder_dsc_setup(sde_enc, params);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003846 if (rc) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003847 SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003848 ret = rc;
3849 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003850 }
Clarence Ip85f4f4532017-10-04 12:10:13 -04003851
3852 return ret;
Alan Kwong628d19e2016-10-31 13:50:13 -04003853}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003854
Clarence Ip662698e2017-09-12 18:34:16 -04003855/**
3856 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3857 * with the specified encoder, and unstage all pipes from it
3858 * @encoder: encoder pointer
3859 * Returns: 0 on success
3860 */
3861static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
3862{
3863 struct sde_encoder_virt *sde_enc;
3864 struct sde_encoder_phys *phys;
3865 unsigned int i;
3866 int rc = 0;
3867
3868 if (!drm_enc) {
3869 SDE_ERROR("invalid encoder\n");
3870 return -EINVAL;
3871 }
3872
3873 sde_enc = to_sde_encoder_virt(drm_enc);
3874
3875 SDE_ATRACE_BEGIN("encoder_release_lm");
3876 SDE_DEBUG_ENC(sde_enc, "\n");
3877
3878 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3879 phys = sde_enc->phys_encs[i];
3880 if (!phys)
3881 continue;
3882
3883 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
3884
3885 rc = sde_encoder_helper_reset_mixers(phys, NULL);
3886 if (rc)
3887 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
3888 }
3889
3890 SDE_ATRACE_END("encoder_release_lm");
3891 return rc;
3892}
3893
3894void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
Alan Kwong628d19e2016-10-31 13:50:13 -04003895{
3896 struct sde_encoder_virt *sde_enc;
3897 struct sde_encoder_phys *phys;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003898 ktime_t wakeup_time;
Alan Kwong628d19e2016-10-31 13:50:13 -04003899 unsigned int i;
3900
3901 if (!drm_enc) {
3902 SDE_ERROR("invalid encoder\n");
3903 return;
3904 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07003905 SDE_ATRACE_BEGIN("encoder_kickoff");
Alan Kwong628d19e2016-10-31 13:50:13 -04003906 sde_enc = to_sde_encoder_virt(drm_enc);
3907
3908 SDE_DEBUG_ENC(sde_enc, "\n");
3909
Clarence Ip662698e2017-09-12 18:34:16 -04003910 /* create a 'no pipes' commit to release buffers on errors */
3911 if (is_error)
3912 _sde_encoder_reset_ctl_hw(drm_enc);
3913
Alan Kwong628d19e2016-10-31 13:50:13 -04003914 /* All phys encs are ready to go, trigger the kickoff */
Clarence Ip110d15c2016-08-16 14:44:41 -04003915 _sde_encoder_kickoff_phys(sde_enc);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003916
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003917 /* allow phys encs to handle any post-kickoff business */
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003918 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003919 phys = sde_enc->phys_encs[i];
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003920 if (phys && phys->ops.handle_post_kickoff)
3921 phys->ops.handle_post_kickoff(phys);
3922 }
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003923
3924 if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
3925 !_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
3926 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
3927 mod_timer(&sde_enc->vsync_event_timer,
3928 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
3929 }
3930
Narendra Muppalla77b32932017-05-10 13:53:11 -07003931 SDE_ATRACE_END("encoder_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003932}
3933
Clarence Ip662698e2017-09-12 18:34:16 -04003934int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003935 struct drm_framebuffer *fb)
3936{
3937 struct drm_encoder *drm_enc;
3938 struct sde_hw_mixer_cfg mixer;
3939 struct sde_rm_hw_iter lm_iter;
3940 bool lm_valid = false;
3941
3942 if (!phys_enc || !phys_enc->parent) {
3943 SDE_ERROR("invalid encoder\n");
3944 return -EINVAL;
3945 }
3946
3947 drm_enc = phys_enc->parent;
3948 memset(&mixer, 0, sizeof(mixer));
3949
3950 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003951 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
3952 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
3953
3954 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
3955 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
3956 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
3957
3958 if (!hw_lm)
3959 continue;
3960
3961 /* need to flush LM to remove it */
3962 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
3963 phys_enc->hw_ctl->ops.update_pending_flush)
3964 phys_enc->hw_ctl->ops.update_pending_flush(
3965 phys_enc->hw_ctl,
3966 phys_enc->hw_ctl->ops.get_bitmask_mixer(
3967 phys_enc->hw_ctl, hw_lm->idx));
3968
3969 if (fb) {
3970 /* assume a single LM if targeting a frame buffer */
3971 if (lm_valid)
3972 continue;
3973
3974 mixer.out_height = fb->height;
3975 mixer.out_width = fb->width;
3976
3977 if (hw_lm->ops.setup_mixer_out)
3978 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
3979 }
3980
3981 lm_valid = true;
3982
3983 /* only enable border color on LM */
3984 if (phys_enc->hw_ctl->ops.setup_blendstage)
3985 phys_enc->hw_ctl->ops.setup_blendstage(
Dhaval Patel572cfd22017-06-12 19:33:39 -07003986 phys_enc->hw_ctl, hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003987 }
3988
3989 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04003990 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003991 return -EFAULT;
3992 }
3993 return 0;
3994}
3995
Lloyd Atkinsone123c172017-02-27 13:19:08 -05003996void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
3997{
3998 struct sde_encoder_virt *sde_enc;
3999 struct sde_encoder_phys *phys;
4000 int i;
4001
4002 if (!drm_enc) {
4003 SDE_ERROR("invalid encoder\n");
4004 return;
4005 }
4006 sde_enc = to_sde_encoder_virt(drm_enc);
4007
4008 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4009 phys = sde_enc->phys_encs[i];
4010 if (phys && phys->ops.prepare_commit)
4011 phys->ops.prepare_commit(phys);
4012 }
4013}
4014
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07004015#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004016static int _sde_encoder_status_show(struct seq_file *s, void *data)
4017{
4018 struct sde_encoder_virt *sde_enc;
4019 int i;
4020
4021 if (!s || !s->private)
4022 return -EINVAL;
4023
4024 sde_enc = s->private;
4025
4026 mutex_lock(&sde_enc->enc_lock);
4027 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4028 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4029
4030 if (!phys)
4031 continue;
4032
4033 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
4034 phys->intf_idx - INTF_0,
4035 atomic_read(&phys->vsync_cnt),
4036 atomic_read(&phys->underrun_cnt));
4037
4038 switch (phys->intf_mode) {
4039 case INTF_MODE_VIDEO:
4040 seq_puts(s, "mode: video\n");
4041 break;
4042 case INTF_MODE_CMD:
4043 seq_puts(s, "mode: command\n");
4044 break;
4045 case INTF_MODE_WB_BLOCK:
4046 seq_puts(s, "mode: wb block\n");
4047 break;
4048 case INTF_MODE_WB_LINE:
4049 seq_puts(s, "mode: wb line\n");
4050 break;
4051 default:
4052 seq_puts(s, "mode: ???\n");
4053 break;
4054 }
4055 }
4056 mutex_unlock(&sde_enc->enc_lock);
4057
4058 return 0;
4059}
4060
/* debugfs open hook: bind the seq_file show callback to this encoder */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
4066
Dhaval Patelf9245d62017-03-28 16:24:00 -07004067static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304068 const char __user *user_buf, size_t count, loff_t *ppos)
4069{
4070 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07004071 int i = 0, rc;
4072 char buf[MISR_BUFF_SIZE + 1];
4073 size_t buff_copy;
4074 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304075
Dhaval Patelf9245d62017-03-28 16:24:00 -07004076 if (!file || !file->private_data)
4077 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304078
Dhaval Patelf9245d62017-03-28 16:24:00 -07004079 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304080
Dhaval Patelf9245d62017-03-28 16:24:00 -07004081 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
4082 if (copy_from_user(buf, user_buf, buff_copy))
4083 return -EINVAL;
4084
4085 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304086
4087 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07004088 return -EINVAL;
4089
4090 rc = _sde_encoder_power_enable(sde_enc, true);
4091 if (rc)
4092 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304093
4094 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004095 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07004096 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304097 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4098 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4099
Dhaval Patelf9245d62017-03-28 16:24:00 -07004100 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304101 continue;
4102
Dhaval Patelf9245d62017-03-28 16:24:00 -07004103 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304104 }
4105 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004106 _sde_encoder_power_enable(sde_enc, false);
4107
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304108 return count;
4109}
4110
/*
 * _sde_encoder_misr_read - debugfs read handler for "misr_data";
 *	reports "disabled"/"unsupported", or one "Intf idx:N" + hex MISR
 *	value pair per physical encoder that implements collect_misr.
 * Returns: number of bytes copied to userspace, 0 at EOF / on truncation,
 *	or a negative error code
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	/* whole report fits in one read; any non-zero offset means EOF */
	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* clocks must be on to read the MISR registers */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/* MISR collection only supported for pure video-mode caps */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* report nothing rather than a truncated buffer */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len;	/* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
4173
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004174static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004175{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004176 struct sde_encoder_virt *sde_enc;
4177 struct msm_drm_private *priv;
4178 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07004179 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004180
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004181 static const struct file_operations debugfs_status_fops = {
4182 .open = _sde_encoder_debugfs_status_open,
4183 .read = seq_read,
4184 .llseek = seq_lseek,
4185 .release = single_release,
4186 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304187
4188 static const struct file_operations debugfs_misr_fops = {
4189 .open = simple_open,
4190 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004191 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304192 };
4193
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004194 char name[SDE_NAME_SIZE];
4195
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004196 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004197 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004198 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004199 }
4200
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004201 sde_enc = to_sde_encoder_virt(drm_enc);
4202 priv = drm_enc->dev->dev_private;
4203 sde_kms = to_sde_kms(priv->kms);
4204
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004205 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
4206
4207 /* create overall sub-directory for the encoder */
4208 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07004209 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004210 if (!sde_enc->debugfs_root)
4211 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304212
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004213 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004214 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004215 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304216
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004217 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004218 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004219
Alan Kwongf2debb02017-04-05 06:19:29 -07004220 for (i = 0; i < sde_enc->num_phys_encs; i++)
4221 if (sde_enc->phys_encs[i] &&
4222 sde_enc->phys_encs[i]->ops.late_register)
4223 sde_enc->phys_encs[i]->ops.late_register(
4224 sde_enc->phys_encs[i],
4225 sde_enc->debugfs_root);
4226
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004227 return 0;
4228}
4229
4230static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
4231{
4232 struct sde_encoder_virt *sde_enc;
4233
4234 if (!drm_enc)
4235 return;
4236
4237 sde_enc = to_sde_encoder_virt(drm_enc);
4238 debugfs_remove_recursive(sde_enc->debugfs_root);
4239}
4240#else
/* CONFIG_DEBUG_FS disabled: report success without creating any entries */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
4245
/* CONFIG_DEBUG_FS disabled: nothing to tear down */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
4249#endif
4250
/* drm_encoder_funcs.late_register hook: set up the debugfs entries */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
4255
/* drm_encoder_funcs.early_unregister hook: remove the debugfs entries */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
4260
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004261static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04004262 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004263 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004264 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004265{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004266 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004267
Clarence Ip19af1362016-09-23 14:57:51 -04004268 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004269
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004270 /*
4271 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
4272 * in this function, check up-front.
4273 */
4274 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
4275 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004276 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004277 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004278 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004279 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004280
Clarence Ipa4039322016-07-15 16:23:59 -04004281 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004282 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004283
4284 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004285 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004286 PTR_ERR(enc));
4287 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4288 }
4289
4290 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4291 ++sde_enc->num_phys_encs;
4292 }
4293
Clarence Ipa4039322016-07-15 16:23:59 -04004294 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004295 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004296
4297 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004298 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004299 PTR_ERR(enc));
4300 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4301 }
4302
4303 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4304 ++sde_enc->num_phys_encs;
4305 }
4306
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004307 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004308}
4309
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004310static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
4311 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04004312{
4313 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04004314
Clarence Ip19af1362016-09-23 14:57:51 -04004315 if (!sde_enc) {
4316 SDE_ERROR("invalid encoder\n");
4317 return -EINVAL;
4318 }
4319
4320 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04004321
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004322 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004323 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004324 sde_enc->num_phys_encs);
4325 return -EINVAL;
4326 }
4327
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004328 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004329
4330 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004331 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004332 PTR_ERR(enc));
4333 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4334 }
4335
4336 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4337 ++sde_enc->num_phys_encs;
4338
4339 return 0;
4340}
4341
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004342static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004343 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04004344 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004345 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004346{
4347 int ret = 0;
4348 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004349 enum sde_intf_type intf_type;
4350 struct sde_encoder_virt_ops parent_ops = {
4351 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07004352 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04004353 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004354 };
4355 struct sde_enc_phys_init_params phys_params;
4356
Clarence Ip19af1362016-09-23 14:57:51 -04004357 if (!sde_enc || !sde_kms) {
4358 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
4359 sde_enc != 0, sde_kms != 0);
4360 return -EINVAL;
4361 }
4362
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004363 memset(&phys_params, 0, sizeof(phys_params));
4364 phys_params.sde_kms = sde_kms;
4365 phys_params.parent = &sde_enc->base;
4366 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004367 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004368
Clarence Ip19af1362016-09-23 14:57:51 -04004369 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004370
Clarence Ipa4039322016-07-15 16:23:59 -04004371 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004372 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
4373 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04004374 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004375 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4376 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07004377 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
4378 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4379 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04004380 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
4381 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
4382 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004383 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04004384 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004385 return -EINVAL;
4386 }
4387
Clarence Ip88270a62016-06-26 10:09:34 -04004388 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004389
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004390 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
4391
Clarence Ip19af1362016-09-23 14:57:51 -04004392 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004393
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004394 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
4395 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004396 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
4397
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004398 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04004399 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004400 /*
4401 * Left-most tile is at index 0, content is controller id
4402 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
4403 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
4404 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004405 u32 controller_id = disp_info->h_tile_instance[i];
4406
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004407 if (disp_info->num_of_h_tiles > 1) {
4408 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004409 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004410 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004411 phys_params.split_role = ENC_ROLE_SLAVE;
4412 } else {
4413 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004414 }
4415
Clarence Ip19af1362016-09-23 14:57:51 -04004416 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004417 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004418
Alan Kwongbb27c092016-07-20 16:41:25 -04004419 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004420 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004421 phys_params.wb_idx = sde_encoder_get_wb(
4422 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04004423 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004424 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004425 SDE_ERROR_ENC(sde_enc,
4426 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004427 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004428 ret = -EINVAL;
4429 }
Alan Kwongbb27c092016-07-20 16:41:25 -04004430 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004431 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004432 phys_params.intf_idx = sde_encoder_get_intf(
4433 sde_kms->catalog, intf_type,
4434 controller_id);
4435 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004436 SDE_ERROR_ENC(sde_enc,
4437 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004438 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004439 ret = -EINVAL;
4440 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004441 }
4442
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004443 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04004444 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004445 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
4446 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004447 else
4448 ret = sde_encoder_virt_add_phys_encs(
4449 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004450 sde_enc,
4451 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004452 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04004453 SDE_ERROR_ENC(sde_enc,
4454 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004455 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004456 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08004457
4458 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4459 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4460
4461 if (phys) {
4462 atomic_set(&phys->vsync_cnt, 0);
4463 atomic_set(&phys->underrun_cnt, 0);
4464 }
4465 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004466 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004467
4468 return ret;
4469}
4470
/* drm_encoder helper callbacks dispatched during atomic modeset */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4477
/* drm_encoder lifecycle callbacks (destroy + debugfs register/unregister) */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4483
Clarence Ip3649f8b2016-10-31 09:59:44 -04004484struct drm_encoder *sde_encoder_init(
4485 struct drm_device *dev,
4486 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004487{
4488 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04004489 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004490 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004491 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004492 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08004493 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004494 int ret = 0;
4495
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004496 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
4497 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004498 ret = -ENOMEM;
4499 goto fail;
4500 }
4501
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004502 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004503 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
4504 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004505 if (ret)
4506 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004507
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004508 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004509 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004510 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004511 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004512 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004513
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004514 if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
4515 disp_info->is_primary)
4516 setup_timer(&sde_enc->vsync_event_timer,
4517 sde_encoder_vsync_event_handler,
4518 (unsigned long)sde_enc);
4519
Dhaval Patel020f7e122016-11-15 14:39:18 -08004520 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
4521 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004522 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08004523 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07004524 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08004525 PTR_ERR(sde_enc->rsc_client));
4526 sde_enc->rsc_client = NULL;
4527 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004528
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004529 if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
4530 ret = _sde_encoder_input_handler(sde_enc);
4531 if (ret)
4532 SDE_ERROR(
4533 "input handler registration failed, rc = %d\n", ret);
4534 }
4535
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004536 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04004537 kthread_init_delayed_work(&sde_enc->delayed_off_work,
4538 sde_encoder_off_work);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07004539 sde_enc->vblank_enabled = false;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004540
4541 kthread_init_work(&sde_enc->vsync_event_work,
4542 sde_encoder_vsync_event_work_handler);
4543
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004544 kthread_init_work(&sde_enc->input_event_work,
4545 sde_encoder_input_event_work_handler);
4546
Dhaval Patel222023e2018-02-27 12:24:07 -08004547 kthread_init_work(&sde_enc->esd_trigger_work,
4548 sde_encoder_esd_trigger_work_handler);
4549
Dhaval Patel020f7e122016-11-15 14:39:18 -08004550 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
4551
Clarence Ip19af1362016-09-23 14:57:51 -04004552 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004553
4554 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004555
4556fail:
Clarence Ip19af1362016-09-23 14:57:51 -04004557 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004558 if (drm_enc)
4559 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004560
4561 return ERR_PTR(ret);
4562}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004563
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004564int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4565 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004566{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004567 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004568 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004569 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004570
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004571 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004572 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004573 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004574 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004575 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004576 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004577
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004578 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4579 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004580
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004581 switch (event) {
4582 case MSM_ENC_COMMIT_DONE:
4583 fn_wait = phys->ops.wait_for_commit_done;
4584 break;
4585 case MSM_ENC_TX_COMPLETE:
4586 fn_wait = phys->ops.wait_for_tx_complete;
4587 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004588 case MSM_ENC_VBLANK:
4589 fn_wait = phys->ops.wait_for_vblank;
4590 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304591 case MSM_ENC_ACTIVE_REGION:
4592 fn_wait = phys->ops.wait_for_active;
4593 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004594 default:
4595 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4596 event);
4597 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004598 };
4599
4600 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004601 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004602 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004603 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004604 if (ret)
4605 return ret;
4606 }
4607 }
4608
4609 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004610}
4611
Alan Kwong67a3f792016-11-01 23:16:53 -04004612enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4613{
4614 struct sde_encoder_virt *sde_enc = NULL;
4615 int i;
4616
4617 if (!encoder) {
4618 SDE_ERROR("invalid encoder\n");
4619 return INTF_MODE_NONE;
4620 }
4621 sde_enc = to_sde_encoder_virt(encoder);
4622
4623 if (sde_enc->cur_master)
4624 return sde_enc->cur_master->intf_mode;
4625
4626 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4627 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4628
4629 if (phys)
4630 return phys->intf_mode;
4631 }
4632
4633 return INTF_MODE_NONE;
4634}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004635
/**
 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
 *	device bootup when cont_splash is enabled
 * @encoder: Pointer to drm encoder structure
 *
 * Looks up the connector attached to this encoder, queries its mode info,
 * reserves hw resources against the splash mode, pushes the mode to the
 * bridge, and populates the pingpong/dsc/ctl blocks on the phys encoders.
 *
 * Return: 0 on success, negative error code otherwise
 */
int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct drm_connector *conn = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct drm_display_mode *drm_mode = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
	int ret = 0, i;

	if (!encoder) {
		SDE_ERROR("invalid drm enc\n");
		return -EINVAL;
	}

	if (!encoder->dev || !encoder->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = encoder->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);
	sde_enc = to_sde_encoder_virt(encoder);
	if (!priv->num_connectors) {
		SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
		return -EINVAL;
	}
	SDE_DEBUG_ENC(sde_enc,
			"num of connectors: %d\n", priv->num_connectors);

	/* find the registered connector whose encoder id matches ours */
	for (i = 0; i < priv->num_connectors; i++) {
		SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
				priv->connectors[i]->base.id);
		sde_conn = to_sde_connector(priv->connectors[i]);
		if (!sde_conn->encoder) {
			SDE_DEBUG_ENC(sde_enc,
				"encoder not attached to connector\n");
			continue;
		}
		if (sde_conn->encoder->base.id
				== encoder->base.id) {
			conn = (priv->connectors[i]);
			break;
		}
	}

	if (!conn || !conn->state) {
		SDE_ERROR_ENC(sde_enc, "connector not found\n");
		return -EINVAL;
	}

	sde_conn_state = to_sde_connector_state(conn->state);

	if (!sde_conn->ops.get_mode_info) {
		SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
		return -EINVAL;
	}

	/* fill mode_info from the mode the bootloader left programmed */
	ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
			&sde_conn_state->mode_info,
			sde_kms->catalog->max_mixer_width,
			sde_conn->display);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"conn: ->get_mode_info failed. ret=%d\n", ret);
		return ret;
	}

	ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"failed to reserve hw resources, %d\n", ret);
		return ret;
	}

	if (sde_conn->encoder) {
		conn->state->best_encoder = sde_conn->encoder;
		SDE_DEBUG_ENC(sde_enc,
			"configured cstate->best_encoder to ID = %d\n",
			conn->state->best_encoder->base.id);
	} else {
		SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
				conn->base.id);
	}

	SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
			sde_connector_get_topology_name(conn));
	drm_mode = &encoder->crtc->state->adjusted_mode;
	SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
			drm_mode->hdisplay, drm_mode->vdisplay);
	drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);

	if (encoder->bridge) {
		SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
		/*
		 * For cont-splash use case, we update the mode
		 * configurations manually. This will skip the
		 * usually mode set call when actual frame is
		 * pushed from framework. The bridge needs to
		 * be updated with the current drm mode by
		 * calling the bridge mode set ops.
		 */
		if (encoder->bridge->funcs) {
			SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
			encoder->bridge->funcs->mode_set(encoder->bridge,
						drm_mode, drm_mode);
		}
	} else {
		SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
	}

	/* collect the reserved pingpong blocks for this encoder */
	sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks, if any */
	sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each phys encoder its reserved CTL block */
	sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/*
		 * NOTE(review): phys is dereferenced here without a NULL
		 * check, unlike the loop below — confirm phys_encs[0..
		 * num_phys_encs-1] are guaranteed non-NULL at this point.
		 */
		phys->hw_ctl = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
			break;
		phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
	}

	/* push splash mode into each phys encoder and elect the master */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys) {
			SDE_ERROR_ENC(sde_enc,
				"phys encoders not initialized\n");
			return -EINVAL;
		}

		phys->hw_pp = sde_enc->hw_pp[i];
		if (phys->ops.cont_splash_mode_set)
			phys->ops.cont_splash_mode_set(phys, drm_mode);

		if (phys->ops.is_master && phys->ops.is_master(phys)) {
			phys->connector = conn;
			sde_enc->cur_master = phys;
		}
	}

	return ret;
}
Dhaval Patelef58f0b2018-01-22 19:13:52 -08004808
/**
 * sde_encoder_display_failure_notification - handle a reported display
 *	(ESD) failure by running the esd trigger work on the display thread
 *	and switching vsync generation to the watchdog timer
 * @enc: Pointer to drm encoder structure
 *
 * Return: 0 on success, -EINVAL on invalid encoder or cached crtc state
 */
int sde_encoder_display_failure_notification(struct drm_encoder *enc)
{
	struct msm_drm_thread *disp_thread = NULL;
	struct msm_drm_private *priv = NULL;
	struct sde_encoder_virt *sde_enc = NULL;

	if (!enc || !enc->dev || !enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}

	priv = enc->dev->dev_private;
	sde_enc = to_sde_encoder_virt(enc);
	/* the cached crtc picks the display thread; bail if it is stale */
	if (!sde_enc->crtc || (sde_enc->crtc->index
			>= ARRAY_SIZE(priv->disp_thread))) {
		SDE_DEBUG_ENC(sde_enc,
			"invalid cached CRTC: %d or crtc index: %d\n",
			sde_enc->crtc == NULL,
			sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
		return -EINVAL;
	}

	SDE_EVT32_VERBOSE(DRMID(enc));

	disp_thread = &priv->disp_thread[sde_enc->crtc->index];

	/* queue on the display thread and block until the work completes */
	kthread_queue_work(&disp_thread->worker,
				&sde_enc->esd_trigger_work);
	kthread_flush_work(&sde_enc->esd_trigger_work);
	/*
	 * panel may stop generating te signal (vsync) during esd failure. rsc
	 * hardware may hang without vsync. Avoid rsc hang by generating the
	 * vsync from watchdog timer instead of panel.
	 */
	_sde_encoder_switch_to_watchdog_vsync(enc);

	sde_encoder_wait_for_event(enc, MSM_ENC_TX_COMPLETE);

	return 0;
}