blob: b6888dfe9c2c38ed397bad5025e35fc85b75cf2d [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070070#define MAX_CHANNELS_PER_ENC 2
71
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Raviteja Tamatam3eebe962017-10-26 09:55:24 +053076#define FAULT_TOLERENCE_DELTA_IN_MS 2
77
78#define FAULT_TOLERENCE_WAIT_IN_MS 5
79
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040080/* Maximum number of VSYNC wait attempts for RSC state transition */
81#define MAX_RSC_WAIT 5
82
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070083/**
84 * enum sde_enc_rc_events - events for resource control state machine
85 * @SDE_ENC_RC_EVENT_KICKOFF:
86 * This event happens at NORMAL priority.
87 * Event that signals the start of the transfer. When this event is
88 * received, enable MDP/DSI core clocks and request RSC with CMD state.
89 * Regardless of the previous state, the resource should be in ON state
90 * at the end of this event.
91 * @SDE_ENC_RC_EVENT_FRAME_DONE:
92 * This event happens at INTERRUPT level.
93 * Event signals the end of the data transfer after the PP FRAME_DONE
94 * event. At the end of this event, a delayed work is scheduled to go to
Dhaval Patelc9e213b2017-11-02 12:13:12 -070095 * IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040096 * @SDE_ENC_RC_EVENT_PRE_STOP:
97 * This event happens at NORMAL priority.
98 * This event, when received during the ON state, set RSC to IDLE, and
99 * and leave the RC STATE in the PRE_OFF state.
100 * It should be followed by the STOP event as part of encoder disable.
101 * If received during IDLE or OFF states, it will do nothing.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700102 * @SDE_ENC_RC_EVENT_STOP:
103 * This event happens at NORMAL priority.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400104 * When this event is received, disable all the MDP/DSI core clocks, and
105 * disable IRQs. It should be called from the PRE_OFF or IDLE states.
106 * IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
107 * PRE_OFF is expected when PRE_STOP was executed during the ON state.
108 * Resource state should be in OFF at the end of the event.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700109 * @SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700110 * This event happens at NORMAL priority from a work item.
 * Event signals that a seamless mode switch is in progress. A
 * client needs to turn off only the irq - leave clocks ON to reduce the mode
113 * switch latency.
114 * @SDE_ENC_RC_EVENT_POST_MODESET:
115 * This event happens at NORMAL priority from a work item.
116 * Event signals that seamless mode switch is complete and resources are
117 * acquired. Clients wants to turn on the irq again and update the rsc
118 * with new vtotal.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700119 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
120 * This event happens at NORMAL priority from a work item.
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700121 * Event signals that there were no frame updates for
122 * IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core clocks
123 * and request RSC with IDLE state and change the resource state to IDLE.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700124 */
/* see the kernel-doc block above for the full description of each event */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,	/* start of frame transfer */
	SDE_ENC_RC_EVENT_FRAME_DONE,	/* end of transfer (interrupt level) */
	SDE_ENC_RC_EVENT_PRE_STOP,	/* first half of encoder disable */
	SDE_ENC_RC_EVENT_STOP,		/* final encoder disable */
	SDE_ENC_RC_EVENT_PRE_MODESET,	/* seamless mode switch beginning */
	SDE_ENC_RC_EVENT_POST_MODESET,	/* seamless mode switch complete */
	SDE_ENC_RC_EVENT_ENTER_IDLE	/* no updates; allow power collapse */
};
134
135/*
136 * enum sde_enc_rc_states - states that the resource control maintains
137 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400138 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700139 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700140 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700141 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
142 */
/* see the kernel-doc block above for the full description of each state */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,		/* resources fully released */
	SDE_ENC_RC_STATE_PRE_OFF,	/* transitioning toward OFF */
	SDE_ENC_RC_STATE_ON,		/* clocks/irqs enabled, active */
	SDE_ENC_RC_STATE_MODESET,	/* seamless mode switch in flight */
	SDE_ENC_RC_STATE_IDLE		/* idle power collapsed */
};
150
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400151/**
152 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
153 * encoders. Virtual encoder manages one "logical" display. Physical
154 * encoders manage one intf block, tied to a specific panel/sub-panel.
155 * Virtual encoder defers as much as possible to the physical encoders.
156 * Virtual encoder registers itself with the DRM Framework as the encoder.
157 * @base: drm_encoder base class for registration with DRM
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400158 * @enc_spin_lock: Virtual-Encoder-Wide Spin Lock for IRQ purposes
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400159 * @bus_scaling_client: Client handle to the bus scaling interface
160 * @num_phys_encs: Actual number of physical encoders contained.
161 * @phys_encs: Container of physical encoders managed.
162 * @cur_master: Pointer to the current master in this mode. Optimization
 * Only valid after enable. Cleared at disable.
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -0700164 * @hw_pp Handle to the pingpong blocks used for the display. No.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500165 * pingpong blocks can be different than num_phys_encs.
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800166 * @hw_dsc: Array of DSC block handles used for the display.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500167 * @intfs_swapped Whether or not the phys_enc interfaces have been swapped
168 * for partial update right-only cases, such as pingpong
169 * split where virtual pingpong does not generate IRQs
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400170 * @crtc_vblank_cb: Callback into the upper layer / CRTC for
171 * notification of the VBLANK
172 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400173 * @crtc_kickoff_cb: Callback into CRTC that will flush & start
174 * all CTL paths
175 * @crtc_kickoff_cb_data: Opaque user data given to crtc_kickoff_cb
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700176 * @debugfs_root: Debug file system root file node
177 * @enc_lock: Lock around physical encoder create/destroy and
178 access.
Alan Kwong628d19e2016-10-31 13:50:13 -0400179 * @frame_busy_mask: Bitmask tracking which phys_enc we are still
180 * busy processing current command.
181 * Bit0 = phys_encs[0] etc.
182 * @crtc_frame_event_cb: callback handler for frame event
183 * @crtc_frame_event_cb_data: callback handler private data
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400184 * @vsync_event_timer: vsync timer
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700185 * @rsc_client: rsc client pointer
186 * @rsc_state_init: boolean to indicate rsc config init
187 * @disp_info: local copy of msm_display_info struct
Dhaval Patelf9245d62017-03-28 16:24:00 -0700188 * @misr_enable: misr enable/disable status
Dhaval Patel010f5172017-08-01 22:40:09 -0700189 * @misr_frame_count: misr frame count before start capturing the data
 * @idle_pc_supported: indicate if idle power collapse is supported
191 * @rc_lock: resource control mutex lock to protect
192 * virt encoder over various state changes
193 * @rc_state: resource controller state
194 * @delayed_off_work: delayed worker to schedule disabling of
195 * clks and resources after IDLE_TIMEOUT time.
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400196 * @vsync_event_work: worker to handle vsync event for autorefresh
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700197 * @topology: topology of the display
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -0700198 * @vblank_enabled: boolean to track userspace vblank vote
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700199 * @rsc_config: rsc configuration for display vtotal, fps, etc.
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400200 * @cur_conn_roi: current connector roi
201 * @prv_conn_roi: previous connector roi to optimize if unchanged
Harsh Sahu1e52ed02017-11-28 14:34:22 -0800202 * @crtc pointer to drm_crtc
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400203 */
/* fields are documented in the kernel-doc comment block above */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	/* physical encoder container; cur_master valid only while enabled */
	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	/* callbacks into the CRTC layer */
	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	struct timer_list vsync_event_timer;

	/* RSC (resource state coordinator) handles */
	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	/* resource control state machine, guarded by rc_lock */
	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	struct drm_crtc *crtc;
};
249
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400250#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700251
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500252static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
253{
254 struct msm_drm_private *priv;
255 struct sde_kms *sde_kms;
256 struct pm_qos_request *req;
257 u32 cpu_mask;
258 u32 cpu_dma_latency;
259 int cpu;
260
261 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
262 SDE_ERROR("drm device invalid\n");
263 return;
264 }
265
266 priv = drm_enc->dev->dev_private;
267 if (!priv->kms) {
268 SDE_ERROR("invalid kms\n");
269 return;
270 }
271
272 sde_kms = to_sde_kms(priv->kms);
273 if (!sde_kms || !sde_kms->catalog)
274 return;
275
276 cpu_mask = sde_kms->catalog->perf.cpu_mask;
277 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
278 if (!cpu_mask)
279 return;
280
281 req = &sde_kms->pm_qos_cpu_req;
282 req->type = PM_QOS_REQ_AFFINE_CORES;
283 cpumask_empty(&req->cpus_affine);
284 for_each_possible_cpu(cpu) {
285 if ((1 << cpu) & cpu_mask)
286 cpumask_set_cpu(cpu, &req->cpus_affine);
287 }
288 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
289
290 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
291}
292
293static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
294{
295 struct msm_drm_private *priv;
296 struct sde_kms *sde_kms;
297
298 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
299 SDE_ERROR("drm device invalid\n");
300 return;
301 }
302
303 priv = drm_enc->dev->dev_private;
304 if (!priv->kms) {
305 SDE_ERROR("invalid kms\n");
306 return;
307 }
308
309 sde_kms = to_sde_kms(priv->kms);
310 if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
311 return;
312
313 pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
314}
315
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700316static struct drm_connector_state *_sde_encoder_get_conn_state(
317 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800318{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700319 struct msm_drm_private *priv;
320 struct sde_kms *sde_kms;
321 struct list_head *connector_list;
322 struct drm_connector *conn_iter;
323
324 if (!drm_enc) {
325 SDE_ERROR("invalid argument\n");
326 return NULL;
327 }
328
329 priv = drm_enc->dev->dev_private;
330 sde_kms = to_sde_kms(priv->kms);
331 connector_list = &sde_kms->dev->mode_config.connector_list;
332
333 list_for_each_entry(conn_iter, connector_list, head)
334 if (conn_iter->encoder == drm_enc)
335 return conn_iter->state;
336
337 return NULL;
338}
339
340static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
341 struct msm_mode_info *mode_info)
342{
343 struct drm_connector_state *conn_state;
344
345 if (!drm_enc || !mode_info) {
346 SDE_ERROR("invalid arguments\n");
347 return -EINVAL;
348 }
349
350 conn_state = _sde_encoder_get_conn_state(drm_enc);
351 if (!conn_state) {
352 SDE_ERROR("invalid connector state for the encoder: %d\n",
353 drm_enc->base.id);
354 return -EINVAL;
355 }
356
357 return sde_connector_get_mode_info(conn_state, mode_info);
358}
359
360static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
361{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400362 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700363 struct msm_mode_info mode_info;
364 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400365
366 if (!drm_enc)
367 return false;
368
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700369 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
370 if (rc) {
371 SDE_ERROR("failed to get mode info, enc: %d\n",
372 drm_enc->base.id);
373 return false;
374 }
375
376 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800377
378 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
379}
380
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400381bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
382{
383 enum sde_rm_topology_name topology;
384 struct sde_encoder_virt *sde_enc;
385 struct drm_connector *drm_conn;
386
387 if (!drm_enc)
388 return false;
389
390 sde_enc = to_sde_encoder_virt(drm_enc);
391 if (!sde_enc->cur_master)
392 return false;
393
394 drm_conn = sde_enc->cur_master->connector;
395 if (!drm_conn)
396 return false;
397
398 topology = sde_connector_get_topology_name(drm_conn);
399 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
400 return true;
401
402 return false;
403}
404
Dhaval Patelf9245d62017-03-28 16:24:00 -0700405static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
406 bool enable)
407{
408 struct drm_encoder *drm_enc;
409 struct msm_drm_private *priv;
410 struct sde_kms *sde_kms;
411
412 if (!sde_enc) {
413 SDE_ERROR("invalid sde enc\n");
414 return -EINVAL;
415 }
416
417 drm_enc = &sde_enc->base;
418 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
419 SDE_ERROR("drm device invalid\n");
420 return -EINVAL;
421 }
422
423 priv = drm_enc->dev->dev_private;
424 if (!priv->kms) {
425 SDE_ERROR("invalid kms\n");
426 return -EINVAL;
427 }
428
429 sde_kms = to_sde_kms(priv->kms);
430
431 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
432 enable);
433}
434
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500435void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
436 enum sde_intr_idx intr_idx)
437{
438 SDE_EVT32(DRMID(phys_enc->parent),
439 phys_enc->intf_idx - INTF_0,
440 phys_enc->hw_pp->idx - PINGPONG_0,
441 intr_idx);
442 SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);
443
444 if (phys_enc->parent_ops.handle_frame_done)
445 phys_enc->parent_ops.handle_frame_done(
446 phys_enc->parent, phys_enc,
447 SDE_ENCODER_FRAME_EVENT_ERROR);
448}
449
/**
 * sde_encoder_helper_wait_for_irq - wait for an interrupt to fire, with
 *	recovery if the hw status shows the irq fired but was never delivered
 * @phys_enc: physical encoder to wait on
 * @intr_idx: logical interrupt index to wait for
 * @wait_info: wait parameters (atomic pending count, timeout, wait queue)
 *
 * Return: 0 on success (irq received or manually recovered), -EWOULDBLOCK if
 * the encoder is disabled, -ETIMEDOUT if the wait expired with no irq status
 * pending, -EINVAL on bad arguments.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered (negative index): nothing to wait for */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* wait timed out; check if the hw raised the irq anyway */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/*
			 * irq fired but the callback never ran: invoke it
			 * manually with local irqs off to mimic irq context
			 */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt), irq_status,
				SDE_EVTLOG_ERROR);
		}
	} else {
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
531
/**
 * sde_encoder_helper_register_irq - look up, register a callback for, and
 *	enable the hw interrupt backing the given logical interrupt index
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: logical interrupt index to register
 *
 * Idempotent: silently succeeds if the irq is already registered
 * (irq_idx >= 0). On enable failure the callback registration is rolled
 * back and irq_idx is reset to -EINVAL.
 *
 * Return: 0 on success, negative error code on failure.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* already registered: nothing to do */
	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	/* map (intr_type, hw block index) to the core irq index */
	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration on enable failure */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
591
/**
 * sde_encoder_helper_unregister_irq - disable and unregister the hw interrupt
 *	backing the given logical interrupt index
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: logical interrupt index to unregister
 *
 * Disable/unregister failures are logged to the event log but do not stop
 * teardown; irq_idx is always reset to -EINVAL on exit.
 *
 * Return: 0 (also on extra unregister of a non-registered irq),
 * -EINVAL on NULL phys_enc.
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
				DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark the slot free so a later register call can reuse it */
	irq->irq_idx = -EINVAL;

	return 0;
}
633
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400634void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400635 struct sde_encoder_hw_resources *hw_res,
636 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400637{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400638 struct sde_encoder_virt *sde_enc = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700639 struct msm_mode_info mode_info;
640 int rc, i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400641
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400642 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400643 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
644 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400645 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400646 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400647
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400648 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400649 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400650
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400651 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400652 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400653 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
654
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400655 for (i = 0; i < sde_enc->num_phys_encs; i++) {
656 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
657
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400658 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400659 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400660 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700661
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700662 /**
663 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
664 * called from atomic_check phase. Use the below API to get mode
665 * information of the temporary conn_state passed.
666 */
667 rc = sde_connector_get_mode_info(conn_state, &mode_info);
668 if (rc) {
669 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
670 return;
671 }
672
673 hw_res->topology = mode_info.topology;
Jeykumar Sankaran6f215d42017-09-12 16:15:23 -0700674 hw_res->is_primary = sde_enc->disp_info.is_primary;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400675}
676
/**
 * sde_encoder_destroy - tear down a virtual encoder and all the physical
 *	encoders it contains, then free it
 * @drm_enc: drm encoder to destroy
 *
 * Destroys the RSC client and each phys encoder under enc_lock, then
 * unregisters from DRM and frees the container.
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	/* destroy each phys encoder and clear its slot */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* all phys encoders should have been destroyed above */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	/* unhook from DRM before freeing backing memory */
	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&sde_enc->enc_lock);

	kfree(sde_enc);
}
714
/**
 * sde_encoder_helper_split_config - program MDP-top split-pipe / pp-split
 *	registers according to this physical encoder's split role
 * @phys_enc: physical encoder being configured
 * @interface: hardware interface driven by this encoder
 *
 * Only applies to DSI displays; other connector types return early.
 * A SOLO encoder disables both split modes, a MASTER programs the
 * split-pipe config, and a slave programs the pp-split index derived
 * from the master's pingpong block.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config is only relevant for DSI interfaces */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is still zeroed here, so both calls disable splits */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	/* some encoders require a single flush covering both pipes */
	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* PPSPLIT topology routes the slave through this same interface */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
784
/**
 * sde_encoder_virt_atomic_check - validate a proposed encoder configuration
 * @drm_enc: encoder being checked
 * @crtc_state: proposed crtc state
 * @conn_state: proposed connector state
 *
 * Runs the per-physical-encoder atomic checks, validates partial-update
 * ROIs against the mode during a modeset, records the previous topology,
 * and (on modeset) fetches mode info, reserves RM resources, and updates
 * the connector topology/blob data.
 *
 * Return: 0 on success, negative error code on validation failure.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/*
	 * During a modeset, any user ROI must exactly cover the full mode;
	 * partial-update ROIs are only legal on non-modeset commits.
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
					roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
					roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/**
		 * record topology in previous atomic state to be able to handle
		 * topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	/* modeset path: refresh mode info and re-reserve HW resources */
	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
			ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
950
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800951static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
952 int pic_width, int pic_height)
953{
954 if (!dsc || !pic_width || !pic_height) {
955 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
956 pic_width, pic_height);
957 return -EINVAL;
958 }
959
960 if ((pic_width % dsc->slice_width) ||
961 (pic_height % dsc->slice_height)) {
962 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
963 pic_width, pic_height,
964 dsc->slice_width, dsc->slice_height);
965 return -EINVAL;
966 }
967
968 dsc->pic_width = pic_width;
969 dsc->pic_height = pic_height;
970
971 return 0;
972}
973
974static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
975 int intf_width)
976{
977 int slice_per_pkt, slice_per_intf;
978 int bytes_in_slice, total_bytes_per_intf;
979
980 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
981 (intf_width < dsc->slice_width)) {
982 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
983 intf_width, dsc ? dsc->slice_width : -1);
984 return;
985 }
986
987 slice_per_pkt = dsc->slice_per_pkt;
988 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
989
990 /*
991 * If slice_per_pkt is greater than slice_per_intf then default to 1.
992 * This can happen during partial update.
993 */
994 if (slice_per_pkt > slice_per_intf)
995 slice_per_pkt = 1;
996
997 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
998 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
999
1000 dsc->eol_byte_num = total_bytes_per_intf % 3;
1001 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1002 dsc->bytes_in_slice = bytes_in_slice;
1003 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1004 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1005}
1006
1007static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1008 int enc_ip_width)
1009{
1010 int ssm_delay, total_pixels, soft_slice_per_enc;
1011
1012 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1013
1014 /*
1015 * minimum number of initial line pixels is a sum of:
1016 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1017 * 91 for 10 bpc) * 3
1018 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1019 * 3. the initial xmit delay
1020 * 4. total pipeline delay through the "lock step" of encoder (47)
1021 * 5. 6 additional pixels as the output of the rate buffer is
1022 * 48 bits wide
1023 */
1024 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1025 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1026 if (soft_slice_per_enc > 1)
1027 total_pixels += (ssm_delay * 3);
1028 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1029 return 0;
1030}
1031
1032static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1033 struct msm_display_dsc_info *dsc)
1034{
1035 /*
1036 * As per the DSC spec, ICH_RESET can be either end of the slice line
1037 * or at the end of the slice. HW internally generates ich_reset at
1038 * end of the slice line if DSC_MERGE is used or encoder has two
1039 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1040 * is not used then it will generate ich_reset at the end of slice.
1041 *
1042 * Now as per the spec, during one PPS session, position where
1043 * ich_reset is generated should not change. Now if full-screen frame
1044 * has more than 1 soft slice then HW will automatically generate
1045 * ich_reset at the end of slice_line. But for the same panel, if
1046 * partial frame is enabled and only 1 encoder is used with 1 slice,
1047 * then HW will generate ich_reset at end of the slice. This is a
1048 * mismatch. Prevent this by overriding HW's decision.
1049 */
1050 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1051 (dsc->slice_width == dsc->pic_width);
1052}
1053
/*
 * _sde_encoder_dsc_pipe_cfg - enable or disable one DSC/pingpong pair.
 * On enable: program the DSC core config and rate-control thresholds,
 * then route and start DSC on the pingpong block, in that order.
 * On disable: only the pingpong's DSC path is turned off.
 * Callers guarantee hw_dsc and hw_pp are non-NULL.
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1076
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001077static void _sde_encoder_get_connector_roi(
1078 struct sde_encoder_virt *sde_enc,
1079 struct sde_rect *merged_conn_roi)
1080{
1081 struct drm_connector *drm_conn;
1082 struct sde_connector_state *c_state;
1083
1084 if (!sde_enc || !merged_conn_roi)
1085 return;
1086
1087 drm_conn = sde_enc->phys_encs[0]->connector;
1088
1089 if (!drm_conn || !drm_conn->state)
1090 return;
1091
1092 c_state = to_sde_connector_state(drm_conn->state);
1093 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1094}
1095
/*
 * Configure DSC for topologies with N layer mixers feeding a single
 * encoder and a single interface (SINGLEPIPE_DSC, DUALPIPE_3DMERGE_DSC).
 * The merged connector ROI cached in cur_conn_roi provides the picture
 * dimensions. Returns 0 on success, -EINVAL when hardware blocks or mode
 * info are unavailable.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	/* the single interface carries every slice of the frame */
	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* no dsc merge: encoder input width equals the interface width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001146
/*
 * Configure DSC for the DUALPIPE_DSC topology: two layer mixers, two DSC
 * encoders, two interfaces (no dsc merge). Each DSC/pingpong pair is
 * enabled or disabled according to params->affected_displays, supporting
 * half-panel partial update. Returns 0 on success, -EINVAL on missing
 * hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one active display means left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	/* enable/disable each channel based on the affected-display mask */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1240
/*
 * Configure DSC for the DUALPIPE_DSCMERGE topology: two layer mixers and
 * two DSC encoders merged onto one interface. Both encoders share one DSC
 * config; the second encoder is disabled during half-panel partial update.
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one active display means left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* first pipe always runs; second only when the full panel updates */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1312
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001313static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1314{
1315 struct sde_encoder_virt *sde_enc;
1316 struct drm_connector *drm_conn;
1317 struct drm_display_mode *adj_mode;
1318 struct sde_rect roi;
1319
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001320 if (!drm_enc) {
1321 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001322 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001323 }
1324
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001325 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001326 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1327 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001328 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001329 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001330
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001331 if (!sde_enc->cur_master) {
1332 SDE_ERROR("invalid cur_master parameter\n");
1333 return -EINVAL;
1334 }
1335
1336 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001337 drm_conn = sde_enc->cur_master->connector;
1338
1339 _sde_encoder_get_connector_roi(sde_enc, &roi);
1340 if (sde_kms_rect_is_null(&roi)) {
1341 roi.w = adj_mode->hdisplay;
1342 roi.h = adj_mode->vdisplay;
1343 }
1344
1345 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1346 sizeof(sde_enc->prv_conn_roi));
1347 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1348
1349 return 0;
1350}
1351
1352static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1353 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001354{
1355 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001356 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001357 int ret = 0;
1358
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001359 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1360 !sde_enc->phys_encs[0]->connector)
1361 return -EINVAL;
1362
1363 drm_conn = sde_enc->phys_encs[0]->connector;
1364
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001365 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001366 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001367 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1368 return -EINVAL;
1369 }
1370
Ingrid Gallardo83532222017-06-02 16:48:51 -07001371 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001372 SDE_EVT32(DRMID(&sde_enc->base), topology,
1373 sde_enc->cur_conn_roi.x,
1374 sde_enc->cur_conn_roi.y,
1375 sde_enc->cur_conn_roi.w,
1376 sde_enc->cur_conn_roi.h,
1377 sde_enc->prv_conn_roi.x,
1378 sde_enc->prv_conn_roi.y,
1379 sde_enc->prv_conn_roi.w,
1380 sde_enc->prv_conn_roi.h,
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001381 sde_enc->cur_master->cached_mode.hdisplay,
1382 sde_enc->cur_master->cached_mode.vdisplay);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001383
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001384 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1385 &sde_enc->prv_conn_roi))
1386 return ret;
1387
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001388 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001389 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001390 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1391 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001392 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001393 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001394 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001395 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001396 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001397 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001398 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001399 default:
1400 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1401 topology);
1402 return -EINVAL;
1403 };
1404
1405 return ret;
1406}
1407
/**
 * _sde_encoder_update_vsync_source - select the TE/vsync source for all
 *	pingpong blocks of a command-mode display
 * @sde_enc: virtual encoder
 * @disp_info: display info (capabilities, watchdog-TE flag)
 * @is_dummy: true to route vsync from the secondary watchdog timer
 *
 * Chooses between watchdog timer 1 (dummy), watchdog timer 0 (panels
 * without a TE pin), or the TE GPIO, then programs the MDP top block.
 * Only applies when the display advertises command-mode capability.
 */
static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
		struct msm_display_info *disp_info, bool is_dummy)
{
	struct sde_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	int i, rc = 0;

	if (!sde_enc || !disp_info) {
		SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
					sde_enc != NULL, disp_info != NULL);
		return;
	} else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
		SDE_ERROR("invalid num phys enc %d/%d\n",
				sde_enc->num_phys_encs,
				(int) ARRAY_SIZE(sde_enc->hw_pp));
		return;
	}

	drm_enc = &sde_enc->base;
	/* this pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	hw_mdptop = sde_kms->hw_mdp;
	if (!hw_mdptop) {
		SDE_ERROR("invalid mdptop\n");
		return;
	}

	/* frame rate is needed to program the vsync source below */
	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		/* all pingpongs of this encoder share the same source */
		for (i = 0; i < sde_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = sde_enc->num_phys_encs;
		vsync_cfg.frame_rate = mode_info.frame_rate;
		if (is_dummy)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
		else if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
		vsync_cfg.is_dummy = is_dummy;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
1470
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001471static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1472{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001473 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001474 struct sde_hw_pingpong *hw_pp = NULL;
1475 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001476
1477 if (!sde_enc || !sde_enc->phys_encs[0] ||
1478 !sde_enc->phys_encs[0]->connector) {
1479 SDE_ERROR("invalid params %d %d\n",
1480 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1481 return -EINVAL;
1482 }
1483
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001484 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001485 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1486 hw_pp = sde_enc->hw_pp[i];
1487 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001488
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001489 if (hw_pp && hw_pp->ops.disable_dsc)
1490 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001491
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001492 if (hw_dsc && hw_dsc->ops.dsc_disable)
1493 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001494 }
1495
1496 return ret;
1497}
1498
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001499static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001500 struct drm_encoder *drm_enc,
1501 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001502{
1503 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001504 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001505 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001506 struct sde_rsc_cmd_config *rsc_config;
1507 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001508 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001509 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001510 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1511 int wait_count = 0;
1512 struct drm_crtc *primary_crtc;
1513 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001514 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001515 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001516
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001517 if (!drm_enc || !drm_enc->dev) {
1518 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001519 return -EINVAL;
1520 }
1521
1522 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001523 crtc = sde_enc->crtc;
1524
1525 if (!sde_enc->crtc) {
1526 SDE_ERROR("invalid crtc parameter\n");
1527 return -EINVAL;
1528 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001529 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001530 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001531
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001532 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001533 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001534 return 0;
1535 }
1536
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001537 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1538 if (rc) {
1539 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1540 return 0;
1541 }
1542
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001543 /**
1544 * only primary command mode panel can request CMD state.
1545 * all other panels/displays can request for VID state including
1546 * secondary command mode panel.
1547 */
1548 rsc_state = enable ?
1549 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
1550 disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1551 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001552 prefill_lines = config ? mode_info.prefill_lines +
1553 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001554
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001555 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001556 if ((rsc_config->fps != mode_info.frame_rate) ||
1557 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001558 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001559 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1560 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1561 rsc_config->fps = mode_info.frame_rate;
1562 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001563 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001564 rsc_config->jitter_numer = mode_info.jitter_numer;
1565 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001566 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001567 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001568
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001569 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001570 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001571 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001572 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001573
1574 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001575 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001576 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001577 } else {
1578 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001579 rsc_state, NULL, crtc->base.id,
1580 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001581 }
1582
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001583 /**
1584 * if RSC performed a state change that requires a VBLANK wait, it will
1585 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1586 *
1587 * if we are the primary display, we will need to enable and wait
1588 * locally since we hold the commit thread
1589 *
1590 * if we are an external display, we must send a signal to the primary
1591 * to enable its VBLANK and wait one, since the RSC hardware is driven
1592 * by the primary panel's VBLANK signals
1593 */
1594 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1595 if (ret) {
1596 SDE_ERROR_ENC(sde_enc,
1597 "sde rsc client update failed ret:%d\n", ret);
1598 return ret;
1599 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1600 return ret;
1601 }
1602
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001603 if (wait_vblank_crtc_id)
1604 wait_refcount =
1605 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1606 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1607 SDE_EVTLOG_FUNC_ENTRY);
1608
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001609 if (crtc->base.id != wait_vblank_crtc_id) {
1610 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1611 if (!primary_crtc) {
1612 SDE_ERROR_ENC(sde_enc,
1613 "failed to find primary crtc id %d\n",
1614 wait_vblank_crtc_id);
1615 return -EINVAL;
1616 }
1617 pipe = drm_crtc_index(primary_crtc);
1618 }
1619
1620 /**
1621 * note: VBLANK is expected to be enabled at this point in
1622 * resource control state machine if on primary CRTC
1623 */
1624 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1625 if (sde_rsc_client_is_state_update_complete(
1626 sde_enc->rsc_client))
1627 break;
1628
1629 if (crtc->base.id == wait_vblank_crtc_id)
1630 ret = sde_encoder_wait_for_event(drm_enc,
1631 MSM_ENC_VBLANK);
1632 else
1633 drm_wait_one_vblank(drm_enc->dev, pipe);
1634
1635 if (ret) {
1636 SDE_ERROR_ENC(sde_enc,
1637 "wait for vblank failed ret:%d\n", ret);
1638 break;
1639 }
1640 }
1641
1642 if (wait_count >= MAX_RSC_WAIT)
1643 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1644 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001645
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001646 if (wait_refcount)
1647 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1648 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1649 SDE_EVTLOG_FUNC_EXIT);
1650
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001651 return ret;
1652}
1653
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001654static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1655{
1656 struct sde_encoder_virt *sde_enc;
1657 int i;
1658
1659 if (!drm_enc) {
1660 SDE_ERROR("invalid encoder\n");
1661 return;
1662 }
1663
1664 sde_enc = to_sde_encoder_virt(drm_enc);
1665
1666 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1667 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1668 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1669
1670 if (phys && phys->ops.irq_control)
1671 phys->ops.irq_control(phys, enable);
1672 }
1673
1674}
1675
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001676/* keep track of the userspace vblank during modeset */
1677static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1678 u32 sw_event)
1679{
1680 struct sde_encoder_virt *sde_enc;
1681 bool enable;
1682 int i;
1683
1684 if (!drm_enc) {
1685 SDE_ERROR("invalid encoder\n");
1686 return;
1687 }
1688
1689 sde_enc = to_sde_encoder_virt(drm_enc);
1690 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1691 sw_event, sde_enc->vblank_enabled);
1692
1693 /* nothing to do if vblank not enabled by userspace */
1694 if (!sde_enc->vblank_enabled)
1695 return;
1696
1697 /* disable vblank on pre_modeset */
1698 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1699 enable = false;
1700 /* enable vblank on post_modeset */
1701 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1702 enable = true;
1703 else
1704 return;
1705
1706 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1707 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1708
1709 if (phys && phys->ops.control_vblank_irq)
1710 phys->ops.control_vblank_irq(phys, enable);
1711 }
1712}
1713
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001714struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1715{
1716 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001717
1718 if (!drm_enc)
1719 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001720 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001721 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001722}
1723
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001724static void _sde_encoder_resource_control_rsc_update(
1725 struct drm_encoder *drm_enc, bool enable)
1726{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001727 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001728 struct sde_encoder_virt *sde_enc;
1729
1730 if (!drm_enc) {
1731 SDE_ERROR("invalid encoder argument\n");
1732 return;
1733 }
1734 sde_enc = to_sde_encoder_virt(drm_enc);
1735 if (!sde_enc->crtc) {
1736 SDE_ERROR("invalid crtc\n");
1737 return;
1738 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001739
1740 if (enable) {
1741 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001742 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001743
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001744 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1745 } else {
1746 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001747 }
1748}
1749
/**
 * _sde_encoder_resource_control_helper - power up/down encoder resources
 * @drm_enc: base drm encoder; dev/dev_private assumed valid (checked by
 *           callers in sde_encoder_resource_control)
 * @enable: true to power up, false to power down
 *
 * Ordering matters and is mirrored between the two branches:
 * enable:  core clks -> DSI clks -> irqs -> pm_qos (cmd-mode primary only)
 * disable: pm_qos    -> irqs     -> DSI clks -> core clks
 *
 * Return: 0 on success, negative errno if any enable step fails (with the
 * already-enabled resources rolled back).
 */
static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_encoder_virt *sde_enc;
	int rc;
	bool is_cmd_mode, is_primary;

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	/* pm_qos request below applies only to primary command mode panels */
	is_cmd_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_CMD_MODE;
	is_primary = sde_enc->disp_info.is_primary;

	SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
	SDE_EVT32(DRMID(drm_enc), enable);

	if (!sde_enc->cur_master) {
		SDE_ERROR("encoder master not set\n");
		return -EINVAL;
	}

	if (enable) {
		/* enable SDE core clks */
		rc = sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, true);
		if (rc) {
			SDE_ERROR("failed to enable power resource %d\n", rc);
			SDE_EVT32(rc, SDE_EVTLOG_ERROR);
			return rc;
		}

		/* enable DSI clks */
		rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
				true);
		if (rc) {
			SDE_ERROR("failed to enable clk control %d\n", rc);
			/* roll back the core clk vote taken above */
			sde_power_resource_enable(&priv->phandle,
					sde_kms->core_client, false);
			return rc;
		}

		/* enable all the irq */
		_sde_encoder_irq_control(drm_enc, true);

		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_add_request(drm_enc);

	} else {
		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_remove_request(drm_enc);

		/* disable all the irq */
		_sde_encoder_irq_control(drm_enc, false);

		/* disable DSI clks */
		sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);

		/* disable SDE core clks */
		sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, false);
	}

	return 0;
}
1818
/**
 * sde_encoder_resource_control - encoder resource control state machine
 * @drm_enc: base drm encoder
 * @sw_event: one of the SDE_ENC_RC_EVENT_* transitions
 *
 * Transitions the encoder between rc_state values (OFF, ON, PRE_OFF, IDLE,
 * MODESET) in response to kickoff, frame-done, stop, modeset and idle-timer
 * events, enabling/disabling clocks, irqs and the RSC as needed.
 *
 * Locking: all cases take rc_lock except FRAME_DONE, which runs in interrupt
 * context (see the in-line comment there); work cancellation is done before
 * taking the lock to avoid deadlock with the delayed-off work item.
 *
 * Return: 0 on success or ignored event, negative errno on an invalid
 * transition or resource failure.
 */
static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool autorefresh_enabled = false;
	unsigned int lp, idle_pc_duration;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	int ret;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
				sw_event);
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = sde_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!sde_enc->idle_pc_supported &&
			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
			sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_STOP &&
			sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
		return 0;

	SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
			sde_enc->idle_pc_supported);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);

	switch (sw_event) {
	case SDE_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE1);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
				sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
			/* kickoff is only legal from OFF or IDLE */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/* video mode leaving IDLE only needs irqs; clks stayed on */
		if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		} else {
			/* enable all the clks and resources */
			ret = _sde_encoder_resource_control_helper(drm_enc,
					true);
			if (ret) {
				SDE_ERROR_ENC(sde_enc,
						"sw_event:%d, rc in state %d\n",
						sw_event, sde_enc->rc_state);
				SDE_EVT32(DRMID(drm_enc), sw_event,
						sde_enc->rc_state,
						SDE_EVTLOG_ERROR);
				mutex_unlock(&sde_enc->rc_lock);
				return ret;
			}

			_sde_encoder_resource_control_rsc_update(drm_enc, true);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_FRAME_DONE:
		if (!sde_enc->crtc) {
			SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
			return -EINVAL;
		}

		if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
			SDE_ERROR("invalid crtc index :%u\n",
					sde_enc->crtc->index);
			return -EINVAL;
		}
		disp_thread = &priv->disp_thread[sde_enc->crtc->index];

		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE2);
			return 0;
		}

		/* schedule delayed off work if autorefresh is disabled */
		if (sde_enc->cur_master &&
			sde_enc->cur_master->ops.is_autorefresh_enabled)
			autorefresh_enabled =
				sde_enc->cur_master->ops.is_autorefresh_enabled(
							sde_enc->cur_master);

		/* set idle timeout based on master connector's lp value */
		if (sde_enc->cur_master)
			lp = sde_connector_get_lp(
					sde_enc->cur_master->connector);
		else
			lp = SDE_MODE_DPMS_ON;

		/* low-power doze state collapses faster than normal on */
		if (lp == SDE_MODE_DPMS_LP2)
			idle_pc_duration = IDLE_SHORT_TIMEOUT;
		else
			idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;

		if (!autorefresh_enabled)
			kthread_queue_delayed_work(
				&disp_thread->worker,
				&sde_enc->delayed_off_work,
				msecs_to_jiffies(idle_pc_duration));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				autorefresh_enabled,
				idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;

	case SDE_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* video mode in IDLE must re-enable irqs for vblank waits */
		if (is_vid_mode &&
			  sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE3);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/**
		 * IRQs are still enabled currently, which allows wait for
		 * VBLANK which RSC may require to correctly transition to OFF
		 */
		_sde_encoder_resource_control_rsc_update(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_PRE_OFF,
				SDE_EVTLOG_FUNC_CASE3);

		sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_STOP:
		/* cancel vsync event work and timer */
		kthread_cancel_work_sync(&sde_enc->vsync_event_work);
		del_timer_sync(&sde_enc->vsync_event_timer);

		mutex_lock(&sde_enc->rc_lock);
		/* return if the resource control is already in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE4);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
			/* STOP without a preceding PRE_STOP is a caller bug */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/**
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
			_sde_encoder_resource_control_helper(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);

		sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_PRE_MODESET:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			/* enable all the clks and resources */
			ret = _sde_encoder_resource_control_helper(drm_enc,
					true);
			if (ret) {
				SDE_ERROR_ENC(sde_enc,
						"sw_event:%d, rc in state %d\n",
						sw_event, sde_enc->rc_state);
				SDE_EVT32(DRMID(drm_enc), sw_event,
						sde_enc->rc_state,
						SDE_EVTLOG_ERROR);
				mutex_unlock(&sde_enc->rc_lock);
				return ret;
			}

			_sde_encoder_resource_control_rsc_update(drm_enc, true);

			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
			sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
		}

		/* flush the pending frame before touching the hw for modeset */
		ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
		if (ret && ret != -EWOULDBLOCK) {
			SDE_ERROR_ENC(sde_enc,
					"wait for commit done returned %d\n",
					ret);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					ret, SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		_sde_encoder_irq_control(drm_enc, false);
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
			SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);

		sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_POST_MODESET:
		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d !MODESET state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/* restore userspace vblank and irqs disabled in PRE_MODESET */
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);
		_sde_encoder_irq_control(drm_enc, true);

		_sde_encoder_update_rsc_client(drm_enc, NULL, true);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);

		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&sde_enc->rc_lock);

		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (sde_enc->frame_busy_mask[0]) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d frame pending\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* video mode idles with clks on, only irqs gated */
		if (is_vid_mode) {
			_sde_encoder_irq_control(drm_enc, false);
		} else {
			/* disable all the clks and resources */
			_sde_encoder_resource_control_rsc_update(drm_enc,
					false);
			_sde_encoder_resource_control_helper(drm_enc, false);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	default:
		SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
		SDE_ERROR("unexpected sw_event: %d\n", sw_event);
		break;
	}

	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
	return 0;
}
2191
/*
 * sde_encoder_virt_mode_set - drm encoder mode_set callback for the virtual
 * encoder. Finds the attached connector, queries updated mode info from the
 * display, reserves hardware blocks from the resource manager, distributes
 * the reserved pingpong/dsc blocks to the physical encoders, and invokes
 * each phys encoder's mode_set. For seamless DMS switches, resources are
 * restored before release (PRE_MODESET) and re-acquired after the phys
 * mode_set calls (POST_MODESET).
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/* find the connector currently routed to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh cached mode info from the display for the adjusted mode */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the reserved pingpong blocks, one slot per channel */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved dsc blocks, if any were granted */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each phys encoder its pingpong block and run its mode_set */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2316
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002317void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2318{
2319 struct sde_encoder_virt *sde_enc;
2320 struct sde_encoder_phys *phys;
2321 int i;
2322
2323 if (!drm_enc) {
2324 SDE_ERROR("invalid parameters\n");
2325 return;
2326 }
2327
2328 sde_enc = to_sde_encoder_virt(drm_enc);
2329 if (!sde_enc) {
2330 SDE_ERROR("invalid sde encoder\n");
2331 return;
2332 }
2333
2334 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2335 phys = sde_enc->phys_encs[i];
2336 if (phys && phys->ops.control_te)
2337 phys->ops.control_te(phys, enable);
2338 }
2339}
2340
/*
 * _sde_encoder_virt_enable_helper - common post-enable setup shared by the
 * enable and restore paths: selects interface audio for DP, resets UBWC on
 * the top block, reprograms the vsync source, enables tearcheck, and clears
 * the cached connector ROIs.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the interface only for DisplayPort connectors */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
					sde_enc->cur_master->hw_mdptop);

	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* drop any stale ROI state from the previous use case */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2383
2384void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2385{
2386 struct sde_encoder_virt *sde_enc = NULL;
2387 int i;
2388
2389 if (!drm_enc) {
2390 SDE_ERROR("invalid encoder\n");
2391 return;
2392 }
2393 sde_enc = to_sde_encoder_virt(drm_enc);
2394
2395 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2396 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2397
2398 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2399 phys->ops.restore(phys);
2400 }
2401
2402 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2403 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2404
2405 _sde_encoder_virt_enable_helper(drm_enc);
2406}
2407
/*
 * sde_encoder_virt_enable - drm encoder enable callback for the virtual
 * encoder. Caches the crtc, elects the master physical encoder, signals
 * resource control that a use case is starting (KICKOFF), then enables
 * (or restores, for a seamless DMS switch) every physical encoder -
 * slaves first, master last - before scheduling the ESD status work.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;
	struct drm_connector *drm_conn = NULL;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	comp_info = &mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* elect the first phys encoder that reports itself as master */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* notify resource control before programming the phys encoders */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* enable all slave encoders; the master is handled afterwards */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/*
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* MISR collection only applies to video-mode displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* enable (or restore, for a seamless switch) the master last */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);

	/* Enable ESD thread */
	drm_conn = sde_enc->cur_master->connector;
	sde_connector_schedule_status_work(drm_conn, true);
}
2511
2512static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2513{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002514 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002515 struct msm_drm_private *priv;
2516 struct sde_kms *sde_kms;
Sandeep Panda318cff12017-10-20 13:16:03 +05302517 struct drm_connector *drm_conn = NULL;
Clarence Iped3327b2017-11-01 13:13:58 -04002518 enum sde_intf_mode intf_mode;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002519 int i = 0;
2520
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002521 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002522 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002523 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002524 } else if (!drm_enc->dev) {
2525 SDE_ERROR("invalid dev\n");
2526 return;
2527 } else if (!drm_enc->dev->dev_private) {
2528 SDE_ERROR("invalid dev_private\n");
2529 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002530 }
2531
Alan Kwong1124f1f2017-11-10 18:14:39 -05002532 if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
2533 SDE_ERROR("power resource is not enabled\n");
2534 return;
2535 }
2536
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002537 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002538 SDE_DEBUG_ENC(sde_enc, "\n");
2539
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002540 priv = drm_enc->dev->dev_private;
2541 sde_kms = to_sde_kms(priv->kms);
Clarence Iped3327b2017-11-01 13:13:58 -04002542 intf_mode = sde_encoder_get_intf_mode(drm_enc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002543
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002544 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002545
Sandeep Panda318cff12017-10-20 13:16:03 +05302546 /* Disable ESD thread */
2547 drm_conn = sde_enc->cur_master->connector;
2548 sde_connector_schedule_status_work(drm_conn, false);
2549
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002550 /* wait for idle */
2551 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2552
Clarence Iped3327b2017-11-01 13:13:58 -04002553 /*
2554 * For primary command mode encoders, execute the resource control
2555 * pre-stop operations before the physical encoders are disabled, to
2556 * allow the rsc to transition its states properly.
2557 *
2558 * For other encoder types, rsc should not be enabled until after
2559 * they have been fully disabled, so delay the pre-stop operations
2560 * until after the physical disable calls have returned.
2561 */
2562 if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
2563 sde_encoder_resource_control(drm_enc,
2564 SDE_ENC_RC_EVENT_PRE_STOP);
2565 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2566 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002567
Clarence Iped3327b2017-11-01 13:13:58 -04002568 if (phys && phys->ops.disable)
2569 phys->ops.disable(phys);
2570 }
2571 } else {
2572 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2573 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002574
Clarence Iped3327b2017-11-01 13:13:58 -04002575 if (phys && phys->ops.disable)
2576 phys->ops.disable(phys);
2577 }
2578 sde_encoder_resource_control(drm_enc,
2579 SDE_ENC_RC_EVENT_PRE_STOP);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002580 }
2581
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002582 /*
2583 * disable dsc after the transfer is complete (for command mode)
2584 * and after physical encoder is disabled, to make sure timing
2585 * engine is already disabled (for video mode).
2586 */
2587 _sde_encoder_dsc_disable(sde_enc);
2588
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002589 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2590
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002591 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2592 if (sde_enc->phys_encs[i])
2593 sde_enc->phys_encs[i]->connector = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002594 }
2595
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002596 sde_enc->cur_master = NULL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002597 /*
2598 * clear the cached crtc in sde_enc on use case finish, after all the
2599 * outstanding events and timers have been completed
2600 */
2601 sde_enc->crtc = NULL;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002602
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002603 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002604
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002605 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002606}
2607
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002608static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002609 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002610{
2611 int i = 0;
2612
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002613 for (i = 0; i < catalog->intf_count; i++) {
2614 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002615 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002616 return catalog->intf[i].id;
2617 }
2618 }
2619
2620 return INTF_MAX;
2621}
2622
Alan Kwongbb27c092016-07-20 16:41:25 -04002623static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2624 enum sde_intf_type type, u32 controller_id)
2625{
2626 if (controller_id < catalog->wb_count)
2627 return catalog->wb[controller_id].id;
2628
2629 return WB_MAX;
2630}
2631
/*
 * sde_encoder_vblank_callback - notification from a physical encoder that
 * a vblank occurred; forwards it to the registered crtc vblank callback
 * under the encoder spinlock and bumps the per-phys vsync counter.
 */
static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_vblank_callback");
	sde_enc = to_sde_encoder_virt(drm_enc);

	/*
	 * enc_spinlock serializes against
	 * sde_encoder_register_vblank_callback() updating the cb pointer
	 */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	SDE_ATRACE_END("encoder_vblank_callback");
}
2652
/*
 * sde_encoder_underrun_callback - notification from a physical encoder
 * that a display underrun occurred; bumps the underrun counter and emits
 * event-log/ftrace records for postmortem analysis.
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));

	trace_sde_encoder_underrun(DRMID(drm_enc),
		atomic_read(&phy_enc->underrun_cnt));

	/* debug hooks; actual effect depends on SDE_DBG_CTRL configuration */
	SDE_DBG_CTRL("stop_ftrace");
	SDE_DBG_CTRL("panic_underrun");

	SDE_ATRACE_END("encoder_underrun_callback");
}
2671
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002672void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2673 void (*vbl_cb)(void *), void *vbl_data)
2674{
2675 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2676 unsigned long lock_flags;
2677 bool enable;
2678 int i;
2679
2680 enable = vbl_cb ? true : false;
2681
Clarence Ip19af1362016-09-23 14:57:51 -04002682 if (!drm_enc) {
2683 SDE_ERROR("invalid encoder\n");
2684 return;
2685 }
2686 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002687 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002688
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002689 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002690 sde_enc->crtc_vblank_cb = vbl_cb;
2691 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002692 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002693
2694 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2695 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2696
2697 if (phys && phys->ops.control_vblank_irq)
2698 phys->ops.control_vblank_irq(phys, enable);
2699 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002700 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002701}
2702
Alan Kwong628d19e2016-10-31 13:50:13 -04002703void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2704 void (*frame_event_cb)(void *, u32 event),
2705 void *frame_event_cb_data)
2706{
2707 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2708 unsigned long lock_flags;
2709 bool enable;
2710
2711 enable = frame_event_cb ? true : false;
2712
2713 if (!drm_enc) {
2714 SDE_ERROR("invalid encoder\n");
2715 return;
2716 }
2717 SDE_DEBUG_ENC(sde_enc, "\n");
2718 SDE_EVT32(DRMID(drm_enc), enable, 0);
2719
2720 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2721 sde_enc->crtc_frame_event_cb = frame_event_cb;
2722 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2723 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2724}
2725
/*
 * sde_encoder_frame_done_callback - handler invoked by a physical encoder
 * when it finishes (or fails) a frame. For done/error/panel-dead events
 * the per-phys busy bit is cleared and, once all physical encoders are
 * idle, resource control is notified and the crtc frame-event callback is
 * fired. All other events are forwarded to the crtc callback directly.
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (SDE_ENCODER_FRAME_EVENT_DONE
			| SDE_ENCODER_FRAME_EVENT_ERROR
			| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!sde_enc->frame_busy_mask[0]) {
			/*
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			if (sde_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, sde_enc->frame_busy_mask);
				SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
			}
		}

		/* all phys encoders idle: notify rc, then the crtc */
		if (!sde_enc->frame_busy_mask[0]) {
			sde_encoder_resource_control(drm_enc,
					SDE_ENC_RC_EVENT_FRAME_DONE);

			if (sde_enc->crtc_frame_event_cb)
				sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
2770
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002771static void sde_encoder_off_work(struct kthread_work *work)
2772{
2773 struct sde_encoder_virt *sde_enc = container_of(work,
2774 struct sde_encoder_virt, delayed_off_work.work);
2775
2776 if (!sde_enc) {
2777 SDE_ERROR("invalid sde encoder\n");
2778 return;
2779 }
2780
2781 sde_encoder_resource_control(&sde_enc->base,
2782 SDE_ENC_RC_EVENT_ENTER_IDLE);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002783}
2784
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phys: Pointer to physical encoder structure
 * @extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the pending kickoff count (and, on the master, the pending
 * retire fence count) before programming the flush through the phys
 * encoder's trigger_flush op. Encoders with the ENC_ROLE_SKIP split role
 * do not flush on their own and only log the skip.
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* retire fences are only counted on the master encoder */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	/* log the pending flush mask when the ctl block can report it */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				pending_kickoff_cnt, ctl->idx - CTL_0,
				ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				ctl->idx - CTL_0, pending_kickoff_cnt);
}
2840
2841/**
2842 * _sde_encoder_trigger_start - trigger start for a physical encoder
2843 * phys: Pointer to physical encoder structure
2844 */
2845static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2846{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002847 struct sde_hw_ctl *ctl;
2848
Clarence Ip110d15c2016-08-16 14:44:41 -04002849 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002850 SDE_ERROR("invalid argument(s)\n");
2851 return;
2852 }
2853
2854 if (!phys->hw_pp) {
2855 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002856 return;
2857 }
2858
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002859 ctl = phys->hw_ctl;
2860 if (phys->split_role == ENC_ROLE_SKIP) {
2861 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2862 "skip start pp%d ctl%d\n",
2863 phys->hw_pp->idx - PINGPONG_0,
2864 ctl->idx - CTL_0);
2865 return;
2866 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002867 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
2868 phys->ops.trigger_start(phys);
2869}
2870
Alan Kwong4212dd42017-09-19 17:22:33 -04002871void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
2872{
2873 struct sde_hw_ctl *ctl;
2874
2875 if (!phys_enc) {
2876 SDE_ERROR("invalid encoder\n");
2877 return;
2878 }
2879
2880 ctl = phys_enc->hw_ctl;
2881 if (ctl && ctl->ops.trigger_flush)
2882 ctl->ops.trigger_flush(ctl);
2883}
2884
Clarence Ip110d15c2016-08-16 14:44:41 -04002885void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
2886{
2887 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04002888
2889 if (!phys_enc) {
2890 SDE_ERROR("invalid encoder\n");
2891 return;
2892 }
2893
2894 ctl = phys_enc->hw_ctl;
2895 if (ctl && ctl->ops.trigger_start) {
2896 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04002897 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04002898 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002899}
2900
/*
 * _sde_encoder_wait_timeout - wait until info->atomic_cnt drops to zero,
 * retrying the wait as long as the counter is still non-zero, the wait
 * returned 0 (timeout) and the overall ktime-based deadline has not passed.
 * Returns the last wait_event_timeout() result: 0 on timeout, else the
 * remaining jiffies.
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	/* absolute deadline; loop re-waits until this point is reached */
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
2922
/*
 * sde_encoder_helper_wait_event_timeout - wait for info->atomic_cnt to
 * reach zero within info->timeout_ms, with one extra grace wait if the
 * timer interrupt itself appears to have been delayed.
 * Returns 0 on timeout, else remaining jiffies from the successful wait.
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
	struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/**
	 * Handle the disabled-irq case where the timer irq is also delayed:
	 * if the counter is still pending, the wait reported timeout, and we
	 * detect that the deadline expired noticeably late (by more than
	 * FAULT_TOLERENCE_DELTA_IN_MS), retry one additional bounded wait of
	 * FAULT_TOLERENCE_WAIT_IN_MS.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
	    (ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
	    FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
			FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
2944
/*
 * sde_encoder_helper_hw_reset - reset the ctl path of the given physical
 * encoder. On the master encoder, additionally request a connector soft
 * reset; if that fails, trigger a debug dump/panic. Finally marks the
 * encoder enabled again.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	/* nothing to do without a ctl block that supports reset */
	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* only the master talks to the connector/panel */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	/* reset complete; encoder resumes normal operation */
	phys_enc->enable_state = SDE_ENC_ENABLED;
}
2984
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 *	Iterate through the physical encoders and perform consolidated flush
 *	and/or control start triggering as needed. This is done in the virtual
 *	encoder rather than the individual physical ones in order to handle
 *	use cases that require visibility into multiple physical encoders at
 *	a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		bool wait_for_dma = false;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->ops.wait_dma_trigger)
			wait_for_dma = phys->ops.wait_dma_trigger(phys);

		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
					wait_for_dma);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/* flush immediately, or accumulate into the master's flush */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	/* start is always issued through the current master */
	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
3077
/*
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - on a pingpong-split
 * topology, swap the two physical encoders' interface assignments for
 * right-only updates (and swap back otherwise) so the master always sits on
 * an interface that generates IRQs.
 * @drm_enc: encoder being updated
 * @affected_displays: bitmask of displays touched by this commit; may be
 *	rewritten to BIT(0) when only one phys is active
 * @num_active_phys: number of bits set in @affected_displays
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	/* this adjustment only applies to the ppsplit topology */
	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3136
/*
 * _sde_encoder_update_master - reassign split roles (solo/master/slave/skip)
 * across the physical encoders for this commit, based on which displays are
 * affected, and update sde_enc->cur_master accordingly. No-op for
 * single-phys encoders.
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* role juggling only matters with more than one phys encoder */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
			num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/* first active phys becomes master (or solo if alone) */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
3199
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303200bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003201{
3202 struct sde_encoder_virt *sde_enc;
3203 struct msm_display_info *disp_info;
3204
3205 if (!drm_enc) {
3206 SDE_ERROR("invalid encoder\n");
3207 return false;
3208 }
3209
3210 sde_enc = to_sde_encoder_virt(drm_enc);
3211 disp_info = &sde_enc->disp_info;
3212
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303213 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003214}
3215
Dhaval Patel0e558f42017-04-30 00:51:40 -07003216void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3217{
3218 struct sde_encoder_virt *sde_enc;
3219 struct sde_encoder_phys *phys;
3220 unsigned int i;
3221 struct sde_hw_ctl *ctl;
3222 struct msm_display_info *disp_info;
3223
3224 if (!drm_enc) {
3225 SDE_ERROR("invalid encoder\n");
3226 return;
3227 }
3228 sde_enc = to_sde_encoder_virt(drm_enc);
3229 disp_info = &sde_enc->disp_info;
3230
3231 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3232 phys = sde_enc->phys_encs[i];
3233
3234 if (phys && phys->hw_ctl) {
3235 ctl = phys->hw_ctl;
3236 if (ctl->ops.clear_pending_flush)
3237 ctl->ops.clear_pending_flush(ctl);
3238
3239 /* update only for command mode primary ctl */
3240 if ((phys == sde_enc->cur_master) &&
3241 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3242 && ctl->ops.trigger_pending)
3243 ctl->ops.trigger_pending(ctl);
3244 }
3245 }
3246}
3247
Ping Li8430ee12017-02-24 14:14:44 -08003248static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3249{
3250 void *dither_cfg;
3251 int ret = 0;
3252 size_t len = 0;
3253 enum sde_rm_topology_name topology;
3254
3255 if (!phys || !phys->connector || !phys->hw_pp ||
3256 !phys->hw_pp->ops.setup_dither)
3257 return;
3258 topology = sde_connector_get_topology_name(phys->connector);
3259 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3260 (phys->split_role == ENC_ROLE_SLAVE))
3261 return;
3262
3263 ret = sde_connector_get_dither_cfg(phys->connector,
3264 phys->connector->state, &dither_cfg, &len);
3265 if (!ret)
3266 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
3267}
3268
/*
 * _sde_encoder_calculate_linetime - compute one display line's duration in
 * nanoseconds from the mode's pixel clock (kHz) and horizontal total.
 * Returns 0 on any error (no master, missing op, or zero intermediate).
 */
static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
		struct drm_display_mode *mode)
{
	u64 pclk_rate;
	u32 pclk_period;
	u32 line_time;

	/*
	 * For linetime calculation, only operate on master encoder.
	 */
	if (!sde_enc->cur_master)
		return 0;

	if (!sde_enc->cur_master->ops.get_line_count) {
		SDE_ERROR("get_line_count function not defined\n");
		return 0;
	}

	pclk_rate = mode->clock; /* pixel clock in kHz */
	if (pclk_rate == 0) {
		SDE_ERROR("pclk is 0, cannot calculate line time\n");
		return 0;
	}

	/* 1e9 / kHz -> pixel period in picoseconds */
	pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
	if (pclk_period == 0) {
		SDE_ERROR("pclk period is 0\n");
		return 0;
	}

	/*
	 * Line time calculation based on Pixel clock and HTOTAL.
	 * Final unit is in ns.
	 */
	line_time = (pclk_period * mode->htotal) / 1000;
	if (line_time == 0) {
		SDE_ERROR("line time calculation is 0\n");
		return 0;
	}

	SDE_DEBUG_ENC(sde_enc,
			"clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
			pclk_rate, pclk_period, line_time);

	return line_time;
}
3315
3316static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
3317 ktime_t *wakeup_time)
3318{
3319 struct drm_display_mode *mode;
3320 struct sde_encoder_virt *sde_enc;
3321 u32 cur_line;
3322 u32 line_time;
3323 u32 vtotal, time_to_vsync;
3324 ktime_t cur_time;
3325
3326 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003327 mode = &sde_enc->cur_master->cached_mode;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003328
3329 line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
3330 if (!line_time)
3331 return -EINVAL;
3332
3333 cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);
3334
3335 vtotal = mode->vtotal;
3336 if (cur_line >= vtotal)
3337 time_to_vsync = line_time * vtotal;
3338 else
3339 time_to_vsync = line_time * (vtotal - cur_line);
3340
3341 if (time_to_vsync == 0) {
3342 SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
3343 vtotal);
3344 return -EINVAL;
3345 }
3346
3347 cur_time = ktime_get();
3348 *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
3349
3350 SDE_DEBUG_ENC(sde_enc,
3351 "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
3352 cur_line, vtotal, time_to_vsync,
3353 ktime_to_ms(cur_time),
3354 ktime_to_ms(*wakeup_time));
3355 return 0;
3356}
3357
3358static void sde_encoder_vsync_event_handler(unsigned long data)
3359{
3360 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3361 struct sde_encoder_virt *sde_enc;
3362 struct msm_drm_private *priv;
3363 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003364
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003365 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3366 SDE_ERROR("invalid encoder parameters\n");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003367 return;
3368 }
3369
3370 sde_enc = to_sde_encoder_virt(drm_enc);
3371 priv = drm_enc->dev->dev_private;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003372 if (!sde_enc->crtc) {
3373 SDE_ERROR("invalid crtc");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003374 return;
3375 }
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003376
3377 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3378 SDE_ERROR("invalid crtc index:%u\n",
3379 sde_enc->crtc->index);
3380 return;
3381 }
3382 event_thread = &priv->event_thread[sde_enc->crtc->index];
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003383 if (!event_thread) {
3384 SDE_ERROR("event_thread not found for crtc:%d\n",
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003385 sde_enc->crtc->index);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003386 return;
3387 }
3388
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303389 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003390 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003391}
3392
/*
 * sde_encoder_vsync_event_work_handler - worker body for the vsync event;
 * with power enabled, re-arms the vsync event timer for the next vsync
 * only while panel autorefresh is enabled. Power is always released on
 * exit via the goto label.
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* hardware access below requires the power rails on */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
				sde_enc->cur_master);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		goto exit;

	rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
	if (rc)
		goto exit;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));

exit:
	/* balance the enable above; error path also lands here */
	_sde_encoder_power_enable(sde_enc, false);
}
3433
/*
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for the
 * upcoming kickoff: per-phys prepare (may wait on the previous frame),
 * dither setup, resource-control KICKOFF event, optional hw reset of all
 * phys, master/ROI update, connector pre-kickoff and DSC setup.
 * Non-fatal failures are accumulated into 'ret' so every step still runs;
 * only a resource-control failure returns immediately.
 * Returns 0 on success, else the last error encountered.
 */
int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	bool needs_hw_reset = false;
	/* NOTE(review): unsigned, but -EINVAL is stored as a sentinel when
	 * no line count is available -- matches the SDE_EVT32 logging use
	 */
	uint32_t ln_cnt1, ln_cnt2;
	unsigned int i;
	int rc, ret = 0;

	if (!drm_enc || !params) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* save this for later, in case of errors */
	if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
		ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
				sde_enc->cur_master);
	else
		ln_cnt1 = -EINVAL;

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff) {
				rc = phys->ops.prepare_for_kickoff(
						phys, params);
				if (rc)
					ret = rc;
			}
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
		return rc;
	}

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		/* query line count before cur_master is updated */
		if (sde_enc->cur_master &&
				sde_enc->cur_master->ops.get_wr_line_count)
			ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
					sde_enc->cur_master);
		else
			ln_cnt2 = -EINVAL;

		SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
				SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	/* roles/ROI must be settled before connector and DSC programming */
	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
			ret = rc;
		}
	}

	if (_sde_encoder_is_dsc_enabled(drm_enc)) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
			ret = rc;
		}
	}

	return ret;
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003527
Clarence Ip662698e2017-09-12 18:34:16 -04003528/**
3529 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3530 * with the specified encoder, and unstage all pipes from it
3531 * @encoder: encoder pointer
3532 * Returns: 0 on success
3533 */
3534static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
3535{
3536 struct sde_encoder_virt *sde_enc;
3537 struct sde_encoder_phys *phys;
3538 unsigned int i;
3539 int rc = 0;
3540
3541 if (!drm_enc) {
3542 SDE_ERROR("invalid encoder\n");
3543 return -EINVAL;
3544 }
3545
3546 sde_enc = to_sde_encoder_virt(drm_enc);
3547
3548 SDE_ATRACE_BEGIN("encoder_release_lm");
3549 SDE_DEBUG_ENC(sde_enc, "\n");
3550
3551 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3552 phys = sde_enc->phys_encs[i];
3553 if (!phys)
3554 continue;
3555
3556 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
3557
3558 rc = sde_encoder_helper_reset_mixers(phys, NULL);
3559 if (rc)
3560 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
3561 }
3562
3563 SDE_ATRACE_END("encoder_release_lm");
3564 return rc;
3565}
3566
/*
 * sde_encoder_kickoff - trigger the frame: on error commits first release
 * buffers via a no-pipes ctl reset, then perform the consolidated
 * flush/start, run per-phys post-kickoff hooks, and for DSI re-arm the
 * vsync event timer.
 * @drm_enc: encoder to kick off
 * @is_error: true when this commit is an error/teardown commit
 */
void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_ATRACE_BEGIN("encoder_kickoff");
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	/* create a 'no pipes' commit to release buffers on errors */
	if (is_error)
		_sde_encoder_reset_ctl_hw(drm_enc);

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	/* DSI only: schedule the vsync event work at the next vsync time */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
			!_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
		mod_timer(&sde_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	SDE_ATRACE_END("encoder_kickoff");
}
3606
Clarence Ip662698e2017-09-12 18:34:16 -04003607int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003608 struct drm_framebuffer *fb)
3609{
3610 struct drm_encoder *drm_enc;
3611 struct sde_hw_mixer_cfg mixer;
3612 struct sde_rm_hw_iter lm_iter;
3613 bool lm_valid = false;
3614
3615 if (!phys_enc || !phys_enc->parent) {
3616 SDE_ERROR("invalid encoder\n");
3617 return -EINVAL;
3618 }
3619
3620 drm_enc = phys_enc->parent;
3621 memset(&mixer, 0, sizeof(mixer));
3622
3623 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003624 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
3625 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
3626
3627 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
3628 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
3629 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
3630
3631 if (!hw_lm)
3632 continue;
3633
3634 /* need to flush LM to remove it */
3635 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
3636 phys_enc->hw_ctl->ops.update_pending_flush)
3637 phys_enc->hw_ctl->ops.update_pending_flush(
3638 phys_enc->hw_ctl,
3639 phys_enc->hw_ctl->ops.get_bitmask_mixer(
3640 phys_enc->hw_ctl, hw_lm->idx));
3641
3642 if (fb) {
3643 /* assume a single LM if targeting a frame buffer */
3644 if (lm_valid)
3645 continue;
3646
3647 mixer.out_height = fb->height;
3648 mixer.out_width = fb->width;
3649
3650 if (hw_lm->ops.setup_mixer_out)
3651 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
3652 }
3653
3654 lm_valid = true;
3655
3656 /* only enable border color on LM */
3657 if (phys_enc->hw_ctl->ops.setup_blendstage)
3658 phys_enc->hw_ctl->ops.setup_blendstage(
Dhaval Patel572cfd22017-06-12 19:33:39 -07003659 phys_enc->hw_ctl, hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003660 }
3661
3662 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04003663 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003664 return -EFAULT;
3665 }
3666 return 0;
3667}
3668
Lloyd Atkinsone123c172017-02-27 13:19:08 -05003669void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
3670{
3671 struct sde_encoder_virt *sde_enc;
3672 struct sde_encoder_phys *phys;
3673 int i;
3674
3675 if (!drm_enc) {
3676 SDE_ERROR("invalid encoder\n");
3677 return;
3678 }
3679 sde_enc = to_sde_encoder_virt(drm_enc);
3680
3681 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3682 phys = sde_enc->phys_encs[i];
3683 if (phys && phys->ops.prepare_commit)
3684 phys->ops.prepare_commit(phys);
3685 }
3686}
3687
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07003688#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003689static int _sde_encoder_status_show(struct seq_file *s, void *data)
3690{
3691 struct sde_encoder_virt *sde_enc;
3692 int i;
3693
3694 if (!s || !s->private)
3695 return -EINVAL;
3696
3697 sde_enc = s->private;
3698
3699 mutex_lock(&sde_enc->enc_lock);
3700 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3701 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3702
3703 if (!phys)
3704 continue;
3705
3706 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
3707 phys->intf_idx - INTF_0,
3708 atomic_read(&phys->vsync_cnt),
3709 atomic_read(&phys->underrun_cnt));
3710
3711 switch (phys->intf_mode) {
3712 case INTF_MODE_VIDEO:
3713 seq_puts(s, "mode: video\n");
3714 break;
3715 case INTF_MODE_CMD:
3716 seq_puts(s, "mode: command\n");
3717 break;
3718 case INTF_MODE_WB_BLOCK:
3719 seq_puts(s, "mode: wb block\n");
3720 break;
3721 case INTF_MODE_WB_LINE:
3722 seq_puts(s, "mode: wb line\n");
3723 break;
3724 default:
3725 seq_puts(s, "mode: ???\n");
3726 break;
3727 }
3728 }
3729 mutex_unlock(&sde_enc->enc_lock);
3730
3731 return 0;
3732}
3733
/* debugfs open hook: bind the "status" node to _sde_encoder_status_show */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
3739
Dhaval Patelf9245d62017-03-28 16:24:00 -07003740static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303741 const char __user *user_buf, size_t count, loff_t *ppos)
3742{
3743 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003744 int i = 0, rc;
3745 char buf[MISR_BUFF_SIZE + 1];
3746 size_t buff_copy;
3747 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303748
Dhaval Patelf9245d62017-03-28 16:24:00 -07003749 if (!file || !file->private_data)
3750 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303751
Dhaval Patelf9245d62017-03-28 16:24:00 -07003752 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303753
Dhaval Patelf9245d62017-03-28 16:24:00 -07003754 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
3755 if (copy_from_user(buf, user_buf, buff_copy))
3756 return -EINVAL;
3757
3758 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303759
3760 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07003761 return -EINVAL;
3762
3763 rc = _sde_encoder_power_enable(sde_enc, true);
3764 if (rc)
3765 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303766
3767 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003768 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07003769 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303770 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3771 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3772
Dhaval Patelf9245d62017-03-28 16:24:00 -07003773 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303774 continue;
3775
Dhaval Patelf9245d62017-03-28 16:24:00 -07003776 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303777 }
3778 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003779 _sde_encoder_power_enable(sde_enc, false);
3780
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303781 return count;
3782}
3783
/*
 * _sde_encoder_misr_read - debugfs read handler for the "misr_data" node
 *
 * Reports "disabled"/"unsupported", or one "Intf idx:%d" + MISR signature
 * pair per physical encoder that implements collect_misr().
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	/* whole report fits in one read; nothing more after first call */
	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* registers are read below; hardware must be powered */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/*
		 * NOTE(review): this is true whenever ANY capability bit
		 * other than VID_MODE is set, even if VID_MODE is also set.
		 * If the intent is "MISR only supported in video mode",
		 * !(capabilities & MSM_DISPLAY_CAP_VID_MODE) may be the
		 * intended check -- confirm before changing.
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.collect_misr)
			continue;

		/*
		 * NOTE(review): snprintf returns the would-be length; if the
		 * output ever exceeded MISR_BUFF_SIZE, (MISR_BUFF_SIZE - len)
		 * would underflow when passed as size_t. Bounded in practice
		 * by the small number of phys encoders -- worth confirming.
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* report nothing if the caller's buffer cannot hold the full text */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
3845
/*
 * _sde_encoder_init_debugfs - create the per-encoder debugfs subtree
 *
 * Creates "encoder<id>/" under the DRM primary debugfs root, populates it
 * with the "status" and "misr_data" nodes, and lets each physical encoder
 * add its own entries via its late_register op.
 *
 * Return: 0 on success, -EINVAL on bad args, -ENOMEM if the directory
 * could not be created.
 */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i;

	/* fops tables must be static: debugfs keeps references to them */
	static const struct file_operations debugfs_status_fops = {
		.open = _sde_encoder_debugfs_status_open,
		.read = seq_read,
		.llseek = seq_lseek,
		.release = single_release,
	};

	static const struct file_operations debugfs_misr_fops = {
		.open = simple_open,
		.read = _sde_encoder_misr_read,
		.write = _sde_encoder_misr_setup,
	};

	char name[SDE_NAME_SIZE];

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid encoder or kms\n");
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);

	/* create overall sub-directory for the encoder */
	sde_enc->debugfs_root = debugfs_create_dir(name,
			drm_enc->dev->primary->debugfs_root);
	if (!sde_enc->debugfs_root)
		return -ENOMEM;

	/* don't error check these */
	debugfs_create_file("status", 0600,
		sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);

	debugfs_create_file("misr_data", 0600,
		sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);

	/* let each phys encoder hang its own entries off the same root */
	for (i = 0; i < sde_enc->num_phys_encs; i++)
		if (sde_enc->phys_encs[i] &&
				sde_enc->phys_encs[i]->ops.late_register)
			sde_enc->phys_encs[i]->ops.late_register(
					sde_enc->phys_encs[i],
					sde_enc->debugfs_root);

	return 0;
}
3901
3902static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
3903{
3904 struct sde_encoder_virt *sde_enc;
3905
3906 if (!drm_enc)
3907 return;
3908
3909 sde_enc = to_sde_encoder_virt(drm_enc);
3910 debugfs_remove_recursive(sde_enc->debugfs_root);
3911}
3912#else
/* stub when CONFIG_DEBUG_FS is disabled: no debugfs nodes to create */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
3917
/* stub when CONFIG_DEBUG_FS is disabled: nothing to tear down */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
3921#endif
3922
/* drm_encoder_funcs.late_register: create this encoder's debugfs nodes */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
3927
/* drm_encoder_funcs.early_unregister: remove this encoder's debugfs nodes */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
3932
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003933static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04003934 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003935 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003936 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003937{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003938 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003939
Clarence Ip19af1362016-09-23 14:57:51 -04003940 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003941
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003942 /*
3943 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
3944 * in this function, check up-front.
3945 */
3946 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
3947 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003948 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003949 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003950 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003951 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003952
Clarence Ipa4039322016-07-15 16:23:59 -04003953 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003954 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003955
3956 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003957 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003958 PTR_ERR(enc));
3959 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3960 }
3961
3962 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3963 ++sde_enc->num_phys_encs;
3964 }
3965
Clarence Ipa4039322016-07-15 16:23:59 -04003966 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003967 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003968
3969 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003970 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003971 PTR_ERR(enc));
3972 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3973 }
3974
3975 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3976 ++sde_enc->num_phys_encs;
3977 }
3978
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003979 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003980}
3981
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003982static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
3983 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04003984{
3985 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04003986
Clarence Ip19af1362016-09-23 14:57:51 -04003987 if (!sde_enc) {
3988 SDE_ERROR("invalid encoder\n");
3989 return -EINVAL;
3990 }
3991
3992 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04003993
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003994 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003995 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003996 sde_enc->num_phys_encs);
3997 return -EINVAL;
3998 }
3999
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004000 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004001
4002 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004003 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004004 PTR_ERR(enc));
4005 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4006 }
4007
4008 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4009 ++sde_enc->num_phys_encs;
4010
4011 return 0;
4012}
4013
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004014static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004015 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04004016 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004017 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004018{
4019 int ret = 0;
4020 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004021 enum sde_intf_type intf_type;
4022 struct sde_encoder_virt_ops parent_ops = {
4023 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07004024 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04004025 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004026 };
4027 struct sde_enc_phys_init_params phys_params;
4028
Clarence Ip19af1362016-09-23 14:57:51 -04004029 if (!sde_enc || !sde_kms) {
4030 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
4031 sde_enc != 0, sde_kms != 0);
4032 return -EINVAL;
4033 }
4034
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004035 memset(&phys_params, 0, sizeof(phys_params));
4036 phys_params.sde_kms = sde_kms;
4037 phys_params.parent = &sde_enc->base;
4038 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004039 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004040
Clarence Ip19af1362016-09-23 14:57:51 -04004041 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004042
Clarence Ipa4039322016-07-15 16:23:59 -04004043 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004044 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
4045 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04004046 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004047 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4048 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07004049 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
4050 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4051 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04004052 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
4053 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
4054 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004055 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04004056 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004057 return -EINVAL;
4058 }
4059
Clarence Ip88270a62016-06-26 10:09:34 -04004060 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004061
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004062 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
4063
Clarence Ip19af1362016-09-23 14:57:51 -04004064 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004065
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004066 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
4067 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004068 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
4069
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004070 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04004071 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004072 /*
4073 * Left-most tile is at index 0, content is controller id
4074 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
4075 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
4076 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004077 u32 controller_id = disp_info->h_tile_instance[i];
4078
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004079 if (disp_info->num_of_h_tiles > 1) {
4080 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004081 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004082 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004083 phys_params.split_role = ENC_ROLE_SLAVE;
4084 } else {
4085 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004086 }
4087
Clarence Ip19af1362016-09-23 14:57:51 -04004088 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004089 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004090
Alan Kwongbb27c092016-07-20 16:41:25 -04004091 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004092 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004093 phys_params.wb_idx = sde_encoder_get_wb(
4094 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04004095 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004096 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004097 SDE_ERROR_ENC(sde_enc,
4098 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004099 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004100 ret = -EINVAL;
4101 }
Alan Kwongbb27c092016-07-20 16:41:25 -04004102 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004103 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004104 phys_params.intf_idx = sde_encoder_get_intf(
4105 sde_kms->catalog, intf_type,
4106 controller_id);
4107 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004108 SDE_ERROR_ENC(sde_enc,
4109 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004110 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004111 ret = -EINVAL;
4112 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004113 }
4114
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004115 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04004116 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004117 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
4118 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004119 else
4120 ret = sde_encoder_virt_add_phys_encs(
4121 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004122 sde_enc,
4123 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004124 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04004125 SDE_ERROR_ENC(sde_enc,
4126 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004127 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004128 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08004129
4130 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4131 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4132
4133 if (phys) {
4134 atomic_set(&phys->vsync_cnt, 0);
4135 atomic_set(&phys->underrun_cnt, 0);
4136 }
4137 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004138 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004139
4140 return ret;
4141}
4142
/* drm_encoder helper vtable: modeset/enable/disable/atomic-check hooks */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4149
/* drm_encoder base vtable: lifecycle and debugfs registration hooks */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4155
/*
 * sde_encoder_init - allocate and initialize a virtual encoder for the
 *	given display, including its physical encoders, locks, work items,
 *	vsync timer (primary DSI only) and RSC client.
 *
 * Return: pointer to the new drm_encoder on success, ERR_PTR otherwise.
 */
struct drm_encoder *sde_encoder_init(
		struct drm_device *dev,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct sde_kms *sde_kms = to_sde_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct sde_encoder_virt *sde_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	char name[SDE_NAME_SIZE];
	int ret = 0;

	sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
	if (!sde_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	mutex_init(&sde_enc->enc_lock);
	/*
	 * NOTE(review): if setup_display() fails here, drm_enc is still
	 * NULL, so the fail path below never destroys/frees sde_enc --
	 * looks like a leak of sde_enc (and any phys encoders already
	 * created); confirm and fix with proper partial teardown.
	 */
	ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->enc_spinlock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);

	/* vsync event timer is only needed on the primary DSI display */
	if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
			disp_info->is_primary)
		setup_timer(&sde_enc->vsync_event_timer,
				sde_encoder_vsync_event_handler,
				(unsigned long)sde_enc);

	snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
	sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
					disp_info->is_primary);
	/* RSC is optional: continue without a client if creation failed */
	if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
		SDE_DEBUG("sde rsc client create failed :%ld\n",
				PTR_ERR(sde_enc->rsc_client));
		sde_enc->rsc_client = NULL;
	}

	mutex_init(&sde_enc->rc_lock);
	kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);
	sde_enc->vblank_enabled = false;

	kthread_init_work(&sde_enc->vsync_event_work,
			sde_encoder_vsync_event_work_handler);

	memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));

	SDE_DEBUG_ENC(sde_enc, "created\n");

	return drm_enc;

fail:
	SDE_ERROR("failed to create encoder\n");
	if (drm_enc)
		sde_encoder_destroy(drm_enc);

	return ERR_PTR(ret);
}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004222
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004223int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4224 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004225{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004226 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004227 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004228 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004229
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004230 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004231 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004232 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004233 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004234 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004235 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004236
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004237 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4238 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004239
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004240 switch (event) {
4241 case MSM_ENC_COMMIT_DONE:
4242 fn_wait = phys->ops.wait_for_commit_done;
4243 break;
4244 case MSM_ENC_TX_COMPLETE:
4245 fn_wait = phys->ops.wait_for_tx_complete;
4246 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004247 case MSM_ENC_VBLANK:
4248 fn_wait = phys->ops.wait_for_vblank;
4249 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304250 case MSM_ENC_ACTIVE_REGION:
4251 fn_wait = phys->ops.wait_for_active;
4252 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004253 default:
4254 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4255 event);
4256 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004257 };
4258
4259 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004260 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004261 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004262 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004263 if (ret)
4264 return ret;
4265 }
4266 }
4267
4268 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004269}
4270
Alan Kwong67a3f792016-11-01 23:16:53 -04004271enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4272{
4273 struct sde_encoder_virt *sde_enc = NULL;
4274 int i;
4275
4276 if (!encoder) {
4277 SDE_ERROR("invalid encoder\n");
4278 return INTF_MODE_NONE;
4279 }
4280 sde_enc = to_sde_encoder_virt(encoder);
4281
4282 if (sde_enc->cur_master)
4283 return sde_enc->cur_master->intf_mode;
4284
4285 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4286 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4287
4288 if (phys)
4289 return phys->intf_mode;
4290 }
4291
4292 return INTF_MODE_NONE;
4293}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004294
4295/**
4296 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
4297 * device bootup when cont_splash is enabled
4298 * @drm_enc: Pointer to drm encoder structure
4299 * @Return: true if successful in updating the encoder structure
4300 */
4301int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
4302{
4303 struct sde_encoder_virt *sde_enc;
4304 struct msm_drm_private *priv;
4305 struct sde_kms *sde_kms;
4306 struct drm_connector *conn = NULL;
4307 struct sde_connector *sde_conn = NULL;
4308 struct sde_connector_state *sde_conn_state = NULL;
4309 struct drm_display_mode *drm_mode = NULL;
4310 struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
4311 int ret = 0, i;
4312
4313 if (!encoder) {
4314 SDE_ERROR("invalid drm enc\n");
4315 return -EINVAL;
4316 }
4317
4318 if (!encoder->dev || !encoder->dev->dev_private) {
4319 SDE_ERROR("drm device invalid\n");
4320 return -EINVAL;
4321 }
4322
4323 priv = encoder->dev->dev_private;
4324 if (!priv->kms) {
4325 SDE_ERROR("invalid kms\n");
4326 return -EINVAL;
4327 }
4328
4329 sde_kms = to_sde_kms(priv->kms);
4330 sde_enc = to_sde_encoder_virt(encoder);
4331 if (!priv->num_connectors) {
4332 SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
4333 return -EINVAL;
4334 }
4335 SDE_DEBUG_ENC(sde_enc,
4336 "num of connectors: %d\n", priv->num_connectors);
4337
4338 for (i = 0; i < priv->num_connectors; i++) {
4339 SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
4340 priv->connectors[i]->base.id);
4341 sde_conn = to_sde_connector(priv->connectors[i]);
4342 if (!sde_conn->encoder) {
4343 SDE_DEBUG_ENC(sde_enc,
4344 "encoder not attached to connector\n");
4345 continue;
4346 }
4347 if (sde_conn->encoder->base.id
4348 == encoder->base.id) {
4349 conn = (priv->connectors[i]);
4350 break;
4351 }
4352 }
4353
4354 if (!conn || !conn->state) {
4355 SDE_ERROR_ENC(sde_enc, "connector not found\n");
4356 return -EINVAL;
4357 }
4358
4359 sde_conn_state = to_sde_connector_state(conn->state);
4360
4361 if (!sde_conn->ops.get_mode_info) {
4362 SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
4363 return -EINVAL;
4364 }
4365
4366 ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
4367 &sde_conn_state->mode_info,
4368 sde_kms->catalog->max_mixer_width,
4369 sde_conn->display);
4370 if (ret) {
4371 SDE_ERROR_ENC(sde_enc,
4372 "conn: ->get_mode_info failed. ret=%d\n", ret);
4373 return ret;
4374 }
4375
4376 ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
4377 conn->state, false);
4378 if (ret) {
4379 SDE_ERROR_ENC(sde_enc,
4380 "failed to reserve hw resources, %d\n", ret);
4381 return ret;
4382 }
4383
4384 if (conn->encoder) {
4385 conn->state->best_encoder = conn->encoder;
4386 SDE_DEBUG_ENC(sde_enc,
4387 "configured cstate->best_encoder to ID = %d\n",
4388 conn->state->best_encoder->base.id);
4389 } else {
4390 SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
4391 conn->base.id);
4392 }
4393
4394 SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
4395 sde_connector_get_topology_name(conn));
4396 drm_mode = &encoder->crtc->state->adjusted_mode;
4397 SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
4398 drm_mode->hdisplay, drm_mode->vdisplay);
4399 drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);
4400
4401 if (encoder->bridge) {
4402 SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
4403 /*
4404 * For cont-splash use case, we update the mode
4405 * configurations manually. This will skip the
4406 * usually mode set call when actual frame is
4407 * pushed from framework. The bridge needs to
4408 * be updated with the current drm mode by
4409 * calling the bridge mode set ops.
4410 */
4411 if (encoder->bridge->funcs) {
4412 SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
4413 encoder->bridge->funcs->mode_set(encoder->bridge,
4414 drm_mode, drm_mode);
4415 }
4416 } else {
4417 SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
4418 }
4419
4420 sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
4421 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
4422 sde_enc->hw_pp[i] = NULL;
4423 if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
4424 break;
4425 sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
4426 }
4427
4428 sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
4429 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
4430 sde_enc->hw_dsc[i] = NULL;
4431 if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
4432 break;
4433 sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
4434 }
4435
4436 sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
4437 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4438 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4439
4440 phys->hw_ctl = NULL;
4441 if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
4442 break;
4443 phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
4444 }
4445
4446 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4447 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4448
4449 if (!phys) {
4450 SDE_ERROR_ENC(sde_enc,
4451 "phys encoders not initialized\n");
4452 return -EINVAL;
4453 }
4454
4455 phys->hw_pp = sde_enc->hw_pp[i];
4456 if (phys->ops.cont_splash_mode_set)
4457 phys->ops.cont_splash_mode_set(phys, drm_mode);
4458
4459 if (phys->ops.is_master && phys->ops.is_master(phys)) {
4460 phys->connector = conn;
4461 sde_enc->cur_master = phys;
4462 }
4463 }
4464
4465 return ret;
4466}