blob: 9182194ee912071ccbf98ce7133efd925d6abc27 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Kalyan Thota27ec06c2019-03-18 13:19:59 +05302 * Copyright (c) 2014-2019, The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
/* debug/error print helpers that tag messages with the virtual encoder id */
#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/*
 * Print helpers for physical encoders: tag messages with the parent
 * encoder id, interface index and pingpong block index (each -1 when
 * the corresponding pointer is NULL).
 */
#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * plan is to create all possible physical encoder types, and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

/* max pingpong / DSC hardware blocks managed per virtual encoder */
#define MAX_CHANNELS_PER_ENC 2

/* size of the debugfs misr setup input buffer */
#define MISR_BUFF_SIZE 256

#define IDLE_SHORT_TIMEOUT 1

/* fault tolerance margins, in ms, used when estimating vsync waits */
#define FAULT_TOLERENCE_DELTA_IN_MS 2

#define FAULT_TOLERENCE_WAIT_IN_MS 5

/* Maximum number of VSYNC wait attempts for RSC state transition */
#define MAX_RSC_WAIT 5

/* true for any dual-pipe topology that merges onto a single interface */
#define TOPOLOGY_DUALPIPE_MERGE_MODE(x) \
		(((x) == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE) || \
		((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE) || \
		((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC))
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, sets RSC to IDLE and
 *	leaves the RC STATE in the PRE_OFF state.
 *	It should be followed by the STOP event as part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @SDE_ENC_RC_EVENT_PRE_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that a seamless mode switch is in progress. A client
 *	needs to turn off only the irq - leave clocks ON to reduce the mode
 *	switch latency.
 * @SDE_ENC_RC_EVENT_POST_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that seamless mode switch is complete and resources are
 *	acquired. Clients want to turn on the irq again and update the rsc
 *	with new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for
 *	IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core
 *	clocks and request RSC with IDLE state and change the resource state
 *	to IDLE.
 * @SDE_ENC_RC_EVENT_EARLY_WAKEUP:
 *	This event is triggered from the input event thread when a touch
 *	event is received from the input device. On receiving this event,
 *	- If the device is in SDE_ENC_RC_STATE_IDLE state, it turns ON the
 *	  clocks and enables RSC.
 *	- If the device is in SDE_ENC_RC_STATE_ON state, it resets the
 *	  delayed off work since a new commit is imminent.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE,
	SDE_ENC_RC_EVENT_EARLY_WAKEUP,
};
147
/*
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
163
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @vblank_ctl_lock:	mutex serializing vblank enable/disable control
 *			(NOTE(review): exact scope inferred from name; confirm
 *			against the vblank control paths)
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: number of horizontal tiles for this display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb: callback handler for frame event
 * @crtc_frame_event_cb_data: callback handler private data
 * @vsync_event_timer:	vsync timer
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_enabled:	indicate if idle power collapse is enabled
 *			currently. This can be controlled by user-mode
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @input_event_work:	worker to handle input device touch events
 * @esd_trigger_work:	worker to handle esd trigger events
 * @input_handler:	handler for input device events
 * @input_handler_registered: tracks whether input_handler is currently
 *			registered with the input subsystem
 * @topology:		topology of the display
 * @vblank_enabled:	boolean to track userspace vblank vote
 * @rsc_config:		rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @crtc:		pointer to drm_crtc
 * @elevated_ahb_vote:	flag tracking an elevated AHB bus vote
 *			(NOTE(review): semantics inferred from name; the code
 *			that sets/clears it is outside this view)
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	struct mutex vblank_ctl_lock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	struct sde_crtc_frame_event_cb_data crtc_frame_event_cb_data;

	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_enabled;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct kthread_work input_event_work;
	struct kthread_work esd_trigger_work;
	struct input_handler *input_handler;
	bool input_handler_registered;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	struct drm_crtc *crtc;

	bool elevated_ahb_vote;
};
273
/* upcast from the embedded drm_encoder base back to the virtual encoder */
#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700275
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500276static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
277{
278 struct msm_drm_private *priv;
279 struct sde_kms *sde_kms;
280 struct pm_qos_request *req;
281 u32 cpu_mask;
282 u32 cpu_dma_latency;
283 int cpu;
284
285 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
286 SDE_ERROR("drm device invalid\n");
287 return;
288 }
289
290 priv = drm_enc->dev->dev_private;
291 if (!priv->kms) {
292 SDE_ERROR("invalid kms\n");
293 return;
294 }
295
296 sde_kms = to_sde_kms(priv->kms);
297 if (!sde_kms || !sde_kms->catalog)
298 return;
299
300 cpu_mask = sde_kms->catalog->perf.cpu_mask;
301 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
302 if (!cpu_mask)
303 return;
304
305 req = &sde_kms->pm_qos_cpu_req;
306 req->type = PM_QOS_REQ_AFFINE_CORES;
307 cpumask_empty(&req->cpus_affine);
308 for_each_possible_cpu(cpu) {
309 if ((1 << cpu) & cpu_mask)
310 cpumask_set_cpu(cpu, &req->cpus_affine);
311 }
312 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
313
314 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
315}
316
317static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
318{
319 struct msm_drm_private *priv;
320 struct sde_kms *sde_kms;
321
322 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
323 SDE_ERROR("drm device invalid\n");
324 return;
325 }
326
327 priv = drm_enc->dev->dev_private;
328 if (!priv->kms) {
329 SDE_ERROR("invalid kms\n");
330 return;
331 }
332
333 sde_kms = to_sde_kms(priv->kms);
334 if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
335 return;
336
337 pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
338}
339
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700340static struct drm_connector_state *_sde_encoder_get_conn_state(
341 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800342{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700343 struct msm_drm_private *priv;
344 struct sde_kms *sde_kms;
345 struct list_head *connector_list;
346 struct drm_connector *conn_iter;
347
348 if (!drm_enc) {
349 SDE_ERROR("invalid argument\n");
350 return NULL;
351 }
352
353 priv = drm_enc->dev->dev_private;
354 sde_kms = to_sde_kms(priv->kms);
355 connector_list = &sde_kms->dev->mode_config.connector_list;
356
357 list_for_each_entry(conn_iter, connector_list, head)
358 if (conn_iter->encoder == drm_enc)
359 return conn_iter->state;
360
361 return NULL;
362}
363
364static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
365 struct msm_mode_info *mode_info)
366{
367 struct drm_connector_state *conn_state;
368
369 if (!drm_enc || !mode_info) {
370 SDE_ERROR("invalid arguments\n");
371 return -EINVAL;
372 }
373
374 conn_state = _sde_encoder_get_conn_state(drm_enc);
375 if (!conn_state) {
376 SDE_ERROR("invalid connector state for the encoder: %d\n",
377 drm_enc->base.id);
378 return -EINVAL;
379 }
380
381 return sde_connector_get_mode_info(conn_state, mode_info);
382}
383
384static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
385{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400386 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700387 struct msm_mode_info mode_info;
388 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400389
390 if (!drm_enc)
391 return false;
392
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700393 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
394 if (rc) {
395 SDE_ERROR("failed to get mode info, enc: %d\n",
396 drm_enc->base.id);
397 return false;
398 }
399
400 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800401
402 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
403}
404
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400405bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
406{
407 enum sde_rm_topology_name topology;
408 struct sde_encoder_virt *sde_enc;
409 struct drm_connector *drm_conn;
410
411 if (!drm_enc)
412 return false;
413
414 sde_enc = to_sde_encoder_virt(drm_enc);
415 if (!sde_enc->cur_master)
416 return false;
417
418 drm_conn = sde_enc->cur_master->connector;
419 if (!drm_conn)
420 return false;
421
422 topology = sde_connector_get_topology_name(drm_conn);
423 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
424 return true;
425
426 return false;
427}
428
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -0700429int sde_encoder_in_clone_mode(struct drm_encoder *drm_enc)
430{
431 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
432
433 return sde_enc && sde_enc->cur_master &&
434 sde_enc->cur_master->in_clone_mode;
435}
436
Dhaval Patelf9245d62017-03-28 16:24:00 -0700437static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
438 bool enable)
439{
440 struct drm_encoder *drm_enc;
441 struct msm_drm_private *priv;
442 struct sde_kms *sde_kms;
443
444 if (!sde_enc) {
445 SDE_ERROR("invalid sde enc\n");
446 return -EINVAL;
447 }
448
449 drm_enc = &sde_enc->base;
450 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
451 SDE_ERROR("drm device invalid\n");
452 return -EINVAL;
453 }
454
455 priv = drm_enc->dev->dev_private;
456 if (!priv->kms) {
457 SDE_ERROR("invalid kms\n");
458 return -EINVAL;
459 }
460
461 sde_kms = to_sde_kms(priv->kms);
462
463 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
464 enable);
465}
466
/**
 * sde_encoder_helper_report_irq_timeout - log an irq wait timeout and
 *	propagate a frame-done ERROR event to the parent virtual encoder so
 *	the frame state machine can unwind.
 * @phys_enc: Pointer to physical encoder structure. NOTE(review): both
 *	phys_enc and phys_enc->hw_pp are dereferenced without NULL checks;
 *	confirm all callers guarantee a valid phys encoder with an assigned
 *	pingpong block.
 * @intr_idx: Interrupt index that timed out
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	/* notify the upper layer so it can recover the stuck frame */
	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
481
/**
 * sde_encoder_helper_wait_for_irq - wait for one of this phys encoder's
 *	registered interrupts to fire, with a manual-recovery path for the
 *	case where the hw status bit is set but the callback never ran.
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index into phys_enc->irq[]
 * @wait_info: Wait parameters, including the atomic pending counter the
 *	irq handler decrements
 * Return: 0 on success (irq fired, or was recovered manually, or was not
 *	registered so no wait was needed), -EWOULDBLOCK if the encoder is
 *	disabled, -ETIMEDOUT if the wait expired with no irq status set,
 *	-EINVAL on bad arguments
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered for this logical index: nothing to wait on */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* wait expired; read raw hw status to see if the irq fired */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/*
			 * status bit is set but the callback never ran:
			 * invoke the handler by hand with local irqs off
			 * and treat the wait as successful
			 */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt), irq_status,
					SDE_EVTLOG_ERROR);
		}
	} else {
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
563
/**
 * sde_encoder_helper_register_irq - look up, register a callback for, and
 *	enable the hw interrupt behind the given logical irq index.
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index into phys_enc->irq[]
 * Return: 0 on success (or when already registered), -EINVAL on bad
 *	arguments or lookup failure, or the error from callback
 *	registration / irq enable
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* a non-negative irq_idx means this slot is already hooked up */
	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	/* map (intr_type, hw block) to the global core irq table index */
	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* unwind the callback registration on enable failure */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
623
/**
 * sde_encoder_helper_unregister_irq - disable and unregister the hw
 *	interrupt behind the given logical irq index, marking the slot free.
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: Logical interrupt index into phys_enc->irq[].
 *	NOTE(review): unlike the register path, intr_idx is not bounds
 *	checked here; confirm callers always pass a value < INTR_IDX_MAX.
 * Return: 0 unless phys_enc is NULL (-EINVAL); disable/unregister
 *	failures are recorded in the event log but do not change the
 *	return value
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
			DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark the slot free so a future register call can reuse it */
	irq->irq_idx = -EINVAL;

	return 0;
}
665
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400666void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400667 struct sde_encoder_hw_resources *hw_res,
668 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400669{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400670 struct sde_encoder_virt *sde_enc = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700671 struct msm_mode_info mode_info;
672 int rc, i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400673
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400674 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400675 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
676 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400677 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400678 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400679
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400680 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400681 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400682
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400683 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400684 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400685 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
686
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400687 for (i = 0; i < sde_enc->num_phys_encs; i++) {
688 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
689
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400690 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400691 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400692 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700693
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700694 /**
695 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
696 * called from atomic_check phase. Use the below API to get mode
697 * information of the temporary conn_state passed.
698 */
699 rc = sde_connector_get_mode_info(conn_state, &mode_info);
700 if (rc) {
701 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
702 return;
703 }
704
705 hw_res->topology = mode_info.topology;
Jeykumar Sankaran6f215d42017-09-12 16:15:23 -0700706 hw_res->is_primary = sde_enc->disp_info.is_primary;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400707}
708
/**
 * sde_encoder_destroy - tear down a virtual encoder and everything it owns
 * @drm_enc: encoder to destroy; logs an error and returns if NULL
 *
 * Destroys the RSC client, every physical encoder, the input handler,
 * and finally the virtual encoder allocation itself.
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* hold enc_lock while the phys encoder list is being dismantled */
	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			/* each successful destroy drops the live count */
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* any phys enc left (missing destroy op) is leaked; flag it loudly */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	/* destroy the lock only after the final unlock above */
	mutex_destroy(&sde_enc->enc_lock);

	if (sde_enc->input_handler) {
		kfree(sde_enc->input_handler);
		sde_enc->input_handler = NULL;
		sde_enc->input_handler_registered = false;
	}

	kfree(sde_enc);
}
752
/**
 * sde_encoder_helper_split_config - program MDP-top split-pipe and
 *	pp-split registers for this physical encoder's role
 * @phys_enc: physical encoder being configured
 * @interface: interface (INTF) id this phys encoder drives
 *
 * Only applies to DSI displays. SOLO role disables both split modes;
 * MASTER programs the split pipe; a slave programs pp-split using the
 * master's pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config is only relevant for DSI interfaces */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/*
	 * Disable split modes since the encoder will be operating as the
	 * only encoder, either for the entire use case (for example single
	 * DSI) or for this frame (left/right-only partial update). cfg is
	 * still zeroed here, so programming it clears the split registers.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* PPSPLIT topology routes the pp-split slave to this interface */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
822
/**
 * sde_encoder_virt_atomic_check - validate a proposed encoder state
 * @drm_enc: encoder being checked
 * @crtc_state: proposed crtc state
 * @conn_state: proposed connector state
 *
 * Runs each physical encoder's atomic_check (or mode_fixup fallback),
 * validates partial-update ROIs against the full mode on a modeset,
 * records the outgoing topology, reserves RM resources, and publishes
 * the selected topology/blob data back to the connector state.
 *
 * Return: 0 when the state is acceptable, negative errno otherwise.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/*
	 * On a modeset, any user/connector ROI must exactly match the new
	 * mode; partial-update ROIs are invalid while the mode changes.
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
						"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
						"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/**
		 * record topology in previous atomic state to be able to handle
		 * topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
				conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"RM failed to reserve resources, rc = %d\n",
					ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"connector failed to update info, rc: %d\n",
					ret);
			return ret;
		}

	}

	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
988
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800989static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
990 int pic_width, int pic_height)
991{
992 if (!dsc || !pic_width || !pic_height) {
993 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
994 pic_width, pic_height);
995 return -EINVAL;
996 }
997
998 if ((pic_width % dsc->slice_width) ||
999 (pic_height % dsc->slice_height)) {
1000 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
1001 pic_width, pic_height,
1002 dsc->slice_width, dsc->slice_height);
1003 return -EINVAL;
1004 }
1005
1006 dsc->pic_width = pic_width;
1007 dsc->pic_height = pic_height;
1008
1009 return 0;
1010}
1011
1012static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
1013 int intf_width)
1014{
1015 int slice_per_pkt, slice_per_intf;
1016 int bytes_in_slice, total_bytes_per_intf;
1017
1018 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
1019 (intf_width < dsc->slice_width)) {
1020 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
1021 intf_width, dsc ? dsc->slice_width : -1);
1022 return;
1023 }
1024
1025 slice_per_pkt = dsc->slice_per_pkt;
1026 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
1027
1028 /*
1029 * If slice_per_pkt is greater than slice_per_intf then default to 1.
1030 * This can happen during partial update.
1031 */
1032 if (slice_per_pkt > slice_per_intf)
1033 slice_per_pkt = 1;
1034
1035 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
1036 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
1037
1038 dsc->eol_byte_num = total_bytes_per_intf % 3;
1039 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1040 dsc->bytes_in_slice = bytes_in_slice;
1041 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1042 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1043}
1044
1045static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1046 int enc_ip_width)
1047{
1048 int ssm_delay, total_pixels, soft_slice_per_enc;
1049
1050 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1051
1052 /*
1053 * minimum number of initial line pixels is a sum of:
1054 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1055 * 91 for 10 bpc) * 3
1056 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1057 * 3. the initial xmit delay
1058 * 4. total pipeline delay through the "lock step" of encoder (47)
1059 * 5. 6 additional pixels as the output of the rate buffer is
1060 * 48 bits wide
1061 */
1062 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1063 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1064 if (soft_slice_per_enc > 1)
1065 total_pixels += (ssm_delay * 3);
1066 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1067 return 0;
1068}
1069
1070static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1071 struct msm_display_dsc_info *dsc)
1072{
1073 /*
1074 * As per the DSC spec, ICH_RESET can be either end of the slice line
1075 * or at the end of the slice. HW internally generates ich_reset at
1076 * end of the slice line if DSC_MERGE is used or encoder has two
1077 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1078 * is not used then it will generate ich_reset at the end of slice.
1079 *
1080 * Now as per the spec, during one PPS session, position where
1081 * ich_reset is generated should not change. Now if full-screen frame
1082 * has more than 1 soft slice then HW will automatically generate
1083 * ich_reset at the end of slice_line. But for the same panel, if
1084 * partial frame is enabled and only 1 encoder is used with 1 slice,
1085 * then HW will generate ich_reset at end of the slice. This is a
1086 * mismatch. Prevent this by overriding HW's decision.
1087 */
1088 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1089 (dsc->slice_width == dsc->pic_width);
1090}
1091
/*
 * _sde_encoder_dsc_pipe_cfg - program or disable DSC on one DSC-block /
 *	pingpong pair
 * @hw_dsc: DSC hardware block (callers validate non-NULL)
 * @hw_pp: pingpong hardware block (callers validate non-NULL)
 * @dsc: DSC parameters to program
 * @common_mode: DSC_MODE_* flags (split panel / multiplex / video)
 * @ich_reset: force the ICH reset override in hardware
 * @enable: false tears DSC down on this pingpong, true programs it
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		/* disable path only needs to touch the pingpong block */
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	/* program the DSC core before enabling it through the pingpong */
	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1114
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001115static void _sde_encoder_get_connector_roi(
1116 struct sde_encoder_virt *sde_enc,
1117 struct sde_rect *merged_conn_roi)
1118{
1119 struct drm_connector *drm_conn;
1120 struct sde_connector_state *c_state;
1121
1122 if (!sde_enc || !merged_conn_roi)
1123 return;
1124
1125 drm_conn = sde_enc->phys_encs[0]->connector;
1126
1127 if (!drm_conn || !drm_conn->state)
1128 return;
1129
1130 c_state = to_sde_connector_state(drm_conn->state);
1131 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1132}
1133
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - configure DSC for topologies with
 *	a single DSC encoder and a single interface
 * @sde_enc: virtual encoder; uses hw_dsc[0]/hw_pp[0] and cur_conn_roi
 *
 * Return: 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* single encoder: uncompressed encoder input equals interface input */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en=false: single-enc path never needs the ICH override */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001184
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - configure DSC for the dual-pipe
 *	topology: two layer mixers, two DSC encoders, two interfaces
 * @sde_enc: virtual encoder; uses both hw_dsc/hw_pp channels
 * @params: kickoff parameters; affected_displays selects active channels
 *
 * Return: 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one display bit set means a left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-width update: each interface carries half the line */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		/* a phys enc outside affected_displays gets DSC disabled */
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1278
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - configure DSC for the DSC-merge
 *	topology: two layer mixers, two DSC encoders, one interface
 * @sde_enc: virtual encoder; uses both hw_dsc/hw_pp channels
 * @params: kickoff parameters; affected_displays selects active channels
 *
 * Return: 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one display bit set means a left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/*
	 * NOTE(review): 'i' here is the stale loop index left at
	 * MAX_CHANNELS_PER_ENC by the validation loop above; the logged
	 * value is not meaningful - confirm intent.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* slave DSC is disabled for a half-panel partial update */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1350
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001351static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1352{
1353 struct sde_encoder_virt *sde_enc;
1354 struct drm_connector *drm_conn;
1355 struct drm_display_mode *adj_mode;
1356 struct sde_rect roi;
1357
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001358 if (!drm_enc) {
1359 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001360 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001361 }
1362
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001363 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001364 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1365 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001366 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001367 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001368
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001369 if (!sde_enc->cur_master) {
1370 SDE_ERROR("invalid cur_master parameter\n");
1371 return -EINVAL;
1372 }
1373
1374 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001375 drm_conn = sde_enc->cur_master->connector;
1376
1377 _sde_encoder_get_connector_roi(sde_enc, &roi);
1378 if (sde_kms_rect_is_null(&roi)) {
1379 roi.w = adj_mode->hdisplay;
1380 roi.h = adj_mode->vdisplay;
1381 }
1382
1383 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1384 sizeof(sde_enc->prv_conn_roi));
1385 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1386
1387 return 0;
1388}
1389
/*
 * _sde_encoder_dsc_setup - dispatch DSC configuration according to the
 *	topology currently assigned to the connector
 * @sde_enc: virtual encoder being kicked off
 * @params: kickoff parameters (affected displays for partial update)
 *
 * Skips reprogramming when the connector ROI is unchanged since the
 * previous frame.
 *
 * Return: 0 on success (or nothing to do), negative errno on failure.
 */
static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	enum sde_rm_topology_name topology;
	struct drm_connector *drm_conn;
	int ret = 0;

	if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
			!sde_enc->phys_encs[0]->connector)
		return -EINVAL;

	drm_conn = sde_enc->phys_encs[0]->connector;

	topology = sde_connector_get_topology_name(drm_conn);
	if (topology == SDE_RM_TOPOLOGY_NONE) {
		SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
		return -EINVAL;
	}

	SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
	/* NOTE(review): cur_master is dereferenced here without a NULL
	 * check, unlike sibling paths - confirm it is guaranteed by callers.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), topology,
			sde_enc->cur_conn_roi.x,
			sde_enc->cur_conn_roi.y,
			sde_enc->cur_conn_roi.w,
			sde_enc->cur_conn_roi.h,
			sde_enc->prv_conn_roi.x,
			sde_enc->prv_conn_roi.y,
			sde_enc->prv_conn_roi.w,
			sde_enc->prv_conn_roi.h,
			sde_enc->cur_master->cached_mode.hdisplay,
			sde_enc->cur_master->cached_mode.vdisplay);

	/* nothing to reprogram when the ROI did not change */
	if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
			&sde_enc->prv_conn_roi))
		return ret;

	switch (topology) {
	case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
	case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
		ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
		break;
	case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
		ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
		break;
	case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
	case SDE_RM_TOPOLOGY_QUADPIPE_3DMERGE_DSC:
		ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
		break;
	default:
		SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
				topology);
		return -EINVAL;
	};

	return ret;
}
1446
Dhaval Patelaab9b522017-07-20 12:38:46 -07001447static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
1448 struct msm_display_info *disp_info, bool is_dummy)
1449{
1450 struct sde_vsync_source_cfg vsync_cfg = { 0 };
1451 struct msm_drm_private *priv;
1452 struct sde_kms *sde_kms;
1453 struct sde_hw_mdp *hw_mdptop;
1454 struct drm_encoder *drm_enc;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001455 struct msm_mode_info mode_info;
1456 int i, rc = 0;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001457
Jayant Shekhar136e0592018-10-09 18:32:33 +05301458 if (!sde_enc || !sde_enc->cur_master || !disp_info) {
Dhaval Patelaab9b522017-07-20 12:38:46 -07001459 SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
1460 sde_enc != NULL, disp_info != NULL);
1461 return;
1462 } else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
1463 SDE_ERROR("invalid num phys enc %d/%d\n",
1464 sde_enc->num_phys_encs,
1465 (int) ARRAY_SIZE(sde_enc->hw_pp));
1466 return;
1467 }
1468
1469 drm_enc = &sde_enc->base;
1470 /* this pointers are checked in virt_enable_helper */
1471 priv = drm_enc->dev->dev_private;
1472
1473 sde_kms = to_sde_kms(priv->kms);
1474 if (!sde_kms) {
1475 SDE_ERROR("invalid sde_kms\n");
1476 return;
1477 }
1478
1479 hw_mdptop = sde_kms->hw_mdp;
1480 if (!hw_mdptop) {
1481 SDE_ERROR("invalid mdptop\n");
1482 return;
1483 }
1484
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001485 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1486 if (rc) {
1487 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001488 return;
1489 }
1490
Dhaval Patelaab9b522017-07-20 12:38:46 -07001491 if (hw_mdptop->ops.setup_vsync_source &&
1492 disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
1493 for (i = 0; i < sde_enc->num_phys_encs; i++)
1494 vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;
1495
1496 vsync_cfg.pp_count = sde_enc->num_phys_encs;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001497 vsync_cfg.frame_rate = mode_info.frame_rate;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301498 vsync_cfg.vsync_source =
1499 sde_enc->cur_master->hw_pp->caps->te_source;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001500 if (is_dummy)
1501 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
1502 else if (disp_info->is_te_using_watchdog_timer)
1503 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301504
Dhaval Patelaab9b522017-07-20 12:38:46 -07001505 vsync_cfg.is_dummy = is_dummy;
1506
1507 hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
1508 }
1509}
1510
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001511static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1512{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001513 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001514 struct sde_hw_pingpong *hw_pp = NULL;
1515 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001516
1517 if (!sde_enc || !sde_enc->phys_encs[0] ||
1518 !sde_enc->phys_encs[0]->connector) {
1519 SDE_ERROR("invalid params %d %d\n",
1520 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1521 return -EINVAL;
1522 }
1523
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001524 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001525 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1526 hw_pp = sde_enc->hw_pp[i];
1527 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001528
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001529 if (hw_pp && hw_pp->ops.disable_dsc)
1530 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001531
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001532 if (hw_dsc && hw_dsc->ops.dsc_disable)
1533 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001534 }
1535
1536 return ret;
1537}
1538
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001539static int _sde_encoder_switch_to_watchdog_vsync(struct drm_encoder *drm_enc)
1540{
1541 struct sde_encoder_virt *sde_enc;
1542 struct msm_display_info disp_info;
1543
1544 if (!drm_enc) {
1545 pr_err("invalid drm encoder\n");
1546 return -EINVAL;
1547 }
1548
1549 sde_enc = to_sde_encoder_virt(drm_enc);
1550
1551 sde_encoder_control_te(drm_enc, false);
1552
1553 memcpy(&disp_info, &sde_enc->disp_info, sizeof(disp_info));
1554 disp_info.is_te_using_watchdog_timer = true;
1555 _sde_encoder_update_vsync_source(sde_enc, &disp_info, false);
1556
1557 sde_encoder_control_te(drm_enc, true);
1558
1559 return 0;
1560}
1561
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001562static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001563 struct drm_encoder *drm_enc,
1564 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001565{
1566 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001567 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001568 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001569 struct sde_rsc_cmd_config *rsc_config;
1570 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001571 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001572 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001573 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1574 int wait_count = 0;
1575 struct drm_crtc *primary_crtc;
1576 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001577 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001578 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001579
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001580 if (!drm_enc || !drm_enc->dev) {
1581 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001582 return -EINVAL;
1583 }
1584
1585 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001586 crtc = sde_enc->crtc;
1587
1588 if (!sde_enc->crtc) {
1589 SDE_ERROR("invalid crtc parameter\n");
1590 return -EINVAL;
1591 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001592 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001593 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001594
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001595 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001596 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001597 return 0;
1598 }
1599
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001600 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1601 if (rc) {
1602 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1603 return 0;
1604 }
1605
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001606 /**
1607 * only primary command mode panel can request CMD state.
1608 * all other panels/displays can request for VID state including
1609 * secondary command mode panel.
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001610 * Clone mode encoder can request CLK STATE only.
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001611 */
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001612 if (sde_encoder_in_clone_mode(drm_enc))
1613 rsc_state = enable ? SDE_RSC_CLK_STATE : SDE_RSC_IDLE_STATE;
1614 else
1615 rsc_state = enable ?
1616 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1617 && disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1618 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
1619
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001620 prefill_lines = config ? mode_info.prefill_lines +
1621 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001622
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001623 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001624 if ((rsc_config->fps != mode_info.frame_rate) ||
1625 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001626 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001627 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1628 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1629 rsc_config->fps = mode_info.frame_rate;
1630 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001631 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001632 rsc_config->jitter_numer = mode_info.jitter_numer;
1633 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001634 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001635 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001636
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001637 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001638 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001639 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001640 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001641
1642 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001643 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001644 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001645 } else {
1646 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001647 rsc_state, NULL, crtc->base.id,
1648 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001649 }
1650
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001651 /**
1652 * if RSC performed a state change that requires a VBLANK wait, it will
1653 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1654 *
1655 * if we are the primary display, we will need to enable and wait
1656 * locally since we hold the commit thread
1657 *
1658 * if we are an external display, we must send a signal to the primary
1659 * to enable its VBLANK and wait one, since the RSC hardware is driven
1660 * by the primary panel's VBLANK signals
1661 */
1662 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1663 if (ret) {
1664 SDE_ERROR_ENC(sde_enc,
1665 "sde rsc client update failed ret:%d\n", ret);
1666 return ret;
1667 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1668 return ret;
1669 }
1670
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001671 if (wait_vblank_crtc_id)
1672 wait_refcount =
1673 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1674 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1675 SDE_EVTLOG_FUNC_ENTRY);
1676
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001677 if (crtc->base.id != wait_vblank_crtc_id) {
1678 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1679 if (!primary_crtc) {
1680 SDE_ERROR_ENC(sde_enc,
1681 "failed to find primary crtc id %d\n",
1682 wait_vblank_crtc_id);
1683 return -EINVAL;
1684 }
1685 pipe = drm_crtc_index(primary_crtc);
1686 }
1687
1688 /**
1689 * note: VBLANK is expected to be enabled at this point in
1690 * resource control state machine if on primary CRTC
1691 */
1692 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1693 if (sde_rsc_client_is_state_update_complete(
1694 sde_enc->rsc_client))
1695 break;
1696
1697 if (crtc->base.id == wait_vblank_crtc_id)
1698 ret = sde_encoder_wait_for_event(drm_enc,
1699 MSM_ENC_VBLANK);
1700 else
1701 drm_wait_one_vblank(drm_enc->dev, pipe);
1702
1703 if (ret) {
1704 SDE_ERROR_ENC(sde_enc,
1705 "wait for vblank failed ret:%d\n", ret);
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001706 /**
1707 * rsc hardware may hang without vsync. avoid rsc hang
1708 * by generating the vsync from watchdog timer.
1709 */
1710 if (crtc->base.id == wait_vblank_crtc_id)
1711 _sde_encoder_switch_to_watchdog_vsync(drm_enc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001712 }
1713 }
1714
1715 if (wait_count >= MAX_RSC_WAIT)
1716 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1717 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001718
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001719 if (wait_refcount)
1720 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1721 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1722 SDE_EVTLOG_FUNC_EXIT);
1723
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001724 return ret;
1725}
1726
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001727static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1728{
1729 struct sde_encoder_virt *sde_enc;
1730 int i;
1731
1732 if (!drm_enc) {
1733 SDE_ERROR("invalid encoder\n");
1734 return;
1735 }
1736
1737 sde_enc = to_sde_encoder_virt(drm_enc);
1738
1739 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1740 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1741 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1742
1743 if (phys && phys->ops.irq_control)
1744 phys->ops.irq_control(phys, enable);
1745 }
1746
1747}
1748
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001749/* keep track of the userspace vblank during modeset */
1750static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1751 u32 sw_event)
1752{
1753 struct sde_encoder_virt *sde_enc;
1754 bool enable;
1755 int i;
1756
1757 if (!drm_enc) {
1758 SDE_ERROR("invalid encoder\n");
1759 return;
1760 }
1761
1762 sde_enc = to_sde_encoder_virt(drm_enc);
1763 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1764 sw_event, sde_enc->vblank_enabled);
1765
1766 /* nothing to do if vblank not enabled by userspace */
1767 if (!sde_enc->vblank_enabled)
1768 return;
1769
1770 /* disable vblank on pre_modeset */
1771 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1772 enable = false;
1773 /* enable vblank on post_modeset */
1774 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1775 enable = true;
1776 else
1777 return;
1778
1779 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1780 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1781
1782 if (phys && phys->ops.control_vblank_irq)
1783 phys->ops.control_vblank_irq(phys, enable);
1784 }
1785}
1786
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001787struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1788{
1789 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001790
1791 if (!drm_enc)
1792 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001793 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001794 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001795}
1796
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001797static void _sde_encoder_resource_control_rsc_update(
1798 struct drm_encoder *drm_enc, bool enable)
1799{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001800 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001801 struct sde_encoder_virt *sde_enc;
1802
1803 if (!drm_enc) {
1804 SDE_ERROR("invalid encoder argument\n");
1805 return;
1806 }
1807 sde_enc = to_sde_encoder_virt(drm_enc);
1808 if (!sde_enc->crtc) {
1809 SDE_ERROR("invalid crtc\n");
1810 return;
1811 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001812
1813 if (enable) {
1814 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001815 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001816
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001817 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1818 } else {
1819 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001820 }
1821}
1822
/**
 * _sde_encoder_resource_control_helper - enable or disable the hardware
 *	resources backing this encoder: SDE core clocks, DSI clocks, encoder
 *	interrupts, and (for primary command mode panels) a PM QoS request
 * @drm_enc:	drm encoder pointer; callers validate it before calling
 * @enable:	true to bring resources up, false to tear them down
 *
 * Enable order is core clocks -> DSI clocks -> IRQs -> PM QoS; disable is
 * the mirror image. On DSI clock failure during enable, the core clock
 * vote is rolled back before returning. Returns 0 on success or a
 * negative error code.
 */
static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_encoder_virt *sde_enc;
	int rc;
	bool is_cmd_mode, is_primary;

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	/* PM QoS below applies only to primary command mode displays */
	is_cmd_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_CMD_MODE;
	is_primary = sde_enc->disp_info.is_primary;

	SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
	SDE_EVT32(DRMID(drm_enc), enable);

	if (!sde_enc->cur_master) {
		SDE_ERROR("encoder master not set\n");
		return -EINVAL;
	}

	if (enable) {
		/* enable SDE core clks */
		rc = sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, true);
		if (rc) {
			SDE_ERROR("failed to enable power resource %d\n", rc);
			SDE_EVT32(rc, SDE_EVTLOG_ERROR);
			return rc;
		}

		sde_enc->elevated_ahb_vote = true;
		/* enable DSI clks */
		rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
				true);
		if (rc) {
			SDE_ERROR("failed to enable clk control %d\n", rc);
			/* roll back the core clock vote taken above */
			sde_power_resource_enable(&priv->phandle,
					sde_kms->core_client, false);
			return rc;
		}

		/* enable all the irq */
		_sde_encoder_irq_control(drm_enc, true);

		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_add_request(drm_enc);

	} else {
		if (is_cmd_mode && is_primary)
			_sde_encoder_pm_qos_remove_request(drm_enc);

		/* disable all the irq */
		_sde_encoder_irq_control(drm_enc, false);

		/* disable DSI clks */
		sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);

		/* disable SDE core clks */
		sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, false);
	}

	return 0;
}
1892
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001893static void sde_encoder_input_event_handler(struct input_handle *handle,
1894 unsigned int type, unsigned int code, int value)
1895{
1896 struct drm_encoder *drm_enc = NULL;
1897 struct sde_encoder_virt *sde_enc = NULL;
Jayant Shekhar779c7522018-06-13 12:44:44 +05301898 struct msm_drm_thread *event_thread = NULL;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001899 struct msm_drm_private *priv = NULL;
1900
1901 if (!handle || !handle->handler || !handle->handler->private) {
1902 SDE_ERROR("invalid encoder for the input event\n");
1903 return;
1904 }
1905
1906 drm_enc = (struct drm_encoder *)handle->handler->private;
1907 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
1908 SDE_ERROR("invalid parameters\n");
1909 return;
1910 }
1911
1912 priv = drm_enc->dev->dev_private;
1913 sde_enc = to_sde_encoder_virt(drm_enc);
1914 if (!sde_enc->crtc || (sde_enc->crtc->index
Jayant Shekhar779c7522018-06-13 12:44:44 +05301915 >= ARRAY_SIZE(priv->event_thread))) {
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001916 SDE_DEBUG_ENC(sde_enc,
1917 "invalid cached CRTC: %d or crtc index: %d\n",
1918 sde_enc->crtc == NULL,
1919 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
1920 return;
1921 }
1922
1923 SDE_EVT32_VERBOSE(DRMID(drm_enc));
1924
Jayant Shekhar779c7522018-06-13 12:44:44 +05301925 event_thread = &priv->event_thread[sde_enc->crtc->index];
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001926
Jayant Shekhar779c7522018-06-13 12:44:44 +05301927 /* Queue input event work to event thread */
1928 kthread_queue_work(&event_thread->worker,
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001929 &sde_enc->input_event_work);
1930}
1931
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001932void sde_encoder_control_idle_pc(struct drm_encoder *drm_enc, bool enable)
1933{
1934 struct sde_encoder_virt *sde_enc;
1935
1936 if (!drm_enc) {
1937 SDE_ERROR("invalid encoder\n");
1938 return;
1939 }
1940 sde_enc = to_sde_encoder_virt(drm_enc);
1941
1942 /* return early if there is no state change */
1943 if (sde_enc->idle_pc_enabled == enable)
1944 return;
1945
1946 sde_enc->idle_pc_enabled = enable;
1947
1948 SDE_DEBUG("idle-pc state:%d\n", sde_enc->idle_pc_enabled);
1949 SDE_EVT32(sde_enc->idle_pc_enabled);
1950}
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001951
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001952static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
1953 u32 sw_event)
1954{
Dhaval Patel99412a52017-07-24 19:16:45 -07001955 bool autorefresh_enabled = false;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07001956 unsigned int lp, idle_pc_duration;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001957 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001958 struct msm_drm_private *priv;
1959 struct msm_drm_thread *disp_thread;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001960 int ret;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001961 bool is_vid_mode = false;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001962
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001963 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
1964 SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
1965 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001966 return -EINVAL;
1967 }
1968 sde_enc = to_sde_encoder_virt(drm_enc);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001969 priv = drm_enc->dev->dev_private;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001970 is_vid_mode = sde_enc->disp_info.capabilities &
1971 MSM_DISPLAY_CAP_VID_MODE;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001972
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001973 /*
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001974 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001975 * events and return early for other events (ie wb display).
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001976 */
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001977 if (!sde_enc->idle_pc_enabled &&
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001978 (sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001979 sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
1980 sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
1981 sw_event != SDE_ENC_RC_EVENT_STOP &&
1982 sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001983 return 0;
1984
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001985 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc:%d\n",
1986 sw_event, sde_enc->idle_pc_enabled);
1987 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001988 sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);
1989
1990 switch (sw_event) {
1991 case SDE_ENC_RC_EVENT_KICKOFF:
1992 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001993 if (kthread_cancel_delayed_work_sync(
1994 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001995 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
1996 sw_event);
1997
1998 mutex_lock(&sde_enc->rc_lock);
1999
2000 /* return if the resource control is already in ON state */
2001 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2002 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
2003 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002004 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2005 SDE_EVTLOG_FUNC_CASE1);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002006 mutex_unlock(&sde_enc->rc_lock);
2007 return 0;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002008 } else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
2009 sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
2010 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2011 sw_event, sde_enc->rc_state);
2012 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2013 SDE_EVTLOG_ERROR);
2014 mutex_unlock(&sde_enc->rc_lock);
2015 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002016 }
2017
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002018 if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2019 _sde_encoder_irq_control(drm_enc, true);
2020 } else {
2021 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002022 ret = _sde_encoder_resource_control_helper(drm_enc,
2023 true);
2024 if (ret) {
2025 SDE_ERROR_ENC(sde_enc,
2026 "sw_event:%d, rc in state %d\n",
2027 sw_event, sde_enc->rc_state);
2028 SDE_EVT32(DRMID(drm_enc), sw_event,
2029 sde_enc->rc_state,
2030 SDE_EVTLOG_ERROR);
2031 mutex_unlock(&sde_enc->rc_lock);
2032 return ret;
2033 }
2034
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002035 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2036 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002037
2038 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2039 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
2040 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2041
2042 mutex_unlock(&sde_enc->rc_lock);
2043 break;
2044
2045 case SDE_ENC_RC_EVENT_FRAME_DONE:
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002046 if (!sde_enc->crtc) {
2047 SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
2048 return -EINVAL;
2049 }
2050
2051 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2052 SDE_ERROR("invalid crtc index :%u\n",
2053 sde_enc->crtc->index);
2054 return -EINVAL;
2055 }
2056 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2057
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002058 /*
2059 * mutex lock is not used as this event happens at interrupt
2060 * context. And locking is not required as, the other events
2061 * like KICKOFF and STOP does a wait-for-idle before executing
2062 * the resource_control
2063 */
2064 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2065 SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
2066 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002067 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2068 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002069 return -EINVAL;
2070 }
2071
2072 /*
2073 * schedule off work item only when there are no
2074 * frames pending
2075 */
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002076 if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002077 SDE_DEBUG_ENC(sde_enc, "skip schedule work");
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002078 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2079 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002080 return 0;
2081 }
2082
Dhaval Patel99412a52017-07-24 19:16:45 -07002083 /* schedule delayed off work if autorefresh is disabled */
2084 if (sde_enc->cur_master &&
2085 sde_enc->cur_master->ops.is_autorefresh_enabled)
2086 autorefresh_enabled =
2087 sde_enc->cur_master->ops.is_autorefresh_enabled(
2088 sde_enc->cur_master);
2089
Clarence Ip89628132017-07-27 13:33:51 -04002090 /* set idle timeout based on master connector's lp value */
2091 if (sde_enc->cur_master)
2092 lp = sde_connector_get_lp(
2093 sde_enc->cur_master->connector);
2094 else
2095 lp = SDE_MODE_DPMS_ON;
2096
2097 if (lp == SDE_MODE_DPMS_LP2)
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002098 idle_pc_duration = IDLE_SHORT_TIMEOUT;
Clarence Ip89628132017-07-27 13:33:51 -04002099 else
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002100 idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;
Clarence Ip89628132017-07-27 13:33:51 -04002101
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002102 if (!autorefresh_enabled)
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002103 kthread_mod_delayed_work(
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002104 &disp_thread->worker,
2105 &sde_enc->delayed_off_work,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002106 msecs_to_jiffies(idle_pc_duration));
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002107 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Clarence Ip89628132017-07-27 13:33:51 -04002108 autorefresh_enabled,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002109 idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002110 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
2111 sw_event);
2112 break;
2113
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002114 case SDE_ENC_RC_EVENT_PRE_STOP:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002115 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002116 if (kthread_cancel_delayed_work_sync(
2117 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002118 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2119 sw_event);
2120
2121 mutex_lock(&sde_enc->rc_lock);
2122
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002123 if (is_vid_mode &&
2124 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2125 _sde_encoder_irq_control(drm_enc, true);
2126 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002127 /* skip if is already OFF or IDLE, resources are off already */
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002128 else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002129 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2130 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
2131 sw_event, sde_enc->rc_state);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002132 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2133 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002134 mutex_unlock(&sde_enc->rc_lock);
2135 return 0;
2136 }
2137
2138 /**
2139 * IRQs are still enabled currently, which allows wait for
2140 * VBLANK which RSC may require to correctly transition to OFF
2141 */
2142 _sde_encoder_resource_control_rsc_update(drm_enc, false);
2143
2144 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2145 SDE_ENC_RC_STATE_PRE_OFF,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002146 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002147
2148 sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;
2149
2150 mutex_unlock(&sde_enc->rc_lock);
2151 break;
2152
2153 case SDE_ENC_RC_EVENT_STOP:
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002154 /* cancel vsync event work and timer */
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302155 kthread_cancel_work_sync(&sde_enc->vsync_event_work);
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002156 del_timer_sync(&sde_enc->vsync_event_timer);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002157
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302158 mutex_lock(&sde_enc->rc_lock);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002159 /* return if the resource control is already in OFF state */
2160 if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
2161 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
2162 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002163 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2164 SDE_EVTLOG_FUNC_CASE4);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002165 mutex_unlock(&sde_enc->rc_lock);
2166 return 0;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002167 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
2168 sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002169 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2170 sw_event, sde_enc->rc_state);
2171 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2172 SDE_EVTLOG_ERROR);
2173 mutex_unlock(&sde_enc->rc_lock);
2174 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002175 }
2176
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002177 /**
2178 * expect to arrive here only if in either idle state or pre-off
2179 * and in IDLE state the resources are already disabled
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002180 */
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002181 if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002182 _sde_encoder_resource_control_helper(drm_enc, false);
2183
2184 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002185 SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002186
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002187 sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;
2188
2189 mutex_unlock(&sde_enc->rc_lock);
2190 break;
2191
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002192 case SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002193 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002194 if (kthread_cancel_delayed_work_sync(
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002195 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002196 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2197 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002198
2199 mutex_lock(&sde_enc->rc_lock);
2200
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002201 /* return if the resource control is already in ON state */
2202 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2203 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002204 ret = _sde_encoder_resource_control_helper(drm_enc,
2205 true);
2206 if (ret) {
2207 SDE_ERROR_ENC(sde_enc,
2208 "sw_event:%d, rc in state %d\n",
2209 sw_event, sde_enc->rc_state);
2210 SDE_EVT32(DRMID(drm_enc), sw_event,
2211 sde_enc->rc_state,
2212 SDE_EVTLOG_ERROR);
2213 mutex_unlock(&sde_enc->rc_lock);
2214 return ret;
2215 }
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002216
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002217 _sde_encoder_resource_control_rsc_update(drm_enc, true);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002218
2219 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2220 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002221 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002222 }
2223
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002224 ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2225 if (ret && ret != -EWOULDBLOCK) {
2226 SDE_ERROR_ENC(sde_enc,
2227 "wait for commit done returned %d\n",
2228 ret);
2229 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2230 ret, SDE_EVTLOG_ERROR);
2231 mutex_unlock(&sde_enc->rc_lock);
2232 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002233 }
2234
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002235 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002236 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002237
2238 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2239 SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);
2240
2241 sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
2242 mutex_unlock(&sde_enc->rc_lock);
2243 break;
2244
2245 case SDE_ENC_RC_EVENT_POST_MODESET:
2246 mutex_lock(&sde_enc->rc_lock);
2247
2248 /* return if the resource control is already in ON state */
2249 if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
2250 SDE_ERROR_ENC(sde_enc,
2251 "sw_event:%d, rc:%d !MODESET state\n",
2252 sw_event, sde_enc->rc_state);
2253 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2254 SDE_EVTLOG_ERROR);
2255 mutex_unlock(&sde_enc->rc_lock);
2256 return -EINVAL;
2257 }
2258
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002259 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002260 _sde_encoder_irq_control(drm_enc, true);
2261
2262 _sde_encoder_update_rsc_client(drm_enc, NULL, true);
2263
2264 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2265 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);
2266
2267 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2268
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002269 mutex_unlock(&sde_enc->rc_lock);
2270 break;
2271
2272 case SDE_ENC_RC_EVENT_ENTER_IDLE:
2273 mutex_lock(&sde_enc->rc_lock);
2274
2275 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002276 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002277 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002278 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2279 SDE_EVTLOG_ERROR);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002280 mutex_unlock(&sde_enc->rc_lock);
2281 return 0;
2282 }
2283
2284 /*
2285 * if we are in ON but a frame was just kicked off,
2286 * ignore the IDLE event, it's probably a stale timer event
2287 */
2288 if (sde_enc->frame_busy_mask[0]) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002289 SDE_ERROR_ENC(sde_enc,
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002290 "sw_event:%d, rc:%d frame pending\n",
2291 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002292 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2293 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002294 mutex_unlock(&sde_enc->rc_lock);
2295 return 0;
2296 }
2297
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002298 if (is_vid_mode) {
2299 _sde_encoder_irq_control(drm_enc, false);
2300 } else {
2301 /* disable all the clks and resources */
2302 _sde_encoder_resource_control_rsc_update(drm_enc,
2303 false);
2304 _sde_encoder_resource_control_helper(drm_enc, false);
2305 }
2306
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002307 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002308 SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002309 sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
2310
2311 mutex_unlock(&sde_enc->rc_lock);
2312 break;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002313 case SDE_ENC_RC_EVENT_EARLY_WAKEUP:
2314 if (!sde_enc->crtc ||
2315 sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2316 SDE_DEBUG_ENC(sde_enc,
2317 "invalid crtc:%d or crtc index:%d , sw_event:%u\n",
2318 sde_enc->crtc == NULL,
2319 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL,
2320 sw_event);
2321 return -EINVAL;
2322 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002323
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002324 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2325
2326 mutex_lock(&sde_enc->rc_lock);
2327
2328 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2329 if (sde_enc->cur_master &&
2330 sde_enc->cur_master->ops.is_autorefresh_enabled)
2331 autorefresh_enabled =
2332 sde_enc->cur_master->ops.is_autorefresh_enabled(
2333 sde_enc->cur_master);
2334 if (autorefresh_enabled) {
2335 SDE_DEBUG_ENC(sde_enc,
2336 "not handling early wakeup since auto refresh is enabled\n");
Jeykumar Sankaran067b3b92018-01-19 10:35:22 -08002337 mutex_unlock(&sde_enc->rc_lock);
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002338 return 0;
2339 }
2340
2341 if (!sde_crtc_frame_pending(sde_enc->crtc))
2342 kthread_mod_delayed_work(&disp_thread->worker,
2343 &sde_enc->delayed_off_work,
2344 msecs_to_jiffies(
2345 IDLE_POWERCOLLAPSE_DURATION));
2346 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2347 /* enable all the clks and resources */
2348 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2349 _sde_encoder_resource_control_helper(drm_enc, true);
2350
Jayant Shekhar85c40332018-05-08 11:46:36 +05302351 /*
2352 * In some cases, commit comes with slight delay
2353 * (> 80 ms)after early wake up, prevent clock switch
2354 * off to avoid jank in next update. So, increase the
2355 * command mode idle timeout sufficiently to prevent
2356 * such case.
2357 */
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002358 kthread_mod_delayed_work(&disp_thread->worker,
Jayant Shekhar85c40332018-05-08 11:46:36 +05302359 &sde_enc->delayed_off_work,
2360 msecs_to_jiffies(
2361 IDLE_POWERCOLLAPSE_IN_EARLY_WAKEUP));
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002362
2363 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2364 }
2365
2366 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2367 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE8);
2368
2369 mutex_unlock(&sde_enc->rc_lock);
2370 break;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002371 default:
Dhaval Patela5f75952017-07-25 11:17:41 -07002372 SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002373 SDE_ERROR("unexpected sw_event: %d\n", sw_event);
2374 break;
2375 }
2376
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002377 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002378 sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
2379 return 0;
2380}
2381
/**
 * sde_encoder_virt_mode_set - drm encoder mode_set callback for the virtual
 *	encoder; reserves hardware resources and propagates the mode to every
 *	physical encoder.
 * @drm_enc: Pointer to drm encoder structure
 * @mode: Pointer to the requested display mode
 * @adj_mode: Pointer to the adjusted display mode actually programmed
 *
 * For seamless dynamic mode switch (DMS) requests, resources are restored
 * via PRE_MODESET before release and re-acquired via POST_MODESET after the
 * physical encoders have been reconfigured. Returns early (with error logs)
 * on any validation or reservation failure.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	/* hw programming below requires the power rail to already be up */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	/* find the connector currently attached to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* query display-specific mode info (topology, compression, etc.) */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the reserved pingpong blocks for this encoder */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks, if any */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each phys encoder its pingpong block and push the new mode */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2516
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002517void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2518{
2519 struct sde_encoder_virt *sde_enc;
2520 struct sde_encoder_phys *phys;
2521 int i;
2522
2523 if (!drm_enc) {
2524 SDE_ERROR("invalid parameters\n");
2525 return;
2526 }
2527
2528 sde_enc = to_sde_encoder_virt(drm_enc);
2529 if (!sde_enc) {
2530 SDE_ERROR("invalid sde encoder\n");
2531 return;
2532 }
2533
2534 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2535 phys = sde_enc->phys_encs[i];
2536 if (phys && phys->ops.control_te)
2537 phys->ops.control_te(phys, enable);
2538 }
2539}
2540
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302541static int _sde_encoder_input_connect(struct input_handler *handler,
2542 struct input_dev *dev, const struct input_device_id *id)
2543{
2544 struct input_handle *handle;
2545 int rc = 0;
2546
2547 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
2548 if (!handle)
2549 return -ENOMEM;
2550
2551 handle->dev = dev;
2552 handle->handler = handler;
2553 handle->name = handler->name;
2554
2555 rc = input_register_handle(handle);
2556 if (rc) {
2557 pr_err("failed to register input handle\n");
2558 goto error;
2559 }
2560
2561 rc = input_open_device(handle);
2562 if (rc) {
2563 pr_err("failed to open input device\n");
2564 goto error_unregister;
2565 }
2566
2567 return 0;
2568
2569error_unregister:
2570 input_unregister_handle(handle);
2571
2572error:
2573 kfree(handle);
2574
2575 return rc;
2576}
2577
/**
 * _sde_encoder_input_disconnect - input handler disconnect callback; undoes
 *	_sde_encoder_input_connect by closing, unregistering and freeing the
 *	handle (in the reverse order of its setup).
 * @handle: input handle allocated by _sde_encoder_input_connect
 */
static void _sde_encoder_input_disconnect(struct input_handle *handle)
{
	input_close_device(handle);
	input_unregister_handle(handle);
	kfree(handle);
}
2584
/**
 * Input device id table specifying the events on which to receive callbacks.
 * Matches devices reporting absolute events (EV_ABS) with multi-touch X/Y
 * position capability, so a callback is triggered on touch activity.
 */
static const struct input_device_id sde_input_ids[] = {
	{
		.flags = INPUT_DEVICE_ID_MATCH_EVBIT,
		.evbit = { BIT_MASK(EV_ABS) },
		.absbit = { [BIT_WORD(ABS_MT_POSITION_X)] =
				BIT_MASK(ABS_MT_POSITION_X) |
				BIT_MASK(ABS_MT_POSITION_Y) },
	},
	{ },
};
2600
/**
 * _sde_encoder_input_handler_register - register the encoder's input handler
 *	with the input subsystem.
 * @input_handler: handler previously allocated by _sde_encoder_input_handler
 *
 * Return: 0 on success, negative errno from input_register_handler on failure.
 *
 * Note: ownership of @input_handler stays with the caller
 * (sde_enc->input_handler). The previous code freed it here on failure,
 * which left sde_enc->input_handler dangling: the enable path would retry
 * registration with freed memory and encoder teardown would free it again
 * (use-after-free / double-free). Registration may simply be retried on the
 * next enable, so on failure we only log and return the error.
 */
static int _sde_encoder_input_handler_register(
		struct input_handler *input_handler)
{
	int rc;

	rc = input_register_handler(input_handler);
	if (rc)
		pr_err("input_register_handler failed, rc= %d\n", rc);

	return rc;
}
2615
2616static int _sde_encoder_input_handler(
2617 struct sde_encoder_virt *sde_enc)
2618{
2619 struct input_handler *input_handler = NULL;
2620 int rc = 0;
2621
2622 if (sde_enc->input_handler) {
2623 SDE_ERROR_ENC(sde_enc,
2624 "input_handle is active. unexpected\n");
2625 return -EINVAL;
2626 }
2627
2628 input_handler = kzalloc(sizeof(*sde_enc->input_handler), GFP_KERNEL);
2629 if (!input_handler)
2630 return -ENOMEM;
2631
2632 input_handler->event = sde_encoder_input_event_handler;
2633 input_handler->connect = _sde_encoder_input_connect;
2634 input_handler->disconnect = _sde_encoder_input_disconnect;
2635 input_handler->name = "sde";
2636 input_handler->id_table = sde_input_ids;
2637 input_handler->private = sde_enc;
2638
2639 sde_enc->input_handler = input_handler;
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302640 sde_enc->input_handler_registered = false;
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302641
2642 return rc;
2643}
2644
/**
 * _sde_encoder_virt_enable_helper - common hardware setup shared by the
 *	enable and restore paths: audio interface selection (DP), UBWC reset,
 *	vsync source programming, tearcheck enable and ROI cache reset.
 * @drm_enc: Pointer to drm encoder structure; requires cur_master to be set.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the interface for DisplayPort, if the op exists */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
					sde_enc->cur_master->hw_mdptop);

	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	/* reprogram vsync source and re-enable tearcheck for this use case */
	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* invalidate cached partial-update ROIs from any previous session */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2687
2688void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2689{
2690 struct sde_encoder_virt *sde_enc = NULL;
2691 int i;
2692
2693 if (!drm_enc) {
2694 SDE_ERROR("invalid encoder\n");
2695 return;
2696 }
2697 sde_enc = to_sde_encoder_virt(drm_enc);
2698
2699 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2700 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2701
2702 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2703 phys->ops.restore(phys);
2704 }
2705
2706 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2707 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2708
2709 _sde_encoder_virt_enable_helper(drm_enc);
2710}
2711
Jayant Shekhar401bcdf2018-07-27 12:15:03 +05302712static void sde_encoder_off_work(struct kthread_work *work)
2713{
2714 struct sde_encoder_virt *sde_enc = container_of(work,
2715 struct sde_encoder_virt, delayed_off_work.work);
2716 struct drm_encoder *drm_enc;
2717
2718 if (!sde_enc) {
2719 SDE_ERROR("invalid sde encoder\n");
2720 return;
2721 }
2722 drm_enc = &sde_enc->base;
2723
2724 sde_encoder_idle_request(drm_enc);
2725}
2726
/**
 * sde_encoder_virt_enable - drm encoder enable callback for the virtual
 *	encoder; selects the master physical encoder, registers the input
 *	handler, kicks off resource control and enables (or, for seamless DMS
 *	switches, restores) each physical encoder.
 * @drm_enc: Pointer to drm encoder structure
 *
 * Ordering matters: cur_master must be chosen before the KICKOFF resource
 * control event, slaves are enabled/restored before the master, and the
 * common enable helper runs last.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* hw access below requires the power rail to already be up */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	/* cache the crtc if mode_set did not already do so */
	if (drm_enc->crtc && !sde_enc->crtc)
		sde_enc->crtc = drm_enc->crtc;

	comp_info = &mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* pick the master phys encoder; needed before resource control */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* register touch input handler for early wakeup, once per session */
	if (sde_enc->input_handler && !sde_enc->input_handler_registered) {
		ret = _sde_encoder_input_handler_register(
				sde_enc->input_handler);
		if (ret)
			SDE_ERROR(
				"input handler registration failed, rc = %d\n", ret);
		else
			sde_enc->input_handler_registered = true;
	}

	/* (re)init delayed-off work, except across seamless transitions */
	if (!(msm_is_mode_seamless_vrr(cur_mode)
			|| msm_is_mode_seamless_dms(cur_mode)))
		kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);

	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* enable (or restore, for DMS) all non-master phys encoders first */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* MISR is only captured for video-mode capable displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* master is enabled/restored last */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
2833
2834static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2835{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002836 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002837 struct msm_drm_private *priv;
2838 struct sde_kms *sde_kms;
Clarence Iped3327b2017-11-01 13:13:58 -04002839 enum sde_intf_mode intf_mode;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002840 int i = 0;
2841
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002842 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002843 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002844 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002845 } else if (!drm_enc->dev) {
2846 SDE_ERROR("invalid dev\n");
2847 return;
2848 } else if (!drm_enc->dev->dev_private) {
2849 SDE_ERROR("invalid dev_private\n");
2850 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002851 }
2852
Alan Kwong1124f1f2017-11-10 18:14:39 -05002853 if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
2854 SDE_ERROR("power resource is not enabled\n");
2855 return;
2856 }
2857
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002858 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002859 SDE_DEBUG_ENC(sde_enc, "\n");
2860
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002861 priv = drm_enc->dev->dev_private;
2862 sde_kms = to_sde_kms(priv->kms);
Clarence Iped3327b2017-11-01 13:13:58 -04002863 intf_mode = sde_encoder_get_intf_mode(drm_enc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002864
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002865 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002866
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302867 if (sde_enc->input_handler && sde_enc->input_handler_registered) {
Shubhashree Dhar137adbb2018-06-26 18:03:38 +05302868 input_unregister_handler(sde_enc->input_handler);
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302869 sde_enc->input_handler_registered = false;
2870 }
2871
Shubhashree Dhar137adbb2018-06-26 18:03:38 +05302872
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002873 /* wait for idle */
2874 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2875
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302876 kthread_flush_work(&sde_enc->input_event_work);
2877
Clarence Iped3327b2017-11-01 13:13:58 -04002878 /*
2879 * For primary command mode encoders, execute the resource control
2880 * pre-stop operations before the physical encoders are disabled, to
2881 * allow the rsc to transition its states properly.
2882 *
2883 * For other encoder types, rsc should not be enabled until after
2884 * they have been fully disabled, so delay the pre-stop operations
2885 * until after the physical disable calls have returned.
2886 */
2887 if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
2888 sde_encoder_resource_control(drm_enc,
2889 SDE_ENC_RC_EVENT_PRE_STOP);
2890 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2891 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002892
Clarence Iped3327b2017-11-01 13:13:58 -04002893 if (phys && phys->ops.disable)
2894 phys->ops.disable(phys);
2895 }
2896 } else {
2897 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2898 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002899
Clarence Iped3327b2017-11-01 13:13:58 -04002900 if (phys && phys->ops.disable)
2901 phys->ops.disable(phys);
2902 }
2903 sde_encoder_resource_control(drm_enc,
2904 SDE_ENC_RC_EVENT_PRE_STOP);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002905 }
2906
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002907 /*
2908 * disable dsc after the transfer is complete (for command mode)
2909 * and after physical encoder is disabled, to make sure timing
2910 * engine is already disabled (for video mode).
2911 */
2912 _sde_encoder_dsc_disable(sde_enc);
2913
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002914 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2915
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002916 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Ingrid Gallardo72cd1632018-02-28 15:26:37 -08002917 if (sde_enc->phys_encs[i]) {
2918 sde_enc->phys_encs[i]->cont_splash_settings = false;
2919 sde_enc->phys_encs[i]->cont_splash_single_flush = 0;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002920 sde_enc->phys_encs[i]->connector = NULL;
Ingrid Gallardo72cd1632018-02-28 15:26:37 -08002921 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002922 }
2923
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002924 sde_enc->cur_master = NULL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002925 /*
2926 * clear the cached crtc in sde_enc on use case finish, after all the
2927 * outstanding events and timers have been completed
2928 */
2929 sde_enc->crtc = NULL;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002930
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002931 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002932
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002933 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002934}
2935
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002936static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002937 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002938{
2939 int i = 0;
2940
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002941 for (i = 0; i < catalog->intf_count; i++) {
2942 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002943 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002944 return catalog->intf[i].id;
2945 }
2946 }
2947
2948 return INTF_MAX;
2949}
2950
Alan Kwongbb27c092016-07-20 16:41:25 -04002951static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2952 enum sde_intf_type type, u32 controller_id)
2953{
2954 if (controller_id < catalog->wb_count)
2955 return catalog->wb[controller_id].id;
2956
2957 return WB_MAX;
2958}
2959
Dhaval Patel81e87882016-10-19 21:41:56 -07002960static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
2961 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002962{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002963 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002964 unsigned long lock_flags;
2965
Dhaval Patel81e87882016-10-19 21:41:56 -07002966 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002967 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002968
Narendra Muppalla77b32932017-05-10 13:53:11 -07002969 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002970 sde_enc = to_sde_encoder_virt(drm_enc);
2971
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002972 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002973 if (sde_enc->crtc_vblank_cb)
2974 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002975 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07002976
2977 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07002978 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002979}
2980
/*
 * sde_encoder_underrun_callback - underrun irq callback from a physical
 *	encoder. Counts the underrun, records it in the event log and
 *	ftrace, and optionally stops tracing / panics via the debug
 *	control hooks. Runs in irq context.
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));

	trace_sde_encoder_underrun(DRMID(drm_enc),
		atomic_read(&phy_enc->underrun_cnt));

	/* debug hooks: freeze ftrace and/or panic if so configured */
	SDE_DBG_CTRL("stop_ftrace");
	SDE_DBG_CTRL("panic_underrun");

	SDE_ATRACE_END("encoder_underrun_callback");
}
2999
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003000void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
3001 void (*vbl_cb)(void *), void *vbl_data)
3002{
3003 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3004 unsigned long lock_flags;
3005 bool enable;
3006 int i;
3007
3008 enable = vbl_cb ? true : false;
3009
Clarence Ip19af1362016-09-23 14:57:51 -04003010 if (!drm_enc) {
3011 SDE_ERROR("invalid encoder\n");
3012 return;
3013 }
3014 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003015 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003016
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003017 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003018 sde_enc->crtc_vblank_cb = vbl_cb;
3019 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003020 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003021
3022 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3023 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3024
3025 if (phys && phys->ops.control_vblank_irq)
3026 phys->ops.control_vblank_irq(phys, enable);
3027 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07003028 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003029}
3030
Alan Kwong628d19e2016-10-31 13:50:13 -04003031void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003032 void (*frame_event_cb)(void *, u32 event),
3033 struct drm_crtc *crtc)
Alan Kwong628d19e2016-10-31 13:50:13 -04003034{
3035 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3036 unsigned long lock_flags;
3037 bool enable;
3038
3039 enable = frame_event_cb ? true : false;
3040
3041 if (!drm_enc) {
3042 SDE_ERROR("invalid encoder\n");
3043 return;
3044 }
3045 SDE_DEBUG_ENC(sde_enc, "\n");
3046 SDE_EVT32(DRMID(drm_enc), enable, 0);
3047
3048 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
3049 sde_enc->crtc_frame_event_cb = frame_event_cb;
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003050 sde_enc->crtc_frame_event_cb_data.crtc = crtc;
Alan Kwong628d19e2016-10-31 13:50:13 -04003051 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
3052}
3053
3054static void sde_encoder_frame_done_callback(
3055 struct drm_encoder *drm_enc,
3056 struct sde_encoder_phys *ready_phys, u32 event)
3057{
3058 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3059 unsigned int i;
3060
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003061 sde_enc->crtc_frame_event_cb_data.connector =
3062 sde_enc->cur_master->connector;
3063
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003064 if (event & (SDE_ENCODER_FRAME_EVENT_DONE
3065 | SDE_ENCODER_FRAME_EVENT_ERROR
3066 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Lloyd Atkinsond0fedd02017-03-01 13:25:40 -05003067
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003068 if (!sde_enc->frame_busy_mask[0]) {
3069 /**
3070 * suppress frame_done without waiter,
3071 * likely autorefresh
3072 */
3073 SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
3074 return;
Alan Kwong628d19e2016-10-31 13:50:13 -04003075 }
3076
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003077 /* One of the physical encoders has become idle */
3078 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3079 if (sde_enc->phys_encs[i] == ready_phys) {
3080 clear_bit(i, sde_enc->frame_busy_mask);
3081 SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
3082 sde_enc->frame_busy_mask[0]);
3083 }
3084 }
Alan Kwong628d19e2016-10-31 13:50:13 -04003085
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003086 if (!sde_enc->frame_busy_mask[0]) {
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003087 sde_encoder_resource_control(drm_enc,
3088 SDE_ENC_RC_EVENT_FRAME_DONE);
3089
3090 if (sde_enc->crtc_frame_event_cb)
3091 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003092 &sde_enc->crtc_frame_event_cb_data,
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003093 event);
3094 }
3095 } else {
Alan Kwong628d19e2016-10-31 13:50:13 -04003096 if (sde_enc->crtc_frame_event_cb)
3097 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003098 &sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04003099 }
3100}
3101
Dhaval Patel8a7c3282017-12-05 00:41:58 -08003102int sde_encoder_idle_request(struct drm_encoder *drm_enc)
3103{
3104 struct sde_encoder_virt *sde_enc;
3105
3106 if (!drm_enc) {
3107 SDE_ERROR("invalid drm encoder\n");
3108 return -EINVAL;
3109 }
3110
3111 sde_enc = to_sde_encoder_virt(drm_enc);
3112 sde_encoder_resource_control(&sde_enc->base,
3113 SDE_ENC_RC_EVENT_ENTER_IDLE);
3114
3115 return 0;
3116}
3117
Clarence Ip110d15c2016-08-16 14:44:41 -04003118/**
3119 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
3120 * drm_enc: Pointer to drm encoder structure
3121 * phys: Pointer to physical encoder structure
3122 * extra_flush_bits: Additional bit mask to include in flush trigger
3123 */
3124static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
3125 struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
3126{
3127 struct sde_hw_ctl *ctl;
Clarence Ip8e69ad02016-12-09 09:43:57 -05003128 int pending_kickoff_cnt;
Clarence Ip110d15c2016-08-16 14:44:41 -04003129
3130 if (!drm_enc || !phys) {
3131 SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
3132 drm_enc != 0, phys != 0);
3133 return;
3134 }
3135
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003136 if (!phys->hw_pp) {
3137 SDE_ERROR("invalid pingpong hw\n");
3138 return;
3139 }
3140
Clarence Ip110d15c2016-08-16 14:44:41 -04003141 ctl = phys->hw_ctl;
Alan Kwong4212dd42017-09-19 17:22:33 -04003142 if (!ctl || !phys->ops.trigger_flush) {
3143 SDE_ERROR("missing ctl/trigger cb\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04003144 return;
3145 }
3146
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003147 if (phys->split_role == ENC_ROLE_SKIP) {
3148 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
3149 "skip flush pp%d ctl%d\n",
3150 phys->hw_pp->idx - PINGPONG_0,
3151 ctl->idx - CTL_0);
3152 return;
3153 }
3154
Clarence Ip8e69ad02016-12-09 09:43:57 -05003155 pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);
Clarence Ip8e69ad02016-12-09 09:43:57 -05003156
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003157 if (phys->ops.is_master && phys->ops.is_master(phys))
3158 atomic_inc(&phys->pending_retire_fence_cnt);
3159
Clarence Ip110d15c2016-08-16 14:44:41 -04003160 if (extra_flush_bits && ctl->ops.update_pending_flush)
3161 ctl->ops.update_pending_flush(ctl, extra_flush_bits);
3162
Alan Kwong4212dd42017-09-19 17:22:33 -04003163 phys->ops.trigger_flush(phys);
Dhaval Patel6c666622017-03-21 23:02:59 -07003164
3165 if (ctl->ops.get_pending_flush)
Clarence Ip569d5af2017-10-14 21:09:01 -04003166 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
3167 pending_kickoff_cnt, ctl->idx - CTL_0,
3168 ctl->ops.get_pending_flush(ctl));
Dhaval Patel6c666622017-03-21 23:02:59 -07003169 else
Clarence Ip569d5af2017-10-14 21:09:01 -04003170 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
3171 ctl->idx - CTL_0, pending_kickoff_cnt);
Clarence Ip110d15c2016-08-16 14:44:41 -04003172}
3173
3174/**
3175 * _sde_encoder_trigger_start - trigger start for a physical encoder
3176 * phys: Pointer to physical encoder structure
3177 */
3178static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
3179{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003180 struct sde_hw_ctl *ctl;
3181
Clarence Ip110d15c2016-08-16 14:44:41 -04003182 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003183 SDE_ERROR("invalid argument(s)\n");
3184 return;
3185 }
3186
3187 if (!phys->hw_pp) {
3188 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04003189 return;
3190 }
3191
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07003192 /* avoid ctrl start for encoder in clone mode */
3193 if (phys->in_clone_mode)
3194 return;
3195
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003196 ctl = phys->hw_ctl;
3197 if (phys->split_role == ENC_ROLE_SKIP) {
3198 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
3199 "skip start pp%d ctl%d\n",
3200 phys->hw_pp->idx - PINGPONG_0,
3201 ctl->idx - CTL_0);
3202 return;
3203 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003204 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
3205 phys->ops.trigger_start(phys);
3206}
3207
Alan Kwong4212dd42017-09-19 17:22:33 -04003208void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
3209{
3210 struct sde_hw_ctl *ctl;
3211
3212 if (!phys_enc) {
3213 SDE_ERROR("invalid encoder\n");
3214 return;
3215 }
3216
3217 ctl = phys_enc->hw_ctl;
3218 if (ctl && ctl->ops.trigger_flush)
3219 ctl->ops.trigger_flush(ctl);
3220}
3221
Clarence Ip110d15c2016-08-16 14:44:41 -04003222void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
3223{
3224 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04003225
3226 if (!phys_enc) {
3227 SDE_ERROR("invalid encoder\n");
3228 return;
3229 }
3230
3231 ctl = phys_enc->hw_ctl;
3232 if (ctl && ctl->ops.trigger_start) {
3233 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04003234 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04003235 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003236}
3237
/*
 * _sde_encoder_wait_timeout - wait until info->atomic_cnt drops to zero or
 *	the deadline passes.
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @timeout_ms: total time budget in milliseconds
 * @info: wait queue + atomic counter describing the condition
 *
 * Re-arms the wait after spurious/early timer expiry: as long as the
 * counter is still non-zero, the wait timed out (rc == 0), and the
 * absolute deadline (exp_ktime) has not yet been reached, it loops.
 * Returns the last wait_event_timeout() result (0 on timeout, >0 on
 * condition met).
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	/* absolute deadline; loop exit is checked against wall clock */
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
3259
/*
 * sde_encoder_helper_wait_event_timeout - wait for info->atomic_cnt to reach
 *	zero with fault tolerance for delayed timer irqs.
 * Returns the wait_event_timeout() result of the last attempt (0 on
 * timeout, >0 on condition met).
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
	struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/**
	 * Handle the disabled-irq case, where the timer irq itself may be
	 * delayed: if the timeout is detected to have expired much later
	 * than requested (past the FAULT_TOLERENCE_DELTA_IN_MS margin),
	 * grant one extra wait of FAULT_TOLERENCE_WAIT_IN_MS.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
		(ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
		FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
			FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
3281
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003282void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
3283{
3284 struct sde_encoder_virt *sde_enc;
3285 struct sde_connector *sde_con;
3286 void *sde_con_disp;
3287 struct sde_hw_ctl *ctl;
3288 int rc;
3289
3290 if (!phys_enc) {
3291 SDE_ERROR("invalid encoder\n");
3292 return;
3293 }
3294 sde_enc = to_sde_encoder_virt(phys_enc->parent);
3295 ctl = phys_enc->hw_ctl;
3296
3297 if (!ctl || !ctl->ops.reset)
3298 return;
3299
3300 SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
3301 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);
3302
3303 if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
3304 phys_enc->connector) {
3305 sde_con = to_sde_connector(phys_enc->connector);
3306 sde_con_disp = sde_connector_get_display(phys_enc->connector);
3307
3308 if (sde_con->ops.soft_reset) {
3309 rc = sde_con->ops.soft_reset(sde_con_disp);
3310 if (rc) {
3311 SDE_ERROR_ENC(sde_enc,
3312 "connector soft reset failure\n");
Dhaval Patel7ca510f2017-07-12 12:57:37 -07003313 SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
3314 "panic");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003315 }
3316 }
3317 }
3318
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003319 phys_enc->enable_state = SDE_ENC_ENABLED;
3320}
3321
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 *
 * Sequence: (1) flush LUT reg-dma outside the spinlock (it may block),
 * (2) under enc_spinlock, mark busy bits and trigger per-encoder or
 * combined (split) ctl flushes, then ctl start on the master,
 * (3) drop any temporarily elevated AHB bus vote back to low.
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;
	struct msm_drm_private *priv = NULL;
	struct sde_kms *sde_kms = NULL;
	bool is_vid_mode = false;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	is_vid_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_VID_MODE;


	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		/* make reg dma kickoff blocking for video mode */
		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
					is_vid_mode);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/*
		 * encoders needing a single combined flush defer it; their
		 * pending bits are accumulated and sent via the master below
		 */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	/* drop the one-shot elevated AHB vote back down after kickoff */
	if (sde_enc->elevated_ahb_vote) {
		priv = sde_enc->base.dev->dev_private;
		if (priv != NULL) {
			sde_kms = to_sde_kms(priv->kms);
			if (sde_kms != NULL) {
				sde_power_scale_reg_bus(&priv->phandle,
						sde_kms->core_client,
						VOTE_INDEX_LOW,
						false);
			}
		}
		sde_enc->elevated_ahb_vote = false;
	}
}
3432
/**
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - in ppsplit
 *	topology, swap the two physical encoders' interfaces for right-only
 *	partial updates so the master drives an irq-capable pingpong.
 * @drm_enc: Pointer to drm encoder structure
 * @affected_displays: bitmask of displays updated this commit; may be
 *	collapsed to BIT(0), since ppsplit always waits on the master
 * @num_active_phys: number of active physical encoders this commit
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	/* only the pingpong-split topology needs this workaround */
	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	/* intfs_swapped tracks the current swap state across commits */
	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3491
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003492static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
3493 struct sde_encoder_kickoff_params *params)
3494{
3495 struct sde_encoder_virt *sde_enc;
3496 struct sde_encoder_phys *phys;
3497 int i, num_active_phys;
3498 bool master_assigned = false;
3499
3500 if (!drm_enc || !params)
3501 return;
3502
3503 sde_enc = to_sde_encoder_virt(drm_enc);
3504
3505 if (sde_enc->num_phys_encs <= 1)
3506 return;
3507
3508 /* count bits set */
3509 num_active_phys = hweight_long(params->affected_displays);
3510
3511 SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
3512 params->affected_displays, num_active_phys);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04003513 SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
3514 num_active_phys);
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003515
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05003516 /* for left/right only update, ppsplit master switches interface */
3517 _sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
3518 &params->affected_displays, num_active_phys);
3519
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003520 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3521 enum sde_enc_split_role prv_role, new_role;
3522 bool active;
3523
3524 phys = sde_enc->phys_encs[i];
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003525 if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003526 continue;
3527
3528 active = test_bit(i, &params->affected_displays);
3529 prv_role = phys->split_role;
3530
3531 if (active && num_active_phys == 1)
3532 new_role = ENC_ROLE_SOLO;
3533 else if (active && !master_assigned)
3534 new_role = ENC_ROLE_MASTER;
3535 else if (active)
3536 new_role = ENC_ROLE_SLAVE;
3537 else
3538 new_role = ENC_ROLE_SKIP;
3539
3540 phys->ops.update_split_role(phys, new_role);
3541 if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
3542 sde_enc->cur_master = phys;
3543 master_assigned = true;
3544 }
3545
3546 SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
3547 phys->hw_pp->idx - PINGPONG_0, prv_role,
3548 phys->split_role, active);
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05003549 SDE_EVT32(DRMID(drm_enc), params->affected_displays,
3550 phys->hw_pp->idx - PINGPONG_0, prv_role,
3551 phys->split_role, active, num_active_phys);
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003552 }
3553}
3554
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303555bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003556{
3557 struct sde_encoder_virt *sde_enc;
3558 struct msm_display_info *disp_info;
3559
3560 if (!drm_enc) {
3561 SDE_ERROR("invalid encoder\n");
3562 return false;
3563 }
3564
3565 sde_enc = to_sde_encoder_virt(drm_enc);
3566 disp_info = &sde_enc->disp_info;
3567
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303568 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003569}
3570
Dhaval Patel0e558f42017-04-30 00:51:40 -07003571void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3572{
3573 struct sde_encoder_virt *sde_enc;
3574 struct sde_encoder_phys *phys;
3575 unsigned int i;
3576 struct sde_hw_ctl *ctl;
3577 struct msm_display_info *disp_info;
3578
3579 if (!drm_enc) {
3580 SDE_ERROR("invalid encoder\n");
3581 return;
3582 }
3583 sde_enc = to_sde_encoder_virt(drm_enc);
3584 disp_info = &sde_enc->disp_info;
3585
3586 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3587 phys = sde_enc->phys_encs[i];
3588
3589 if (phys && phys->hw_ctl) {
3590 ctl = phys->hw_ctl;
3591 if (ctl->ops.clear_pending_flush)
3592 ctl->ops.clear_pending_flush(ctl);
3593
3594 /* update only for command mode primary ctl */
3595 if ((phys == sde_enc->cur_master) &&
3596 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3597 && ctl->ops.trigger_pending)
3598 ctl->ops.trigger_pending(ctl);
3599 }
3600 }
3601}
3602
/*
 * _sde_encoder_setup_dither - program pingpong dither for this physical
 *	encoder from the connector's dither config.
 * Skips ppsplit slaves (the master programs for both), disables dither
 * entirely for 10bpp/10bpc DSC modes, and in dual-pipe merge mode
 * programs every pingpong owned by the virtual encoder.
 */
static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
{
	void *dither_cfg;
	int ret = 0, rc, i = 0;
	size_t len = 0;
	enum sde_rm_topology_name topology;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	struct sde_encoder_virt *sde_enc;
	struct sde_hw_pingpong *hw_pp;

	if (!phys || !phys->connector || !phys->hw_pp ||
			!phys->hw_pp->ops.setup_dither || !phys->parent)
		return;

	/* ppsplit slave shares the master's pingpong; nothing to program */
	topology = sde_connector_get_topology_name(phys->connector);
	if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
			(phys->split_role == ENC_ROLE_SLAVE))
		return;

	drm_enc = phys->parent;
	sde_enc = to_sde_encoder_virt(drm_enc);
	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	dsc = &mode_info.comp_info.dsc_info;
	/* disable dither for 10 bpp or 10bpc dsc config */
	if (dsc->bpp == 10 || dsc->bpc == 10) {
		phys->hw_pp->ops.setup_dither(phys->hw_pp, NULL, 0);
		return;
	}

	/* dither_cfg/len come from the connector's blob property */
	ret = sde_connector_get_dither_cfg(phys->connector,
			phys->connector->state, &dither_cfg, &len);
	if (ret)
		return;

	if (TOPOLOGY_DUALPIPE_MERGE_MODE(topology)) {
		/*
		 * NOTE(review): calls through phys->hw_pp->ops for each
		 * hw_pp — presumably all pingpongs share the same ops
		 * table; confirm against sde_hw_pingpong init.
		 */
		for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
			hw_pp = sde_enc->hw_pp[i];
			if (hw_pp) {
				phys->hw_pp->ops.setup_dither(hw_pp, dither_cfg,
						len);
			}
		}
	} else {
		phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
	}
}
3656
/*
 * _sde_encoder_calculate_linetime - compute the scanout time of one line.
 * @sde_enc: virtual encoder; only its master is consulted
 * @mode: display mode supplying pixel clock (kHz) and htotal
 *
 * pclk_period = ceil(1e9 / clock_khz) is the pixel period scaled by
 * 1000 (i.e. in ps when clock is interpreted in kHz); multiplying by
 * htotal and dividing by 1000 yields the line time in ns.
 * Returns the line time in ns, or 0 on any invalid input.
 */
static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
		struct drm_display_mode *mode)
{
	u64 pclk_rate;
	u32 pclk_period;
	u32 line_time;

	/*
	 * For linetime calculation, only operate on master encoder.
	 */
	if (!sde_enc->cur_master)
		return 0;

	if (!sde_enc->cur_master->ops.get_line_count) {
		SDE_ERROR("get_line_count function not defined\n");
		return 0;
	}

	pclk_rate = mode->clock; /* pixel clock in kHz */
	if (pclk_rate == 0) {
		SDE_ERROR("pclk is 0, cannot calculate line time\n");
		return 0;
	}

	pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
	if (pclk_period == 0) {
		SDE_ERROR("pclk period is 0\n");
		return 0;
	}

	/*
	 * Line time calculation based on Pixel clock and HTOTAL.
	 * Final unit is in ns.
	 */
	line_time = (pclk_period * mode->htotal) / 1000;
	if (line_time == 0) {
		SDE_ERROR("line time calculation is 0\n");
		return 0;
	}

	SDE_DEBUG_ENC(sde_enc,
		"clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
		pclk_rate, pclk_period, line_time);

	return line_time;
}
3703
3704static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
3705 ktime_t *wakeup_time)
3706{
3707 struct drm_display_mode *mode;
3708 struct sde_encoder_virt *sde_enc;
3709 u32 cur_line;
3710 u32 line_time;
3711 u32 vtotal, time_to_vsync;
3712 ktime_t cur_time;
3713
3714 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003715 mode = &sde_enc->cur_master->cached_mode;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003716
3717 line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
3718 if (!line_time)
3719 return -EINVAL;
3720
3721 cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);
3722
3723 vtotal = mode->vtotal;
3724 if (cur_line >= vtotal)
3725 time_to_vsync = line_time * vtotal;
3726 else
3727 time_to_vsync = line_time * (vtotal - cur_line);
3728
3729 if (time_to_vsync == 0) {
3730 SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
3731 vtotal);
3732 return -EINVAL;
3733 }
3734
3735 cur_time = ktime_get();
3736 *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
3737
3738 SDE_DEBUG_ENC(sde_enc,
3739 "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
3740 cur_line, vtotal, time_to_vsync,
3741 ktime_to_ms(cur_time),
3742 ktime_to_ms(*wakeup_time));
3743 return 0;
3744}
3745
3746static void sde_encoder_vsync_event_handler(unsigned long data)
3747{
3748 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3749 struct sde_encoder_virt *sde_enc;
3750 struct msm_drm_private *priv;
3751 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003752
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003753 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3754 SDE_ERROR("invalid encoder parameters\n");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003755 return;
3756 }
3757
3758 sde_enc = to_sde_encoder_virt(drm_enc);
3759 priv = drm_enc->dev->dev_private;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003760 if (!sde_enc->crtc) {
3761 SDE_ERROR("invalid crtc");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003762 return;
3763 }
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003764
3765 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3766 SDE_ERROR("invalid crtc index:%u\n",
3767 sde_enc->crtc->index);
3768 return;
3769 }
3770 event_thread = &priv->event_thread[sde_enc->crtc->index];
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003771 if (!event_thread) {
3772 SDE_ERROR("event_thread not found for crtc:%d\n",
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003773 sde_enc->crtc->index);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003774 return;
3775 }
3776
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303777 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003778 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003779}
3780
Dhaval Patel222023e2018-02-27 12:24:07 -08003781static void sde_encoder_esd_trigger_work_handler(struct kthread_work *work)
3782{
3783 struct sde_encoder_virt *sde_enc = container_of(work,
3784 struct sde_encoder_virt, esd_trigger_work);
3785
3786 if (!sde_enc) {
3787 SDE_ERROR("invalid sde encoder\n");
3788 return;
3789 }
3790
3791 sde_encoder_resource_control(&sde_enc->base,
3792 SDE_ENC_RC_EVENT_KICKOFF);
3793}
3794
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08003795static void sde_encoder_input_event_work_handler(struct kthread_work *work)
3796{
3797 struct sde_encoder_virt *sde_enc = container_of(work,
3798 struct sde_encoder_virt, input_event_work);
3799
3800 if (!sde_enc) {
3801 SDE_ERROR("invalid sde encoder\n");
3802 return;
3803 }
3804
3805 sde_encoder_resource_control(&sde_enc->base,
3806 SDE_ENC_RC_EVENT_EARLY_WAKEUP);
3807}
3808
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003809static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
3810{
3811 struct sde_encoder_virt *sde_enc = container_of(work,
3812 struct sde_encoder_virt, vsync_event_work);
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303813 bool autorefresh_enabled = false;
3814 int rc = 0;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003815 ktime_t wakeup_time;
3816
3817 if (!sde_enc) {
3818 SDE_ERROR("invalid sde encoder\n");
3819 return;
3820 }
3821
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303822 rc = _sde_encoder_power_enable(sde_enc, true);
3823 if (rc) {
3824 SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
3825 return;
3826 }
3827
3828 if (sde_enc->cur_master &&
3829 sde_enc->cur_master->ops.is_autorefresh_enabled)
3830 autorefresh_enabled =
3831 sde_enc->cur_master->ops.is_autorefresh_enabled(
3832 sde_enc->cur_master);
3833
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303834 /* Update timer if autorefresh is enabled else return */
3835 if (!autorefresh_enabled)
Lloyd Atkinson349f7412017-11-07 16:55:57 -05003836 goto exit;
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303837
Lloyd Atkinson349f7412017-11-07 16:55:57 -05003838 rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
3839 if (rc)
3840 goto exit;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003841
3842 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
3843 mod_timer(&sde_enc->vsync_event_timer,
3844 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
Lloyd Atkinson349f7412017-11-07 16:55:57 -05003845
3846exit:
3847 _sde_encoder_power_enable(sde_enc, false);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003848}
3849
Clarence Ip5adc0fb2017-12-15 16:08:01 -05003850int sde_encoder_poll_line_counts(struct drm_encoder *drm_enc)
3851{
3852 static const uint64_t timeout_us = 50000;
3853 static const uint64_t sleep_us = 20;
3854 struct sde_encoder_virt *sde_enc;
3855 ktime_t cur_ktime, exp_ktime;
3856 uint32_t line_count, tmp, i;
3857
3858 if (!drm_enc) {
3859 SDE_ERROR("invalid encoder\n");
3860 return -EINVAL;
3861 }
3862 sde_enc = to_sde_encoder_virt(drm_enc);
3863 if (!sde_enc->cur_master ||
3864 !sde_enc->cur_master->ops.get_line_count) {
3865 SDE_DEBUG_ENC(sde_enc, "can't get master line count\n");
3866 SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_ERROR);
3867 return -EINVAL;
3868 }
3869
3870 exp_ktime = ktime_add_ms(ktime_get(), timeout_us / 1000);
3871
3872 line_count = sde_enc->cur_master->ops.get_line_count(
3873 sde_enc->cur_master);
3874
3875 for (i = 0; i < (timeout_us * 2 / sleep_us); ++i) {
3876 tmp = line_count;
3877 line_count = sde_enc->cur_master->ops.get_line_count(
3878 sde_enc->cur_master);
3879 if (line_count < tmp) {
3880 SDE_EVT32(DRMID(drm_enc), line_count);
3881 return 0;
3882 }
3883
3884 cur_ktime = ktime_get();
3885 if (ktime_compare_safe(exp_ktime, cur_ktime) <= 0)
3886 break;
3887
3888 usleep_range(sleep_us / 2, sleep_us);
3889 }
3890
3891 SDE_EVT32(DRMID(drm_enc), line_count, SDE_EVTLOG_ERROR);
3892 return -ETIMEDOUT;
3893}
3894
/*
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for the
 * upcoming frame kickoff.
 * @drm_enc: encoder pointer
 * @params: per-kickoff parameters passed down to each physical encoder
 *
 * Sequence: per-phys prepare (may wait on the previous kickoff) and dither
 * setup, resource-control KICKOFF event, optional in-order hardware reset,
 * master/ROI bookkeeping, connector pre-kickoff, then DSC setup when
 * enabled and not in continuous-splash handoff.
 *
 * Returns 0 on success; on partial failure the first/last error is
 * accumulated in 'ret' so later stages still run.
 */
int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	struct sde_kms *sde_kms = NULL;
	struct msm_drm_private *priv = NULL;
	bool needs_hw_reset = false;
	uint32_t ln_cnt1, ln_cnt2;
	unsigned int i;
	int rc, ret = 0;

	if (!drm_enc || !params || !drm_enc->dev ||
		!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* save this for later, in case of errors */
	if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
		ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
				sde_enc->cur_master);
	else
		ln_cnt1 = -EINVAL;

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		params->is_primary = sde_enc->disp_info.is_primary;
		if (phys) {
			if (phys->ops.prepare_for_kickoff) {
				rc = phys->ops.prepare_for_kickoff(
						phys, params);
				/* remember the error but keep preparing */
				if (rc)
					ret = rc;
			}
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	/* bail out early if resources cannot be brought up for the frame */
	rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
		return rc;
	}

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		/* query line count before cur_master is updated */
		if (sde_enc->cur_master &&
				sde_enc->cur_master->ops.get_wr_line_count)
			ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
					sde_enc->cur_master);
		else
			ln_cnt2 = -EINVAL;

		SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
				SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
			ret = rc;
		}
	}

	/* skip DSC programming while continuous splash is still active */
	if (_sde_encoder_is_dsc_enabled(drm_enc) &&
		!sde_kms->splash_data.cont_splash_en) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
			ret = rc;
		}
	}

	return ret;
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003995
Clarence Ip662698e2017-09-12 18:34:16 -04003996/**
3997 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3998 * with the specified encoder, and unstage all pipes from it
3999 * @encoder: encoder pointer
4000 * Returns: 0 on success
4001 */
4002static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
4003{
4004 struct sde_encoder_virt *sde_enc;
4005 struct sde_encoder_phys *phys;
4006 unsigned int i;
4007 int rc = 0;
4008
4009 if (!drm_enc) {
4010 SDE_ERROR("invalid encoder\n");
4011 return -EINVAL;
4012 }
4013
4014 sde_enc = to_sde_encoder_virt(drm_enc);
4015
4016 SDE_ATRACE_BEGIN("encoder_release_lm");
4017 SDE_DEBUG_ENC(sde_enc, "\n");
4018
4019 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4020 phys = sde_enc->phys_encs[i];
4021 if (!phys)
4022 continue;
4023
4024 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
4025
4026 rc = sde_encoder_helper_reset_mixers(phys, NULL);
4027 if (rc)
4028 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
4029 }
4030
4031 SDE_ATRACE_END("encoder_release_lm");
4032 return rc;
4033}
4034
4035void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
Alan Kwong628d19e2016-10-31 13:50:13 -04004036{
4037 struct sde_encoder_virt *sde_enc;
4038 struct sde_encoder_phys *phys;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004039 ktime_t wakeup_time;
Alan Kwong628d19e2016-10-31 13:50:13 -04004040 unsigned int i;
4041
4042 if (!drm_enc) {
4043 SDE_ERROR("invalid encoder\n");
4044 return;
4045 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07004046 SDE_ATRACE_BEGIN("encoder_kickoff");
Alan Kwong628d19e2016-10-31 13:50:13 -04004047 sde_enc = to_sde_encoder_virt(drm_enc);
4048
4049 SDE_DEBUG_ENC(sde_enc, "\n");
4050
Clarence Ip662698e2017-09-12 18:34:16 -04004051 /* create a 'no pipes' commit to release buffers on errors */
4052 if (is_error)
4053 _sde_encoder_reset_ctl_hw(drm_enc);
4054
Alan Kwong628d19e2016-10-31 13:50:13 -04004055 /* All phys encs are ready to go, trigger the kickoff */
Clarence Ip110d15c2016-08-16 14:44:41 -04004056 _sde_encoder_kickoff_phys(sde_enc);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004057
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05004058 /* allow phys encs to handle any post-kickoff business */
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004059 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05004060 phys = sde_enc->phys_encs[i];
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004061 if (phys && phys->ops.handle_post_kickoff)
4062 phys->ops.handle_post_kickoff(phys);
4063 }
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004064
4065 if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
Tharun Raj Soma88b6dfc2018-05-11 14:19:49 +05304066 sde_enc->disp_info.is_primary &&
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004067 !_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
4068 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
4069 mod_timer(&sde_enc->vsync_event_timer,
4070 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
4071 }
4072
Narendra Muppalla77b32932017-05-10 13:53:11 -07004073 SDE_ATRACE_END("encoder_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004074}
4075
Clarence Ip662698e2017-09-12 18:34:16 -04004076int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004077 struct drm_framebuffer *fb)
4078{
4079 struct drm_encoder *drm_enc;
4080 struct sde_hw_mixer_cfg mixer;
4081 struct sde_rm_hw_iter lm_iter;
4082 bool lm_valid = false;
4083
4084 if (!phys_enc || !phys_enc->parent) {
4085 SDE_ERROR("invalid encoder\n");
4086 return -EINVAL;
4087 }
4088
4089 drm_enc = phys_enc->parent;
4090 memset(&mixer, 0, sizeof(mixer));
4091
4092 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004093 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
4094 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
4095
4096 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
4097 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
4098 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
4099
4100 if (!hw_lm)
4101 continue;
4102
4103 /* need to flush LM to remove it */
4104 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
4105 phys_enc->hw_ctl->ops.update_pending_flush)
4106 phys_enc->hw_ctl->ops.update_pending_flush(
4107 phys_enc->hw_ctl,
4108 phys_enc->hw_ctl->ops.get_bitmask_mixer(
4109 phys_enc->hw_ctl, hw_lm->idx));
4110
4111 if (fb) {
4112 /* assume a single LM if targeting a frame buffer */
4113 if (lm_valid)
4114 continue;
4115
4116 mixer.out_height = fb->height;
4117 mixer.out_width = fb->width;
4118
4119 if (hw_lm->ops.setup_mixer_out)
4120 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
4121 }
4122
4123 lm_valid = true;
4124
4125 /* only enable border color on LM */
4126 if (phys_enc->hw_ctl->ops.setup_blendstage)
4127 phys_enc->hw_ctl->ops.setup_blendstage(
Kalyan Thotafb1800c2019-03-18 14:33:36 +05304128 phys_enc->hw_ctl, hw_lm->cfg.flags,
4129 hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004130 }
4131
4132 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04004133 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004134 return -EFAULT;
4135 }
4136 return 0;
4137}
4138
Lloyd Atkinsone123c172017-02-27 13:19:08 -05004139void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
4140{
4141 struct sde_encoder_virt *sde_enc;
4142 struct sde_encoder_phys *phys;
4143 int i;
4144
4145 if (!drm_enc) {
4146 SDE_ERROR("invalid encoder\n");
4147 return;
4148 }
4149 sde_enc = to_sde_encoder_virt(drm_enc);
4150
4151 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4152 phys = sde_enc->phys_encs[i];
4153 if (phys && phys->ops.prepare_commit)
4154 phys->ops.prepare_commit(phys);
4155 }
4156}
4157
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07004158#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004159static int _sde_encoder_status_show(struct seq_file *s, void *data)
4160{
4161 struct sde_encoder_virt *sde_enc;
4162 int i;
4163
4164 if (!s || !s->private)
4165 return -EINVAL;
4166
4167 sde_enc = s->private;
4168
4169 mutex_lock(&sde_enc->enc_lock);
4170 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4171 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4172
4173 if (!phys)
4174 continue;
4175
4176 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
4177 phys->intf_idx - INTF_0,
4178 atomic_read(&phys->vsync_cnt),
4179 atomic_read(&phys->underrun_cnt));
4180
4181 switch (phys->intf_mode) {
4182 case INTF_MODE_VIDEO:
4183 seq_puts(s, "mode: video\n");
4184 break;
4185 case INTF_MODE_CMD:
4186 seq_puts(s, "mode: command\n");
4187 break;
4188 case INTF_MODE_WB_BLOCK:
4189 seq_puts(s, "mode: wb block\n");
4190 break;
4191 case INTF_MODE_WB_LINE:
4192 seq_puts(s, "mode: wb line\n");
4193 break;
4194 default:
4195 seq_puts(s, "mode: ???\n");
4196 break;
4197 }
4198 }
4199 mutex_unlock(&sde_enc->enc_lock);
4200
4201 return 0;
4202}
4203
/* debugfs open hook: bind the encoder's status show function to the file */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
4209
Dhaval Patelf9245d62017-03-28 16:24:00 -07004210static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304211 const char __user *user_buf, size_t count, loff_t *ppos)
4212{
4213 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07004214 int i = 0, rc;
4215 char buf[MISR_BUFF_SIZE + 1];
4216 size_t buff_copy;
4217 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304218
Dhaval Patelf9245d62017-03-28 16:24:00 -07004219 if (!file || !file->private_data)
4220 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304221
Dhaval Patelf9245d62017-03-28 16:24:00 -07004222 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304223
Dhaval Patelf9245d62017-03-28 16:24:00 -07004224 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
4225 if (copy_from_user(buf, user_buf, buff_copy))
4226 return -EINVAL;
4227
4228 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304229
4230 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07004231 return -EINVAL;
4232
4233 rc = _sde_encoder_power_enable(sde_enc, true);
4234 if (rc)
4235 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304236
4237 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004238 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07004239 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304240 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4241 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4242
Dhaval Patelf9245d62017-03-28 16:24:00 -07004243 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304244 continue;
4245
Dhaval Patelf9245d62017-03-28 16:24:00 -07004246 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304247 }
4248 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07004249 _sde_encoder_power_enable(sde_enc, false);
4250
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304251 return count;
4252}
4253
/*
 * debugfs 'misr_data' read: report MISR state, or the collected MISR
 * signature of every supporting physical encoder, into a small buffer
 * copied to userspace. Single-shot read: returns 0 on repeated reads.
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	/* only serve the first read; *ppos advances below on success */
	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* hardware must be powered while MISR registers are sampled */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/*
		 * NOTE(review): this masks for any capability bit other
		 * than video mode; presumably MISR readback is supported
		 * only for pure video-mode displays — confirm the intended
		 * mask against MSM_DISPLAY_CAP_* definitions.
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* nothing is returned when the user buffer cannot hold it all */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
4316
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004317static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004318{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004319 struct sde_encoder_virt *sde_enc;
4320 struct msm_drm_private *priv;
4321 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07004322 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004323
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004324 static const struct file_operations debugfs_status_fops = {
4325 .open = _sde_encoder_debugfs_status_open,
4326 .read = seq_read,
4327 .llseek = seq_lseek,
4328 .release = single_release,
4329 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304330
4331 static const struct file_operations debugfs_misr_fops = {
4332 .open = simple_open,
4333 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004334 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304335 };
4336
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004337 char name[SDE_NAME_SIZE];
4338
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004339 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004340 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004341 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004342 }
4343
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004344 sde_enc = to_sde_encoder_virt(drm_enc);
4345 priv = drm_enc->dev->dev_private;
4346 sde_kms = to_sde_kms(priv->kms);
4347
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004348 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
4349
4350 /* create overall sub-directory for the encoder */
4351 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07004352 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004353 if (!sde_enc->debugfs_root)
4354 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304355
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004356 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004357 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004358 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304359
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004360 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004361 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004362
Alan Kwongf2debb02017-04-05 06:19:29 -07004363 for (i = 0; i < sde_enc->num_phys_encs; i++)
4364 if (sde_enc->phys_encs[i] &&
4365 sde_enc->phys_encs[i]->ops.late_register)
4366 sde_enc->phys_encs[i]->ops.late_register(
4367 sde_enc->phys_encs[i],
4368 sde_enc->debugfs_root);
4369
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004370 return 0;
4371}
4372
4373static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
4374{
4375 struct sde_encoder_virt *sde_enc;
4376
4377 if (!drm_enc)
4378 return;
4379
4380 sde_enc = to_sde_encoder_virt(drm_enc);
4381 debugfs_remove_recursive(sde_enc->debugfs_root);
4382}
4383#else
/* debugfs disabled in this config: report success, create nothing */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
4388
/* debugfs disabled in this config: nothing to tear down */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
4392#endif
4393
/* drm_encoder_funcs.late_register hook: set up the debugfs entries */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
4398
/* drm_encoder_funcs.early_unregister hook: tear down the debugfs entries */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
4403
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004404static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04004405 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004406 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004407 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004408{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004409 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004410
Clarence Ip19af1362016-09-23 14:57:51 -04004411 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004412
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004413 /*
4414 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
4415 * in this function, check up-front.
4416 */
4417 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
4418 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004419 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004420 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004421 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004422 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004423
Clarence Ipa4039322016-07-15 16:23:59 -04004424 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004425 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004426
4427 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004428 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004429 PTR_ERR(enc));
4430 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4431 }
4432
4433 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4434 ++sde_enc->num_phys_encs;
4435 }
4436
Clarence Ipa4039322016-07-15 16:23:59 -04004437 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004438 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004439
4440 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004441 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004442 PTR_ERR(enc));
4443 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4444 }
4445
4446 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4447 ++sde_enc->num_phys_encs;
4448 }
4449
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004450 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004451}
4452
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004453static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
4454 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04004455{
4456 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04004457
Clarence Ip19af1362016-09-23 14:57:51 -04004458 if (!sde_enc) {
4459 SDE_ERROR("invalid encoder\n");
4460 return -EINVAL;
4461 }
4462
4463 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04004464
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004465 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004466 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004467 sde_enc->num_phys_encs);
4468 return -EINVAL;
4469 }
4470
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004471 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004472
4473 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004474 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004475 PTR_ERR(enc));
4476 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4477 }
4478
4479 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4480 ++sde_enc->num_phys_encs;
4481
4482 return 0;
4483}
4484
/**
 * sde_encoder_setup_display - configure the virtual encoder for a display:
 *	translate the connector type into an sde intf type and a DRM encoder
 *	mode, then create one physical encoder per horizontal tile.
 * @sde_enc:	Pointer to virtual encoder structure
 * @sde_kms:	Pointer to kms (used for hw catalog lookups)
 * @disp_info:	Display description (intf type, capabilities, h-tile layout)
 * @drm_enc_mode: Output; receives the deduced DRM_MODE_ENCODER_* value
 * @Return: 0 on success, negative error code on failure
 */
static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
				 struct sde_kms *sde_kms,
				struct msm_display_info *disp_info,
				int *drm_enc_mode)
{
	int ret = 0;
	int i = 0;
	enum sde_intf_type intf_type;
	/* positional init: vblank, underrun, frame-done callbacks in order */
	struct sde_encoder_virt_ops parent_ops = {
		sde_encoder_vblank_callback,
		sde_encoder_underrun_callback,
		sde_encoder_frame_done_callback,
	};
	struct sde_enc_phys_init_params phys_params;

	if (!sde_enc || !sde_kms) {
		SDE_ERROR("invalid arg(s), enc %d kms %d\n",
				sde_enc != 0, sde_kms != 0);
		return -EINVAL;
	}

	/* common init params shared by every physical encoder created below */
	memset(&phys_params, 0, sizeof(phys_params));
	phys_params.sde_kms = sde_kms;
	phys_params.parent = &sde_enc->base;
	phys_params.parent_ops = parent_ops;
	phys_params.enc_spinlock = &sde_enc->enc_spinlock;
	phys_params.vblank_ctl_lock = &sde_enc->vblank_ctl_lock;

	SDE_DEBUG("\n");

	/* map DRM connector type -> sde intf type + DRM encoder mode */
	if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
		*drm_enc_mode = DRM_MODE_ENCODER_DSI;
		intf_type = INTF_DSI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_HDMI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_DP;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
		*drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
		intf_type = INTF_WB;
	} else {
		SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
		return -EINVAL;
	}

	WARN_ON(disp_info->num_of_h_tiles < 1);

	sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;

	SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);

	/* idle power-collapse is opted in only for cmd/video capable panels */
	if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
	    (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
		sde_enc->idle_pc_enabled = sde_kms->catalog->has_idle_pc;

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
		/*
		 * Left-most tile is at index 0, content is controller id
		 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
		 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
		 */
		u32 controller_id = disp_info->h_tile_instance[i];

		/* first tile drives the split: master; remaining are slaves */
		if (disp_info->num_of_h_tiles > 1) {
			if (i == 0)
				phys_params.split_role = ENC_ROLE_MASTER;
			else
				phys_params.split_role = ENC_ROLE_SLAVE;
		} else {
			phys_params.split_role = ENC_ROLE_SOLO;
		}

		SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
				i, controller_id, phys_params.split_role);

		if (intf_type == INTF_WB) {
			phys_params.intf_idx = INTF_MAX;
			phys_params.wb_idx = sde_encoder_get_wb(
					sde_kms->catalog,
					intf_type, controller_id);
			if (phys_params.wb_idx == WB_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get wb: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		} else {
			phys_params.wb_idx = WB_MAX;
			phys_params.intf_idx = sde_encoder_get_intf(
					sde_kms->catalog, intf_type,
					controller_id);
			/*
			 * NOTE(review): error string below says "wb" but this
			 * branch resolves an intf — looks like a copy-paste of
			 * the message above; confirm and fix the text.
			 */
			if (phys_params.intf_idx == INTF_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get wb: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		}

		if (!ret) {
			if (intf_type == INTF_WB)
				ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
						&phys_params);
			else
				ret = sde_encoder_virt_add_phys_encs(
						disp_info->capabilities,
						sde_enc,
						&phys_params);
			if (ret)
				SDE_ERROR_ENC(sde_enc,
						"failed to add phys encs\n");
		}
	}

	/* reset debug counters on every phys encoder actually created */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			atomic_set(&phys->vsync_cnt, 0);
			atomic_set(&phys->underrun_cnt, 0);
		}
	}
	mutex_unlock(&sde_enc->enc_lock);

	return ret;
}
4614
/* DRM helper callbacks routing modeset/enable/disable/atomic-check to the
 * virtual encoder implementation.
 */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4621
/* Core DRM encoder ops: teardown plus debugfs register/unregister hooks. */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4627
/**
 * sde_encoder_init - allocate and initialize a virtual (container) encoder
 *	for the given display, registering it with DRM and creating its
 *	physical encoders, work items and RSC client.
 * @dev:	Pointer to drm device
 * @disp_info:	Display description used to configure the encoder
 * @Return: pointer to the new drm_encoder, or ERR_PTR on failure
 */
struct drm_encoder *sde_encoder_init(
		struct drm_device *dev,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct sde_kms *sde_kms = to_sde_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct sde_encoder_virt *sde_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	char name[SDE_NAME_SIZE];
	int ret = 0;

	sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
	if (!sde_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	/* enc_lock must exist before setup_display, which takes it */
	mutex_init(&sde_enc->enc_lock);
	ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->enc_spinlock);
	mutex_init(&sde_enc->vblank_ctl_lock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);

	/* watchdog-style vsync timer is only armed for the primary DSI panel */
	if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
			disp_info->is_primary)
		setup_timer(&sde_enc->vsync_event_timer,
				sde_encoder_vsync_event_handler,
				(unsigned long)sde_enc);

	/* RSC client is optional: on failure just log and run without it */
	snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
	sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
					disp_info->is_primary);
	if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
		SDE_DEBUG("sde rsc client create failed :%ld\n",
						PTR_ERR(sde_enc->rsc_client));
		sde_enc->rsc_client = NULL;
	}

	/* input handler (early wakeup on touch) only matters for cmd mode;
	 * registration failure is logged but intentionally non-fatal
	 */
	if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		ret = _sde_encoder_input_handler(sde_enc);
		if (ret)
			SDE_ERROR(
			"input handler registration failed, rc = %d\n", ret);
	}

	mutex_init(&sde_enc->rc_lock);
	kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);
	sde_enc->vblank_enabled = false;

	kthread_init_work(&sde_enc->vsync_event_work,
			sde_encoder_vsync_event_work_handler);

	kthread_init_work(&sde_enc->input_event_work,
			sde_encoder_input_event_work_handler);

	kthread_init_work(&sde_enc->esd_trigger_work,
			sde_encoder_esd_trigger_work_handler);

	memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));

	SDE_DEBUG_ENC(sde_enc, "created\n");

	return drm_enc;

fail:
	SDE_ERROR("failed to create encoder\n");
	/*
	 * NOTE(review): if sde_encoder_setup_display() fails, drm_enc is
	 * still NULL here, so the kzalloc'd sde_enc (and any phys encoders
	 * already added) appear to leak — confirm and add cleanup.
	 */
	if (drm_enc)
		sde_encoder_destroy(drm_enc);

	return ERR_PTR(ret);
}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004708
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004709int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4710 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004711{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004712 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004713 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004714 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004715
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004716 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004717 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004718 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004719 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004720 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004721 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004722
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004723 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4724 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004725
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004726 switch (event) {
4727 case MSM_ENC_COMMIT_DONE:
4728 fn_wait = phys->ops.wait_for_commit_done;
4729 break;
4730 case MSM_ENC_TX_COMPLETE:
4731 fn_wait = phys->ops.wait_for_tx_complete;
4732 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004733 case MSM_ENC_VBLANK:
4734 fn_wait = phys->ops.wait_for_vblank;
4735 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304736 case MSM_ENC_ACTIVE_REGION:
4737 fn_wait = phys->ops.wait_for_active;
4738 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004739 default:
4740 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4741 event);
4742 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004743 };
4744
4745 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004746 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004747 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004748 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004749 if (ret)
4750 return ret;
4751 }
4752 }
4753
4754 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004755}
4756
Alan Kwong67a3f792016-11-01 23:16:53 -04004757enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4758{
4759 struct sde_encoder_virt *sde_enc = NULL;
4760 int i;
4761
4762 if (!encoder) {
4763 SDE_ERROR("invalid encoder\n");
4764 return INTF_MODE_NONE;
4765 }
4766 sde_enc = to_sde_encoder_virt(encoder);
4767
4768 if (sde_enc->cur_master)
4769 return sde_enc->cur_master->intf_mode;
4770
4771 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4772 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4773
4774 if (phys)
4775 return phys->intf_mode;
4776 }
4777
4778 return INTF_MODE_NONE;
4779}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004780
/**
 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
 * device bootup when cont_splash is enabled
 * @drm_enc: Pointer to drm encoder structure
 * @Return: true if successful in updating the encoder structure
 */
int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct drm_connector *conn = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct drm_display_mode *drm_mode = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
	int ret = 0, i;

	/* validate the full pointer chain before touching anything */
	if (!encoder) {
		SDE_ERROR("invalid drm enc\n");
		return -EINVAL;
	}

	if (!encoder->dev || !encoder->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = encoder->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);
	sde_enc = to_sde_encoder_virt(encoder);
	if (!priv->num_connectors) {
		SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
		return -EINVAL;
	}
	SDE_DEBUG_ENC(sde_enc,
			"num of connectors: %d\n", priv->num_connectors);

	/* locate the registered connector attached to this encoder */
	for (i = 0; i < priv->num_connectors; i++) {
		SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
				priv->connectors[i]->base.id);
		sde_conn = to_sde_connector(priv->connectors[i]);
		if (!sde_conn->encoder) {
			SDE_DEBUG_ENC(sde_enc,
				"encoder not attached to connector\n");
			continue;
		}
		if (sde_conn->encoder->base.id
				== encoder->base.id) {
			conn = (priv->connectors[i]);
			break;
		}
	}

	if (!conn || !conn->state) {
		SDE_ERROR_ENC(sde_enc, "connector not found\n");
		return -EINVAL;
	}

	sde_conn_state = to_sde_connector_state(conn->state);

	if (!sde_conn->ops.get_mode_info) {
		SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
		return -EINVAL;
	}

	/* derive mode info from the mode the bootloader already programmed */
	ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
			&sde_conn_state->mode_info,
			sde_kms->catalog->max_mixer_width,
			sde_conn->display);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"conn: ->get_mode_info failed. ret=%d\n", ret);
		return ret;
	}

	/* reserve hw blocks now so the splash frame keeps displaying */
	ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"failed to reserve hw resources, %d\n", ret);
		return ret;
	}

	if (sde_conn->encoder) {
		conn->state->best_encoder = sde_conn->encoder;
		SDE_DEBUG_ENC(sde_enc,
			"configured cstate->best_encoder to ID = %d\n",
			conn->state->best_encoder->base.id);
	} else {
		SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
				conn->base.id);
	}

	SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
			sde_connector_get_topology_name(conn));
	drm_mode = &encoder->crtc->state->adjusted_mode;
	SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
			drm_mode->hdisplay, drm_mode->vdisplay);
	drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);

	if (encoder->bridge) {
		SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
		/*
		 * For cont-splash use case, we update the mode
		 * configurations manually. This will skip the
		 * usually mode set call when actual frame is
		 * pushed from framework. The bridge needs to
		 * be updated with the current drm mode by
		 * calling the bridge mode set ops.
		 */
		if (encoder->bridge->funcs) {
			SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
			encoder->bridge->funcs->mode_set(encoder->bridge,
						drm_mode, drm_mode);
		}
	} else {
		SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
	}

	/* collect the reserved pingpong blocks for this encoder */
	sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect any reserved DSC blocks */
	sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/*
	 * Distribute reserved CTL blocks to the physical encoders.
	 * NOTE(review): this loop writes phys->hw_ctl without the NULL
	 * check that the loop below performs — presumably entries under
	 * num_phys_encs are always populated here; confirm.
	 */
	sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		phys->hw_ctl = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
			break;
		phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
	}

	/* push splash settings down into every physical encoder */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys) {
			SDE_ERROR_ENC(sde_enc,
				"phys encoders not initialized\n");
			return -EINVAL;
		}

		/* update connector for master and slave phys encoders */
		phys->connector = conn;
		phys->cont_splash_single_flush =
			sde_kms->splash_data.single_flush_en;
		phys->cont_splash_settings = true;

		phys->hw_pp = sde_enc->hw_pp[i];
		if (phys->ops.cont_splash_mode_set)
			phys->ops.cont_splash_mode_set(phys, drm_mode);

		if (phys->ops.is_master && phys->ops.is_master(phys))
			sde_enc->cur_master = phys;
	}

	return ret;
}
Dhaval Patelef58f0b2018-01-22 19:13:52 -08004957
4958int sde_encoder_display_failure_notification(struct drm_encoder *enc)
4959{
Jayant Shekhar00a28e92018-06-04 12:15:23 +05304960 struct msm_drm_thread *event_thread = NULL;
Dhaval Patel222023e2018-02-27 12:24:07 -08004961 struct msm_drm_private *priv = NULL;
4962 struct sde_encoder_virt *sde_enc = NULL;
4963
4964 if (!enc || !enc->dev || !enc->dev->dev_private) {
4965 SDE_ERROR("invalid parameters\n");
4966 return -EINVAL;
4967 }
4968
4969 priv = enc->dev->dev_private;
4970 sde_enc = to_sde_encoder_virt(enc);
4971 if (!sde_enc->crtc || (sde_enc->crtc->index
Jayant Shekhar00a28e92018-06-04 12:15:23 +05304972 >= ARRAY_SIZE(priv->event_thread))) {
Dhaval Patel222023e2018-02-27 12:24:07 -08004973 SDE_DEBUG_ENC(sde_enc,
4974 "invalid cached CRTC: %d or crtc index: %d\n",
4975 sde_enc->crtc == NULL,
4976 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
4977 return -EINVAL;
4978 }
4979
4980 SDE_EVT32_VERBOSE(DRMID(enc));
4981
Jayant Shekhar00a28e92018-06-04 12:15:23 +05304982 event_thread = &priv->event_thread[sde_enc->crtc->index];
4983
4984 kthread_queue_work(&event_thread->worker,
4985 &sde_enc->esd_trigger_work);
4986 kthread_flush_work(&sde_enc->esd_trigger_work);
4987
Dhaval Patelef58f0b2018-01-22 19:13:52 -08004988 /**
4989 * panel may stop generating te signal (vsync) during esd failure. rsc
4990 * hardware may hang without vsync. Avoid rsc hang by generating the
4991 * vsync from watchdog timer instead of panel.
4992 */
4993 _sde_encoder_switch_to_watchdog_vsync(enc);
4994
4995 sde_encoder_wait_for_event(enc, MSM_ENC_TX_COMPLETE);
4996
4997 return 0;
4998}