blob: 814b08b3bc33e20fd11e2987f831a22824b48901 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Kalyan Thota27ec06c2019-03-18 13:19:59 +05302 * Copyright (c) 2014-2019, The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070070#define MAX_CHANNELS_PER_ENC 2
71
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Raviteja Tamatam3eebe962017-10-26 09:55:24 +053076#define FAULT_TOLERENCE_DELTA_IN_MS 2
77
78#define FAULT_TOLERENCE_WAIT_IN_MS 5
79
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040080/* Maximum number of VSYNC wait attempts for RSC state transition */
81#define MAX_RSC_WAIT 5
82
Ping Li16162692018-05-08 14:13:46 -070083#define TOPOLOGY_DUALPIPE_MERGE_MODE(x) \
84 (((x) == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE) || \
85 ((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE) || \
86 ((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC))
87
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070088/**
89 * enum sde_enc_rc_events - events for resource control state machine
90 * @SDE_ENC_RC_EVENT_KICKOFF:
91 * This event happens at NORMAL priority.
92 * Event that signals the start of the transfer. When this event is
93 * received, enable MDP/DSI core clocks and request RSC with CMD state.
94 * Regardless of the previous state, the resource should be in ON state
95 * at the end of this event.
96 * @SDE_ENC_RC_EVENT_FRAME_DONE:
97 * This event happens at INTERRUPT level.
98 * Event signals the end of the data transfer after the PP FRAME_DONE
99 * event. At the end of this event, a delayed work is scheduled to go to
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700100 * IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400101 * @SDE_ENC_RC_EVENT_PRE_STOP:
102 * This event happens at NORMAL priority.
103 * This event, when received during the ON state, set RSC to IDLE, and
104 * and leave the RC STATE in the PRE_OFF state.
105 * It should be followed by the STOP event as part of encoder disable.
106 * If received during IDLE or OFF states, it will do nothing.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700107 * @SDE_ENC_RC_EVENT_STOP:
108 * This event happens at NORMAL priority.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400109 * When this event is received, disable all the MDP/DSI core clocks, and
110 * disable IRQs. It should be called from the PRE_OFF or IDLE states.
111 * IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
112 * PRE_OFF is expected when PRE_STOP was executed during the ON state.
113 * Resource state should be in OFF at the end of the event.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700114 * @SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700115 * This event happens at NORMAL priority from a work item.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700116 * Event signals that there is a seamless mode switch is in prgoress. A
117 * client needs to turn of only irq - leave clocks ON to reduce the mode
118 * switch latency.
119 * @SDE_ENC_RC_EVENT_POST_MODESET:
120 * This event happens at NORMAL priority from a work item.
121 * Event signals that seamless mode switch is complete and resources are
122 * acquired. Clients wants to turn on the irq again and update the rsc
123 * with new vtotal.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700124 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
125 * This event happens at NORMAL priority from a work item.
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700126 * Event signals that there were no frame updates for
127 * IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core clocks
128 * and request RSC with IDLE state and change the resource state to IDLE.
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800129 * @SDE_ENC_RC_EVENT_EARLY_WAKEUP:
130 * This event is triggered from the input event thread when touch event is
131 * received from the input device. On receiving this event,
132 * - If the device is in SDE_ENC_RC_STATE_IDLE state, it turns ON the
133 clocks and enable RSC.
134 * - If the device is in SDE_ENC_RC_STATE_ON state, it resets the delayed
135 * off work since a new commit is imminent.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700136 */
/* See the kerneldoc block above for the full state-machine semantics. */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,	/* start of transfer; resources -> ON */
	SDE_ENC_RC_EVENT_FRAME_DONE,	/* from IRQ context after PP FRAME_DONE */
	SDE_ENC_RC_EVENT_PRE_STOP,	/* RSC -> IDLE, RC state -> PRE_OFF */
	SDE_ENC_RC_EVENT_STOP,		/* disable clocks/IRQs; RC state -> OFF */
	SDE_ENC_RC_EVENT_PRE_MODESET,	/* seamless switch start: irq off, clks on */
	SDE_ENC_RC_EVENT_POST_MODESET,	/* seamless switch done: irq on, update RSC */
	SDE_ENC_RC_EVENT_ENTER_IDLE,	/* idle timeout expired; RC state -> IDLE */
	SDE_ENC_RC_EVENT_EARLY_WAKEUP,	/* input/touch event; wake or reset idle work */
};
147
148/*
149 * enum sde_enc_rc_states - states that the resource control maintains
150 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400151 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700152 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700153 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700154 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
155 */
/* Resource-control states; transitions are driven by sde_enc_rc_events. */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,		/* resources fully off */
	SDE_ENC_RC_STATE_PRE_OFF,	/* transitioning to OFF (after PRE_STOP) */
	SDE_ENC_RC_STATE_ON,		/* resources on, actively displaying */
	SDE_ENC_RC_STATE_MODESET,	/* seamless modeset in progress */
	SDE_ENC_RC_STATE_IDLE		/* idle power collapse active */
};
163
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400164/**
165 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
166 * encoders. Virtual encoder manages one "logical" display. Physical
167 * encoders manage one intf block, tied to a specific panel/sub-panel.
168 * Virtual encoder defers as much as possible to the physical encoders.
169 * Virtual encoder registers itself with the DRM Framework as the encoder.
170 * @base: drm_encoder base class for registration with DRM
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400171 * @enc_spin_lock: Virtual-Encoder-Wide Spin Lock for IRQ purposes
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400172 * @bus_scaling_client: Client handle to the bus scaling interface
173 * @num_phys_encs: Actual number of physical encoders contained.
174 * @phys_encs: Container of physical encoders managed.
175 * @cur_master: Pointer to the current master in this mode. Optimization
176 * Only valid after enable. Cleared as disable.
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -0700177 * @hw_pp Handle to the pingpong blocks used for the display. No.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500178 * pingpong blocks can be different than num_phys_encs.
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800179 * @hw_dsc: Array of DSC block handles used for the display.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500180 * @intfs_swapped Whether or not the phys_enc interfaces have been swapped
181 * for partial update right-only cases, such as pingpong
182 * split where virtual pingpong does not generate IRQs
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400183 * @crtc_vblank_cb: Callback into the upper layer / CRTC for
184 * notification of the VBLANK
185 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400186 * @crtc_kickoff_cb: Callback into CRTC that will flush & start
187 * all CTL paths
188 * @crtc_kickoff_cb_data: Opaque user data given to crtc_kickoff_cb
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700189 * @debugfs_root: Debug file system root file node
190 * @enc_lock: Lock around physical encoder create/destroy and
191 access.
Alan Kwong628d19e2016-10-31 13:50:13 -0400192 * @frame_busy_mask: Bitmask tracking which phys_enc we are still
193 * busy processing current command.
194 * Bit0 = phys_encs[0] etc.
195 * @crtc_frame_event_cb: callback handler for frame event
196 * @crtc_frame_event_cb_data: callback handler private data
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400197 * @vsync_event_timer: vsync timer
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700198 * @rsc_client: rsc client pointer
199 * @rsc_state_init: boolean to indicate rsc config init
200 * @disp_info: local copy of msm_display_info struct
Dhaval Patelf9245d62017-03-28 16:24:00 -0700201 * @misr_enable: misr enable/disable status
Dhaval Patel010f5172017-08-01 22:40:09 -0700202 * @misr_frame_count: misr frame count before start capturing the data
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -0700203 * @idle_pc_enabled: indicate if idle power collapse is enabled
204 * currently. This can be controlled by user-mode
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700205 * @rc_lock: resource control mutex lock to protect
206 * virt encoder over various state changes
207 * @rc_state: resource controller state
208 * @delayed_off_work: delayed worker to schedule disabling of
209 * clks and resources after IDLE_TIMEOUT time.
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400210 * @vsync_event_work: worker to handle vsync event for autorefresh
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800211 * @input_event_work: worker to handle input device touch events
Dhaval Patel222023e2018-02-27 12:24:07 -0800212 * @esd_trigger_work: worker to handle esd trigger events
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800213 * @input_handler: handler for input device events
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700214 * @topology: topology of the display
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -0700215 * @vblank_enabled: boolean to track userspace vblank vote
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700216 * @rsc_config: rsc configuration for display vtotal, fps, etc.
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400217 * @cur_conn_roi: current connector roi
218 * @prv_conn_roi: previous connector roi to optimize if unchanged
Harsh Sahu1e52ed02017-11-28 14:34:22 -0800219 * @crtc pointer to drm_crtc
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400220 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	/* serializes vblank enable/disable requests (not in kerneldoc above) */
	struct mutex vblank_ctl_lock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	/* private data handed back through crtc_frame_event_cb */
	struct sde_crtc_frame_event_cb_data crtc_frame_event_cb_data;

	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_enabled;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct kthread_work input_event_work;
	struct kthread_work esd_trigger_work;
	struct input_handler *input_handler;
	/* tracks whether input_handler was registered, to avoid double reg/unreg */
	bool input_handler_registered;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	struct drm_crtc *crtc;

	/* true while an elevated AHB bus vote is held — TODO confirm owner/scope */
	bool elevated_ahb_vote;
};
273
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400274#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700275
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500276static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
277{
278 struct msm_drm_private *priv;
279 struct sde_kms *sde_kms;
280 struct pm_qos_request *req;
281 u32 cpu_mask;
282 u32 cpu_dma_latency;
283 int cpu;
284
285 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
286 SDE_ERROR("drm device invalid\n");
287 return;
288 }
289
290 priv = drm_enc->dev->dev_private;
291 if (!priv->kms) {
292 SDE_ERROR("invalid kms\n");
293 return;
294 }
295
296 sde_kms = to_sde_kms(priv->kms);
297 if (!sde_kms || !sde_kms->catalog)
298 return;
299
300 cpu_mask = sde_kms->catalog->perf.cpu_mask;
301 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
302 if (!cpu_mask)
303 return;
304
305 req = &sde_kms->pm_qos_cpu_req;
306 req->type = PM_QOS_REQ_AFFINE_CORES;
307 cpumask_empty(&req->cpus_affine);
308 for_each_possible_cpu(cpu) {
309 if ((1 << cpu) & cpu_mask)
310 cpumask_set_cpu(cpu, &req->cpus_affine);
311 }
312 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
313
314 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
315}
316
317static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
318{
319 struct msm_drm_private *priv;
320 struct sde_kms *sde_kms;
321
322 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
323 SDE_ERROR("drm device invalid\n");
324 return;
325 }
326
327 priv = drm_enc->dev->dev_private;
328 if (!priv->kms) {
329 SDE_ERROR("invalid kms\n");
330 return;
331 }
332
333 sde_kms = to_sde_kms(priv->kms);
334 if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
335 return;
336
337 pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
338}
339
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700340static struct drm_connector_state *_sde_encoder_get_conn_state(
341 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800342{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700343 struct msm_drm_private *priv;
344 struct sde_kms *sde_kms;
345 struct list_head *connector_list;
346 struct drm_connector *conn_iter;
347
348 if (!drm_enc) {
349 SDE_ERROR("invalid argument\n");
350 return NULL;
351 }
352
353 priv = drm_enc->dev->dev_private;
354 sde_kms = to_sde_kms(priv->kms);
355 connector_list = &sde_kms->dev->mode_config.connector_list;
356
357 list_for_each_entry(conn_iter, connector_list, head)
358 if (conn_iter->encoder == drm_enc)
359 return conn_iter->state;
360
361 return NULL;
362}
363
364static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
365 struct msm_mode_info *mode_info)
366{
367 struct drm_connector_state *conn_state;
368
369 if (!drm_enc || !mode_info) {
370 SDE_ERROR("invalid arguments\n");
371 return -EINVAL;
372 }
373
374 conn_state = _sde_encoder_get_conn_state(drm_enc);
375 if (!conn_state) {
376 SDE_ERROR("invalid connector state for the encoder: %d\n",
377 drm_enc->base.id);
378 return -EINVAL;
379 }
380
381 return sde_connector_get_mode_info(conn_state, mode_info);
382}
383
384static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
385{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400386 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700387 struct msm_mode_info mode_info;
388 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400389
390 if (!drm_enc)
391 return false;
392
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700393 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
394 if (rc) {
395 SDE_ERROR("failed to get mode info, enc: %d\n",
396 drm_enc->base.id);
397 return false;
398 }
399
400 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800401
402 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
403}
404
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400405bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
406{
407 enum sde_rm_topology_name topology;
408 struct sde_encoder_virt *sde_enc;
409 struct drm_connector *drm_conn;
410
411 if (!drm_enc)
412 return false;
413
414 sde_enc = to_sde_encoder_virt(drm_enc);
415 if (!sde_enc->cur_master)
416 return false;
417
418 drm_conn = sde_enc->cur_master->connector;
419 if (!drm_conn)
420 return false;
421
422 topology = sde_connector_get_topology_name(drm_conn);
423 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
424 return true;
425
426 return false;
427}
428
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -0700429int sde_encoder_in_clone_mode(struct drm_encoder *drm_enc)
430{
431 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
432
433 return sde_enc && sde_enc->cur_master &&
434 sde_enc->cur_master->in_clone_mode;
435}
436
Dhaval Patelf9245d62017-03-28 16:24:00 -0700437static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
438 bool enable)
439{
440 struct drm_encoder *drm_enc;
441 struct msm_drm_private *priv;
442 struct sde_kms *sde_kms;
443
444 if (!sde_enc) {
445 SDE_ERROR("invalid sde enc\n");
446 return -EINVAL;
447 }
448
449 drm_enc = &sde_enc->base;
450 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
451 SDE_ERROR("drm device invalid\n");
452 return -EINVAL;
453 }
454
455 priv = drm_enc->dev->dev_private;
456 if (!priv->kms) {
457 SDE_ERROR("invalid kms\n");
458 return -EINVAL;
459 }
460
461 sde_kms = to_sde_kms(priv->kms);
462
463 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
464 enable);
465}
466
/*
 * sde_encoder_helper_report_irq_timeout - log an IRQ wait timeout and
 *	propagate a frame-done error to the parent encoder.
 * @phys_enc: physical encoder that timed out; assumed non-NULL with valid
 *	parent and hw_pp — NOTE(review): hw_pp is dereferenced unguarded
 *	here, unlike the SDE_*_PHYS macros; confirm callers guarantee it.
 * @intr_idx: index of the interrupt that timed out
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	/* notify the virtual encoder so the frame is failed, not stuck */
	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
481
/*
 * sde_encoder_helper_wait_for_irq - wait for an interrupt to fire, with
 *	recovery for the "IRQ asserted in hardware but callback missed" race.
 * @phys_enc:  physical encoder owning the interrupt
 * @intr_idx:  logical interrupt index into phys_enc->irq[]
 * @wait_info: wait parameters; atomic_cnt tracks pending events
 *
 * Returns 0 on success (including recovered-missed-irq and disabled-irq
 * skip), -EWOULDBLOCK if the encoder is disabled, -ETIMEDOUT on a genuine
 * timeout, -EINVAL on bad arguments.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered/enabled: nothing will ever fire, skip the wait */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* timed out: re-read the raw status to catch a missed irq */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/*
			 * hw fired but the callback was missed; invoke it
			 * manually with interrupts disabled to mimic the
			 * real IRQ context, then treat the wait as success.
			 */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt), irq_status,
				SDE_EVTLOG_ERROR);
		}
	} else {
		/* wait_event returned remaining time > 0: normal success */
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
563
/*
 * sde_encoder_helper_register_irq - look up, register a callback for, and
 *	enable one of the physical encoder's interrupts.
 * @phys_enc: physical encoder owning the interrupt table
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 *
 * Idempotent: returns 0 immediately if already registered (irq_idx >= 0).
 * On any failure, irq->irq_idx is reset to -EINVAL so a later retry or
 * unregister behaves correctly. Returns 0 on success or a negative errno.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration before failing */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
623
/*
 * sde_encoder_helper_unregister_irq - disable and unregister one of the
 *	physical encoder's interrupts.
 * @phys_enc: physical encoder owning the interrupt table
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 *
 * Best-effort: disable/unregister failures are logged to the event log but
 * do not abort the teardown; irq_idx is always reset to -EINVAL at the end.
 * Returns 0 unless phys_enc is NULL (-EINVAL).
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
			DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark as unregistered so a future register call does a fresh lookup */
	irq->irq_idx = -EINVAL;

	return 0;
}
665
/*
 * sde_encoder_get_hw_resources - aggregate the hardware resources required
 *	by all physical encoders, plus topology info for the given state.
 * @drm_enc:    virtual encoder to query
 * @hw_res:     output; zeroed here, then filled by each phys encoder
 * @conn_state: connector state being checked (may be a temporary
 *	atomic-check state, hence the mode-info lookup below)
 */
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_mode_info mode_info;
	int rc, i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}

	/**
	 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
	 * called from atomic_check phase. Use the below API to get mode
	 * information of the temporary conn_state passed.
	 */
	rc = sde_connector_get_mode_info(conn_state, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	hw_res->topology = mode_info.topology;
	hw_res->is_primary = sde_enc->disp_info.is_primary;
}
708
/*
 * sde_encoder_destroy - tear down a virtual encoder and all of its
 * physical encoders, then free the virtual encoder itself.
 *
 * Ordering matters here: the RSC client and physical encoders are
 * destroyed under enc_lock, drm_encoder_cleanup() runs after the lock
 * is released, and the mutex is destroyed only after DRM cleanup.
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	/* destroy each physical encoder and drop it from the tracking array */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* any survivors indicate a phys encoder without a destroy op */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&sde_enc->enc_lock);

	/* release the input-event handler allocated at setup time, if any */
	if (sde_enc->input_handler) {
		kfree(sde_enc->input_handler);
		sde_enc->input_handler = NULL;
		sde_enc->input_handler_registered = false;
	}

	kfree(sde_enc);
}
752
/*
 * sde_encoder_helper_split_config - program the MDP top-level split-pipe
 * and pingpong-split registers for this physical encoder's role.
 * @phys_enc:  physical encoder being configured
 * @interface: interface (intf) index used for split/pp-split routing
 *
 * Only applies to DSI connectors. A SOLO encoder disables both split
 * modes (cfg stays zeroed); a MASTER programs split-pipe; a SLAVE
 * programs pp-split using the master's pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config is only relevant for DSI interfaces */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is still all-zero here, so these calls disable split */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* pp-split slaving is only used for the PPSPLIT topology */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
822
/*
 * sde_encoder_virt_atomic_check - validate a proposed encoder state.
 * @drm_enc:    encoder being checked
 * @crtc_state: proposed crtc state
 * @conn_state: proposed connector state
 *
 * Runs per-phys atomic checks, validates ROIs against the mode on a
 * modeset, records the previous topology, reserves RM resources, and
 * publishes mode/topology info into the connector state.
 *
 * Returns 0 on success, negative errno on failure.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* prefer the phys atomic_check; fall back to mode_fixup */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/*
	 * On a modeset, partial-update ROIs are not allowed: any merged
	 * connector or crtc ROI must exactly match the full mode area.
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/**
		 * record topology in previous atomic state to be able to handle
		 * topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top  = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		/* query the display for mode info of the adjusted mode */
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
988
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800989static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
990 int pic_width, int pic_height)
991{
992 if (!dsc || !pic_width || !pic_height) {
993 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
994 pic_width, pic_height);
995 return -EINVAL;
996 }
997
998 if ((pic_width % dsc->slice_width) ||
999 (pic_height % dsc->slice_height)) {
1000 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
1001 pic_width, pic_height,
1002 dsc->slice_width, dsc->slice_height);
1003 return -EINVAL;
1004 }
1005
1006 dsc->pic_width = pic_width;
1007 dsc->pic_height = pic_height;
1008
1009 return 0;
1010}
1011
1012static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
1013 int intf_width)
1014{
1015 int slice_per_pkt, slice_per_intf;
1016 int bytes_in_slice, total_bytes_per_intf;
1017
1018 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
1019 (intf_width < dsc->slice_width)) {
1020 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
1021 intf_width, dsc ? dsc->slice_width : -1);
1022 return;
1023 }
1024
1025 slice_per_pkt = dsc->slice_per_pkt;
1026 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
1027
1028 /*
1029 * If slice_per_pkt is greater than slice_per_intf then default to 1.
1030 * This can happen during partial update.
1031 */
1032 if (slice_per_pkt > slice_per_intf)
1033 slice_per_pkt = 1;
1034
1035 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
1036 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
1037
1038 dsc->eol_byte_num = total_bytes_per_intf % 3;
1039 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1040 dsc->bytes_in_slice = bytes_in_slice;
1041 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1042 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1043}
1044
1045static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1046 int enc_ip_width)
1047{
1048 int ssm_delay, total_pixels, soft_slice_per_enc;
1049
1050 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1051
1052 /*
1053 * minimum number of initial line pixels is a sum of:
1054 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1055 * 91 for 10 bpc) * 3
1056 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1057 * 3. the initial xmit delay
1058 * 4. total pipeline delay through the "lock step" of encoder (47)
1059 * 5. 6 additional pixels as the output of the rate buffer is
1060 * 48 bits wide
1061 */
1062 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1063 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1064 if (soft_slice_per_enc > 1)
1065 total_pixels += (ssm_delay * 3);
1066 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1067 return 0;
1068}
1069
1070static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1071 struct msm_display_dsc_info *dsc)
1072{
1073 /*
1074 * As per the DSC spec, ICH_RESET can be either end of the slice line
1075 * or at the end of the slice. HW internally generates ich_reset at
1076 * end of the slice line if DSC_MERGE is used or encoder has two
1077 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1078 * is not used then it will generate ich_reset at the end of slice.
1079 *
1080 * Now as per the spec, during one PPS session, position where
1081 * ich_reset is generated should not change. Now if full-screen frame
1082 * has more than 1 soft slice then HW will automatically generate
1083 * ich_reset at the end of slice_line. But for the same panel, if
1084 * partial frame is enabled and only 1 encoder is used with 1 slice,
1085 * then HW will generate ich_reset at end of the slice. This is a
1086 * mismatch. Prevent this by overriding HW's decision.
1087 */
1088 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1089 (dsc->slice_width == dsc->pic_width);
1090}
1091
/*
 * _sde_encoder_dsc_pipe_cfg - program one DSC block / pingpong pair.
 * @hw_dsc:      DSC hardware block (caller must ensure non-NULL)
 * @hw_pp:       pingpong hardware block (caller must ensure non-NULL)
 * @dsc:         DSC configuration to apply
 * @common_mode: DSC_MODE_* flags (split panel / multiplex / video)
 * @ich_reset:   force ich_reset override (see
 *               _sde_encoder_dsc_ich_reset_override_needed)
 * @enable:      false disables DSC on this pingpong and returns
 *
 * The call order below (config, thresholds, pingpong setup, enable) is
 * deliberate hardware sequencing — do not reorder.
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1114
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001115static void _sde_encoder_get_connector_roi(
1116 struct sde_encoder_virt *sde_enc,
1117 struct sde_rect *merged_conn_roi)
1118{
1119 struct drm_connector *drm_conn;
1120 struct sde_connector_state *c_state;
1121
1122 if (!sde_enc || !merged_conn_roi)
1123 return;
1124
1125 drm_conn = sde_enc->phys_encs[0]->connector;
1126
1127 if (!drm_conn || !drm_conn->state)
1128 return;
1129
1130 c_state = to_sde_connector_state(drm_conn->state);
1131 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1132}
1133
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - configure DSC for topologies that
 * use a single DSC encoder and a single interface (with one or more
 * layer mixers feeding it).
 *
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	/* single-encoder topology: only the first pp/dsc pair is used */
	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* picture dimensions come from the current connector ROI */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* one encoder drives the interface, so encoder input == intf input */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en is false: no partial-update ich_reset override needed here */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001184
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - configure DSC for dual-pipe
 * topologies with two DSC encoders each driving its own interface
 * (no DSC merge).
 * @params: kickoff params; affected_displays selects which half-panels
 *          are active this frame (partial update)
 *
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	/* both pp/dsc channel pairs must be present for this topology */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one active display bit means a left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	/* both channels start from the same display DSC configuration */
	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-panel update: the width is shared across both interfaces */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	/* enable DSC only on the channels whose display half is affected */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1278
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - configure DSC for the dual-pipe
 * DSC-merge topology: two DSC encoders whose compressed output is merged
 * onto a single interface.
 * @params: kickoff params; affected_displays selects which half-panels
 *          are active this frame (partial update)
 *
 * Returns 0 on success, -EINVAL on missing hardware blocks or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	/* both pp/dsc channel pairs must be present for dsc merge */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one active display bit means a left/right-only update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/*
	 * NOTE(review): 'i' here holds its post-loop value
	 * (MAX_CHANNELS_PER_ENC), so this event logs the channel count, not
	 * a channel index — confirm this is the intended trace payload.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* channel 0 always runs; channel 1 only for full-panel updates */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1350
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001351static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1352{
1353 struct sde_encoder_virt *sde_enc;
1354 struct drm_connector *drm_conn;
1355 struct drm_display_mode *adj_mode;
1356 struct sde_rect roi;
1357
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001358 if (!drm_enc) {
1359 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001360 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001361 }
1362
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001363 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001364 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1365 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001366 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001367 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001368
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001369 if (!sde_enc->cur_master) {
1370 SDE_ERROR("invalid cur_master parameter\n");
1371 return -EINVAL;
1372 }
1373
1374 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001375 drm_conn = sde_enc->cur_master->connector;
1376
1377 _sde_encoder_get_connector_roi(sde_enc, &roi);
1378 if (sde_kms_rect_is_null(&roi)) {
1379 roi.w = adj_mode->hdisplay;
1380 roi.h = adj_mode->vdisplay;
1381 }
1382
1383 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1384 sizeof(sde_enc->prv_conn_roi));
1385 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1386
1387 return 0;
1388}
1389
1390static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1391 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001392{
1393 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001394 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001395 int ret = 0;
1396
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001397 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1398 !sde_enc->phys_encs[0]->connector)
1399 return -EINVAL;
1400
1401 drm_conn = sde_enc->phys_encs[0]->connector;
1402
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001403 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001404 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001405 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1406 return -EINVAL;
1407 }
1408
Jayant Shekharac7bd942019-02-26 15:44:54 +05301409 params->num_channels =
1410 sde_rm_get_topology_num_encoders(topology);
1411
Ingrid Gallardo83532222017-06-02 16:48:51 -07001412 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001413 SDE_EVT32(DRMID(&sde_enc->base), topology,
1414 sde_enc->cur_conn_roi.x,
1415 sde_enc->cur_conn_roi.y,
1416 sde_enc->cur_conn_roi.w,
1417 sde_enc->cur_conn_roi.h,
1418 sde_enc->prv_conn_roi.x,
1419 sde_enc->prv_conn_roi.y,
1420 sde_enc->prv_conn_roi.w,
1421 sde_enc->prv_conn_roi.h,
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001422 sde_enc->cur_master->cached_mode.hdisplay,
1423 sde_enc->cur_master->cached_mode.vdisplay);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001424
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001425 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1426 &sde_enc->prv_conn_roi))
1427 return ret;
1428
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001429 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001430 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001431 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1432 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001433 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001434 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001435 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001436 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001437 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Kalyan Thota27ec06c2019-03-18 13:19:59 +05301438 case SDE_RM_TOPOLOGY_QUADPIPE_3DMERGE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001439 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001440 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001441 default:
1442 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1443 topology);
1444 return -EINVAL;
1445 };
1446
1447 return ret;
1448}
1449
Dhaval Patelaab9b522017-07-20 12:38:46 -07001450static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
1451 struct msm_display_info *disp_info, bool is_dummy)
1452{
1453 struct sde_vsync_source_cfg vsync_cfg = { 0 };
1454 struct msm_drm_private *priv;
1455 struct sde_kms *sde_kms;
1456 struct sde_hw_mdp *hw_mdptop;
1457 struct drm_encoder *drm_enc;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001458 struct msm_mode_info mode_info;
1459 int i, rc = 0;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001460
Jayant Shekhar136e0592018-10-09 18:32:33 +05301461 if (!sde_enc || !sde_enc->cur_master || !disp_info) {
Dhaval Patelaab9b522017-07-20 12:38:46 -07001462 SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
1463 sde_enc != NULL, disp_info != NULL);
1464 return;
1465 } else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
1466 SDE_ERROR("invalid num phys enc %d/%d\n",
1467 sde_enc->num_phys_encs,
1468 (int) ARRAY_SIZE(sde_enc->hw_pp));
1469 return;
1470 }
1471
1472 drm_enc = &sde_enc->base;
1473 /* this pointers are checked in virt_enable_helper */
1474 priv = drm_enc->dev->dev_private;
1475
1476 sde_kms = to_sde_kms(priv->kms);
1477 if (!sde_kms) {
1478 SDE_ERROR("invalid sde_kms\n");
1479 return;
1480 }
1481
1482 hw_mdptop = sde_kms->hw_mdp;
1483 if (!hw_mdptop) {
1484 SDE_ERROR("invalid mdptop\n");
1485 return;
1486 }
1487
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001488 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1489 if (rc) {
1490 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001491 return;
1492 }
1493
Dhaval Patelaab9b522017-07-20 12:38:46 -07001494 if (hw_mdptop->ops.setup_vsync_source &&
1495 disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
1496 for (i = 0; i < sde_enc->num_phys_encs; i++)
1497 vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;
1498
1499 vsync_cfg.pp_count = sde_enc->num_phys_encs;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001500 vsync_cfg.frame_rate = mode_info.frame_rate;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301501 vsync_cfg.vsync_source =
1502 sde_enc->cur_master->hw_pp->caps->te_source;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001503 if (is_dummy)
1504 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
1505 else if (disp_info->is_te_using_watchdog_timer)
1506 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301507
Dhaval Patelaab9b522017-07-20 12:38:46 -07001508 vsync_cfg.is_dummy = is_dummy;
1509
1510 hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
1511 }
1512}
1513
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001514static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1515{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001516 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001517 struct sde_hw_pingpong *hw_pp = NULL;
1518 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001519
1520 if (!sde_enc || !sde_enc->phys_encs[0] ||
1521 !sde_enc->phys_encs[0]->connector) {
1522 SDE_ERROR("invalid params %d %d\n",
1523 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1524 return -EINVAL;
1525 }
1526
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001527 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001528 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1529 hw_pp = sde_enc->hw_pp[i];
1530 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001531
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001532 if (hw_pp && hw_pp->ops.disable_dsc)
1533 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001534
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001535 if (hw_dsc && hw_dsc->ops.dsc_disable)
1536 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001537 }
1538
1539 return ret;
1540}
1541
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001542static int _sde_encoder_switch_to_watchdog_vsync(struct drm_encoder *drm_enc)
1543{
1544 struct sde_encoder_virt *sde_enc;
1545 struct msm_display_info disp_info;
1546
1547 if (!drm_enc) {
1548 pr_err("invalid drm encoder\n");
1549 return -EINVAL;
1550 }
1551
1552 sde_enc = to_sde_encoder_virt(drm_enc);
1553
1554 sde_encoder_control_te(drm_enc, false);
1555
1556 memcpy(&disp_info, &sde_enc->disp_info, sizeof(disp_info));
1557 disp_info.is_te_using_watchdog_timer = true;
1558 _sde_encoder_update_vsync_source(sde_enc, &disp_info, false);
1559
1560 sde_encoder_control_te(drm_enc, true);
1561
1562 return 0;
1563}
1564
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001565static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001566 struct drm_encoder *drm_enc,
1567 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001568{
1569 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001570 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001571 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001572 struct sde_rsc_cmd_config *rsc_config;
1573 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001574 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001575 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001576 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1577 int wait_count = 0;
1578 struct drm_crtc *primary_crtc;
1579 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001580 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001581 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001582
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001583 if (!drm_enc || !drm_enc->dev) {
1584 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001585 return -EINVAL;
1586 }
1587
1588 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001589 crtc = sde_enc->crtc;
1590
1591 if (!sde_enc->crtc) {
1592 SDE_ERROR("invalid crtc parameter\n");
1593 return -EINVAL;
1594 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001595 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001596 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001597
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001598 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001599 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001600 return 0;
1601 }
1602
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001603 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1604 if (rc) {
1605 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1606 return 0;
1607 }
1608
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001609 /**
1610 * only primary command mode panel can request CMD state.
1611 * all other panels/displays can request for VID state including
1612 * secondary command mode panel.
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001613 * Clone mode encoder can request CLK STATE only.
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001614 */
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001615 if (sde_encoder_in_clone_mode(drm_enc))
1616 rsc_state = enable ? SDE_RSC_CLK_STATE : SDE_RSC_IDLE_STATE;
1617 else
1618 rsc_state = enable ?
1619 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1620 && disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1621 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
1622
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001623 prefill_lines = config ? mode_info.prefill_lines +
1624 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001625
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001626 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001627 if ((rsc_config->fps != mode_info.frame_rate) ||
1628 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001629 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001630 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1631 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1632 rsc_config->fps = mode_info.frame_rate;
1633 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001634 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001635 rsc_config->jitter_numer = mode_info.jitter_numer;
1636 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001637 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001638 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001639
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001640 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001641 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001642 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001643 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001644
1645 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001646 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001647 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001648 } else {
1649 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001650 rsc_state, NULL, crtc->base.id,
1651 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001652 }
1653
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001654 /**
1655 * if RSC performed a state change that requires a VBLANK wait, it will
1656 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1657 *
1658 * if we are the primary display, we will need to enable and wait
1659 * locally since we hold the commit thread
1660 *
1661 * if we are an external display, we must send a signal to the primary
1662 * to enable its VBLANK and wait one, since the RSC hardware is driven
1663 * by the primary panel's VBLANK signals
1664 */
1665 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1666 if (ret) {
1667 SDE_ERROR_ENC(sde_enc,
1668 "sde rsc client update failed ret:%d\n", ret);
1669 return ret;
1670 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1671 return ret;
1672 }
1673
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001674 if (wait_vblank_crtc_id)
1675 wait_refcount =
1676 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1677 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1678 SDE_EVTLOG_FUNC_ENTRY);
1679
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001680 if (crtc->base.id != wait_vblank_crtc_id) {
1681 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1682 if (!primary_crtc) {
1683 SDE_ERROR_ENC(sde_enc,
1684 "failed to find primary crtc id %d\n",
1685 wait_vblank_crtc_id);
1686 return -EINVAL;
1687 }
1688 pipe = drm_crtc_index(primary_crtc);
1689 }
1690
1691 /**
1692 * note: VBLANK is expected to be enabled at this point in
1693 * resource control state machine if on primary CRTC
1694 */
1695 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1696 if (sde_rsc_client_is_state_update_complete(
1697 sde_enc->rsc_client))
1698 break;
1699
1700 if (crtc->base.id == wait_vblank_crtc_id)
1701 ret = sde_encoder_wait_for_event(drm_enc,
1702 MSM_ENC_VBLANK);
1703 else
1704 drm_wait_one_vblank(drm_enc->dev, pipe);
1705
1706 if (ret) {
1707 SDE_ERROR_ENC(sde_enc,
1708 "wait for vblank failed ret:%d\n", ret);
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001709 /**
1710 * rsc hardware may hang without vsync. avoid rsc hang
1711 * by generating the vsync from watchdog timer.
1712 */
1713 if (crtc->base.id == wait_vblank_crtc_id)
1714 _sde_encoder_switch_to_watchdog_vsync(drm_enc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001715 }
1716 }
1717
1718 if (wait_count >= MAX_RSC_WAIT)
1719 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1720 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001721
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001722 if (wait_refcount)
1723 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1724 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1725 SDE_EVTLOG_FUNC_EXIT);
1726
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001727 return ret;
1728}
1729
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001730static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1731{
1732 struct sde_encoder_virt *sde_enc;
1733 int i;
1734
1735 if (!drm_enc) {
1736 SDE_ERROR("invalid encoder\n");
1737 return;
1738 }
1739
1740 sde_enc = to_sde_encoder_virt(drm_enc);
1741
1742 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1743 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1744 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1745
1746 if (phys && phys->ops.irq_control)
1747 phys->ops.irq_control(phys, enable);
1748 }
1749
1750}
1751
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001752/* keep track of the userspace vblank during modeset */
1753static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1754 u32 sw_event)
1755{
1756 struct sde_encoder_virt *sde_enc;
1757 bool enable;
1758 int i;
1759
1760 if (!drm_enc) {
1761 SDE_ERROR("invalid encoder\n");
1762 return;
1763 }
1764
1765 sde_enc = to_sde_encoder_virt(drm_enc);
1766 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1767 sw_event, sde_enc->vblank_enabled);
1768
1769 /* nothing to do if vblank not enabled by userspace */
1770 if (!sde_enc->vblank_enabled)
1771 return;
1772
1773 /* disable vblank on pre_modeset */
1774 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1775 enable = false;
1776 /* enable vblank on post_modeset */
1777 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1778 enable = true;
1779 else
1780 return;
1781
1782 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1783 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1784
1785 if (phys && phys->ops.control_vblank_irq)
1786 phys->ops.control_vblank_irq(phys, enable);
1787 }
1788}
1789
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001790struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1791{
1792 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001793
1794 if (!drm_enc)
1795 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001796 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001797 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001798}
1799
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001800static void _sde_encoder_resource_control_rsc_update(
1801 struct drm_encoder *drm_enc, bool enable)
1802{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001803 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001804 struct sde_encoder_virt *sde_enc;
1805
1806 if (!drm_enc) {
1807 SDE_ERROR("invalid encoder argument\n");
1808 return;
1809 }
1810 sde_enc = to_sde_encoder_virt(drm_enc);
1811 if (!sde_enc->crtc) {
1812 SDE_ERROR("invalid crtc\n");
1813 return;
1814 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001815
1816 if (enable) {
1817 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001818 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001819
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001820 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1821 } else {
1822 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001823 }
1824}
1825
Alan Kwong1124f1f2017-11-10 18:14:39 -05001826static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001827 bool enable)
1828{
1829 struct msm_drm_private *priv;
1830 struct sde_kms *sde_kms;
1831 struct sde_encoder_virt *sde_enc;
Alan Kwong1124f1f2017-11-10 18:14:39 -05001832 int rc;
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001833 bool is_cmd_mode, is_primary;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001834
1835 sde_enc = to_sde_encoder_virt(drm_enc);
1836 priv = drm_enc->dev->dev_private;
1837 sde_kms = to_sde_kms(priv->kms);
1838
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001839 is_cmd_mode = sde_enc->disp_info.capabilities &
1840 MSM_DISPLAY_CAP_CMD_MODE;
1841 is_primary = sde_enc->disp_info.is_primary;
1842
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001843 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1844 SDE_EVT32(DRMID(drm_enc), enable);
1845
1846 if (!sde_enc->cur_master) {
1847 SDE_ERROR("encoder master not set\n");
Alan Kwong1124f1f2017-11-10 18:14:39 -05001848 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001849 }
1850
1851 if (enable) {
1852 /* enable SDE core clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001853 rc = sde_power_resource_enable(&priv->phandle,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001854 sde_kms->core_client, true);
Alan Kwong1124f1f2017-11-10 18:14:39 -05001855 if (rc) {
1856 SDE_ERROR("failed to enable power resource %d\n", rc);
1857 SDE_EVT32(rc, SDE_EVTLOG_ERROR);
1858 return rc;
1859 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001860
Dhaval Patel30874eb2018-05-31 13:33:31 -07001861 sde_enc->elevated_ahb_vote = true;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001862 /* enable DSI clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001863 rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
1864 true);
1865 if (rc) {
1866 SDE_ERROR("failed to enable clk control %d\n", rc);
1867 sde_power_resource_enable(&priv->phandle,
1868 sde_kms->core_client, false);
1869 return rc;
1870 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001871
1872 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001873 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001874
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001875 if (is_cmd_mode && is_primary)
1876 _sde_encoder_pm_qos_add_request(drm_enc);
1877
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001878 } else {
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001879 if (is_cmd_mode && is_primary)
1880 _sde_encoder_pm_qos_remove_request(drm_enc);
1881
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001882 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001883 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001884
1885 /* disable DSI clks */
1886 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
1887
1888 /* disable SDE core clks */
1889 sde_power_resource_enable(&priv->phandle,
1890 sde_kms->core_client, false);
1891 }
1892
Alan Kwong1124f1f2017-11-10 18:14:39 -05001893 return 0;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001894}
1895
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001896static void sde_encoder_input_event_handler(struct input_handle *handle,
1897 unsigned int type, unsigned int code, int value)
1898{
1899 struct drm_encoder *drm_enc = NULL;
1900 struct sde_encoder_virt *sde_enc = NULL;
Jayant Shekhar779c7522018-06-13 12:44:44 +05301901 struct msm_drm_thread *event_thread = NULL;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001902 struct msm_drm_private *priv = NULL;
1903
1904 if (!handle || !handle->handler || !handle->handler->private) {
1905 SDE_ERROR("invalid encoder for the input event\n");
1906 return;
1907 }
1908
1909 drm_enc = (struct drm_encoder *)handle->handler->private;
1910 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
1911 SDE_ERROR("invalid parameters\n");
1912 return;
1913 }
1914
1915 priv = drm_enc->dev->dev_private;
1916 sde_enc = to_sde_encoder_virt(drm_enc);
1917 if (!sde_enc->crtc || (sde_enc->crtc->index
Jayant Shekhar779c7522018-06-13 12:44:44 +05301918 >= ARRAY_SIZE(priv->event_thread))) {
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001919 SDE_DEBUG_ENC(sde_enc,
1920 "invalid cached CRTC: %d or crtc index: %d\n",
1921 sde_enc->crtc == NULL,
1922 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
1923 return;
1924 }
1925
1926 SDE_EVT32_VERBOSE(DRMID(drm_enc));
1927
Jayant Shekhar779c7522018-06-13 12:44:44 +05301928 event_thread = &priv->event_thread[sde_enc->crtc->index];
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001929
Jayant Shekhar779c7522018-06-13 12:44:44 +05301930 /* Queue input event work to event thread */
1931 kthread_queue_work(&event_thread->worker,
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001932 &sde_enc->input_event_work);
1933}
1934
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001935void sde_encoder_control_idle_pc(struct drm_encoder *drm_enc, bool enable)
1936{
1937 struct sde_encoder_virt *sde_enc;
1938
1939 if (!drm_enc) {
1940 SDE_ERROR("invalid encoder\n");
1941 return;
1942 }
1943 sde_enc = to_sde_encoder_virt(drm_enc);
1944
1945 /* return early if there is no state change */
1946 if (sde_enc->idle_pc_enabled == enable)
1947 return;
1948
1949 sde_enc->idle_pc_enabled = enable;
1950
1951 SDE_DEBUG("idle-pc state:%d\n", sde_enc->idle_pc_enabled);
1952 SDE_EVT32(sde_enc->idle_pc_enabled);
1953}
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08001954
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001955static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
1956 u32 sw_event)
1957{
Dhaval Patel99412a52017-07-24 19:16:45 -07001958 bool autorefresh_enabled = false;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07001959 unsigned int lp, idle_pc_duration;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001960 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001961 struct msm_drm_private *priv;
1962 struct msm_drm_thread *disp_thread;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001963 int ret;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001964 bool is_vid_mode = false;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001965
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001966 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
1967 SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
1968 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001969 return -EINVAL;
1970 }
1971 sde_enc = to_sde_encoder_virt(drm_enc);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001972 priv = drm_enc->dev->dev_private;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001973 is_vid_mode = sde_enc->disp_info.capabilities &
1974 MSM_DISPLAY_CAP_VID_MODE;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001975
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001976 /*
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001977 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001978 * events and return early for other events (ie wb display).
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001979 */
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001980 if (!sde_enc->idle_pc_enabled &&
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001981 (sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001982 sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
1983 sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
1984 sw_event != SDE_ENC_RC_EVENT_STOP &&
1985 sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001986 return 0;
1987
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07001988 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc:%d\n",
1989 sw_event, sde_enc->idle_pc_enabled);
1990 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001991 sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);
1992
1993 switch (sw_event) {
1994 case SDE_ENC_RC_EVENT_KICKOFF:
1995 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04001996 if (kthread_cancel_delayed_work_sync(
1997 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001998 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
1999 sw_event);
2000
2001 mutex_lock(&sde_enc->rc_lock);
2002
2003 /* return if the resource control is already in ON state */
2004 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2005 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
2006 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002007 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2008 SDE_EVTLOG_FUNC_CASE1);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002009 mutex_unlock(&sde_enc->rc_lock);
2010 return 0;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002011 } else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
2012 sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
2013 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2014 sw_event, sde_enc->rc_state);
2015 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2016 SDE_EVTLOG_ERROR);
2017 mutex_unlock(&sde_enc->rc_lock);
2018 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002019 }
2020
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002021 if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2022 _sde_encoder_irq_control(drm_enc, true);
2023 } else {
2024 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002025 ret = _sde_encoder_resource_control_helper(drm_enc,
2026 true);
2027 if (ret) {
2028 SDE_ERROR_ENC(sde_enc,
2029 "sw_event:%d, rc in state %d\n",
2030 sw_event, sde_enc->rc_state);
2031 SDE_EVT32(DRMID(drm_enc), sw_event,
2032 sde_enc->rc_state,
2033 SDE_EVTLOG_ERROR);
2034 mutex_unlock(&sde_enc->rc_lock);
2035 return ret;
2036 }
2037
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002038 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2039 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002040
2041 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2042 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
2043 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2044
2045 mutex_unlock(&sde_enc->rc_lock);
2046 break;
2047
2048 case SDE_ENC_RC_EVENT_FRAME_DONE:
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002049 if (!sde_enc->crtc) {
2050 SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
2051 return -EINVAL;
2052 }
2053
2054 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2055 SDE_ERROR("invalid crtc index :%u\n",
2056 sde_enc->crtc->index);
2057 return -EINVAL;
2058 }
2059 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2060
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002061 /*
2062 * mutex lock is not used as this event happens at interrupt
2063 * context. And locking is not required as, the other events
2064 * like KICKOFF and STOP does a wait-for-idle before executing
2065 * the resource_control
2066 */
2067 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2068 SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
2069 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002070 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2071 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002072 return -EINVAL;
2073 }
2074
2075 /*
2076 * schedule off work item only when there are no
2077 * frames pending
2078 */
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002079 if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002080 SDE_DEBUG_ENC(sde_enc, "skip schedule work");
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002081 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2082 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002083 return 0;
2084 }
2085
Dhaval Patel99412a52017-07-24 19:16:45 -07002086 /* schedule delayed off work if autorefresh is disabled */
2087 if (sde_enc->cur_master &&
2088 sde_enc->cur_master->ops.is_autorefresh_enabled)
2089 autorefresh_enabled =
2090 sde_enc->cur_master->ops.is_autorefresh_enabled(
2091 sde_enc->cur_master);
2092
Clarence Ip89628132017-07-27 13:33:51 -04002093 /* set idle timeout based on master connector's lp value */
2094 if (sde_enc->cur_master)
2095 lp = sde_connector_get_lp(
2096 sde_enc->cur_master->connector);
2097 else
2098 lp = SDE_MODE_DPMS_ON;
2099
2100 if (lp == SDE_MODE_DPMS_LP2)
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002101 idle_pc_duration = IDLE_SHORT_TIMEOUT;
Clarence Ip89628132017-07-27 13:33:51 -04002102 else
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002103 idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;
Clarence Ip89628132017-07-27 13:33:51 -04002104
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002105 if (!autorefresh_enabled)
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002106 kthread_mod_delayed_work(
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002107 &disp_thread->worker,
2108 &sde_enc->delayed_off_work,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002109 msecs_to_jiffies(idle_pc_duration));
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002110 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Clarence Ip89628132017-07-27 13:33:51 -04002111 autorefresh_enabled,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002112 idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002113 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
2114 sw_event);
2115 break;
2116
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002117 case SDE_ENC_RC_EVENT_PRE_STOP:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002118 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002119 if (kthread_cancel_delayed_work_sync(
2120 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002121 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2122 sw_event);
2123
2124 mutex_lock(&sde_enc->rc_lock);
2125
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002126 if (is_vid_mode &&
2127 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2128 _sde_encoder_irq_control(drm_enc, true);
2129 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002130 /* skip if is already OFF or IDLE, resources are off already */
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002131 else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002132 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2133 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
2134 sw_event, sde_enc->rc_state);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002135 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2136 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002137 mutex_unlock(&sde_enc->rc_lock);
2138 return 0;
2139 }
2140
2141 /**
2142 * IRQs are still enabled currently, which allows wait for
2143 * VBLANK which RSC may require to correctly transition to OFF
2144 */
2145 _sde_encoder_resource_control_rsc_update(drm_enc, false);
2146
2147 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2148 SDE_ENC_RC_STATE_PRE_OFF,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002149 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002150
2151 sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;
2152
2153 mutex_unlock(&sde_enc->rc_lock);
2154 break;
2155
2156 case SDE_ENC_RC_EVENT_STOP:
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002157 /* cancel vsync event work and timer */
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302158 kthread_cancel_work_sync(&sde_enc->vsync_event_work);
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002159 del_timer_sync(&sde_enc->vsync_event_timer);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002160
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302161 mutex_lock(&sde_enc->rc_lock);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002162 /* return if the resource control is already in OFF state */
2163 if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
2164 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
2165 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002166 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2167 SDE_EVTLOG_FUNC_CASE4);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002168 mutex_unlock(&sde_enc->rc_lock);
2169 return 0;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002170 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
2171 sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002172 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2173 sw_event, sde_enc->rc_state);
2174 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2175 SDE_EVTLOG_ERROR);
2176 mutex_unlock(&sde_enc->rc_lock);
2177 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002178 }
2179
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002180 /**
2181 * expect to arrive here only if in either idle state or pre-off
2182 * and in IDLE state the resources are already disabled
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002183 */
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002184 if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002185 _sde_encoder_resource_control_helper(drm_enc, false);
2186
2187 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002188 SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002189
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002190 sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;
2191
2192 mutex_unlock(&sde_enc->rc_lock);
2193 break;
2194
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002195 case SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002196 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002197 if (kthread_cancel_delayed_work_sync(
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002198 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002199 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2200 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002201
2202 mutex_lock(&sde_enc->rc_lock);
2203
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002204 /* return if the resource control is already in ON state */
2205 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2206 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002207 ret = _sde_encoder_resource_control_helper(drm_enc,
2208 true);
2209 if (ret) {
2210 SDE_ERROR_ENC(sde_enc,
2211 "sw_event:%d, rc in state %d\n",
2212 sw_event, sde_enc->rc_state);
2213 SDE_EVT32(DRMID(drm_enc), sw_event,
2214 sde_enc->rc_state,
2215 SDE_EVTLOG_ERROR);
2216 mutex_unlock(&sde_enc->rc_lock);
2217 return ret;
2218 }
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002219
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002220 _sde_encoder_resource_control_rsc_update(drm_enc, true);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002221
2222 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2223 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002224 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002225 }
2226
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002227 ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2228 if (ret && ret != -EWOULDBLOCK) {
2229 SDE_ERROR_ENC(sde_enc,
2230 "wait for commit done returned %d\n",
2231 ret);
2232 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2233 ret, SDE_EVTLOG_ERROR);
2234 mutex_unlock(&sde_enc->rc_lock);
2235 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002236 }
2237
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002238 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002239 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002240
2241 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2242 SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);
2243
2244 sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
2245 mutex_unlock(&sde_enc->rc_lock);
2246 break;
2247
2248 case SDE_ENC_RC_EVENT_POST_MODESET:
2249 mutex_lock(&sde_enc->rc_lock);
2250
2251 /* return if the resource control is already in ON state */
2252 if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
2253 SDE_ERROR_ENC(sde_enc,
2254 "sw_event:%d, rc:%d !MODESET state\n",
2255 sw_event, sde_enc->rc_state);
2256 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2257 SDE_EVTLOG_ERROR);
2258 mutex_unlock(&sde_enc->rc_lock);
2259 return -EINVAL;
2260 }
2261
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002262 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002263 _sde_encoder_irq_control(drm_enc, true);
2264
2265 _sde_encoder_update_rsc_client(drm_enc, NULL, true);
2266
2267 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2268 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);
2269
2270 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2271
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002272 mutex_unlock(&sde_enc->rc_lock);
2273 break;
2274
2275 case SDE_ENC_RC_EVENT_ENTER_IDLE:
2276 mutex_lock(&sde_enc->rc_lock);
2277
2278 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002279 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002280 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002281 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2282 SDE_EVTLOG_ERROR);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002283 mutex_unlock(&sde_enc->rc_lock);
2284 return 0;
2285 }
2286
2287 /*
2288 * if we are in ON but a frame was just kicked off,
2289 * ignore the IDLE event, it's probably a stale timer event
2290 */
2291 if (sde_enc->frame_busy_mask[0]) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002292 SDE_ERROR_ENC(sde_enc,
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002293 "sw_event:%d, rc:%d frame pending\n",
2294 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002295 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2296 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002297 mutex_unlock(&sde_enc->rc_lock);
2298 return 0;
2299 }
2300
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002301 if (is_vid_mode) {
2302 _sde_encoder_irq_control(drm_enc, false);
2303 } else {
2304 /* disable all the clks and resources */
2305 _sde_encoder_resource_control_rsc_update(drm_enc,
2306 false);
2307 _sde_encoder_resource_control_helper(drm_enc, false);
2308 }
2309
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002310 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002311 SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002312 sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
2313
2314 mutex_unlock(&sde_enc->rc_lock);
2315 break;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002316 case SDE_ENC_RC_EVENT_EARLY_WAKEUP:
2317 if (!sde_enc->crtc ||
2318 sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2319 SDE_DEBUG_ENC(sde_enc,
2320 "invalid crtc:%d or crtc index:%d , sw_event:%u\n",
2321 sde_enc->crtc == NULL,
2322 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL,
2323 sw_event);
2324 return -EINVAL;
2325 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002326
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002327 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2328
2329 mutex_lock(&sde_enc->rc_lock);
2330
2331 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2332 if (sde_enc->cur_master &&
2333 sde_enc->cur_master->ops.is_autorefresh_enabled)
2334 autorefresh_enabled =
2335 sde_enc->cur_master->ops.is_autorefresh_enabled(
2336 sde_enc->cur_master);
2337 if (autorefresh_enabled) {
2338 SDE_DEBUG_ENC(sde_enc,
2339 "not handling early wakeup since auto refresh is enabled\n");
Jeykumar Sankaran067b3b92018-01-19 10:35:22 -08002340 mutex_unlock(&sde_enc->rc_lock);
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002341 return 0;
2342 }
2343
2344 if (!sde_crtc_frame_pending(sde_enc->crtc))
2345 kthread_mod_delayed_work(&disp_thread->worker,
2346 &sde_enc->delayed_off_work,
2347 msecs_to_jiffies(
2348 IDLE_POWERCOLLAPSE_DURATION));
2349 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2350 /* enable all the clks and resources */
2351 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2352 _sde_encoder_resource_control_helper(drm_enc, true);
2353
Jayant Shekhar85c40332018-05-08 11:46:36 +05302354 /*
2355 * In some cases, commit comes with slight delay
2356 * (> 80 ms)after early wake up, prevent clock switch
2357 * off to avoid jank in next update. So, increase the
2358 * command mode idle timeout sufficiently to prevent
2359 * such case.
2360 */
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002361 kthread_mod_delayed_work(&disp_thread->worker,
Jayant Shekhar85c40332018-05-08 11:46:36 +05302362 &sde_enc->delayed_off_work,
2363 msecs_to_jiffies(
2364 IDLE_POWERCOLLAPSE_IN_EARLY_WAKEUP));
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002365
2366 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2367 }
2368
2369 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2370 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE8);
2371
2372 mutex_unlock(&sde_enc->rc_lock);
2373 break;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002374 default:
Dhaval Patela5f75952017-07-25 11:17:41 -07002375 SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002376 SDE_ERROR("unexpected sw_event: %d\n", sw_event);
2377 break;
2378 }
2379
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002380 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002381 sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
2382 return 0;
2383}
2384
/*
 * sde_encoder_virt_mode_set - drm_encoder_helper mode_set callback
 * @drm_enc: encoder being configured
 * @mode: requested display mode
 * @adj_mode: adjusted mode actually programmed to hardware
 *
 * Reserves hardware resources (pingpong, DSC) for the new mode via the
 * resource manager and propagates mode_set to every physical encoder.
 * For seamless dynamic-mode-switch (DMS) requests, resources are released
 * (RC PRE_MODESET) before reservation and re-acquired (RC POST_MODESET)
 * afterwards so the switch happens without a full disable/enable cycle.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	/* mode_set touches hardware; bail out if power rails are down */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	/* find the connector currently routed to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh cached mode info (topology, compression) for adj_mode */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the pingpong blocks the RM just assigned to this encoder */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the DSC blocks, if any were reserved for this topology */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each physical encoder its pingpong block and new mode */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2519
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002520void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2521{
2522 struct sde_encoder_virt *sde_enc;
2523 struct sde_encoder_phys *phys;
2524 int i;
2525
2526 if (!drm_enc) {
2527 SDE_ERROR("invalid parameters\n");
2528 return;
2529 }
2530
2531 sde_enc = to_sde_encoder_virt(drm_enc);
2532 if (!sde_enc) {
2533 SDE_ERROR("invalid sde encoder\n");
2534 return;
2535 }
2536
2537 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2538 phys = sde_enc->phys_encs[i];
2539 if (phys && phys->ops.control_te)
2540 phys->ops.control_te(phys, enable);
2541 }
2542}
2543
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302544static int _sde_encoder_input_connect(struct input_handler *handler,
2545 struct input_dev *dev, const struct input_device_id *id)
2546{
2547 struct input_handle *handle;
2548 int rc = 0;
2549
2550 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
2551 if (!handle)
2552 return -ENOMEM;
2553
2554 handle->dev = dev;
2555 handle->handler = handler;
2556 handle->name = handler->name;
2557
2558 rc = input_register_handle(handle);
2559 if (rc) {
2560 pr_err("failed to register input handle\n");
2561 goto error;
2562 }
2563
2564 rc = input_open_device(handle);
2565 if (rc) {
2566 pr_err("failed to open input device\n");
2567 goto error_unregister;
2568 }
2569
2570 return 0;
2571
2572error_unregister:
2573 input_unregister_handle(handle);
2574
2575error:
2576 kfree(handle);
2577
2578 return rc;
2579}
2580
/*
 * _sde_encoder_input_disconnect - input handler disconnect callback
 * @handle: handle previously allocated in _sde_encoder_input_connect
 *
 * Tears the handle down in the reverse order of the connect path:
 * close the device, unregister the handle, then free it.
 */
static void _sde_encoder_input_disconnect(struct input_handle *handle)
{
	input_close_device(handle);
	input_unregister_handle(handle);
	kfree(handle);
}
2587
/**
 * Match table for the events this handler wants callbacks for.
 * A callback is triggered for touch events (EV_ABS) that report a change
 * in the multi-touch X or Y position coordinates.
 */
static const struct input_device_id sde_input_ids[] = {
	{
		.flags = INPUT_DEVICE_ID_MATCH_EVBIT,
		.evbit = { BIT_MASK(EV_ABS) },
		.absbit = { [BIT_WORD(ABS_MT_POSITION_X)] =
				BIT_MASK(ABS_MT_POSITION_X) |
				BIT_MASK(ABS_MT_POSITION_Y) },
	},
	{ },	/* terminating (empty) entry */
};
2603
/*
 * _sde_encoder_input_handler_register - register the encoder input handler
 * @input_handler: handler previously allocated by _sde_encoder_input_handler
 *
 * Registers the handler with the input subsystem so touch activity can
 * trigger early wakeup of the encoder.
 *
 * Ownership note: @input_handler remains owned by the caller (it is cached
 * in sde_enc->input_handler). On failure it is intentionally NOT freed
 * here: the enable path keeps the cached pointer and retries registration
 * on the next enable when input_handler_registered is still false, so
 * freeing it here would leave that pointer dangling and cause a
 * use-after-free on the retry.
 *
 * Return: 0 on success, error code from input_register_handler otherwise.
 */
static int _sde_encoder_input_handler_register(
		struct input_handler *input_handler)
{
	int rc = 0;

	rc = input_register_handler(input_handler);
	if (rc) {
		pr_err("input_register_handler failed, rc= %d\n", rc);
		return rc;
	}

	return rc;
}
2618
/*
 * _sde_encoder_input_handler - allocate and initialize the input handler
 * @sde_enc: virtual encoder the handler belongs to
 *
 * Builds an input_handler that listens for the touch events described by
 * sde_input_ids and caches it in @sde_enc. The handler is only allocated
 * and populated here; registration with the input subsystem is deferred
 * to _sde_encoder_input_handler_register (input_handler_registered tracks
 * whether that has happened).
 *
 * Return: 0 on success, -EINVAL if a handler already exists for this
 * encoder, -ENOMEM on allocation failure.
 */
static int _sde_encoder_input_handler(
		struct sde_encoder_virt *sde_enc)
{
	struct input_handler *input_handler = NULL;
	int rc = 0;

	/* only one handler may ever be created per encoder */
	if (sde_enc->input_handler) {
		SDE_ERROR_ENC(sde_enc,
				"input_handle is active. unexpected\n");
		return -EINVAL;
	}

	input_handler = kzalloc(sizeof(*sde_enc->input_handler), GFP_KERNEL);
	if (!input_handler)
		return -ENOMEM;

	/* event fires on touch input; connect/disconnect manage per-device handles */
	input_handler->event = sde_encoder_input_event_handler;
	input_handler->connect = _sde_encoder_input_connect;
	input_handler->disconnect = _sde_encoder_input_disconnect;
	input_handler->name = "sde";
	input_handler->id_table = sde_input_ids;
	input_handler->private = sde_enc;

	sde_enc->input_handler = input_handler;
	/* registration is deferred to the enable path */
	sde_enc->input_handler_registered = false;

	return rc;
}
2647
/*
 * _sde_encoder_virt_enable_helper - common post-enable hardware setup
 * @drm_enc: encoder that has just been enabled or restored
 *
 * Performs top-level MDP programming shared by the enable and restore
 * paths: audio interface selection for DisplayPort, UBWC reset, vsync
 * source selection, and TE enable. Also clears the cached connector ROIs
 * so the next kickoff recomputes them from scratch.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the DP interface when driving a DisplayPort sink */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
					sde_enc->cur_master->hw_mdptop);

	/* reset UBWC registers per catalog settings, if the op is provided */
	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* forget stale ROIs; next commit will repopulate them */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2690
2691void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2692{
2693 struct sde_encoder_virt *sde_enc = NULL;
2694 int i;
2695
2696 if (!drm_enc) {
2697 SDE_ERROR("invalid encoder\n");
2698 return;
2699 }
2700 sde_enc = to_sde_encoder_virt(drm_enc);
2701
2702 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2703 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2704
2705 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2706 phys->ops.restore(phys);
2707 }
2708
2709 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2710 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2711
2712 _sde_encoder_virt_enable_helper(drm_enc);
2713}
2714
/*
 * sde_encoder_off_work - delayed-off kthread work handler
 * @work: embedded delayed_off_work member of a sde_encoder_virt
 *
 * Runs when the idle power-collapse timeout expires without a new frame;
 * requests that the encoder enter its idle state.
 */
static void sde_encoder_off_work(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, delayed_off_work.work);
	struct drm_encoder *drm_enc;

	/*
	 * NOTE(review): container_of on a valid work pointer cannot yield
	 * NULL, so this check is effectively defensive dead code.
	 */
	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}
	drm_enc = &sde_enc->base;

	sde_encoder_idle_request(drm_enc);
}
2729
/*
 * sde_encoder_virt_enable - drm_encoder_helper enable callback
 * @drm_enc: encoder being enabled
 *
 * Brings the virtual encoder up: elects the master physical encoder,
 * registers the early-wakeup input handler (if not already registered),
 * arms the delayed-off work for idle power collapse, acquires resources
 * via the KICKOFF resource-control event, and enables (or, for seamless
 * DMS switches, restores) each physical encoder — slaves first, then the
 * master — before running the shared enable helper.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* enable touches hardware; bail out if power rails are down */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	/* cache the crtc if mode_set did not already do so */
	if (drm_enc->crtc && !sde_enc->crtc)
		sde_enc->crtc = drm_enc->crtc;

	comp_info = &mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* elect the master physical encoder; required for everything below */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* register the early-wakeup input handler once; failure is non-fatal */
	if (sde_enc->input_handler && !sde_enc->input_handler_registered) {
		ret = _sde_encoder_input_handler_register(
				sde_enc->input_handler);
		if (ret)
			SDE_ERROR(
				"input handler registration failed, rc = %d\n", ret);
		else
			sde_enc->input_handler_registered = true;
	}

	/*
	 * skip re-initializing the delayed-off work during seamless VRR/DMS
	 * switches, since the encoder (and a possibly pending work item)
	 * stays active across the transition
	 */
	if (!(msm_is_mode_seamless_vrr(cur_mode)
			|| msm_is_mode_seamless_dms(cur_mode)))
		kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);

	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* MISR capture only applies to video-mode capable displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* master is enabled/restored last, after all slaves */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
2836
2837static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2838{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002839 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002840 struct msm_drm_private *priv;
2841 struct sde_kms *sde_kms;
Clarence Iped3327b2017-11-01 13:13:58 -04002842 enum sde_intf_mode intf_mode;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002843 int i = 0;
2844
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002845 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002846 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002847 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002848 } else if (!drm_enc->dev) {
2849 SDE_ERROR("invalid dev\n");
2850 return;
2851 } else if (!drm_enc->dev->dev_private) {
2852 SDE_ERROR("invalid dev_private\n");
2853 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002854 }
2855
Alan Kwong1124f1f2017-11-10 18:14:39 -05002856 if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
2857 SDE_ERROR("power resource is not enabled\n");
2858 return;
2859 }
2860
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002861 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002862 SDE_DEBUG_ENC(sde_enc, "\n");
2863
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002864 priv = drm_enc->dev->dev_private;
2865 sde_kms = to_sde_kms(priv->kms);
Clarence Iped3327b2017-11-01 13:13:58 -04002866 intf_mode = sde_encoder_get_intf_mode(drm_enc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002867
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002868 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002869
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302870 if (sde_enc->input_handler && sde_enc->input_handler_registered) {
Shubhashree Dhar137adbb2018-06-26 18:03:38 +05302871 input_unregister_handler(sde_enc->input_handler);
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302872 sde_enc->input_handler_registered = false;
2873 }
2874
Shubhashree Dhar137adbb2018-06-26 18:03:38 +05302875
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002876 /* wait for idle */
2877 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2878
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302879 kthread_flush_work(&sde_enc->input_event_work);
2880
Clarence Iped3327b2017-11-01 13:13:58 -04002881 /*
2882 * For primary command mode encoders, execute the resource control
2883 * pre-stop operations before the physical encoders are disabled, to
2884 * allow the rsc to transition its states properly.
2885 *
2886 * For other encoder types, rsc should not be enabled until after
2887 * they have been fully disabled, so delay the pre-stop operations
2888 * until after the physical disable calls have returned.
2889 */
2890 if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
2891 sde_encoder_resource_control(drm_enc,
2892 SDE_ENC_RC_EVENT_PRE_STOP);
2893 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2894 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002895
Clarence Iped3327b2017-11-01 13:13:58 -04002896 if (phys && phys->ops.disable)
2897 phys->ops.disable(phys);
2898 }
2899 } else {
2900 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2901 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002902
Clarence Iped3327b2017-11-01 13:13:58 -04002903 if (phys && phys->ops.disable)
2904 phys->ops.disable(phys);
2905 }
2906 sde_encoder_resource_control(drm_enc,
2907 SDE_ENC_RC_EVENT_PRE_STOP);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002908 }
2909
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002910 /*
2911 * disable dsc after the transfer is complete (for command mode)
2912 * and after physical encoder is disabled, to make sure timing
2913 * engine is already disabled (for video mode).
2914 */
2915 _sde_encoder_dsc_disable(sde_enc);
2916
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002917 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2918
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002919 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Ingrid Gallardo72cd1632018-02-28 15:26:37 -08002920 if (sde_enc->phys_encs[i]) {
2921 sde_enc->phys_encs[i]->cont_splash_settings = false;
2922 sde_enc->phys_encs[i]->cont_splash_single_flush = 0;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002923 sde_enc->phys_encs[i]->connector = NULL;
Ingrid Gallardo72cd1632018-02-28 15:26:37 -08002924 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002925 }
2926
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002927 sde_enc->cur_master = NULL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002928 /*
2929 * clear the cached crtc in sde_enc on use case finish, after all the
2930 * outstanding events and timers have been completed
2931 */
2932 sde_enc->crtc = NULL;
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002933
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002934 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002935
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002936 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002937}
2938
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002939static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002940 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002941{
2942 int i = 0;
2943
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002944 for (i = 0; i < catalog->intf_count; i++) {
2945 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002946 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002947 return catalog->intf[i].id;
2948 }
2949 }
2950
2951 return INTF_MAX;
2952}
2953
Alan Kwongbb27c092016-07-20 16:41:25 -04002954static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2955 enum sde_intf_type type, u32 controller_id)
2956{
2957 if (controller_id < catalog->wb_count)
2958 return catalog->wb[controller_id].id;
2959
2960 return WB_MAX;
2961}
2962
Dhaval Patel81e87882016-10-19 21:41:56 -07002963static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
2964 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002965{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002966 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002967 unsigned long lock_flags;
2968
Dhaval Patel81e87882016-10-19 21:41:56 -07002969 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002970 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002971
Narendra Muppalla77b32932017-05-10 13:53:11 -07002972 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002973 sde_enc = to_sde_encoder_virt(drm_enc);
2974
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002975 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002976 if (sde_enc->crtc_vblank_cb)
2977 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002978 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07002979
2980 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07002981 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002982}
2983
/**
 * sde_encoder_underrun_callback - underrun irq callback from a physical
 *	encoder
 * @drm_enc: base drm encoder of the virtual encoder
 * @phy_enc: physical encoder that detected the underrun
 *
 * Counts the underrun, records it in the event log and ftrace, and pokes
 * the debug controls that can stop tracing / panic on underrun when those
 * debug knobs are enabled.
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));

	trace_sde_encoder_underrun(DRMID(drm_enc),
			atomic_read(&phy_enc->underrun_cnt));

	/* debug hooks: optionally freeze ftrace / panic to catch the cause */
	SDE_DBG_CTRL("stop_ftrace");
	SDE_DBG_CTRL("panic_underrun");

	SDE_ATRACE_END("encoder_underrun_callback");
}
3002
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003003void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
3004 void (*vbl_cb)(void *), void *vbl_data)
3005{
3006 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3007 unsigned long lock_flags;
3008 bool enable;
3009 int i;
3010
3011 enable = vbl_cb ? true : false;
3012
Clarence Ip19af1362016-09-23 14:57:51 -04003013 if (!drm_enc) {
3014 SDE_ERROR("invalid encoder\n");
3015 return;
3016 }
3017 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003018 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003019
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003020 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003021 sde_enc->crtc_vblank_cb = vbl_cb;
3022 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003023 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003024
3025 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3026 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3027
3028 if (phys && phys->ops.control_vblank_irq)
3029 phys->ops.control_vblank_irq(phys, enable);
3030 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07003031 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003032}
3033
Alan Kwong628d19e2016-10-31 13:50:13 -04003034void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003035 void (*frame_event_cb)(void *, u32 event),
3036 struct drm_crtc *crtc)
Alan Kwong628d19e2016-10-31 13:50:13 -04003037{
3038 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3039 unsigned long lock_flags;
3040 bool enable;
3041
3042 enable = frame_event_cb ? true : false;
3043
3044 if (!drm_enc) {
3045 SDE_ERROR("invalid encoder\n");
3046 return;
3047 }
3048 SDE_DEBUG_ENC(sde_enc, "\n");
3049 SDE_EVT32(DRMID(drm_enc), enable, 0);
3050
3051 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
3052 sde_enc->crtc_frame_event_cb = frame_event_cb;
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003053 sde_enc->crtc_frame_event_cb_data.crtc = crtc;
Alan Kwong628d19e2016-10-31 13:50:13 -04003054 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
3055}
3056
3057static void sde_encoder_frame_done_callback(
3058 struct drm_encoder *drm_enc,
3059 struct sde_encoder_phys *ready_phys, u32 event)
3060{
3061 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3062 unsigned int i;
3063
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003064 sde_enc->crtc_frame_event_cb_data.connector =
3065 sde_enc->cur_master->connector;
3066
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003067 if (event & (SDE_ENCODER_FRAME_EVENT_DONE
3068 | SDE_ENCODER_FRAME_EVENT_ERROR
3069 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Lloyd Atkinsond0fedd02017-03-01 13:25:40 -05003070
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003071 if (!sde_enc->frame_busy_mask[0]) {
3072 /**
3073 * suppress frame_done without waiter,
3074 * likely autorefresh
3075 */
3076 SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
3077 return;
Alan Kwong628d19e2016-10-31 13:50:13 -04003078 }
3079
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003080 /* One of the physical encoders has become idle */
3081 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3082 if (sde_enc->phys_encs[i] == ready_phys) {
3083 clear_bit(i, sde_enc->frame_busy_mask);
3084 SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
3085 sde_enc->frame_busy_mask[0]);
3086 }
3087 }
Alan Kwong628d19e2016-10-31 13:50:13 -04003088
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003089 if (!sde_enc->frame_busy_mask[0]) {
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003090 sde_encoder_resource_control(drm_enc,
3091 SDE_ENC_RC_EVENT_FRAME_DONE);
3092
3093 if (sde_enc->crtc_frame_event_cb)
3094 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003095 &sde_enc->crtc_frame_event_cb_data,
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003096 event);
3097 }
3098 } else {
Alan Kwong628d19e2016-10-31 13:50:13 -04003099 if (sde_enc->crtc_frame_event_cb)
3100 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003101 &sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04003102 }
3103}
3104
Dhaval Patel8a7c3282017-12-05 00:41:58 -08003105int sde_encoder_idle_request(struct drm_encoder *drm_enc)
3106{
3107 struct sde_encoder_virt *sde_enc;
3108
3109 if (!drm_enc) {
3110 SDE_ERROR("invalid drm encoder\n");
3111 return -EINVAL;
3112 }
3113
3114 sde_enc = to_sde_encoder_virt(drm_enc);
3115 sde_encoder_resource_control(&sde_enc->base,
3116 SDE_ENC_RC_EVENT_ENTER_IDLE);
3117
3118 return 0;
3119}
3120
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phys: Pointer to physical encoder structure
 * @extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the pending-kickoff accounting, merges @extra_flush_bits
 * into the ctl's pending flush mask, and triggers the flush through the
 * physical encoder. Skipped entirely for encoders in the SKIP split
 * role, which never receive irqs.
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	/* SKIP-role encoders receive no irqs, so never flush them */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	/* count this kickoff before triggering, so irqs see a pending one */
	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* retire fences are only signalled through the master encoder */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	/* log the flush mask when readable, else just the kickoff count */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				pending_kickoff_cnt, ctl->idx - CTL_0,
				ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				ctl->idx - CTL_0, pending_kickoff_cnt);
}
3176
3177/**
3178 * _sde_encoder_trigger_start - trigger start for a physical encoder
3179 * phys: Pointer to physical encoder structure
3180 */
3181static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
3182{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003183 struct sde_hw_ctl *ctl;
3184
Clarence Ip110d15c2016-08-16 14:44:41 -04003185 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003186 SDE_ERROR("invalid argument(s)\n");
3187 return;
3188 }
3189
3190 if (!phys->hw_pp) {
3191 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04003192 return;
3193 }
3194
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07003195 /* avoid ctrl start for encoder in clone mode */
3196 if (phys->in_clone_mode)
3197 return;
3198
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003199 ctl = phys->hw_ctl;
3200 if (phys->split_role == ENC_ROLE_SKIP) {
3201 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
3202 "skip start pp%d ctl%d\n",
3203 phys->hw_pp->idx - PINGPONG_0,
3204 ctl->idx - CTL_0);
3205 return;
3206 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003207 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
3208 phys->ops.trigger_start(phys);
3209}
3210
Alan Kwong4212dd42017-09-19 17:22:33 -04003211void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
3212{
3213 struct sde_hw_ctl *ctl;
3214
3215 if (!phys_enc) {
3216 SDE_ERROR("invalid encoder\n");
3217 return;
3218 }
3219
3220 ctl = phys_enc->hw_ctl;
3221 if (ctl && ctl->ops.trigger_flush)
3222 ctl->ops.trigger_flush(ctl);
3223}
3224
Clarence Ip110d15c2016-08-16 14:44:41 -04003225void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
3226{
3227 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04003228
3229 if (!phys_enc) {
3230 SDE_ERROR("invalid encoder\n");
3231 return;
3232 }
3233
3234 ctl = phys_enc->hw_ctl;
3235 if (ctl && ctl->ops.trigger_start) {
3236 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04003237 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04003238 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003239}
3240
/**
 * _sde_encoder_wait_timeout - wait until a counter drains or a deadline
 *	passes
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @timeout_ms: total time budget in milliseconds
 * @info: wait descriptor holding the wait queue and the atomic counter
 *
 * Sleeps on info->wq until info->atomic_cnt reaches zero. Because
 * wait_event_timeout() can return 0 (timeout) spuriously early relative
 * to the wall clock, the wait is retried while the counter is still
 * nonzero and the absolute deadline (captured up front with ktime) has
 * not yet passed.
 *
 * Return: the last wait_event_timeout() result — 0 on timeout, nonzero
 * (remaining jiffies) when the counter drained.
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	/* absolute deadline, so retries never extend the total budget */
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
3262
3263int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
3264 struct sde_encoder_wait_info *info)
3265{
3266 int rc;
3267 ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);
3268
3269 rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);
3270
3271 /**
3272 * handle disabled irq case where timer irq is also delayed.
3273 * wait for additional timeout of FAULT_TOLERENCE_WAIT_IN_MS
3274 * if it event_timeout expired late detected.
3275 */
3276 if (atomic_read(info->atomic_cnt) && (!rc) &&
3277 (ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
3278 FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
3279 rc = _sde_encoder_wait_timeout(drm_id, hw_id,
3280 FAULT_TOLERENCE_WAIT_IN_MS, info);
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003281
3282 return rc;
3283}
3284
/**
 * sde_encoder_helper_hw_reset - hardware reset handling for a physical
 *	encoder
 * @phys_enc: physical encoder to reset
 *
 * On the master encoder, additionally requests a connector-level soft
 * reset of the attached display; a soft-reset failure triggers a debug
 * dump. Finally forces the encoder enable state back to ENABLED.
 *
 * NOTE(review): ctl->ops.reset is validated as a precondition but never
 * invoked in this function body — confirm the ctl reset is performed
 * elsewhere in this driver version.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	/* nothing to do when the ctl cannot be reset */
	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* only the master owns the connector, so only it soft-resets it */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
					"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
3324
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;
	struct msm_drm_private *priv = NULL;
	struct sde_kms *sde_kms = NULL;
	bool is_vid_mode = false;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	is_vid_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_VID_MODE;


	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		/* make reg dma kickoff as blocking for vidoe-mode */
		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
					is_vid_mode);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/* flush now, or defer into the combined master flush below */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	/* drop the temporarily-elevated AHB bus vote now that hw kicked off */
	if (sde_enc->elevated_ahb_vote) {
		priv = sde_enc->base.dev->dev_private;
		if (priv != NULL) {
			sde_kms = to_sde_kms(priv->kms);
			if (sde_kms != NULL) {
				sde_power_scale_reg_bus(&priv->phandle,
						sde_kms->core_client,
						VOTE_INDEX_LOW,
						false);
			}
		}
		sde_enc->elevated_ahb_vote = false;
	}
}
3435
/**
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - swap interfaces
 *	on a ppsplit topology so the master always has an irq-capable intf
 * @drm_enc: base drm encoder of the virtual encoder
 * @affected_displays: in/out bitmask of displays touched by this update
 * @num_active_phys: number of active physical encoders in this update
 *
 * No-op for topologies other than PPSPLIT. Tracks the swap in
 * sde_enc->intfs_swapped so repeated right-only updates don't re-swap.
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3494
/**
 * _sde_encoder_update_master - reassign split roles for a partial update
 * @drm_enc: base drm encoder of the virtual encoder
 * @params: kickoff parameters carrying the affected-displays bitmask
 *
 * For multi-phys encoders, assigns SOLO/MASTER/SLAVE/SKIP roles based on
 * which displays the upcoming kickoff touches, and updates
 * sde_enc->cur_master to the encoder holding the SOLO or MASTER role.
 * Single-phys encoders are left untouched.
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* role juggling only applies to split configurations */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
			num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/* first active phys becomes master (or solo if alone) */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
3557
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303558bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003559{
3560 struct sde_encoder_virt *sde_enc;
3561 struct msm_display_info *disp_info;
3562
3563 if (!drm_enc) {
3564 SDE_ERROR("invalid encoder\n");
3565 return false;
3566 }
3567
3568 sde_enc = to_sde_encoder_virt(drm_enc);
3569 disp_info = &sde_enc->disp_info;
3570
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303571 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003572}
3573
Dhaval Patel0e558f42017-04-30 00:51:40 -07003574void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3575{
3576 struct sde_encoder_virt *sde_enc;
3577 struct sde_encoder_phys *phys;
3578 unsigned int i;
3579 struct sde_hw_ctl *ctl;
3580 struct msm_display_info *disp_info;
3581
3582 if (!drm_enc) {
3583 SDE_ERROR("invalid encoder\n");
3584 return;
3585 }
3586 sde_enc = to_sde_encoder_virt(drm_enc);
3587 disp_info = &sde_enc->disp_info;
3588
3589 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3590 phys = sde_enc->phys_encs[i];
3591
3592 if (phys && phys->hw_ctl) {
3593 ctl = phys->hw_ctl;
3594 if (ctl->ops.clear_pending_flush)
3595 ctl->ops.clear_pending_flush(ctl);
3596
3597 /* update only for command mode primary ctl */
3598 if ((phys == sde_enc->cur_master) &&
3599 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3600 && ctl->ops.trigger_pending)
3601 ctl->ops.trigger_pending(ctl);
3602 }
3603 }
3604}
3605
Ping Li8430ee12017-02-24 14:14:44 -08003606static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3607{
3608 void *dither_cfg;
Ping Li16162692018-05-08 14:13:46 -07003609 int ret = 0, rc, i = 0;
Ping Li8430ee12017-02-24 14:14:44 -08003610 size_t len = 0;
3611 enum sde_rm_topology_name topology;
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003612 struct drm_encoder *drm_enc;
3613 struct msm_mode_info mode_info;
3614 struct msm_display_dsc_info *dsc = NULL;
3615 struct sde_encoder_virt *sde_enc;
Ping Li16162692018-05-08 14:13:46 -07003616 struct sde_hw_pingpong *hw_pp;
Ping Li8430ee12017-02-24 14:14:44 -08003617
3618 if (!phys || !phys->connector || !phys->hw_pp ||
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003619 !phys->hw_pp->ops.setup_dither || !phys->parent)
Ping Li8430ee12017-02-24 14:14:44 -08003620 return;
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003621
Ping Li8430ee12017-02-24 14:14:44 -08003622 topology = sde_connector_get_topology_name(phys->connector);
3623 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3624 (phys->split_role == ENC_ROLE_SLAVE))
3625 return;
3626
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003627 drm_enc = phys->parent;
3628 sde_enc = to_sde_encoder_virt(drm_enc);
3629 rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
3630 if (rc) {
3631 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
3632 return;
3633 }
3634
3635 dsc = &mode_info.comp_info.dsc_info;
3636 /* disable dither for 10 bpp or 10bpc dsc config */
3637 if (dsc->bpp == 10 || dsc->bpc == 10) {
3638 phys->hw_pp->ops.setup_dither(phys->hw_pp, NULL, 0);
Ping Li16162692018-05-08 14:13:46 -07003639 return;
3640 }
3641
3642 ret = sde_connector_get_dither_cfg(phys->connector,
3643 phys->connector->state, &dither_cfg, &len);
3644 if (ret)
3645 return;
3646
3647 if (TOPOLOGY_DUALPIPE_MERGE_MODE(topology)) {
3648 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
3649 hw_pp = sde_enc->hw_pp[i];
3650 if (hw_pp) {
3651 phys->hw_pp->ops.setup_dither(hw_pp, dither_cfg,
3652 len);
3653 }
3654 }
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003655 } else {
Ping Li16162692018-05-08 14:13:46 -07003656 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003657 }
Ping Li8430ee12017-02-24 14:14:44 -08003658}
3659
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003660static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3661 struct drm_display_mode *mode)
3662{
3663 u64 pclk_rate;
3664 u32 pclk_period;
3665 u32 line_time;
3666
3667 /*
3668 * For linetime calculation, only operate on master encoder.
3669 */
3670 if (!sde_enc->cur_master)
3671 return 0;
3672
3673 if (!sde_enc->cur_master->ops.get_line_count) {
3674 SDE_ERROR("get_line_count function not defined\n");
3675 return 0;
3676 }
3677
3678 pclk_rate = mode->clock; /* pixel clock in kHz */
3679 if (pclk_rate == 0) {
3680 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3681 return 0;
3682 }
3683
3684 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3685 if (pclk_period == 0) {
3686 SDE_ERROR("pclk period is 0\n");
3687 return 0;
3688 }
3689
3690 /*
3691 * Line time calculation based on Pixel clock and HTOTAL.
3692 * Final unit is in ns.
3693 */
3694 line_time = (pclk_period * mode->htotal) / 1000;
3695 if (line_time == 0) {
3696 SDE_ERROR("line time calculation is 0\n");
3697 return 0;
3698 }
3699
3700 SDE_DEBUG_ENC(sde_enc,
3701 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3702 pclk_rate, pclk_period, line_time);
3703
3704 return line_time;
3705}
3706
3707static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
3708 ktime_t *wakeup_time)
3709{
3710 struct drm_display_mode *mode;
3711 struct sde_encoder_virt *sde_enc;
3712 u32 cur_line;
3713 u32 line_time;
3714 u32 vtotal, time_to_vsync;
3715 ktime_t cur_time;
3716
3717 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003718 mode = &sde_enc->cur_master->cached_mode;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003719
3720 line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
3721 if (!line_time)
3722 return -EINVAL;
3723
3724 cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);
3725
3726 vtotal = mode->vtotal;
3727 if (cur_line >= vtotal)
3728 time_to_vsync = line_time * vtotal;
3729 else
3730 time_to_vsync = line_time * (vtotal - cur_line);
3731
3732 if (time_to_vsync == 0) {
3733 SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
3734 vtotal);
3735 return -EINVAL;
3736 }
3737
3738 cur_time = ktime_get();
3739 *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
3740
3741 SDE_DEBUG_ENC(sde_enc,
3742 "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
3743 cur_line, vtotal, time_to_vsync,
3744 ktime_to_ms(cur_time),
3745 ktime_to_ms(*wakeup_time));
3746 return 0;
3747}
3748
3749static void sde_encoder_vsync_event_handler(unsigned long data)
3750{
3751 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3752 struct sde_encoder_virt *sde_enc;
3753 struct msm_drm_private *priv;
3754 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003755
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003756 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3757 SDE_ERROR("invalid encoder parameters\n");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003758 return;
3759 }
3760
3761 sde_enc = to_sde_encoder_virt(drm_enc);
3762 priv = drm_enc->dev->dev_private;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003763 if (!sde_enc->crtc) {
3764 SDE_ERROR("invalid crtc");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003765 return;
3766 }
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003767
3768 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3769 SDE_ERROR("invalid crtc index:%u\n",
3770 sde_enc->crtc->index);
3771 return;
3772 }
3773 event_thread = &priv->event_thread[sde_enc->crtc->index];
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003774 if (!event_thread) {
3775 SDE_ERROR("event_thread not found for crtc:%d\n",
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003776 sde_enc->crtc->index);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003777 return;
3778 }
3779
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303780 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003781 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003782}
3783
Dhaval Patel222023e2018-02-27 12:24:07 -08003784static void sde_encoder_esd_trigger_work_handler(struct kthread_work *work)
3785{
3786 struct sde_encoder_virt *sde_enc = container_of(work,
3787 struct sde_encoder_virt, esd_trigger_work);
3788
3789 if (!sde_enc) {
3790 SDE_ERROR("invalid sde encoder\n");
3791 return;
3792 }
3793
3794 sde_encoder_resource_control(&sde_enc->base,
3795 SDE_ENC_RC_EVENT_KICKOFF);
3796}
3797
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08003798static void sde_encoder_input_event_work_handler(struct kthread_work *work)
3799{
3800 struct sde_encoder_virt *sde_enc = container_of(work,
3801 struct sde_encoder_virt, input_event_work);
3802
3803 if (!sde_enc) {
3804 SDE_ERROR("invalid sde encoder\n");
3805 return;
3806 }
3807
3808 sde_encoder_resource_control(&sde_enc->base,
3809 SDE_ENC_RC_EVENT_EARLY_WAKEUP);
3810}
3811
/*
 * sde_encoder_vsync_event_work_handler - vsync event work item; while
 * holding a power vote, re-arms the vsync event timer for the next vsync
 * if (and only if) autorefresh is enabled on the current master.
 * @work:	kthread work embedded in struct sde_encoder_virt
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* hold a power vote while querying autorefresh/line-count hardware */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
				sde_enc->cur_master);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		goto exit;

	rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
	if (rc)
		goto exit;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));

exit:
	/* always drop the power vote taken above */
	_sde_encoder_power_enable(sde_enc, false);
}
3852
/*
 * sde_encoder_poll_line_counts - busy-poll the master's line counter until
 * it wraps (counter decreases between two reads, i.e. a new frame started)
 * or an overall ~50ms deadline expires.
 * @drm_enc:	encoder pointer
 * Returns: 0 once a wrap is observed, -EINVAL on bad args/missing ops,
 * -ETIMEDOUT if no wrap is seen before the deadline.
 */
int sde_encoder_poll_line_counts(struct drm_encoder *drm_enc)
{
	static const uint64_t timeout_us = 50000;
	static const uint64_t sleep_us = 20;
	struct sde_encoder_virt *sde_enc;
	ktime_t cur_ktime, exp_ktime;
	uint32_t line_count, tmp, i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc->cur_master ||
			!sde_enc->cur_master->ops.get_line_count) {
		SDE_DEBUG_ENC(sde_enc, "can't get master line count\n");
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_ERROR);
		return -EINVAL;
	}

	/* absolute deadline for the whole poll, in addition to the
	 * iteration cap below
	 */
	exp_ktime = ktime_add_ms(ktime_get(), timeout_us / 1000);

	line_count = sde_enc->cur_master->ops.get_line_count(
			sde_enc->cur_master);

	/* iteration cap of 2x the nominal timeout/sleep ratio */
	for (i = 0; i < (timeout_us * 2 / sleep_us); ++i) {
		tmp = line_count;
		line_count = sde_enc->cur_master->ops.get_line_count(
				sde_enc->cur_master);

		/* counter went backwards: the scanout wrapped to a new frame */
		if (line_count < tmp) {
			SDE_EVT32(DRMID(drm_enc), line_count);
			return 0;
		}

		cur_ktime = ktime_get();
		if (ktime_compare_safe(exp_ktime, cur_ktime) <= 0)
			break;

		usleep_range(sleep_us / 2, sleep_us);
	}

	SDE_EVT32(DRMID(drm_enc), line_count, SDE_EVTLOG_ERROR);
	return -ETIMEDOUT;
}
3897
/*
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for the
 * next frame kickoff: per-phys prepare (may wait on the previous kickoff),
 * dither setup, resource-control vote, optional hardware reset recovery,
 * master/ROI update, connector pre-kickoff and DSC setup.
 * @drm_enc:	encoder pointer
 * @params:	kickoff parameters passed through to each physical encoder
 * Returns: 0 on success; otherwise the last error recorded from the
 * per-phys prepare, connector pre-kickoff, or DSC setup steps. A failed
 * resource-control vote returns immediately with its own error code.
 */
int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	struct sde_kms *sde_kms = NULL;
	struct msm_drm_private *priv = NULL;
	bool needs_hw_reset = false;
	/* NOTE(review): u32 receiving -EINVAL as an "invalid" sentinel;
	 * only ever logged to the event trace, never compared — confirm
	 */
	uint32_t ln_cnt1, ln_cnt2;
	unsigned int i;
	int rc, ret = 0;

	if (!drm_enc || !params || !drm_enc->dev ||
		!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* save this for later, in case of errors */
	if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
		ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
				sde_enc->cur_master);
	else
		ln_cnt1 = -EINVAL;

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		params->is_primary = sde_enc->disp_info.is_primary;
		if (phys) {
			if (phys->ops.prepare_for_kickoff) {
				rc = phys->ops.prepare_for_kickoff(
						phys, params);
				/* record the error but keep preparing the
				 * remaining physical encoders
				 */
				if (rc)
					ret = rc;
			}
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
		return rc;
	}

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		/* query line count before cur_master is updated */
		if (sde_enc->cur_master &&
				sde_enc->cur_master->ops.get_wr_line_count)
			ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
					sde_enc->cur_master);
		else
			ln_cnt2 = -EINVAL;

		SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
				SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
			ret = rc;
		}
	}

	/* skip DSC programming while continuous splash is still active */
	if (_sde_encoder_is_dsc_enabled(drm_enc) &&
		!sde_kms->splash_data.cont_splash_en) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
			ret = rc;
		}
	}

	return ret;
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003998
Clarence Ip662698e2017-09-12 18:34:16 -04003999/**
4000 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
4001 * with the specified encoder, and unstage all pipes from it
4002 * @encoder: encoder pointer
4003 * Returns: 0 on success
4004 */
4005static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
4006{
4007 struct sde_encoder_virt *sde_enc;
4008 struct sde_encoder_phys *phys;
4009 unsigned int i;
4010 int rc = 0;
4011
4012 if (!drm_enc) {
4013 SDE_ERROR("invalid encoder\n");
4014 return -EINVAL;
4015 }
4016
4017 sde_enc = to_sde_encoder_virt(drm_enc);
4018
4019 SDE_ATRACE_BEGIN("encoder_release_lm");
4020 SDE_DEBUG_ENC(sde_enc, "\n");
4021
4022 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4023 phys = sde_enc->phys_encs[i];
4024 if (!phys)
4025 continue;
4026
4027 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
4028
4029 rc = sde_encoder_helper_reset_mixers(phys, NULL);
4030 if (rc)
4031 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
4032 }
4033
4034 SDE_ATRACE_END("encoder_release_lm");
4035 return rc;
4036}
4037
/*
 * sde_encoder_kickoff - trigger the frame kickoff on all physical encoders,
 * then let each handle post-kickoff work; on DSI primary displays, also
 * re-arms the vsync event timer for the next expected vsync.
 * @drm_enc:	encoder pointer
 * @is_error:	when true, first push a 'no pipes' configuration so buffers
 *		held by a failed commit are released
 */
void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_ATRACE_BEGIN("encoder_kickoff");
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	/* create a 'no pipes' commit to release buffers on errors */
	if (is_error)
		_sde_encoder_reset_ctl_hw(drm_enc);

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	/* schedule the vsync event work for DSI primary displays only */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
			sde_enc->disp_info.is_primary &&
			!_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
		mod_timer(&sde_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	SDE_ATRACE_END("encoder_kickoff");
}
4078
/*
 * sde_encoder_helper_reset_mixers - clear all blend stages on the phys
 * encoder's CTL, then iterate every layer mixer reserved for this encoder:
 * flush each LM, optionally size it to the given framebuffer, and program
 * an empty (border color only) blend stage.
 * @phys_enc:	physical encoder pointer
 * @fb:		optional framebuffer; when set, a single LM is resized to
 *		the fb dimensions and any additional LMs are skipped
 * Returns: 0 on success, -EINVAL on bad args, -EFAULT if no LM was found.
 *
 * NOTE(review): phys_enc->hw_ctl is dereferenced without a NULL check —
 * callers appear to guarantee it is set; confirm before reuse elsewhere.
 */
int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
		struct drm_framebuffer *fb)
{
	struct drm_encoder *drm_enc;
	struct sde_hw_mixer_cfg mixer;
	struct sde_rm_hw_iter lm_iter;
	bool lm_valid = false;

	if (!phys_enc || !phys_enc->parent) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	drm_enc = phys_enc->parent;
	memset(&mixer, 0, sizeof(mixer));

	/* reset associated CTL/LMs */
	if (phys_enc->hw_ctl->ops.clear_all_blendstages)
		phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);

	sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
	while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
		struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;

		if (!hw_lm)
			continue;

		/* need to flush LM to remove it */
		if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
				phys_enc->hw_ctl->ops.update_pending_flush)
			phys_enc->hw_ctl->ops.update_pending_flush(
					phys_enc->hw_ctl,
					phys_enc->hw_ctl->ops.get_bitmask_mixer(
					phys_enc->hw_ctl, hw_lm->idx));

		if (fb) {
			/* assume a single LM if targeting a frame buffer */
			if (lm_valid)
				continue;

			mixer.out_height = fb->height;
			mixer.out_width = fb->width;

			if (hw_lm->ops.setup_mixer_out)
				hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
		}

		lm_valid = true;

		/* only enable border color on LM */
		if (phys_enc->hw_ctl->ops.setup_blendstage)
			phys_enc->hw_ctl->ops.setup_blendstage(
					phys_enc->hw_ctl, hw_lm->cfg.flags,
					hw_lm->idx, NULL);
	}

	if (!lm_valid) {
		SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
		return -EFAULT;
	}
	return 0;
}
4141
Lloyd Atkinsone123c172017-02-27 13:19:08 -05004142void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
4143{
4144 struct sde_encoder_virt *sde_enc;
4145 struct sde_encoder_phys *phys;
4146 int i;
4147
4148 if (!drm_enc) {
4149 SDE_ERROR("invalid encoder\n");
4150 return;
4151 }
4152 sde_enc = to_sde_encoder_virt(drm_enc);
4153
4154 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4155 phys = sde_enc->phys_encs[i];
4156 if (phys && phys->ops.prepare_commit)
4157 phys->ops.prepare_commit(phys);
4158 }
4159}
4160
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07004161#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004162static int _sde_encoder_status_show(struct seq_file *s, void *data)
4163{
4164 struct sde_encoder_virt *sde_enc;
4165 int i;
4166
4167 if (!s || !s->private)
4168 return -EINVAL;
4169
4170 sde_enc = s->private;
4171
4172 mutex_lock(&sde_enc->enc_lock);
4173 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4174 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4175
4176 if (!phys)
4177 continue;
4178
4179 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
4180 phys->intf_idx - INTF_0,
4181 atomic_read(&phys->vsync_cnt),
4182 atomic_read(&phys->underrun_cnt));
4183
4184 switch (phys->intf_mode) {
4185 case INTF_MODE_VIDEO:
4186 seq_puts(s, "mode: video\n");
4187 break;
4188 case INTF_MODE_CMD:
4189 seq_puts(s, "mode: command\n");
4190 break;
4191 case INTF_MODE_WB_BLOCK:
4192 seq_puts(s, "mode: wb block\n");
4193 break;
4194 case INTF_MODE_WB_LINE:
4195 seq_puts(s, "mode: wb line\n");
4196 break;
4197 default:
4198 seq_puts(s, "mode: ???\n");
4199 break;
4200 }
4201 }
4202 mutex_unlock(&sde_enc->enc_lock);
4203
4204 return 0;
4205}
4206
/* debugfs open hook: bind the seq_file show to this virtual encoder */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
4212
/*
 * _sde_encoder_misr_setup - debugfs write handler for "misr_data"; parses
 * "<enable> <frame_count>" from userspace and programs MISR on every
 * physical encoder that supports it, under a power vote and enc_lock.
 * @file:	debugfs file whose private_data is the virtual encoder
 * @user_buf:	userspace buffer with the two decimal values
 * @count:	bytes available in user_buf
 * @ppos:	file position (unused)
 * Returns: count on success, -EINVAL on bad input, or power-vote error.
 */
static ssize_t _sde_encoder_misr_setup(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, rc;
	char buf[MISR_BUFF_SIZE + 1];
	size_t buff_copy;
	u32 frame_count, enable;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* bound the copy so the terminator below stays inside buf */
	buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
	if (copy_from_user(buf, user_buf, buff_copy))
		return -EINVAL;

	buf[buff_copy] = 0; /* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EINVAL;

	/* hardware must be powered before programming MISR registers */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	sde_enc->misr_enable = enable;
	sde_enc->misr_frame_count = frame_count;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.setup_misr)
			continue;

		phys->ops.setup_misr(phys, enable, frame_count);
	}
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);

	return count;
}
4256
/*
 * _sde_encoder_misr_read - debugfs read handler for "misr_data"; reports
 * "disabled"/"unsupported" or the collected MISR value per interface,
 * under a power vote and enc_lock.
 * @file:	debugfs file whose private_data is the virtual encoder
 * @user_buff:	destination userspace buffer
 * @count:	size of the userspace buffer
 * @ppos:	file position; nonzero means everything was already read
 * Returns: bytes copied, 0 at EOF/too-small buffer, or a negative error.
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/* NOTE(review): this tests "any capability other than
		 * VID_MODE is set", not "VID_MODE is absent" — looks like
		 * MISR is meant to be video-mode-only; confirm intent
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* report nothing rather than truncate into a too-small buffer */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
4319
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004320static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004321{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004322 struct sde_encoder_virt *sde_enc;
4323 struct msm_drm_private *priv;
4324 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07004325 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004326
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004327 static const struct file_operations debugfs_status_fops = {
4328 .open = _sde_encoder_debugfs_status_open,
4329 .read = seq_read,
4330 .llseek = seq_lseek,
4331 .release = single_release,
4332 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304333
4334 static const struct file_operations debugfs_misr_fops = {
4335 .open = simple_open,
4336 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004337 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304338 };
4339
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004340 char name[SDE_NAME_SIZE];
4341
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004342 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004343 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004344 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004345 }
4346
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004347 sde_enc = to_sde_encoder_virt(drm_enc);
4348 priv = drm_enc->dev->dev_private;
4349 sde_kms = to_sde_kms(priv->kms);
4350
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004351 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
4352
4353 /* create overall sub-directory for the encoder */
4354 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07004355 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004356 if (!sde_enc->debugfs_root)
4357 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304358
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004359 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004360 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004361 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304362
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004363 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004364 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004365
Alan Kwongf2debb02017-04-05 06:19:29 -07004366 for (i = 0; i < sde_enc->num_phys_encs; i++)
4367 if (sde_enc->phys_encs[i] &&
4368 sde_enc->phys_encs[i]->ops.late_register)
4369 sde_enc->phys_encs[i]->ops.late_register(
4370 sde_enc->phys_encs[i],
4371 sde_enc->debugfs_root);
4372
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004373 return 0;
4374}
4375
4376static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
4377{
4378 struct sde_encoder_virt *sde_enc;
4379
4380 if (!drm_enc)
4381 return;
4382
4383 sde_enc = to_sde_encoder_virt(drm_enc);
4384 debugfs_remove_recursive(sde_enc->debugfs_root);
4385}
4386#else
/* no-op stub when CONFIG_DEBUG_FS is disabled */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
4391
/* no-op stub when CONFIG_DEBUG_FS is disabled */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
4395#endif
4396
/* drm_encoder_funcs late_register hook: set up debugfs entries */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
4401
/* drm_encoder_funcs early_unregister hook: tear down debugfs entries */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
4406
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004407static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04004408 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004409 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004410 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004411{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004412 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004413
Clarence Ip19af1362016-09-23 14:57:51 -04004414 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004415
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004416 /*
4417 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
4418 * in this function, check up-front.
4419 */
4420 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
4421 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004422 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004423 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004424 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004425 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004426
Clarence Ipa4039322016-07-15 16:23:59 -04004427 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004428 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004429
4430 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004431 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004432 PTR_ERR(enc));
4433 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4434 }
4435
4436 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4437 ++sde_enc->num_phys_encs;
4438 }
4439
Clarence Ipa4039322016-07-15 16:23:59 -04004440 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004441 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004442
4443 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004444 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004445 PTR_ERR(enc));
4446 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4447 }
4448
4449 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4450 ++sde_enc->num_phys_encs;
4451 }
4452
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004453 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004454}
4455
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004456static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
4457 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04004458{
4459 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04004460
Clarence Ip19af1362016-09-23 14:57:51 -04004461 if (!sde_enc) {
4462 SDE_ERROR("invalid encoder\n");
4463 return -EINVAL;
4464 }
4465
4466 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04004467
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004468 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004469 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004470 sde_enc->num_phys_encs);
4471 return -EINVAL;
4472 }
4473
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004474 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004475
4476 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004477 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004478 PTR_ERR(enc));
4479 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4480 }
4481
4482 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4483 ++sde_enc->num_phys_encs;
4484
4485 return 0;
4486}
4487
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004488static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004489 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04004490 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004491 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004492{
4493 int ret = 0;
4494 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004495 enum sde_intf_type intf_type;
4496 struct sde_encoder_virt_ops parent_ops = {
4497 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07004498 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04004499 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004500 };
4501 struct sde_enc_phys_init_params phys_params;
4502
Clarence Ip19af1362016-09-23 14:57:51 -04004503 if (!sde_enc || !sde_kms) {
4504 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
4505 sde_enc != 0, sde_kms != 0);
4506 return -EINVAL;
4507 }
4508
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004509 memset(&phys_params, 0, sizeof(phys_params));
4510 phys_params.sde_kms = sde_kms;
4511 phys_params.parent = &sde_enc->base;
4512 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004513 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Raviteja Tamatam3ea60b82018-04-27 15:41:18 +05304514 phys_params.vblank_ctl_lock = &sde_enc->vblank_ctl_lock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004515
Clarence Ip19af1362016-09-23 14:57:51 -04004516 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004517
Clarence Ipa4039322016-07-15 16:23:59 -04004518 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004519 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
4520 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04004521 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004522 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4523 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07004524 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
4525 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4526 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04004527 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
4528 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
4529 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004530 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04004531 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004532 return -EINVAL;
4533 }
4534
Clarence Ip88270a62016-06-26 10:09:34 -04004535 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004536
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004537 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
4538
Clarence Ip19af1362016-09-23 14:57:51 -04004539 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004540
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004541 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
4542 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07004543 sde_enc->idle_pc_enabled = sde_kms->catalog->has_idle_pc;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004544
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004545 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04004546 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004547 /*
4548 * Left-most tile is at index 0, content is controller id
4549 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
4550 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
4551 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004552 u32 controller_id = disp_info->h_tile_instance[i];
4553
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004554 if (disp_info->num_of_h_tiles > 1) {
4555 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004556 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004557 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004558 phys_params.split_role = ENC_ROLE_SLAVE;
4559 } else {
4560 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004561 }
4562
Clarence Ip19af1362016-09-23 14:57:51 -04004563 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004564 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004565
Alan Kwongbb27c092016-07-20 16:41:25 -04004566 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004567 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004568 phys_params.wb_idx = sde_encoder_get_wb(
4569 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04004570 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004571 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004572 SDE_ERROR_ENC(sde_enc,
4573 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004574 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004575 ret = -EINVAL;
4576 }
Alan Kwongbb27c092016-07-20 16:41:25 -04004577 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004578 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004579 phys_params.intf_idx = sde_encoder_get_intf(
4580 sde_kms->catalog, intf_type,
4581 controller_id);
4582 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004583 SDE_ERROR_ENC(sde_enc,
4584 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004585 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004586 ret = -EINVAL;
4587 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004588 }
4589
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004590 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04004591 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004592 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
4593 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004594 else
4595 ret = sde_encoder_virt_add_phys_encs(
4596 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004597 sde_enc,
4598 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004599 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04004600 SDE_ERROR_ENC(sde_enc,
4601 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004602 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004603 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08004604
4605 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4606 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4607
4608 if (phys) {
4609 atomic_set(&phys->vsync_cnt, 0);
4610 atomic_set(&phys->underrun_cnt, 0);
4611 }
4612 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004613 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004614
4615 return ret;
4616}
4617
/* drm_encoder helper vtable: routes atomic modeset callbacks for the
 * virtual encoder to the sde_encoder_virt_* implementations
 */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4624
/* drm_encoder core vtable: lifetime (destroy) plus debugfs
 * register/unregister hooks for the virtual encoder
 */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4630
Clarence Ip3649f8b2016-10-31 09:59:44 -04004631struct drm_encoder *sde_encoder_init(
4632 struct drm_device *dev,
4633 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004634{
4635 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04004636 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004637 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004638 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004639 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08004640 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004641 int ret = 0;
4642
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004643 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
4644 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004645 ret = -ENOMEM;
4646 goto fail;
4647 }
4648
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004649 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004650 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
4651 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004652 if (ret)
4653 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004654
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004655 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004656 spin_lock_init(&sde_enc->enc_spinlock);
Raviteja Tamatam3ea60b82018-04-27 15:41:18 +05304657 mutex_init(&sde_enc->vblank_ctl_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004658 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07004659 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004660 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004661
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004662 if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
4663 disp_info->is_primary)
4664 setup_timer(&sde_enc->vsync_event_timer,
4665 sde_encoder_vsync_event_handler,
4666 (unsigned long)sde_enc);
4667
Dhaval Patel020f7e122016-11-15 14:39:18 -08004668 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
4669 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004670 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08004671 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07004672 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08004673 PTR_ERR(sde_enc->rsc_client));
4674 sde_enc->rsc_client = NULL;
4675 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004676
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004677 if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
4678 ret = _sde_encoder_input_handler(sde_enc);
4679 if (ret)
4680 SDE_ERROR(
4681 "input handler registration failed, rc = %d\n", ret);
4682 }
4683
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004684 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04004685 kthread_init_delayed_work(&sde_enc->delayed_off_work,
4686 sde_encoder_off_work);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07004687 sde_enc->vblank_enabled = false;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004688
4689 kthread_init_work(&sde_enc->vsync_event_work,
4690 sde_encoder_vsync_event_work_handler);
4691
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08004692 kthread_init_work(&sde_enc->input_event_work,
4693 sde_encoder_input_event_work_handler);
4694
Dhaval Patel222023e2018-02-27 12:24:07 -08004695 kthread_init_work(&sde_enc->esd_trigger_work,
4696 sde_encoder_esd_trigger_work_handler);
4697
Dhaval Patel020f7e122016-11-15 14:39:18 -08004698 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
4699
Clarence Ip19af1362016-09-23 14:57:51 -04004700 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004701
4702 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004703
4704fail:
Clarence Ip19af1362016-09-23 14:57:51 -04004705 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004706 if (drm_enc)
4707 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004708
4709 return ERR_PTR(ret);
4710}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004711
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004712int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4713 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004714{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004715 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004716 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004717 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004718
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004719 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004720 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004721 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004722 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004723 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004724 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004725
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004726 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4727 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004728
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004729 switch (event) {
4730 case MSM_ENC_COMMIT_DONE:
4731 fn_wait = phys->ops.wait_for_commit_done;
4732 break;
4733 case MSM_ENC_TX_COMPLETE:
4734 fn_wait = phys->ops.wait_for_tx_complete;
4735 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004736 case MSM_ENC_VBLANK:
4737 fn_wait = phys->ops.wait_for_vblank;
4738 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304739 case MSM_ENC_ACTIVE_REGION:
4740 fn_wait = phys->ops.wait_for_active;
4741 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004742 default:
4743 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4744 event);
4745 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004746 };
4747
4748 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004749 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004750 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004751 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004752 if (ret)
4753 return ret;
4754 }
4755 }
4756
4757 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004758}
4759
Alan Kwong67a3f792016-11-01 23:16:53 -04004760enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4761{
4762 struct sde_encoder_virt *sde_enc = NULL;
4763 int i;
4764
4765 if (!encoder) {
4766 SDE_ERROR("invalid encoder\n");
4767 return INTF_MODE_NONE;
4768 }
4769 sde_enc = to_sde_encoder_virt(encoder);
4770
4771 if (sde_enc->cur_master)
4772 return sde_enc->cur_master->intf_mode;
4773
4774 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4775 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4776
4777 if (phys)
4778 return phys->intf_mode;
4779 }
4780
4781 return INTF_MODE_NONE;
4782}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004783
/**
 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
 *	device bootup when cont_splash is enabled
 * @encoder: Pointer to drm encoder structure
 *
 * Finds the connector attached to this encoder, queries its mode info,
 * reserves the hardware resources already in use by the bootloader splash,
 * manually applies the current mode to the bridge (normal mode_set is
 * skipped for cont-splash), and hands out pingpong/dsc/ctl blocks to the
 * physical encoders.
 *
 * Return: 0 if successful in updating the encoder structure, negative
 * error code otherwise (not a bool, despite the historical comment).
 */
int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct drm_connector *conn = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct drm_display_mode *drm_mode = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
	int ret = 0, i;

	if (!encoder) {
		SDE_ERROR("invalid drm enc\n");
		return -EINVAL;
	}

	if (!encoder->dev || !encoder->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = encoder->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);
	sde_enc = to_sde_encoder_virt(encoder);
	if (!priv->num_connectors) {
		SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
		return -EINVAL;
	}
	SDE_DEBUG_ENC(sde_enc,
			"num of connectors: %d\n", priv->num_connectors);

	/* locate the registered connector whose encoder is this one */
	for (i = 0; i < priv->num_connectors; i++) {
		SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
				priv->connectors[i]->base.id);
		sde_conn = to_sde_connector(priv->connectors[i]);
		if (!sde_conn->encoder) {
			SDE_DEBUG_ENC(sde_enc,
				"encoder not attached to connector\n");
			continue;
		}
		if (sde_conn->encoder->base.id
				== encoder->base.id) {
			conn = (priv->connectors[i]);
			break;
		}
	}

	if (!conn || !conn->state) {
		SDE_ERROR_ENC(sde_enc, "connector not found\n");
		return -EINVAL;
	}

	sde_conn_state = to_sde_connector_state(conn->state);

	if (!sde_conn->ops.get_mode_info) {
		SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
		return -EINVAL;
	}

	/* NOTE(review): encoder->crtc and its state are dereferenced here
	 * without a NULL check — presumably guaranteed by the cont-splash
	 * setup path; confirm against the caller.
	 */
	ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
			&sde_conn_state->mode_info,
			sde_kms->catalog->max_mixer_width,
			sde_conn->display);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"conn: ->get_mode_info failed. ret=%d\n", ret);
		return ret;
	}

	/* reserve the hw blocks the splash screen is already driving */
	ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"failed to reserve hw resources, %d\n", ret);
		return ret;
	}

	if (sde_conn->encoder) {
		conn->state->best_encoder = sde_conn->encoder;
		SDE_DEBUG_ENC(sde_enc,
			"configured cstate->best_encoder to ID = %d\n",
			conn->state->best_encoder->base.id);
	} else {
		SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
				conn->base.id);
	}

	SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
			sde_connector_get_topology_name(conn));
	drm_mode = &encoder->crtc->state->adjusted_mode;
	SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
			drm_mode->hdisplay, drm_mode->vdisplay);
	drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);

	if (encoder->bridge) {
		SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
		/*
		 * For cont-splash use case, we update the mode
		 * configurations manually. This will skip the
		 * usually mode set call when actual frame is
		 * pushed from framework. The bridge needs to
		 * be updated with the current drm mode by
		 * calling the bridge mode set ops.
		 */
		if (encoder->bridge->funcs) {
			SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
			encoder->bridge->funcs->mode_set(encoder->bridge,
						drm_mode, drm_mode);
		}
	} else {
		SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
	}

	/* collect the reserved pingpong blocks for this encoder */
	sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks */
	sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand one reserved CTL block to each phys encoder.
	 * NOTE(review): phys is dereferenced here without a NULL check,
	 * although the loop below treats NULL entries as an error —
	 * a NULL entry would fault here first; verify phys_encs[] cannot
	 * hold NULLs within num_phys_encs on this path.
	 */
	sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		phys->hw_ctl = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
			break;
		phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
	}

	/* propagate connector/splash state and run splash mode-set hooks */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys) {
			SDE_ERROR_ENC(sde_enc,
				"phys encoders not initialized\n");
			return -EINVAL;
		}

		/* update connector for master and slave phys encoders */
		phys->connector = conn;
		phys->cont_splash_single_flush =
			sde_kms->splash_data.single_flush_en;
		phys->cont_splash_settings = true;

		phys->hw_pp = sde_enc->hw_pp[i];
		if (phys->ops.cont_splash_mode_set)
			phys->ops.cont_splash_mode_set(phys, drm_mode);

		if (phys->ops.is_master && phys->ops.is_master(phys))
			sde_enc->cur_master = phys;
	}

	return ret;
}
Dhaval Patelef58f0b2018-01-22 19:13:52 -08004960
/*
 * sde_encoder_display_failure_notification - handle an ESD (panel
 * failure) notification for this encoder.
 * @enc: drm encoder whose display reported the failure
 *
 * Queues the ESD trigger work on the CRTC's event thread and waits for
 * it to finish, then switches vsync generation to the watchdog timer
 * and waits for any in-flight transfer to complete.
 *
 * Return: 0 on success, -EINVAL on invalid encoder/CRTC state.
 */
int sde_encoder_display_failure_notification(struct drm_encoder *enc)
{
	struct msm_drm_thread *event_thread = NULL;
	struct msm_drm_private *priv = NULL;
	struct sde_encoder_virt *sde_enc = NULL;

	if (!enc || !enc->dev || !enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}

	priv = enc->dev->dev_private;
	sde_enc = to_sde_encoder_virt(enc);
	/* need a cached CRTC with a valid index to pick the event thread */
	if (!sde_enc->crtc || (sde_enc->crtc->index
			>= ARRAY_SIZE(priv->event_thread))) {
		SDE_DEBUG_ENC(sde_enc,
			"invalid cached CRTC: %d or crtc index: %d\n",
			sde_enc->crtc == NULL,
			sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
		return -EINVAL;
	}

	SDE_EVT32_VERBOSE(DRMID(enc));

	event_thread = &priv->event_thread[sde_enc->crtc->index];

	/* run the ESD trigger on the event thread and wait for it */
	kthread_queue_work(&event_thread->worker,
			   &sde_enc->esd_trigger_work);
	kthread_flush_work(&sde_enc->esd_trigger_work);

	/**
	 * panel may stop generating te signal (vsync) during esd failure. rsc
	 * hardware may hang without vsync. Avoid rsc hang by generating the
	 * vsync from watchdog timer instead of panel.
	 */
	_sde_encoder_switch_to_watchdog_vsync(enc);

	/* best-effort wait; return value intentionally ignored */
	sde_encoder_wait_for_event(enc, MSM_ENC_TX_COMPLETE);

	return 0;
}