blob: 81c0d2ccd291fd9c4c9a51eeb0d425b4911cc04f [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070070#define MAX_CHANNELS_PER_ENC 2
71
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Raviteja Tamatam3eebe962017-10-26 09:55:24 +053076#define FAULT_TOLERENCE_DELTA_IN_MS 2
77
78#define FAULT_TOLERENCE_WAIT_IN_MS 5
79
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040080/* Maximum number of VSYNC wait attempts for RSC state transition */
81#define MAX_RSC_WAIT 5
82
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, sets RSC to IDLE and
 *	leaves the RC STATE in the PRE_OFF state.
 *	It should be followed by the STOP event as part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @SDE_ENC_RC_EVENT_PRE_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that a seamless mode switch is in progress. A client
 *	needs to turn off only the irq - leave clocks ON to reduce the mode
 *	switch latency.
 * @SDE_ENC_RC_EVENT_POST_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that the seamless mode switch is complete and resources
 *	are acquired. Clients want to turn on the irq again and update the rsc
 *	with the new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and request RSC with IDLE state
 *	and change the resource state to IDLE.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE
};
134
/**
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
150
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: Number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @vsync_event_timer:	vsync timer
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @topology:		topology of the display
 * @vblank_enabled:	boolean to track userspace vblank vote
 * @rsc_config:		rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @idle_timeout:	idle timeout duration in milliseconds
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;

	u32 idle_timeout;
};
254
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400255#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700256
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700257static struct drm_connector_state *_sde_encoder_get_conn_state(
258 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800259{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700260 struct msm_drm_private *priv;
261 struct sde_kms *sde_kms;
262 struct list_head *connector_list;
263 struct drm_connector *conn_iter;
264
265 if (!drm_enc) {
266 SDE_ERROR("invalid argument\n");
267 return NULL;
268 }
269
270 priv = drm_enc->dev->dev_private;
271 sde_kms = to_sde_kms(priv->kms);
272 connector_list = &sde_kms->dev->mode_config.connector_list;
273
274 list_for_each_entry(conn_iter, connector_list, head)
275 if (conn_iter->encoder == drm_enc)
276 return conn_iter->state;
277
278 return NULL;
279}
280
281static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
282 struct msm_mode_info *mode_info)
283{
284 struct drm_connector_state *conn_state;
285
286 if (!drm_enc || !mode_info) {
287 SDE_ERROR("invalid arguments\n");
288 return -EINVAL;
289 }
290
291 conn_state = _sde_encoder_get_conn_state(drm_enc);
292 if (!conn_state) {
293 SDE_ERROR("invalid connector state for the encoder: %d\n",
294 drm_enc->base.id);
295 return -EINVAL;
296 }
297
298 return sde_connector_get_mode_info(conn_state, mode_info);
299}
300
301static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
302{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400303 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700304 struct msm_mode_info mode_info;
305 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400306
307 if (!drm_enc)
308 return false;
309
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700310 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
311 if (rc) {
312 SDE_ERROR("failed to get mode info, enc: %d\n",
313 drm_enc->base.id);
314 return false;
315 }
316
317 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800318
319 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
320}
321
Dhaval Patele17e0ee2017-08-23 18:01:42 -0700322void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc, u32 idle_timeout)
323{
324 struct sde_encoder_virt *sde_enc;
325
326 if (!drm_enc)
327 return;
328
329 sde_enc = to_sde_encoder_virt(drm_enc);
330 sde_enc->idle_timeout = idle_timeout;
331}
332
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400333bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
334{
335 enum sde_rm_topology_name topology;
336 struct sde_encoder_virt *sde_enc;
337 struct drm_connector *drm_conn;
338
339 if (!drm_enc)
340 return false;
341
342 sde_enc = to_sde_encoder_virt(drm_enc);
343 if (!sde_enc->cur_master)
344 return false;
345
346 drm_conn = sde_enc->cur_master->connector;
347 if (!drm_conn)
348 return false;
349
350 topology = sde_connector_get_topology_name(drm_conn);
351 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
352 return true;
353
354 return false;
355}
356
Dhaval Patelf9245d62017-03-28 16:24:00 -0700357static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
358 bool enable)
359{
360 struct drm_encoder *drm_enc;
361 struct msm_drm_private *priv;
362 struct sde_kms *sde_kms;
363
364 if (!sde_enc) {
365 SDE_ERROR("invalid sde enc\n");
366 return -EINVAL;
367 }
368
369 drm_enc = &sde_enc->base;
370 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
371 SDE_ERROR("drm device invalid\n");
372 return -EINVAL;
373 }
374
375 priv = drm_enc->dev->dev_private;
376 if (!priv->kms) {
377 SDE_ERROR("invalid kms\n");
378 return -EINVAL;
379 }
380
381 sde_kms = to_sde_kms(priv->kms);
382
383 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
384 enable);
385}
386
/*
 * sde_encoder_helper_report_irq_timeout - record an interrupt timeout in the
 *	event log and report a frame-done error to the parent virtual encoder.
 * @phys_enc: physical encoder whose interrupt timed out
 * @intr_idx: index of the interrupt that timed out
 *
 * NOTE(review): phys_enc, phys_enc->parent and phys_enc->hw_pp are
 * dereferenced without NULL checks here; callers are presumed to have
 * validated them - confirm.
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	/* push an ERROR frame event so the frame is not left pending */
	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
401
/*
 * sde_encoder_helper_wait_for_irq - wait for an encoder interrupt to fire.
 * @phys_enc: physical encoder owning the interrupt
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 * @wait_info: wait parameters, including the pending-count atomic polled by
 *	the timeout helper
 *
 * On timeout, the hardware status register is re-read to recover the race
 * where the interrupt fired but the wait was not woken: if the status bit is
 * set, the IRQ callback is invoked manually (with local interrupts disabled,
 * mimicking interrupt context) and the wait is treated as successful.
 *
 * Return: 0 on success (including manual recovery and the irq-disabled
 * skip case), -EINVAL on bad parameters, -EWOULDBLOCK if the encoder is
 * disabled, -ETIMEDOUT if the interrupt genuinely never fired.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not currently registered/enabled; nothing to wait on */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* timed out: re-check HW status to catch a lost wakeup */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/* run the callback by hand as if in irq context */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt),
					irq_status, SDE_EVTLOG_ERROR);
		}
	} else {
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
483
/*
 * sde_encoder_helper_register_irq - look up, register a callback for, and
 *	enable one encoder interrupt.
 * @phys_enc: physical encoder owning the interrupt
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 *
 * Registration is idempotent: an already-registered irq (irq_idx >= 0) is
 * skipped with success. On any failure irq->irq_idx is reset to -EINVAL so
 * later wait/unregister calls treat the irq as unregistered.
 *
 * Return: 0 on success or when already registered; -EINVAL on bad
 * parameters or lookup failure; otherwise the core-irq error code.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration on enable failure */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
543
/*
 * sde_encoder_helper_unregister_irq - disable and unregister one encoder
 *	interrupt, marking it unregistered (irq_idx = -EINVAL) on exit.
 * @phys_enc: physical encoder owning the interrupt
 * @intr_idx: logical interrupt index into phys_enc->irq[]
 *
 * Disable/unregister failures are logged to the event log but do not abort
 * the teardown; the function still returns success in those cases.
 *
 * NOTE(review): unlike the register path, intr_idx is not range-checked
 * against INTR_IDX_MAX before indexing phys_enc->irq[] - confirm callers
 * always pass a valid index.
 *
 * Return: 0 (also for the double-unregister case, which is only logged),
 * or -EINVAL when phys_enc is NULL.
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* skip irqs that weren't registered; loudly logged, but non-fatal */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
				"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
				DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark unregistered only after logging the old index */
	irq->irq_idx = -EINVAL;

	return 0;
}
585
/*
 * sde_encoder_get_hw_resources - collect the hardware resources needed by
 *	all physical encoders of this virtual encoder.
 * @drm_enc: encoder whose resources are queried
 * @hw_res: output structure; zeroed here, then filled by each phys encoder
 * @conn_state: temporary connector state from the atomic_check phase
 *
 * Called from atomic_check, so mode information must come from the passed
 * conn_state rather than the encoder's committed connector state (see the
 * inline NOTE below). On mode-info failure hw_res is left partially filled
 * (tiles and phys resources set, topology/is_primary unset).
 */
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_mode_info mode_info;
	int rc, i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}

	/**
	 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
	 * called from atomic_check phase. Use the below API to get mode
	 * information of the temporary conn_state passed.
	 */
	rc = sde_connector_get_mode_info(conn_state, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	hw_res->topology = mode_info.topology;
	hw_res->is_primary = sde_enc->disp_info.is_primary;
}
628
/*
 * sde_encoder_destroy - tear down a virtual encoder and every physical
 *	encoder it contains.
 * @drm_enc: encoder to destroy
 *
 * Under enc_lock: destroys the rsc client, then each physical encoder
 * (decrementing num_phys_encs and clearing the slot as it goes). After
 * unlocking, performs DRM cleanup, destroys the lock itself, and frees the
 * container. drm_enc is dangling after this returns.
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* any survivors indicate a phys enc missing its destroy op */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&sde_enc->enc_lock);

	kfree(sde_enc);
}
666
/*
 * sde_encoder_helper_split_config - program MDP TOP split-pipe / pp-split
 *	registers according to this physical encoder's split role.
 * @phys_enc: physical encoder being configured
 * @interface: intf block driven by this encoder
 *
 * DSI-only (other connector types return early). SOLO role disables both
 * split modes by writing a zeroed config; MASTER programs split-pipe;
 * SLAVE programs pp-split using the master's (first) pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* ppsplit slaves flush through their own intf; others use none */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
736
Jeykumar Sankaraneb49ff32017-04-12 16:33:25 -0700737static void _sde_encoder_adjust_mode(struct drm_connector *connector,
738 struct drm_display_mode *adj_mode)
739{
740 struct drm_display_mode *cur_mode;
741
742 if (!connector || !adj_mode)
743 return;
744
745 list_for_each_entry(cur_mode, &connector->modes, head) {
746 if (cur_mode->vdisplay == adj_mode->vdisplay &&
747 cur_mode->hdisplay == adj_mode->hdisplay &&
748 cur_mode->vrefresh == adj_mode->vrefresh) {
749 adj_mode->private = cur_mode->private;
Jeykumar Sankaran69934622017-05-31 18:16:25 -0700750 adj_mode->private_flags |= cur_mode->private_flags;
Jeykumar Sankaraneb49ff32017-04-12 16:33:25 -0700751 }
752 }
753}
754
/**
 * sde_encoder_virt_atomic_check - validate a proposed display state
 * @drm_enc:	encoder being checked
 * @crtc_state:	proposed crtc state
 * @conn_state:	proposed connector state
 *
 * Runs each physical encoder's atomic_check (or mode_fixup fallback),
 * then — on a modeset — queries the display driver for mode info,
 * reserves resource-manager blocks, records the selected topology on the
 * connector state, and finally validates any partial-update ROIs.
 *
 * Return: 0 on success, negative errno on validation/reservation failure.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	SDE_EVT32(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_sde_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}


	/* modeset path: fetch mode info and reserve hardware resources */
	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_info(conn_state->connector, conn_state);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	/* validate user-requested partial-update regions */
	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
877
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800878static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
879 int pic_width, int pic_height)
880{
881 if (!dsc || !pic_width || !pic_height) {
882 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
883 pic_width, pic_height);
884 return -EINVAL;
885 }
886
887 if ((pic_width % dsc->slice_width) ||
888 (pic_height % dsc->slice_height)) {
889 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
890 pic_width, pic_height,
891 dsc->slice_width, dsc->slice_height);
892 return -EINVAL;
893 }
894
895 dsc->pic_width = pic_width;
896 dsc->pic_height = pic_height;
897
898 return 0;
899}
900
901static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
902 int intf_width)
903{
904 int slice_per_pkt, slice_per_intf;
905 int bytes_in_slice, total_bytes_per_intf;
906
907 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
908 (intf_width < dsc->slice_width)) {
909 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
910 intf_width, dsc ? dsc->slice_width : -1);
911 return;
912 }
913
914 slice_per_pkt = dsc->slice_per_pkt;
915 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
916
917 /*
918 * If slice_per_pkt is greater than slice_per_intf then default to 1.
919 * This can happen during partial update.
920 */
921 if (slice_per_pkt > slice_per_intf)
922 slice_per_pkt = 1;
923
924 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
925 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
926
927 dsc->eol_byte_num = total_bytes_per_intf % 3;
928 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
929 dsc->bytes_in_slice = bytes_in_slice;
930 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
931 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
932}
933
934static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
935 int enc_ip_width)
936{
937 int ssm_delay, total_pixels, soft_slice_per_enc;
938
939 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
940
941 /*
942 * minimum number of initial line pixels is a sum of:
943 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
944 * 91 for 10 bpc) * 3
945 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
946 * 3. the initial xmit delay
947 * 4. total pipeline delay through the "lock step" of encoder (47)
948 * 5. 6 additional pixels as the output of the rate buffer is
949 * 48 bits wide
950 */
951 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
952 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
953 if (soft_slice_per_enc > 1)
954 total_pixels += (ssm_delay * 3);
955 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
956 return 0;
957}
958
959static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
960 struct msm_display_dsc_info *dsc)
961{
962 /*
963 * As per the DSC spec, ICH_RESET can be either end of the slice line
964 * or at the end of the slice. HW internally generates ich_reset at
965 * end of the slice line if DSC_MERGE is used or encoder has two
966 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
967 * is not used then it will generate ich_reset at the end of slice.
968 *
969 * Now as per the spec, during one PPS session, position where
970 * ich_reset is generated should not change. Now if full-screen frame
971 * has more than 1 soft slice then HW will automatically generate
972 * ich_reset at the end of slice_line. But for the same panel, if
973 * partial frame is enabled and only 1 encoder is used with 1 slice,
974 * then HW will generate ich_reset at end of the slice. This is a
975 * mismatch. Prevent this by overriding HW's decision.
976 */
977 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
978 (dsc->slice_width == dsc->pic_width);
979}
980
/*
 * _sde_encoder_dsc_pipe_cfg - program or tear down one DSC/pingpong pair
 * @hw_dsc:      DSC hardware block
 * @hw_pp:       pingpong hardware block paired with the DSC
 * @dsc:         fully computed DSC parameters to program
 * @common_mode: DSC_MODE_* flags (split panel / multiplex / video)
 * @ich_reset:   force the ICH reset override (see
 *               _sde_encoder_dsc_ich_reset_override_needed)
 * @enable:      false disables DSC on the pingpong instead of configuring
 *
 * Sequence on enable: DSC core config, rate-control thresholds, then
 * pingpong DSC setup and enable.
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1003
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001004static void _sde_encoder_get_connector_roi(
1005 struct sde_encoder_virt *sde_enc,
1006 struct sde_rect *merged_conn_roi)
1007{
1008 struct drm_connector *drm_conn;
1009 struct sde_connector_state *c_state;
1010
1011 if (!sde_enc || !merged_conn_roi)
1012 return;
1013
1014 drm_conn = sde_enc->phys_encs[0]->connector;
1015
1016 if (!drm_conn || !drm_conn->state)
1017 return;
1018
1019 c_state = to_sde_connector_state(drm_conn->state);
1020 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1021}
1022
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - configure DSC for topologies that
 * use one DSC encoder and one interface (single pipe, or dual pipe with
 * 3D merge). Only the first pingpong/DSC pair is programmed; the
 * compressed region comes from sde_enc->cur_conn_roi.
 *
 * Return: 0 on success, -EINVAL on missing hardware or mode info.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* single encoder, no merge: encoder input equals interface input */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001073
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - configure DSC for the dual-pipe
 * topology with two layer mixers, two DSC encoders and two interfaces
 * (no DSC merge). Each pingpong/DSC pair is programmed independently and
 * enabled only if its display is in params->affected_displays.
 *
 * Return: 0 on success, -EINVAL on missing hardware or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one affected display means left/right partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-frame update: the two interfaces split the width evenly */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		/* displays outside the affected mask get DSC disabled */
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1167
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - configure DSC for the DSC-merge
 * topology: two layer mixers and two DSC encoders feeding one interface.
 * Both DSC cores share one parameter set; the second core is disabled
 * during half-panel partial update.
 *
 * Return: 0 on success, -EINVAL on missing hardware or mode info.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one affected display means left/right partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	/* second DSC core is torn down for half-panel partial update */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1239
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001240static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1241{
1242 struct sde_encoder_virt *sde_enc;
1243 struct drm_connector *drm_conn;
1244 struct drm_display_mode *adj_mode;
1245 struct sde_rect roi;
1246
1247 if (!drm_enc || !drm_enc->crtc || !drm_enc->crtc->state)
1248 return -EINVAL;
1249 sde_enc = to_sde_encoder_virt(drm_enc);
1250
1251 if (!sde_enc->cur_master)
1252 return -EINVAL;
1253
1254 adj_mode = &sde_enc->base.crtc->state->adjusted_mode;
1255 drm_conn = sde_enc->cur_master->connector;
1256
1257 _sde_encoder_get_connector_roi(sde_enc, &roi);
1258 if (sde_kms_rect_is_null(&roi)) {
1259 roi.w = adj_mode->hdisplay;
1260 roi.h = adj_mode->vdisplay;
1261 }
1262
1263 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1264 sizeof(sde_enc->prv_conn_roi));
1265 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1266
1267 return 0;
1268}
1269
1270static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1271 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001272{
1273 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001274 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001275 int ret = 0;
1276
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001277 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1278 !sde_enc->phys_encs[0]->connector)
1279 return -EINVAL;
1280
1281 drm_conn = sde_enc->phys_encs[0]->connector;
1282
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001283 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001284 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001285 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1286 return -EINVAL;
1287 }
1288
Ingrid Gallardo83532222017-06-02 16:48:51 -07001289 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001290 SDE_EVT32(DRMID(&sde_enc->base));
1291
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001292 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1293 &sde_enc->prv_conn_roi))
1294 return ret;
1295
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001296 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001297 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001298 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1299 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001300 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001301 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001302 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001303 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001304 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001305 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001306 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001307 default:
1308 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1309 topology);
1310 return -EINVAL;
1311 };
1312
1313 return ret;
1314}
1315
/*
 * _sde_encoder_update_vsync_source - select the TE/vsync source for
 * command-mode panels
 * @sde_enc:   virtual encoder
 * @disp_info: display capabilities and configuration
 * @is_dummy:  route vsync from watchdog timer 1 instead of the panel TE
 *             (NOTE(review): presumably used when the real TE is not
 *             available - confirm against callers)
 *
 * Only programs the MDP TOP vsync source when the display advertises
 * command-mode capability; video-mode panels are left untouched.
 */
static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
		struct msm_display_info *disp_info, bool is_dummy)
{
	struct sde_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	int i, rc = 0;

	if (!sde_enc || !disp_info) {
		SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
					sde_enc != NULL, disp_info != NULL);
		return;
	} else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
		SDE_ERROR("invalid num phys enc %d/%d\n",
				sde_enc->num_phys_encs,
				(int) ARRAY_SIZE(sde_enc->hw_pp));
		return;
	}

	drm_enc = &sde_enc->base;
	/* this pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	hw_mdptop = sde_kms->hw_mdp;
	if (!hw_mdptop) {
		SDE_ERROR("invalid mdptop\n");
		return;
	}

	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	/* vsync source selection applies only to command-mode displays */
	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		for (i = 0; i < sde_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = sde_enc->num_phys_encs;
		vsync_cfg.frame_rate = mode_info.frame_rate;
		if (is_dummy)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
		else if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
		vsync_cfg.is_dummy = is_dummy;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
1378
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001379static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1380{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001381 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001382 struct sde_hw_pingpong *hw_pp = NULL;
1383 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001384
1385 if (!sde_enc || !sde_enc->phys_encs[0] ||
1386 !sde_enc->phys_encs[0]->connector) {
1387 SDE_ERROR("invalid params %d %d\n",
1388 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1389 return -EINVAL;
1390 }
1391
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001392 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001393 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1394 hw_pp = sde_enc->hw_pp[i];
1395 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001396
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001397 if (hw_pp && hw_pp->ops.disable_dsc)
1398 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001399
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001400 if (hw_dsc && hw_dsc->ops.dsc_disable)
1401 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001402 }
1403
1404 return ret;
1405}
1406
/**
 * _sde_encoder_update_rsc_client - push this encoder's state to the RSC driver
 * @drm_enc: encoder pointer; must have a valid crtc and dev
 * @config: optional extra config; when non-NULL its inline_rotate_prefill is
 *	added on top of the mode's prefill lines
 * @enable: true to request an active RSC state (CMD or VID), false for IDLE
 *
 * Reconfigures the RSC command config when any timing parameter changed,
 * requests the new RSC state, and, if the RSC reports that a VBLANK wait is
 * needed to complete the transition, waits (bounded by MAX_RSC_WAIT vblanks)
 * until the state update completes.
 *
 * Return: 0 on success or when there is nothing to do (no rsc client, or
 *	mode info lookup failed — both treated as non-fatal), negative error
 *	code otherwise.
 */
static int _sde_encoder_update_rsc_client(
		struct drm_enc oder *drm_enc,
		struct sde_encoder_rsc_config *config, bool enable)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_crtc *crtc;
	enum sde_rsc_state rsc_state;
	struct sde_rsc_cmd_config *rsc_config;
	int ret, prefill_lines;
	struct msm_display_info *disp_info;
	struct msm_mode_info mode_info;
	int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
	int wait_count = 0;
	struct drm_crtc *primary_crtc;
	int pipe = -1;	/* index of the primary crtc when waiting on its vblank */
	int rc = 0;

	if (!drm_enc || !drm_enc->crtc || !drm_enc->dev) {
		SDE_ERROR("invalid arguments\n");
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	crtc = drm_enc->crtc;
	disp_info = &sde_enc->disp_info;
	rsc_config = &sde_enc->rsc_config;

	/* no rsc client on this target/encoder: nothing to update */
	if (!sde_enc->rsc_client) {
		SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
		return 0;
	}

	/* mode info failure is logged but deliberately returns success */
	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
		return 0;
	}

	/**
	 * only primary command mode panel can request CMD state.
	 * all other panels/displays can request for VID state including
	 * secondary command mode panel.
	 */
	rsc_state = enable ?
		(((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
		  disp_info->is_primary) ? SDE_RSC_CMD_STATE :
		SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
	/* inline rotator demands extra prefill headroom when configured */
	prefill_lines = config ? mode_info.prefill_lines +
		config->inline_rotate_prefill : mode_info.prefill_lines;

	/* compare specific items and reconfigure the rsc */
	if ((rsc_config->fps != mode_info.frame_rate) ||
			(rsc_config->vtotal != mode_info.vtotal) ||
			(rsc_config->prefill_lines != prefill_lines) ||
			(rsc_config->jitter_numer != mode_info.jitter_numer) ||
			(rsc_config->jitter_denom != mode_info.jitter_denom)) {
		rsc_config->fps = mode_info.frame_rate;
		rsc_config->vtotal = mode_info.vtotal;
		rsc_config->prefill_lines = prefill_lines;
		rsc_config->jitter_numer = mode_info.jitter_numer;
		rsc_config->jitter_denom = mode_info.jitter_denom;
		/* force a full config push on the next active-state request */
		sde_enc->rsc_state_init = false;
	}

	/* pass the full config only once per (re)configuration, primary only */
	if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
			&& disp_info->is_primary) {
		/* update it only once */
		sde_enc->rsc_state_init = true;

		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, rsc_config, crtc->base.id,
			&wait_vblank_crtc_id);
	} else {
		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, NULL, crtc->base.id,
			&wait_vblank_crtc_id);
	}

	/**
	 * if RSC performed a state change that requires a VBLANK wait, it will
	 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
	 *
	 * if we are the primary display, we will need to enable and wait
	 * locally since we hold the commit thread
	 *
	 * if we are an external display, we must send a signal to the primary
	 * to enable its VBLANK and wait one, since the RSC hardware is driven
	 * by the primary panel's VBLANK signals
	 */
	SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"sde rsc client update failed ret:%d\n", ret);
		return ret;
	} else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
		/* no vblank wait requested by RSC: done */
		return ret;
	}

	/* waiting on a different (primary) crtc: resolve its pipe index */
	if (crtc->base.id != wait_vblank_crtc_id) {
		primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
		if (!primary_crtc) {
			SDE_ERROR_ENC(sde_enc,
					"failed to find primary crtc id %d\n",
					wait_vblank_crtc_id);
			return -EINVAL;
		}
		pipe = drm_crtc_index(primary_crtc);
	}

	/**
	 * note: VBLANK is expected to be enabled at this point in
	 * resource control state machine if on primary CRTC
	 */
	for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
		if (sde_rsc_client_is_state_update_complete(
				sde_enc->rsc_client))
			break;

		if (crtc->base.id == wait_vblank_crtc_id)
			ret = sde_encoder_wait_for_event(drm_enc,
					MSM_ENC_VBLANK);
		else
			drm_wait_one_vblank(drm_enc->dev, pipe);

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"wait for vblank failed ret:%d\n", ret);
			break;
		}
	}

	/* log (but do not fail) if RSC never reported completion */
	if (wait_count >= MAX_RSC_WAIT)
		SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
				SDE_EVTLOG_ERROR);

	return ret;
}
1544
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001545static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1546{
1547 struct sde_encoder_virt *sde_enc;
1548 int i;
1549
1550 if (!drm_enc) {
1551 SDE_ERROR("invalid encoder\n");
1552 return;
1553 }
1554
1555 sde_enc = to_sde_encoder_virt(drm_enc);
1556
1557 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1558 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1559 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1560
1561 if (phys && phys->ops.irq_control)
1562 phys->ops.irq_control(phys, enable);
1563 }
1564
1565}
1566
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001567/* keep track of the userspace vblank during modeset */
1568static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1569 u32 sw_event)
1570{
1571 struct sde_encoder_virt *sde_enc;
1572 bool enable;
1573 int i;
1574
1575 if (!drm_enc) {
1576 SDE_ERROR("invalid encoder\n");
1577 return;
1578 }
1579
1580 sde_enc = to_sde_encoder_virt(drm_enc);
1581 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1582 sw_event, sde_enc->vblank_enabled);
1583
1584 /* nothing to do if vblank not enabled by userspace */
1585 if (!sde_enc->vblank_enabled)
1586 return;
1587
1588 /* disable vblank on pre_modeset */
1589 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1590 enable = false;
1591 /* enable vblank on post_modeset */
1592 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1593 enable = true;
1594 else
1595 return;
1596
1597 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1598 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1599
1600 if (phys && phys->ops.control_vblank_irq)
1601 phys->ops.control_vblank_irq(phys, enable);
1602 }
1603}
1604
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001605struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1606{
1607 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001608
1609 if (!drm_enc)
1610 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001611 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001612 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001613}
1614
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001615static void _sde_encoder_resource_control_rsc_update(
1616 struct drm_encoder *drm_enc, bool enable)
1617{
1618 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
1619 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001620 int i;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001621
1622 if (enable) {
1623 rsc_cfg.inline_rotate_prefill =
1624 sde_crtc_get_inline_prefill(drm_enc->crtc);
1625
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001626 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1627 } else {
1628 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
1629
1630 /**
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001631 * disable the vsync source after updating the rsc state. rsc
1632 * state update might have vsync wait and vsync source must be
1633 * disabled after it. It will avoid generating any vsync from
1634 * this point till mode-2 entry. It is SW workaround for
1635 * HW limitation and should not be removed without checking the
1636 * updated design.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001637 */
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001638 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1639 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1640
1641 if (phys && phys->ops.prepare_idle_pc)
1642 phys->ops.prepare_idle_pc(phys);
1643 }
1644
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001645 }
1646}
1647
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001648static void _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
1649 bool enable)
1650{
1651 struct msm_drm_private *priv;
1652 struct sde_kms *sde_kms;
1653 struct sde_encoder_virt *sde_enc;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001654
1655 sde_enc = to_sde_encoder_virt(drm_enc);
1656 priv = drm_enc->dev->dev_private;
1657 sde_kms = to_sde_kms(priv->kms);
1658
1659 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1660 SDE_EVT32(DRMID(drm_enc), enable);
1661
1662 if (!sde_enc->cur_master) {
1663 SDE_ERROR("encoder master not set\n");
1664 return;
1665 }
1666
1667 if (enable) {
1668 /* enable SDE core clks */
1669 sde_power_resource_enable(&priv->phandle,
1670 sde_kms->core_client, true);
1671
1672 /* enable DSI clks */
1673 sde_connector_clk_ctrl(sde_enc->cur_master->connector, true);
1674
1675 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001676 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001677
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001678 } else {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001679 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001680 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001681
1682 /* disable DSI clks */
1683 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
1684
1685 /* disable SDE core clks */
1686 sde_power_resource_enable(&priv->phandle,
1687 sde_kms->core_client, false);
1688 }
1689
1690}
1691
/**
 * sde_encoder_resource_control - encoder resource-control state machine
 * @drm_enc: encoder pointer; must have valid dev, dev_private and crtc
 * @sw_event: one of the SDE_ENC_RC_EVENT_* events
 *
 * Transitions the encoder between the ON / PRE_OFF / OFF / MODESET / IDLE
 * resource-control states in response to kickoff, frame-done, stop, modeset
 * and idle-timer events. All transitions except FRAME_DONE are serialized
 * with rc_lock; FRAME_DONE runs lockless from interrupt context (see the
 * comment in that case). Returns 0 on success or an accepted no-op,
 * negative error code on an invalid state transition.
 */
static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool autorefresh_enabled = false;
	unsigned int lp, idle_timeout;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	int ret;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = sde_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		SDE_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	/* per-crtc display thread: hosts the delayed idle-off work */
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!sde_enc->idle_pc_supported &&
			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
			sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_STOP &&
			sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
		return 0;

	SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
			sde_enc->idle_pc_supported);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);

	switch (sw_event) {
	case SDE_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE1);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
				sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
			/* kickoff is only legal from OFF or IDLE */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * video mode coming out of IDLE only had its irqs gated (see
		 * ENTER_IDLE below), so only irqs need to be restored here
		 */
		if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		} else {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);
			_sde_encoder_resource_control_rsc_update(drm_enc, true);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (sde_crtc_frame_pending(drm_enc->crtc) > 1) {
			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE2);
			return 0;
		}

		/* schedule delayed off work if autorefresh is disabled */
		if (sde_enc->cur_master &&
			sde_enc->cur_master->ops.is_autorefresh_enabled)
			autorefresh_enabled =
				sde_enc->cur_master->ops.is_autorefresh_enabled(
							sde_enc->cur_master);

		/* set idle timeout based on master connector's lp value */
		if (sde_enc->cur_master)
			lp = sde_connector_get_lp(
					sde_enc->cur_master->connector);
		else
			lp = SDE_MODE_DPMS_ON;

		/* LP2 idles faster; a zero timeout disables idle scheduling */
		if (lp == SDE_MODE_DPMS_LP2)
			idle_timeout = IDLE_SHORT_TIMEOUT;
		else
			idle_timeout = sde_enc->idle_timeout;

		if (!autorefresh_enabled && idle_timeout)
			kthread_queue_delayed_work(
				&disp_thread->worker,
				&sde_enc->delayed_off_work,
				msecs_to_jiffies(idle_timeout));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				autorefresh_enabled,
				idle_timeout, SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;

	case SDE_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/*
		 * video mode in IDLE re-enables irqs first and then falls
		 * through to the rsc update below (irqs are needed for the
		 * vblank wait the RSC transition may perform)
		 */
		if (is_vid_mode &&
			  sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE3);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/**
		 * IRQs are still enabled currently, which allows wait for
		 * VBLANK which RSC may require to correctly transition to OFF
		 */
		_sde_encoder_resource_control_rsc_update(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_PRE_OFF,
				SDE_EVTLOG_FUNC_CASE3);

		sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_STOP:
		/* cancel vsync event work */
		kthread_cancel_work_sync(&sde_enc->vsync_event_work);

		mutex_lock(&sde_enc->rc_lock);
		/* return if the resource control is already in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE4);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
			/* STOP without a preceding PRE_STOP is a driver bug */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/**
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
			_sde_encoder_resource_control_helper(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);

		sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_PRE_MODESET:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);

			_sde_encoder_resource_control_rsc_update(drm_enc, true);

			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
			sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
		}

		/* flush the pending frame before touching hw resources */
		ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
		if (ret && ret != -EWOULDBLOCK) {
			SDE_ERROR_ENC(sde_enc,
					"wait for commit done returned %d\n",
					ret);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					ret, SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		_sde_encoder_irq_control(drm_enc, false);
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
			SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);

		sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_POST_MODESET:
		mutex_lock(&sde_enc->rc_lock);

		/* POST_MODESET is only legal after PRE_MODESET */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d !MODESET state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/* restore userspace vblank and irqs gated in PRE_MODESET */
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);
		_sde_encoder_irq_control(drm_enc, true);

		_sde_encoder_update_rsc_client(drm_enc, NULL, true);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);

		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&sde_enc->rc_lock);

		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (sde_enc->frame_busy_mask[0]) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d frame pending\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* video mode only gates irqs; cmd mode powers down fully */
		if (is_vid_mode) {
			_sde_encoder_irq_control(drm_enc, false);
		} else {
			/* disable all the clks and resources */
			_sde_encoder_resource_control_rsc_update(drm_enc,
								false);
			_sde_encoder_resource_control_helper(drm_enc, false);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	default:
		SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
		SDE_ERROR("unexpected sw_event: %d\n", sw_event);
		break;
	}

	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
	return 0;
}
2034
/**
 * sde_encoder_virt_mode_set - drm encoder mode_set callback
 * @drm_enc: encoder pointer
 * @mode: requested display mode
 * @adj_mode: adjusted display mode actually being programmed
 *
 * Finds the connector attached to this encoder, refreshes its mode info,
 * reserves hardware resources (pingpong/DSC blocks) from the resource
 * manager, and forwards mode_set to each physical encoder. For a seamless
 * dynamic mode switch (DMS), resources are restored via PRE_MODESET before
 * the switch and released via POST_MODESET afterwards, with DSC disabled
 * in between.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/* find the connector currently attached to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh mode info from the display for the adjusted mode */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the reserved pingpong blocks for this encoder */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks (may be fewer than channels) */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each physical encoder its pingpong block and connector */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2154
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002155static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002156{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002157 struct sde_encoder_virt *sde_enc = NULL;
Clarence Ip35348262017-04-28 16:10:46 -07002158 struct msm_drm_private *priv;
2159 struct sde_kms *sde_kms;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002160
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002161 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
2162 SDE_ERROR("invalid parameters\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002163 return;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002164 }
Dhaval Patelaab9b522017-07-20 12:38:46 -07002165
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002166 priv = drm_enc->dev->dev_private;
Dhaval Patelaab9b522017-07-20 12:38:46 -07002167 sde_kms = to_sde_kms(priv->kms);
2168 if (!sde_kms) {
2169 SDE_ERROR("invalid sde_kms\n");
2170 return;
2171 }
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002172
2173 sde_enc = to_sde_encoder_virt(drm_enc);
2174 if (!sde_enc || !sde_enc->cur_master) {
2175 SDE_ERROR("invalid sde encoder/master\n");
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002176 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002177 }
2178
Ajay Singh Parmar878ef142017-08-07 16:53:57 -07002179 if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
2180 sde_enc->cur_master->hw_mdptop &&
2181 sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
2182 sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
2183 sde_enc->cur_master->hw_mdptop);
2184
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002185 if (sde_enc->cur_master->hw_mdptop &&
2186 sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
2187 sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
2188 sde_enc->cur_master->hw_mdptop,
2189 sde_kms->catalog);
2190
Dhaval Patelaab9b522017-07-20 12:38:46 -07002191 _sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
Lloyd Atkinsonbc01cbd2017-06-05 14:26:57 -04002192
2193 memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
2194 memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002195}
2196
2197void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2198{
2199 struct sde_encoder_virt *sde_enc = NULL;
2200 int i;
2201
2202 if (!drm_enc) {
2203 SDE_ERROR("invalid encoder\n");
2204 return;
2205 }
2206 sde_enc = to_sde_encoder_virt(drm_enc);
2207
2208 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2209 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2210
2211 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2212 phys->ops.restore(phys);
2213 }
2214
2215 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2216 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2217
2218 _sde_encoder_virt_enable_helper(drm_enc);
2219}
2220
2221static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
2222{
2223 struct sde_encoder_virt *sde_enc = NULL;
2224 int i, ret = 0;
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07002225 struct msm_compression_info *comp_info = NULL;
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002226 struct drm_display_mode *cur_mode = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002227 struct msm_mode_info mode_info;
Sandeep Panda318cff12017-10-20 13:16:03 +05302228 struct drm_connector *drm_conn = NULL;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002229
2230 if (!drm_enc) {
2231 SDE_ERROR("invalid encoder\n");
2232 return;
2233 }
2234 sde_enc = to_sde_encoder_virt(drm_enc);
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002235
2236 ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
2237 if (ret) {
2238 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
2239 return;
2240 }
2241
2242 comp_info = &mode_info.comp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002243 cur_mode = &sde_enc->base.crtc->state->adjusted_mode;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002244
Clarence Ip19af1362016-09-23 14:57:51 -04002245 SDE_DEBUG_ENC(sde_enc, "\n");
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002246 SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002247
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002248 sde_enc->cur_master = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002249 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2250 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2251
2252 if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
2253 SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
2254 sde_enc->cur_master = phys;
2255 break;
2256 }
2257 }
2258
2259 if (!sde_enc->cur_master) {
2260 SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
2261 return;
2262 }
2263
2264 ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
2265 if (ret) {
2266 SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
2267 ret);
2268 return;
2269 }
Dhaval Patel020f7e122016-11-15 14:39:18 -08002270
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002271 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2272 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002273
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002274 if (!phys)
2275 continue;
2276
2277 phys->comp_type = comp_info->comp_type;
2278 if (phys != sde_enc->cur_master) {
2279 /**
2280 * on DMS request, the encoder will be enabled
2281 * already. Invoke restore to reconfigure the
2282 * new mode.
2283 */
2284 if (msm_is_mode_seamless_dms(cur_mode) &&
2285 phys->ops.restore)
2286 phys->ops.restore(phys);
2287 else if (phys->ops.enable)
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07002288 phys->ops.enable(phys);
2289 }
Dhaval Patel010f5172017-08-01 22:40:09 -07002290
2291 if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
2292 MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
2293 phys->ops.setup_misr(phys, true,
2294 sde_enc->misr_frame_count);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002295 }
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002296
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002297 if (msm_is_mode_seamless_dms(cur_mode) &&
2298 sde_enc->cur_master->ops.restore)
2299 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2300 else if (sde_enc->cur_master->ops.enable)
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002301 sde_enc->cur_master->ops.enable(sde_enc->cur_master);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08002302
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002303 _sde_encoder_virt_enable_helper(drm_enc);
Sandeep Panda318cff12017-10-20 13:16:03 +05302304
2305 /* Enable ESD thread */
2306 drm_conn = sde_enc->cur_master->connector;
2307 sde_connector_schedule_status_work(drm_conn, true);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002308}
2309
2310static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2311{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002312 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002313 struct msm_drm_private *priv;
2314 struct sde_kms *sde_kms;
Sandeep Panda318cff12017-10-20 13:16:03 +05302315 struct drm_connector *drm_conn = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002316 int i = 0;
2317
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002318 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002319 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002320 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002321 } else if (!drm_enc->dev) {
2322 SDE_ERROR("invalid dev\n");
2323 return;
2324 } else if (!drm_enc->dev->dev_private) {
2325 SDE_ERROR("invalid dev_private\n");
2326 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002327 }
2328
2329 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002330 SDE_DEBUG_ENC(sde_enc, "\n");
2331
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002332 priv = drm_enc->dev->dev_private;
2333 sde_kms = to_sde_kms(priv->kms);
2334
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002335 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002336
Sandeep Panda318cff12017-10-20 13:16:03 +05302337 /* Disable ESD thread */
2338 drm_conn = sde_enc->cur_master->connector;
2339 sde_connector_schedule_status_work(drm_conn, false);
2340
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002341 /* wait for idle */
2342 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2343
2344 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_PRE_STOP);
2345
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002346 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2347 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2348
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002349 if (phys && phys->ops.disable)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002350 phys->ops.disable(phys);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002351 }
2352
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002353 /*
2354 * disable dsc after the transfer is complete (for command mode)
2355 * and after physical encoder is disabled, to make sure timing
2356 * engine is already disabled (for video mode).
2357 */
2358 _sde_encoder_dsc_disable(sde_enc);
2359
Lloyd Atkinson03810e32017-03-14 13:38:06 -07002360 /* after phys waits for frame-done, should be no more frames pending */
2361 if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
2362 SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
2363 del_timer_sync(&sde_enc->frame_done_timer);
2364 }
2365
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002366 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2367
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002368 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2369 if (sde_enc->phys_encs[i])
2370 sde_enc->phys_encs[i]->connector = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002371 }
2372
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002373 sde_enc->cur_master = NULL;
2374
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002375 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002376
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002377 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002378}
2379
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002380static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002381 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002382{
2383 int i = 0;
2384
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002385 for (i = 0; i < catalog->intf_count; i++) {
2386 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002387 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002388 return catalog->intf[i].id;
2389 }
2390 }
2391
2392 return INTF_MAX;
2393}
2394
Alan Kwongbb27c092016-07-20 16:41:25 -04002395static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2396 enum sde_intf_type type, u32 controller_id)
2397{
2398 if (controller_id < catalog->wb_count)
2399 return catalog->wb[controller_id].id;
2400
2401 return WB_MAX;
2402}
2403
Dhaval Patel81e87882016-10-19 21:41:56 -07002404static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
2405 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002406{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002407 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002408 unsigned long lock_flags;
2409
Dhaval Patel81e87882016-10-19 21:41:56 -07002410 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002411 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002412
Narendra Muppalla77b32932017-05-10 13:53:11 -07002413 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002414 sde_enc = to_sde_encoder_virt(drm_enc);
2415
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002416 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002417 if (sde_enc->crtc_vblank_cb)
2418 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002419 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07002420
2421 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07002422 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002423}
2424
2425static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
2426 struct sde_encoder_phys *phy_enc)
2427{
2428 if (!phy_enc)
2429 return;
2430
Narendra Muppalla77b32932017-05-10 13:53:11 -07002431 SDE_ATRACE_BEGIN("encoder_underrun_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07002432 atomic_inc(&phy_enc->underrun_cnt);
Lloyd Atkinson64b07dd2016-12-12 17:10:57 -05002433 SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
Narendra Muppalla77b32932017-05-10 13:53:11 -07002434 SDE_ATRACE_END("encoder_underrun_callback");
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002435}
2436
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002437void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2438 void (*vbl_cb)(void *), void *vbl_data)
2439{
2440 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2441 unsigned long lock_flags;
2442 bool enable;
2443 int i;
2444
2445 enable = vbl_cb ? true : false;
2446
Clarence Ip19af1362016-09-23 14:57:51 -04002447 if (!drm_enc) {
2448 SDE_ERROR("invalid encoder\n");
2449 return;
2450 }
2451 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002452 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002453
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002454 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002455 sde_enc->crtc_vblank_cb = vbl_cb;
2456 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002457 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002458
2459 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2460 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2461
2462 if (phys && phys->ops.control_vblank_irq)
2463 phys->ops.control_vblank_irq(phys, enable);
2464 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002465 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002466}
2467
Alan Kwong628d19e2016-10-31 13:50:13 -04002468void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2469 void (*frame_event_cb)(void *, u32 event),
2470 void *frame_event_cb_data)
2471{
2472 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2473 unsigned long lock_flags;
2474 bool enable;
2475
2476 enable = frame_event_cb ? true : false;
2477
2478 if (!drm_enc) {
2479 SDE_ERROR("invalid encoder\n");
2480 return;
2481 }
2482 SDE_DEBUG_ENC(sde_enc, "\n");
2483 SDE_EVT32(DRMID(drm_enc), enable, 0);
2484
2485 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2486 sde_enc->crtc_frame_event_cb = frame_event_cb;
2487 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2488 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2489}
2490
2491static void sde_encoder_frame_done_callback(
2492 struct drm_encoder *drm_enc,
2493 struct sde_encoder_phys *ready_phys, u32 event)
2494{
2495 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2496 unsigned int i;
2497
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002498 if (event & (SDE_ENCODER_FRAME_EVENT_DONE
2499 | SDE_ENCODER_FRAME_EVENT_ERROR
2500 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Lloyd Atkinsond0fedd02017-03-01 13:25:40 -05002501
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002502 if (!sde_enc->frame_busy_mask[0]) {
2503 /**
2504 * suppress frame_done without waiter,
2505 * likely autorefresh
2506 */
2507 SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
2508 return;
Alan Kwong628d19e2016-10-31 13:50:13 -04002509 }
2510
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002511 /* One of the physical encoders has become idle */
2512 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2513 if (sde_enc->phys_encs[i] == ready_phys) {
2514 clear_bit(i, sde_enc->frame_busy_mask);
2515 SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
2516 sde_enc->frame_busy_mask[0]);
2517 }
2518 }
Alan Kwong628d19e2016-10-31 13:50:13 -04002519
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002520 if (!sde_enc->frame_busy_mask[0]) {
2521 atomic_set(&sde_enc->frame_done_timeout, 0);
2522 del_timer(&sde_enc->frame_done_timer);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002523
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002524 sde_encoder_resource_control(drm_enc,
2525 SDE_ENC_RC_EVENT_FRAME_DONE);
2526
2527 if (sde_enc->crtc_frame_event_cb)
2528 sde_enc->crtc_frame_event_cb(
2529 sde_enc->crtc_frame_event_cb_data,
2530 event);
2531 }
2532 } else {
Alan Kwong628d19e2016-10-31 13:50:13 -04002533 if (sde_enc->crtc_frame_event_cb)
2534 sde_enc->crtc_frame_event_cb(
Ingrid Gallardo79b44392017-05-30 16:30:52 -07002535 sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04002536 }
2537}
2538
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002539static void sde_encoder_off_work(struct kthread_work *work)
2540{
2541 struct sde_encoder_virt *sde_enc = container_of(work,
2542 struct sde_encoder_virt, delayed_off_work.work);
2543
2544 if (!sde_enc) {
2545 SDE_ERROR("invalid sde encoder\n");
2546 return;
2547 }
2548
2549 sde_encoder_resource_control(&sde_enc->base,
2550 SDE_ENC_RC_EVENT_ENTER_IDLE);
2551
2552 sde_encoder_frame_done_callback(&sde_enc->base, NULL,
2553 SDE_ENCODER_FRAME_EVENT_IDLE);
2554}
2555
Clarence Ip110d15c2016-08-16 14:44:41 -04002556/**
2557 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
2558 * drm_enc: Pointer to drm encoder structure
2559 * phys: Pointer to physical encoder structure
2560 * extra_flush_bits: Additional bit mask to include in flush trigger
2561 */
2562static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
2563 struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
2564{
2565 struct sde_hw_ctl *ctl;
Clarence Ip8e69ad02016-12-09 09:43:57 -05002566 int pending_kickoff_cnt;
Clarence Ip110d15c2016-08-16 14:44:41 -04002567
2568 if (!drm_enc || !phys) {
2569 SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
2570 drm_enc != 0, phys != 0);
2571 return;
2572 }
2573
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002574 if (!phys->hw_pp) {
2575 SDE_ERROR("invalid pingpong hw\n");
2576 return;
2577 }
2578
Clarence Ip110d15c2016-08-16 14:44:41 -04002579 ctl = phys->hw_ctl;
Alan Kwong4212dd42017-09-19 17:22:33 -04002580 if (!ctl || !phys->ops.trigger_flush) {
2581 SDE_ERROR("missing ctl/trigger cb\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002582 return;
2583 }
2584
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002585 if (phys->split_role == ENC_ROLE_SKIP) {
2586 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2587 "skip flush pp%d ctl%d\n",
2588 phys->hw_pp->idx - PINGPONG_0,
2589 ctl->idx - CTL_0);
2590 return;
2591 }
2592
Clarence Ip8e69ad02016-12-09 09:43:57 -05002593 pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);
Clarence Ip8e69ad02016-12-09 09:43:57 -05002594
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07002595 if (phys->ops.is_master && phys->ops.is_master(phys))
2596 atomic_inc(&phys->pending_retire_fence_cnt);
2597
Clarence Ip110d15c2016-08-16 14:44:41 -04002598 if (extra_flush_bits && ctl->ops.update_pending_flush)
2599 ctl->ops.update_pending_flush(ctl, extra_flush_bits);
2600
Alan Kwong4212dd42017-09-19 17:22:33 -04002601 phys->ops.trigger_flush(phys);
Dhaval Patel6c666622017-03-21 23:02:59 -07002602
2603 if (ctl->ops.get_pending_flush)
Clarence Ip569d5af2017-10-14 21:09:01 -04002604 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
2605 pending_kickoff_cnt, ctl->idx - CTL_0,
2606 ctl->ops.get_pending_flush(ctl));
Dhaval Patel6c666622017-03-21 23:02:59 -07002607 else
Clarence Ip569d5af2017-10-14 21:09:01 -04002608 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
2609 ctl->idx - CTL_0, pending_kickoff_cnt);
Clarence Ip110d15c2016-08-16 14:44:41 -04002610}
2611
2612/**
2613 * _sde_encoder_trigger_start - trigger start for a physical encoder
2614 * phys: Pointer to physical encoder structure
2615 */
2616static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2617{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002618 struct sde_hw_ctl *ctl;
2619
Clarence Ip110d15c2016-08-16 14:44:41 -04002620 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002621 SDE_ERROR("invalid argument(s)\n");
2622 return;
2623 }
2624
2625 if (!phys->hw_pp) {
2626 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002627 return;
2628 }
2629
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002630 ctl = phys->hw_ctl;
2631 if (phys->split_role == ENC_ROLE_SKIP) {
2632 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2633 "skip start pp%d ctl%d\n",
2634 phys->hw_pp->idx - PINGPONG_0,
2635 ctl->idx - CTL_0);
2636 return;
2637 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002638 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
2639 phys->ops.trigger_start(phys);
2640}
2641
Alan Kwong4212dd42017-09-19 17:22:33 -04002642void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
2643{
2644 struct sde_hw_ctl *ctl;
2645
2646 if (!phys_enc) {
2647 SDE_ERROR("invalid encoder\n");
2648 return;
2649 }
2650
2651 ctl = phys_enc->hw_ctl;
2652 if (ctl && ctl->ops.trigger_flush)
2653 ctl->ops.trigger_flush(ctl);
2654}
2655
Clarence Ip110d15c2016-08-16 14:44:41 -04002656void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
2657{
2658 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04002659
2660 if (!phys_enc) {
2661 SDE_ERROR("invalid encoder\n");
2662 return;
2663 }
2664
2665 ctl = phys_enc->hw_ctl;
2666 if (ctl && ctl->ops.trigger_start) {
2667 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04002668 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04002669 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002670}
2671
Raviteja Tamatam3eebe962017-10-26 09:55:24 +05302672static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
2673 s64 timeout_ms, struct sde_encoder_wait_info *info)
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002674{
2675 int rc = 0;
Raviteja Tamatam3eebe962017-10-26 09:55:24 +05302676 s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
2677 ktime_t cur_ktime;
2678 ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002679
2680 do {
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002681 rc = wait_event_timeout(*(info->wq),
Raviteja Tamatam3eebe962017-10-26 09:55:24 +05302682 atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
2683 cur_ktime = ktime_get();
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002684
Raviteja Tamatam3eebe962017-10-26 09:55:24 +05302685 SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
2686 timeout_ms, atomic_read(info->atomic_cnt));
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002687 /* If we timed out, counter is valid and time is less, wait again */
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002688 } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
Raviteja Tamatam3eebe962017-10-26 09:55:24 +05302689 (ktime_compare_safe(exp_ktime, cur_ktime) > 0));
2690
2691 return rc;
2692}
2693
2694int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
2695 struct sde_encoder_wait_info *info)
2696{
2697 int rc;
2698 ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);
2699
2700 rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);
2701
2702 /**
2703 * handle disabled irq case where timer irq is also delayed.
2704 * wait for additional timeout of FAULT_TOLERENCE_WAIT_IN_MS
2705 * if it event_timeout expired late detected.
2706 */
2707 if (atomic_read(info->atomic_cnt) && (!rc) &&
2708 (ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
2709 FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
2710 rc = _sde_encoder_wait_timeout(drm_id, hw_id,
2711 FAULT_TOLERENCE_WAIT_IN_MS, info);
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002712
2713 return rc;
2714}
2715
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05002716void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
2717{
2718 struct sde_encoder_virt *sde_enc;
2719 struct sde_connector *sde_con;
2720 void *sde_con_disp;
2721 struct sde_hw_ctl *ctl;
2722 int rc;
2723
2724 if (!phys_enc) {
2725 SDE_ERROR("invalid encoder\n");
2726 return;
2727 }
2728 sde_enc = to_sde_encoder_virt(phys_enc->parent);
2729 ctl = phys_enc->hw_ctl;
2730
2731 if (!ctl || !ctl->ops.reset)
2732 return;
2733
2734 SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
2735 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);
2736
2737 if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
2738 phys_enc->connector) {
2739 sde_con = to_sde_connector(phys_enc->connector);
2740 sde_con_disp = sde_connector_get_display(phys_enc->connector);
2741
2742 if (sde_con->ops.soft_reset) {
2743 rc = sde_con->ops.soft_reset(sde_con_disp);
2744 if (rc) {
2745 SDE_ERROR_ENC(sde_enc,
2746 "connector soft reset failure\n");
Dhaval Patel7ca510f2017-07-12 12:57:37 -07002747 SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
2748 "panic");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05002749 }
2750 }
2751 }
2752
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05002753 phys_enc->enable_state = SDE_ENC_ENABLED;
2754}
2755
Clarence Ip110d15c2016-08-16 14:44:41 -04002756/**
2757 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
2758 * Iterate through the physical encoders and perform consolidated flush
2759 * and/or control start triggering as needed. This is done in the virtual
2760 * encoder rather than the individual physical ones in order to handle
2761 * use cases that require visibility into multiple physical encoders at
2762 * a time.
2763 * sde_enc: Pointer to virtual encoder structure
2764 */
2765static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
2766{
2767 struct sde_hw_ctl *ctl;
2768 uint32_t i, pending_flush;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002769 unsigned long lock_flags;
Clarence Ip110d15c2016-08-16 14:44:41 -04002770
2771 if (!sde_enc) {
2772 SDE_ERROR("invalid encoder\n");
2773 return;
2774 }
2775
2776 pending_flush = 0x0;
2777
Ingrid Gallardo61210ea2017-10-17 17:29:31 -07002778 /*
2779 * Trigger LUT DMA flush, this might need a wait, so we need
2780 * to do this outside of the atomic context
2781 */
2782 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2783 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2784 bool wait_for_dma = false;
2785
2786 if (!phys || phys->enable_state == SDE_ENC_DISABLED)
2787 continue;
2788
2789 ctl = phys->hw_ctl;
2790 if (!ctl)
2791 continue;
2792
2793 if (phys->ops.wait_dma_trigger)
2794 wait_for_dma = phys->ops.wait_dma_trigger(phys);
2795
2796 if (phys->hw_ctl->ops.reg_dma_flush)
2797 phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
2798 wait_for_dma);
2799 }
2800
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002801 /* update pending counts and trigger kickoff ctl flush atomically */
2802 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2803
Clarence Ip110d15c2016-08-16 14:44:41 -04002804 /* don't perform flush/start operations for slave encoders */
2805 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2806 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07002807 enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;
Clarence Ip8e69ad02016-12-09 09:43:57 -05002808
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002809 if (!phys || phys->enable_state == SDE_ENC_DISABLED)
2810 continue;
2811
Clarence Ip110d15c2016-08-16 14:44:41 -04002812 ctl = phys->hw_ctl;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002813 if (!ctl)
Clarence Ip110d15c2016-08-16 14:44:41 -04002814 continue;
2815
Lloyd Atkinson8c50e152017-02-01 19:03:17 -05002816 if (phys->connector)
2817 topology = sde_connector_get_topology_name(
2818 phys->connector);
2819
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002820 /*
2821 * don't wait on ppsplit slaves or skipped encoders because
2822 * they dont receive irqs
2823 */
Lloyd Atkinson8c50e152017-02-01 19:03:17 -05002824 if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002825 phys->split_role == ENC_ROLE_SLAVE) &&
2826 phys->split_role != ENC_ROLE_SKIP)
Lloyd Atkinson8c50e152017-02-01 19:03:17 -05002827 set_bit(i, sde_enc->frame_busy_mask);
Ingrid Gallardo61210ea2017-10-17 17:29:31 -07002828
Clarence Ip8e69ad02016-12-09 09:43:57 -05002829 if (!phys->ops.needs_single_flush ||
2830 !phys->ops.needs_single_flush(phys))
Clarence Ip110d15c2016-08-16 14:44:41 -04002831 _sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
2832 else if (ctl->ops.get_pending_flush)
2833 pending_flush |= ctl->ops.get_pending_flush(ctl);
2834 }
2835
2836 /* for split flush, combine pending flush masks and send to master */
2837 if (pending_flush && sde_enc->cur_master) {
2838 _sde_encoder_trigger_flush(
2839 &sde_enc->base,
2840 sde_enc->cur_master,
2841 pending_flush);
2842 }
2843
2844 _sde_encoder_trigger_start(sde_enc->cur_master);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002845
2846 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Clarence Ip110d15c2016-08-16 14:44:41 -04002847}
2848
/*
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - for PPSPLIT
 *	topologies, swap the two phys encoders' interface assignments when the
 *	dirty region covers only the right half of the panel, and swap them
 *	back on any left-only or full update.
 * @drm_enc:		base drm encoder
 * @affected_displays:	bitmask of phys encoders with updates this frame;
 *			may be rewritten to BIT(0) (master only) for ppsplit
 * @num_active_phys:	number of bits set in @affected_displays
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	/* only PPSPLIT needs this workaround; all other topologies exit */
	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	/* sde_enc->intfs_swapped tracks the current swap state across frames */
	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
2907
/*
 * _sde_encoder_update_master - reassign split roles (solo/master/slave/skip)
 *	to each phys encoder based on which displays have updates this frame,
 *	and update sde_enc->cur_master accordingly. Only relevant for multi-phys
 *	(split) configurations; single-phys encoders return immediately.
 * @drm_enc:	base drm encoder
 * @params:	kickoff params carrying the affected_displays bitmask
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* roles are fixed for single phys encoder configurations */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/*
		 * First active phys becomes SOLO (if alone) or MASTER;
		 * subsequent active ones are SLAVEs; inactive ones are
		 * SKIPped entirely this frame.
		 */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
2968
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05302969bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07002970{
2971 struct sde_encoder_virt *sde_enc;
2972 struct msm_display_info *disp_info;
2973
2974 if (!drm_enc) {
2975 SDE_ERROR("invalid encoder\n");
2976 return false;
2977 }
2978
2979 sde_enc = to_sde_encoder_virt(drm_enc);
2980 disp_info = &sde_enc->disp_info;
2981
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05302982 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07002983}
2984
Dhaval Patel0e558f42017-04-30 00:51:40 -07002985void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
2986{
2987 struct sde_encoder_virt *sde_enc;
2988 struct sde_encoder_phys *phys;
2989 unsigned int i;
2990 struct sde_hw_ctl *ctl;
2991 struct msm_display_info *disp_info;
2992
2993 if (!drm_enc) {
2994 SDE_ERROR("invalid encoder\n");
2995 return;
2996 }
2997 sde_enc = to_sde_encoder_virt(drm_enc);
2998 disp_info = &sde_enc->disp_info;
2999
3000 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3001 phys = sde_enc->phys_encs[i];
3002
3003 if (phys && phys->hw_ctl) {
3004 ctl = phys->hw_ctl;
3005 if (ctl->ops.clear_pending_flush)
3006 ctl->ops.clear_pending_flush(ctl);
3007
3008 /* update only for command mode primary ctl */
3009 if ((phys == sde_enc->cur_master) &&
3010 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3011 && ctl->ops.trigger_pending)
3012 ctl->ops.trigger_pending(ctl);
3013 }
3014 }
3015}
3016
Ping Li8430ee12017-02-24 14:14:44 -08003017static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3018{
3019 void *dither_cfg;
3020 int ret = 0;
3021 size_t len = 0;
3022 enum sde_rm_topology_name topology;
3023
3024 if (!phys || !phys->connector || !phys->hw_pp ||
3025 !phys->hw_pp->ops.setup_dither)
3026 return;
3027 topology = sde_connector_get_topology_name(phys->connector);
3028 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3029 (phys->split_role == ENC_ROLE_SLAVE))
3030 return;
3031
3032 ret = sde_connector_get_dither_cfg(phys->connector,
3033 phys->connector->state, &dither_cfg, &len);
3034 if (!ret)
3035 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
3036}
3037
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003038static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3039 struct drm_display_mode *mode)
3040{
3041 u64 pclk_rate;
3042 u32 pclk_period;
3043 u32 line_time;
3044
3045 /*
3046 * For linetime calculation, only operate on master encoder.
3047 */
3048 if (!sde_enc->cur_master)
3049 return 0;
3050
3051 if (!sde_enc->cur_master->ops.get_line_count) {
3052 SDE_ERROR("get_line_count function not defined\n");
3053 return 0;
3054 }
3055
3056 pclk_rate = mode->clock; /* pixel clock in kHz */
3057 if (pclk_rate == 0) {
3058 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3059 return 0;
3060 }
3061
3062 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3063 if (pclk_period == 0) {
3064 SDE_ERROR("pclk period is 0\n");
3065 return 0;
3066 }
3067
3068 /*
3069 * Line time calculation based on Pixel clock and HTOTAL.
3070 * Final unit is in ns.
3071 */
3072 line_time = (pclk_period * mode->htotal) / 1000;
3073 if (line_time == 0) {
3074 SDE_ERROR("line time calculation is 0\n");
3075 return 0;
3076 }
3077
3078 SDE_DEBUG_ENC(sde_enc,
3079 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3080 pclk_rate, pclk_period, line_time);
3081
3082 return line_time;
3083}
3084
/*
 * _sde_encoder_wakeup_time - estimate the absolute time of the next vsync
 *	based on the current scanout line position and the per-line time
 * @drm_enc:	base drm encoder; must have a crtc with a valid state
 * @wakeup_time: output, absolute ktime of the estimated next vsync
 * Returns: 0 on success, -EINVAL if any required state is missing or the
 *	computation degenerates to zero
 */
static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct sde_encoder_virt *sde_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	sde_enc = to_sde_encoder_virt(drm_enc);

	if (!drm_enc->crtc || !drm_enc->crtc->state) {
		SDE_ERROR("crtc/crtc state object is NULL\n");
		return -EINVAL;
	}
	mode = &drm_enc->crtc->state->adjusted_mode;

	/*
	 * A zero return also covers the no-cur_master and missing
	 * get_line_count cases, so cur_master is safe to deref below.
	 */
	line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);

	vtotal = mode->vtotal;
	/* if already past vtotal (e.g. in blanking), assume a full frame */
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	SDE_DEBUG_ENC(sde_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}
3131
3132static void sde_encoder_vsync_event_handler(unsigned long data)
3133{
3134 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3135 struct sde_encoder_virt *sde_enc;
3136 struct msm_drm_private *priv;
3137 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003138
3139 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
3140 !drm_enc->crtc) {
3141 SDE_ERROR("invalid parameters\n");
3142 return;
3143 }
3144
3145 sde_enc = to_sde_encoder_virt(drm_enc);
3146 priv = drm_enc->dev->dev_private;
3147
3148 if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3149 SDE_ERROR("invalid crtc index\n");
3150 return;
3151 }
3152 event_thread = &priv->event_thread[drm_enc->crtc->index];
3153 if (!event_thread) {
3154 SDE_ERROR("event_thread not found for crtc:%d\n",
3155 drm_enc->crtc->index);
3156 return;
3157 }
3158
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303159 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003160 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003161}
3162
/*
 * sde_encoder_vsync_event_work_handler - event-thread half of the vsync
 *	event timer: with a temporary power vote held, query whether
 *	autorefresh is enabled on the master, and if so re-arm the vsync
 *	event timer for the next estimated vsync.
 * @work:	embedded kthread_work inside struct sde_encoder_virt
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* hardware must be powered to read the autorefresh status */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
						sde_enc->cur_master);

	/* drop the vote before the (sleepable) timer re-arm below */
	_sde_encoder_power_enable(sde_enc, false);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		return;

	if (_sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time))
		return;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
}
3201
/*
 * sde_encoder_prepare_for_kickoff - prepare all phys encoders for the next
 *	frame kickoff: per-phys prepare (may block on the previous frame),
 *	dither setup, resource-control KICKOFF event, optional hw reset,
 *	master/role update, ROI programming, connector pre-kickoff and DSC
 *	setup.
 * @drm_enc:	base drm encoder
 * @params:	kickoff parameters (affected displays, etc.)
 * Returns: 0 on success; on failure, the LAST error seen is returned, but
 *	all preparation steps are still attempted for every phys encoder.
 */
int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;
	int rc, ret = 0;

	if (!drm_enc || !params) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff) {
				rc = phys->ops.prepare_for_kickoff(
						phys, params);
				/* record error but keep preparing the rest */
				if (rc)
					ret = rc;
			}
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	/* roles must be settled before ROI/DSC programming below */
	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
			ret = rc;
		}
	}

	if (_sde_encoder_is_dsc_enabled(drm_enc)) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
			ret = rc;
		}
	}

	return ret;
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003274
Clarence Ip662698e2017-09-12 18:34:16 -04003275/**
3276 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3277 * with the specified encoder, and unstage all pipes from it
3278 * @encoder: encoder pointer
3279 * Returns: 0 on success
3280 */
3281static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
3282{
3283 struct sde_encoder_virt *sde_enc;
3284 struct sde_encoder_phys *phys;
3285 unsigned int i;
3286 int rc = 0;
3287
3288 if (!drm_enc) {
3289 SDE_ERROR("invalid encoder\n");
3290 return -EINVAL;
3291 }
3292
3293 sde_enc = to_sde_encoder_virt(drm_enc);
3294
3295 SDE_ATRACE_BEGIN("encoder_release_lm");
3296 SDE_DEBUG_ENC(sde_enc, "\n");
3297
3298 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3299 phys = sde_enc->phys_encs[i];
3300 if (!phys)
3301 continue;
3302
3303 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
3304
3305 rc = sde_encoder_helper_reset_mixers(phys, NULL);
3306 if (rc)
3307 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
3308 }
3309
3310 SDE_ATRACE_END("encoder_release_lm");
3311 return rc;
3312}
3313
/*
 * sde_encoder_kickoff - trigger the prepared frame on the hardware: arm the
 *	frame-done watchdog, optionally flush a "no pipes" commit on error,
 *	fire the flush/start triggers, run per-phys post-kickoff hooks, and
 *	re-arm the vsync event timer for DSI panels.
 * @drm_enc:	base drm encoder
 * @is_error:	true if the caller detected a failure during prepare; the
 *		hardware is reset to an empty configuration to release buffers
 */
void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_ATRACE_BEGIN("encoder_kickoff");
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	/* watchdog: frame must complete within SDE_FRAME_DONE_TIMEOUT frames */
	atomic_set(&sde_enc->frame_done_timeout,
			SDE_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&sde_enc->frame_done_timer, jiffies +
		((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));

	/* create a 'no pipes' commit to release buffers on errors */
	if (is_error)
		_sde_encoder_reset_ctl_hw(drm_enc);

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	/* DSI only: schedule the vsync event work near the next vsync */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
			!_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
		mod_timer(&sde_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	SDE_ATRACE_END("encoder_kickoff");
}
3359
/*
 * sde_encoder_helper_reset_mixers - clear all blend stages on the phys
 *	encoder's CTL and flush every layer mixer reserved by this encoder,
 *	optionally reprogramming the (single) mixer output dimensions to
 *	match a frame buffer.
 * @phys_enc:	physical encoder; NOTE(review): phys_enc->hw_ctl is
 *		dereferenced without a NULL check below — callers appear to
 *		guarantee it is set, confirm before reuse in new paths
 * @fb:		optional frame buffer whose width/height are applied to the
 *		first mixer found; NULL to leave mixer dimensions untouched
 * Returns: 0 on success, -EINVAL on bad args, -EFAULT if no mixer is found
 */
int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
		struct drm_framebuffer *fb)
{
	struct drm_encoder *drm_enc;
	struct sde_hw_mixer_cfg mixer;
	struct sde_rm_hw_iter lm_iter;
	bool lm_valid = false;

	if (!phys_enc || !phys_enc->parent) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	drm_enc = phys_enc->parent;
	memset(&mixer, 0, sizeof(mixer));

	/* reset associated CTL/LMs */
	if (phys_enc->hw_ctl->ops.clear_all_blendstages)
		phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);

	/* walk every layer mixer the resource manager gave this encoder */
	sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
	while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
		struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;

		if (!hw_lm)
			continue;

		/* need to flush LM to remove it */
		if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
				phys_enc->hw_ctl->ops.update_pending_flush)
			phys_enc->hw_ctl->ops.update_pending_flush(
					phys_enc->hw_ctl,
					phys_enc->hw_ctl->ops.get_bitmask_mixer(
					phys_enc->hw_ctl, hw_lm->idx));

		if (fb) {
			/* assume a single LM if targeting a frame buffer */
			if (lm_valid)
				continue;

			mixer.out_height = fb->height;
			mixer.out_width = fb->width;

			if (hw_lm->ops.setup_mixer_out)
				hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
		}

		lm_valid = true;

		/* only enable border color on LM */
		if (phys_enc->hw_ctl->ops.setup_blendstage)
			phys_enc->hw_ctl->ops.setup_blendstage(
					phys_enc->hw_ctl, hw_lm->idx, NULL);
	}

	if (!lm_valid) {
		SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
		return -EFAULT;
	}
	return 0;
}
3421
Lloyd Atkinsone123c172017-02-27 13:19:08 -05003422void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
3423{
3424 struct sde_encoder_virt *sde_enc;
3425 struct sde_encoder_phys *phys;
3426 int i;
3427
3428 if (!drm_enc) {
3429 SDE_ERROR("invalid encoder\n");
3430 return;
3431 }
3432 sde_enc = to_sde_encoder_virt(drm_enc);
3433
3434 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3435 phys = sde_enc->phys_encs[i];
3436 if (phys && phys->ops.prepare_commit)
3437 phys->ops.prepare_commit(phys);
3438 }
3439}
3440
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07003441#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003442static int _sde_encoder_status_show(struct seq_file *s, void *data)
3443{
3444 struct sde_encoder_virt *sde_enc;
3445 int i;
3446
3447 if (!s || !s->private)
3448 return -EINVAL;
3449
3450 sde_enc = s->private;
3451
3452 mutex_lock(&sde_enc->enc_lock);
3453 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3454 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3455
3456 if (!phys)
3457 continue;
3458
3459 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
3460 phys->intf_idx - INTF_0,
3461 atomic_read(&phys->vsync_cnt),
3462 atomic_read(&phys->underrun_cnt));
3463
3464 switch (phys->intf_mode) {
3465 case INTF_MODE_VIDEO:
3466 seq_puts(s, "mode: video\n");
3467 break;
3468 case INTF_MODE_CMD:
3469 seq_puts(s, "mode: command\n");
3470 break;
3471 case INTF_MODE_WB_BLOCK:
3472 seq_puts(s, "mode: wb block\n");
3473 break;
3474 case INTF_MODE_WB_LINE:
3475 seq_puts(s, "mode: wb line\n");
3476 break;
3477 default:
3478 seq_puts(s, "mode: ???\n");
3479 break;
3480 }
3481 }
3482 mutex_unlock(&sde_enc->enc_lock);
3483
3484 return 0;
3485}
3486
/* seq_file open hook for the "status" node; i_private holds the encoder */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
3492
Dhaval Patelf9245d62017-03-28 16:24:00 -07003493static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303494 const char __user *user_buf, size_t count, loff_t *ppos)
3495{
3496 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003497 int i = 0, rc;
3498 char buf[MISR_BUFF_SIZE + 1];
3499 size_t buff_copy;
3500 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303501
Dhaval Patelf9245d62017-03-28 16:24:00 -07003502 if (!file || !file->private_data)
3503 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303504
Dhaval Patelf9245d62017-03-28 16:24:00 -07003505 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303506
Dhaval Patelf9245d62017-03-28 16:24:00 -07003507 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
3508 if (copy_from_user(buf, user_buf, buff_copy))
3509 return -EINVAL;
3510
3511 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303512
3513 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07003514 return -EINVAL;
3515
3516 rc = _sde_encoder_power_enable(sde_enc, true);
3517 if (rc)
3518 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303519
3520 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003521 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07003522 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303523 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3524 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3525
Dhaval Patelf9245d62017-03-28 16:24:00 -07003526 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303527 continue;
3528
Dhaval Patelf9245d62017-03-28 16:24:00 -07003529 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303530 }
3531 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003532 _sde_encoder_power_enable(sde_enc, false);
3533
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303534 return count;
3535}
3536
/*
 * _sde_encoder_misr_read - debugfs read handler for "misr_data": reports
 *	"disabled", "unsupported", or one MISR signature per interface,
 *	collected under a temporary power vote
 * @file:	debugfs file whose private_data is the virtual encoder
 * @user_buff:	userspace destination buffer
 * @count:	size of @user_buff
 * @ppos:	file offset; non-zero reads return 0 (single-shot read)
 * Returns: bytes copied, 0 if @count is too small or on repeated reads,
 *	or a negative error code
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/*
		 * NOTE(review): this reports "unsupported" whenever ANY
		 * capability bit other than VID_MODE is set (e.g. a panel
		 * that also advertises command mode) — confirm the intended
		 * condition was not !(capabilities & VID_MODE).
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* report nothing rather than truncate into a too-small user buffer */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
3598
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003599static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003600{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003601 struct sde_encoder_virt *sde_enc;
3602 struct msm_drm_private *priv;
3603 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07003604 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003605
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003606 static const struct file_operations debugfs_status_fops = {
3607 .open = _sde_encoder_debugfs_status_open,
3608 .read = seq_read,
3609 .llseek = seq_lseek,
3610 .release = single_release,
3611 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303612
3613 static const struct file_operations debugfs_misr_fops = {
3614 .open = simple_open,
3615 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003616 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303617 };
3618
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003619 char name[SDE_NAME_SIZE];
3620
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003621 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003622 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003623 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003624 }
3625
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003626 sde_enc = to_sde_encoder_virt(drm_enc);
3627 priv = drm_enc->dev->dev_private;
3628 sde_kms = to_sde_kms(priv->kms);
3629
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003630 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
3631
3632 /* create overall sub-directory for the encoder */
3633 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07003634 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003635 if (!sde_enc->debugfs_root)
3636 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303637
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003638 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003639 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003640 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303641
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003642 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003643 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003644
Alan Kwongf2debb02017-04-05 06:19:29 -07003645 for (i = 0; i < sde_enc->num_phys_encs; i++)
3646 if (sde_enc->phys_encs[i] &&
3647 sde_enc->phys_encs[i]->ops.late_register)
3648 sde_enc->phys_encs[i]->ops.late_register(
3649 sde_enc->phys_encs[i],
3650 sde_enc->debugfs_root);
3651
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003652 return 0;
3653}
3654
3655static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
3656{
3657 struct sde_encoder_virt *sde_enc;
3658
3659 if (!drm_enc)
3660 return;
3661
3662 sde_enc = to_sde_encoder_virt(drm_enc);
3663 debugfs_remove_recursive(sde_enc->debugfs_root);
3664}
#else
/* CONFIG_DEBUG_FS disabled: no-op stubs so callers need no conditionals */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}

static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
#endif
3675
/* late_register hook: create this encoder's debugfs entries (no-op stub
 * returns 0 when CONFIG_DEBUG_FS is disabled)
 */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
3680
/* early_unregister hook: remove this encoder's debugfs entries */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
3685
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003686static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04003687 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003688 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003689 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003690{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003691 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003692
Clarence Ip19af1362016-09-23 14:57:51 -04003693 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003694
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003695 /*
3696 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
3697 * in this function, check up-front.
3698 */
3699 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
3700 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003701 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003702 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003703 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003704 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003705
Clarence Ipa4039322016-07-15 16:23:59 -04003706 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003707 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003708
3709 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003710 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003711 PTR_ERR(enc));
3712 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3713 }
3714
3715 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3716 ++sde_enc->num_phys_encs;
3717 }
3718
Clarence Ipa4039322016-07-15 16:23:59 -04003719 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003720 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003721
3722 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003723 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003724 PTR_ERR(enc));
3725 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3726 }
3727
3728 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3729 ++sde_enc->num_phys_encs;
3730 }
3731
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003732 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003733}
3734
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003735static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
3736 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04003737{
3738 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04003739
Clarence Ip19af1362016-09-23 14:57:51 -04003740 if (!sde_enc) {
3741 SDE_ERROR("invalid encoder\n");
3742 return -EINVAL;
3743 }
3744
3745 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04003746
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003747 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003748 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003749 sde_enc->num_phys_encs);
3750 return -EINVAL;
3751 }
3752
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003753 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003754
3755 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003756 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003757 PTR_ERR(enc));
3758 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3759 }
3760
3761 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3762 ++sde_enc->num_phys_encs;
3763
3764 return 0;
3765}
3766
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003767static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003768 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04003769 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003770 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003771{
3772 int ret = 0;
3773 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003774 enum sde_intf_type intf_type;
3775 struct sde_encoder_virt_ops parent_ops = {
3776 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07003777 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04003778 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003779 };
3780 struct sde_enc_phys_init_params phys_params;
3781
Clarence Ip19af1362016-09-23 14:57:51 -04003782 if (!sde_enc || !sde_kms) {
3783 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
3784 sde_enc != 0, sde_kms != 0);
3785 return -EINVAL;
3786 }
3787
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003788 memset(&phys_params, 0, sizeof(phys_params));
3789 phys_params.sde_kms = sde_kms;
3790 phys_params.parent = &sde_enc->base;
3791 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003792 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003793
Clarence Ip19af1362016-09-23 14:57:51 -04003794 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003795
Clarence Ipa4039322016-07-15 16:23:59 -04003796 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003797 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
3798 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04003799 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003800 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3801 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07003802 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
3803 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3804 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04003805 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
3806 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
3807 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003808 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04003809 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003810 return -EINVAL;
3811 }
3812
Clarence Ip88270a62016-06-26 10:09:34 -04003813 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003814
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003815 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
3816
Clarence Ip19af1362016-09-23 14:57:51 -04003817 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003818
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003819 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
3820 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003821 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
3822
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003823 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04003824 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003825 /*
3826 * Left-most tile is at index 0, content is controller id
3827 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
3828 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
3829 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003830 u32 controller_id = disp_info->h_tile_instance[i];
3831
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003832 if (disp_info->num_of_h_tiles > 1) {
3833 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003834 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003835 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003836 phys_params.split_role = ENC_ROLE_SLAVE;
3837 } else {
3838 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003839 }
3840
Clarence Ip19af1362016-09-23 14:57:51 -04003841 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003842 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003843
Alan Kwongbb27c092016-07-20 16:41:25 -04003844 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003845 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003846 phys_params.wb_idx = sde_encoder_get_wb(
3847 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04003848 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003849 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003850 SDE_ERROR_ENC(sde_enc,
3851 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003852 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003853 ret = -EINVAL;
3854 }
Alan Kwongbb27c092016-07-20 16:41:25 -04003855 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003856 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003857 phys_params.intf_idx = sde_encoder_get_intf(
3858 sde_kms->catalog, intf_type,
3859 controller_id);
3860 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003861 SDE_ERROR_ENC(sde_enc,
3862 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003863 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003864 ret = -EINVAL;
3865 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003866 }
3867
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003868 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04003869 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003870 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
3871 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003872 else
3873 ret = sde_encoder_virt_add_phys_encs(
3874 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003875 sde_enc,
3876 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003877 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04003878 SDE_ERROR_ENC(sde_enc,
3879 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003880 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003881 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08003882
3883 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3884 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3885
3886 if (phys) {
3887 atomic_set(&phys->vsync_cnt, 0);
3888 atomic_set(&phys->underrun_cnt, 0);
3889 }
3890 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003891 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003892
3893 return ret;
3894}
3895
Alan Kwong628d19e2016-10-31 13:50:13 -04003896static void sde_encoder_frame_done_timeout(unsigned long data)
3897{
3898 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3899 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3900 struct msm_drm_private *priv;
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003901 u32 event;
Alan Kwong628d19e2016-10-31 13:50:13 -04003902
3903 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3904 SDE_ERROR("invalid parameters\n");
3905 return;
3906 }
3907 priv = drm_enc->dev->dev_private;
3908
3909 if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003910 SDE_DEBUG_ENC(sde_enc, "invalid timeout\n");
3911 SDE_EVT32(DRMID(drm_enc), sde_enc->frame_busy_mask[0], 0);
Alan Kwong628d19e2016-10-31 13:50:13 -04003912 return;
3913 } else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003914 SDE_ERROR_ENC(sde_enc, "invalid timeout\n");
Alan Kwong628d19e2016-10-31 13:50:13 -04003915 SDE_EVT32(DRMID(drm_enc), 0, 1);
3916 return;
3917 }
3918
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003919 SDE_ERROR_ENC(sde_enc, "frame done timeout\n");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003920
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003921 event = SDE_ENCODER_FRAME_EVENT_ERROR;
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003922 SDE_EVT32(DRMID(drm_enc), event);
3923 sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04003924}
3925
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003926static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
3927 .mode_set = sde_encoder_virt_mode_set,
3928 .disable = sde_encoder_virt_disable,
3929 .enable = sde_encoder_virt_enable,
3930 .atomic_check = sde_encoder_virt_atomic_check,
3931};
3932
/* drm_encoder_funcs vtable: lifecycle plus debugfs register/unregister hooks */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
3938
Clarence Ip3649f8b2016-10-31 09:59:44 -04003939struct drm_encoder *sde_encoder_init(
3940 struct drm_device *dev,
3941 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003942{
3943 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04003944 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003945 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003946 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003947 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08003948 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003949 int ret = 0;
3950
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003951 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
3952 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003953 ret = -ENOMEM;
3954 goto fail;
3955 }
3956
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003957 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003958 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
3959 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003960 if (ret)
3961 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003962
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003963 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003964 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003965 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003966 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003967 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003968
Alan Kwong628d19e2016-10-31 13:50:13 -04003969 atomic_set(&sde_enc->frame_done_timeout, 0);
3970 setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
3971 (unsigned long) sde_enc);
3972
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003973 if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
3974 disp_info->is_primary)
3975 setup_timer(&sde_enc->vsync_event_timer,
3976 sde_encoder_vsync_event_handler,
3977 (unsigned long)sde_enc);
3978
Dhaval Patel020f7e122016-11-15 14:39:18 -08003979 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
3980 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003981 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08003982 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07003983 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08003984 PTR_ERR(sde_enc->rsc_client));
3985 sde_enc->rsc_client = NULL;
3986 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003987
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003988 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04003989 kthread_init_delayed_work(&sde_enc->delayed_off_work,
3990 sde_encoder_off_work);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003991 sde_enc->idle_timeout = IDLE_TIMEOUT;
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07003992 sde_enc->vblank_enabled = false;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003993
3994 kthread_init_work(&sde_enc->vsync_event_work,
3995 sde_encoder_vsync_event_work_handler);
3996
Dhaval Patel020f7e122016-11-15 14:39:18 -08003997 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
3998
Clarence Ip19af1362016-09-23 14:57:51 -04003999 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004000
4001 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004002
4003fail:
Clarence Ip19af1362016-09-23 14:57:51 -04004004 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004005 if (drm_enc)
4006 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004007
4008 return ERR_PTR(ret);
4009}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004010
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004011int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4012 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004013{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004014 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004015 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004016 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004017
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004018 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004019 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004020 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004021 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004022 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004023 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004024
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004025 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4026 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004027
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004028 switch (event) {
4029 case MSM_ENC_COMMIT_DONE:
4030 fn_wait = phys->ops.wait_for_commit_done;
4031 break;
4032 case MSM_ENC_TX_COMPLETE:
4033 fn_wait = phys->ops.wait_for_tx_complete;
4034 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004035 case MSM_ENC_VBLANK:
4036 fn_wait = phys->ops.wait_for_vblank;
4037 break;
4038 default:
4039 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4040 event);
4041 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004042 };
4043
4044 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004045 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004046 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004047 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004048 if (ret)
4049 return ret;
4050 }
4051 }
4052
4053 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004054}
4055
Alan Kwong67a3f792016-11-01 23:16:53 -04004056enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4057{
4058 struct sde_encoder_virt *sde_enc = NULL;
4059 int i;
4060
4061 if (!encoder) {
4062 SDE_ERROR("invalid encoder\n");
4063 return INTF_MODE_NONE;
4064 }
4065 sde_enc = to_sde_encoder_virt(encoder);
4066
4067 if (sde_enc->cur_master)
4068 return sde_enc->cur_master->intf_mode;
4069
4070 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4071 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4072
4073 if (phys)
4074 return phys->intf_mode;
4075 }
4076
4077 return INTF_MODE_NONE;
4078}