blob: f83949eda182b6d20588353fe5874eb89c7b9852 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
/*
 * Logging helpers for the virtual encoder: prefix every message with the
 * encoder's DRM object id, or -1 when the encoder pointer is NULL.
 */
#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/*
 * Logging helpers for physical encoders: prefix with the parent encoder id,
 * interface index and pingpong index (-1 for any that are unavailable).
 */
#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching
 * plan is to create all possible physical encoder types, and switch between
 * them at runtime
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

/* maximum pingpong/DSC hardware blocks a single virtual encoder can use */
#define MAX_CHANNELS_PER_ENC 2

/* size of the text buffer used for MISR control input (presumably the
 * debugfs misr interface — verify against the debugfs handlers)
 */
#define MISR_BUFF_SIZE 256

#define IDLE_SHORT_TIMEOUT 1

/* NOTE(review): "TOLERENCE" is a misspelling of "TOLERANCE"; names kept
 * unchanged to avoid churn at call sites.
 */
#define FAULT_TOLERENCE_DELTA_IN_MS 2

#define FAULT_TOLERENCE_WAIT_IN_MS 5

/* Maximum number of VSYNC wait attempts for RSC state transition */
#define MAX_RSC_WAIT 5
82
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, sets RSC to IDLE and
 *	leaves the RC STATE in the PRE_OFF state.
 *	It should be followed by the STOP event as part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @SDE_ENC_RC_EVENT_PRE_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that a seamless mode switch is in progress. A client
 *	needs to turn off only the irq - leave clocks ON to reduce the mode
 *	switch latency.
 * @SDE_ENC_RC_EVENT_POST_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that seamless mode switch is complete and resources are
 *	acquired. Clients wants to turn on the irq again and update the rsc
 *	with new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and request RSC with IDLE state
 *	and change the resource state to IDLE.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE
};
134
/*
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
150
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb: callback handler for frame event
 * @crtc_frame_event_cb_data: callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @vsync_event_timer:	vsync timer
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @topology:		topology of the display
 * @vblank_enabled:	boolean to track userspace vblank vote
 * @rsc_config:		rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @idle_timeout:	idle timeout duration in milliseconds
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;

	u32 idle_timeout;
};
254
/* Upcast from the embedded drm_encoder to the containing virtual encoder */
#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700256
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700257static struct drm_connector_state *_sde_encoder_get_conn_state(
258 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800259{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700260 struct msm_drm_private *priv;
261 struct sde_kms *sde_kms;
262 struct list_head *connector_list;
263 struct drm_connector *conn_iter;
264
265 if (!drm_enc) {
266 SDE_ERROR("invalid argument\n");
267 return NULL;
268 }
269
270 priv = drm_enc->dev->dev_private;
271 sde_kms = to_sde_kms(priv->kms);
272 connector_list = &sde_kms->dev->mode_config.connector_list;
273
274 list_for_each_entry(conn_iter, connector_list, head)
275 if (conn_iter->encoder == drm_enc)
276 return conn_iter->state;
277
278 return NULL;
279}
280
281static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
282 struct msm_mode_info *mode_info)
283{
284 struct drm_connector_state *conn_state;
285
286 if (!drm_enc || !mode_info) {
287 SDE_ERROR("invalid arguments\n");
288 return -EINVAL;
289 }
290
291 conn_state = _sde_encoder_get_conn_state(drm_enc);
292 if (!conn_state) {
293 SDE_ERROR("invalid connector state for the encoder: %d\n",
294 drm_enc->base.id);
295 return -EINVAL;
296 }
297
298 return sde_connector_get_mode_info(conn_state, mode_info);
299}
300
301static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
302{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400303 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700304 struct msm_mode_info mode_info;
305 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400306
307 if (!drm_enc)
308 return false;
309
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700310 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
311 if (rc) {
312 SDE_ERROR("failed to get mode info, enc: %d\n",
313 drm_enc->base.id);
314 return false;
315 }
316
317 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800318
319 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
320}
321
Dhaval Patele17e0ee2017-08-23 18:01:42 -0700322void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc, u32 idle_timeout)
323{
324 struct sde_encoder_virt *sde_enc;
325
326 if (!drm_enc)
327 return;
328
329 sde_enc = to_sde_encoder_virt(drm_enc);
330 sde_enc->idle_timeout = idle_timeout;
331}
332
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400333bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
334{
335 enum sde_rm_topology_name topology;
336 struct sde_encoder_virt *sde_enc;
337 struct drm_connector *drm_conn;
338
339 if (!drm_enc)
340 return false;
341
342 sde_enc = to_sde_encoder_virt(drm_enc);
343 if (!sde_enc->cur_master)
344 return false;
345
346 drm_conn = sde_enc->cur_master->connector;
347 if (!drm_conn)
348 return false;
349
350 topology = sde_connector_get_topology_name(drm_conn);
351 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
352 return true;
353
354 return false;
355}
356
Dhaval Patelf9245d62017-03-28 16:24:00 -0700357static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
358 bool enable)
359{
360 struct drm_encoder *drm_enc;
361 struct msm_drm_private *priv;
362 struct sde_kms *sde_kms;
363
364 if (!sde_enc) {
365 SDE_ERROR("invalid sde enc\n");
366 return -EINVAL;
367 }
368
369 drm_enc = &sde_enc->base;
370 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
371 SDE_ERROR("drm device invalid\n");
372 return -EINVAL;
373 }
374
375 priv = drm_enc->dev->dev_private;
376 if (!priv->kms) {
377 SDE_ERROR("invalid kms\n");
378 return -EINVAL;
379 }
380
381 sde_kms = to_sde_kms(priv->kms);
382
383 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
384 enable);
385}
386
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500387void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
388 enum sde_intr_idx intr_idx)
389{
390 SDE_EVT32(DRMID(phys_enc->parent),
391 phys_enc->intf_idx - INTF_0,
392 phys_enc->hw_pp->idx - PINGPONG_0,
393 intr_idx);
394 SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);
395
396 if (phys_enc->parent_ops.handle_frame_done)
397 phys_enc->parent_ops.handle_frame_done(
398 phys_enc->parent, phys_enc,
399 SDE_ENCODER_FRAME_EVENT_ERROR);
400}
401
402int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
403 enum sde_intr_idx intr_idx,
404 struct sde_encoder_wait_info *wait_info)
405{
406 struct sde_encoder_irq *irq;
407 u32 irq_status;
408 int ret;
409
410 if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
411 SDE_ERROR("invalid params\n");
412 return -EINVAL;
413 }
414 irq = &phys_enc->irq[intr_idx];
415
416 /* note: do master / slave checking outside */
417
418 /* return EWOULDBLOCK since we know the wait isn't necessary */
419 if (phys_enc->enable_state == SDE_ENC_DISABLED) {
420 SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400421 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
422 irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500423 return -EWOULDBLOCK;
424 }
425
426 if (irq->irq_idx < 0) {
427 SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
428 irq->name, irq->hw_idx);
429 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
430 irq->irq_idx);
431 return 0;
432 }
433
434 SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
435 atomic_read(wait_info->atomic_cnt));
Dhaval Patela5f75952017-07-25 11:17:41 -0700436 SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
437 irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
438 atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500439
440 ret = sde_encoder_helper_wait_event_timeout(
441 DRMID(phys_enc->parent),
442 irq->hw_idx,
443 wait_info);
444
445 if (ret <= 0) {
446 irq_status = sde_core_irq_read(phys_enc->sde_kms,
447 irq->irq_idx, true);
448 if (irq_status) {
449 unsigned long flags;
450
Dhaval Patela5f75952017-07-25 11:17:41 -0700451 SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
452 irq->hw_idx, irq->irq_idx,
453 phys_enc->hw_pp->idx - PINGPONG_0,
454 atomic_read(wait_info->atomic_cnt));
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500455 SDE_DEBUG_PHYS(phys_enc,
456 "done but irq %d not triggered\n",
457 irq->irq_idx);
458 local_irq_save(flags);
459 irq->cb.func(phys_enc, irq->irq_idx);
460 local_irq_restore(flags);
461 ret = 0;
462 } else {
463 ret = -ETIMEDOUT;
Dhaval Patela5f75952017-07-25 11:17:41 -0700464 SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
465 irq->hw_idx, irq->irq_idx,
466 phys_enc->hw_pp->idx - PINGPONG_0,
467 atomic_read(wait_info->atomic_cnt), irq_status,
468 SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500469 }
470 } else {
471 ret = 0;
Dhaval Patela5f75952017-07-25 11:17:41 -0700472 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
473 irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
474 atomic_read(wait_info->atomic_cnt));
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500475 }
476
Dhaval Patela5f75952017-07-25 11:17:41 -0700477 SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
478 irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
479 atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500480
481 return ret;
482}
483
484int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
485 enum sde_intr_idx intr_idx)
486{
487 struct sde_encoder_irq *irq;
488 int ret = 0;
489
490 if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
491 SDE_ERROR("invalid params\n");
492 return -EINVAL;
493 }
494 irq = &phys_enc->irq[intr_idx];
495
496 if (irq->irq_idx >= 0) {
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530497 SDE_DEBUG_PHYS(phys_enc,
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500498 "skipping already registered irq %s type %d\n",
499 irq->name, irq->intr_type);
500 return 0;
501 }
502
503 irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
504 irq->intr_type, irq->hw_idx);
505 if (irq->irq_idx < 0) {
506 SDE_ERROR_PHYS(phys_enc,
507 "failed to lookup IRQ index for %s type:%d\n",
508 irq->name, irq->intr_type);
509 return -EINVAL;
510 }
511
512 ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
513 &irq->cb);
514 if (ret) {
515 SDE_ERROR_PHYS(phys_enc,
516 "failed to register IRQ callback for %s\n",
517 irq->name);
518 irq->irq_idx = -EINVAL;
519 return ret;
520 }
521
522 ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
523 if (ret) {
524 SDE_ERROR_PHYS(phys_enc,
525 "enable IRQ for intr:%s failed, irq_idx %d\n",
526 irq->name, irq->irq_idx);
527
528 sde_core_irq_unregister_callback(phys_enc->sde_kms,
529 irq->irq_idx, &irq->cb);
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400530
531 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
532 irq->irq_idx, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500533 irq->irq_idx = -EINVAL;
534 return ret;
535 }
536
537 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
538 SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
539 irq->name, irq->irq_idx);
540
541 return ret;
542}
543
544int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
545 enum sde_intr_idx intr_idx)
546{
547 struct sde_encoder_irq *irq;
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400548 int ret;
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500549
550 if (!phys_enc) {
551 SDE_ERROR("invalid encoder\n");
552 return -EINVAL;
553 }
554 irq = &phys_enc->irq[intr_idx];
555
556 /* silently skip irqs that weren't registered */
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400557 if (irq->irq_idx < 0) {
558 SDE_ERROR(
559 "extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
560 DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
561 irq->irq_idx);
562 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
563 irq->irq_idx, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500564 return 0;
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400565 }
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500566
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400567 ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
568 if (ret)
569 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
570 irq->irq_idx, ret, SDE_EVTLOG_ERROR);
571
572 ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500573 &irq->cb);
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400574 if (ret)
575 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
576 irq->irq_idx, ret, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500577
578 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
579 SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);
580
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400581 irq->irq_idx = -EINVAL;
582
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500583 return 0;
584}
585
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400586void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400587 struct sde_encoder_hw_resources *hw_res,
588 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400589{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400590 struct sde_encoder_virt *sde_enc = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700591 struct msm_mode_info mode_info;
592 int rc, i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400593
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400594 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400595 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
596 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400597 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400598 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400599
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400600 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400601 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400602
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400603 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400604 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400605 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
606
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400607 for (i = 0; i < sde_enc->num_phys_encs; i++) {
608 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
609
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400610 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400611 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400612 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700613
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700614 /**
615 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
616 * called from atomic_check phase. Use the below API to get mode
617 * information of the temporary conn_state passed.
618 */
619 rc = sde_connector_get_mode_info(conn_state, &mode_info);
620 if (rc) {
621 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
622 return;
623 }
624
625 hw_res->topology = mode_info.topology;
Jeykumar Sankaran6f215d42017-09-12 16:15:23 -0700626 hw_res->is_primary = sde_enc->disp_info.is_primary;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400627}
628
Clarence Ip3649f8b2016-10-31 09:59:44 -0400629void sde_encoder_destroy(struct drm_encoder *drm_enc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400630{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400631 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400632 int i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400633
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400634 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -0400635 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400636 return;
637 }
638
639 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400640 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400641
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700642 mutex_lock(&sde_enc->enc_lock);
Dhaval Patel020f7e122016-11-15 14:39:18 -0800643 sde_rsc_client_destroy(sde_enc->rsc_client);
644
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700645 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400646 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
647
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400648 if (phys && phys->ops.destroy) {
649 phys->ops.destroy(phys);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400650 --sde_enc->num_phys_encs;
651 sde_enc->phys_encs[i] = NULL;
652 }
653 }
654
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700655 if (sde_enc->num_phys_encs)
Clarence Ip19af1362016-09-23 14:57:51 -0400656 SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400657 sde_enc->num_phys_encs);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700658 sde_enc->num_phys_encs = 0;
659 mutex_unlock(&sde_enc->enc_lock);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400660
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400661 drm_encoder_cleanup(drm_enc);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700662 mutex_destroy(&sde_enc->enc_lock);
663
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400664 kfree(sde_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700665}
666
/**
 * sde_encoder_helper_split_config - program split-pipe / pp-split hardware
 * @phys_enc: physical encoder being configured
 * @interface: interface (intf) index driven by this encoder
 *
 * Configures the MDP top-level split-pipe and pingpong-split registers
 * according to the encoder's split role (solo/master/slave) and the
 * topology reserved on the connector. Only applies to DSI connectors;
 * all other interface types return early untouched.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config is only meaningful for DSI panels */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is still zeroed here, so both writes disable the mode */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	/* single-flush: slave's flush is driven through the master's ctl */
	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
736
/*
 * _sde_encoder_adjust_mode - restore driver-private fields on a mode
 * @connector: connector whose registered mode list is searched
 * @adj_mode: adjusted mode to patch up in place
 *
 * DRM does not carry the driver-private mode fields through the mode_set
 * path, so copy them back from the matching mode (same hdisplay, vdisplay
 * and vrefresh) on the connector's mode list. If several registered modes
 * match, the last match wins for ->private while ->private_flags
 * accumulates across all matches (OR).
 */
static void _sde_encoder_adjust_mode(struct drm_connector *connector,
		struct drm_display_mode *adj_mode)
{
	struct drm_display_mode *cur_mode;

	if (!connector || !adj_mode)
		return;

	list_for_each_entry(cur_mode, &connector->modes, head) {
		if (cur_mode->vdisplay == adj_mode->vdisplay &&
			cur_mode->hdisplay == adj_mode->hdisplay &&
			cur_mode->vrefresh == adj_mode->vrefresh) {
			adj_mode->private = cur_mode->private;
			adj_mode->private_flags |= cur_mode->private_flags;
		}
	}
}
754
/*
 * sde_encoder_virt_atomic_check - encoder atomic_check callback
 * @drm_enc: encoder being validated
 * @crtc_state: new crtc state for this commit
 * @conn_state: new connector state for this commit
 *
 * Validates the requested mode against each physical encoder, then, on a
 * modeset, queries the connector for mode info, reserves hardware through
 * the resource manager, records the selected topology on the connector
 * state and validates any partial-update ROI. Returns 0 on success or a
 * negative error code to fail the atomic check.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	SDE_EVT32(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_sde_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* prefer the phys encoder's atomic_check; fall back to
		 * the legacy mode_fixup hook when it is not implemented
		 */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}


	/* full resource (re)reservation is only needed on a modeset */
	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_info(conn_state->connector, conn_state);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	/* validate partial-update region(s) requested by userspace */
	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
877
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800878static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
879 int pic_width, int pic_height)
880{
881 if (!dsc || !pic_width || !pic_height) {
882 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
883 pic_width, pic_height);
884 return -EINVAL;
885 }
886
887 if ((pic_width % dsc->slice_width) ||
888 (pic_height % dsc->slice_height)) {
889 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
890 pic_width, pic_height,
891 dsc->slice_width, dsc->slice_height);
892 return -EINVAL;
893 }
894
895 dsc->pic_width = pic_width;
896 dsc->pic_height = pic_height;
897
898 return 0;
899}
900
/*
 * _sde_encoder_dsc_pclk_param_calc - derive per-line byte/clock parameters
 * @dsc: DSC parameter block; compressed bpp and slice geometry are read,
 *       the derived byte/packet/pclk fields are written back
 * @intf_width: active width at the interface in pixels
 *
 * Computes bytes per slice, bytes per packet, packets per line and the
 * end-of-line remainder for the compressed stream. The divide-by-3 when
 * deriving pclk_per_line presumably reflects 3 bytes consumed per pixel
 * clock on the link — TODO confirm against the DSI/DSC HW documentation.
 */
static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
		int intf_width)
{
	int slice_per_pkt, slice_per_intf;
	int bytes_in_slice, total_bytes_per_intf;

	if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
		(intf_width < dsc->slice_width)) {
		SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
			intf_width, dsc ? dsc->slice_width : -1);
		return;
	}

	slice_per_pkt = dsc->slice_per_pkt;
	slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);

	/*
	 * If slice_per_pkt is greater than slice_per_intf then default to 1.
	 * This can happen during partial update.
	 */
	if (slice_per_pkt > slice_per_intf)
		slice_per_pkt = 1;

	/* dsc->bpp is the compressed bits-per-pixel; round up to whole bytes */
	bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
	total_bytes_per_intf = bytes_in_slice * slice_per_intf;

	dsc->eol_byte_num = total_bytes_per_intf % 3;
	dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
	dsc->bytes_in_slice = bytes_in_slice;
	dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
	dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
}
933
934static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
935 int enc_ip_width)
936{
937 int ssm_delay, total_pixels, soft_slice_per_enc;
938
939 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
940
941 /*
942 * minimum number of initial line pixels is a sum of:
943 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
944 * 91 for 10 bpc) * 3
945 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
946 * 3. the initial xmit delay
947 * 4. total pipeline delay through the "lock step" of encoder (47)
948 * 5. 6 additional pixels as the output of the rate buffer is
949 * 48 bits wide
950 */
951 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
952 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
953 if (soft_slice_per_enc > 1)
954 total_pixels += (ssm_delay * 3);
955 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
956 return 0;
957}
958
959static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
960 struct msm_display_dsc_info *dsc)
961{
962 /*
963 * As per the DSC spec, ICH_RESET can be either end of the slice line
964 * or at the end of the slice. HW internally generates ich_reset at
965 * end of the slice line if DSC_MERGE is used or encoder has two
966 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
967 * is not used then it will generate ich_reset at the end of slice.
968 *
969 * Now as per the spec, during one PPS session, position where
970 * ich_reset is generated should not change. Now if full-screen frame
971 * has more than 1 soft slice then HW will automatically generate
972 * ich_reset at the end of slice_line. But for the same panel, if
973 * partial frame is enabled and only 1 encoder is used with 1 slice,
974 * then HW will generate ich_reset at end of the slice. This is a
975 * mismatch. Prevent this by overriding HW's decision.
976 */
977 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
978 (dsc->slice_width == dsc->pic_width);
979}
980
/*
 * _sde_encoder_dsc_pipe_cfg - enable or disable DSC on one pp/dsc pair
 * @hw_dsc: DSC core block for this channel
 * @hw_pp: pingpong block feeding that DSC core
 * @dsc: computed DSC parameters to program on enable
 * @common_mode: DSC_MODE_* flags (split panel / multiplex / video)
 * @ich_reset: whether to force the ich_reset override in dsc_config
 * @enable: true to program and enable, false to disable
 *
 * Callers guarantee hw_dsc and hw_pp are non-NULL (validated before the
 * per-channel loop in the topology helpers).
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
	struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
	u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		/*
		 * NOTE(review): only the pingpong side is disabled here;
		 * the DSC core is left untouched, unlike
		 * _sde_encoder_dsc_disable which disables both — confirm
		 * this asymmetry is intentional for per-frame toggling.
		 */
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	/* program the core, then its rate-control thresholds, then the
	 * pingpong routing, and only then enable — order follows the
	 * original sequence and is preserved deliberately
	 */
	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
1003
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001004static void _sde_encoder_get_connector_roi(
1005 struct sde_encoder_virt *sde_enc,
1006 struct sde_rect *merged_conn_roi)
1007{
1008 struct drm_connector *drm_conn;
1009 struct sde_connector_state *c_state;
1010
1011 if (!sde_enc || !merged_conn_roi)
1012 return;
1013
1014 drm_conn = sde_enc->phys_encs[0]->connector;
1015
1016 if (!drm_conn || !drm_conn->state)
1017 return;
1018
1019 c_state = to_sde_connector_state(drm_conn->state);
1020 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1021}
1022
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - DSC setup for 1 encoder / 1 intf
 * @sde_enc: virtual encoder; uses hw_pp[0]/hw_dsc[0] and the current
 *           connector ROI
 *
 * Handles the single-DSC topologies (SINGLEPIPE_DSC and
 * DUALPIPE_3DMERGE_DSC): one DSC core compresses the full ROI and feeds
 * one interface. Returns 0 on success or -EINVAL.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* picture dimension follows the (possibly partial) connector ROI */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* single encoder: encoder input width equals interface width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en is false: no half-panel partial update in this topology */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001073
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - DSC setup for 2 encoders / 2 intfs
 * @sde_enc: virtual encoder with two pp/dsc channels
 * @params: kickoff parameters; affected_displays selects active channels
 *
 * DUALPIPE_DSC topology: two independent DSC cores, each feeding its own
 * interface. Both cores share the same picture dimensions and per-line
 * parameters; a channel is enabled only when its bit is set in
 * params->affected_displays (half-panel partial update leaves one off).
 * Returns 0 on success or -EINVAL.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* exactly one display bit set => only half the panel is updated */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full update: each interface carries half the panel width */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1167
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - DSC setup for dsc-merge topology
 * @sde_enc: virtual encoder with two pp/dsc channels
 * @params: kickoff parameters; affected_displays detects half-panel update
 *
 * DUALPIPE_DSCMERGE topology: two DSC cores compress the stream which is
 * merged onto a single interface, so each encoder sees half the interface
 * width. On a half-panel partial update the second core is disabled.
 * Returns 0 on success or -EINVAL.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* exactly one display bit set => only half the panel is updated */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/* NOTE(review): 'i' equals MAX_CHANNELS_PER_ENC here (loop above
	 * ran to completion) — logged value looks like a loop-index
	 * leftover; confirm intent before changing the event signature.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1239
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001240static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1241{
1242 struct sde_encoder_virt *sde_enc;
1243 struct drm_connector *drm_conn;
1244 struct drm_display_mode *adj_mode;
1245 struct sde_rect roi;
1246
1247 if (!drm_enc || !drm_enc->crtc || !drm_enc->crtc->state)
1248 return -EINVAL;
1249 sde_enc = to_sde_encoder_virt(drm_enc);
1250
1251 if (!sde_enc->cur_master)
1252 return -EINVAL;
1253
1254 adj_mode = &sde_enc->base.crtc->state->adjusted_mode;
1255 drm_conn = sde_enc->cur_master->connector;
1256
1257 _sde_encoder_get_connector_roi(sde_enc, &roi);
1258 if (sde_kms_rect_is_null(&roi)) {
1259 roi.w = adj_mode->hdisplay;
1260 roi.h = adj_mode->vdisplay;
1261 }
1262
1263 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1264 sizeof(sde_enc->prv_conn_roi));
1265 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1266
1267 return 0;
1268}
1269
1270static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1271 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001272{
1273 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001274 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001275 int ret = 0;
1276
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001277 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1278 !sde_enc->phys_encs[0]->connector)
1279 return -EINVAL;
1280
1281 drm_conn = sde_enc->phys_encs[0]->connector;
1282
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001283 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001284 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001285 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1286 return -EINVAL;
1287 }
1288
Ingrid Gallardo83532222017-06-02 16:48:51 -07001289 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001290 SDE_EVT32(DRMID(&sde_enc->base));
1291
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001292 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1293 &sde_enc->prv_conn_roi))
1294 return ret;
1295
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001296 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001297 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001298 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1299 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001300 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001301 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001302 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001303 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001304 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001305 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001306 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001307 default:
1308 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1309 topology);
1310 return -EINVAL;
1311 };
1312
1313 return ret;
1314}
1315
/*
 * _sde_encoder_update_vsync_source - select the TE/vsync source in MDP top
 * @sde_enc: virtual encoder whose pingpongs are wired to the source
 * @disp_info: display capabilities; only command-mode panels are handled
 * @is_dummy: true to route to the secondary watchdog timer (WD_TIMER_1),
 *            presumably for a dummy/standby vsync — TODO confirm usage
 *
 * For command-mode displays, programs which signal (panel TE GPIO or a
 * watchdog timer) drives vsync for every pingpong of this encoder.
 */
static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
		struct msm_display_info *disp_info, bool is_dummy)
{
	struct sde_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct msm_mode_info mode_info;
	int i, rc = 0;

	if (!sde_enc || !disp_info) {
		SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
					sde_enc != NULL, disp_info != NULL);
		return;
	} else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
		SDE_ERROR("invalid num phys enc %d/%d\n",
				sde_enc->num_phys_encs,
				(int) ARRAY_SIZE(sde_enc->hw_pp));
		return;
	}

	drm_enc = &sde_enc->base;
	/* this pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	hw_mdptop = sde_kms->hw_mdp;
	if (!hw_mdptop) {
		SDE_ERROR("invalid mdptop\n");
		return;
	}

	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		/* every pingpong of this encoder shares the vsync source */
		for (i = 0; i < sde_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = sde_enc->num_phys_encs;
		vsync_cfg.frame_rate = mode_info.frame_rate;
		if (is_dummy)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
		else if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
		vsync_cfg.is_dummy = is_dummy;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}
1378
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001379static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1380{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001381 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001382 struct sde_hw_pingpong *hw_pp = NULL;
1383 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001384
1385 if (!sde_enc || !sde_enc->phys_encs[0] ||
1386 !sde_enc->phys_encs[0]->connector) {
1387 SDE_ERROR("invalid params %d %d\n",
1388 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1389 return -EINVAL;
1390 }
1391
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001392 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001393 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1394 hw_pp = sde_enc->hw_pp[i];
1395 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001396
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001397 if (hw_pp && hw_pp->ops.disable_dsc)
1398 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001399
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001400 if (hw_dsc && hw_dsc->ops.dsc_disable)
1401 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001402 }
1403
1404 return ret;
1405}
1406
/**
 * _sde_encoder_update_rsc_client - sync the RSC hardware with this encoder's
 *	current display timing and power state.
 * @drm_enc:	encoder whose RSC client should be updated
 * @config:	optional encoder-side additions to the RSC config (currently
 *		only the inline-rotator prefill); may be NULL
 * @enable:	true to request an active (CMD/VID) RSC state, false for IDLE
 *
 * Pushes the mode-derived timing parameters (fps, vtotal, prefill, jitter)
 * to the RSC client, then waits — up to MAX_RSC_WAIT vblanks — for the RSC
 * state machine to report the update complete. The vblank used for the wait
 * may belong to a different (primary) CRTC, as reported by the RSC layer.
 *
 * Return: 0 on success or when no RSC client exists; negative errno on
 * invalid arguments, failed client update, or missing primary CRTC.
 */
static int _sde_encoder_update_rsc_client(
		struct drm_encoder *drm_enc,
		struct sde_encoder_rsc_config *config, bool enable)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_crtc *crtc;
	enum sde_rsc_state rsc_state;
	struct sde_rsc_cmd_config *rsc_config;
	int ret, prefill_lines;
	struct msm_display_info *disp_info;
	struct msm_mode_info mode_info;
	int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
	int wait_count = 0;
	struct drm_crtc *primary_crtc;
	int pipe = -1;
	int rc = 0;

	if (!drm_enc || !drm_enc->crtc || !drm_enc->dev) {
		SDE_ERROR("invalid arguments\n");
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	crtc = drm_enc->crtc;
	disp_info = &sde_enc->disp_info;
	rsc_config = &sde_enc->rsc_config;

	/* displays without an RSC client (e.g. no RSC hw) have nothing to do */
	if (!sde_enc->rsc_client) {
		SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
		return 0;
	}

	/*
	 * NOTE(review): a failure here returns 0 (success) rather than an
	 * error — presumably intentional best-effort behavior; confirm.
	 */
	rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
		return 0;
	}

	/**
	 * only primary command mode panel can request CMD state.
	 * all other panels/displays can request for VID state including
	 * secondary command mode panel.
	 */
	rsc_state = enable ?
		(((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
		  disp_info->is_primary) ? SDE_RSC_CMD_STATE :
		SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
	/* inline rotation adds extra prefill lines on top of the mode's own */
	prefill_lines = config ? mode_info.prefill_lines +
		config->inline_rotate_prefill : mode_info.prefill_lines;

	/* compare specific items and reconfigure the rsc */
	if ((rsc_config->fps != mode_info.frame_rate) ||
			(rsc_config->vtotal != mode_info.vtotal) ||
			(rsc_config->prefill_lines != prefill_lines) ||
			(rsc_config->jitter_numer != mode_info.jitter_numer) ||
			(rsc_config->jitter_denom != mode_info.jitter_denom)) {
		rsc_config->fps = mode_info.frame_rate;
		rsc_config->vtotal = mode_info.vtotal;
		rsc_config->prefill_lines = prefill_lines;
		rsc_config->jitter_numer = mode_info.jitter_numer;
		rsc_config->jitter_denom = mode_info.jitter_denom;
		/* force a full config push on the next active-state request */
		sde_enc->rsc_state_init = false;
	}

	if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
			&& disp_info->is_primary) {
		/* update it only once */
		sde_enc->rsc_state_init = true;

		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, rsc_config, crtc->base.id,
			&wait_vblank_crtc_id);
	} else {
		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, NULL, crtc->base.id,
			&wait_vblank_crtc_id);
	}

	/**
	 * if RSC performed a state change that requires a VBLANK wait, it will
	 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
	 *
	 * if we are the primary display, we will need to enable and wait
	 * locally since we hold the commit thread
	 *
	 * if we are an external display, we must send a signal to the primary
	 * to enable its VBLANK and wait one, since the RSC hardware is driven
	 * by the primary panel's VBLANK signals
	 */
	SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"sde rsc client update failed ret:%d\n", ret);
		return ret;
	} else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
		/* no vblank wait requested by the RSC — done */
		return ret;
	}

	/* waiting on a foreign CRTC: resolve its pipe index for drm vblank */
	if (crtc->base.id != wait_vblank_crtc_id) {
		primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
		if (!primary_crtc) {
			SDE_ERROR_ENC(sde_enc,
					"failed to find primary crtc id %d\n",
					wait_vblank_crtc_id);
			return -EINVAL;
		}
		pipe = drm_crtc_index(primary_crtc);
	}

	/**
	 * note: VBLANK is expected to be enabled at this point in
	 * resource control state machine if on primary CRTC
	 */
	for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
		if (sde_rsc_client_is_state_update_complete(
				sde_enc->rsc_client))
			break;

		if (crtc->base.id == wait_vblank_crtc_id)
			ret = sde_encoder_wait_for_event(drm_enc,
					MSM_ENC_VBLANK);
		else
			drm_wait_one_vblank(drm_enc->dev, pipe);

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"wait for vblank failed ret:%d\n", ret);
			break;
		}
	}

	/* log (but do not fail) if the RSC never reported completion */
	if (wait_count >= MAX_RSC_WAIT)
		SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
				SDE_EVTLOG_ERROR);

	return ret;
}
1544
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001545static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1546{
1547 struct sde_encoder_virt *sde_enc;
1548 int i;
1549
1550 if (!drm_enc) {
1551 SDE_ERROR("invalid encoder\n");
1552 return;
1553 }
1554
1555 sde_enc = to_sde_encoder_virt(drm_enc);
1556
1557 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1558 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1559 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1560
1561 if (phys && phys->ops.irq_control)
1562 phys->ops.irq_control(phys, enable);
1563 }
1564
1565}
1566
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001567/* keep track of the userspace vblank during modeset */
1568static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1569 u32 sw_event)
1570{
1571 struct sde_encoder_virt *sde_enc;
1572 bool enable;
1573 int i;
1574
1575 if (!drm_enc) {
1576 SDE_ERROR("invalid encoder\n");
1577 return;
1578 }
1579
1580 sde_enc = to_sde_encoder_virt(drm_enc);
1581 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1582 sw_event, sde_enc->vblank_enabled);
1583
1584 /* nothing to do if vblank not enabled by userspace */
1585 if (!sde_enc->vblank_enabled)
1586 return;
1587
1588 /* disable vblank on pre_modeset */
1589 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1590 enable = false;
1591 /* enable vblank on post_modeset */
1592 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1593 enable = true;
1594 else
1595 return;
1596
1597 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1598 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1599
1600 if (phys && phys->ops.control_vblank_irq)
1601 phys->ops.control_vblank_irq(phys, enable);
1602 }
1603}
1604
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001605struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1606{
1607 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001608
1609 if (!drm_enc)
1610 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001611 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001612 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001613}
1614
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001615static void _sde_encoder_resource_control_rsc_update(
1616 struct drm_encoder *drm_enc, bool enable)
1617{
1618 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
1619 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001620 int i;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001621
1622 if (enable) {
1623 rsc_cfg.inline_rotate_prefill =
1624 sde_crtc_get_inline_prefill(drm_enc->crtc);
1625
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001626 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1627 } else {
1628 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
1629
1630 /**
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001631 * disable the vsync source after updating the rsc state. rsc
1632 * state update might have vsync wait and vsync source must be
1633 * disabled after it. It will avoid generating any vsync from
1634 * this point till mode-2 entry. It is SW workaround for
1635 * HW limitation and should not be removed without checking the
1636 * updated design.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001637 */
Dhaval Patelc1e4bfc2017-09-15 14:51:36 -07001638 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1639 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1640
1641 if (phys && phys->ops.prepare_idle_pc)
1642 phys->ops.prepare_idle_pc(phys);
1643 }
1644
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001645 }
1646}
1647
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001648static void _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
1649 bool enable)
1650{
1651 struct msm_drm_private *priv;
1652 struct sde_kms *sde_kms;
1653 struct sde_encoder_virt *sde_enc;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001654
1655 sde_enc = to_sde_encoder_virt(drm_enc);
1656 priv = drm_enc->dev->dev_private;
1657 sde_kms = to_sde_kms(priv->kms);
1658
1659 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1660 SDE_EVT32(DRMID(drm_enc), enable);
1661
1662 if (!sde_enc->cur_master) {
1663 SDE_ERROR("encoder master not set\n");
1664 return;
1665 }
1666
1667 if (enable) {
1668 /* enable SDE core clks */
1669 sde_power_resource_enable(&priv->phandle,
1670 sde_kms->core_client, true);
1671
1672 /* enable DSI clks */
1673 sde_connector_clk_ctrl(sde_enc->cur_master->connector, true);
1674
1675 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001676 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001677
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001678 } else {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001679 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001680 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001681
1682 /* disable DSI clks */
1683 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
1684
1685 /* disable SDE core clks */
1686 sde_power_resource_enable(&priv->phandle,
1687 sde_kms->core_client, false);
1688 }
1689
1690}
1691
/**
 * sde_encoder_resource_control - resource-control (idle power collapse)
 *	state machine for the virtual encoder.
 * @drm_enc:	encoder receiving the event
 * @sw_event:	one of the SDE_ENC_RC_EVENT_* events
 *
 * Transitions between the RC states (ON, PRE_OFF, OFF, MODESET, IDLE)
 * based on the incoming event, enabling/disabling clocks, irqs and the
 * RSC client as required. All transitions except FRAME_DONE are serialized
 * under rc_lock; FRAME_DONE runs in interrupt context and relies on the
 * other events' wait-for-idle for safety (see comment in that case).
 *
 * Return: 0 on success or when the event is ignored; negative errno on an
 * unexpected state/event combination.
 */
static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool autorefresh_enabled = false;
	unsigned int lp, idle_timeout;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	int ret;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = sde_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		SDE_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	/* per-crtc display thread used to schedule the delayed-off work */
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!sde_enc->idle_pc_supported &&
			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
			sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_STOP &&
			sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
		return 0;

	SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
			sde_enc->idle_pc_supported);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);

	switch (sw_event) {
	case SDE_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE1);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
				sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
			/* KICKOFF is only legal from ON, OFF or IDLE */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * video mode coming out of IDLE only re-enables irqs; clks
		 * were left on. Otherwise do the full power-up sequence.
		 */
		if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		} else {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);
			_sde_encoder_resource_control_rsc_update(drm_enc, true);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (sde_crtc_frame_pending(drm_enc->crtc) > 1) {
			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE2);
			return 0;
		}

		/* schedule delayed off work if autorefresh is disabled */
		if (sde_enc->cur_master &&
			sde_enc->cur_master->ops.is_autorefresh_enabled)
			autorefresh_enabled =
				sde_enc->cur_master->ops.is_autorefresh_enabled(
							sde_enc->cur_master);

		/* set idle timeout based on master connector's lp value */
		if (sde_enc->cur_master)
			lp = sde_connector_get_lp(
					sde_enc->cur_master->connector);
		else
			lp = SDE_MODE_DPMS_ON;

		/* low-power doze (LP2) collapses sooner than normal */
		if (lp == SDE_MODE_DPMS_LP2)
			idle_timeout = IDLE_SHORT_TIMEOUT;
		else
			idle_timeout = sde_enc->idle_timeout;

		/* a zero idle_timeout disables idle power collapse entirely */
		if (!autorefresh_enabled && idle_timeout)
			kthread_queue_delayed_work(
				&disp_thread->worker,
				&sde_enc->delayed_off_work,
				msecs_to_jiffies(idle_timeout));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				autorefresh_enabled,
				idle_timeout, SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;

	case SDE_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* vid mode in IDLE must get irqs back before the RSC update */
		if (is_vid_mode &&
			  sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE3);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/**
		 * IRQs are still enabled currently, which allows wait for
		 * VBLANK which RSC may require to correctly transition to OFF
		 */
		_sde_encoder_resource_control_rsc_update(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_PRE_OFF,
				SDE_EVTLOG_FUNC_CASE3);

		sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_STOP:
		/* cancel vsync event work */
		kthread_cancel_work_sync(&sde_enc->vsync_event_work);

		mutex_lock(&sde_enc->rc_lock);
		/* return if the resource control is already in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE4);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
			/* STOP must be preceded by PRE_STOP (or IDLE) */
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/**
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
			_sde_encoder_resource_control_helper(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);

		sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_PRE_MODESET:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);

			_sde_encoder_resource_control_rsc_update(drm_enc, true);

			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
			sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
		}

		/* wait for the current frame to land before releasing hw */
		ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
		if (ret && ret != -EWOULDBLOCK) {
			SDE_ERROR_ENC(sde_enc,
					"wait for commit done returned %d\n",
					ret);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					ret, SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		_sde_encoder_irq_control(drm_enc, false);
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
			SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);

		sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_POST_MODESET:
		mutex_lock(&sde_enc->rc_lock);

		/* only legal from the MODESET state set by PRE_MODESET */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d !MODESET state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/* restore userspace vblank, irqs, and the RSC active state */
		_sde_encoder_modeset_helper_locked(drm_enc, sw_event);
		_sde_encoder_irq_control(drm_enc, true);

		_sde_encoder_update_rsc_client(drm_enc, NULL, true);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);

		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&sde_enc->rc_lock);

		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (sde_enc->frame_busy_mask[0]) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d frame pending\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* video mode keeps clks on in IDLE and only drops irqs */
		if (is_vid_mode) {
			_sde_encoder_irq_control(drm_enc, false);
		} else {
			/* disable all the clks and resources */
			_sde_encoder_resource_control_rsc_update(drm_enc,
								false);
			_sde_encoder_resource_control_helper(drm_enc, false);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	default:
		SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
		SDE_ERROR("unexpected sw_event: %d\n", sw_event);
		break;
	}

	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
	return 0;
}
2034
/**
 * sde_encoder_virt_mode_set - drm_encoder_helper mode_set callback.
 * @drm_enc:	encoder being configured
 * @mode:	requested display mode
 * @adj_mode:	adjusted mode actually being programmed
 *
 * Finds the attached connector, queries the display for mode-derived info,
 * reserves pingpong/DSC hardware blocks through the resource manager and
 * distributes them to each physical encoder, then invokes each phys
 * encoder's mode_set. For a seamless dynamic mode switch (DMS), resources
 * are powered up and DSC disabled before the reservation, and resource
 * control is re-armed afterwards.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/* locate the connector currently routed to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh the connector-state mode info from the display backend */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* collect the reserved pingpong blocks, NULLing unused slots */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the reserved DSC blocks, NULLing unused slots */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each phys encoder its pingpong block and run its mode_set */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2154
/*
 * _sde_encoder_virt_enable_helper - common post-enable programming shared by
 * the enable and restore paths. Selects the audio interface for DisplayPort,
 * resets UBWC in the MDP top block, reprograms the vsync source, and clears
 * the cached previous/current connector ROIs.
 * @drm_enc: Pointer to drm encoder structure
 *
 * Requires a valid cur_master; logs an error and bails out otherwise.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the active interface, DisplayPort connectors only */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
				sde_enc->cur_master->hw_mdptop);

	/* reset UBWC settings in MDP top against the current catalog */
	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);

	/* forget stale ROIs from the previous enable cycle */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2196
2197void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2198{
2199 struct sde_encoder_virt *sde_enc = NULL;
2200 int i;
2201
2202 if (!drm_enc) {
2203 SDE_ERROR("invalid encoder\n");
2204 return;
2205 }
2206 sde_enc = to_sde_encoder_virt(drm_enc);
2207
2208 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2209 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2210
2211 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2212 phys->ops.restore(phys);
2213 }
2214
2215 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2216 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2217
2218 _sde_encoder_virt_enable_helper(drm_enc);
2219}
2220
/*
 * sde_encoder_virt_enable - drm encoder enable callback for the virtual
 * encoder. Fetches mode info, picks the master physical encoder, votes for
 * resources (RC KICKOFF), enables/restores the slave physical encoders and
 * then the master, runs the common enable helper, and schedules the ESD
 * status work on the master's connector.
 * @drm_enc: Pointer to drm encoder structure
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;
	struct msm_mode_info mode_info;
	struct drm_connector *drm_conn = NULL;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return;
	}

	comp_info = &mode_info.comp_info;
	/* assumes crtc/state are valid at enable time - set by atomic core */
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* re-elect the master: first phys encoder reporting is_master */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* vote for clocks/bus before touching any hardware */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* bring up all slave physical encoders before the master */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* re-arm MISR collection for video-mode displays if enabled */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		     MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	/* master last: restore on seamless DMS, full enable otherwise */
	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);

	/* Enable ESD thread */
	drm_conn = sde_enc->cur_master->connector;
	sde_connector_schedule_status_work(drm_conn, true);
}
2309
2310static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
2311{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002312 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002313 struct msm_drm_private *priv;
2314 struct sde_kms *sde_kms;
Sandeep Panda318cff12017-10-20 13:16:03 +05302315 struct drm_connector *drm_conn = NULL;
Clarence Iped3327b2017-11-01 13:13:58 -04002316 enum sde_intf_mode intf_mode;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002317 int i = 0;
2318
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002319 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002320 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002321 return;
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04002322 } else if (!drm_enc->dev) {
2323 SDE_ERROR("invalid dev\n");
2324 return;
2325 } else if (!drm_enc->dev->dev_private) {
2326 SDE_ERROR("invalid dev_private\n");
2327 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002328 }
2329
2330 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002331 SDE_DEBUG_ENC(sde_enc, "\n");
2332
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002333 priv = drm_enc->dev->dev_private;
2334 sde_kms = to_sde_kms(priv->kms);
Clarence Iped3327b2017-11-01 13:13:58 -04002335 intf_mode = sde_encoder_get_intf_mode(drm_enc);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002336
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002337 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04002338
Sandeep Panda318cff12017-10-20 13:16:03 +05302339 /* Disable ESD thread */
2340 drm_conn = sde_enc->cur_master->connector;
2341 sde_connector_schedule_status_work(drm_conn, false);
2342
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002343 /* wait for idle */
2344 sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2345
Clarence Iped3327b2017-11-01 13:13:58 -04002346 /*
2347 * For primary command mode encoders, execute the resource control
2348 * pre-stop operations before the physical encoders are disabled, to
2349 * allow the rsc to transition its states properly.
2350 *
2351 * For other encoder types, rsc should not be enabled until after
2352 * they have been fully disabled, so delay the pre-stop operations
2353 * until after the physical disable calls have returned.
2354 */
2355 if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
2356 sde_encoder_resource_control(drm_enc,
2357 SDE_ENC_RC_EVENT_PRE_STOP);
2358 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2359 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002360
Clarence Iped3327b2017-11-01 13:13:58 -04002361 if (phys && phys->ops.disable)
2362 phys->ops.disable(phys);
2363 }
2364 } else {
2365 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2366 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002367
Clarence Iped3327b2017-11-01 13:13:58 -04002368 if (phys && phys->ops.disable)
2369 phys->ops.disable(phys);
2370 }
2371 sde_encoder_resource_control(drm_enc,
2372 SDE_ENC_RC_EVENT_PRE_STOP);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002373 }
2374
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07002375 /*
2376 * disable dsc after the transfer is complete (for command mode)
2377 * and after physical encoder is disabled, to make sure timing
2378 * engine is already disabled (for video mode).
2379 */
2380 _sde_encoder_dsc_disable(sde_enc);
2381
Lloyd Atkinson03810e32017-03-14 13:38:06 -07002382 /* after phys waits for frame-done, should be no more frames pending */
2383 if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
2384 SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
2385 del_timer_sync(&sde_enc->frame_done_timer);
2386 }
2387
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002388 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);
2389
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002390 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2391 if (sde_enc->phys_encs[i])
2392 sde_enc->phys_encs[i]->connector = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002393 }
2394
Lloyd Atkinson07099ad2017-08-15 13:32:24 -04002395 sde_enc->cur_master = NULL;
2396
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002397 SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002398
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002399 sde_rm_release(&sde_kms->rm, drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002400}
2401
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002402static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002403 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002404{
2405 int i = 0;
2406
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002407 for (i = 0; i < catalog->intf_count; i++) {
2408 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002409 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002410 return catalog->intf[i].id;
2411 }
2412 }
2413
2414 return INTF_MAX;
2415}
2416
Alan Kwongbb27c092016-07-20 16:41:25 -04002417static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2418 enum sde_intf_type type, u32 controller_id)
2419{
2420 if (controller_id < catalog->wb_count)
2421 return catalog->wb[controller_id].id;
2422
2423 return WB_MAX;
2424}
2425
/*
 * sde_encoder_vblank_callback - vblank notification from a physical encoder.
 * Invokes the registered crtc vblank callback under the encoder spinlock and
 * bumps the per-physical-encoder vsync counter. Runs in interrupt context
 * (irqsave locking).
 * @drm_enc: Pointer to drm encoder structure
 * @phy_enc: Pointer to the physical encoder that raised the vblank
 */
static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_vblank_callback");
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* spinlock guards against concurrent callback (un)registration */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	SDE_ATRACE_END("encoder_vblank_callback");
}
2446
/*
 * sde_encoder_underrun_callback - underrun notification from a physical
 * encoder. Counts the event and records it in the event log for debug.
 * @drm_enc: Pointer to drm encoder structure (only used for logging)
 * @phy_enc: Pointer to the physical encoder that underran
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
	SDE_ATRACE_END("encoder_underrun_callback");
}
2458
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002459void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2460 void (*vbl_cb)(void *), void *vbl_data)
2461{
2462 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2463 unsigned long lock_flags;
2464 bool enable;
2465 int i;
2466
2467 enable = vbl_cb ? true : false;
2468
Clarence Ip19af1362016-09-23 14:57:51 -04002469 if (!drm_enc) {
2470 SDE_ERROR("invalid encoder\n");
2471 return;
2472 }
2473 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002474 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002475
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002476 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002477 sde_enc->crtc_vblank_cb = vbl_cb;
2478 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002479 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002480
2481 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2482 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2483
2484 if (phys && phys->ops.control_vblank_irq)
2485 phys->ops.control_vblank_irq(phys, enable);
2486 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002487 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002488}
2489
Alan Kwong628d19e2016-10-31 13:50:13 -04002490void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2491 void (*frame_event_cb)(void *, u32 event),
2492 void *frame_event_cb_data)
2493{
2494 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2495 unsigned long lock_flags;
2496 bool enable;
2497
2498 enable = frame_event_cb ? true : false;
2499
2500 if (!drm_enc) {
2501 SDE_ERROR("invalid encoder\n");
2502 return;
2503 }
2504 SDE_DEBUG_ENC(sde_enc, "\n");
2505 SDE_EVT32(DRMID(drm_enc), enable, 0);
2506
2507 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2508 sde_enc->crtc_frame_event_cb = frame_event_cb;
2509 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2510 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2511}
2512
/*
 * sde_encoder_frame_done_callback - frame-event notification from a physical
 * encoder. For DONE/ERROR/PANEL_DEAD events, clears the reporting encoder's
 * bit in frame_busy_mask; once the mask drains to zero, cancels the
 * frame-done timeout, signals resource control FRAME_DONE, and forwards the
 * event to the registered crtc callback. All other events are forwarded
 * unconditionally.
 * @drm_enc: Pointer to drm encoder structure
 * @ready_phys: Physical encoder reporting the event (may be NULL for
 *              synthesized events such as IDLE from sde_encoder_off_work)
 * @event: SDE_ENCODER_FRAME_EVENT_* bitmask
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (SDE_ENCODER_FRAME_EVENT_DONE
			| SDE_ENCODER_FRAME_EVENT_ERROR
			| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!sde_enc->frame_busy_mask[0]) {
			/**
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			if (sde_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, sde_enc->frame_busy_mask);
				SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
			}
		}

		/* last busy encoder finished: frame is fully done */
		if (!sde_enc->frame_busy_mask[0]) {
			atomic_set(&sde_enc->frame_done_timeout, 0);
			del_timer(&sde_enc->frame_done_timer);

			sde_encoder_resource_control(drm_enc,
					SDE_ENC_RC_EVENT_FRAME_DONE);

			if (sde_enc->crtc_frame_event_cb)
				sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
2560
/*
 * sde_encoder_off_work - delayed kthread work run after a period of
 * inactivity. Moves resource control to the idle state and reports an IDLE
 * frame event up through the frame-done callback path.
 * @work: Embedded kthread_work inside sde_encoder_virt.delayed_off_work
 */
static void sde_encoder_off_work(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, delayed_off_work.work);

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	sde_encoder_resource_control(&sde_enc->base,
			SDE_ENC_RC_EVENT_ENTER_IDLE);

	/* ready_phys is NULL here: IDLE is not tied to a physical encoder */
	sde_encoder_frame_done_callback(&sde_enc->base, NULL,
			SDE_ENCODER_FRAME_EVENT_IDLE);
}
2577
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the pending kickoff count (and the retire-fence count for the
 * master), merges any extra flush bits into the ctl's pending flush mask,
 * and invokes the physical encoder's trigger_flush op. Encoders with the
 * SKIP split role are ignored. Events are logged for debug.
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	/* SKIP-role encoders are driven by another encoder's flush */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* only the master tracks retire fences */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	/* log the final flush mask when the ctl can report it */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				pending_kickoff_cnt, ctl->idx - CTL_0,
				ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				ctl->idx - CTL_0, pending_kickoff_cnt);
}
2633
2634/**
2635 * _sde_encoder_trigger_start - trigger start for a physical encoder
2636 * phys: Pointer to physical encoder structure
2637 */
2638static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2639{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002640 struct sde_hw_ctl *ctl;
2641
Clarence Ip110d15c2016-08-16 14:44:41 -04002642 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002643 SDE_ERROR("invalid argument(s)\n");
2644 return;
2645 }
2646
2647 if (!phys->hw_pp) {
2648 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002649 return;
2650 }
2651
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002652 ctl = phys->hw_ctl;
2653 if (phys->split_role == ENC_ROLE_SKIP) {
2654 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2655 "skip start pp%d ctl%d\n",
2656 phys->hw_pp->idx - PINGPONG_0,
2657 ctl->idx - CTL_0);
2658 return;
2659 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002660 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
2661 phys->ops.trigger_start(phys);
2662}
2663
Alan Kwong4212dd42017-09-19 17:22:33 -04002664void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
2665{
2666 struct sde_hw_ctl *ctl;
2667
2668 if (!phys_enc) {
2669 SDE_ERROR("invalid encoder\n");
2670 return;
2671 }
2672
2673 ctl = phys_enc->hw_ctl;
2674 if (ctl && ctl->ops.trigger_flush)
2675 ctl->ops.trigger_flush(ctl);
2676}
2677
Clarence Ip110d15c2016-08-16 14:44:41 -04002678void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
2679{
2680 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04002681
2682 if (!phys_enc) {
2683 SDE_ERROR("invalid encoder\n");
2684 return;
2685 }
2686
2687 ctl = phys_enc->hw_ctl;
2688 if (ctl && ctl->ops.trigger_start) {
2689 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04002690 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04002691 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002692}
2693
/*
 * _sde_encoder_wait_timeout - wait on info->wq until info->atomic_cnt drops
 * to zero or timeout_ms of wall-clock time elapses. Re-waits after spurious
 * early timeouts as long as the counter is still nonzero and the wall-clock
 * deadline (tracked via ktime) has not passed.
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @timeout_ms: timeout in milliseconds
 * @info: wait descriptor carrying the waitqueue and the atomic counter
 *
 * Return: remaining jiffies from the final wait_event_timeout (0 on timeout).
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
	s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
			atomic_read(info->atomic_cnt) == 0, wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
			timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
2715
/*
 * sde_encoder_helper_wait_event_timeout - wait for info->atomic_cnt to reach
 * zero within info->timeout_ms, with a fault-tolerance retry to cover the
 * case where the timer interrupt itself was delayed past the deadline.
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @info: wait descriptor carrying waitqueue, atomic counter and timeout
 *
 * Return: result of the (last) _sde_encoder_wait_timeout call; 0 means the
 * wait ultimately timed out.
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
	struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/**
	 * handle disabled irq case where timer irq is also delayed.
	 * wait for additional timeout of FAULT_TOLERENCE_WAIT_IN_MS
	 * if it event_timeout expired late detected.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
		(ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
		FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
			FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
2737
/*
 * sde_encoder_helper_hw_reset - recover a physical encoder after a fault by
 * soft-resetting the connected panel (master encoder only) and forcing the
 * enable state back to SDE_ENC_ENABLED. A failed soft reset dumps debug
 * buses and panics via SDE_DBG_DUMP.
 * @phys_enc: Pointer to physical encoder structure
 *
 * NOTE(review): ctl->ops.reset is checked for presence but never invoked in
 * this visible code; the ctl reset is presumably performed elsewhere in the
 * recovery path - confirm against the rest of the file.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* panel soft reset only applies to the master's connector */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
2777
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 *
 * Two phases: (1) LUT DMA flush outside the spinlock, since it may block;
 * (2) under the encoder spinlock, mark busy encoders, issue per-encoder
 * flushes (or accumulate masks for single-flush topologies into the
 * master's flush), and finally trigger start on the master.
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		bool wait_for_dma = false;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->ops.wait_dma_trigger)
			wait_for_dma = phys->ops.wait_dma_trigger(phys);

		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
				wait_for_dma);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
2870
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002871static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
2872 struct drm_encoder *drm_enc,
2873 unsigned long *affected_displays,
2874 int num_active_phys)
2875{
2876 struct sde_encoder_virt *sde_enc;
2877 struct sde_encoder_phys *master;
2878 enum sde_rm_topology_name topology;
2879 bool is_right_only;
2880
2881 if (!drm_enc || !affected_displays)
2882 return;
2883
2884 sde_enc = to_sde_encoder_virt(drm_enc);
2885 master = sde_enc->cur_master;
2886 if (!master || !master->connector)
2887 return;
2888
2889 topology = sde_connector_get_topology_name(master->connector);
2890 if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
2891 return;
2892
2893 /*
2894 * For pingpong split, the slave pingpong won't generate IRQs. For
2895 * right-only updates, we can't swap pingpongs, or simply swap the
2896 * master/slave assignment, we actually have to swap the interfaces
2897 * so that the master physical encoder will use a pingpong/interface
2898 * that generates irqs on which to wait.
2899 */
2900 is_right_only = !test_bit(0, affected_displays) &&
2901 test_bit(1, affected_displays);
2902
2903 if (is_right_only && !sde_enc->intfs_swapped) {
2904 /* right-only update swap interfaces */
2905 swap(sde_enc->phys_encs[0]->intf_idx,
2906 sde_enc->phys_encs[1]->intf_idx);
2907 sde_enc->intfs_swapped = true;
2908 } else if (!is_right_only && sde_enc->intfs_swapped) {
2909 /* left-only or full update, swap back */
2910 swap(sde_enc->phys_encs[0]->intf_idx,
2911 sde_enc->phys_encs[1]->intf_idx);
2912 sde_enc->intfs_swapped = false;
2913 }
2914
2915 SDE_DEBUG_ENC(sde_enc,
2916 "right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
2917 is_right_only, sde_enc->intfs_swapped,
2918 sde_enc->phys_encs[0]->intf_idx - INTF_0,
2919 sde_enc->phys_encs[1]->intf_idx - INTF_0);
2920 SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
2921 sde_enc->phys_encs[0]->intf_idx - INTF_0,
2922 sde_enc->phys_encs[1]->intf_idx - INTF_0,
2923 *affected_displays);
2924
2925 /* ppsplit always uses master since ppslave invalid for irqs*/
2926 if (num_active_phys == 1)
2927 *affected_displays = BIT(0);
2928}
2929
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002930static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
2931 struct sde_encoder_kickoff_params *params)
2932{
2933 struct sde_encoder_virt *sde_enc;
2934 struct sde_encoder_phys *phys;
2935 int i, num_active_phys;
2936 bool master_assigned = false;
2937
2938 if (!drm_enc || !params)
2939 return;
2940
2941 sde_enc = to_sde_encoder_virt(drm_enc);
2942
2943 if (sde_enc->num_phys_encs <= 1)
2944 return;
2945
2946 /* count bits set */
2947 num_active_phys = hweight_long(params->affected_displays);
2948
2949 SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
2950 params->affected_displays, num_active_phys);
2951
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002952 /* for left/right only update, ppsplit master switches interface */
2953 _sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
2954 &params->affected_displays, num_active_phys);
2955
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002956 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2957 enum sde_enc_split_role prv_role, new_role;
2958 bool active;
2959
2960 phys = sde_enc->phys_encs[i];
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002961 if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002962 continue;
2963
2964 active = test_bit(i, &params->affected_displays);
2965 prv_role = phys->split_role;
2966
2967 if (active && num_active_phys == 1)
2968 new_role = ENC_ROLE_SOLO;
2969 else if (active && !master_assigned)
2970 new_role = ENC_ROLE_MASTER;
2971 else if (active)
2972 new_role = ENC_ROLE_SLAVE;
2973 else
2974 new_role = ENC_ROLE_SKIP;
2975
2976 phys->ops.update_split_role(phys, new_role);
2977 if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
2978 sde_enc->cur_master = phys;
2979 master_assigned = true;
2980 }
2981
2982 SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
2983 phys->hw_pp->idx - PINGPONG_0, prv_role,
2984 phys->split_role, active);
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -05002985 SDE_EVT32(DRMID(drm_enc), params->affected_displays,
2986 phys->hw_pp->idx - PINGPONG_0, prv_role,
2987 phys->split_role, active, num_active_phys);
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002988 }
2989}
2990
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05302991bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07002992{
2993 struct sde_encoder_virt *sde_enc;
2994 struct msm_display_info *disp_info;
2995
2996 if (!drm_enc) {
2997 SDE_ERROR("invalid encoder\n");
2998 return false;
2999 }
3000
3001 sde_enc = to_sde_encoder_virt(drm_enc);
3002 disp_info = &sde_enc->disp_info;
3003
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303004 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003005}
3006
Dhaval Patel0e558f42017-04-30 00:51:40 -07003007void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3008{
3009 struct sde_encoder_virt *sde_enc;
3010 struct sde_encoder_phys *phys;
3011 unsigned int i;
3012 struct sde_hw_ctl *ctl;
3013 struct msm_display_info *disp_info;
3014
3015 if (!drm_enc) {
3016 SDE_ERROR("invalid encoder\n");
3017 return;
3018 }
3019 sde_enc = to_sde_encoder_virt(drm_enc);
3020 disp_info = &sde_enc->disp_info;
3021
3022 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3023 phys = sde_enc->phys_encs[i];
3024
3025 if (phys && phys->hw_ctl) {
3026 ctl = phys->hw_ctl;
3027 if (ctl->ops.clear_pending_flush)
3028 ctl->ops.clear_pending_flush(ctl);
3029
3030 /* update only for command mode primary ctl */
3031 if ((phys == sde_enc->cur_master) &&
3032 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3033 && ctl->ops.trigger_pending)
3034 ctl->ops.trigger_pending(ctl);
3035 }
3036 }
3037}
3038
Ping Li8430ee12017-02-24 14:14:44 -08003039static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3040{
3041 void *dither_cfg;
3042 int ret = 0;
3043 size_t len = 0;
3044 enum sde_rm_topology_name topology;
3045
3046 if (!phys || !phys->connector || !phys->hw_pp ||
3047 !phys->hw_pp->ops.setup_dither)
3048 return;
3049 topology = sde_connector_get_topology_name(phys->connector);
3050 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3051 (phys->split_role == ENC_ROLE_SLAVE))
3052 return;
3053
3054 ret = sde_connector_get_dither_cfg(phys->connector,
3055 phys->connector->state, &dither_cfg, &len);
3056 if (!ret)
3057 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
3058}
3059
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003060static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3061 struct drm_display_mode *mode)
3062{
3063 u64 pclk_rate;
3064 u32 pclk_period;
3065 u32 line_time;
3066
3067 /*
3068 * For linetime calculation, only operate on master encoder.
3069 */
3070 if (!sde_enc->cur_master)
3071 return 0;
3072
3073 if (!sde_enc->cur_master->ops.get_line_count) {
3074 SDE_ERROR("get_line_count function not defined\n");
3075 return 0;
3076 }
3077
3078 pclk_rate = mode->clock; /* pixel clock in kHz */
3079 if (pclk_rate == 0) {
3080 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3081 return 0;
3082 }
3083
3084 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3085 if (pclk_period == 0) {
3086 SDE_ERROR("pclk period is 0\n");
3087 return 0;
3088 }
3089
3090 /*
3091 * Line time calculation based on Pixel clock and HTOTAL.
3092 * Final unit is in ns.
3093 */
3094 line_time = (pclk_period * mode->htotal) / 1000;
3095 if (line_time == 0) {
3096 SDE_ERROR("line time calculation is 0\n");
3097 return 0;
3098 }
3099
3100 SDE_DEBUG_ENC(sde_enc,
3101 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3102 pclk_rate, pclk_period, line_time);
3103
3104 return line_time;
3105}
3106
/*
 * _sde_encoder_wakeup_time - compute the absolute time of the next vsync
 * @drm_enc: encoder pointer; crtc and crtc state must be attached
 * @wakeup_time: output, absolute ktime of the estimated next vsync
 *
 * Uses the current scanout line and the per-line time to estimate when the
 * next vsync will occur. Returns 0 on success, -EINVAL on any error.
 */
static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct sde_encoder_virt *sde_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	sde_enc = to_sde_encoder_virt(drm_enc);

	if (!drm_enc->crtc || !drm_enc->crtc->state) {
		SDE_ERROR("crtc/crtc state object is NULL\n");
		return -EINVAL;
	}
	mode = &drm_enc->crtc->state->adjusted_mode;

	/*
	 * also validates cur_master and its get_line_count op, so the
	 * direct call below is safe when line_time is non-zero
	 */
	line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);

	vtotal = mode->vtotal;
	/* if scanout is past vtotal, assume a full frame until next vsync */
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	SDE_DEBUG_ENC(sde_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}
3153
3154static void sde_encoder_vsync_event_handler(unsigned long data)
3155{
3156 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3157 struct sde_encoder_virt *sde_enc;
3158 struct msm_drm_private *priv;
3159 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003160
3161 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
3162 !drm_enc->crtc) {
3163 SDE_ERROR("invalid parameters\n");
3164 return;
3165 }
3166
3167 sde_enc = to_sde_encoder_virt(drm_enc);
3168 priv = drm_enc->dev->dev_private;
3169
3170 if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3171 SDE_ERROR("invalid crtc index\n");
3172 return;
3173 }
3174 event_thread = &priv->event_thread[drm_enc->crtc->index];
3175 if (!event_thread) {
3176 SDE_ERROR("event_thread not found for crtc:%d\n",
3177 drm_enc->crtc->index);
3178 return;
3179 }
3180
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303181 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003182 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003183}
3184
/*
 * sde_encoder_vsync_event_work_handler - kthread work for the vsync timer
 * @work: embedded vsync_event_work member of sde_encoder_virt
 *
 * Takes a power vote only long enough to query the autorefresh state from
 * the master phys encoder; if autorefresh is enabled, re-arms the vsync
 * event timer for the next estimated vsync.
 */
static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, vsync_event_work);
	bool autorefresh_enabled = false;
	int rc = 0;
	ktime_t wakeup_time;

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	/* hardware access below requires an active power vote */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
		return;
	}

	if (sde_enc->cur_master &&
		sde_enc->cur_master->ops.is_autorefresh_enabled)
		autorefresh_enabled =
			sde_enc->cur_master->ops.is_autorefresh_enabled(
						sde_enc->cur_master);

	/* drop the vote before the early-out below */
	_sde_encoder_power_enable(sde_enc, false);

	/* Update timer if autorefresh is enabled else return */
	if (!autorefresh_enabled)
		return;

	if (_sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time))
		return;

	SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
	mod_timer(&sde_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
}
3223
Clarence Ip85f4f4532017-10-04 12:10:13 -04003224int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
Alan Kwong4aacd532017-02-04 18:51:33 -08003225 struct sde_encoder_kickoff_params *params)
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003226{
3227 struct sde_encoder_virt *sde_enc;
3228 struct sde_encoder_phys *phys;
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003229 bool needs_hw_reset = false;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003230 unsigned int i;
Clarence Ip85f4f4532017-10-04 12:10:13 -04003231 int rc, ret = 0;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003232
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003233 if (!drm_enc || !params) {
3234 SDE_ERROR("invalid args\n");
Clarence Ip85f4f4532017-10-04 12:10:13 -04003235 return -EINVAL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003236 }
3237 sde_enc = to_sde_encoder_virt(drm_enc);
3238
Clarence Ip19af1362016-09-23 14:57:51 -04003239 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003240 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003241
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05003242 /* prepare for next kickoff, may include waiting on previous kickoff */
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003243 SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003244 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003245 phys = sde_enc->phys_encs[i];
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003246 if (phys) {
Clarence Ip85f4f4532017-10-04 12:10:13 -04003247 if (phys->ops.prepare_for_kickoff) {
3248 rc = phys->ops.prepare_for_kickoff(
3249 phys, params);
3250 if (rc)
3251 ret = rc;
3252 }
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003253 if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
3254 needs_hw_reset = true;
Ping Li8430ee12017-02-24 14:14:44 -08003255 _sde_encoder_setup_dither(phys);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003256 }
3257 }
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003258 SDE_ATRACE_END("enc_prepare_for_kickoff");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003259
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003260 sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
3261
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003262 /* if any phys needs reset, reset all phys, in-order */
3263 if (needs_hw_reset) {
Dhaval Patel0e558f42017-04-30 00:51:40 -07003264 SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_FUNC_CASE1);
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003265 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3266 phys = sde_enc->phys_encs[i];
3267 if (phys && phys->ops.hw_reset)
3268 phys->ops.hw_reset(phys);
3269 }
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003270 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003271
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003272 _sde_encoder_update_master(drm_enc, params);
3273
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003274 _sde_encoder_update_roi(drm_enc);
3275
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003276 if (sde_enc->cur_master && sde_enc->cur_master->connector) {
3277 rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003278 if (rc) {
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003279 SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
3280 sde_enc->cur_master->connector->base.id,
3281 rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003282 ret = rc;
3283 }
Lloyd Atkinson05d75512017-01-17 14:45:51 -05003284 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003285
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07003286 if (_sde_encoder_is_dsc_enabled(drm_enc)) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003287 rc = _sde_encoder_dsc_setup(sde_enc, params);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003288 if (rc) {
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003289 SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
Clarence Ip85f4f4532017-10-04 12:10:13 -04003290 ret = rc;
3291 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04003292 }
Clarence Ip85f4f4532017-10-04 12:10:13 -04003293
3294 return ret;
Alan Kwong628d19e2016-10-31 13:50:13 -04003295}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003296
Clarence Ip662698e2017-09-12 18:34:16 -04003297/**
3298 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
3299 * with the specified encoder, and unstage all pipes from it
3300 * @encoder: encoder pointer
3301 * Returns: 0 on success
3302 */
3303static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
3304{
3305 struct sde_encoder_virt *sde_enc;
3306 struct sde_encoder_phys *phys;
3307 unsigned int i;
3308 int rc = 0;
3309
3310 if (!drm_enc) {
3311 SDE_ERROR("invalid encoder\n");
3312 return -EINVAL;
3313 }
3314
3315 sde_enc = to_sde_encoder_virt(drm_enc);
3316
3317 SDE_ATRACE_BEGIN("encoder_release_lm");
3318 SDE_DEBUG_ENC(sde_enc, "\n");
3319
3320 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3321 phys = sde_enc->phys_encs[i];
3322 if (!phys)
3323 continue;
3324
3325 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
3326
3327 rc = sde_encoder_helper_reset_mixers(phys, NULL);
3328 if (rc)
3329 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
3330 }
3331
3332 SDE_ATRACE_END("encoder_release_lm");
3333 return rc;
3334}
3335
/*
 * sde_encoder_kickoff - trigger a flush/start on all physical encoders
 * @drm_enc: encoder pointer
 * @is_error: true when kicking off to flush out an errored frame; staged
 *            pipes are unstaged first so buffers get released
 *
 * Arms the frame-done watchdog, performs the flush/start on hardware, lets
 * each phys encoder run its post-kickoff hook, and (for DSI panels) re-arms
 * the vsync event timer used for autorefresh tracking.
 */
void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_ATRACE_BEGIN("encoder_kickoff");
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	/* watchdog timeout scaled from frames to milliseconds via vrefresh */
	atomic_set(&sde_enc->frame_done_timeout,
			SDE_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&sde_enc->frame_done_timer, jiffies +
		((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));

	/* create a 'no pipes' commit to release buffers on errors */
	if (is_error)
		_sde_encoder_reset_ctl_hw(drm_enc);

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	/* re-arm the autorefresh vsync timer for DSI displays only */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
			!_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
		mod_timer(&sde_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	SDE_ATRACE_END("encoder_kickoff");
}
3381
Clarence Ip662698e2017-09-12 18:34:16 -04003382int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003383 struct drm_framebuffer *fb)
3384{
3385 struct drm_encoder *drm_enc;
3386 struct sde_hw_mixer_cfg mixer;
3387 struct sde_rm_hw_iter lm_iter;
3388 bool lm_valid = false;
3389
3390 if (!phys_enc || !phys_enc->parent) {
3391 SDE_ERROR("invalid encoder\n");
3392 return -EINVAL;
3393 }
3394
3395 drm_enc = phys_enc->parent;
3396 memset(&mixer, 0, sizeof(mixer));
3397
3398 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003399 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
3400 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
3401
3402 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
3403 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
3404 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
3405
3406 if (!hw_lm)
3407 continue;
3408
3409 /* need to flush LM to remove it */
3410 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
3411 phys_enc->hw_ctl->ops.update_pending_flush)
3412 phys_enc->hw_ctl->ops.update_pending_flush(
3413 phys_enc->hw_ctl,
3414 phys_enc->hw_ctl->ops.get_bitmask_mixer(
3415 phys_enc->hw_ctl, hw_lm->idx));
3416
3417 if (fb) {
3418 /* assume a single LM if targeting a frame buffer */
3419 if (lm_valid)
3420 continue;
3421
3422 mixer.out_height = fb->height;
3423 mixer.out_width = fb->width;
3424
3425 if (hw_lm->ops.setup_mixer_out)
3426 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
3427 }
3428
3429 lm_valid = true;
3430
3431 /* only enable border color on LM */
3432 if (phys_enc->hw_ctl->ops.setup_blendstage)
3433 phys_enc->hw_ctl->ops.setup_blendstage(
Dhaval Patel572cfd22017-06-12 19:33:39 -07003434 phys_enc->hw_ctl, hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003435 }
3436
3437 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04003438 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07003439 return -EFAULT;
3440 }
3441 return 0;
3442}
3443
Lloyd Atkinsone123c172017-02-27 13:19:08 -05003444void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
3445{
3446 struct sde_encoder_virt *sde_enc;
3447 struct sde_encoder_phys *phys;
3448 int i;
3449
3450 if (!drm_enc) {
3451 SDE_ERROR("invalid encoder\n");
3452 return;
3453 }
3454 sde_enc = to_sde_encoder_virt(drm_enc);
3455
3456 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3457 phys = sde_enc->phys_encs[i];
3458 if (phys && phys->ops.prepare_commit)
3459 phys->ops.prepare_commit(phys);
3460 }
3461}
3462
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07003463#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003464static int _sde_encoder_status_show(struct seq_file *s, void *data)
3465{
3466 struct sde_encoder_virt *sde_enc;
3467 int i;
3468
3469 if (!s || !s->private)
3470 return -EINVAL;
3471
3472 sde_enc = s->private;
3473
3474 mutex_lock(&sde_enc->enc_lock);
3475 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3476 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3477
3478 if (!phys)
3479 continue;
3480
3481 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
3482 phys->intf_idx - INTF_0,
3483 atomic_read(&phys->vsync_cnt),
3484 atomic_read(&phys->underrun_cnt));
3485
3486 switch (phys->intf_mode) {
3487 case INTF_MODE_VIDEO:
3488 seq_puts(s, "mode: video\n");
3489 break;
3490 case INTF_MODE_CMD:
3491 seq_puts(s, "mode: command\n");
3492 break;
3493 case INTF_MODE_WB_BLOCK:
3494 seq_puts(s, "mode: wb block\n");
3495 break;
3496 case INTF_MODE_WB_LINE:
3497 seq_puts(s, "mode: wb line\n");
3498 break;
3499 default:
3500 seq_puts(s, "mode: ???\n");
3501 break;
3502 }
3503 }
3504 mutex_unlock(&sde_enc->enc_lock);
3505
3506 return 0;
3507}
3508
/* debugfs open: bind the encoder (inode private) to the status seq_file */
static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}
3514
Dhaval Patelf9245d62017-03-28 16:24:00 -07003515static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303516 const char __user *user_buf, size_t count, loff_t *ppos)
3517{
3518 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003519 int i = 0, rc;
3520 char buf[MISR_BUFF_SIZE + 1];
3521 size_t buff_copy;
3522 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303523
Dhaval Patelf9245d62017-03-28 16:24:00 -07003524 if (!file || !file->private_data)
3525 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303526
Dhaval Patelf9245d62017-03-28 16:24:00 -07003527 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303528
Dhaval Patelf9245d62017-03-28 16:24:00 -07003529 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
3530 if (copy_from_user(buf, user_buf, buff_copy))
3531 return -EINVAL;
3532
3533 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303534
3535 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07003536 return -EINVAL;
3537
3538 rc = _sde_encoder_power_enable(sde_enc, true);
3539 if (rc)
3540 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303541
3542 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003543 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07003544 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303545 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3546 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3547
Dhaval Patelf9245d62017-03-28 16:24:00 -07003548 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303549 continue;
3550
Dhaval Patelf9245d62017-03-28 16:24:00 -07003551 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303552 }
3553 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003554 _sde_encoder_power_enable(sde_enc, false);
3555
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303556 return count;
3557}
3558
Dhaval Patelf9245d62017-03-28 16:24:00 -07003559static ssize_t _sde_encoder_misr_read(struct file *file,
3560 char __user *user_buff, size_t count, loff_t *ppos)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303561{
3562 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07003563 int i = 0, len = 0;
3564 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
3565 int rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303566
3567 if (*ppos)
3568 return 0;
3569
Dhaval Patelf9245d62017-03-28 16:24:00 -07003570 if (!file || !file->private_data)
3571 return -EINVAL;
3572
3573 sde_enc = file->private_data;
3574
3575 rc = _sde_encoder_power_enable(sde_enc, true);
3576 if (rc)
3577 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303578
3579 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003580 if (!sde_enc->misr_enable) {
3581 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
3582 "disabled\n");
3583 goto buff_check;
3584 } else if (sde_enc->disp_info.capabilities &
3585 ~MSM_DISPLAY_CAP_VID_MODE) {
3586 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
3587 "unsupported\n");
3588 goto buff_check;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303589 }
3590
Dhaval Patelf9245d62017-03-28 16:24:00 -07003591 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3592 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3593 if (!phys || !phys->ops.collect_misr)
3594 continue;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303595
Dhaval Patelf9245d62017-03-28 16:24:00 -07003596 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
3597 "Intf idx:%d\n", phys->intf_idx - INTF_0);
3598 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
3599 phys->ops.collect_misr(phys));
3600 }
3601
3602buff_check:
3603 if (count <= len) {
3604 len = 0;
3605 goto end;
3606 }
3607
3608 if (copy_to_user(user_buff, buf, len)) {
3609 len = -EFAULT;
3610 goto end;
3611 }
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303612
3613 *ppos += len; /* increase offset */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303614
Dhaval Patelf9245d62017-03-28 16:24:00 -07003615end:
3616 mutex_unlock(&sde_enc->enc_lock);
3617 _sde_encoder_power_enable(sde_enc, false);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303618 return len;
3619}
3620
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003621static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003622{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003623 struct sde_encoder_virt *sde_enc;
3624 struct msm_drm_private *priv;
3625 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07003626 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003627
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003628 static const struct file_operations debugfs_status_fops = {
3629 .open = _sde_encoder_debugfs_status_open,
3630 .read = seq_read,
3631 .llseek = seq_lseek,
3632 .release = single_release,
3633 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303634
3635 static const struct file_operations debugfs_misr_fops = {
3636 .open = simple_open,
3637 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003638 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303639 };
3640
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003641 char name[SDE_NAME_SIZE];
3642
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003643 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003644 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003645 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003646 }
3647
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003648 sde_enc = to_sde_encoder_virt(drm_enc);
3649 priv = drm_enc->dev->dev_private;
3650 sde_kms = to_sde_kms(priv->kms);
3651
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003652 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
3653
3654 /* create overall sub-directory for the encoder */
3655 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07003656 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003657 if (!sde_enc->debugfs_root)
3658 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303659
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003660 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003661 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003662 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303663
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003664 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003665 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003666
Alan Kwongf2debb02017-04-05 06:19:29 -07003667 for (i = 0; i < sde_enc->num_phys_encs; i++)
3668 if (sde_enc->phys_encs[i] &&
3669 sde_enc->phys_encs[i]->ops.late_register)
3670 sde_enc->phys_encs[i]->ops.late_register(
3671 sde_enc->phys_encs[i],
3672 sde_enc->debugfs_root);
3673
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003674 return 0;
3675}
3676
3677static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
3678{
3679 struct sde_encoder_virt *sde_enc;
3680
3681 if (!drm_enc)
3682 return;
3683
3684 sde_enc = to_sde_encoder_virt(drm_enc);
3685 debugfs_remove_recursive(sde_enc->debugfs_root);
3686}
3687#else
/* stub when CONFIG_DEBUG_FS is disabled: no debug nodes to create */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
3692
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07003693static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003694{
3695}
3696#endif
3697
/* drm_encoder_funcs.late_register hook: set up debugfs for this encoder */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
3702
/* drm_encoder_funcs.early_unregister hook: tear down this encoder's debugfs */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
3707
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003708static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04003709 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003710 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003711 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003712{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003713 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003714
Clarence Ip19af1362016-09-23 14:57:51 -04003715 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003716
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003717 /*
3718 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
3719 * in this function, check up-front.
3720 */
3721 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
3722 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003723 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003724 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003725 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003726 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003727
Clarence Ipa4039322016-07-15 16:23:59 -04003728 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003729 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003730
3731 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003732 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003733 PTR_ERR(enc));
3734 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3735 }
3736
3737 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3738 ++sde_enc->num_phys_encs;
3739 }
3740
Clarence Ipa4039322016-07-15 16:23:59 -04003741 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003742 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003743
3744 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003745 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003746 PTR_ERR(enc));
3747 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3748 }
3749
3750 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3751 ++sde_enc->num_phys_encs;
3752 }
3753
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003754 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003755}
3756
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003757static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
3758 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04003759{
3760 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04003761
Clarence Ip19af1362016-09-23 14:57:51 -04003762 if (!sde_enc) {
3763 SDE_ERROR("invalid encoder\n");
3764 return -EINVAL;
3765 }
3766
3767 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04003768
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003769 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003770 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003771 sde_enc->num_phys_encs);
3772 return -EINVAL;
3773 }
3774
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003775 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003776
3777 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003778 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003779 PTR_ERR(enc));
3780 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3781 }
3782
3783 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3784 ++sde_enc->num_phys_encs;
3785
3786 return 0;
3787}
3788
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003789static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003790 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04003791 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003792 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003793{
3794 int ret = 0;
3795 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003796 enum sde_intf_type intf_type;
3797 struct sde_encoder_virt_ops parent_ops = {
3798 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07003799 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04003800 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003801 };
3802 struct sde_enc_phys_init_params phys_params;
3803
Clarence Ip19af1362016-09-23 14:57:51 -04003804 if (!sde_enc || !sde_kms) {
3805 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
3806 sde_enc != 0, sde_kms != 0);
3807 return -EINVAL;
3808 }
3809
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003810 memset(&phys_params, 0, sizeof(phys_params));
3811 phys_params.sde_kms = sde_kms;
3812 phys_params.parent = &sde_enc->base;
3813 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003814 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003815
Clarence Ip19af1362016-09-23 14:57:51 -04003816 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003817
Clarence Ipa4039322016-07-15 16:23:59 -04003818 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003819 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
3820 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04003821 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003822 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3823 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07003824 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
3825 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3826 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04003827 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
3828 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
3829 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003830 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04003831 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003832 return -EINVAL;
3833 }
3834
Clarence Ip88270a62016-06-26 10:09:34 -04003835 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003836
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003837 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
3838
Clarence Ip19af1362016-09-23 14:57:51 -04003839 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003840
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003841 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
3842 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003843 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
3844
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003845 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04003846 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003847 /*
3848 * Left-most tile is at index 0, content is controller id
3849 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
3850 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
3851 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003852 u32 controller_id = disp_info->h_tile_instance[i];
3853
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003854 if (disp_info->num_of_h_tiles > 1) {
3855 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003856 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003857 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003858 phys_params.split_role = ENC_ROLE_SLAVE;
3859 } else {
3860 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003861 }
3862
Clarence Ip19af1362016-09-23 14:57:51 -04003863 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003864 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003865
Alan Kwongbb27c092016-07-20 16:41:25 -04003866 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003867 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003868 phys_params.wb_idx = sde_encoder_get_wb(
3869 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04003870 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003871 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003872 SDE_ERROR_ENC(sde_enc,
3873 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003874 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003875 ret = -EINVAL;
3876 }
Alan Kwongbb27c092016-07-20 16:41:25 -04003877 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003878 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003879 phys_params.intf_idx = sde_encoder_get_intf(
3880 sde_kms->catalog, intf_type,
3881 controller_id);
3882 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003883 SDE_ERROR_ENC(sde_enc,
3884 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003885 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003886 ret = -EINVAL;
3887 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003888 }
3889
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003890 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04003891 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003892 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
3893 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003894 else
3895 ret = sde_encoder_virt_add_phys_encs(
3896 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003897 sde_enc,
3898 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003899 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04003900 SDE_ERROR_ENC(sde_enc,
3901 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003902 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003903 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08003904
3905 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3906 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3907
3908 if (phys) {
3909 atomic_set(&phys->vsync_cnt, 0);
3910 atomic_set(&phys->underrun_cnt, 0);
3911 }
3912 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003913 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003914
3915 return ret;
3916}
3917
Alan Kwong628d19e2016-10-31 13:50:13 -04003918static void sde_encoder_frame_done_timeout(unsigned long data)
3919{
3920 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3921 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3922 struct msm_drm_private *priv;
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003923 u32 event;
Alan Kwong628d19e2016-10-31 13:50:13 -04003924
3925 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3926 SDE_ERROR("invalid parameters\n");
3927 return;
3928 }
3929 priv = drm_enc->dev->dev_private;
3930
3931 if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003932 SDE_DEBUG_ENC(sde_enc, "invalid timeout\n");
3933 SDE_EVT32(DRMID(drm_enc), sde_enc->frame_busy_mask[0], 0);
Alan Kwong628d19e2016-10-31 13:50:13 -04003934 return;
3935 } else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003936 SDE_ERROR_ENC(sde_enc, "invalid timeout\n");
Alan Kwong628d19e2016-10-31 13:50:13 -04003937 SDE_EVT32(DRMID(drm_enc), 0, 1);
3938 return;
3939 }
3940
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003941 SDE_ERROR_ENC(sde_enc, "frame done timeout\n");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003942
Veera Sundaram Sankaran7ee99092017-06-13 11:19:36 -07003943 event = SDE_ENCODER_FRAME_EVENT_ERROR;
Ingrid Gallardo79b44392017-05-30 16:30:52 -07003944 SDE_EVT32(DRMID(drm_enc), event);
3945 sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04003946}
3947
/* DRM encoder helper vtable: modeset/enable/disable/atomic-check hooks. */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
3954
/* DRM encoder core vtable: lifecycle and debugfs (de)registration hooks. */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
3960
Clarence Ip3649f8b2016-10-31 09:59:44 -04003961struct drm_encoder *sde_encoder_init(
3962 struct drm_device *dev,
3963 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003964{
3965 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04003966 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003967 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003968 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003969 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08003970 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003971 int ret = 0;
3972
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003973 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
3974 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003975 ret = -ENOMEM;
3976 goto fail;
3977 }
3978
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003979 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003980 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
3981 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003982 if (ret)
3983 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003984
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003985 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003986 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003987 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003988 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003989 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003990
Alan Kwong628d19e2016-10-31 13:50:13 -04003991 atomic_set(&sde_enc->frame_done_timeout, 0);
3992 setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
3993 (unsigned long) sde_enc);
3994
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003995 if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
3996 disp_info->is_primary)
3997 setup_timer(&sde_enc->vsync_event_timer,
3998 sde_encoder_vsync_event_handler,
3999 (unsigned long)sde_enc);
4000
Dhaval Patel020f7e122016-11-15 14:39:18 -08004001 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
4002 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004003 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08004004 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07004005 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08004006 PTR_ERR(sde_enc->rsc_client));
4007 sde_enc->rsc_client = NULL;
4008 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08004009
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004010 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04004011 kthread_init_delayed_work(&sde_enc->delayed_off_work,
4012 sde_encoder_off_work);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004013 sde_enc->idle_timeout = IDLE_TIMEOUT;
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07004014 sde_enc->vblank_enabled = false;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004015
4016 kthread_init_work(&sde_enc->vsync_event_work,
4017 sde_encoder_vsync_event_work_handler);
4018
Dhaval Patel020f7e122016-11-15 14:39:18 -08004019 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
4020
Clarence Ip19af1362016-09-23 14:57:51 -04004021 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004022
4023 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004024
4025fail:
Clarence Ip19af1362016-09-23 14:57:51 -04004026 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004027 if (drm_enc)
4028 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07004029
4030 return ERR_PTR(ret);
4031}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004032
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004033int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4034 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004035{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004036 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004037 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004038 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004039
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004040 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004041 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004042 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004043 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004044 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004045 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004046
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004047 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4048 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004049
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004050 switch (event) {
4051 case MSM_ENC_COMMIT_DONE:
4052 fn_wait = phys->ops.wait_for_commit_done;
4053 break;
4054 case MSM_ENC_TX_COMPLETE:
4055 fn_wait = phys->ops.wait_for_tx_complete;
4056 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004057 case MSM_ENC_VBLANK:
4058 fn_wait = phys->ops.wait_for_vblank;
4059 break;
4060 default:
4061 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4062 event);
4063 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004064 };
4065
4066 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004067 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004068 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004069 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004070 if (ret)
4071 return ret;
4072 }
4073 }
4074
4075 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004076}
4077
Alan Kwong67a3f792016-11-01 23:16:53 -04004078enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4079{
4080 struct sde_encoder_virt *sde_enc = NULL;
4081 int i;
4082
4083 if (!encoder) {
4084 SDE_ERROR("invalid encoder\n");
4085 return INTF_MODE_NONE;
4086 }
4087 sde_enc = to_sde_encoder_virt(encoder);
4088
4089 if (sde_enc->cur_master)
4090 return sde_enc->cur_master->intf_mode;
4091
4092 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4093 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4094
4095 if (phys)
4096 return phys->intf_mode;
4097 }
4098
4099 return INTF_MODE_NONE;
4100}