blob: 8a46d6655b75fb01ac07a5f70bd97d60148d35c4 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070070#define MAX_CHANNELS_PER_ENC 2
71
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040076/* Maximum number of VSYNC wait attempts for RSC state transition */
77#define MAX_RSC_WAIT 5
78
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, sets RSC to IDLE and
 *	leaves the RC STATE in the PRE_OFF state.
 *	It should be followed by the STOP event as part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @SDE_ENC_RC_EVENT_PRE_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that a seamless mode switch is in progress. A
 *	client needs to turn off only the irq - leave clocks ON to reduce the
 *	mode switch latency.
 * @SDE_ENC_RC_EVENT_POST_MODESET:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that seamless mode switch is complete and resources are
 *	acquired. Clients want to turn on the irq again and update the rsc
 *	with new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and request RSC with IDLE state
 *	and change the resource state to IDLE.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE
};
130
/*
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
146
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb: callback handler for frame event
 * @crtc_frame_event_cb_data: callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @mode_info:		local copy of msm_mode_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @topology:		topology of the display
 * @mode_set_complete:	flag to indicate modeset completion
 * @rsc_config:		rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @idle_timeout:	idle timeout duration in milliseconds
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	struct msm_mode_info mode_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct msm_display_topology topology;
	bool mode_set_complete;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;

	u32 idle_timeout;
};
248
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400249#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700250
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400251bool sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
252
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800253{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400254 struct sde_encoder_virt *sde_enc;
255 struct msm_compression_info *comp_info;
256
257 if (!drm_enc)
258 return false;
259
260 sde_enc = to_sde_encoder_virt(drm_enc);
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -0700261 comp_info = &sde_enc->mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800262
263 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
264}
265
Dhaval Patele17e0ee2017-08-23 18:01:42 -0700266void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc, u32 idle_timeout)
267{
268 struct sde_encoder_virt *sde_enc;
269
270 if (!drm_enc)
271 return;
272
273 sde_enc = to_sde_encoder_virt(drm_enc);
274 sde_enc->idle_timeout = idle_timeout;
275}
276
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400277bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
278{
279 enum sde_rm_topology_name topology;
280 struct sde_encoder_virt *sde_enc;
281 struct drm_connector *drm_conn;
282
283 if (!drm_enc)
284 return false;
285
286 sde_enc = to_sde_encoder_virt(drm_enc);
287 if (!sde_enc->cur_master)
288 return false;
289
290 drm_conn = sde_enc->cur_master->connector;
291 if (!drm_conn)
292 return false;
293
294 topology = sde_connector_get_topology_name(drm_conn);
295 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
296 return true;
297
298 return false;
299}
300
Dhaval Patelf9245d62017-03-28 16:24:00 -0700301static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
302 bool enable)
303{
304 struct drm_encoder *drm_enc;
305 struct msm_drm_private *priv;
306 struct sde_kms *sde_kms;
307
308 if (!sde_enc) {
309 SDE_ERROR("invalid sde enc\n");
310 return -EINVAL;
311 }
312
313 drm_enc = &sde_enc->base;
314 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
315 SDE_ERROR("drm device invalid\n");
316 return -EINVAL;
317 }
318
319 priv = drm_enc->dev->dev_private;
320 if (!priv->kms) {
321 SDE_ERROR("invalid kms\n");
322 return -EINVAL;
323 }
324
325 sde_kms = to_sde_kms(priv->kms);
326
327 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
328 enable);
329}
330
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500331void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
332 enum sde_intr_idx intr_idx)
333{
334 SDE_EVT32(DRMID(phys_enc->parent),
335 phys_enc->intf_idx - INTF_0,
336 phys_enc->hw_pp->idx - PINGPONG_0,
337 intr_idx);
338 SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);
339
340 if (phys_enc->parent_ops.handle_frame_done)
341 phys_enc->parent_ops.handle_frame_done(
342 phys_enc->parent, phys_enc,
343 SDE_ENCODER_FRAME_EVENT_ERROR);
344}
345
/**
 * sde_encoder_helper_wait_for_irq - wait for an encoder interrupt, with
 *	recovery if the interrupt fired but its handler did not run
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: encoder interrupt index to wait on
 * @wait_info: wait parameters, including the atomic pending count
 *
 * Return: 0 on success (irq received or wait unnecessary), -EINVAL on bad
 * params, -EWOULDBLOCK if the encoder is disabled, -ETIMEDOUT if the wait
 * expired and the irq status shows the interrupt never fired.
 *
 * NOTE(review): phys_enc->hw_pp->idx is dereferenced in the event logs
 * below without a NULL check, unlike the SDE_DEBUG_PHYS/SDE_ERROR_PHYS
 * macros which guard hw_pp - callers must guarantee hw_pp is set; confirm.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered/enabled for this index: nothing to wait on */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	/* block until the pending count drops or the timeout expires */
	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/*
		 * Timed out: re-read the raw irq status (clearing it) to
		 * distinguish "irq fired but handler never ran" from a true
		 * hardware timeout.
		 */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/*
			 * Run the missed callback by hand, with local irqs
			 * off to mimic the interrupt context it expects.
			 */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx,
				phys_enc->hw_pp->idx - PINGPONG_0,
				atomic_read(wait_info->atomic_cnt), irq_status,
				SDE_EVTLOG_ERROR);
		}
	} else {
		/* wait completed normally before the timeout */
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
427
/**
 * sde_encoder_helper_register_irq - look up, register and enable the core
 *	irq for the given encoder interrupt index
 * @phys_enc: Pointer to physical encoder structure
 * @intr_idx: encoder interrupt index to register
 *
 * On success irq->irq_idx holds the core irq index; on any failure it is
 * reset to -EINVAL so later unregister calls are skipped.
 *
 * Return: 0 on success or if already registered, -EINVAL on bad params or
 * failed lookup, or the error from callback registration / irq enable.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* a non-negative irq_idx means this interrupt is already live */
	if (irq->irq_idx >= 0) {
		SDE_ERROR_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	/* map (intr_type, hw block) to the core interrupt index */
	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* roll back the callback registration on enable failure */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
487
488int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
489 enum sde_intr_idx intr_idx)
490{
491 struct sde_encoder_irq *irq;
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400492 int ret;
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500493
494 if (!phys_enc) {
495 SDE_ERROR("invalid encoder\n");
496 return -EINVAL;
497 }
498 irq = &phys_enc->irq[intr_idx];
499
500 /* silently skip irqs that weren't registered */
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400501 if (irq->irq_idx < 0) {
502 SDE_ERROR(
503 "extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
504 DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
505 irq->irq_idx);
506 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
507 irq->irq_idx, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500508 return 0;
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400509 }
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500510
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400511 ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
512 if (ret)
513 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
514 irq->irq_idx, ret, SDE_EVTLOG_ERROR);
515
516 ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500517 &irq->cb);
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400518 if (ret)
519 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
520 irq->irq_idx, ret, SDE_EVTLOG_ERROR);
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500521
522 SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
523 SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);
524
Lloyd Atkinsonde4270ab2017-06-27 16:43:53 -0400525 irq->irq_idx = -EINVAL;
526
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500527 return 0;
528}
529
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400530void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400531 struct sde_encoder_hw_resources *hw_res,
532 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400533{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400534 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400535 int i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400536
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400537 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400538 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
539 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400540 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400541 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400542
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400543 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400544 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400545
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400546 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400547 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400548 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
549
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400550 for (i = 0; i < sde_enc->num_phys_encs; i++) {
551 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
552
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400553 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400554 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400555 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700556
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -0700557 hw_res->topology = sde_enc->mode_info.topology;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400558}
559
Clarence Ip3649f8b2016-10-31 09:59:44 -0400560void sde_encoder_destroy(struct drm_encoder *drm_enc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400561{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400562 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400563 int i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400564
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400565 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -0400566 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400567 return;
568 }
569
570 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400571 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400572
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700573 mutex_lock(&sde_enc->enc_lock);
Dhaval Patel020f7e122016-11-15 14:39:18 -0800574 sde_rsc_client_destroy(sde_enc->rsc_client);
575
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700576 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400577 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
578
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400579 if (phys && phys->ops.destroy) {
580 phys->ops.destroy(phys);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400581 --sde_enc->num_phys_encs;
582 sde_enc->phys_encs[i] = NULL;
583 }
584 }
585
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700586 if (sde_enc->num_phys_encs)
Clarence Ip19af1362016-09-23 14:57:51 -0400587 SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400588 sde_enc->num_phys_encs);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700589 sde_enc->num_phys_encs = 0;
590 mutex_unlock(&sde_enc->enc_lock);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400591
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400592 drm_encoder_cleanup(drm_enc);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700593 mutex_destroy(&sde_enc->enc_lock);
594
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400595 kfree(sde_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700596}
597
/**
 * sde_encoder_helper_split_config - program MDP top split-pipe / pp-split
 *	registers according to this phys encoder's split role
 * @phys_enc: Pointer to physical encoder structure
 * @interface: intf block this phys encoder drives
 *
 * Only applies to DSI displays; returns silently for other intf types.
 * SOLO role disables both split modes; MASTER programs split-pipe; any
 * other role programs pp-split using the master's (first) pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config only applies to DSI interfaces */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is still zeroed here, so both calls write "disabled" */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* pp-split slave intf is only meaningful for PPSPLIT topology */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
667
Jeykumar Sankaraneb49ff32017-04-12 16:33:25 -0700668static void _sde_encoder_adjust_mode(struct drm_connector *connector,
669 struct drm_display_mode *adj_mode)
670{
671 struct drm_display_mode *cur_mode;
672
673 if (!connector || !adj_mode)
674 return;
675
676 list_for_each_entry(cur_mode, &connector->modes, head) {
677 if (cur_mode->vdisplay == adj_mode->vdisplay &&
678 cur_mode->hdisplay == adj_mode->hdisplay &&
679 cur_mode->vrefresh == adj_mode->vrefresh) {
680 adj_mode->private = cur_mode->private;
Jeykumar Sankaran69934622017-05-31 18:16:25 -0700681 adj_mode->private_flags |= cur_mode->private_flags;
Jeykumar Sankaraneb49ff32017-04-12 16:33:25 -0700682 }
683 }
684}
685
/*
 * sde_encoder_virt_atomic_check - encoder atomic_check callback.
 *
 * Validates the requested mode against every physical encoder backing this
 * virtual encoder, restores driver-private mode fields, and reserves
 * dynamic hardware resources for the atomic-test phase.
 *
 * @drm_enc: virtual encoder being checked
 * @crtc_state: new crtc state carrying mode/adjusted_mode
 * @conn_state: new connector state
 * Return: 0 on success, negative errno on validation/reservation failure
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	SDE_EVT32(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_sde_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* prefer the phys encoder's atomic_check; fall back to
		 * legacy mode_fixup when atomic_check is not implemented
		 */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/* Reserve dynamic resources now. Indicating AtomicTest phase */
	if (!ret) {
		/*
		 * Avoid reserving resources when mode set is pending. Topology
		 * info may not be available to complete reservation.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)
				&& sde_enc->mode_set_complete) {
			ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
				conn_state, true);
			/* rearm: next reservation waits for a new mode set */
			sde_enc->mode_set_complete = false;
		}
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
762
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800763static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
764 int pic_width, int pic_height)
765{
766 if (!dsc || !pic_width || !pic_height) {
767 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
768 pic_width, pic_height);
769 return -EINVAL;
770 }
771
772 if ((pic_width % dsc->slice_width) ||
773 (pic_height % dsc->slice_height)) {
774 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
775 pic_width, pic_height,
776 dsc->slice_width, dsc->slice_height);
777 return -EINVAL;
778 }
779
780 dsc->pic_width = pic_width;
781 dsc->pic_height = pic_height;
782
783 return 0;
784}
785
786static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
787 int intf_width)
788{
789 int slice_per_pkt, slice_per_intf;
790 int bytes_in_slice, total_bytes_per_intf;
791
792 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
793 (intf_width < dsc->slice_width)) {
794 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
795 intf_width, dsc ? dsc->slice_width : -1);
796 return;
797 }
798
799 slice_per_pkt = dsc->slice_per_pkt;
800 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
801
802 /*
803 * If slice_per_pkt is greater than slice_per_intf then default to 1.
804 * This can happen during partial update.
805 */
806 if (slice_per_pkt > slice_per_intf)
807 slice_per_pkt = 1;
808
809 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
810 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
811
812 dsc->eol_byte_num = total_bytes_per_intf % 3;
813 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
814 dsc->bytes_in_slice = bytes_in_slice;
815 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
816 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
817}
818
819static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
820 int enc_ip_width)
821{
822 int ssm_delay, total_pixels, soft_slice_per_enc;
823
824 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
825
826 /*
827 * minimum number of initial line pixels is a sum of:
828 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
829 * 91 for 10 bpc) * 3
830 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
831 * 3. the initial xmit delay
832 * 4. total pipeline delay through the "lock step" of encoder (47)
833 * 5. 6 additional pixels as the output of the rate buffer is
834 * 48 bits wide
835 */
836 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
837 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
838 if (soft_slice_per_enc > 1)
839 total_pixels += (ssm_delay * 3);
840 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
841 return 0;
842}
843
844static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
845 struct msm_display_dsc_info *dsc)
846{
847 /*
848 * As per the DSC spec, ICH_RESET can be either end of the slice line
849 * or at the end of the slice. HW internally generates ich_reset at
850 * end of the slice line if DSC_MERGE is used or encoder has two
851 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
852 * is not used then it will generate ich_reset at the end of slice.
853 *
854 * Now as per the spec, during one PPS session, position where
855 * ich_reset is generated should not change. Now if full-screen frame
856 * has more than 1 soft slice then HW will automatically generate
857 * ich_reset at the end of slice_line. But for the same panel, if
858 * partial frame is enabled and only 1 encoder is used with 1 slice,
859 * then HW will generate ich_reset at end of the slice. This is a
860 * mismatch. Prevent this by overriding HW's decision.
861 */
862 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
863 (dsc->slice_width == dsc->pic_width);
864}
865
866static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
867 struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400868 u32 common_mode, bool ich_reset, bool enable)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800869{
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400870 if (!enable) {
871 if (hw_pp->ops.disable_dsc)
872 hw_pp->ops.disable_dsc(hw_pp);
873 return;
874 }
875
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800876 if (hw_dsc->ops.dsc_config)
877 hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);
878
879 if (hw_dsc->ops.dsc_config_thresh)
880 hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);
881
882 if (hw_pp->ops.setup_dsc)
883 hw_pp->ops.setup_dsc(hw_pp);
884
885 if (hw_pp->ops.enable_dsc)
886 hw_pp->ops.enable_dsc(hw_pp);
887}
888
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400889static void _sde_encoder_get_connector_roi(
890 struct sde_encoder_virt *sde_enc,
891 struct sde_rect *merged_conn_roi)
892{
893 struct drm_connector *drm_conn;
894 struct sde_connector_state *c_state;
895
896 if (!sde_enc || !merged_conn_roi)
897 return;
898
899 drm_conn = sde_enc->phys_encs[0]->connector;
900
901 if (!drm_conn || !drm_conn->state)
902 return;
903
904 c_state = to_sde_connector_state(drm_conn->state);
905 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
906}
907
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - set up DSC for topologies using a
 * single DSC encoder and a single interface (N layer mixers upstream).
 *
 * Picture dimensions come from the current connector ROI. Uses DSC/pingpong
 * block 0 and the current master physical encoder.
 *
 * @sde_enc: virtual encoder to configure
 * Return: 0 on success, -EINVAL when a required hw block is missing
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_display_dsc_info *dsc =
		&sde_enc->mode_info.comp_info.dsc_info;

	if (dsc == NULL || hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	/* picture dimensions track the (merged) connector ROI */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* single encoder: encoder input width equals interface width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en=false: no ich_reset override for this topology */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -0700949
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - set up DSC for the dual-pipe
 * topology: 2 layer mixers, 2 DSC encoders, 2 interfaces (no DSC merge).
 *
 * Each DSC encoder drives its own interface. During a half-panel partial
 * update only one display/interface is active; the inactive side's DSC
 * path is disabled.
 *
 * @sde_enc: virtual encoder to configure
 * @params: kickoff parameters; affected_displays selects active interfaces
 * Return: 0 on success, -EINVAL when a required hw block is missing
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	bool half_panel_partial_update;
	int i;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	/* exactly one display affected -> half-panel partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	/* local copies: the two encoders are programmed independently */
	memcpy(&dsc[0], &sde_enc->mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &sde_enc->mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-panel case: each interface carries half the width */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		/* enable only the DSC paths touched by this update */
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1036
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - set up DSC for the DSC-merge
 * topology: 2 layer mixers, 2 DSC encoders merged onto 1 interface.
 *
 * Both DSC encoders compress the same stream (DSC_MODE_MULTIPLEX); the
 * second encoder is disabled during a half-panel partial update.
 *
 * @sde_enc: virtual encoder to configure
 * @params: kickoff parameters; affected_displays selects the active half
 * Return: 0 on success, -EINVAL when a required hw block is missing
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc =
		&sde_enc->mode_info.comp_info.dsc_info;
	bool half_panel_partial_update;
	int i;

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	/* exactly one display affected -> half-panel partial update */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/* NOTE(review): 'i' equals MAX_CHANNELS_PER_ENC here (loop above
	 * ran to completion), so this event logs the channel count rather
	 * than a specific channel index -- confirm intent.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* encoder 0 always runs; encoder 1 only for full-panel updates */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1100
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001101static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1102{
1103 struct sde_encoder_virt *sde_enc;
1104 struct drm_connector *drm_conn;
1105 struct drm_display_mode *adj_mode;
1106 struct sde_rect roi;
1107
1108 if (!drm_enc || !drm_enc->crtc || !drm_enc->crtc->state)
1109 return -EINVAL;
1110 sde_enc = to_sde_encoder_virt(drm_enc);
1111
1112 if (!sde_enc->cur_master)
1113 return -EINVAL;
1114
1115 adj_mode = &sde_enc->base.crtc->state->adjusted_mode;
1116 drm_conn = sde_enc->cur_master->connector;
1117
1118 _sde_encoder_get_connector_roi(sde_enc, &roi);
1119 if (sde_kms_rect_is_null(&roi)) {
1120 roi.w = adj_mode->hdisplay;
1121 roi.h = adj_mode->vdisplay;
1122 }
1123
1124 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1125 sizeof(sde_enc->prv_conn_roi));
1126 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1127
1128 return 0;
1129}
1130
/*
 * _sde_encoder_dsc_setup - dispatch DSC configuration by topology.
 *
 * Looks up the reserved topology on the master connector and invokes the
 * matching DSC programming routine. Skips reprogramming entirely when the
 * connector ROI has not changed since the previous frame.
 *
 * @sde_enc: virtual encoder to configure
 * @params: kickoff parameters forwarded to the topology handlers
 * Return: 0 on success or no-op, -EINVAL on bad args/unsupported topology
 */
static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	enum sde_rm_topology_name topology;
	struct drm_connector *drm_conn;
	int ret = 0;

	if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
			!sde_enc->phys_encs[0]->connector)
		return -EINVAL;

	drm_conn = sde_enc->phys_encs[0]->connector;

	topology = sde_connector_get_topology_name(drm_conn);
	if (topology == SDE_RM_TOPOLOGY_NONE) {
		SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
		return -EINVAL;
	}

	SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
	SDE_EVT32(DRMID(&sde_enc->base));

	/* unchanged ROI: existing DSC programming is still valid */
	if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
			&sde_enc->prv_conn_roi))
		return ret;

	switch (topology) {
	case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
	case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
		ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
		break;
	case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
		ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
		break;
	case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
		ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
		break;
	default:
		SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
				topology);
		return -EINVAL;
	};

	return ret;
}
1176
Dhaval Patelaab9b522017-07-20 12:38:46 -07001177static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
1178 struct msm_display_info *disp_info, bool is_dummy)
1179{
1180 struct sde_vsync_source_cfg vsync_cfg = { 0 };
1181 struct msm_drm_private *priv;
1182 struct sde_kms *sde_kms;
1183 struct sde_hw_mdp *hw_mdptop;
1184 struct drm_encoder *drm_enc;
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001185 struct msm_mode_info *mode_info;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001186 int i;
1187
1188 if (!sde_enc || !disp_info) {
1189 SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
1190 sde_enc != NULL, disp_info != NULL);
1191 return;
1192 } else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
1193 SDE_ERROR("invalid num phys enc %d/%d\n",
1194 sde_enc->num_phys_encs,
1195 (int) ARRAY_SIZE(sde_enc->hw_pp));
1196 return;
1197 }
1198
1199 drm_enc = &sde_enc->base;
1200 /* this pointers are checked in virt_enable_helper */
1201 priv = drm_enc->dev->dev_private;
1202
1203 sde_kms = to_sde_kms(priv->kms);
1204 if (!sde_kms) {
1205 SDE_ERROR("invalid sde_kms\n");
1206 return;
1207 }
1208
1209 hw_mdptop = sde_kms->hw_mdp;
1210 if (!hw_mdptop) {
1211 SDE_ERROR("invalid mdptop\n");
1212 return;
1213 }
1214
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001215 mode_info = &sde_enc->mode_info;
1216 if (!mode_info) {
1217 SDE_ERROR("invalid mode info\n");
1218 return;
1219 }
1220
Dhaval Patelaab9b522017-07-20 12:38:46 -07001221 if (hw_mdptop->ops.setup_vsync_source &&
1222 disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
1223 for (i = 0; i < sde_enc->num_phys_encs; i++)
1224 vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;
1225
1226 vsync_cfg.pp_count = sde_enc->num_phys_encs;
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001227 vsync_cfg.frame_rate = mode_info->frame_rate;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001228 if (is_dummy)
1229 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
1230 else if (disp_info->is_te_using_watchdog_timer)
1231 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
1232 else
1233 vsync_cfg.vsync_source = SDE_VSYNC0_SOURCE_GPIO;
1234 vsync_cfg.is_dummy = is_dummy;
1235
1236 hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
1237 }
1238}
1239
/*
 * _sde_encoder_update_rsc_client - push display state into the SDE RSC.
 *
 * Updates the resource state coordinator (RSC) client with the current
 * display timing/prefill parameters and requested state (CMD/VID/IDLE),
 * then waits for the RSC state transition to complete, synchronized to
 * the appropriate CRTC's vblank.
 *
 * @drm_enc: encoder whose RSC client is updated
 * @config: optional extra config (inline rotator prefill); NULL on disable
 * @enable: true to request an active (CMD/VID) state, false for IDLE
 * Return: 0 on success or when no RSC client exists, negative errno
 *	on update/wait failure
 */
static int _sde_encoder_update_rsc_client(
		struct drm_encoder *drm_enc,
		struct sde_encoder_rsc_config *config, bool enable)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_crtc *crtc;
	enum sde_rsc_state rsc_state;
	struct sde_rsc_cmd_config *rsc_config;
	int ret, prefill_lines;
	struct msm_display_info *disp_info;
	struct msm_mode_info *mode_info;
	int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
	int wait_count = 0;
	struct drm_crtc *primary_crtc;
	int pipe = -1;

	if (!drm_enc || !drm_enc->crtc || !drm_enc->dev) {
		SDE_ERROR("invalid arguments\n");
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	crtc = drm_enc->crtc;
	disp_info = &sde_enc->disp_info;
	mode_info = &sde_enc->mode_info;
	rsc_config = &sde_enc->rsc_config;

	/* displays without an RSC client are a successful no-op */
	if (!sde_enc->rsc_client) {
		SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
		return 0;
	}

	/**
	 * only primary command mode panel can request CMD state.
	 * all other panels/displays can request for VID state including
	 * secondary command mode panel.
	 */
	rsc_state = enable ?
		(((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
		  disp_info->is_primary) ? SDE_RSC_CMD_STATE :
		  SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
	/* inline rotation adds extra prefill when a config is supplied */
	prefill_lines = config ? mode_info->prefill_lines +
		config->inline_rotate_prefill : mode_info->prefill_lines;

	/* compare specific items and reconfigure the rsc */
	if ((rsc_config->fps != mode_info->frame_rate) ||
			(rsc_config->vtotal != mode_info->vtotal) ||
			(rsc_config->prefill_lines != prefill_lines) ||
			(rsc_config->jitter_numer != mode_info->jitter_numer) ||
			(rsc_config->jitter_denom != mode_info->jitter_denom)) {
		rsc_config->fps = mode_info->frame_rate;
		rsc_config->vtotal = mode_info->vtotal;
		rsc_config->prefill_lines = prefill_lines;
		rsc_config->jitter_numer = mode_info->jitter_numer;
		rsc_config->jitter_denom = mode_info->jitter_denom;
		/* force the config to be re-sent on the next update */
		sde_enc->rsc_state_init = false;
	}

	if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
			&& disp_info->is_primary) {
		/* update it only once */
		sde_enc->rsc_state_init = true;

		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, rsc_config, crtc->base.id,
			&wait_vblank_crtc_id);
	} else {
		ret = sde_rsc_client_state_update(sde_enc->rsc_client,
			rsc_state, NULL, crtc->base.id,
			&wait_vblank_crtc_id);
	}

	/**
	 * if RSC performed a state change that requires a VBLANK wait, it will
	 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
	 *
	 * if we are the primary display, we will need to enable and wait
	 * locally since we hold the commit thread
	 *
	 * if we are an external display, we must send a signal to the primary
	 * to enable its VBLANK and wait one, since the RSC hardware is driven
	 * by the primary panel's VBLANK signals
	 */
	SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"sde rsc client update failed ret:%d\n", ret);
		return ret;
	} else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
		/* no vblank wait requested by RSC */
		return ret;
	}

	/* waiting on another CRTC's vblank: resolve its pipe index */
	if (crtc->base.id != wait_vblank_crtc_id) {
		primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
		if (!primary_crtc) {
			SDE_ERROR_ENC(sde_enc,
					"failed to find primary crtc id %d\n",
					wait_vblank_crtc_id);
			return -EINVAL;
		}
		pipe = drm_crtc_index(primary_crtc);
	}

	/**
	 * note: VBLANK is expected to be enabled at this point in
	 * resource control state machine if on primary CRTC
	 */
	for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
		if (sde_rsc_client_is_state_update_complete(
				sde_enc->rsc_client))
			break;

		if (crtc->base.id == wait_vblank_crtc_id)
			ret = sde_encoder_wait_for_event(drm_enc,
					MSM_ENC_VBLANK);
		else
			drm_wait_one_vblank(drm_enc->dev, pipe);

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"wait for vblank failed ret:%d\n", ret);
			break;
		}
	}

	/* log the timeout; completion is not guaranteed past this point */
	if (wait_count >= MAX_RSC_WAIT)
		SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
				SDE_EVTLOG_ERROR);

	return ret;
}
1371
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001372static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1373{
1374 struct sde_encoder_virt *sde_enc;
1375 int i;
1376
1377 if (!drm_enc) {
1378 SDE_ERROR("invalid encoder\n");
1379 return;
1380 }
1381
1382 sde_enc = to_sde_encoder_virt(drm_enc);
1383
1384 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1385 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1386 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1387
1388 if (phys && phys->ops.irq_control)
1389 phys->ops.irq_control(phys, enable);
1390 }
1391
1392}
1393
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001394struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1395{
1396 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001397
1398 if (!drm_enc)
1399 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001400 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001401 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001402}
1403
/*
 * _sde_encoder_resource_control_rsc_update - drive the RSC through an
 * enable or disable transition.
 *
 * The TE-source and RSC-client updates are intentionally ordered: on
 * enable, the real TE GPIO is connected before the RSC goes active; on
 * disable, the RSC goes idle before the TE is rerouted to a dummy source.
 *
 * @drm_enc: encoder whose RSC state is being changed
 * @enable: true to activate, false to idle the RSC
 */
static void _sde_encoder_resource_control_rsc_update(
		struct drm_encoder *drm_enc, bool enable)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	struct sde_encoder_rsc_config rsc_cfg = { 0 };

	if (enable) {
		/* account for inline rotator prefill in RSC timing */
		rsc_cfg.inline_rotate_prefill =
				sde_crtc_get_inline_prefill(drm_enc->crtc);

		/* connect the TE source to actual TE GPIO to drive RSC */
		_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info,
				false);

		_sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
	} else {
		_sde_encoder_update_rsc_client(drm_enc, NULL, false);

		/**
		 * disconnect the TE source from the actual TE GPIO for RSC
		 *
		 * this call is for hardware workaround on sdm845 and should
		 * not be removed without considering the design changes for
		 * sde rsc + command mode concurrency. It may lead to pp
		 * timeout due to vsync from panel for command mode panel.
		 */
		_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info,
				true);
	}
}
1434
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001435static void _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
1436 bool enable)
1437{
1438 struct msm_drm_private *priv;
1439 struct sde_kms *sde_kms;
1440 struct sde_encoder_virt *sde_enc;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001441
1442 sde_enc = to_sde_encoder_virt(drm_enc);
1443 priv = drm_enc->dev->dev_private;
1444 sde_kms = to_sde_kms(priv->kms);
1445
1446 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1447 SDE_EVT32(DRMID(drm_enc), enable);
1448
1449 if (!sde_enc->cur_master) {
1450 SDE_ERROR("encoder master not set\n");
1451 return;
1452 }
1453
1454 if (enable) {
1455 /* enable SDE core clks */
1456 sde_power_resource_enable(&priv->phandle,
1457 sde_kms->core_client, true);
1458
1459 /* enable DSI clks */
1460 sde_connector_clk_ctrl(sde_enc->cur_master->connector, true);
1461
1462 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001463 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001464
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001465 } else {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001466 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001467 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001468
1469 /* disable DSI clks */
1470 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
1471
1472 /* disable SDE core clks */
1473 sde_power_resource_enable(&priv->phandle,
1474 sde_kms->core_client, false);
1475 }
1476
1477}
1478
/**
 * sde_encoder_resource_control - drive the encoder resource-control state
 *	machine in response to a software event
 * @drm_enc: base drm encoder pointer
 * @sw_event: one of the SDE_ENC_RC_EVENT_* codes (KICKOFF, FRAME_DONE,
 *	PRE_STOP, STOP, PRE_MODESET, POST_MODESET, ENTER_IDLE)
 *
 * Transitions sde_enc->rc_state between OFF/ON/IDLE/PRE_OFF/MODESET while
 * enabling or disabling clocks, irqs and rsc votes through the
 * _sde_encoder_resource_control_* helpers, and schedules/cancels the
 * delayed idle power-collapse work item on the display thread.
 * All transitions are serialized on sde_enc->rc_lock except FRAME_DONE,
 * which runs in interrupt context (see the comment in that case).
 *
 * Return: 0 on success or when the event is legitimately ignored,
 * negative error code on an illegal state transition.
 */
static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool autorefresh_enabled = false;
	unsigned int lp, idle_timeout;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	int ret;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = sde_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		SDE_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	/* delayed off work is queued on the crtc's dedicated display thread */
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!sde_enc->idle_pc_supported &&
			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
			sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
			sw_event != SDE_ENC_RC_EVENT_STOP &&
			sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
		return 0;

	SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
			sde_enc->idle_pc_supported);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);

	switch (sw_event) {
	case SDE_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE1);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
				sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * video mode exits IDLE with only irq re-enable; all other
		 * paths need the full clock/irq/rsc bring-up
		 */
		if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		} else {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);
			_sde_encoder_resource_control_rsc_update(drm_enc, true);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (sde_crtc_frame_pending(drm_enc->crtc) > 1) {
			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE2);
			return 0;
		}

		/* schedule delayed off work if autorefresh is disabled */
		if (sde_enc->cur_master &&
			sde_enc->cur_master->ops.is_autorefresh_enabled)
			autorefresh_enabled =
				sde_enc->cur_master->ops.is_autorefresh_enabled(
							sde_enc->cur_master);

		/* set idle timeout based on master connector's lp value */
		if (sde_enc->cur_master)
			lp = sde_connector_get_lp(
					sde_enc->cur_master->connector);
		else
			lp = SDE_MODE_DPMS_ON;

		/* collapse faster when the panel is in low-power LP2 */
		if (lp == SDE_MODE_DPMS_LP2)
			idle_timeout = IDLE_SHORT_TIMEOUT;
		else
			idle_timeout = sde_enc->idle_timeout;

		if (!autorefresh_enabled)
			kthread_queue_delayed_work(
				&disp_thread->worker,
				&sde_enc->delayed_off_work,
				msecs_to_jiffies(idle_timeout));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				autorefresh_enabled,
				idle_timeout, SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;

	case SDE_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* video mode in IDLE only needs its irqs back for the stop */
		if (is_vid_mode &&
			  sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE3);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/**
		 * IRQs are still enabled currently, which allows wait for
		 * VBLANK which RSC may require to correctly transition to OFF
		 */
		_sde_encoder_resource_control_rsc_update(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_PRE_OFF,
				SDE_EVTLOG_FUNC_CASE3);

		sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_STOP:
		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_FUNC_CASE4);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		} else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
				sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		/**
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
			_sde_encoder_resource_control_helper(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);

		sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_PRE_MODESET:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			/* enable all the clks and resources */
			_sde_encoder_resource_control_helper(drm_enc, true);

			_sde_encoder_resource_control_rsc_update(drm_enc, true);

			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
			sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
		}

		/* drain the current frame before releasing irqs for modeset */
		ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
		if (ret && ret != -EWOULDBLOCK) {
			SDE_ERROR_ENC(sde_enc,
					"wait for commit done returned %d\n",
					ret);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					ret, SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		_sde_encoder_irq_control(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
			SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);

		sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_POST_MODESET:
		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d !MODESET state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return -EINVAL;
		}

		_sde_encoder_irq_control(drm_enc, true);

		_sde_encoder_update_rsc_client(drm_enc, NULL, true);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);

		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&sde_enc->rc_lock);

		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (sde_enc->frame_busy_mask[0]) {
			SDE_ERROR_ENC(sde_enc,
					"sw_event:%d, rc:%d frame pending\n",
					sw_event, sde_enc->rc_state);
			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
					SDE_EVTLOG_ERROR);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* video mode only drops irqs; cmd mode drops clks + rsc too */
		if (is_vid_mode) {
			_sde_encoder_irq_control(drm_enc, false);
		} else {
			/* disable all the clks and resources */
			_sde_encoder_resource_control_rsc_update(drm_enc,
								false);
			_sde_encoder_resource_control_helper(drm_enc, false);
		}

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	default:
		SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
		SDE_ERROR("unexpected sw_event: %d\n", sw_event);
		break;
	}

	SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
	return 0;
}
1817
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001818static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
1819 struct drm_display_mode *mode,
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04001820 struct drm_display_mode *adj_mode)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001821{
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001822 struct sde_encoder_virt *sde_enc;
1823 struct msm_drm_private *priv;
1824 struct sde_kms *sde_kms;
1825 struct list_head *connector_list;
1826 struct drm_connector *conn = NULL, *conn_iter;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001827 struct sde_connector *sde_conn = NULL;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001828 struct sde_rm_hw_iter dsc_iter, pp_iter;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001829 int i = 0, ret;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001830
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001831 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04001832 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001833 return;
1834 }
1835
1836 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04001837 SDE_DEBUG_ENC(sde_enc, "\n");
1838
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001839 priv = drm_enc->dev->dev_private;
1840 sde_kms = to_sde_kms(priv->kms);
1841 connector_list = &sde_kms->dev->mode_config.connector_list;
1842
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04001843 SDE_EVT32(DRMID(drm_enc));
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001844
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001845 list_for_each_entry(conn_iter, connector_list, head)
1846 if (conn_iter->encoder == drm_enc)
1847 conn = conn_iter;
1848
1849 if (!conn) {
Clarence Ip19af1362016-09-23 14:57:51 -04001850 SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001851 return;
Lloyd Atkinson55987b02016-08-16 16:57:46 -04001852 } else if (!conn->state) {
1853 SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
1854 return;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001855 }
1856
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001857 sde_conn = to_sde_connector(conn);
1858 if (sde_conn) {
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001859 ret = sde_conn->ops.get_mode_info(adj_mode, &sde_enc->mode_info,
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001860 sde_kms->catalog->max_mixer_width);
1861 if (ret) {
1862 SDE_ERROR_ENC(sde_enc,
1863 "invalid topology for the mode\n");
1864 return;
1865 }
1866 }
1867
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001868 /* release resources before seamless mode change */
1869 if (msm_is_mode_seamless_dms(adj_mode)) {
1870 /* restore resource state before releasing them */
1871 ret = sde_encoder_resource_control(drm_enc,
1872 SDE_ENC_RC_EVENT_PRE_MODESET);
1873 if (ret) {
1874 SDE_ERROR_ENC(sde_enc,
1875 "sde resource control failed: %d\n",
1876 ret);
1877 return;
1878 }
1879 }
1880
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001881 /* Reserve dynamic resources now. Indicating non-AtomicTest phase */
1882 ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
1883 conn->state, false);
1884 if (ret) {
Clarence Ip19af1362016-09-23 14:57:51 -04001885 SDE_ERROR_ENC(sde_enc,
1886 "failed to reserve hw resources, %d\n", ret);
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001887 return;
1888 }
1889
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -07001890 sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
1891 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1892 sde_enc->hw_pp[i] = NULL;
1893 if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
1894 break;
1895 sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
1896 }
1897
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001898 sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
1899 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1900 sde_enc->hw_dsc[i] = NULL;
1901 if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
1902 break;
1903 sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
1904 }
1905
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001906 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1907 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04001908
Lloyd Atkinson55987b02016-08-16 16:57:46 -04001909 if (phys) {
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -07001910 if (!sde_enc->hw_pp[i]) {
1911 SDE_ERROR_ENC(sde_enc,
1912 "invalid pingpong block for the encoder\n");
1913 return;
1914 }
1915 phys->hw_pp = sde_enc->hw_pp[i];
Lloyd Atkinson55987b02016-08-16 16:57:46 -04001916 phys->connector = conn->state->connector;
1917 if (phys->ops.mode_set)
1918 phys->ops.mode_set(phys, mode, adj_mode);
1919 }
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -04001920 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001921
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001922 /* update resources after seamless mode change */
1923 if (msm_is_mode_seamless_dms(adj_mode))
1924 sde_encoder_resource_control(&sde_enc->base,
1925 SDE_ENC_RC_EVENT_POST_MODESET);
1926
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001927 sde_enc->mode_set_complete = true;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001928}
1929
/**
 * _sde_encoder_virt_enable_helper - common hardware programming performed
 *	both on initial enable and on restore
 * @drm_enc: base drm encoder pointer
 *
 * Selects the audio interface for DisplayPort, resets UBWC in the MDP top
 * block, programs the vsync source, and clears the cached connector ROIs.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to this interface for DP displays */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
				sde_enc->cur_master->hw_mdptop);

	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);

	/* forget any stale partial-update ROIs from a previous session */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
1971
1972void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
1973{
1974 struct sde_encoder_virt *sde_enc = NULL;
1975 int i;
1976
1977 if (!drm_enc) {
1978 SDE_ERROR("invalid encoder\n");
1979 return;
1980 }
1981 sde_enc = to_sde_encoder_virt(drm_enc);
1982
1983 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1984 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1985
1986 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
1987 phys->ops.restore(phys);
1988 }
1989
1990 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
1991 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
1992
1993 _sde_encoder_virt_enable_helper(drm_enc);
1994}
1995
/**
 * sde_encoder_virt_enable - drm encoder enable callback
 * @drm_enc: base drm encoder pointer
 *
 * Picks the master physical encoder, brings up resources via the KICKOFF
 * resource-control event, then enables (or, for seamless DMS, restores)
 * each slave phys encoder followed by the master, and finally runs the
 * common enable programming.
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;
	struct msm_compression_info *comp_info = NULL;
	struct drm_display_mode *cur_mode = NULL;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	comp_info = &sde_enc->mode_info.comp_info;
	cur_mode = &sde_enc->base.crtc->state->adjusted_mode;

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);

	/* re-elect the master phys encoder for this enable */
	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* power up clocks/irqs/rsc before touching the phys encoders */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* enable the slaves first; the master is handled after this loop */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		phys->comp_type = comp_info->comp_type;
		if (phys != sde_enc->cur_master) {
			/**
			 * on DMS request, the encoder will be enabled
			 * already. Invoke restore to reconfigure the
			 * new mode.
			 */
			if (msm_is_mode_seamless_dms(cur_mode) &&
					phys->ops.restore)
				phys->ops.restore(phys);
			else if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		/* misr is only set up for video-mode capable displays */
		if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
		    MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					sde_enc->misr_frame_count);
	}

	if (msm_is_mode_seamless_dms(cur_mode) &&
			sde_enc->cur_master->ops.restore)
		sde_enc->cur_master->ops.restore(sde_enc->cur_master);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
2071
/**
 * sde_encoder_virt_disable - drm encoder disable callback
 * @drm_enc: base drm encoder pointer
 *
 * Teardown order: wait for the pending frame to finish, send the PRE_STOP
 * resource-control event (irqs still on so rsc can see vblank), disable
 * every phys encoder, clear any pending frame-done timeout, send STOP to
 * drop clocks/irqs, then release the encoder's hw reservations.
 */
static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_EVT32(DRMID(drm_enc));

	/* wait for idle */
	sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.disable)
			phys->ops.disable(phys);
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&sde_enc->frame_done_timer);
	}

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);

	/* drop the connector/master bindings for the next enable cycle */
	if (sde_enc->cur_master) {
		sde_enc->cur_master->connector = NULL;
		sde_enc->cur_master = NULL;
	}

	SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");

	sde_rm_release(&sde_kms->rm, drm_enc);
}
2127
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002128static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002129 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002130{
2131 int i = 0;
2132
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002133 for (i = 0; i < catalog->intf_count; i++) {
2134 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002135 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002136 return catalog->intf[i].id;
2137 }
2138 }
2139
2140 return INTF_MAX;
2141}
2142
Alan Kwongbb27c092016-07-20 16:41:25 -04002143static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
2144 enum sde_intf_type type, u32 controller_id)
2145{
2146 if (controller_id < catalog->wb_count)
2147 return catalog->wb[controller_id].id;
2148
2149 return WB_MAX;
2150}
2151
/**
 * sde_encoder_vblank_callback - irq-context callback from a physical
 *	encoder on every vblank; forwards to the registered crtc callback
 *	and counts the vsync
 * @drm_enc: Pointer to drm encoder structure
 * @phy_enc: Pointer to the physical encoder reporting the vblank
 */
static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_vblank_callback");
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* callback pointer can be swapped concurrently; read under lock */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	SDE_ATRACE_END("encoder_vblank_callback");
}
2172
/**
 * sde_encoder_underrun_callback - irq-context callback from a physical
 *	encoder when the display pipe underruns; counts and logs the event
 * @drm_enc: Pointer to drm encoder structure
 * @phy_enc: Pointer to the physical encoder reporting the underrun
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
	SDE_ATRACE_END("encoder_underrun_callback");
}
2184
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002185void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
2186 void (*vbl_cb)(void *), void *vbl_data)
2187{
2188 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2189 unsigned long lock_flags;
2190 bool enable;
2191 int i;
2192
2193 enable = vbl_cb ? true : false;
2194
Clarence Ip19af1362016-09-23 14:57:51 -04002195 if (!drm_enc) {
2196 SDE_ERROR("invalid encoder\n");
2197 return;
2198 }
2199 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04002200 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002201
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002202 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002203 sde_enc->crtc_vblank_cb = vbl_cb;
2204 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002205 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002206
2207 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2208 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2209
2210 if (phys && phys->ops.control_vblank_irq)
2211 phys->ops.control_vblank_irq(phys, enable);
2212 }
2213}
2214
Alan Kwong628d19e2016-10-31 13:50:13 -04002215void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
2216 void (*frame_event_cb)(void *, u32 event),
2217 void *frame_event_cb_data)
2218{
2219 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
2220 unsigned long lock_flags;
2221 bool enable;
2222
2223 enable = frame_event_cb ? true : false;
2224
2225 if (!drm_enc) {
2226 SDE_ERROR("invalid encoder\n");
2227 return;
2228 }
2229 SDE_DEBUG_ENC(sde_enc, "\n");
2230 SDE_EVT32(DRMID(drm_enc), enable, 0);
2231
2232 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
2233 sde_enc->crtc_frame_event_cb = frame_event_cb;
2234 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
2235 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
2236}
2237
/**
 * sde_encoder_frame_done_callback - notification from a physical encoder
 *	that it has finished (or failed) its part of a frame
 * @drm_enc: Pointer to drm encoder structure
 * @ready_phys: Pointer to the physical encoder reporting the event
 * @event: Bitmask of SDE_ENCODER_FRAME_EVENT_* flags
 *
 * For completion-type events (done/error/panel-dead) the reporting phys
 * is cleared from frame_busy_mask; once all phys encs are idle, the
 * frame-done watchdog is cancelled, resource control is notified, and
 * the registered crtc callback receives the event. Any other event is
 * forwarded to the crtc callback directly.
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (SDE_ENCODER_FRAME_EVENT_DONE
			| SDE_ENCODER_FRAME_EVENT_ERROR
			| SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!sde_enc->frame_busy_mask[0]) {
			/**
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			if (sde_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, sde_enc->frame_busy_mask);
				SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
						sde_enc->frame_busy_mask[0]);
			}
		}

		/* whole frame done: stop watchdog and notify upper layers */
		if (!sde_enc->frame_busy_mask[0]) {
			atomic_set(&sde_enc->frame_done_timeout, 0);
			del_timer(&sde_enc->frame_done_timer);

			sde_encoder_resource_control(drm_enc,
					SDE_ENC_RC_EVENT_FRAME_DONE);

			if (sde_enc->crtc_frame_event_cb)
				sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		/* non-completion events are forwarded unconditionally */
		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
2285
/**
 * sde_encoder_off_work - delayed kthread worker that idles the encoder
 *	after a period of inactivity
 * @work: Pointer to the embedded kthread work struct (delayed_off_work)
 *
 * Hands an ENTER_IDLE event to resource control, then emits an IDLE
 * frame event so upper layers can release per-frame resources.
 */
static void sde_encoder_off_work(struct kthread_work *work)
{
	struct sde_encoder_virt *sde_enc = container_of(work,
			struct sde_encoder_virt, delayed_off_work.work);

	if (!sde_enc) {
		SDE_ERROR("invalid sde encoder\n");
		return;
	}

	sde_encoder_resource_control(&sde_enc->base,
			SDE_ENC_RC_EVENT_ENTER_IDLE);

	sde_encoder_frame_done_callback(&sde_enc->base, NULL,
			SDE_ENCODER_FRAME_EVENT_IDLE);
}
2302
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		SDE_ERROR("missing trigger cb\n");
		return;
	}

	/* skipped encoders do not participate in this frame's flush */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	/* account this kickoff before the hw flush is triggered */
	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* master phys also owes a retire fence once this frame retires */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);

	/* log the flush mask when the hw supports reading it back */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx, pending_kickoff_cnt,
			ctl->idx, ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx, ctl->idx,
			pending_kickoff_cnt);
}
2357
2358/**
2359 * _sde_encoder_trigger_start - trigger start for a physical encoder
2360 * phys: Pointer to physical encoder structure
2361 */
2362static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
2363{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002364 struct sde_hw_ctl *ctl;
2365
Clarence Ip110d15c2016-08-16 14:44:41 -04002366 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04002367 SDE_ERROR("invalid argument(s)\n");
2368 return;
2369 }
2370
2371 if (!phys->hw_pp) {
2372 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04002373 return;
2374 }
2375
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05002376 ctl = phys->hw_ctl;
2377 if (phys->split_role == ENC_ROLE_SKIP) {
2378 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
2379 "skip start pp%d ctl%d\n",
2380 phys->hw_pp->idx - PINGPONG_0,
2381 ctl->idx - CTL_0);
2382 return;
2383 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002384 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
2385 phys->ops.trigger_start(phys);
2386}
2387
2388void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
2389{
2390 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04002391
2392 if (!phys_enc) {
2393 SDE_ERROR("invalid encoder\n");
2394 return;
2395 }
2396
2397 ctl = phys_enc->hw_ctl;
2398 if (ctl && ctl->ops.trigger_start) {
2399 ctl->ops.trigger_start(ctl);
Dhaval Patel6c666622017-03-21 23:02:59 -07002400 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);
Clarence Ip110d15c2016-08-16 14:44:41 -04002401 }
Clarence Ip110d15c2016-08-16 14:44:41 -04002402}
2403
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002404int sde_encoder_helper_wait_event_timeout(
2405 int32_t drm_id,
2406 int32_t hw_id,
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002407 struct sde_encoder_wait_info *info)
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002408{
2409 int rc = 0;
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002410 s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
2411 s64 jiffies = msecs_to_jiffies(info->timeout_ms);
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002412 s64 time;
2413
2414 do {
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002415 rc = wait_event_timeout(*(info->wq),
2416 atomic_read(info->atomic_cnt) == 0, jiffies);
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002417 time = ktime_to_ms(ktime_get());
2418
Dhaval Patela5f75952017-07-25 11:17:41 -07002419 SDE_EVT32_VERBOSE(drm_id, hw_id, rc, time, expected_time,
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002420 atomic_read(info->atomic_cnt));
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002421 /* If we timed out, counter is valid and time is less, wait again */
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05002422 } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
2423 (time < expected_time));
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002424
2425 return rc;
2426}
2427
/**
 * sde_encoder_helper_hw_reset - issue a ctl hw reset, preceded by an
 *	optional connector soft reset when called from the master phys
 * @phys_enc: Pointer to physical encoder structure
 *
 * On reset failure the debug buses are dumped with the "panic" option.
 * On exit the phys enable state is forced back to SDE_ENC_ENABLED.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* give the panel a chance to soft-reset first (master only) */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
						"panic");
			}
		}
	}

	rc = ctl->ops.reset(ctl);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "ctl %d reset failure\n", ctl->idx);
		SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus", "panic");
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
2473
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);
		/* flush queued reg-dma writes ahead of the ctl flush */
		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl);
		/* flush now, or accumulate into the single split flush */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
2544
/**
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - on a ppsplit
 *	topology, swap the two phys encoders' interface assignments for
 *	right-only partial updates so the master keeps an irq-capable intf
 * @drm_enc: Pointer to drm encoder structure
 * @affected_displays: Bitmask of displays updated this frame; rewritten
 *	to BIT(0) when only a single phys is active on ppsplit
 * @num_active_phys: Number of bits set in @affected_displays
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	/* only the ppsplit topology needs this workaround */
	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
2603
/**
 * _sde_encoder_update_master - reassign solo/master/slave/skip roles to
 *	physical encoders based on which displays the frame actually
 *	touches, updating sde_enc->cur_master along the way
 * @drm_enc: Pointer to drm encoder structure
 * @params: Kickoff params carrying the affected_displays bitmask
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* single phys encoder needs no role juggling */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/* first active phys becomes master (or solo when alone) */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
2664
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05302665bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07002666{
2667 struct sde_encoder_virt *sde_enc;
2668 struct msm_display_info *disp_info;
2669
2670 if (!drm_enc) {
2671 SDE_ERROR("invalid encoder\n");
2672 return false;
2673 }
2674
2675 sde_enc = to_sde_encoder_virt(drm_enc);
2676 disp_info = &sde_enc->disp_info;
2677
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05302678 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07002679}
2680
Dhaval Patel0e558f42017-04-30 00:51:40 -07002681void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
2682{
2683 struct sde_encoder_virt *sde_enc;
2684 struct sde_encoder_phys *phys;
2685 unsigned int i;
2686 struct sde_hw_ctl *ctl;
2687 struct msm_display_info *disp_info;
2688
2689 if (!drm_enc) {
2690 SDE_ERROR("invalid encoder\n");
2691 return;
2692 }
2693 sde_enc = to_sde_encoder_virt(drm_enc);
2694 disp_info = &sde_enc->disp_info;
2695
2696 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2697 phys = sde_enc->phys_encs[i];
2698
2699 if (phys && phys->hw_ctl) {
2700 ctl = phys->hw_ctl;
2701 if (ctl->ops.clear_pending_flush)
2702 ctl->ops.clear_pending_flush(ctl);
2703
2704 /* update only for command mode primary ctl */
2705 if ((phys == sde_enc->cur_master) &&
2706 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
2707 && ctl->ops.trigger_pending)
2708 ctl->ops.trigger_pending(ctl);
2709 }
2710 }
2711}
2712
Ping Li8430ee12017-02-24 14:14:44 -08002713static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
2714{
2715 void *dither_cfg;
2716 int ret = 0;
2717 size_t len = 0;
2718 enum sde_rm_topology_name topology;
2719
2720 if (!phys || !phys->connector || !phys->hw_pp ||
2721 !phys->hw_pp->ops.setup_dither)
2722 return;
2723 topology = sde_connector_get_topology_name(phys->connector);
2724 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
2725 (phys->split_role == ENC_ROLE_SLAVE))
2726 return;
2727
2728 ret = sde_connector_get_dither_cfg(phys->connector,
2729 phys->connector->state, &dither_cfg, &len);
2730 if (!ret)
2731 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
2732}
2733
/**
 * sde_encoder_prepare_for_kickoff - get the encoder ready for an
 *	upcoming kickoff: per-phys prepare (may wait on the previous
 *	frame), hw reset of failed phys encs, master/role selection, ROI
 *	and DSC programming, and connector pre-kickoff
 * @drm_enc: Pointer to drm encoder structure
 * @params: Pointer to kickoff-time parameters
 */
void sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;
	int rc;

	if (!drm_enc || !params) {
		SDE_ERROR("invalid args\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff)
				phys->ops.prepare_for_kickoff(phys, params);
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	/* reassign roles based on which displays this frame touches */
	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc)
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
	}

	if (sde_encoder_is_dsc_enabled(drm_enc)) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc)
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
	}
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002796
/**
 * sde_encoder_kickoff - arm the frame-done watchdog and trigger the
 *	hw flush/start on all physical encoders
 * @drm_enc: Pointer to drm encoder structure
 */
void sde_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_ATRACE_BEGIN("encoder_kickoff");
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	/* watchdog budget in ms: SDE_FRAME_DONE_TIMEOUT vsync periods */
	atomic_set(&sde_enc->frame_done_timeout,
			SDE_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&sde_enc->frame_done_timer, jiffies +
		((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}
	SDE_ATRACE_END("encoder_kickoff");
}
2829
Clarence Ip9c65f7b2017-03-20 06:48:15 -07002830int sde_encoder_helper_hw_release(struct sde_encoder_phys *phys_enc,
2831 struct drm_framebuffer *fb)
2832{
2833 struct drm_encoder *drm_enc;
2834 struct sde_hw_mixer_cfg mixer;
2835 struct sde_rm_hw_iter lm_iter;
2836 bool lm_valid = false;
2837
2838 if (!phys_enc || !phys_enc->parent) {
2839 SDE_ERROR("invalid encoder\n");
2840 return -EINVAL;
2841 }
2842
2843 drm_enc = phys_enc->parent;
2844 memset(&mixer, 0, sizeof(mixer));
2845
2846 /* reset associated CTL/LMs */
2847 if (phys_enc->hw_ctl->ops.clear_pending_flush)
2848 phys_enc->hw_ctl->ops.clear_pending_flush(phys_enc->hw_ctl);
2849 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
2850 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
2851
2852 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
2853 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
2854 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
2855
2856 if (!hw_lm)
2857 continue;
2858
2859 /* need to flush LM to remove it */
2860 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
2861 phys_enc->hw_ctl->ops.update_pending_flush)
2862 phys_enc->hw_ctl->ops.update_pending_flush(
2863 phys_enc->hw_ctl,
2864 phys_enc->hw_ctl->ops.get_bitmask_mixer(
2865 phys_enc->hw_ctl, hw_lm->idx));
2866
2867 if (fb) {
2868 /* assume a single LM if targeting a frame buffer */
2869 if (lm_valid)
2870 continue;
2871
2872 mixer.out_height = fb->height;
2873 mixer.out_width = fb->width;
2874
2875 if (hw_lm->ops.setup_mixer_out)
2876 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
2877 }
2878
2879 lm_valid = true;
2880
2881 /* only enable border color on LM */
2882 if (phys_enc->hw_ctl->ops.setup_blendstage)
2883 phys_enc->hw_ctl->ops.setup_blendstage(
Dhaval Patel572cfd22017-06-12 19:33:39 -07002884 phys_enc->hw_ctl, hw_lm->idx, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07002885 }
2886
2887 if (!lm_valid) {
2888 SDE_DEBUG_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
2889 return -EFAULT;
2890 }
2891 return 0;
2892}
2893
Lloyd Atkinsone123c172017-02-27 13:19:08 -05002894void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
2895{
2896 struct sde_encoder_virt *sde_enc;
2897 struct sde_encoder_phys *phys;
2898 int i;
2899
2900 if (!drm_enc) {
2901 SDE_ERROR("invalid encoder\n");
2902 return;
2903 }
2904 sde_enc = to_sde_encoder_virt(drm_enc);
2905
2906 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2907 phys = sde_enc->phys_encs[i];
2908 if (phys && phys->ops.prepare_commit)
2909 phys->ops.prepare_commit(phys);
2910 }
2911}
2912
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07002913#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002914static int _sde_encoder_status_show(struct seq_file *s, void *data)
2915{
2916 struct sde_encoder_virt *sde_enc;
2917 int i;
2918
2919 if (!s || !s->private)
2920 return -EINVAL;
2921
2922 sde_enc = s->private;
2923
2924 mutex_lock(&sde_enc->enc_lock);
2925 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2926 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2927
2928 if (!phys)
2929 continue;
2930
2931 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
2932 phys->intf_idx - INTF_0,
2933 atomic_read(&phys->vsync_cnt),
2934 atomic_read(&phys->underrun_cnt));
2935
2936 switch (phys->intf_mode) {
2937 case INTF_MODE_VIDEO:
2938 seq_puts(s, "mode: video\n");
2939 break;
2940 case INTF_MODE_CMD:
2941 seq_puts(s, "mode: command\n");
2942 break;
2943 case INTF_MODE_WB_BLOCK:
2944 seq_puts(s, "mode: wb block\n");
2945 break;
2946 case INTF_MODE_WB_LINE:
2947 seq_puts(s, "mode: wb line\n");
2948 break;
2949 default:
2950 seq_puts(s, "mode: ???\n");
2951 break;
2952 }
2953 }
2954 mutex_unlock(&sde_enc->enc_lock);
2955
2956 return 0;
2957}
2958
2959static int _sde_encoder_debugfs_status_open(struct inode *inode,
2960 struct file *file)
2961{
2962 return single_open(file, _sde_encoder_status_show, inode->i_private);
2963}
2964
Dhaval Patelf9245d62017-03-28 16:24:00 -07002965static ssize_t _sde_encoder_misr_setup(struct file *file,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302966 const char __user *user_buf, size_t count, loff_t *ppos)
2967{
2968 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07002969 int i = 0, rc;
2970 char buf[MISR_BUFF_SIZE + 1];
2971 size_t buff_copy;
2972 u32 frame_count, enable;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302973
Dhaval Patelf9245d62017-03-28 16:24:00 -07002974 if (!file || !file->private_data)
2975 return -EINVAL;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302976
Dhaval Patelf9245d62017-03-28 16:24:00 -07002977 sde_enc = file->private_data;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302978
Dhaval Patelf9245d62017-03-28 16:24:00 -07002979 buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
2980 if (copy_from_user(buf, user_buf, buff_copy))
2981 return -EINVAL;
2982
2983 buf[buff_copy] = 0; /* end of string */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302984
2985 if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
Dhaval Patelf9245d62017-03-28 16:24:00 -07002986 return -EINVAL;
2987
2988 rc = _sde_encoder_power_enable(sde_enc, true);
2989 if (rc)
2990 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302991
2992 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07002993 sde_enc->misr_enable = enable;
Dhaval Patel010f5172017-08-01 22:40:09 -07002994 sde_enc->misr_frame_count = frame_count;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302995 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2996 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2997
Dhaval Patelf9245d62017-03-28 16:24:00 -07002998 if (!phys || !phys->ops.setup_misr)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302999 continue;
3000
Dhaval Patelf9245d62017-03-28 16:24:00 -07003001 phys->ops.setup_misr(phys, enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303002 }
3003 mutex_unlock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07003004 _sde_encoder_power_enable(sde_enc, false);
3005
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303006 return count;
3007}
3008
/**
 * _sde_encoder_misr_read - debugfs "misr_data" read handler
 * @file: file handle; private_data carries the virtual encoder
 * @user_buff: destination user buffer
 * @count: size of the user buffer
 * @ppos: file position; a non-zero offset returns EOF (single-shot read)
 *
 * Reports "disabled"/"unsupported", or one MISR value per physical
 * encoder that implements collect_misr. Returns bytes copied, 0 on EOF
 * or truncation, or a negative errno.
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	/* single-shot interface: any repeat read reports EOF */
	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* collect_misr touches hw registers; power must be up first */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/*
		 * NOTE(review): any capability bit other than VID_MODE makes
		 * MISR report "unsupported" — confirm that is the intent
		 * rather than a check for VID_MODE being absent.
		 */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* silently report EOF if the user buffer cannot hold the output */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	/* unwind in reverse order of acquisition: lock, then power vote */
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
3070
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003071static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003072{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003073 struct sde_encoder_virt *sde_enc;
3074 struct msm_drm_private *priv;
3075 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07003076 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003077
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003078 static const struct file_operations debugfs_status_fops = {
3079 .open = _sde_encoder_debugfs_status_open,
3080 .read = seq_read,
3081 .llseek = seq_lseek,
3082 .release = single_release,
3083 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303084
3085 static const struct file_operations debugfs_misr_fops = {
3086 .open = simple_open,
3087 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003088 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303089 };
3090
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003091 char name[SDE_NAME_SIZE];
3092
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003093 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003094 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003095 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003096 }
3097
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003098 sde_enc = to_sde_encoder_virt(drm_enc);
3099 priv = drm_enc->dev->dev_private;
3100 sde_kms = to_sde_kms(priv->kms);
3101
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003102 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
3103
3104 /* create overall sub-directory for the encoder */
3105 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07003106 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003107 if (!sde_enc->debugfs_root)
3108 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303109
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003110 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003111 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003112 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05303113
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04003114 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07003115 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003116
Alan Kwongf2debb02017-04-05 06:19:29 -07003117 for (i = 0; i < sde_enc->num_phys_encs; i++)
3118 if (sde_enc->phys_encs[i] &&
3119 sde_enc->phys_encs[i]->ops.late_register)
3120 sde_enc->phys_encs[i]->ops.late_register(
3121 sde_enc->phys_encs[i],
3122 sde_enc->debugfs_root);
3123
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07003124 return 0;
3125}
3126
3127static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
3128{
3129 struct sde_encoder_virt *sde_enc;
3130
3131 if (!drm_enc)
3132 return;
3133
3134 sde_enc = to_sde_encoder_virt(drm_enc);
3135 debugfs_remove_recursive(sde_enc->debugfs_root);
3136}
3137#else
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	/* debugfs not compiled in; report success so registration proceeds */
	return 0;
}
3142
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
	/* no-op when debugfs support is not compiled in */
}
3146#endif
3147
/* drm_encoder_funcs.late_register hook: bring up this encoder's debugfs */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	int rc;

	rc = _sde_encoder_init_debugfs(encoder);

	return rc;
}
3152
/* drm_encoder_funcs.early_unregister hook: tear down encoder debugfs */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
3157
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003158static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04003159 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003160 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003161 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003162{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003163 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003164
Clarence Ip19af1362016-09-23 14:57:51 -04003165 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003166
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003167 /*
3168 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
3169 * in this function, check up-front.
3170 */
3171 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
3172 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003173 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003174 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003175 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003176 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003177
Clarence Ipa4039322016-07-15 16:23:59 -04003178 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003179 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003180
3181 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003182 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003183 PTR_ERR(enc));
3184 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3185 }
3186
3187 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3188 ++sde_enc->num_phys_encs;
3189 }
3190
Clarence Ipa4039322016-07-15 16:23:59 -04003191 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003192 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003193
3194 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003195 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04003196 PTR_ERR(enc));
3197 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3198 }
3199
3200 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3201 ++sde_enc->num_phys_encs;
3202 }
3203
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003204 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003205}
3206
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003207static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
3208 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04003209{
3210 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04003211
Clarence Ip19af1362016-09-23 14:57:51 -04003212 if (!sde_enc) {
3213 SDE_ERROR("invalid encoder\n");
3214 return -EINVAL;
3215 }
3216
3217 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04003218
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003219 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003220 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003221 sde_enc->num_phys_encs);
3222 return -EINVAL;
3223 }
3224
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003225 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003226
3227 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04003228 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04003229 PTR_ERR(enc));
3230 return enc == 0 ? -EINVAL : PTR_ERR(enc);
3231 }
3232
3233 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
3234 ++sde_enc->num_phys_encs;
3235
3236 return 0;
3237}
3238
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003239static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003240 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04003241 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003242 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003243{
3244 int ret = 0;
3245 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003246 enum sde_intf_type intf_type;
3247 struct sde_encoder_virt_ops parent_ops = {
3248 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07003249 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04003250 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003251 };
3252 struct sde_enc_phys_init_params phys_params;
3253
Clarence Ip19af1362016-09-23 14:57:51 -04003254 if (!sde_enc || !sde_kms) {
3255 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
3256 sde_enc != 0, sde_kms != 0);
3257 return -EINVAL;
3258 }
3259
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003260 memset(&phys_params, 0, sizeof(phys_params));
3261 phys_params.sde_kms = sde_kms;
3262 phys_params.parent = &sde_enc->base;
3263 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003264 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003265
Clarence Ip19af1362016-09-23 14:57:51 -04003266 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003267
Clarence Ipa4039322016-07-15 16:23:59 -04003268 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003269 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
3270 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04003271 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003272 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3273 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07003274 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
3275 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
3276 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04003277 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
3278 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
3279 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003280 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04003281 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003282 return -EINVAL;
3283 }
3284
Clarence Ip88270a62016-06-26 10:09:34 -04003285 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003286
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003287 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
3288
Clarence Ip19af1362016-09-23 14:57:51 -04003289 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003290
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003291 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
3292 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003293 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
3294
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003295 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04003296 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003297 /*
3298 * Left-most tile is at index 0, content is controller id
3299 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
3300 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
3301 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003302 u32 controller_id = disp_info->h_tile_instance[i];
3303
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003304 if (disp_info->num_of_h_tiles > 1) {
3305 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003306 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003307 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003308 phys_params.split_role = ENC_ROLE_SLAVE;
3309 } else {
3310 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003311 }
3312
Clarence Ip19af1362016-09-23 14:57:51 -04003313 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003314 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003315
Alan Kwongbb27c092016-07-20 16:41:25 -04003316 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003317 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003318 phys_params.wb_idx = sde_encoder_get_wb(
3319 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04003320 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003321 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003322 SDE_ERROR_ENC(sde_enc,
3323 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003324 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003325 ret = -EINVAL;
3326 }
Alan Kwongbb27c092016-07-20 16:41:25 -04003327 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04003328 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003329 phys_params.intf_idx = sde_encoder_get_intf(
3330 sde_kms->catalog, intf_type,
3331 controller_id);
3332 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04003333 SDE_ERROR_ENC(sde_enc,
3334 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003335 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04003336 ret = -EINVAL;
3337 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003338 }
3339
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003340 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04003341 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003342 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
3343 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04003344 else
3345 ret = sde_encoder_virt_add_phys_encs(
3346 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04003347 sde_enc,
3348 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003349 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04003350 SDE_ERROR_ENC(sde_enc,
3351 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003352 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003353 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08003354
3355 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3356 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3357
3358 if (phys) {
3359 atomic_set(&phys->vsync_cnt, 0);
3360 atomic_set(&phys->underrun_cnt, 0);
3361 }
3362 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003363 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003364
3365 return ret;
3366}
3367
/*
 * sde_encoder_frame_done_timeout - timer callback fired when a frame-done
 * event has not arrived within the expected window.
 *
 * @data: the drm_encoder pointer, as armed via setup_timer() in
 * sde_encoder_init(). Reports SDE_ENCODER_FRAME_EVENT_ERROR to the crtc
 * frame-event callback so upper layers can recover.
 */
static void sde_encoder_frame_done_timeout(unsigned long data)
{
	struct drm_encoder *drm_enc = (struct drm_encoder *) data;
	/* container_of-style cast; drm_enc is validated just below */
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	struct msm_drm_private *priv;
	u32 event;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}
	priv = drm_enc->dev->dev_private;

	/* no frame in flight or no consumer registered: stale timer pop */
	if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
		SDE_DEBUG_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), sde_enc->frame_busy_mask[0], 0);
		return;
	} else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		/* timeout already consumed (frame-done raced the timer) */
		SDE_ERROR_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), 0, 1);
		return;
	}

	SDE_ERROR_ENC(sde_enc, "frame done timeout\n");

	event = SDE_ENCODER_FRAME_EVENT_ERROR;
	SDE_EVT32(DRMID(drm_enc), event);
	sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data, event);
}
3397
/* atomic-helper callbacks shared by every virtual encoder instance */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
3404
/* base drm_encoder callbacks; (un)register hooks manage debugfs entries */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
3410
Clarence Ip3649f8b2016-10-31 09:59:44 -04003411struct drm_encoder *sde_encoder_init(
3412 struct drm_device *dev,
3413 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003414{
3415 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04003416 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003417 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003418 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003419 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08003420 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003421 int ret = 0;
3422
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003423 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
3424 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003425 ret = -ENOMEM;
3426 goto fail;
3427 }
3428
Dhaval Patel22ef6df2016-10-20 14:42:52 -07003429 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003430 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
3431 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003432 if (ret)
3433 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003434
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003435 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003436 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003437 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07003438 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003439 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003440
Alan Kwong628d19e2016-10-31 13:50:13 -04003441 atomic_set(&sde_enc->frame_done_timeout, 0);
3442 setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
3443 (unsigned long) sde_enc);
3444
Dhaval Patel020f7e122016-11-15 14:39:18 -08003445 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
3446 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003447 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08003448 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07003449 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08003450 PTR_ERR(sde_enc->rsc_client));
3451 sde_enc->rsc_client = NULL;
3452 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08003453
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07003454 mutex_init(&sde_enc->rc_lock);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04003455 kthread_init_delayed_work(&sde_enc->delayed_off_work,
3456 sde_encoder_off_work);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07003457 sde_enc->idle_timeout = IDLE_TIMEOUT;
Dhaval Patel020f7e122016-11-15 14:39:18 -08003458 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
3459
Clarence Ip19af1362016-09-23 14:57:51 -04003460 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003461
3462 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003463
3464fail:
Clarence Ip19af1362016-09-23 14:57:51 -04003465 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003466 if (drm_enc)
3467 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07003468
3469 return ERR_PTR(ret);
3470}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04003471
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07003472int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
3473 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04003474{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07003475 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003476 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003477 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04003478
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003479 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04003480 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003481 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04003482 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003483 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04003484 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04003485
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003486 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3487 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04003488
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07003489 switch (event) {
3490 case MSM_ENC_COMMIT_DONE:
3491 fn_wait = phys->ops.wait_for_commit_done;
3492 break;
3493 case MSM_ENC_TX_COMPLETE:
3494 fn_wait = phys->ops.wait_for_tx_complete;
3495 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04003496 case MSM_ENC_VBLANK:
3497 fn_wait = phys->ops.wait_for_vblank;
3498 break;
3499 default:
3500 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
3501 event);
3502 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07003503 };
3504
3505 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003506 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07003507 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07003508 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003509 if (ret)
3510 return ret;
3511 }
3512 }
3513
3514 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04003515}
3516
Alan Kwong67a3f792016-11-01 23:16:53 -04003517enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
3518{
3519 struct sde_encoder_virt *sde_enc = NULL;
3520 int i;
3521
3522 if (!encoder) {
3523 SDE_ERROR("invalid encoder\n");
3524 return INTF_MODE_NONE;
3525 }
3526 sde_enc = to_sde_encoder_virt(encoder);
3527
3528 if (sde_enc->cur_master)
3529 return sde_enc->cur_master->intf_mode;
3530
3531 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3532 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3533
3534 if (phys)
3535 return phys->intf_mode;
3536 }
3537
3538 return INTF_MODE_NONE;
3539}