blob: 21c883ae4994b21febddb90e110949fcb11e6ec0 [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
Kalyan Thota27ec06c2019-03-18 13:19:59 +05302 * Copyright (c) 2014-2019, The Linux Foundation. All rights reserved.
Dhaval Patel14d46ce2017-01-17 16:28:12 -08003 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinsona8781382017-07-17 10:20:43 -040020#include <linux/kthread.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070021#include <linux/debugfs.h>
22#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070023#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070024
Lloyd Atkinson09fed912016-06-24 18:14:13 -040025#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070026#include "sde_kms.h"
27#include "drm_crtc.h"
28#include "drm_crtc_helper.h"
29
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040030#include "sde_hwio.h"
31#include "sde_hw_catalog.h"
32#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040033#include "sde_hw_ctl.h"
34#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040035#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080036#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080037#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070038#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070039#include "sde_trace.h"
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050040#include "sde_core_irq.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040041
Clarence Ip19af1362016-09-23 14:57:51 -040042#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
43 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
44
45#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
46 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
47
Lloyd Atkinson05ef8232017-03-08 16:35:36 -050048#define SDE_DEBUG_PHYS(p, fmt, ...) SDE_DEBUG("enc%d intf%d pp%d " fmt,\
49 (p) ? (p)->parent->base.id : -1, \
50 (p) ? (p)->intf_idx - INTF_0 : -1, \
51 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
52 ##__VA_ARGS__)
53
54#define SDE_ERROR_PHYS(p, fmt, ...) SDE_ERROR("enc%d intf%d pp%d " fmt,\
55 (p) ? (p)->parent->base.id : -1, \
56 (p) ? (p)->intf_idx - INTF_0 : -1, \
57 (p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
58 ##__VA_ARGS__)
59
Lloyd Atkinson5d722782016-05-30 14:09:41 -040060/*
61 * Two to anticipate panels that can do cmd/vid dynamic switching
62 * plan is to create all possible physical encoder types, and switch between
63 * them at runtime
64 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040065#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040066
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040067#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
68 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
69
Vishnuvardhan Prodduturi6b1803a2019-01-19 16:35:34 +053070#define MAX_CHANNELS_PER_ENC 4
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070071
Dhaval Patelf9245d62017-03-28 16:24:00 -070072#define MISR_BUFF_SIZE 256
73
Clarence Ip89628132017-07-27 13:33:51 -040074#define IDLE_SHORT_TIMEOUT 1
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070075
Raviteja Tamatam3eebe962017-10-26 09:55:24 +053076#define FAULT_TOLERENCE_DELTA_IN_MS 2
77
78#define FAULT_TOLERENCE_WAIT_IN_MS 5
79
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -040080/* Maximum number of VSYNC wait attempts for RSC state transition */
81#define MAX_RSC_WAIT 5
82
Ping Li16162692018-05-08 14:13:46 -070083#define TOPOLOGY_DUALPIPE_MERGE_MODE(x) \
84 (((x) == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE) || \
85 ((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE) || \
86 ((x) == SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC))
87
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070088/**
89 * enum sde_enc_rc_events - events for resource control state machine
90 * @SDE_ENC_RC_EVENT_KICKOFF:
91 * This event happens at NORMAL priority.
92 * Event that signals the start of the transfer. When this event is
93 * received, enable MDP/DSI core clocks and request RSC with CMD state.
94 * Regardless of the previous state, the resource should be in ON state
95 * at the end of this event.
96 * @SDE_ENC_RC_EVENT_FRAME_DONE:
97 * This event happens at INTERRUPT level.
98 * Event signals the end of the data transfer after the PP FRAME_DONE
99 * event. At the end of this event, a delayed work is scheduled to go to
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700100 * IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400101 * @SDE_ENC_RC_EVENT_PRE_STOP:
102 * This event happens at NORMAL priority.
 * This event, when received during the ON state, sets RSC to IDLE and
 * leaves the RC STATE in the PRE_OFF state.
105 * It should be followed by the STOP event as part of encoder disable.
106 * If received during IDLE or OFF states, it will do nothing.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700107 * @SDE_ENC_RC_EVENT_STOP:
108 * This event happens at NORMAL priority.
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400109 * When this event is received, disable all the MDP/DSI core clocks, and
110 * disable IRQs. It should be called from the PRE_OFF or IDLE states.
111 * IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
112 * PRE_OFF is expected when PRE_STOP was executed during the ON state.
113 * Resource state should be in OFF at the end of the event.
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700114 * @SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700115 * This event happens at NORMAL priority from a work item.
 * Event signals that a seamless mode switch is in progress. A
 * client needs to turn off only the irq - leave clocks ON to reduce the mode
118 * switch latency.
119 * @SDE_ENC_RC_EVENT_POST_MODESET:
120 * This event happens at NORMAL priority from a work item.
121 * Event signals that seamless mode switch is complete and resources are
122 * acquired. Clients wants to turn on the irq again and update the rsc
123 * with new vtotal.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700124 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
125 * This event happens at NORMAL priority from a work item.
Dhaval Patelc9e213b2017-11-02 12:13:12 -0700126 * Event signals that there were no frame updates for
127 * IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core clocks
128 * and request RSC with IDLE state and change the resource state to IDLE.
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800129 * @SDE_ENC_RC_EVENT_EARLY_WAKEUP:
130 * This event is triggered from the input event thread when touch event is
131 * received from the input device. On receiving this event,
132 * - If the device is in SDE_ENC_RC_STATE_IDLE state, it turns ON the
133 clocks and enable RSC.
134 * - If the device is in SDE_ENC_RC_STATE_ON state, it resets the delayed
135 * off work since a new commit is imminent.
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700136 */
/* resource control events; full per-event semantics are in the kerneldoc above */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_PRE_STOP,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_PRE_MODESET,
	SDE_ENC_RC_EVENT_POST_MODESET,
	SDE_ENC_RC_EVENT_ENTER_IDLE,
	SDE_ENC_RC_EVENT_EARLY_WAKEUP,
};
147
148/*
149 * enum sde_enc_rc_states - states that the resource control maintains
150 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -0400151 * @SDE_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700152 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700153 * @SDE_ENC_RC_STATE_MODESET: Resource is in modeset state
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700154 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
155 */
/* resource control states; per-state semantics are in the kerneldoc above */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_PRE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_MODESET,
	SDE_ENC_RC_STATE_IDLE
};
163
Chirag Khuranaed859f52019-11-20 18:18:12 +0530164/* rgb to yuv color space conversion matrix */
static struct sde_csc_cfg sde_csc_10bit_convert[SDE_MAX_CSC] = {
	/*
	 * Initializer rows follow the field order of struct sde_csc_cfg:
	 * 3x3 coefficient matrix (S15.16 fixed point via TO_S15D16), then
	 * bias and clamp vectors — NOTE(review): confirm exact field
	 * meanings against the struct sde_csc_cfg declaration.
	 */
	[SDE_CSC_RGB2YUV_601L] = {	/* BT.601 limited-range RGB->YUV */
		{
			TO_S15D16(0x0083), TO_S15D16(0x0102), TO_S15D16(0x0032),
			TO_S15D16(0xffb4), TO_S15D16(0xff6b), TO_S15D16(0x00e1),
			TO_S15D16(0x00e1), TO_S15D16(0xff44), TO_S15D16(0xffdb),
		},
		{ 0x0, 0x0, 0x0,},
		{ 0x0040, 0x0200, 0x0200,},
		{ 0x0, 0x3ff, 0x0, 0x3ff, 0x0, 0x3ff,},
		{ 0x0040, 0x03ac, 0x0040, 0x03c0, 0x0040, 0x03c0,},
	},

	[SDE_CSC_RGB2YUV_601FR] = {	/* BT.601 full-range RGB->YUV */
		{
			TO_S15D16(0x0099), TO_S15D16(0x012d), TO_S15D16(0x003a),
			TO_S15D16(0xffaa), TO_S15D16(0xff56), TO_S15D16(0x0100),
			TO_S15D16(0x0100), TO_S15D16(0xff2a), TO_S15D16(0xffd6),
		},
		{ 0x0, 0x0, 0x0,},
		{ 0x0000, 0x0200, 0x0200,},
		{ 0x0, 0x3ff, 0x0, 0x3ff, 0x0, 0x3ff,},
		{ 0x0, 0x3ff, 0x0, 0x3ff, 0x0, 0x3ff,},
	},
};
190
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400191/**
192 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
193 * encoders. Virtual encoder manages one "logical" display. Physical
194 * encoders manage one intf block, tied to a specific panel/sub-panel.
195 * Virtual encoder defers as much as possible to the physical encoders.
196 * Virtual encoder registers itself with the DRM Framework as the encoder.
197 * @base: drm_encoder base class for registration with DRM
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400198 * @enc_spin_lock: Virtual-Encoder-Wide Spin Lock for IRQ purposes
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400199 * @bus_scaling_client: Client handle to the bus scaling interface
200 * @num_phys_encs: Actual number of physical encoders contained.
201 * @phys_encs: Container of physical encoders managed.
202 * @cur_master: Pointer to the current master in this mode. Optimization
203 * Only valid after enable. Cleared as disable.
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -0700204 * @hw_pp Handle to the pingpong blocks used for the display. No.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500205 * pingpong blocks can be different than num_phys_encs.
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800206 * @hw_dsc: Array of DSC block handles used for the display.
Lloyd Atkinson66e7dde2017-02-08 15:52:53 -0500207 * @intfs_swapped Whether or not the phys_enc interfaces have been swapped
208 * for partial update right-only cases, such as pingpong
209 * split where virtual pingpong does not generate IRQs
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400210 * @crtc_vblank_cb: Callback into the upper layer / CRTC for
211 * notification of the VBLANK
212 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400213 * @crtc_kickoff_cb: Callback into CRTC that will flush & start
214 * all CTL paths
215 * @crtc_kickoff_cb_data: Opaque user data given to crtc_kickoff_cb
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700216 * @debugfs_root: Debug file system root file node
217 * @enc_lock: Lock around physical encoder create/destroy and
218 access.
Alan Kwong628d19e2016-10-31 13:50:13 -0400219 * @frame_busy_mask: Bitmask tracking which phys_enc we are still
220 * busy processing current command.
221 * Bit0 = phys_encs[0] etc.
222 * @crtc_frame_event_cb: callback handler for frame event
223 * @crtc_frame_event_cb_data: callback handler private data
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400224 * @vsync_event_timer: vsync timer
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700225 * @rsc_client: rsc client pointer
226 * @rsc_state_init: boolean to indicate rsc config init
227 * @disp_info: local copy of msm_display_info struct
Dhaval Patelf9245d62017-03-28 16:24:00 -0700228 * @misr_enable: misr enable/disable status
Dhaval Patel010f5172017-08-01 22:40:09 -0700229 * @misr_frame_count: misr frame count before start capturing the data
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -0700230 * @idle_pc_enabled: indicate if idle power collapse is enabled
231 * currently. This can be controlled by user-mode
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700232 * @rc_lock: resource control mutex lock to protect
233 * virt encoder over various state changes
234 * @rc_state: resource controller state
235 * @delayed_off_work: delayed worker to schedule disabling of
236 * clks and resources after IDLE_TIMEOUT time.
Benjamin Chan9cd866d2017-08-15 14:56:34 -0400237 * @vsync_event_work: worker to handle vsync event for autorefresh
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800238 * @input_event_work: worker to handle input device touch events
Dhaval Patel222023e2018-02-27 12:24:07 -0800239 * @esd_trigger_work: worker to handle esd trigger events
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -0800240 * @input_handler: handler for input device events
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700241 * @topology: topology of the display
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -0700242 * @vblank_enabled: boolean to track userspace vblank vote
Dhaval Patel1b5605b2017-07-26 18:19:50 -0700243 * @rsc_config: rsc configuration for display vtotal, fps, etc.
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400244 * @cur_conn_roi: current connector roi
245 * @prv_conn_roi: previous connector roi to optimize if unchanged
Harsh Sahu1e52ed02017-11-28 14:34:22 -0800246 * @crtc pointer to drm_crtc
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400247 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	/* serializes vblank enable/disable requests — NOTE(review): confirm */
	struct mutex vblank_ctl_lock;
	uint32_t bus_scaling_client;

	/* copied into hw_res by sde_encoder_get_hw_resources() */
	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	struct sde_crtc_frame_event_cb_data crtc_frame_event_cb_data;

	struct timer_list vsync_event_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_enabled;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct kthread_work input_event_work;
	struct kthread_work esd_trigger_work;
	struct input_handler *input_handler;
	/* tracks whether input_handler is currently registered — verify */
	bool input_handler_registered;
	struct msm_display_topology topology;
	bool vblank_enabled;

	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	struct drm_crtc *crtc;

	/* presumably set while holding a boosted AHB bus vote — confirm */
	bool elevated_ahb_vote;
};
300
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400301#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700302
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -0500303static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
304{
305 struct msm_drm_private *priv;
306 struct sde_kms *sde_kms;
307 struct pm_qos_request *req;
308 u32 cpu_mask;
309 u32 cpu_dma_latency;
310 int cpu;
311
312 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
313 SDE_ERROR("drm device invalid\n");
314 return;
315 }
316
317 priv = drm_enc->dev->dev_private;
318 if (!priv->kms) {
319 SDE_ERROR("invalid kms\n");
320 return;
321 }
322
323 sde_kms = to_sde_kms(priv->kms);
324 if (!sde_kms || !sde_kms->catalog)
325 return;
326
327 cpu_mask = sde_kms->catalog->perf.cpu_mask;
328 cpu_dma_latency = sde_kms->catalog->perf.cpu_dma_latency;
329 if (!cpu_mask)
330 return;
331
332 req = &sde_kms->pm_qos_cpu_req;
333 req->type = PM_QOS_REQ_AFFINE_CORES;
334 cpumask_empty(&req->cpus_affine);
335 for_each_possible_cpu(cpu) {
336 if ((1 << cpu) & cpu_mask)
337 cpumask_set_cpu(cpu, &req->cpus_affine);
338 }
339 pm_qos_add_request(req, PM_QOS_CPU_DMA_LATENCY, cpu_dma_latency);
340
341 SDE_EVT32_VERBOSE(DRMID(drm_enc), cpu_mask, cpu_dma_latency);
342}
343
344static void _sde_encoder_pm_qos_remove_request(struct drm_encoder *drm_enc)
345{
346 struct msm_drm_private *priv;
347 struct sde_kms *sde_kms;
348
349 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
350 SDE_ERROR("drm device invalid\n");
351 return;
352 }
353
354 priv = drm_enc->dev->dev_private;
355 if (!priv->kms) {
356 SDE_ERROR("invalid kms\n");
357 return;
358 }
359
360 sde_kms = to_sde_kms(priv->kms);
361 if (!sde_kms || !sde_kms->catalog || !sde_kms->catalog->perf.cpu_mask)
362 return;
363
364 pm_qos_remove_request(&sde_kms->pm_qos_cpu_req);
365}
366
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700367static struct drm_connector_state *_sde_encoder_get_conn_state(
368 struct drm_encoder *drm_enc)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800369{
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700370 struct msm_drm_private *priv;
371 struct sde_kms *sde_kms;
372 struct list_head *connector_list;
373 struct drm_connector *conn_iter;
374
375 if (!drm_enc) {
376 SDE_ERROR("invalid argument\n");
377 return NULL;
378 }
379
380 priv = drm_enc->dev->dev_private;
381 sde_kms = to_sde_kms(priv->kms);
382 connector_list = &sde_kms->dev->mode_config.connector_list;
383
384 list_for_each_entry(conn_iter, connector_list, head)
385 if (conn_iter->encoder == drm_enc)
386 return conn_iter->state;
387
388 return NULL;
389}
390
391static int _sde_encoder_get_mode_info(struct drm_encoder *drm_enc,
392 struct msm_mode_info *mode_info)
393{
394 struct drm_connector_state *conn_state;
395
396 if (!drm_enc || !mode_info) {
397 SDE_ERROR("invalid arguments\n");
398 return -EINVAL;
399 }
400
401 conn_state = _sde_encoder_get_conn_state(drm_enc);
402 if (!conn_state) {
403 SDE_ERROR("invalid connector state for the encoder: %d\n",
404 drm_enc->base.id);
405 return -EINVAL;
406 }
407
408 return sde_connector_get_mode_info(conn_state, mode_info);
409}
410
411static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
412{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400413 struct msm_compression_info *comp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700414 struct msm_mode_info mode_info;
415 int rc = 0;
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400416
417 if (!drm_enc)
418 return false;
419
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700420 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
421 if (rc) {
422 SDE_ERROR("failed to get mode info, enc: %d\n",
423 drm_enc->base.id);
424 return false;
425 }
426
427 comp_info = &mode_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800428
429 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
430}
431
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400432bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
433{
434 enum sde_rm_topology_name topology;
435 struct sde_encoder_virt *sde_enc;
436 struct drm_connector *drm_conn;
437
438 if (!drm_enc)
439 return false;
440
441 sde_enc = to_sde_encoder_virt(drm_enc);
442 if (!sde_enc->cur_master)
443 return false;
444
445 drm_conn = sde_enc->cur_master->connector;
446 if (!drm_conn)
447 return false;
448
449 topology = sde_connector_get_topology_name(drm_conn);
Vishnuvardhan Prodduturi6b1803a2019-01-19 16:35:34 +0530450 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE ||
451 topology == SDE_RM_TOPOLOGY_QUADPIPE_DSCMERGE)
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400452 return true;
453
454 return false;
455}
456
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -0700457int sde_encoder_in_clone_mode(struct drm_encoder *drm_enc)
458{
459 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
460
461 return sde_enc && sde_enc->cur_master &&
462 sde_enc->cur_master->in_clone_mode;
463}
464
Dhaval Patelf9245d62017-03-28 16:24:00 -0700465static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
466 bool enable)
467{
468 struct drm_encoder *drm_enc;
469 struct msm_drm_private *priv;
470 struct sde_kms *sde_kms;
471
472 if (!sde_enc) {
473 SDE_ERROR("invalid sde enc\n");
474 return -EINVAL;
475 }
476
477 drm_enc = &sde_enc->base;
478 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
479 SDE_ERROR("drm device invalid\n");
480 return -EINVAL;
481 }
482
483 priv = drm_enc->dev->dev_private;
484 if (!priv->kms) {
485 SDE_ERROR("invalid kms\n");
486 return -EINVAL;
487 }
488
489 sde_kms = to_sde_kms(priv->kms);
490
491 return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
492 enable);
493}
494
/*
 * sde_encoder_helper_report_irq_timeout - log an irq wait timeout and
 *	propagate a FRAME_EVENT_ERROR to the parent encoder's frame-done
 *	handler, if one is registered.
 * @phys_enc: physical encoder that timed out (must be non-NULL with
 *	valid parent and hw_pp; callers are expected to guarantee this)
 * @intr_idx: interrupt index that timed out
 */
void sde_encoder_helper_report_irq_timeout(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx)
{
	SDE_EVT32(DRMID(phys_enc->parent),
			phys_enc->intf_idx - INTF_0,
			phys_enc->hw_pp->idx - PINGPONG_0,
			intr_idx);
	SDE_ERROR_PHYS(phys_enc, "irq %d timeout\n", intr_idx);

	/* notify upper layer so the frame in flight is treated as errored */
	if (phys_enc->parent_ops.handle_frame_done)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				SDE_ENCODER_FRAME_EVENT_ERROR);
}
509
/*
 * sde_encoder_helper_wait_for_irq - wait for an irq event with timeout
 *	and missed-interrupt recovery.
 * @phys_enc: physical encoder waiting on the irq
 * @intr_idx: index into phys_enc->irq[]
 * @wait_info: wait parameters incl. the atomic pending count
 *
 * If the timed wait expires but the hw status register shows the
 * interrupt did fire, the irq callback is invoked manually (with local
 * irqs disabled) and the wait is treated as successful.
 *
 * Return: 0 on success (or when the irq is disabled and the wait is
 *	skipped), -EWOULDBLOCK if the encoder is disabled, -ETIMEDOUT if
 *	the irq never fired, -EINVAL on bad parameters.
 */
int sde_encoder_helper_wait_for_irq(struct sde_encoder_phys *phys_enc,
		enum sde_intr_idx intr_idx,
		struct sde_encoder_wait_info *wait_info)
{
	struct sde_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_PHYS(phys_enc, "encoder is disabled\n");
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, intr_idx, SDE_EVTLOG_ERROR);
		return -EWOULDBLOCK;
	}

	/* irq not registered/enabled: nothing to wait on */
	if (irq->irq_idx < 0) {
		SDE_DEBUG_PHYS(phys_enc, "irq %s hw %d disabled, skip wait\n",
				irq->name, irq->hw_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		return 0;
	}

	SDE_DEBUG_PHYS(phys_enc, "pending_cnt %d\n",
			atomic_read(wait_info->atomic_cnt));
	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_ENTRY);

	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		/* timed out: check hw whether the interrupt actually fired */
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt));
			SDE_DEBUG_PHYS(phys_enc,
					"done but irq %d not triggered\n",
					irq->irq_idx);
			/* run the missed callback by hand, irqs masked */
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			SDE_EVT32(DRMID(phys_enc->parent), intr_idx,
					irq->hw_idx, irq->irq_idx,
					phys_enc->hw_pp->idx - PINGPONG_0,
					atomic_read(wait_info->atomic_cnt), irq_status,
					SDE_EVTLOG_ERROR);
		}
	} else {
		/* event arrived within the timeout */
		ret = 0;
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	SDE_EVT32_VERBOSE(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		irq->irq_idx, ret, phys_enc->hw_pp->idx - PINGPONG_0,
		atomic_read(wait_info->atomic_cnt), SDE_EVTLOG_FUNC_EXIT);

	return ret;
}
591
/*
 * sde_encoder_helper_register_irq - look up, register a callback for,
 *	and enable the irq at @intr_idx for this physical encoder.
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: index into phys_enc->irq[]
 *
 * Idempotent: returns 0 without side effects if the irq is already
 * registered (irq_idx >= 0). On any failure irq->irq_idx is reset to
 * -EINVAL so a later register attempt starts clean.
 *
 * Return: 0 on success, -EINVAL on bad params or lookup failure, or the
 *	error from the core irq register/enable calls.
 */
int sde_encoder_helper_register_irq(struct sde_encoder_phys *phys_enc,
	enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		SDE_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* already registered: nothing to do */
	if (irq->irq_idx >= 0) {
		SDE_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	/* map (intr_type, hw_idx) to a core irq index */
	irq->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = sde_core_irq_register_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret) {
		SDE_ERROR_PHYS(phys_enc,
			"enable IRQ for intr:%s failed, irq_idx %d\n",
			irq->name, irq->irq_idx);

		/* unwind: drop the callback registered above */
		sde_core_irq_unregister_callback(phys_enc->sde_kms,
				irq->irq_idx, &irq->cb);

		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "registered irq %s idx: %d\n",
			irq->name, irq->irq_idx);

	return ret;
}
651
/*
 * sde_encoder_helper_unregister_irq - disable and unregister the irq at
 *	@intr_idx, the inverse of sde_encoder_helper_register_irq().
 * @phys_enc: physical encoder owning the irq table
 * @intr_idx: index into phys_enc->irq[]
 *
 * Disable/unregister failures are logged to the event log but do not
 * abort the teardown; irq->irq_idx is always reset to -EINVAL.
 *
 * Return: 0, or -EINVAL if phys_enc is NULL. An irq that was never
 *	registered is reported but still returns 0.
 */
int sde_encoder_helper_unregister_irq(struct sde_encoder_phys *phys_enc,
	enum sde_intr_idx intr_idx)
{
	struct sde_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* silently skip irqs that weren't registered */
	if (irq->irq_idx < 0) {
		SDE_ERROR(
			"extra unregister irq, enc%d intr_idx:0x%x hw_idx:0x%x irq_idx:0x%x\n",
				DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx);
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, SDE_EVTLOG_ERROR);
		return 0;
	}

	/* best-effort teardown: log failures and continue */
	ret = sde_core_irq_disable(phys_enc->sde_kms, &irq->irq_idx, 1);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	ret = sde_core_irq_unregister_callback(phys_enc->sde_kms, irq->irq_idx,
			&irq->cb);
	if (ret)
		SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
				irq->irq_idx, ret, SDE_EVTLOG_ERROR);

	SDE_EVT32(DRMID(phys_enc->parent), intr_idx, irq->hw_idx, irq->irq_idx);
	SDE_DEBUG_PHYS(phys_enc, "unregistered %d\n", irq->irq_idx);

	/* mark as unregistered so a re-register starts from lookup */
	irq->irq_idx = -EINVAL;

	return 0;
}
693
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400694void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400695 struct sde_encoder_hw_resources *hw_res,
696 struct drm_connector_state *conn_state)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400697{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400698 struct sde_encoder_virt *sde_enc = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700699 struct msm_mode_info mode_info;
700 int rc, i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400701
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400702 if (!hw_res || !drm_enc || !conn_state) {
Clarence Ip19af1362016-09-23 14:57:51 -0400703 SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
704 drm_enc != 0, hw_res != 0, conn_state != 0);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400705 return;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400706 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400707
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400708 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400709 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400710
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400711 /* Query resources used by phys encs, expected to be without overlap */
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400712 memset(hw_res, 0, sizeof(*hw_res));
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400713 hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;
714
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400715 for (i = 0; i < sde_enc->num_phys_encs; i++) {
716 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
717
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400718 if (phys && phys->ops.get_hw_resources)
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400719 phys->ops.get_hw_resources(phys, hw_res, conn_state);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400720 }
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700721
Jeykumar Sankaran905ba332017-10-19 10:45:02 -0700722 /**
723 * NOTE: Do not use sde_encoder_get_mode_info here as this function is
724 * called from atomic_check phase. Use the below API to get mode
725 * information of the temporary conn_state passed.
726 */
727 rc = sde_connector_get_mode_info(conn_state, &mode_info);
728 if (rc) {
729 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
730 return;
731 }
732
733 hw_res->topology = mode_info.topology;
Jeykumar Sankaran6f215d42017-09-12 16:15:23 -0700734 hw_res->is_primary = sde_enc->disp_info.is_primary;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400735}
736
/*
 * sde_encoder_destroy - destroy a virtual encoder and all of its physical
 *	sub-encoders, then release the virtual encoder itself.
 * @drm_enc: base drm encoder embedded in the sde virtual encoder
 *
 * Tears down, in order: the RSC client, each physical encoder, the base drm
 * encoder state, the encoder lock, the input handler, and finally the
 * virtual encoder allocation. Safe to call with a NULL encoder (no-op).
 */
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* hold enc_lock while tearing down phys encoders */
	mutex_lock(&sde_enc->enc_lock);
	sde_rsc_client_destroy(sde_enc->rsc_client);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	/* any phys enc left counted here lacked a destroy op above */
	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	/* base cleanup must precede mutex_destroy and the final kfree */
	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&sde_enc->enc_lock);

	if (sde_enc->input_handler) {
		kfree(sde_enc->input_handler);
		sde_enc->input_handler = NULL;
		sde_enc->input_handler_registered = false;
	}

	kfree(sde_enc);
}
780
/*
 * sde_encoder_helper_split_config - program MDP-top split-pipe / pp-split
 *	registers according to this physical encoder's split role.
 * @phys_enc: physical encoder to configure
 * @interface: hardware interface driven by this encoder
 *
 * Only applies to DSI displays. SOLO role disables both split modes;
 * MASTER programs split-pipe; any other role (slave) programs pp-split
 * based on the master's pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &sde_enc->disp_info;

	/* split config only relevant for DSI interfaces */
	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/**
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is zeroed, so these calls program "disabled" */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* pp-split slave interface only applies to the PPSPLIT topology */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else if (sde_enc->hw_pp[0]) {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
850
/*
 * sde_encoder_virt_atomic_check - validate a proposed encoder state.
 * @drm_enc: base drm encoder
 * @crtc_state: proposed crtc state
 * @conn_state: proposed connector state
 *
 * Runs, in order: per-phys atomic_check/mode_fixup, full-frame ROI
 * validation on modeset, old-topology bookkeeping, mode-info query plus
 * RM reservation and topology/blob updates on modeset, ROI v1 check, and
 * finally crtc-info derivation on the adjusted mode.
 *
 * Return: 0 on success, negative error code on validation failure.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_crtc_state *sde_crtc_state = NULL;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	sde_conn = to_sde_connector(conn_state->connector);
	sde_conn_state = to_sde_connector_state(conn_state);
	sde_crtc_state = to_sde_crtc_state(crtc_state);

	SDE_EVT32(DRMID(drm_enc), drm_atomic_crtc_needs_modeset(crtc_state));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* atomic_check takes precedence over legacy mode_fixup */
		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/*
	 * On modeset, partial-update ROIs are not allowed: any merged ROI on
	 * connector or crtc must exactly match the full adjusted mode.
	 */
	if (!ret && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct sde_rect mode_roi, roi;

		mode_roi.x = 0;
		mode_roi.y = 0;
		mode_roi.w = crtc_state->adjusted_mode.hdisplay;
		mode_roi.h = crtc_state->adjusted_mode.vdisplay;

		if (sde_conn_state->rois.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_conn_state->rois, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on connector invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (sde_crtc_state->user_roi_list.num_rects) {
			sde_kms_rect_merge_rectangles(
					&sde_crtc_state->user_roi_list, &roi);
			if (!sde_kms_rect_is_equal(&mode_roi, &roi)) {
				SDE_ERROR_ENC(sde_enc,
					"roi (%d,%d,%d,%d) on crtc invalid during modeset\n",
						roi.x, roi.y, roi.w, roi.h);
				ret = -EINVAL;
			}
		}

		if (ret)
			return ret;
	}

	if (!ret) {
		/**
		 * record topology in previous atomic state to be able to handle
		 * topology transitions correctly.
		 */
		enum sde_rm_topology_name old_top;

		old_top  = sde_connector_get_property(conn_state,
				CONNECTOR_PROP_TOPOLOGY_NAME);
		ret = sde_connector_set_old_topology_name(conn_state, old_top);
		if (ret)
			return ret;
	}

	if (!ret && sde_conn && drm_atomic_crtc_needs_modeset(crtc_state)) {
		struct msm_display_topology *topology = NULL;

		/* query mode info for the temporary connector state */
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info, rc = %d\n", ret);
			return ret;
		}

		/* Reserve dynamic resources, indicating atomic_check phase */
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
			conn_state, true);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to reserve resources, rc = %d\n",
				ret);
			return ret;
		}

		/**
		 * Update connector state with the topology selected for the
		 * resource set validated. Reset the topology if we are
		 * de-activating crtc.
		 */
		if (crtc_state->active)
			topology = &sde_conn_state->mode_info.topology;

		ret = sde_rm_update_topology(conn_state, topology);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"RM failed to update topology, rc: %d\n", ret);
			return ret;
		}

		ret = sde_connector_set_blob_data(conn_state->connector,
				conn_state,
				CONNECTOR_PROP_SDE_INFO);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"connector failed to update info, rc: %d\n",
				ret);
			return ret;
		}

	}

	ret = sde_connector_roi_v1_check_roi(conn_state);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "connector roi check failed, rc: %d",
				ret);
		return ret;
	}

	/* ret is always 0 here given the early returns above */
	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
1016
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001017static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
1018 int pic_width, int pic_height)
1019{
1020 if (!dsc || !pic_width || !pic_height) {
1021 SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
1022 pic_width, pic_height);
1023 return -EINVAL;
1024 }
1025
1026 if ((pic_width % dsc->slice_width) ||
1027 (pic_height % dsc->slice_height)) {
1028 SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
1029 pic_width, pic_height,
1030 dsc->slice_width, dsc->slice_height);
1031 return -EINVAL;
1032 }
1033
1034 dsc->pic_width = pic_width;
1035 dsc->pic_height = pic_height;
1036
1037 return 0;
1038}
1039
1040static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
1041 int intf_width)
1042{
1043 int slice_per_pkt, slice_per_intf;
1044 int bytes_in_slice, total_bytes_per_intf;
1045
1046 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
1047 (intf_width < dsc->slice_width)) {
1048 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
1049 intf_width, dsc ? dsc->slice_width : -1);
1050 return;
1051 }
1052
1053 slice_per_pkt = dsc->slice_per_pkt;
1054 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
1055
1056 /*
1057 * If slice_per_pkt is greater than slice_per_intf then default to 1.
1058 * This can happen during partial update.
1059 */
1060 if (slice_per_pkt > slice_per_intf)
1061 slice_per_pkt = 1;
1062
1063 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
1064 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
1065
1066 dsc->eol_byte_num = total_bytes_per_intf % 3;
1067 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
1068 dsc->bytes_in_slice = bytes_in_slice;
1069 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
1070 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
1071}
1072
1073static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
1074 int enc_ip_width)
1075{
1076 int ssm_delay, total_pixels, soft_slice_per_enc;
1077
1078 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
1079
1080 /*
1081 * minimum number of initial line pixels is a sum of:
1082 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
1083 * 91 for 10 bpc) * 3
1084 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
1085 * 3. the initial xmit delay
1086 * 4. total pipeline delay through the "lock step" of encoder (47)
1087 * 5. 6 additional pixels as the output of the rate buffer is
1088 * 48 bits wide
1089 */
1090 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
1091 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
1092 if (soft_slice_per_enc > 1)
1093 total_pixels += (ssm_delay * 3);
1094 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
1095 return 0;
1096}
1097
1098static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
1099 struct msm_display_dsc_info *dsc)
1100{
1101 /*
1102 * As per the DSC spec, ICH_RESET can be either end of the slice line
1103 * or at the end of the slice. HW internally generates ich_reset at
1104 * end of the slice line if DSC_MERGE is used or encoder has two
1105 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
1106 * is not used then it will generate ich_reset at the end of slice.
1107 *
1108 * Now as per the spec, during one PPS session, position where
1109 * ich_reset is generated should not change. Now if full-screen frame
1110 * has more than 1 soft slice then HW will automatically generate
1111 * ich_reset at the end of slice_line. But for the same panel, if
1112 * partial frame is enabled and only 1 encoder is used with 1 slice,
1113 * then HW will generate ich_reset at end of the slice. This is a
1114 * mismatch. Prevent this by overriding HW's decision.
1115 */
1116 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
1117 (dsc->slice_width == dsc->pic_width);
1118}
1119
1120static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
1121 struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001122 u32 common_mode, bool ich_reset, bool enable)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001123{
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001124 if (!enable) {
1125 if (hw_pp->ops.disable_dsc)
1126 hw_pp->ops.disable_dsc(hw_pp);
1127 return;
1128 }
1129
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001130 if (hw_dsc->ops.dsc_config)
1131 hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);
1132
1133 if (hw_dsc->ops.dsc_config_thresh)
1134 hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);
1135
1136 if (hw_pp->ops.setup_dsc)
1137 hw_pp->ops.setup_dsc(hw_pp);
1138
1139 if (hw_pp->ops.enable_dsc)
1140 hw_pp->ops.enable_dsc(hw_pp);
1141}
1142
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001143static void _sde_encoder_get_connector_roi(
1144 struct sde_encoder_virt *sde_enc,
1145 struct sde_rect *merged_conn_roi)
1146{
1147 struct drm_connector *drm_conn;
1148 struct sde_connector_state *c_state;
1149
1150 if (!sde_enc || !merged_conn_roi)
1151 return;
1152
1153 drm_conn = sde_enc->phys_encs[0]->connector;
1154
1155 if (!drm_conn || !drm_conn->state)
1156 return;
1157
1158 c_state = to_sde_connector_state(drm_conn->state);
1159 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
1160}
1161
/*
 * _sde_encoder_dsc_n_lm_1_enc_1_intf - configure DSC for topologies using a
 *	single DSC encoder and a single interface.
 * @sde_enc: virtual encoder (hw_pp[0]/hw_dsc[0]/cur_master must be valid)
 *
 * Return: 0 on success, -EINVAL on invalid state or mode-info failure.
 */
static int _sde_encoder_dsc_n_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_mode_info mode_info;
	struct msm_display_dsc_info *dsc = NULL;
	int rc;

	if (hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* pic dim must be set before slice math and ich-reset evaluation */
	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/* single encoder: uncompressed encoder input equals interface width */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
Ingrid Gallardo83532222017-06-02 16:48:51 -07001212
Vishnuvardhan Prodduturi6b1803a2019-01-19 16:35:34 +05301213
/*
 * _sde_encoder_dsc_4_lm_4_enc_2_intf - configure DSC for a topology with
 *	four layer mixers, four DSC encoders and two interfaces.
 * @sde_enc: virtual encoder
 * @params: kickoff parameters (num_channels, affected_displays)
 *
 * NOTE(review): hw_pp/hw_dsc are populated only for indices below
 * params->num_channels, yet indices 0..3 are used unconditionally below;
 * confirm callers guarantee num_channels == 4 for this topology (and that
 * MAX_CHANNELS_PER_ENC >= 4), otherwise the NULL entries left by the
 * memsets would be dereferenced in _sde_encoder_dsc_pipe_cfg.
 *
 * Return: 0 on success, -EINVAL on invalid state or mode-info failure.
 */
static int _sde_encoder_dsc_4_lm_4_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	memset(hw_dsc, 0, sizeof(struct sde_hw_dsc *)*MAX_CHANNELS_PER_ENC);
	memset(hw_pp, 0, sizeof(struct sde_hw_pingpong *)*MAX_CHANNELS_PER_ENC);

	for (i = 0; i < params->num_channels; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* single bit set means only one of the displays is being updated */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));
	memcpy(&dsc[2], &mode_info.comp_info.dsc_info, sizeof(dsc[2]));
	memcpy(&dsc[3], &mode_info.comp_info.dsc_info, sizeof(dsc[3]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[2], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[3], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-panel update splits the width across the two interfaces */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[2], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[3], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[2], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[3], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], &dsc[0],
			dsc_common_mode, ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], &dsc[1],
			dsc_common_mode, ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[2], hw_pp[2], &dsc[2],
			dsc_common_mode, ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[3], hw_pp[3], &dsc[3],
			dsc_common_mode, ich_res, true);

	return 0;
}
1318
1319
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - configure DSC for a topology with
 *	two layer mixers, two DSC encoders and two interfaces (no DSC merge).
 * @sde_enc: virtual encoder
 * @params: kickoff parameters (num_channels, affected_displays)
 *
 * NOTE(review): dsc[0]/dsc[1] are the only entries initialized, while the
 * final loop iterates num_phys_encs — confirm num_phys_encs <= 2 holds for
 * this topology.
 *
 * Return: 0 on success, -EINVAL on invalid state or mode-info failure.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < params->num_channels; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	/* single bit set means only one of the displays is being updated */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	memcpy(&dsc[0], &mode_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &mode_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-panel update splits the width across the two interfaces */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		/* enable only the pipes whose display is being updated */
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
1413
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - configure DSC for a topology with
 *	two layer mixers, two DSC encoders merged into one interface
 *	(DSC merge case).
 * @sde_enc: virtual encoder
 * @params: kickoff parameters (num_channels, affected_displays)
 *
 * Return: 0 on success, -EINVAL on invalid state or mode-info failure.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
		struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc = NULL;
	struct msm_mode_info mode_info;
	bool half_panel_partial_update;
	int i, rc;

	for (i = 0; i < params->num_channels; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
		return -EINVAL;
	}

	dsc = &mode_info.comp_info.dsc_info;

	/* single bit set means only one of the displays is being updated */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* slave DSC stays disabled during half-panel partial update */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
1485
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001486static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
1487{
1488 struct sde_encoder_virt *sde_enc;
1489 struct drm_connector *drm_conn;
1490 struct drm_display_mode *adj_mode;
1491 struct sde_rect roi;
1492
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001493 if (!drm_enc) {
1494 SDE_ERROR("invalid encoder parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001495 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001496 }
1497
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001498 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001499 if (!sde_enc->crtc || !sde_enc->crtc->state) {
1500 SDE_ERROR("invalid crtc parameter\n");
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001501 return -EINVAL;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001502 }
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001503
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001504 if (!sde_enc->cur_master) {
1505 SDE_ERROR("invalid cur_master parameter\n");
1506 return -EINVAL;
1507 }
1508
1509 adj_mode = &sde_enc->cur_master->cached_mode;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001510 drm_conn = sde_enc->cur_master->connector;
1511
1512 _sde_encoder_get_connector_roi(sde_enc, &roi);
1513 if (sde_kms_rect_is_null(&roi)) {
1514 roi.w = adj_mode->hdisplay;
1515 roi.h = adj_mode->vdisplay;
1516 }
1517
1518 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
1519 sizeof(sde_enc->prv_conn_roi));
1520 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
1521
1522 return 0;
1523}
1524
1525static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
1526 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001527{
1528 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001529 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001530 int ret = 0;
1531
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001532 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
1533 !sde_enc->phys_encs[0]->connector)
1534 return -EINVAL;
1535
1536 drm_conn = sde_enc->phys_encs[0]->connector;
1537
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001538 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001539 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001540 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
1541 return -EINVAL;
1542 }
1543
Jayant Shekharac7bd942019-02-26 15:44:54 +05301544 params->num_channels =
1545 sde_rm_get_topology_num_encoders(topology);
1546
Ingrid Gallardo83532222017-06-02 16:48:51 -07001547 SDE_DEBUG_ENC(sde_enc, "topology:%d\n", topology);
Lloyd Atkinson5ca13aa2017-10-26 18:12:20 -04001548 SDE_EVT32(DRMID(&sde_enc->base), topology,
1549 sde_enc->cur_conn_roi.x,
1550 sde_enc->cur_conn_roi.y,
1551 sde_enc->cur_conn_roi.w,
1552 sde_enc->cur_conn_roi.h,
1553 sde_enc->prv_conn_roi.x,
1554 sde_enc->prv_conn_roi.y,
1555 sde_enc->prv_conn_roi.w,
1556 sde_enc->prv_conn_roi.h,
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001557 sde_enc->cur_master->cached_mode.hdisplay,
1558 sde_enc->cur_master->cached_mode.vdisplay);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001559
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001560 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
1561 &sde_enc->prv_conn_roi))
1562 return ret;
1563
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001564 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001565 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Ingrid Gallardo83532222017-06-02 16:48:51 -07001566 case SDE_RM_TOPOLOGY_DUALPIPE_3DMERGE_DSC:
1567 ret = _sde_encoder_dsc_n_lm_1_enc_1_intf(sde_enc);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001568 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001569 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -04001570 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001571 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -07001572 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Kalyan Thota27ec06c2019-03-18 13:19:59 +05301573 case SDE_RM_TOPOLOGY_QUADPIPE_3DMERGE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -04001574 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001575 break;
Vishnuvardhan Prodduturi6b1803a2019-01-19 16:35:34 +05301576 case SDE_RM_TOPOLOGY_QUADPIPE_DSCMERGE:
1577 ret = _sde_encoder_dsc_4_lm_4_enc_2_intf(sde_enc, params);
1578 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001579 default:
1580 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
1581 topology);
1582 return -EINVAL;
1583 };
1584
1585 return ret;
1586}
1587
Dhaval Patelaab9b522017-07-20 12:38:46 -07001588static void _sde_encoder_update_vsync_source(struct sde_encoder_virt *sde_enc,
1589 struct msm_display_info *disp_info, bool is_dummy)
1590{
1591 struct sde_vsync_source_cfg vsync_cfg = { 0 };
1592 struct msm_drm_private *priv;
1593 struct sde_kms *sde_kms;
1594 struct sde_hw_mdp *hw_mdptop;
1595 struct drm_encoder *drm_enc;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001596 struct msm_mode_info mode_info;
1597 int i, rc = 0;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001598
Jayant Shekhar136e0592018-10-09 18:32:33 +05301599 if (!sde_enc || !sde_enc->cur_master || !disp_info) {
Dhaval Patelaab9b522017-07-20 12:38:46 -07001600 SDE_ERROR("invalid param sde_enc:%d or disp_info:%d\n",
1601 sde_enc != NULL, disp_info != NULL);
1602 return;
1603 } else if (sde_enc->num_phys_encs > ARRAY_SIZE(sde_enc->hw_pp)) {
1604 SDE_ERROR("invalid num phys enc %d/%d\n",
1605 sde_enc->num_phys_encs,
1606 (int) ARRAY_SIZE(sde_enc->hw_pp));
1607 return;
1608 }
1609
1610 drm_enc = &sde_enc->base;
1611 /* this pointers are checked in virt_enable_helper */
1612 priv = drm_enc->dev->dev_private;
1613
1614 sde_kms = to_sde_kms(priv->kms);
1615 if (!sde_kms) {
1616 SDE_ERROR("invalid sde_kms\n");
1617 return;
1618 }
1619
1620 hw_mdptop = sde_kms->hw_mdp;
1621 if (!hw_mdptop) {
1622 SDE_ERROR("invalid mdptop\n");
1623 return;
1624 }
1625
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001626 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1627 if (rc) {
1628 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07001629 return;
1630 }
1631
Dhaval Patelaab9b522017-07-20 12:38:46 -07001632 if (hw_mdptop->ops.setup_vsync_source &&
1633 disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
1634 for (i = 0; i < sde_enc->num_phys_encs; i++)
1635 vsync_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;
1636
1637 vsync_cfg.pp_count = sde_enc->num_phys_encs;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001638 vsync_cfg.frame_rate = mode_info.frame_rate;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301639 vsync_cfg.vsync_source =
1640 sde_enc->cur_master->hw_pp->caps->te_source;
Dhaval Patelaab9b522017-07-20 12:38:46 -07001641 if (is_dummy)
1642 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_1;
1643 else if (disp_info->is_te_using_watchdog_timer)
1644 vsync_cfg.vsync_source = SDE_VSYNC_SOURCE_WD_TIMER_0;
Kalyan Thotaa02db2c2018-04-27 11:39:18 +05301645
Dhaval Patelaab9b522017-07-20 12:38:46 -07001646 vsync_cfg.is_dummy = is_dummy;
1647
1648 hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
1649 }
1650}
1651
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001652static int _sde_encoder_dsc_disable(struct sde_encoder_virt *sde_enc)
1653{
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001654 int i, ret = 0;
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001655 struct sde_hw_pingpong *hw_pp = NULL;
1656 struct sde_hw_dsc *hw_dsc = NULL;
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001657
1658 if (!sde_enc || !sde_enc->phys_encs[0] ||
1659 !sde_enc->phys_encs[0]->connector) {
1660 SDE_ERROR("invalid params %d %d\n",
1661 !sde_enc, sde_enc ? !sde_enc->phys_encs[0] : -1);
1662 return -EINVAL;
1663 }
1664
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001665 /* Disable DSC for all the pp's present in this topology */
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001666 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1667 hw_pp = sde_enc->hw_pp[i];
1668 hw_dsc = sde_enc->hw_dsc[i];
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001669
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001670 if (hw_pp && hw_pp->ops.disable_dsc)
1671 hw_pp->ops.disable_dsc(hw_pp);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001672
Jeykumar Sankaran586d0922017-09-18 15:01:33 -07001673 if (hw_dsc && hw_dsc->ops.dsc_disable)
1674 hw_dsc->ops.dsc_disable(hw_dsc);
Ingrid Gallardo2a2befb2017-08-07 15:02:51 -07001675 }
1676
1677 return ret;
1678}
1679
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001680static int _sde_encoder_switch_to_watchdog_vsync(struct drm_encoder *drm_enc)
1681{
1682 struct sde_encoder_virt *sde_enc;
1683 struct msm_display_info disp_info;
1684
1685 if (!drm_enc) {
1686 pr_err("invalid drm encoder\n");
1687 return -EINVAL;
1688 }
1689
1690 sde_enc = to_sde_encoder_virt(drm_enc);
1691
1692 sde_encoder_control_te(drm_enc, false);
1693
1694 memcpy(&disp_info, &sde_enc->disp_info, sizeof(disp_info));
1695 disp_info.is_te_using_watchdog_timer = true;
1696 _sde_encoder_update_vsync_source(sde_enc, &disp_info, false);
1697
1698 sde_encoder_control_te(drm_enc, true);
1699
1700 return 0;
1701}
1702
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001703static int _sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -07001704 struct drm_encoder *drm_enc,
1705 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001706{
1707 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001708 struct drm_crtc *crtc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001709 enum sde_rsc_state rsc_state;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001710 struct sde_rsc_cmd_config *rsc_config;
1711 int ret, prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001712 struct msm_display_info *disp_info;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001713 struct msm_mode_info mode_info;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001714 int wait_vblank_crtc_id = SDE_RSC_INVALID_CRTC_ID;
1715 int wait_count = 0;
1716 struct drm_crtc *primary_crtc;
1717 int pipe = -1;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001718 int rc = 0;
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001719 int wait_refcount;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001720
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001721 if (!drm_enc || !drm_enc->dev) {
1722 SDE_ERROR("invalid encoder arguments\n");
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001723 return -EINVAL;
1724 }
1725
1726 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001727 crtc = sde_enc->crtc;
1728
1729 if (!sde_enc->crtc) {
1730 SDE_ERROR("invalid crtc parameter\n");
1731 return -EINVAL;
1732 }
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001733 disp_info = &sde_enc->disp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001734 rsc_config = &sde_enc->rsc_config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001735
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001736 if (!sde_enc->rsc_client) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001737 SDE_DEBUG_ENC(sde_enc, "rsc client not created\n");
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001738 return 0;
1739 }
1740
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001741 rc = _sde_encoder_get_mode_info(drm_enc, &mode_info);
1742 if (rc) {
1743 SDE_ERROR_ENC(sde_enc, "failed to mode info\n");
1744 return 0;
1745 }
1746
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001747 /**
1748 * only primary command mode panel can request CMD state.
1749 * all other panels/displays can request for VID state including
1750 * secondary command mode panel.
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001751 * Clone mode encoder can request CLK STATE only.
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001752 */
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07001753 if (sde_encoder_in_clone_mode(drm_enc))
1754 rsc_state = enable ? SDE_RSC_CLK_STATE : SDE_RSC_IDLE_STATE;
1755 else
1756 rsc_state = enable ?
1757 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1758 && disp_info->is_primary) ? SDE_RSC_CMD_STATE :
1759 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
1760
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001761 prefill_lines = config ? mode_info.prefill_lines +
1762 config->inline_rotate_prefill : mode_info.prefill_lines;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001763
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001764 /* compare specific items and reconfigure the rsc */
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001765 if ((rsc_config->fps != mode_info.frame_rate) ||
1766 (rsc_config->vtotal != mode_info.vtotal) ||
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001767 (rsc_config->prefill_lines != prefill_lines) ||
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001768 (rsc_config->jitter_numer != mode_info.jitter_numer) ||
1769 (rsc_config->jitter_denom != mode_info.jitter_denom)) {
1770 rsc_config->fps = mode_info.frame_rate;
1771 rsc_config->vtotal = mode_info.vtotal;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001772 rsc_config->prefill_lines = prefill_lines;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07001773 rsc_config->jitter_numer = mode_info.jitter_numer;
1774 rsc_config->jitter_denom = mode_info.jitter_denom;
Alan Kwong56f1a942017-04-04 11:53:42 -07001775 sde_enc->rsc_state_init = false;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001776 }
Alan Kwong56f1a942017-04-04 11:53:42 -07001777
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001778 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001779 && disp_info->is_primary) {
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001780 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001781 sde_enc->rsc_state_init = true;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001782
1783 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001784 rsc_state, rsc_config, crtc->base.id,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001785 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001786 } else {
1787 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001788 rsc_state, NULL, crtc->base.id,
1789 &wait_vblank_crtc_id);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001790 }
1791
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001792 /**
1793 * if RSC performed a state change that requires a VBLANK wait, it will
1794 * set wait_vblank_crtc_id to the CRTC whose VBLANK we must wait on.
1795 *
1796 * if we are the primary display, we will need to enable and wait
1797 * locally since we hold the commit thread
1798 *
1799 * if we are an external display, we must send a signal to the primary
1800 * to enable its VBLANK and wait one, since the RSC hardware is driven
1801 * by the primary panel's VBLANK signals
1802 */
1803 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id);
1804 if (ret) {
1805 SDE_ERROR_ENC(sde_enc,
1806 "sde rsc client update failed ret:%d\n", ret);
1807 return ret;
1808 } else if (wait_vblank_crtc_id == SDE_RSC_INVALID_CRTC_ID) {
1809 return ret;
1810 }
1811
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001812 if (wait_vblank_crtc_id)
1813 wait_refcount =
1814 sde_rsc_client_get_vsync_refcount(sde_enc->rsc_client);
1815 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1816 SDE_EVTLOG_FUNC_ENTRY);
1817
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001818 if (crtc->base.id != wait_vblank_crtc_id) {
1819 primary_crtc = drm_crtc_find(drm_enc->dev, wait_vblank_crtc_id);
1820 if (!primary_crtc) {
1821 SDE_ERROR_ENC(sde_enc,
1822 "failed to find primary crtc id %d\n",
1823 wait_vblank_crtc_id);
1824 return -EINVAL;
1825 }
1826 pipe = drm_crtc_index(primary_crtc);
1827 }
1828
1829 /**
1830 * note: VBLANK is expected to be enabled at this point in
1831 * resource control state machine if on primary CRTC
1832 */
1833 for (wait_count = 0; wait_count < MAX_RSC_WAIT; wait_count++) {
1834 if (sde_rsc_client_is_state_update_complete(
1835 sde_enc->rsc_client))
1836 break;
1837
1838 if (crtc->base.id == wait_vblank_crtc_id)
1839 ret = sde_encoder_wait_for_event(drm_enc,
1840 MSM_ENC_VBLANK);
1841 else
1842 drm_wait_one_vblank(drm_enc->dev, pipe);
1843
1844 if (ret) {
1845 SDE_ERROR_ENC(sde_enc,
1846 "wait for vblank failed ret:%d\n", ret);
Dhaval Patelef58f0b2018-01-22 19:13:52 -08001847 /**
1848 * rsc hardware may hang without vsync. avoid rsc hang
1849 * by generating the vsync from watchdog timer.
1850 */
1851 if (crtc->base.id == wait_vblank_crtc_id)
1852 _sde_encoder_switch_to_watchdog_vsync(drm_enc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001853 }
1854 }
1855
1856 if (wait_count >= MAX_RSC_WAIT)
1857 SDE_EVT32(DRMID(drm_enc), wait_vblank_crtc_id, wait_count,
1858 SDE_EVTLOG_ERROR);
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001859
Ingrid Gallardoe52302c2017-11-28 19:30:47 -08001860 if (wait_refcount)
1861 sde_rsc_client_reset_vsync_refcount(sde_enc->rsc_client);
1862 SDE_EVT32_VERBOSE(DRMID(drm_enc), wait_vblank_crtc_id, wait_refcount,
1863 SDE_EVTLOG_FUNC_EXIT);
1864
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001865 return ret;
1866}
1867
Dhaval Patel1b5605b2017-07-26 18:19:50 -07001868static void _sde_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
1869{
1870 struct sde_encoder_virt *sde_enc;
1871 int i;
1872
1873 if (!drm_enc) {
1874 SDE_ERROR("invalid encoder\n");
1875 return;
1876 }
1877
1878 sde_enc = to_sde_encoder_virt(drm_enc);
1879
1880 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1881 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1882 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1883
1884 if (phys && phys->ops.irq_control)
1885 phys->ops.irq_control(phys, enable);
1886 }
1887
1888}
1889
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07001890/* keep track of the userspace vblank during modeset */
1891static void _sde_encoder_modeset_helper_locked(struct drm_encoder *drm_enc,
1892 u32 sw_event)
1893{
1894 struct sde_encoder_virt *sde_enc;
1895 bool enable;
1896 int i;
1897
1898 if (!drm_enc) {
1899 SDE_ERROR("invalid encoder\n");
1900 return;
1901 }
1902
1903 sde_enc = to_sde_encoder_virt(drm_enc);
1904 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, vblank_enabled:%d\n",
1905 sw_event, sde_enc->vblank_enabled);
1906
1907 /* nothing to do if vblank not enabled by userspace */
1908 if (!sde_enc->vblank_enabled)
1909 return;
1910
1911 /* disable vblank on pre_modeset */
1912 if (sw_event == SDE_ENC_RC_EVENT_PRE_MODESET)
1913 enable = false;
1914 /* enable vblank on post_modeset */
1915 else if (sw_event == SDE_ENC_RC_EVENT_POST_MODESET)
1916 enable = true;
1917 else
1918 return;
1919
1920 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1921 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1922
1923 if (phys && phys->ops.control_vblank_irq)
1924 phys->ops.control_vblank_irq(phys, enable);
1925 }
1926}
1927
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001928struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
1929{
1930 struct sde_encoder_virt *sde_enc;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001931
1932 if (!drm_enc)
1933 return NULL;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001934 sde_enc = to_sde_encoder_virt(drm_enc);
Dhaval Patel5cd59a02017-06-13 16:29:40 -07001935 return sde_enc->rsc_client;
Dhaval Patel30fae8a2017-04-21 18:42:41 -07001936}
1937
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001938static void _sde_encoder_resource_control_rsc_update(
1939 struct drm_encoder *drm_enc, bool enable)
1940{
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001941 struct sde_encoder_rsc_config rsc_cfg = { 0 };
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001942 struct sde_encoder_virt *sde_enc;
1943
1944 if (!drm_enc) {
1945 SDE_ERROR("invalid encoder argument\n");
1946 return;
1947 }
1948 sde_enc = to_sde_encoder_virt(drm_enc);
1949 if (!sde_enc->crtc) {
1950 SDE_ERROR("invalid crtc\n");
1951 return;
1952 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001953
1954 if (enable) {
1955 rsc_cfg.inline_rotate_prefill =
Harsh Sahu1e52ed02017-11-28 14:34:22 -08001956 sde_crtc_get_inline_prefill(sde_enc->crtc);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001957
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001958 _sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);
1959 } else {
1960 _sde_encoder_update_rsc_client(drm_enc, NULL, false);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04001961 }
1962}
1963
Alan Kwong1124f1f2017-11-10 18:14:39 -05001964static int _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001965 bool enable)
1966{
1967 struct msm_drm_private *priv;
1968 struct sde_kms *sde_kms;
1969 struct sde_encoder_virt *sde_enc;
Alan Kwong1124f1f2017-11-10 18:14:39 -05001970 int rc;
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001971 bool is_cmd_mode, is_primary;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001972
1973 sde_enc = to_sde_encoder_virt(drm_enc);
1974 priv = drm_enc->dev->dev_private;
1975 sde_kms = to_sde_kms(priv->kms);
1976
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05001977 is_cmd_mode = sde_enc->disp_info.capabilities &
1978 MSM_DISPLAY_CAP_CMD_MODE;
1979 is_primary = sde_enc->disp_info.is_primary;
1980
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001981 SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
1982 SDE_EVT32(DRMID(drm_enc), enable);
1983
1984 if (!sde_enc->cur_master) {
1985 SDE_ERROR("encoder master not set\n");
Alan Kwong1124f1f2017-11-10 18:14:39 -05001986 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001987 }
1988
1989 if (enable) {
1990 /* enable SDE core clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05001991 rc = sde_power_resource_enable(&priv->phandle,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001992 sde_kms->core_client, true);
Alan Kwong1124f1f2017-11-10 18:14:39 -05001993 if (rc) {
1994 SDE_ERROR("failed to enable power resource %d\n", rc);
1995 SDE_EVT32(rc, SDE_EVTLOG_ERROR);
1996 return rc;
1997 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07001998
Dhaval Patel30874eb2018-05-31 13:33:31 -07001999 sde_enc->elevated_ahb_vote = true;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002000 /* enable DSI clks */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002001 rc = sde_connector_clk_ctrl(sde_enc->cur_master->connector,
2002 true);
2003 if (rc) {
2004 SDE_ERROR("failed to enable clk control %d\n", rc);
2005 sde_power_resource_enable(&priv->phandle,
2006 sde_kms->core_client, false);
2007 return rc;
2008 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002009
2010 /* enable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002011 _sde_encoder_irq_control(drm_enc, true);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002012
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05002013 if (is_cmd_mode && is_primary)
2014 _sde_encoder_pm_qos_add_request(drm_enc);
2015
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002016 } else {
Lloyd Atkinson7fdd4c22017-11-16 20:10:17 -05002017 if (is_cmd_mode && is_primary)
2018 _sde_encoder_pm_qos_remove_request(drm_enc);
2019
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002020 /* disable all the irq */
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002021 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002022
2023 /* disable DSI clks */
2024 sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);
2025
2026 /* disable SDE core clks */
2027 sde_power_resource_enable(&priv->phandle,
2028 sde_kms->core_client, false);
2029 }
2030
Alan Kwong1124f1f2017-11-10 18:14:39 -05002031 return 0;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002032}
2033
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002034static void sde_encoder_input_event_handler(struct input_handle *handle,
2035 unsigned int type, unsigned int code, int value)
2036{
2037 struct drm_encoder *drm_enc = NULL;
2038 struct sde_encoder_virt *sde_enc = NULL;
Jayant Shekhar779c7522018-06-13 12:44:44 +05302039 struct msm_drm_thread *event_thread = NULL;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002040 struct msm_drm_private *priv = NULL;
2041
2042 if (!handle || !handle->handler || !handle->handler->private) {
2043 SDE_ERROR("invalid encoder for the input event\n");
2044 return;
2045 }
2046
2047 drm_enc = (struct drm_encoder *)handle->handler->private;
2048 if (!drm_enc->dev || !drm_enc->dev->dev_private) {
2049 SDE_ERROR("invalid parameters\n");
2050 return;
2051 }
2052
2053 priv = drm_enc->dev->dev_private;
2054 sde_enc = to_sde_encoder_virt(drm_enc);
2055 if (!sde_enc->crtc || (sde_enc->crtc->index
Jayant Shekhar779c7522018-06-13 12:44:44 +05302056 >= ARRAY_SIZE(priv->event_thread))) {
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002057 SDE_DEBUG_ENC(sde_enc,
2058 "invalid cached CRTC: %d or crtc index: %d\n",
2059 sde_enc->crtc == NULL,
2060 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
2061 return;
2062 }
2063
2064 SDE_EVT32_VERBOSE(DRMID(drm_enc));
2065
Jayant Shekhar779c7522018-06-13 12:44:44 +05302066 event_thread = &priv->event_thread[sde_enc->crtc->index];
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002067
Jayant Shekhar779c7522018-06-13 12:44:44 +05302068 /* Queue input event work to event thread */
2069 kthread_queue_work(&event_thread->worker,
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002070 &sde_enc->input_event_work);
2071}
2072
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002073void sde_encoder_control_idle_pc(struct drm_encoder *drm_enc, bool enable)
2074{
2075 struct sde_encoder_virt *sde_enc;
2076
2077 if (!drm_enc) {
2078 SDE_ERROR("invalid encoder\n");
2079 return;
2080 }
2081 sde_enc = to_sde_encoder_virt(drm_enc);
2082
2083 /* return early if there is no state change */
2084 if (sde_enc->idle_pc_enabled == enable)
2085 return;
2086
2087 sde_enc->idle_pc_enabled = enable;
2088
2089 SDE_DEBUG("idle-pc state:%d\n", sde_enc->idle_pc_enabled);
2090 SDE_EVT32(sde_enc->idle_pc_enabled);
2091}
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002092
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002093static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
2094 u32 sw_event)
2095{
Dhaval Patel99412a52017-07-24 19:16:45 -07002096 bool autorefresh_enabled = false;
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002097 unsigned int lp, idle_pc_duration;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002098 struct sde_encoder_virt *sde_enc;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002099 struct msm_drm_private *priv;
2100 struct msm_drm_thread *disp_thread;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002101 int ret;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002102 bool is_vid_mode = false;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002103
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002104 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
2105 SDE_ERROR("invalid encoder parameters, sw_event:%u\n",
2106 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002107 return -EINVAL;
2108 }
2109 sde_enc = to_sde_encoder_virt(drm_enc);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002110 priv = drm_enc->dev->dev_private;
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002111 is_vid_mode = sde_enc->disp_info.capabilities &
2112 MSM_DISPLAY_CAP_VID_MODE;
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002113
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002114 /*
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002115 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002116 * events and return early for other events (ie wb display).
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002117 */
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002118 if (!sde_enc->idle_pc_enabled &&
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002119 (sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002120 sw_event != SDE_ENC_RC_EVENT_PRE_MODESET &&
2121 sw_event != SDE_ENC_RC_EVENT_POST_MODESET &&
2122 sw_event != SDE_ENC_RC_EVENT_STOP &&
2123 sw_event != SDE_ENC_RC_EVENT_PRE_STOP))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002124 return 0;
2125
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002126 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc:%d\n",
2127 sw_event, sde_enc->idle_pc_enabled);
2128 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002129 sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);
2130
2131 switch (sw_event) {
2132 case SDE_ENC_RC_EVENT_KICKOFF:
2133 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002134 if (kthread_cancel_delayed_work_sync(
2135 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002136 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2137 sw_event);
2138
2139 mutex_lock(&sde_enc->rc_lock);
2140
2141 /* return if the resource control is already in ON state */
2142 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2143 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
2144 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002145 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2146 SDE_EVTLOG_FUNC_CASE1);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002147 mutex_unlock(&sde_enc->rc_lock);
2148 return 0;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002149 } else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
2150 sde_enc->rc_state != SDE_ENC_RC_STATE_IDLE) {
2151 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2152 sw_event, sde_enc->rc_state);
2153 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2154 SDE_EVTLOG_ERROR);
2155 mutex_unlock(&sde_enc->rc_lock);
2156 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002157 }
2158
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002159 if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2160 _sde_encoder_irq_control(drm_enc, true);
2161 } else {
2162 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002163 ret = _sde_encoder_resource_control_helper(drm_enc,
2164 true);
2165 if (ret) {
2166 SDE_ERROR_ENC(sde_enc,
2167 "sw_event:%d, rc in state %d\n",
2168 sw_event, sde_enc->rc_state);
2169 SDE_EVT32(DRMID(drm_enc), sw_event,
2170 sde_enc->rc_state,
2171 SDE_EVTLOG_ERROR);
2172 mutex_unlock(&sde_enc->rc_lock);
2173 return ret;
2174 }
2175
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002176 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2177 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002178
2179 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2180 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
2181 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2182
2183 mutex_unlock(&sde_enc->rc_lock);
2184 break;
2185
2186 case SDE_ENC_RC_EVENT_FRAME_DONE:
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002187 if (!sde_enc->crtc) {
2188 SDE_ERROR("invalid crtc, sw_event:%u\n", sw_event);
2189 return -EINVAL;
2190 }
2191
2192 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2193 SDE_ERROR("invalid crtc index :%u\n",
2194 sde_enc->crtc->index);
2195 return -EINVAL;
2196 }
2197 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2198
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002199 /*
2200 * mutex lock is not used as this event happens at interrupt
2201 * context. And locking is not required as, the other events
2202 * like KICKOFF and STOP does a wait-for-idle before executing
2203 * the resource_control
2204 */
2205 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2206 SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
2207 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002208 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2209 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002210 return -EINVAL;
2211 }
2212
2213 /*
2214 * schedule off work item only when there are no
2215 * frames pending
2216 */
Harsh Sahu1e52ed02017-11-28 14:34:22 -08002217 if (sde_crtc_frame_pending(sde_enc->crtc) > 1) {
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002218 SDE_DEBUG_ENC(sde_enc, "skip schedule work");
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002219 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2220 SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002221 return 0;
2222 }
2223
Dhaval Patel99412a52017-07-24 19:16:45 -07002224 /* schedule delayed off work if autorefresh is disabled */
2225 if (sde_enc->cur_master &&
2226 sde_enc->cur_master->ops.is_autorefresh_enabled)
2227 autorefresh_enabled =
2228 sde_enc->cur_master->ops.is_autorefresh_enabled(
2229 sde_enc->cur_master);
2230
Clarence Ip89628132017-07-27 13:33:51 -04002231 /* set idle timeout based on master connector's lp value */
2232 if (sde_enc->cur_master)
2233 lp = sde_connector_get_lp(
2234 sde_enc->cur_master->connector);
2235 else
2236 lp = SDE_MODE_DPMS_ON;
2237
2238 if (lp == SDE_MODE_DPMS_LP2)
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002239 idle_pc_duration = IDLE_SHORT_TIMEOUT;
Clarence Ip89628132017-07-27 13:33:51 -04002240 else
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002241 idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;
Clarence Ip89628132017-07-27 13:33:51 -04002242
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002243 if (!autorefresh_enabled)
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002244 kthread_mod_delayed_work(
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002245 &disp_thread->worker,
2246 &sde_enc->delayed_off_work,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002247 msecs_to_jiffies(idle_pc_duration));
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002248 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Clarence Ip89628132017-07-27 13:33:51 -04002249 autorefresh_enabled,
Dhaval Patelc9e213b2017-11-02 12:13:12 -07002250 idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002251 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
2252 sw_event);
2253 break;
2254
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002255 case SDE_ENC_RC_EVENT_PRE_STOP:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002256 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002257 if (kthread_cancel_delayed_work_sync(
2258 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002259 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2260 sw_event);
2261
2262 mutex_lock(&sde_enc->rc_lock);
2263
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002264 if (is_vid_mode &&
2265 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2266 _sde_encoder_irq_control(drm_enc, true);
2267 }
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002268 /* skip if is already OFF or IDLE, resources are off already */
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002269 else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002270 sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2271 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
2272 sw_event, sde_enc->rc_state);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002273 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2274 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002275 mutex_unlock(&sde_enc->rc_lock);
2276 return 0;
2277 }
2278
2279 /**
2280 * IRQs are still enabled currently, which allows wait for
2281 * VBLANK which RSC may require to correctly transition to OFF
2282 */
2283 _sde_encoder_resource_control_rsc_update(drm_enc, false);
2284
2285 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2286 SDE_ENC_RC_STATE_PRE_OFF,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002287 SDE_EVTLOG_FUNC_CASE3);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002288
2289 sde_enc->rc_state = SDE_ENC_RC_STATE_PRE_OFF;
2290
2291 mutex_unlock(&sde_enc->rc_lock);
2292 break;
2293
2294 case SDE_ENC_RC_EVENT_STOP:
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002295 /* cancel vsync event work and timer */
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302296 kthread_cancel_work_sync(&sde_enc->vsync_event_work);
Lloyd Atkinson418477a2017-11-07 16:53:39 -05002297 del_timer_sync(&sde_enc->vsync_event_timer);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002298
Jayant Shekhar12d908f2017-10-10 12:11:48 +05302299 mutex_lock(&sde_enc->rc_lock);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002300 /* return if the resource control is already in OFF state */
2301 if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
2302 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
2303 sw_event);
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002304 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2305 SDE_EVTLOG_FUNC_CASE4);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002306 mutex_unlock(&sde_enc->rc_lock);
2307 return 0;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002308 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
2309 sde_enc->rc_state == SDE_ENC_RC_STATE_MODESET) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002310 SDE_ERROR_ENC(sde_enc, "sw_event:%d, rc in state %d\n",
2311 sw_event, sde_enc->rc_state);
2312 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2313 SDE_EVTLOG_ERROR);
2314 mutex_unlock(&sde_enc->rc_lock);
2315 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002316 }
2317
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002318 /**
2319 * expect to arrive here only if in either idle state or pre-off
2320 * and in IDLE state the resources are already disabled
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002321 */
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002322 if (sde_enc->rc_state == SDE_ENC_RC_STATE_PRE_OFF)
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002323 _sde_encoder_resource_control_helper(drm_enc, false);
2324
2325 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002326 SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE4);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002327
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002328 sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;
2329
2330 mutex_unlock(&sde_enc->rc_lock);
2331 break;
2332
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002333 case SDE_ENC_RC_EVENT_PRE_MODESET:
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002334 /* cancel delayed off work, if any */
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002335 if (kthread_cancel_delayed_work_sync(
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002336 &sde_enc->delayed_off_work))
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002337 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
2338 sw_event);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002339
2340 mutex_lock(&sde_enc->rc_lock);
2341
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002342 /* return if the resource control is already in ON state */
2343 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
2344 /* enable all the clks and resources */
Alan Kwong1124f1f2017-11-10 18:14:39 -05002345 ret = _sde_encoder_resource_control_helper(drm_enc,
2346 true);
2347 if (ret) {
2348 SDE_ERROR_ENC(sde_enc,
2349 "sw_event:%d, rc in state %d\n",
2350 sw_event, sde_enc->rc_state);
2351 SDE_EVT32(DRMID(drm_enc), sw_event,
2352 sde_enc->rc_state,
2353 SDE_EVTLOG_ERROR);
2354 mutex_unlock(&sde_enc->rc_lock);
2355 return ret;
2356 }
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002357
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002358 _sde_encoder_resource_control_rsc_update(drm_enc, true);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002359
2360 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2361 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE5);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002362 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002363 }
2364
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002365 ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
2366 if (ret && ret != -EWOULDBLOCK) {
2367 SDE_ERROR_ENC(sde_enc,
2368 "wait for commit done returned %d\n",
2369 ret);
2370 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2371 ret, SDE_EVTLOG_ERROR);
2372 mutex_unlock(&sde_enc->rc_lock);
2373 return -EINVAL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002374 }
2375
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002376 _sde_encoder_irq_control(drm_enc, false);
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002377 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002378
2379 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2380 SDE_ENC_RC_STATE_MODESET, SDE_EVTLOG_FUNC_CASE5);
2381
2382 sde_enc->rc_state = SDE_ENC_RC_STATE_MODESET;
2383 mutex_unlock(&sde_enc->rc_lock);
2384 break;
2385
2386 case SDE_ENC_RC_EVENT_POST_MODESET:
2387 mutex_lock(&sde_enc->rc_lock);
2388
2389 /* return if the resource control is already in ON state */
2390 if (sde_enc->rc_state != SDE_ENC_RC_STATE_MODESET) {
2391 SDE_ERROR_ENC(sde_enc,
2392 "sw_event:%d, rc:%d !MODESET state\n",
2393 sw_event, sde_enc->rc_state);
2394 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2395 SDE_EVTLOG_ERROR);
2396 mutex_unlock(&sde_enc->rc_lock);
2397 return -EINVAL;
2398 }
2399
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07002400 _sde_encoder_modeset_helper_locked(drm_enc, sw_event);
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002401 _sde_encoder_irq_control(drm_enc, true);
2402
2403 _sde_encoder_update_rsc_client(drm_enc, NULL, true);
2404
2405 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2406 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE6);
2407
2408 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2409
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002410 mutex_unlock(&sde_enc->rc_lock);
2411 break;
2412
2413 case SDE_ENC_RC_EVENT_ENTER_IDLE:
2414 mutex_lock(&sde_enc->rc_lock);
2415
2416 if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
Dhaval Patel8a7c3282017-12-05 00:41:58 -08002417 SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002418 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002419 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2420 SDE_EVTLOG_ERROR);
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002421 mutex_unlock(&sde_enc->rc_lock);
2422 return 0;
2423 }
2424
2425 /*
2426 * if we are in ON but a frame was just kicked off,
2427 * ignore the IDLE event, it's probably a stale timer event
2428 */
2429 if (sde_enc->frame_busy_mask[0]) {
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002430 SDE_ERROR_ENC(sde_enc,
Lloyd Atkinsona8781382017-07-17 10:20:43 -04002431 "sw_event:%d, rc:%d frame pending\n",
2432 sw_event, sde_enc->rc_state);
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04002433 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2434 SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002435 mutex_unlock(&sde_enc->rc_lock);
2436 return 0;
2437 }
2438
Dhaval Patele17e0ee2017-08-23 18:01:42 -07002439 if (is_vid_mode) {
2440 _sde_encoder_irq_control(drm_enc, false);
2441 } else {
2442 /* disable all the clks and resources */
2443 _sde_encoder_resource_control_rsc_update(drm_enc,
2444 false);
2445 _sde_encoder_resource_control_helper(drm_enc, false);
2446 }
2447
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002448 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002449 SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002450 sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
2451
2452 mutex_unlock(&sde_enc->rc_lock);
2453 break;
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002454 case SDE_ENC_RC_EVENT_EARLY_WAKEUP:
2455 if (!sde_enc->crtc ||
2456 sde_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
2457 SDE_DEBUG_ENC(sde_enc,
2458 "invalid crtc:%d or crtc index:%d , sw_event:%u\n",
2459 sde_enc->crtc == NULL,
2460 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL,
2461 sw_event);
2462 return -EINVAL;
2463 }
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002464
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002465 disp_thread = &priv->disp_thread[sde_enc->crtc->index];
2466
2467 mutex_lock(&sde_enc->rc_lock);
2468
2469 if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
2470 if (sde_enc->cur_master &&
2471 sde_enc->cur_master->ops.is_autorefresh_enabled)
2472 autorefresh_enabled =
2473 sde_enc->cur_master->ops.is_autorefresh_enabled(
2474 sde_enc->cur_master);
2475 if (autorefresh_enabled) {
2476 SDE_DEBUG_ENC(sde_enc,
2477 "not handling early wakeup since auto refresh is enabled\n");
Jeykumar Sankaran067b3b92018-01-19 10:35:22 -08002478 mutex_unlock(&sde_enc->rc_lock);
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002479 return 0;
2480 }
2481
2482 if (!sde_crtc_frame_pending(sde_enc->crtc))
2483 kthread_mod_delayed_work(&disp_thread->worker,
2484 &sde_enc->delayed_off_work,
2485 msecs_to_jiffies(
2486 IDLE_POWERCOLLAPSE_DURATION));
2487 } else if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
2488 /* enable all the clks and resources */
2489 _sde_encoder_resource_control_rsc_update(drm_enc, true);
2490 _sde_encoder_resource_control_helper(drm_enc, true);
2491
Jayant Shekhar85c40332018-05-08 11:46:36 +05302492 /*
2493 * In some cases, commit comes with slight delay
2494 * (> 80 ms)after early wake up, prevent clock switch
2495 * off to avoid jank in next update. So, increase the
2496 * command mode idle timeout sufficiently to prevent
2497 * such case.
2498 */
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002499 kthread_mod_delayed_work(&disp_thread->worker,
Jayant Shekhar85c40332018-05-08 11:46:36 +05302500 &sde_enc->delayed_off_work,
2501 msecs_to_jiffies(
2502 IDLE_POWERCOLLAPSE_IN_EARLY_WAKEUP));
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08002503
2504 sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
2505 }
2506
2507 SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
2508 SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE8);
2509
2510 mutex_unlock(&sde_enc->rc_lock);
2511 break;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002512 default:
Dhaval Patela5f75952017-07-25 11:17:41 -07002513 SDE_EVT32(DRMID(drm_enc), sw_event, SDE_EVTLOG_ERROR);
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002514 SDE_ERROR("unexpected sw_event: %d\n", sw_event);
2515 break;
2516 }
2517
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07002518 SDE_EVT32_VERBOSE(DRMID(drm_enc), sw_event, sde_enc->idle_pc_enabled,
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002519 sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
2520 return 0;
2521}
2522
/*
 * sde_encoder_virt_mode_set - drm_encoder_helper_funcs .mode_set callback
 * @drm_enc: encoder being programmed
 * @mode: requested display mode
 * @adj_mode: adjusted mode actually applied to the hardware
 *
 * Reserves and caches the hardware blocks (pingpong, DSC) needed for the
 * new mode and propagates mode_set to each physical encoder. For seamless
 * dynamic mode switch (DMS) it brackets the reservation with
 * PRE_MODESET/POST_MODESET resource-control events so clocks/IRQs are held
 * across the transition. Errors are logged and abort the mode set.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector_state *sde_conn_state = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	/* mode_set must run with power resources already voted on */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * cache the crtc in sde_enc on enable for duration of use case
	 * for correctly servicing asynchronous irq events and timers
	 */
	if (!drm_enc->crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}
	sde_enc->crtc = drm_enc->crtc;

	/* find the connector currently routed to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* refresh cached mode info (topology, compression) for adj_mode */
	sde_conn = to_sde_connector(conn);
	sde_conn_state = to_sde_connector_state(conn->state);
	if (sde_conn && sde_conn_state) {
		ret = sde_conn->ops.get_mode_info(adj_mode,
				&sde_conn_state->mode_info,
				sde_kms->catalog->max_mixer_width,
				sde_conn->display);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"failed to get mode info from the display\n");
			return;
		}
	}

	/* release resources before seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode)) {
		/* restore resource state before releasing them */
		ret = sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_MODESET);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"sde resource control failed: %d\n",
					ret);
			return;
		}

		/*
		 * Disable dsc before switch the mode and after pre_modeset,
		 * to guarantee that previous kickoff finished.
		 */
		_sde_encoder_dsc_disable(sde_enc);
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* cache the reserved pingpong blocks; unused slots are NULLed */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* cache the reserved DSC blocks; unused slots are NULLed */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* bind pingpong + connector to each phys encoder and apply mode */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	/* update resources after seamless mode change */
	if (msm_is_mode_seamless_dms(adj_mode))
		sde_encoder_resource_control(&sde_enc->base,
				SDE_ENC_RC_EVENT_POST_MODESET);
}
2657
Veera Sundaram Sankaran33db4282017-11-01 12:45:25 -07002658void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
2659{
2660 struct sde_encoder_virt *sde_enc;
2661 struct sde_encoder_phys *phys;
2662 int i;
2663
2664 if (!drm_enc) {
2665 SDE_ERROR("invalid parameters\n");
2666 return;
2667 }
2668
2669 sde_enc = to_sde_encoder_virt(drm_enc);
2670 if (!sde_enc) {
2671 SDE_ERROR("invalid sde encoder\n");
2672 return;
2673 }
2674
2675 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2676 phys = sde_enc->phys_encs[i];
2677 if (phys && phys->ops.control_te)
2678 phys->ops.control_te(phys, enable);
2679 }
2680}
2681
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302682static int _sde_encoder_input_connect(struct input_handler *handler,
2683 struct input_dev *dev, const struct input_device_id *id)
2684{
2685 struct input_handle *handle;
2686 int rc = 0;
2687
2688 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
2689 if (!handle)
2690 return -ENOMEM;
2691
2692 handle->dev = dev;
2693 handle->handler = handler;
2694 handle->name = handler->name;
2695
2696 rc = input_register_handle(handle);
2697 if (rc) {
2698 pr_err("failed to register input handle\n");
2699 goto error;
2700 }
2701
2702 rc = input_open_device(handle);
2703 if (rc) {
2704 pr_err("failed to open input device\n");
2705 goto error_unregister;
2706 }
2707
2708 return 0;
2709
2710error_unregister:
2711 input_unregister_handle(handle);
2712
2713error:
2714 kfree(handle);
2715
2716 return rc;
2717}
2718
/*
 * _sde_encoder_input_disconnect - input_handler .disconnect callback
 * @handle: handle previously created in _sde_encoder_input_connect
 *
 * Tears down in strict reverse order of connect: close the device first,
 * then unregister the handle, and only then free it.
 */
static void _sde_encoder_input_disconnect(struct input_handle *handle)
{
	input_close_device(handle);
	input_unregister_handle(handle);
	kfree(handle);
}
2725
/**
 * Id table describing which input events trigger the sde handler's
 * callbacks: any device reporting absolute (EV_ABS) multitouch X/Y
 * position changes, i.e. touch events, will match. The table is
 * zero-terminated as required by the input core.
 */
static const struct input_device_id sde_input_ids[] = {
	{
		.flags = INPUT_DEVICE_ID_MATCH_EVBIT,
		.evbit = { BIT_MASK(EV_ABS) },
		.absbit = { [BIT_WORD(ABS_MT_POSITION_X)] =
				BIT_MASK(ABS_MT_POSITION_X) |
				BIT_MASK(ABS_MT_POSITION_Y) },
	},
	{ },
};
2741
/*
 * _sde_encoder_input_handler_register - register the encoder input handler
 * @input_handler: handler previously allocated by _sde_encoder_input_handler
 *
 * Registers the handler with the input core so touch events can drive the
 * early-wakeup path. Ownership of @input_handler is NOT transferred: the
 * caller keeps the same pointer cached in sde_enc->input_handler.
 *
 * Returns 0 on success or the negative error from input_register_handler.
 */
static int _sde_encoder_input_handler_register(
		struct input_handler *input_handler)
{
	int rc = 0;

	rc = input_register_handler(input_handler);
	if (rc) {
		pr_err("input_register_handler failed, rc= %d\n", rc);
		/*
		 * Do NOT kfree the handler on failure: the caller still
		 * holds the same pointer in sde_enc->input_handler and may
		 * retry registration on the next encoder enable, so freeing
		 * it here would leave a dangling reference behind
		 * (use-after-free on retry or teardown).
		 */
		return rc;
	}

	return rc;
}
2756
2757static int _sde_encoder_input_handler(
2758 struct sde_encoder_virt *sde_enc)
2759{
2760 struct input_handler *input_handler = NULL;
2761 int rc = 0;
2762
2763 if (sde_enc->input_handler) {
2764 SDE_ERROR_ENC(sde_enc,
2765 "input_handle is active. unexpected\n");
2766 return -EINVAL;
2767 }
2768
2769 input_handler = kzalloc(sizeof(*sde_enc->input_handler), GFP_KERNEL);
2770 if (!input_handler)
2771 return -ENOMEM;
2772
2773 input_handler->event = sde_encoder_input_event_handler;
2774 input_handler->connect = _sde_encoder_input_connect;
2775 input_handler->disconnect = _sde_encoder_input_disconnect;
2776 input_handler->name = "sde";
2777 input_handler->id_table = sde_input_ids;
2778 input_handler->private = sde_enc;
2779
2780 sde_enc->input_handler = input_handler;
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302781 sde_enc->input_handler_registered = false;
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302782
2783 return rc;
2784}
2785
/*
 * _sde_encoder_virt_enable_helper - common hardware setup on enable/restore
 * @drm_enc: encoder being enabled or restored
 *
 * Runs the master-encoder hardware programming shared by the enable and
 * restore paths: DP audio interface selection, UBWC reset, vsync source
 * update, TE enable, and clearing of the cached connector ROIs. Requires
 * sde_enc->cur_master to already be assigned.
 */
static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	if (!sde_kms) {
		SDE_ERROR("invalid sde_kms\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc || !sde_enc->cur_master) {
		SDE_ERROR("invalid sde encoder/master\n");
		return;
	}

	/* route audio to the interface for DisplayPort, if top block allows */
	if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    sde_enc->cur_master->hw_mdptop &&
	    sde_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		sde_enc->cur_master->hw_mdptop->ops.intf_audio_select(
					sde_enc->cur_master->hw_mdptop);

	/* reset UBWC configuration per catalog settings */
	if (sde_enc->cur_master->hw_mdptop &&
			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				sde_enc->cur_master->hw_mdptop,
				sde_kms->catalog);

	_sde_encoder_update_vsync_source(sde_enc, &sde_enc->disp_info, false);
	sde_encoder_control_te(drm_enc, true);

	/* forget stale partial-update ROIs from the previous use case */
	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
}
2828
2829void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
2830{
2831 struct sde_encoder_virt *sde_enc = NULL;
2832 int i;
2833
2834 if (!drm_enc) {
2835 SDE_ERROR("invalid encoder\n");
2836 return;
2837 }
2838 sde_enc = to_sde_encoder_virt(drm_enc);
2839
2840 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2841 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2842
2843 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
2844 phys->ops.restore(phys);
2845 }
2846
2847 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
2848 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2849
2850 _sde_encoder_virt_enable_helper(drm_enc);
2851}
2852
Jayant Shekhar401bcdf2018-07-27 12:15:03 +05302853static void sde_encoder_off_work(struct kthread_work *work)
2854{
2855 struct sde_encoder_virt *sde_enc = container_of(work,
2856 struct sde_encoder_virt, delayed_off_work.work);
2857 struct drm_encoder *drm_enc;
2858
2859 if (!sde_enc) {
2860 SDE_ERROR("invalid sde encoder\n");
2861 return;
2862 }
2863 drm_enc = &sde_enc->base;
2864
2865 sde_encoder_idle_request(drm_enc);
2866}
2867
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002868static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
2869{
2870 struct sde_encoder_virt *sde_enc = NULL;
2871 int i, ret = 0;
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07002872 struct msm_compression_info *comp_info = NULL;
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002873 struct drm_display_mode *cur_mode = NULL;
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002874 struct msm_mode_info mode_info;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002875
2876 if (!drm_enc) {
2877 SDE_ERROR("invalid encoder\n");
2878 return;
2879 }
2880 sde_enc = to_sde_encoder_virt(drm_enc);
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002881
Alan Kwong1124f1f2017-11-10 18:14:39 -05002882 if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
2883 SDE_ERROR("power resource is not enabled\n");
2884 return;
2885 }
2886
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002887 ret = _sde_encoder_get_mode_info(drm_enc, &mode_info);
2888 if (ret) {
2889 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
2890 return;
2891 }
2892
Dhaval Patelf492c5d2018-02-19 07:56:37 -08002893 if (drm_enc->crtc && !sde_enc->crtc)
2894 sde_enc->crtc = drm_enc->crtc;
2895
Jeykumar Sankaran905ba332017-10-19 10:45:02 -07002896 comp_info = &mode_info.comp_info;
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002897 cur_mode = &sde_enc->base.crtc->state->adjusted_mode;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002898
Clarence Ip19af1362016-09-23 14:57:51 -04002899 SDE_DEBUG_ENC(sde_enc, "\n");
Dhaval Patel1b5605b2017-07-26 18:19:50 -07002900 SDE_EVT32(DRMID(drm_enc), cur_mode->hdisplay, cur_mode->vdisplay);
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002901
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002902 sde_enc->cur_master = NULL;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002903 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2904 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2905
2906 if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
2907 SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
2908 sde_enc->cur_master = phys;
2909 break;
2910 }
2911 }
2912
2913 if (!sde_enc->cur_master) {
2914 SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
2915 return;
2916 }
2917
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302918 if (sde_enc->input_handler && !sde_enc->input_handler_registered) {
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302919 ret = _sde_encoder_input_handler_register(
2920 sde_enc->input_handler);
2921 if (ret)
2922 SDE_ERROR(
2923 "input handler registration failed, rc = %d\n", ret);
Shubhashree Dhar0c6ce3c2018-08-03 19:49:31 +05302924 else
2925 sde_enc->input_handler_registered = true;
Shubhashree Dhar25b05422018-05-30 15:42:04 +05302926 }
2927
Jayant Shekhar71a0acb2018-08-17 08:24:51 +05302928 if (!(msm_is_mode_seamless_vrr(cur_mode)
2929 || msm_is_mode_seamless_dms(cur_mode)))
Jayant Shekhar401bcdf2018-07-27 12:15:03 +05302930 kthread_init_delayed_work(&sde_enc->delayed_off_work,
2931 sde_encoder_off_work);
2932
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002933 ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
2934 if (ret) {
2935 SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
2936 ret);
2937 return;
2938 }
Dhaval Patel020f7e122016-11-15 14:39:18 -08002939
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002940 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2941 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002942
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002943 if (!phys)
2944 continue;
2945
2946 phys->comp_type = comp_info->comp_type;
2947 if (phys != sde_enc->cur_master) {
2948 /**
2949 * on DMS request, the encoder will be enabled
2950 * already. Invoke restore to reconfigure the
2951 * new mode.
2952 */
2953 if (msm_is_mode_seamless_dms(cur_mode) &&
2954 phys->ops.restore)
2955 phys->ops.restore(phys);
2956 else if (phys->ops.enable)
Jeykumar Sankaran446a5f12017-05-09 20:30:39 -07002957 phys->ops.enable(phys);
2958 }
Dhaval Patel010f5172017-08-01 22:40:09 -07002959
2960 if (sde_enc->misr_enable && (sde_enc->disp_info.capabilities &
2961 MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
2962 phys->ops.setup_misr(phys, true,
2963 sde_enc->misr_frame_count);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002964 }
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002965
Jeykumar Sankaran69934622017-05-31 18:16:25 -07002966 if (msm_is_mode_seamless_dms(cur_mode) &&
2967 sde_enc->cur_master->ops.restore)
2968 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
2969 else if (sde_enc->cur_master->ops.enable)
Clarence Ipa87f8ec2016-08-23 13:43:19 -04002970 sde_enc->cur_master->ops.enable(sde_enc->cur_master);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08002971
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07002972 _sde_encoder_virt_enable_helper(drm_enc);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002973}
2974
/**
 * sde_encoder_virt_disable - disable the virtual encoder and all of its
 *	physical encoders.
 * @drm_enc: Pointer to drm encoder structure
 *
 * Tears down the display pipe for this encoder: unregisters the input
 * handler, waits for the current frame transfer to finish, runs the
 * resource-control PRE_STOP/STOP sequence around the physical-encoder
 * disables, disables DSC, clears per-phys state, and finally releases the
 * hardware blocks back to the resource manager.
 */
static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	enum sde_intf_mode intf_mode;
	int i = 0;

	/* validate the full pointer chain before any dereference */
	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	/* register access below requires the power resource to be up */
	if (!sde_kms_power_resource_is_enabled(drm_enc->dev)) {
		SDE_ERROR("power resource is not enabled\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	intf_mode = sde_encoder_get_intf_mode(drm_enc);

	SDE_EVT32(DRMID(drm_enc));

	/* stop accepting input (e.g. touch) events while disabled */
	if (sde_enc->input_handler && sde_enc->input_handler_registered) {
		input_unregister_handler(sde_enc->input_handler);
		sde_enc->input_handler_registered = false;
	}

	/* wait for idle */
	sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	/* drain any input-event work already queued on the kthread */
	kthread_flush_work(&sde_enc->input_event_work);

	/*
	 * For primary command mode encoders, execute the resource control
	 * pre-stop operations before the physical encoders are disabled, to
	 * allow the rsc to transition its states properly.
	 *
	 * For other encoder types, rsc should not be enabled until after
	 * they have been fully disabled, so delay the pre-stop operations
	 * until after the physical disable calls have returned.
	 */
	if (sde_enc->disp_info.is_primary && intf_mode == INTF_MODE_CMD) {
		sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_STOP);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

			if (phys && phys->ops.disable)
				phys->ops.disable(phys);
		}
	} else {
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

			if (phys && phys->ops.disable)
				phys->ops.disable(phys);
		}
		sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_PRE_STOP);
	}

	/*
	 * disable dsc after the transfer is complete (for command mode)
	 * and after physical encoder is disabled, to make sure timing
	 * engine is already disabled (for video mode).
	 */
	_sde_encoder_dsc_disable(sde_enc);

	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);

	/* reset per-phys continuous-splash and connector bookkeeping */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		if (sde_enc->phys_encs[i]) {
			sde_enc->phys_encs[i]->cont_splash_settings = false;
			sde_enc->phys_encs[i]->cont_splash_single_flush = 0;
			sde_enc->phys_encs[i]->connector = NULL;
		}
	}

	sde_enc->cur_master = NULL;
	/*
	 * clear the cached crtc in sde_enc on use case finish, after all the
	 * outstanding events and timers have been completed
	 */
	sde_enc->crtc = NULL;

	SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");

	/* hand the reserved hw blocks back to the resource manager */
	sde_rm_release(&sde_kms->rm, drm_enc);
}
3076
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003077static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003078 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003079{
3080 int i = 0;
3081
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003082 for (i = 0; i < catalog->intf_count; i++) {
3083 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04003084 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003085 return catalog->intf[i].id;
3086 }
3087 }
3088
3089 return INTF_MAX;
3090}
3091
Alan Kwongbb27c092016-07-20 16:41:25 -04003092static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
3093 enum sde_intf_type type, u32 controller_id)
3094{
3095 if (controller_id < catalog->wb_count)
3096 return catalog->wb[controller_id].id;
3097
3098 return WB_MAX;
3099}
3100
Dhaval Patel81e87882016-10-19 21:41:56 -07003101static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
3102 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003103{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04003104 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003105 unsigned long lock_flags;
3106
Dhaval Patel81e87882016-10-19 21:41:56 -07003107 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04003108 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04003109
Narendra Muppalla77b32932017-05-10 13:53:11 -07003110 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04003111 sde_enc = to_sde_encoder_virt(drm_enc);
3112
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003113 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003114 if (sde_enc->crtc_vblank_cb)
3115 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003116 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07003117
3118 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07003119 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07003120}
3121
3122static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
3123 struct sde_encoder_phys *phy_enc)
3124{
3125 if (!phy_enc)
3126 return;
3127
Narendra Muppalla77b32932017-05-10 13:53:11 -07003128 SDE_ATRACE_BEGIN("encoder_underrun_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07003129 atomic_inc(&phy_enc->underrun_cnt);
Lloyd Atkinson64b07dd2016-12-12 17:10:57 -05003130 SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
Ingrid Gallardo36ee68d2017-08-30 17:14:33 -07003131
3132 trace_sde_encoder_underrun(DRMID(drm_enc),
3133 atomic_read(&phy_enc->underrun_cnt));
3134
3135 SDE_DBG_CTRL("stop_ftrace");
3136 SDE_DBG_CTRL("panic_underrun");
3137
Narendra Muppalla77b32932017-05-10 13:53:11 -07003138 SDE_ATRACE_END("encoder_underrun_callback");
Lloyd Atkinson09fed912016-06-24 18:14:13 -04003139}
3140
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003141void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
3142 void (*vbl_cb)(void *), void *vbl_data)
3143{
3144 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3145 unsigned long lock_flags;
3146 bool enable;
3147 int i;
3148
3149 enable = vbl_cb ? true : false;
3150
Clarence Ip19af1362016-09-23 14:57:51 -04003151 if (!drm_enc) {
3152 SDE_ERROR("invalid encoder\n");
3153 return;
3154 }
3155 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04003156 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003157
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003158 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003159 sde_enc->crtc_vblank_cb = vbl_cb;
3160 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04003161 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003162
3163 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3164 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
3165
3166 if (phys && phys->ops.control_vblank_irq)
3167 phys->ops.control_vblank_irq(phys, enable);
3168 }
Veera Sundaram Sankarandf79cc92017-10-10 22:32:46 -07003169 sde_enc->vblank_enabled = enable;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04003170}
3171
Alan Kwong628d19e2016-10-31 13:50:13 -04003172void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003173 void (*frame_event_cb)(void *, u32 event),
3174 struct drm_crtc *crtc)
Alan Kwong628d19e2016-10-31 13:50:13 -04003175{
3176 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3177 unsigned long lock_flags;
3178 bool enable;
3179
3180 enable = frame_event_cb ? true : false;
3181
3182 if (!drm_enc) {
3183 SDE_ERROR("invalid encoder\n");
3184 return;
3185 }
3186 SDE_DEBUG_ENC(sde_enc, "\n");
3187 SDE_EVT32(DRMID(drm_enc), enable, 0);
3188
3189 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
3190 sde_enc->crtc_frame_event_cb = frame_event_cb;
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003191 sde_enc->crtc_frame_event_cb_data.crtc = crtc;
Alan Kwong628d19e2016-10-31 13:50:13 -04003192 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
3193}
3194
3195static void sde_encoder_frame_done_callback(
3196 struct drm_encoder *drm_enc,
3197 struct sde_encoder_phys *ready_phys, u32 event)
3198{
3199 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
3200 unsigned int i;
3201
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003202 sde_enc->crtc_frame_event_cb_data.connector =
3203 sde_enc->cur_master->connector;
3204
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003205 if (event & (SDE_ENCODER_FRAME_EVENT_DONE
3206 | SDE_ENCODER_FRAME_EVENT_ERROR
3207 | SDE_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
Lloyd Atkinsond0fedd02017-03-01 13:25:40 -05003208
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003209 if (!sde_enc->frame_busy_mask[0]) {
3210 /**
3211 * suppress frame_done without waiter,
3212 * likely autorefresh
3213 */
3214 SDE_EVT32(DRMID(drm_enc), event, ready_phys->intf_idx);
3215 return;
Alan Kwong628d19e2016-10-31 13:50:13 -04003216 }
3217
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003218 /* One of the physical encoders has become idle */
3219 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3220 if (sde_enc->phys_encs[i] == ready_phys) {
3221 clear_bit(i, sde_enc->frame_busy_mask);
3222 SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
3223 sde_enc->frame_busy_mask[0]);
3224 }
3225 }
Alan Kwong628d19e2016-10-31 13:50:13 -04003226
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003227 if (!sde_enc->frame_busy_mask[0]) {
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003228 sde_encoder_resource_control(drm_enc,
3229 SDE_ENC_RC_EVENT_FRAME_DONE);
3230
3231 if (sde_enc->crtc_frame_event_cb)
3232 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003233 &sde_enc->crtc_frame_event_cb_data,
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07003234 event);
3235 }
3236 } else {
Alan Kwong628d19e2016-10-31 13:50:13 -04003237 if (sde_enc->crtc_frame_event_cb)
3238 sde_enc->crtc_frame_event_cb(
Prabhanjan Kandula199cfcd2018-03-28 11:45:20 -07003239 &sde_enc->crtc_frame_event_cb_data, event);
Alan Kwong628d19e2016-10-31 13:50:13 -04003240 }
3241}
3242
Dhaval Patel8a7c3282017-12-05 00:41:58 -08003243int sde_encoder_idle_request(struct drm_encoder *drm_enc)
3244{
3245 struct sde_encoder_virt *sde_enc;
3246
3247 if (!drm_enc) {
3248 SDE_ERROR("invalid drm encoder\n");
3249 return -EINVAL;
3250 }
3251
3252 sde_enc = to_sde_encoder_virt(drm_enc);
3253 sde_encoder_resource_control(&sde_enc->base,
3254 SDE_ENC_RC_EVENT_ENTER_IDLE);
3255
3256 return 0;
3257}
3258
Krishna Manikandanfb29f692019-04-09 17:00:41 +05303259int sde_encoder_get_ctlstart_timeout_state(struct drm_encoder *drm_enc)
3260{
3261 struct sde_encoder_virt *sde_enc = NULL;
3262 int i, count = 0;
3263
3264 if (!drm_enc)
3265 return 0;
3266
3267 sde_enc = to_sde_encoder_virt(drm_enc);
3268
3269 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3270 count += atomic_read(&sde_enc->phys_encs[i]->ctlstart_timeout);
3271 atomic_set(&sde_enc->phys_encs[i]->ctlstart_timeout, 0);
3272 }
3273
3274 return count;
3275}
3276
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the pending kickoff count (and, for the master, the pending
 * retire fence count) before programming the ctl flush, then calls the
 * phys trigger_flush op. Encoders with the SKIP split role are not
 * flushed. The pending counters must be bumped before the hardware flush
 * so the irq handler never observes a flush with no waiter.
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		SDE_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !phys->ops.trigger_flush) {
		SDE_ERROR("missing ctl/trigger cb\n");
		return;
	}

	/* SKIP-role encoders are inactive this frame; nothing to flush */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	/* account for this kickoff before touching hardware */
	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	/* only the master tracks retire fences */
	if (phys->ops.is_master && phys->ops.is_master(phys))
		atomic_inc(&phys->pending_retire_fence_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	phys->ops.trigger_flush(phys);

	/* log the final flush mask when the ctl can report it */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				pending_kickoff_cnt, ctl->idx - CTL_0,
				ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0,
				ctl->idx - CTL_0, pending_kickoff_cnt);
}
3332
3333/**
3334 * _sde_encoder_trigger_start - trigger start for a physical encoder
3335 * phys: Pointer to physical encoder structure
3336 */
3337static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
3338{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003339 struct sde_hw_ctl *ctl;
3340
Clarence Ip110d15c2016-08-16 14:44:41 -04003341 if (!phys) {
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -04003342 SDE_ERROR("invalid argument(s)\n");
3343 return;
3344 }
3345
3346 if (!phys->hw_pp) {
3347 SDE_ERROR("invalid pingpong hw\n");
Clarence Ip110d15c2016-08-16 14:44:41 -04003348 return;
3349 }
3350
Prabhanjan Kandula77cc0ee2018-04-15 21:44:50 -07003351 /* avoid ctrl start for encoder in clone mode */
3352 if (phys->in_clone_mode)
3353 return;
3354
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05003355 ctl = phys->hw_ctl;
3356 if (phys->split_role == ENC_ROLE_SKIP) {
3357 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
3358 "skip start pp%d ctl%d\n",
3359 phys->hw_pp->idx - PINGPONG_0,
3360 ctl->idx - CTL_0);
3361 return;
3362 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003363 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
3364 phys->ops.trigger_start(phys);
3365}
3366
Alan Kwong4212dd42017-09-19 17:22:33 -04003367void sde_encoder_helper_trigger_flush(struct sde_encoder_phys *phys_enc)
3368{
3369 struct sde_hw_ctl *ctl;
3370
3371 if (!phys_enc) {
3372 SDE_ERROR("invalid encoder\n");
3373 return;
3374 }
3375
3376 ctl = phys_enc->hw_ctl;
3377 if (ctl && ctl->ops.trigger_flush)
3378 ctl->ops.trigger_flush(ctl);
3379}
3380
Clarence Ip110d15c2016-08-16 14:44:41 -04003381void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
3382{
3383 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04003384
3385 if (!phys_enc) {
3386 SDE_ERROR("invalid encoder\n");
3387 return;
3388 }
3389
3390 ctl = phys_enc->hw_ctl;
3391 if (ctl && ctl->ops.trigger_start) {
3392 ctl->ops.trigger_start(ctl);
Clarence Ip569d5af2017-10-14 21:09:01 -04003393 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx - CTL_0);
Clarence Ip110d15c2016-08-16 14:44:41 -04003394 }
Clarence Ip110d15c2016-08-16 14:44:41 -04003395}
3396
/**
 * _sde_encoder_wait_timeout - wait until info->atomic_cnt reaches zero or
 *	the deadline passes.
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @timeout_ms: total time budget in milliseconds
 * @info: wait descriptor (wait queue + atomic counter)
 *
 * Return: the wait_event_timeout() result of the final iteration: 0 on
 * timeout with the counter still nonzero, otherwise the remaining jiffies.
 *
 * The wait is retried in a loop because wait_event_timeout() can return 0
 * spuriously early relative to the wall-clock deadline (e.g. coarse jiffy
 * granularity); we keep waiting while the counter is nonzero, the last
 * wait timed out, and the ktime deadline has not truly elapsed.
 */
static int _sde_encoder_wait_timeout(int32_t drm_id, int32_t hw_id,
		s64 timeout_ms, struct sde_encoder_wait_info *info)
{
	int rc = 0;
	s64 wait_time_jiffies = msecs_to_jiffies(timeout_ms);
	ktime_t cur_ktime;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), timeout_ms);

	do {
		rc = wait_event_timeout(*(info->wq),
				atomic_read(info->atomic_cnt) == 0,
				wait_time_jiffies);
		cur_ktime = ktime_get();

		SDE_EVT32(drm_id, hw_id, rc, ktime_to_ms(cur_ktime),
				timeout_ms, atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(ktime_compare_safe(exp_ktime, cur_ktime) > 0));

	return rc;
}
3418
/**
 * sde_encoder_helper_wait_event_timeout - wait for an encoder event with
 *	fault tolerance for delayed timer irqs.
 * @drm_id: drm object id, for event logging only
 * @hw_id: hardware block id, for event logging only
 * @info: wait descriptor (wait queue, atomic counter, timeout_ms)
 *
 * Return: result of the last _sde_encoder_wait_timeout() call (0 means
 * timed out with the counter still pending).
 */
int sde_encoder_helper_wait_event_timeout(int32_t drm_id, int32_t hw_id,
		struct sde_encoder_wait_info *info)
{
	int rc;
	ktime_t exp_ktime = ktime_add_ms(ktime_get(), info->timeout_ms);

	rc = _sde_encoder_wait_timeout(drm_id, hw_id, info->timeout_ms, info);

	/**
	 * handle disabled irq case where timer irq is also delayed.
	 * wait for additional timeout of FAULT_TOLERENCE_WAIT_IN_MS
	 * if it event_timeout expired late detected.
	 */
	if (atomic_read(info->atomic_cnt) && (!rc) &&
			(ktime_compare_safe(ktime_get(), ktime_add_ms(exp_ktime,
			FAULT_TOLERENCE_DELTA_IN_MS)) > 0))
		rc = _sde_encoder_wait_timeout(drm_id, hw_id,
				FAULT_TOLERENCE_WAIT_IN_MS, info);

	return rc;
}
3440
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003441void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
3442{
3443 struct sde_encoder_virt *sde_enc;
3444 struct sde_connector *sde_con;
3445 void *sde_con_disp;
3446 struct sde_hw_ctl *ctl;
3447 int rc;
3448
3449 if (!phys_enc) {
3450 SDE_ERROR("invalid encoder\n");
3451 return;
3452 }
3453 sde_enc = to_sde_encoder_virt(phys_enc->parent);
3454 ctl = phys_enc->hw_ctl;
3455
3456 if (!ctl || !ctl->ops.reset)
3457 return;
3458
3459 SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
3460 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);
3461
3462 if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
3463 phys_enc->connector) {
3464 sde_con = to_sde_connector(phys_enc->connector);
3465 sde_con_disp = sde_connector_get_display(phys_enc->connector);
3466
3467 if (sde_con->ops.soft_reset) {
3468 rc = sde_con->ops.soft_reset(sde_con_disp);
3469 if (rc) {
3470 SDE_ERROR_ENC(sde_enc,
3471 "connector soft reset failure\n");
Dhaval Patel7ca510f2017-07-12 12:57:37 -07003472 SDE_DBG_DUMP("all", "dbg_bus", "vbif_dbg_bus",
3473 "panic");
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003474 }
3475 }
3476 }
3477
Lloyd Atkinson8c49c582016-11-18 14:23:54 -05003478 phys_enc->enable_state = SDE_ENC_ENABLED;
3479}
3480
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 *
 * Sequence: (1) reg-dma LUT flush outside the spinlock (it may block);
 * (2) under the encoder spinlock, mark busy and flush each active phys
 * (combining masks into one master flush when single-flush is required)
 * and trigger ctl start on the master; (3) drop any temporarily elevated
 * AHB bus vote back to low.
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;
	struct msm_drm_private *priv = NULL;
	struct sde_kms *sde_kms = NULL;
	bool is_vid_mode = false;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	is_vid_mode = sde_enc->disp_info.capabilities &
			MSM_DISPLAY_CAP_VID_MODE;

	pending_flush = 0x0;

	/*
	 * Trigger LUT DMA flush, this might need a wait, so we need
	 * to do this outside of the atomic context
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		/* make reg dma kickoff as blocking for video-mode */
		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl,
					is_vid_mode);
	}

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/*
		 * flush now unless this phys requires a single combined
		 * flush, in which case accumulate its mask for the master
		 */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	/* drop the temporarily elevated AHB vote back to the low level */
	if (sde_enc->elevated_ahb_vote) {
		priv = sde_enc->base.dev->dev_private;
		if (priv != NULL) {
			sde_kms = to_sde_kms(priv->kms);
			if (sde_kms != NULL) {
				sde_power_scale_reg_bus(&priv->phandle,
						sde_kms->core_client,
						VOTE_INDEX_LOW,
						false);
			}
		}
		sde_enc->elevated_ahb_vote = false;
	}
}
3591
/**
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - swap the two
 *	physical encoders' interfaces for right-only ppsplit updates.
 * @drm_enc: Pointer to drm encoder structure
 * @affected_displays: in/out bitmask of displays touched by this update
 * @num_active_phys: number of active physical encoders for this update
 *
 * Only applies to the PPSPLIT topology; no-op otherwise. Tracks the swap
 * in sde_enc->intfs_swapped and restores the original assignment on the
 * next left-only or full update.
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
3650
/*
 * _sde_encoder_update_master - reassign solo/master/slave roles across the
 *	physical encoders based on which displays this commit touches
 * @drm_enc: encoder pointer
 * @params: kickoff parameters; bit i of affected_displays set means the
 *	display driven by phys enc i has damage in this commit
 *
 * Side effect: updates sde_enc->cur_master to whichever phys enc is
 * assigned the SOLO or MASTER role. No-op for single-phys encoders.
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* single phys enc is implicitly solo; nothing to reassign */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);
	SDE_EVT32_VERBOSE(DRMID(drm_enc), params->affected_displays,
			num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role || !phys->hw_pp)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		/*
		 * Role selection order matters: a lone active phys becomes
		 * SOLO; otherwise the first active phys in index order
		 * becomes MASTER, remaining active ones SLAVE, and inactive
		 * ones are SKIPped for this commit.
		 */
		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
3713
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303714bool sde_encoder_check_mode(struct drm_encoder *drm_enc, u32 mode)
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003715{
3716 struct sde_encoder_virt *sde_enc;
3717 struct msm_display_info *disp_info;
3718
3719 if (!drm_enc) {
3720 SDE_ERROR("invalid encoder\n");
3721 return false;
3722 }
3723
3724 sde_enc = to_sde_encoder_virt(drm_enc);
3725 disp_info = &sde_enc->disp_info;
3726
Sravanthi Kollukuduru59d431a2017-07-05 00:10:41 +05303727 return (disp_info->capabilities & mode);
Veera Sundaram Sankaran2c748e62017-06-13 17:01:48 -07003728}
3729
Dhaval Patel0e558f42017-04-30 00:51:40 -07003730void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
3731{
3732 struct sde_encoder_virt *sde_enc;
3733 struct sde_encoder_phys *phys;
3734 unsigned int i;
3735 struct sde_hw_ctl *ctl;
3736 struct msm_display_info *disp_info;
3737
3738 if (!drm_enc) {
3739 SDE_ERROR("invalid encoder\n");
3740 return;
3741 }
3742 sde_enc = to_sde_encoder_virt(drm_enc);
3743 disp_info = &sde_enc->disp_info;
3744
3745 for (i = 0; i < sde_enc->num_phys_encs; i++) {
3746 phys = sde_enc->phys_encs[i];
3747
3748 if (phys && phys->hw_ctl) {
3749 ctl = phys->hw_ctl;
3750 if (ctl->ops.clear_pending_flush)
3751 ctl->ops.clear_pending_flush(ctl);
3752
3753 /* update only for command mode primary ctl */
3754 if ((phys == sde_enc->cur_master) &&
3755 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
3756 && ctl->ops.trigger_pending)
3757 ctl->ops.trigger_pending(ctl);
3758 }
3759 }
3760}
3761
Ping Li8430ee12017-02-24 14:14:44 -08003762static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
3763{
3764 void *dither_cfg;
Ping Li16162692018-05-08 14:13:46 -07003765 int ret = 0, rc, i = 0;
Ping Li8430ee12017-02-24 14:14:44 -08003766 size_t len = 0;
3767 enum sde_rm_topology_name topology;
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003768 struct drm_encoder *drm_enc;
3769 struct msm_mode_info mode_info;
3770 struct msm_display_dsc_info *dsc = NULL;
3771 struct sde_encoder_virt *sde_enc;
Ping Li16162692018-05-08 14:13:46 -07003772 struct sde_hw_pingpong *hw_pp;
Ping Li8430ee12017-02-24 14:14:44 -08003773
3774 if (!phys || !phys->connector || !phys->hw_pp ||
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003775 !phys->hw_pp->ops.setup_dither || !phys->parent)
Ping Li8430ee12017-02-24 14:14:44 -08003776 return;
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003777
Ping Li8430ee12017-02-24 14:14:44 -08003778 topology = sde_connector_get_topology_name(phys->connector);
3779 if ((topology == SDE_RM_TOPOLOGY_PPSPLIT) &&
3780 (phys->split_role == ENC_ROLE_SLAVE))
3781 return;
3782
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003783 drm_enc = phys->parent;
3784 sde_enc = to_sde_encoder_virt(drm_enc);
3785 rc = _sde_encoder_get_mode_info(&sde_enc->base, &mode_info);
3786 if (rc) {
3787 SDE_ERROR_ENC(sde_enc, "failed to get mode info\n");
3788 return;
3789 }
3790
3791 dsc = &mode_info.comp_info.dsc_info;
3792 /* disable dither for 10 bpp or 10bpc dsc config */
3793 if (dsc->bpp == 10 || dsc->bpc == 10) {
3794 phys->hw_pp->ops.setup_dither(phys->hw_pp, NULL, 0);
Ping Li16162692018-05-08 14:13:46 -07003795 return;
3796 }
3797
3798 ret = sde_connector_get_dither_cfg(phys->connector,
3799 phys->connector->state, &dither_cfg, &len);
3800 if (ret)
3801 return;
3802
3803 if (TOPOLOGY_DUALPIPE_MERGE_MODE(topology)) {
3804 for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
3805 hw_pp = sde_enc->hw_pp[i];
3806 if (hw_pp) {
3807 phys->hw_pp->ops.setup_dither(hw_pp, dither_cfg,
3808 len);
3809 }
3810 }
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003811 } else {
Ping Li16162692018-05-08 14:13:46 -07003812 phys->hw_pp->ops.setup_dither(phys->hw_pp, dither_cfg, len);
Dhaval Patelc35d9bc2018-03-06 16:39:07 -08003813 }
Ping Li8430ee12017-02-24 14:14:44 -08003814}
3815
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003816static u32 _sde_encoder_calculate_linetime(struct sde_encoder_virt *sde_enc,
3817 struct drm_display_mode *mode)
3818{
3819 u64 pclk_rate;
3820 u32 pclk_period;
3821 u32 line_time;
3822
3823 /*
3824 * For linetime calculation, only operate on master encoder.
3825 */
3826 if (!sde_enc->cur_master)
3827 return 0;
3828
3829 if (!sde_enc->cur_master->ops.get_line_count) {
3830 SDE_ERROR("get_line_count function not defined\n");
3831 return 0;
3832 }
3833
3834 pclk_rate = mode->clock; /* pixel clock in kHz */
3835 if (pclk_rate == 0) {
3836 SDE_ERROR("pclk is 0, cannot calculate line time\n");
3837 return 0;
3838 }
3839
3840 pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
3841 if (pclk_period == 0) {
3842 SDE_ERROR("pclk period is 0\n");
3843 return 0;
3844 }
3845
3846 /*
3847 * Line time calculation based on Pixel clock and HTOTAL.
3848 * Final unit is in ns.
3849 */
3850 line_time = (pclk_period * mode->htotal) / 1000;
3851 if (line_time == 0) {
3852 SDE_ERROR("line time calculation is 0\n");
3853 return 0;
3854 }
3855
3856 SDE_DEBUG_ENC(sde_enc,
3857 "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
3858 pclk_rate, pclk_period, line_time);
3859
3860 return line_time;
3861}
3862
3863static int _sde_encoder_wakeup_time(struct drm_encoder *drm_enc,
3864 ktime_t *wakeup_time)
3865{
3866 struct drm_display_mode *mode;
3867 struct sde_encoder_virt *sde_enc;
3868 u32 cur_line;
3869 u32 line_time;
3870 u32 vtotal, time_to_vsync;
3871 ktime_t cur_time;
3872
3873 sde_enc = to_sde_encoder_virt(drm_enc);
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003874 mode = &sde_enc->cur_master->cached_mode;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003875
3876 line_time = _sde_encoder_calculate_linetime(sde_enc, mode);
3877 if (!line_time)
3878 return -EINVAL;
3879
3880 cur_line = sde_enc->cur_master->ops.get_line_count(sde_enc->cur_master);
3881
3882 vtotal = mode->vtotal;
3883 if (cur_line >= vtotal)
3884 time_to_vsync = line_time * vtotal;
3885 else
3886 time_to_vsync = line_time * (vtotal - cur_line);
3887
3888 if (time_to_vsync == 0) {
3889 SDE_ERROR("time to vsync should not be zero, vtotal=%d\n",
3890 vtotal);
3891 return -EINVAL;
3892 }
3893
3894 cur_time = ktime_get();
3895 *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
3896
3897 SDE_DEBUG_ENC(sde_enc,
3898 "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
3899 cur_line, vtotal, time_to_vsync,
3900 ktime_to_ms(cur_time),
3901 ktime_to_ms(*wakeup_time));
3902 return 0;
3903}
3904
3905static void sde_encoder_vsync_event_handler(unsigned long data)
3906{
3907 struct drm_encoder *drm_enc = (struct drm_encoder *) data;
3908 struct sde_encoder_virt *sde_enc;
3909 struct msm_drm_private *priv;
3910 struct msm_drm_thread *event_thread;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003911
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003912 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
3913 SDE_ERROR("invalid encoder parameters\n");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003914 return;
3915 }
3916
3917 sde_enc = to_sde_encoder_virt(drm_enc);
3918 priv = drm_enc->dev->dev_private;
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003919 if (!sde_enc->crtc) {
3920 SDE_ERROR("invalid crtc");
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003921 return;
3922 }
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003923
3924 if (sde_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
3925 SDE_ERROR("invalid crtc index:%u\n",
3926 sde_enc->crtc->index);
3927 return;
3928 }
3929 event_thread = &priv->event_thread[sde_enc->crtc->index];
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003930 if (!event_thread) {
3931 SDE_ERROR("event_thread not found for crtc:%d\n",
Harsh Sahu1e52ed02017-11-28 14:34:22 -08003932 sde_enc->crtc->index);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003933 return;
3934 }
3935
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303936 kthread_queue_work(&event_thread->worker,
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003937 &sde_enc->vsync_event_work);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003938}
3939
Dhaval Patel222023e2018-02-27 12:24:07 -08003940static void sde_encoder_esd_trigger_work_handler(struct kthread_work *work)
3941{
3942 struct sde_encoder_virt *sde_enc = container_of(work,
3943 struct sde_encoder_virt, esd_trigger_work);
3944
3945 if (!sde_enc) {
3946 SDE_ERROR("invalid sde encoder\n");
3947 return;
3948 }
3949
3950 sde_encoder_resource_control(&sde_enc->base,
3951 SDE_ENC_RC_EVENT_KICKOFF);
3952}
3953
Jeykumar Sankaranf8298f32017-12-08 10:39:51 -08003954static void sde_encoder_input_event_work_handler(struct kthread_work *work)
3955{
3956 struct sde_encoder_virt *sde_enc = container_of(work,
3957 struct sde_encoder_virt, input_event_work);
3958
3959 if (!sde_enc) {
3960 SDE_ERROR("invalid sde encoder\n");
3961 return;
3962 }
3963
3964 sde_encoder_resource_control(&sde_enc->base,
3965 SDE_ENC_RC_EVENT_EARLY_WAKEUP);
3966}
3967
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003968static void sde_encoder_vsync_event_work_handler(struct kthread_work *work)
3969{
3970 struct sde_encoder_virt *sde_enc = container_of(work,
3971 struct sde_encoder_virt, vsync_event_work);
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303972 bool autorefresh_enabled = false;
3973 int rc = 0;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04003974 ktime_t wakeup_time;
3975
3976 if (!sde_enc) {
3977 SDE_ERROR("invalid sde encoder\n");
3978 return;
3979 }
3980
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303981 rc = _sde_encoder_power_enable(sde_enc, true);
3982 if (rc) {
3983 SDE_ERROR_ENC(sde_enc, "sde enc power enabled failed:%d\n", rc);
3984 return;
3985 }
3986
3987 if (sde_enc->cur_master &&
3988 sde_enc->cur_master->ops.is_autorefresh_enabled)
3989 autorefresh_enabled =
3990 sde_enc->cur_master->ops.is_autorefresh_enabled(
3991 sde_enc->cur_master);
3992
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303993 /* Update timer if autorefresh is enabled else return */
3994 if (!autorefresh_enabled)
Lloyd Atkinson349f7412017-11-07 16:55:57 -05003995 goto exit;
Jayant Shekhar12d908f2017-10-10 12:11:48 +05303996
Lloyd Atkinson349f7412017-11-07 16:55:57 -05003997 rc = _sde_encoder_wakeup_time(&sde_enc->base, &wakeup_time);
3998 if (rc)
3999 goto exit;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004000
4001 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
4002 mod_timer(&sde_enc->vsync_event_timer,
4003 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
Lloyd Atkinson349f7412017-11-07 16:55:57 -05004004
4005exit:
4006 _sde_encoder_power_enable(sde_enc, false);
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004007}
4008
/*
 * sde_encoder_poll_line_counts - busy-poll the master's line counter until
 *	it wraps (i.e. a new frame has started) or a ~50ms timeout expires
 * @drm_enc: encoder pointer
 *
 * Detection relies on the counter decreasing between two consecutive
 * samples, which only happens at a frame rollover.
 *
 * Returns: 0 once a rollover is observed, -EINVAL on bad arguments or a
 * missing get_line_count op, -ETIMEDOUT if no rollover is seen in time.
 */
int sde_encoder_poll_line_counts(struct drm_encoder *drm_enc)
{
	static const uint64_t timeout_us = 50000;
	static const uint64_t sleep_us = 20;
	struct sde_encoder_virt *sde_enc;
	ktime_t cur_ktime, exp_ktime;
	uint32_t line_count, tmp, i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	if (!sde_enc->cur_master ||
			!sde_enc->cur_master->ops.get_line_count) {
		SDE_DEBUG_ENC(sde_enc, "can't get master line count\n");
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_ERROR);
		return -EINVAL;
	}

	/* absolute deadline; the loop bound below is a secondary cap */
	exp_ktime = ktime_add_ms(ktime_get(), timeout_us / 1000);

	line_count = sde_enc->cur_master->ops.get_line_count(
			sde_enc->cur_master);

	/* iteration cap (2x timeout/sleep) guards against usleep under-runs */
	for (i = 0; i < (timeout_us * 2 / sleep_us); ++i) {
		tmp = line_count;
		line_count = sde_enc->cur_master->ops.get_line_count(
				sde_enc->cur_master);
		/* counter went backwards: the frame rolled over */
		if (line_count < tmp) {
			SDE_EVT32(DRMID(drm_enc), line_count);
			return 0;
		}

		cur_ktime = ktime_get();
		if (ktime_compare_safe(exp_ktime, cur_ktime) <= 0)
			break;

		usleep_range(sleep_us / 2, sleep_us);
	}

	SDE_EVT32(DRMID(drm_enc), line_count, SDE_EVTLOG_ERROR);
	return -ETIMEDOUT;
}
4053
/*
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for the
 *	upcoming frame kickoff
 * @drm_enc: encoder pointer
 * @params: per-kickoff parameters (affected displays, primary flag, etc.)
 *
 * Sequencing: per-phys prepare (may wait on the previous kickoff) and
 * dither setup, then resource-control kickoff, then an ordered hw reset of
 * all phys encs if any requested one, then master/ROI updates, connector
 * pre-kickoff, and DSC setup (skipped while continuous splash is active).
 *
 * Per-phys/connector/DSC errors are accumulated into the return value but
 * do not abort the sequence; only a resource-control failure returns early.
 *
 * Returns: 0 on success, negative error code otherwise.
 */
int sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	struct sde_kms *sde_kms = NULL;
	struct msm_drm_private *priv = NULL;
	bool needs_hw_reset = false;
	uint32_t ln_cnt1, ln_cnt2;
	unsigned int i;
	int rc, ret = 0;

	if (!drm_enc || !params || !drm_enc->dev ||
		!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid args\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* save this for later, in case of errors */
	if (sde_enc->cur_master && sde_enc->cur_master->ops.get_wr_line_count)
		ln_cnt1 = sde_enc->cur_master->ops.get_wr_line_count(
				sde_enc->cur_master);
	else
		ln_cnt1 = -EINVAL;

	/* prepare for next kickoff, may include waiting on previous kickoff */
	SDE_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		params->is_primary = sde_enc->disp_info.is_primary;
		if (phys) {
			if (phys->ops.prepare_for_kickoff) {
				rc = phys->ops.prepare_for_kickoff(
						phys, params);
				/* remember failure but keep preparing rest */
				if (rc)
					ret = rc;
			}
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
			_sde_encoder_setup_dither(phys);
		}
	}
	SDE_ATRACE_END("enc_prepare_for_kickoff");

	rc = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "resource kickoff failed rc %d\n", rc);
		return rc;
	}

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		/* query line count before cur_master is updated */
		if (sde_enc->cur_master &&
				sde_enc->cur_master->ops.get_wr_line_count)
			ln_cnt2 = sde_enc->cur_master->ops.get_wr_line_count(
					sde_enc->cur_master);
		else
			ln_cnt2 = -EINVAL;

		SDE_EVT32(DRMID(drm_enc), ln_cnt1, ln_cnt2,
				SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	/* reassign solo/master/slave roles for this commit */
	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
			ret = rc;
		}
	}

	/* skip DSC programming while booting on the continuous splash path */
	if (_sde_encoder_is_dsc_enabled(drm_enc) &&
		!sde_kms->splash_data.cont_splash_en) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc) {
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
			ret = rc;
		}
	}

	return ret;
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004154
Clarence Ip662698e2017-09-12 18:34:16 -04004155/**
4156 * _sde_encoder_reset_ctl_hw - reset h/w configuration for all ctl's associated
4157 * with the specified encoder, and unstage all pipes from it
4158 * @encoder: encoder pointer
4159 * Returns: 0 on success
4160 */
4161static int _sde_encoder_reset_ctl_hw(struct drm_encoder *drm_enc)
4162{
4163 struct sde_encoder_virt *sde_enc;
4164 struct sde_encoder_phys *phys;
4165 unsigned int i;
4166 int rc = 0;
4167
4168 if (!drm_enc) {
4169 SDE_ERROR("invalid encoder\n");
4170 return -EINVAL;
4171 }
4172
4173 sde_enc = to_sde_encoder_virt(drm_enc);
4174
4175 SDE_ATRACE_BEGIN("encoder_release_lm");
4176 SDE_DEBUG_ENC(sde_enc, "\n");
4177
4178 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4179 phys = sde_enc->phys_encs[i];
4180 if (!phys)
4181 continue;
4182
4183 SDE_EVT32(DRMID(drm_enc), phys->intf_idx - INTF_0);
4184
4185 rc = sde_encoder_helper_reset_mixers(phys, NULL);
4186 if (rc)
4187 SDE_EVT32(DRMID(drm_enc), rc, SDE_EVTLOG_ERROR);
4188 }
4189
4190 SDE_ATRACE_END("encoder_release_lm");
4191 return rc;
4192}
4193
4194void sde_encoder_kickoff(struct drm_encoder *drm_enc, bool is_error)
Alan Kwong628d19e2016-10-31 13:50:13 -04004195{
4196 struct sde_encoder_virt *sde_enc;
4197 struct sde_encoder_phys *phys;
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004198 ktime_t wakeup_time;
Alan Kwong628d19e2016-10-31 13:50:13 -04004199 unsigned int i;
4200
4201 if (!drm_enc) {
4202 SDE_ERROR("invalid encoder\n");
4203 return;
4204 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07004205 SDE_ATRACE_BEGIN("encoder_kickoff");
Alan Kwong628d19e2016-10-31 13:50:13 -04004206 sde_enc = to_sde_encoder_virt(drm_enc);
4207
4208 SDE_DEBUG_ENC(sde_enc, "\n");
4209
Clarence Ip662698e2017-09-12 18:34:16 -04004210 /* create a 'no pipes' commit to release buffers on errors */
4211 if (is_error)
4212 _sde_encoder_reset_ctl_hw(drm_enc);
4213
Alan Kwong628d19e2016-10-31 13:50:13 -04004214 /* All phys encs are ready to go, trigger the kickoff */
Clarence Ip110d15c2016-08-16 14:44:41 -04004215 _sde_encoder_kickoff_phys(sde_enc);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004216
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05004217 /* allow phys encs to handle any post-kickoff business */
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004218 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05004219 phys = sde_enc->phys_encs[i];
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004220 if (phys && phys->ops.handle_post_kickoff)
4221 phys->ops.handle_post_kickoff(phys);
4222 }
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004223
4224 if (sde_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
Tharun Raj Soma88b6dfc2018-05-11 14:19:49 +05304225 sde_enc->disp_info.is_primary &&
Benjamin Chan9cd866d2017-08-15 14:56:34 -04004226 !_sde_encoder_wakeup_time(drm_enc, &wakeup_time)) {
4227 SDE_EVT32_VERBOSE(ktime_to_ms(wakeup_time));
4228 mod_timer(&sde_enc->vsync_event_timer,
4229 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
4230 }
4231
Narendra Muppalla77b32932017-05-10 13:53:11 -07004232 SDE_ATRACE_END("encoder_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004233}
4234
Clarence Ip662698e2017-09-12 18:34:16 -04004235int sde_encoder_helper_reset_mixers(struct sde_encoder_phys *phys_enc,
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004236 struct drm_framebuffer *fb)
4237{
4238 struct drm_encoder *drm_enc;
4239 struct sde_hw_mixer_cfg mixer;
4240 struct sde_rm_hw_iter lm_iter;
4241 bool lm_valid = false;
4242
4243 if (!phys_enc || !phys_enc->parent) {
4244 SDE_ERROR("invalid encoder\n");
4245 return -EINVAL;
4246 }
4247
4248 drm_enc = phys_enc->parent;
4249 memset(&mixer, 0, sizeof(mixer));
4250
4251 /* reset associated CTL/LMs */
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004252 if (phys_enc->hw_ctl->ops.clear_all_blendstages)
4253 phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
4254
4255 sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
4256 while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
4257 struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;
4258
4259 if (!hw_lm)
4260 continue;
4261
4262 /* need to flush LM to remove it */
4263 if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
4264 phys_enc->hw_ctl->ops.update_pending_flush)
4265 phys_enc->hw_ctl->ops.update_pending_flush(
4266 phys_enc->hw_ctl,
4267 phys_enc->hw_ctl->ops.get_bitmask_mixer(
4268 phys_enc->hw_ctl, hw_lm->idx));
4269
4270 if (fb) {
4271 /* assume a single LM if targeting a frame buffer */
4272 if (lm_valid)
4273 continue;
4274
4275 mixer.out_height = fb->height;
4276 mixer.out_width = fb->width;
4277
4278 if (hw_lm->ops.setup_mixer_out)
4279 hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
4280 }
4281
4282 lm_valid = true;
4283
4284 /* only enable border color on LM */
4285 if (phys_enc->hw_ctl->ops.setup_blendstage)
4286 phys_enc->hw_ctl->ops.setup_blendstage(
Nirmal Abraham39621b02019-05-03 14:46:30 +05304287 phys_enc->hw_ctl, hw_lm->idx,
4288 hw_lm->cfg.flags, NULL);
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004289 }
4290
4291 if (!lm_valid) {
Clarence Ip662698e2017-09-12 18:34:16 -04004292 SDE_ERROR_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
Clarence Ip9c65f7b2017-03-20 06:48:15 -07004293 return -EFAULT;
4294 }
4295 return 0;
4296}
4297
Lloyd Atkinsone123c172017-02-27 13:19:08 -05004298void sde_encoder_prepare_commit(struct drm_encoder *drm_enc)
4299{
4300 struct sde_encoder_virt *sde_enc;
4301 struct sde_encoder_phys *phys;
4302 int i;
4303
4304 if (!drm_enc) {
4305 SDE_ERROR("invalid encoder\n");
4306 return;
4307 }
4308 sde_enc = to_sde_encoder_virt(drm_enc);
4309
4310 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4311 phys = sde_enc->phys_encs[i];
4312 if (phys && phys->ops.prepare_commit)
4313 phys->ops.prepare_commit(phys);
4314 }
4315}
4316
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07004317#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004318static int _sde_encoder_status_show(struct seq_file *s, void *data)
4319{
4320 struct sde_encoder_virt *sde_enc;
4321 int i;
4322
4323 if (!s || !s->private)
4324 return -EINVAL;
4325
4326 sde_enc = s->private;
4327
4328 mutex_lock(&sde_enc->enc_lock);
4329 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4330 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4331
4332 if (!phys)
4333 continue;
4334
4335 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
4336 phys->intf_idx - INTF_0,
4337 atomic_read(&phys->vsync_cnt),
4338 atomic_read(&phys->underrun_cnt));
4339
4340 switch (phys->intf_mode) {
4341 case INTF_MODE_VIDEO:
4342 seq_puts(s, "mode: video\n");
4343 break;
4344 case INTF_MODE_CMD:
4345 seq_puts(s, "mode: command\n");
4346 break;
4347 case INTF_MODE_WB_BLOCK:
4348 seq_puts(s, "mode: wb block\n");
4349 break;
4350 case INTF_MODE_WB_LINE:
4351 seq_puts(s, "mode: wb line\n");
4352 break;
4353 default:
4354 seq_puts(s, "mode: ???\n");
4355 break;
4356 }
4357 }
4358 mutex_unlock(&sde_enc->enc_lock);
4359
4360 return 0;
4361}
4362
4363static int _sde_encoder_debugfs_status_open(struct inode *inode,
4364 struct file *file)
4365{
4366 return single_open(file, _sde_encoder_status_show, inode->i_private);
4367}
4368
/*
 * _sde_encoder_misr_setup - debugfs write handler for "misr_data"
 * @file: debugfs file; private_data holds the sde_encoder_virt
 * @user_buf: userspace buffer expected to contain "<enable> <frame_count>"
 * @count: number of bytes written by userspace
 * @ppos: file offset (unused)
 *
 * Parses the enable flag and frame count, then programs MISR on every
 * physical encoder that supports it. Holds a power reference around the
 * register writes and the encoder lock around state updates.
 *
 * Returns: count on success, negative error code otherwise.
 */
static ssize_t _sde_encoder_misr_setup(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, rc;
	char buf[MISR_BUFF_SIZE + 1];
	size_t buff_copy;
	u32 frame_count, enable;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* bound the copy so there is always room for the terminator */
	buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
	if (copy_from_user(buf, user_buf, buff_copy))
		return -EINVAL;

	buf[buff_copy] = 0; /* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EINVAL;

	/* keep hardware powered while MISR registers are programmed */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	sde_enc->misr_enable = enable;
	sde_enc->misr_frame_count = frame_count;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.setup_misr)
			continue;

		phys->ops.setup_misr(phys, enable, frame_count);
	}
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);

	return count;
}
4412
/*
 * _sde_encoder_misr_read - debugfs read handler for "misr_data"
 * @file: debugfs file; private_data holds the sde_encoder_virt
 * @user_buff: userspace destination buffer
 * @count: size of the userspace buffer
 * @ppos: file offset; non-zero reads return 0 (single-shot read)
 *
 * Reports "disabled" when MISR is off, "unsupported" for non-video-mode
 * capable displays, otherwise one MISR signature per interface. Holds a
 * power reference while collecting values from hardware.
 *
 * Returns: number of bytes copied, 0 at EOF or when the user buffer is
 * too small, or a negative error code.
 */
static ssize_t _sde_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	/* single-shot read: any follow-up read reports EOF */
	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	if (!sde_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (sde_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		/* any capability beyond pure video mode is unsupported */
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
				phys->ops.collect_misr(phys));
	}

buff_check:
	/* report EOF rather than truncating into a too-small user buffer */
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);
	return len;
}
4475
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004476static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004477{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004478 struct sde_encoder_virt *sde_enc;
4479 struct msm_drm_private *priv;
4480 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07004481 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004482
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004483 static const struct file_operations debugfs_status_fops = {
4484 .open = _sde_encoder_debugfs_status_open,
4485 .read = seq_read,
4486 .llseek = seq_lseek,
4487 .release = single_release,
4488 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304489
4490 static const struct file_operations debugfs_misr_fops = {
4491 .open = simple_open,
4492 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004493 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304494 };
4495
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004496 char name[SDE_NAME_SIZE];
4497
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004498 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004499 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004500 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004501 }
4502
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004503 sde_enc = to_sde_encoder_virt(drm_enc);
4504 priv = drm_enc->dev->dev_private;
4505 sde_kms = to_sde_kms(priv->kms);
4506
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004507 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
4508
4509 /* create overall sub-directory for the encoder */
4510 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07004511 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004512 if (!sde_enc->debugfs_root)
4513 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304514
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004515 /* don't error check these */
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004516 debugfs_create_file("status", 0600,
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004517 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05304518
Lloyd Atkinson8de415a2017-05-23 11:31:16 -04004519 debugfs_create_file("misr_data", 0600,
Dhaval Patelf9245d62017-03-28 16:24:00 -07004520 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004521
Alan Kwongf2debb02017-04-05 06:19:29 -07004522 for (i = 0; i < sde_enc->num_phys_encs; i++)
4523 if (sde_enc->phys_encs[i] &&
4524 sde_enc->phys_encs[i]->ops.late_register)
4525 sde_enc->phys_encs[i]->ops.late_register(
4526 sde_enc->phys_encs[i],
4527 sde_enc->debugfs_root);
4528
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07004529 return 0;
4530}
4531
4532static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
4533{
4534 struct sde_encoder_virt *sde_enc;
4535
4536 if (!drm_enc)
4537 return;
4538
4539 sde_enc = to_sde_encoder_virt(drm_enc);
4540 debugfs_remove_recursive(sde_enc->debugfs_root);
4541}
4542#else
/* debugfs disabled: stub reports success without creating any entries */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
4547
/* debugfs disabled: nothing was created, so nothing to clean up */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
4551#endif
4552
/* drm_encoder_funcs.late_register hook: create debugfs entries once the
 * encoder is registered with the DRM core
 */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
4557
/* drm_encoder_funcs.early_unregister hook: remove debugfs entries before
 * the encoder is unregistered
 */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
4562
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004563static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04004564 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004565 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004566 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004567{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004568 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004569
Clarence Ip19af1362016-09-23 14:57:51 -04004570 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004571
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004572 /*
4573 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
4574 * in this function, check up-front.
4575 */
4576 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
4577 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004578 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004579 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004580 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004581 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04004582
Clarence Ipa4039322016-07-15 16:23:59 -04004583 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004584 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004585
4586 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004587 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004588 PTR_ERR(enc));
4589 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4590 }
4591
4592 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4593 ++sde_enc->num_phys_encs;
4594 }
4595
Clarence Ipa4039322016-07-15 16:23:59 -04004596 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004597 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004598
4599 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004600 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04004601 PTR_ERR(enc));
4602 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4603 }
4604
4605 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4606 ++sde_enc->num_phys_encs;
4607 }
4608
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004609 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004610}
4611
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004612static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
4613 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04004614{
4615 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04004616
Clarence Ip19af1362016-09-23 14:57:51 -04004617 if (!sde_enc) {
4618 SDE_ERROR("invalid encoder\n");
4619 return -EINVAL;
4620 }
4621
4622 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04004623
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004624 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004625 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004626 sde_enc->num_phys_encs);
4627 return -EINVAL;
4628 }
4629
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004630 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004631
4632 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04004633 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04004634 PTR_ERR(enc));
4635 return enc == 0 ? -EINVAL : PTR_ERR(enc);
4636 }
4637
4638 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
4639 ++sde_enc->num_phys_encs;
4640
4641 return 0;
4642}
4643
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004644static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004645 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04004646 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004647 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004648{
4649 int ret = 0;
4650 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004651 enum sde_intf_type intf_type;
4652 struct sde_encoder_virt_ops parent_ops = {
4653 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07004654 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04004655 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004656 };
4657 struct sde_enc_phys_init_params phys_params;
4658
Clarence Ip19af1362016-09-23 14:57:51 -04004659 if (!sde_enc || !sde_kms) {
4660 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
4661 sde_enc != 0, sde_kms != 0);
4662 return -EINVAL;
4663 }
4664
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004665 memset(&phys_params, 0, sizeof(phys_params));
4666 phys_params.sde_kms = sde_kms;
4667 phys_params.parent = &sde_enc->base;
4668 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04004669 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Raviteja Tamatam3ea60b82018-04-27 15:41:18 +05304670 phys_params.vblank_ctl_lock = &sde_enc->vblank_ctl_lock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004671
Clarence Ip19af1362016-09-23 14:57:51 -04004672 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004673
Clarence Ipa4039322016-07-15 16:23:59 -04004674 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004675 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
4676 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04004677 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004678 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4679 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07004680 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
4681 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
4682 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04004683 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
4684 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
4685 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004686 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04004687 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004688 return -EINVAL;
4689 }
4690
Clarence Ip88270a62016-06-26 10:09:34 -04004691 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004692
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004693 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
4694
Clarence Ip19af1362016-09-23 14:57:51 -04004695 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004696
Dhaval Patele17e0ee2017-08-23 18:01:42 -07004697 if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
4698 (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
Veera Sundaram Sankaran42ac38d2018-07-06 12:42:04 -07004699 sde_enc->idle_pc_enabled = sde_kms->catalog->has_idle_pc;
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07004700
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004701 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04004702 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004703 /*
4704 * Left-most tile is at index 0, content is controller id
4705 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
4706 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
4707 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04004708 u32 controller_id = disp_info->h_tile_instance[i];
4709
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004710 if (disp_info->num_of_h_tiles > 1) {
4711 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004712 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004713 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004714 phys_params.split_role = ENC_ROLE_SLAVE;
4715 } else {
4716 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004717 }
4718
Clarence Ip19af1362016-09-23 14:57:51 -04004719 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004720 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004721
Alan Kwongbb27c092016-07-20 16:41:25 -04004722 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004723 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004724 phys_params.wb_idx = sde_encoder_get_wb(
4725 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04004726 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004727 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004728 SDE_ERROR_ENC(sde_enc,
4729 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004730 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004731 ret = -EINVAL;
4732 }
Alan Kwongbb27c092016-07-20 16:41:25 -04004733 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04004734 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004735 phys_params.intf_idx = sde_encoder_get_intf(
4736 sde_kms->catalog, intf_type,
4737 controller_id);
4738 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04004739 SDE_ERROR_ENC(sde_enc,
4740 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004741 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04004742 ret = -EINVAL;
4743 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004744 }
4745
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004746 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04004747 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004748 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
4749 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04004750 else
4751 ret = sde_encoder_virt_add_phys_encs(
4752 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04004753 sde_enc,
4754 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004755 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04004756 SDE_ERROR_ENC(sde_enc,
4757 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004758 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004759 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08004760
4761 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4762 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4763
4764 if (phys) {
4765 atomic_set(&phys->vsync_cnt, 0);
4766 atomic_set(&phys->underrun_cnt, 0);
4767 }
4768 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07004769 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004770
4771 return ret;
4772}
4773
/* helper callbacks invoked by the DRM core during atomic modeset */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
4780
/* base encoder callbacks: destruction and debugfs (un)registration hooks */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
4786
/**
 * sde_encoder_init - allocate and initialize a virtual encoder, including
 *	its physical encoders, RSC client, timers and kthread work items
 * @dev:	Pointer to drm device structure
 * @disp_info:	Display information (interface type, capabilities, tiles)
 * Returns: Pointer to the new drm encoder, or ERR_PTR() on failure
 */
struct drm_encoder *sde_encoder_init(
		struct drm_device *dev,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct sde_kms *sde_kms = to_sde_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct sde_encoder_virt *sde_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	char name[SDE_NAME_SIZE];
	int ret = 0;

	sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
	if (!sde_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	/* enc_lock must exist before setup_display, which takes it */
	mutex_init(&sde_enc->enc_lock);
	ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->enc_spinlock);
	mutex_init(&sde_enc->vblank_ctl_lock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);

	/* watchdog-style vsync timer is only set up for the primary DSI */
	if ((disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) &&
			disp_info->is_primary)
		setup_timer(&sde_enc->vsync_event_timer,
				sde_encoder_vsync_event_handler,
				(unsigned long)sde_enc);

	/* RSC client is optional: fall back to NULL on failure */
	snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
	sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
				disp_info->is_primary);
	if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
		SDE_DEBUG("sde rsc client create failed :%ld\n",
						PTR_ERR(sde_enc->rsc_client));
		sde_enc->rsc_client = NULL;
	}

	/* input handler failure is logged but deliberately not fatal */
	if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		ret = _sde_encoder_input_handler(sde_enc);
		if (ret)
			SDE_ERROR(
			"input handler registration failed, rc = %d\n", ret);
	}

	mutex_init(&sde_enc->rc_lock);
	kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);
	sde_enc->vblank_enabled = false;

	kthread_init_work(&sde_enc->vsync_event_work,
			sde_encoder_vsync_event_work_handler);

	kthread_init_work(&sde_enc->input_event_work,
			sde_encoder_input_event_work_handler);

	kthread_init_work(&sde_enc->esd_trigger_work,
			sde_encoder_esd_trigger_work_handler);

	/* keep a private copy of the display info for later mode handling */
	memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));

	SDE_DEBUG_ENC(sde_enc, "created\n");

	return drm_enc;

fail:
	SDE_ERROR("failed to create encoder\n");
	/*
	 * NOTE(review): when sde_encoder_setup_display() fails, drm_enc is
	 * still NULL here, so the kzalloc'd sde_enc (and any phys encoders
	 * already added) appear to leak — verify against what
	 * sde_encoder_destroy() requires before freeing directly.
	 */
	if (drm_enc)
		sde_encoder_destroy(drm_enc);

	return ERR_PTR(ret);
}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04004867
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004868int sde_encoder_wait_for_event(struct drm_encoder *drm_enc,
4869 enum msm_event_wait event)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004870{
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004871 int (*fn_wait)(struct sde_encoder_phys *phys_enc) = NULL;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004872 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004873 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004874
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004875 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04004876 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004877 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004878 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004879 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04004880 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004881
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004882 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4883 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04004884
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004885 switch (event) {
4886 case MSM_ENC_COMMIT_DONE:
4887 fn_wait = phys->ops.wait_for_commit_done;
4888 break;
4889 case MSM_ENC_TX_COMPLETE:
4890 fn_wait = phys->ops.wait_for_tx_complete;
4891 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004892 case MSM_ENC_VBLANK:
4893 fn_wait = phys->ops.wait_for_vblank;
4894 break;
Sandeep Panda11b20d82017-06-19 12:57:27 +05304895 case MSM_ENC_ACTIVE_REGION:
4896 fn_wait = phys->ops.wait_for_active;
4897 break;
Lloyd Atkinsonf68a2132017-07-17 10:16:30 -04004898 default:
4899 SDE_ERROR_ENC(sde_enc, "unknown wait event %d\n",
4900 event);
4901 return -EINVAL;
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004902 };
4903
4904 if (phys && fn_wait) {
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004905 SDE_ATRACE_BEGIN("wait_for_completion_event");
Jeykumar Sankarandfaeec92017-06-06 15:21:51 -07004906 ret = fn_wait(phys);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -07004907 SDE_ATRACE_END("wait_for_completion_event");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04004908 if (ret)
4909 return ret;
4910 }
4911 }
4912
4913 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04004914}
4915
Alan Kwong67a3f792016-11-01 23:16:53 -04004916enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
4917{
4918 struct sde_encoder_virt *sde_enc = NULL;
4919 int i;
4920
4921 if (!encoder) {
4922 SDE_ERROR("invalid encoder\n");
4923 return INTF_MODE_NONE;
4924 }
4925 sde_enc = to_sde_encoder_virt(encoder);
4926
4927 if (sde_enc->cur_master)
4928 return sde_enc->cur_master->intf_mode;
4929
4930 for (i = 0; i < sde_enc->num_phys_encs; i++) {
4931 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
4932
4933 if (phys)
4934 return phys->intf_mode;
4935 }
4936
4937 return INTF_MODE_NONE;
4938}
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -07004939
/**
 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
 *	device bootup when cont_splash is enabled
 * @encoder:	Pointer to drm encoder structure
 * @Return:	0 on success, negative error code on failure
 */
int sde_encoder_update_caps_for_cont_splash(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct drm_connector *conn = NULL;
	struct sde_connector *sde_conn = NULL;
	struct sde_connector_state *sde_conn_state = NULL;
	struct drm_display_mode *drm_mode = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter, ctl_iter;
	int ret = 0, i;

	if (!encoder) {
		SDE_ERROR("invalid drm enc\n");
		return -EINVAL;
	}

	if (!encoder->dev || !encoder->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = encoder->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);
	sde_enc = to_sde_encoder_virt(encoder);
	if (!priv->num_connectors) {
		SDE_ERROR_ENC(sde_enc, "No connectors registered\n");
		return -EINVAL;
	}
	SDE_DEBUG_ENC(sde_enc,
			"num of connectors: %d\n", priv->num_connectors);

	/* find the registered connector whose encoder matches this one */
	for (i = 0; i < priv->num_connectors; i++) {
		SDE_DEBUG_ENC(sde_enc, "connector id: %d\n",
			priv->connectors[i]->base.id);
		sde_conn = to_sde_connector(priv->connectors[i]);
		if (!sde_conn->encoder) {
			SDE_DEBUG_ENC(sde_enc,
				"encoder not attached to connector\n");
			continue;
		}
		if (sde_conn->encoder->base.id
				== encoder->base.id) {
			conn = (priv->connectors[i]);
			break;
		}
	}

	if (!conn || !conn->state) {
		SDE_ERROR_ENC(sde_enc, "connector not found\n");
		return -EINVAL;
	}

	sde_conn_state = to_sde_connector_state(conn->state);

	if (!sde_conn->ops.get_mode_info) {
		SDE_ERROR_ENC(sde_enc, "conn: get_mode_info ops not found\n");
		return -EINVAL;
	}

	/* assumes encoder->crtc and its state are valid while the splash
	 * handover is in progress — TODO confirm against callers
	 */
	ret = sde_conn->ops.get_mode_info(&encoder->crtc->state->adjusted_mode,
			&sde_conn_state->mode_info,
			sde_kms->catalog->max_mixer_width,
			sde_conn->display);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"conn: ->get_mode_info failed. ret=%d\n", ret);
		return ret;
	}

	ret = sde_rm_reserve(&sde_kms->rm, encoder, encoder->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
			"failed to reserve hw resources, %d\n", ret);
		return ret;
	}

	if (sde_conn->encoder) {
		conn->state->best_encoder = sde_conn->encoder;
		SDE_DEBUG_ENC(sde_enc,
			"configured cstate->best_encoder to ID = %d\n",
			conn->state->best_encoder->base.id);
	} else {
		SDE_ERROR_ENC(sde_enc, "No encoder mapped to connector=%d\n",
				conn->base.id);
	}

	SDE_DEBUG_ENC(sde_enc, "connector topology = %llu\n",
			sde_connector_get_topology_name(conn));
	drm_mode = &encoder->crtc->state->adjusted_mode;
	SDE_DEBUG_ENC(sde_enc, "hdisplay = %d, vdisplay = %d\n",
			drm_mode->hdisplay, drm_mode->vdisplay);
	drm_set_preferred_mode(conn, drm_mode->hdisplay, drm_mode->vdisplay);

	if (encoder->bridge) {
		SDE_DEBUG_ENC(sde_enc, "Bridge mapped to encoder\n");
		/*
		 * For cont-splash use case, we update the mode
		 * configurations manually. This will skip the
		 * usually mode set call when actual frame is
		 * pushed from framework. The bridge needs to
		 * be updated with the current drm mode by
		 * calling the bridge mode set ops.
		 */
		if (encoder->bridge->funcs) {
			SDE_DEBUG_ENC(sde_enc, "calling mode_set\n");
			encoder->bridge->funcs->mode_set(encoder->bridge,
						drm_mode, drm_mode);
		}
	} else {
		SDE_ERROR_ENC(sde_enc, "No bridge attached to encoder\n");
	}

	/* collect the pingpong blocks reserved for this encoder */
	sde_rm_init_hw_iter(&pp_iter, encoder->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* collect the DSC blocks reserved for this encoder */
	sde_rm_init_hw_iter(&dsc_iter, encoder->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each physical encoder its reserved CTL block */
	sde_rm_init_hw_iter(&ctl_iter, encoder->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		/* NOTE(review): phys is dereferenced without a NULL check
		 * here, while the loop below guards against NULL entries —
		 * confirm every slot below num_phys_encs is populated.
		 */
		phys->hw_ctl = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &ctl_iter))
			break;
		phys->hw_ctl = (struct sde_hw_ctl *) ctl_iter.hw;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys) {
			SDE_ERROR_ENC(sde_enc,
				"phys encoders not initialized\n");
			return -EINVAL;
		}

		/* update connector for master and slave phys encoders */
		phys->connector = conn;
		phys->cont_splash_single_flush =
			sde_kms->splash_data.single_flush_en;
		phys->cont_splash_settings = true;

		phys->hw_pp = sde_enc->hw_pp[i];
		if (phys->ops.cont_splash_mode_set)
			phys->ops.cont_splash_mode_set(phys, drm_mode);

		if (phys->ops.is_master && phys->ops.is_master(phys))
			sde_enc->cur_master = phys;
	}

	return ret;
}
Dhaval Patelef58f0b2018-01-22 19:13:52 -08005116
5117int sde_encoder_display_failure_notification(struct drm_encoder *enc)
5118{
Jayant Shekhar00a28e92018-06-04 12:15:23 +05305119 struct msm_drm_thread *event_thread = NULL;
Dhaval Patel222023e2018-02-27 12:24:07 -08005120 struct msm_drm_private *priv = NULL;
5121 struct sde_encoder_virt *sde_enc = NULL;
5122
5123 if (!enc || !enc->dev || !enc->dev->dev_private) {
5124 SDE_ERROR("invalid parameters\n");
5125 return -EINVAL;
5126 }
5127
5128 priv = enc->dev->dev_private;
5129 sde_enc = to_sde_encoder_virt(enc);
5130 if (!sde_enc->crtc || (sde_enc->crtc->index
Jayant Shekhar00a28e92018-06-04 12:15:23 +05305131 >= ARRAY_SIZE(priv->event_thread))) {
Dhaval Patel222023e2018-02-27 12:24:07 -08005132 SDE_DEBUG_ENC(sde_enc,
5133 "invalid cached CRTC: %d or crtc index: %d\n",
5134 sde_enc->crtc == NULL,
5135 sde_enc->crtc ? sde_enc->crtc->index : -EINVAL);
5136 return -EINVAL;
5137 }
5138
5139 SDE_EVT32_VERBOSE(DRMID(enc));
5140
Jayant Shekhar00a28e92018-06-04 12:15:23 +05305141 event_thread = &priv->event_thread[sde_enc->crtc->index];
5142
5143 kthread_queue_work(&event_thread->worker,
5144 &sde_enc->esd_trigger_work);
5145 kthread_flush_work(&sde_enc->esd_trigger_work);
5146
Dhaval Patelef58f0b2018-01-22 19:13:52 -08005147 /**
5148 * panel may stop generating te signal (vsync) during esd failure. rsc
5149 * hardware may hang without vsync. Avoid rsc hang by generating the
5150 * vsync from watchdog timer instead of panel.
5151 */
5152 _sde_encoder_switch_to_watchdog_vsync(enc);
5153
5154 sde_encoder_wait_for_event(enc, MSM_ENC_TX_COMPLETE);
5155
5156 return 0;
5157}
Chirag Khuranaed859f52019-11-20 18:18:12 +05305158
/**
 * sde_encoder_phys_setup_cdm - setup chroma down block
 * @phys_enc: Pointer to physical encoder
 * @format: Output format
 * @output_type: CDM output destination (CDM_CDWN_OUTPUT_HDMI or _WB)
 * @roi: Output size
 *
 * Disables the CDM for RGB outputs; for YUV outputs, programs chroma
 * downsampling, CSC and enables the block. Returns early (with an error
 * log) if any hardware programming step fails.
 */
void sde_encoder_phys_setup_cdm(struct sde_encoder_phys *phys_enc,
		const struct sde_format *format, u32 output_type,
		struct sde_rect *roi)
{
	struct drm_encoder *encoder = phys_enc->parent;
	struct sde_encoder_virt *sde_enc = NULL;
	struct sde_hw_cdm *hw_cdm = phys_enc->hw_cdm;
	struct sde_hw_cdm_cfg *cdm_cfg = &phys_enc->cdm_cfg;
	int ret;
	u32 csc_type = 0;

	if (!encoder) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(encoder);

	/* CDM is only needed for YUV output; disable it otherwise */
	if (!SDE_FORMAT_IS_YUV(format)) {
		SDE_DEBUG_ENC(sde_enc, "[cdm_disable fmt:%x]\n",
				format->base.pixel_format);

		if (hw_cdm && hw_cdm->ops.disable)
			hw_cdm->ops.disable(hw_cdm);

		return;
	}

	memset(cdm_cfg, 0, sizeof(struct sde_hw_cdm_cfg));

	cdm_cfg->output_width = roi->w;
	cdm_cfg->output_height = roi->h;
	cdm_cfg->output_fmt = format;
	cdm_cfg->output_type = output_type;
	cdm_cfg->output_bit_depth = SDE_FORMAT_IS_DX(format) ?
		CDM_CDWN_OUTPUT_10BIT : CDM_CDWN_OUTPUT_8BIT;

	/* pick h/v downscale modes from the format's chroma subsampling */
	switch (cdm_cfg->output_fmt->chroma_sample) {
	case SDE_CHROMA_RGB:
		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	case SDE_CHROMA_H2V1:
		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	case SDE_CHROMA_420:
		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_OFFSITE;
		break;
	case SDE_CHROMA_H1V2:
	default:
		/* H1V2 is unsupported: fall back to no downsampling */
		SDE_ERROR("unsupported chroma sampling type\n");
		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	}

	SDE_DEBUG_ENC(sde_enc, "[cdm_enable:%d,%d,%X,%d,%d,%d,%d]\n",
			cdm_cfg->output_width,
			cdm_cfg->output_height,
			cdm_cfg->output_fmt->base.pixel_format,
			cdm_cfg->output_type,
			cdm_cfg->output_bit_depth,
			cdm_cfg->h_cdwn_type,
			cdm_cfg->v_cdwn_type);

	/* HDMI uses full-range 601 CSC, writeback uses limited-range;
	 * any other output_type leaves csc_type at index 0 (601FR)
	 */
	if (output_type == CDM_CDWN_OUTPUT_HDMI)
		csc_type = SDE_CSC_RGB2YUV_601FR;
	else if (output_type == CDM_CDWN_OUTPUT_WB)
		csc_type = SDE_CSC_RGB2YUV_601L;

	if (hw_cdm && hw_cdm->ops.setup_csc_data) {
		ret = hw_cdm->ops.setup_csc_data(hw_cdm,
				&sde_csc_10bit_convert[csc_type]);
		if (ret < 0) {
			SDE_ERROR("failed to setup CSC %d\n", ret);
			return;
		}
	}

	if (hw_cdm && hw_cdm->ops.setup_cdwn) {
		ret = hw_cdm->ops.setup_cdwn(hw_cdm, cdm_cfg);
		if (ret < 0) {
			SDE_ERROR("failed to setup CDM %d\n", ret);
			return;
		}
	}

	if (hw_cdm && hw_cdm->ops.enable) {
		ret = hw_cdm->ops.enable(hw_cdm, cdm_cfg);
		if (ret < 0) {
			SDE_ERROR("failed to enable CDM %d\n", ret);
			return;
		}
	}
}