blob: 5ccd3856113007897f2bfb173b5010cff7cd78ff [file] [log] [blame]
Dhaval Patel14d46ce2017-01-17 16:28:12 -08001/*
2 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
3 * Copyright (C) 2013 Red Hat
4 * Author: Rob Clark <robdclark@gmail.com>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07005 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -08006 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published by
8 * the Free Software Foundation.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07009 *
Dhaval Patel14d46ce2017-01-17 16:28:12 -080010 * This program is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070017 */
18
Clarence Ip19af1362016-09-23 14:57:51 -040019#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Dhaval Patel22ef6df2016-10-20 14:42:52 -070020#include <linux/debugfs.h>
21#include <linux/seq_file.h>
Dhaval Patel49ef6d72017-03-26 09:35:53 -070022#include <linux/sde_rsc.h>
Dhaval Patel22ef6df2016-10-20 14:42:52 -070023
Lloyd Atkinson09fed912016-06-24 18:14:13 -040024#include "msm_drv.h"
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070025#include "sde_kms.h"
26#include "drm_crtc.h"
27#include "drm_crtc_helper.h"
28
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040029#include "sde_hwio.h"
30#include "sde_hw_catalog.h"
31#include "sde_hw_intf.h"
Clarence Ipc475b082016-06-26 09:27:23 -040032#include "sde_hw_ctl.h"
33#include "sde_formats.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040034#include "sde_encoder_phys.h"
Dhaval Patel020f7e122016-11-15 14:39:18 -080035#include "sde_power_handle.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080036#include "sde_hw_dsc.h"
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070037#include "sde_crtc.h"
Narendra Muppalla77b32932017-05-10 13:53:11 -070038#include "sde_trace.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040039
Clarence Ip19af1362016-09-23 14:57:51 -040040#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
41 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
42
43#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
44 (e) ? (e)->base.base.id : -1, ##__VA_ARGS__)
45
Alan Kwong628d19e2016-10-31 13:50:13 -040046/* timeout in frames waiting for frame done */
47#define SDE_ENCODER_FRAME_DONE_TIMEOUT 60
48
Lloyd Atkinson5d722782016-05-30 14:09:41 -040049/*
50 * Two to anticipate panels that can do cmd/vid dynamic switching
51 * plan is to create all possible physical encoder types, and switch between
52 * them at runtime
53 */
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040054#define NUM_PHYS_ENCODER_TYPES 2
Lloyd Atkinson5d722782016-05-30 14:09:41 -040055
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040056#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
57 (MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)
58
Jeykumar Sankaranfdd77a92016-11-02 12:34:29 -070059#define MAX_CHANNELS_PER_ENC 2
60
Dhaval Patelf9245d62017-03-28 16:24:00 -070061#define MISR_BUFF_SIZE 256
62
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -070063#define IDLE_TIMEOUT 64
64
/**
 * enum sde_enc_rc_events - events for resource control state machine
 * @SDE_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks and request RSC with CMD state.
 *	Regardless of the previous state, the resource should be in ON state
 *	at the end of this event.
 * @SDE_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @SDE_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks
 *	and request RSC with IDLE state. Resource state should be in OFF
 *	at the end of the event.
 * @SDE_ENC_RC_EVENT_EARLY_WAKE_UP:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there will be frame update soon and the driver should
 *	wake up early to update the frame with minimum latency.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and request RSC with IDLE state
 *	and change the resource state to IDLE.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
	SDE_ENC_RC_EVENT_FRAME_DONE,
	SDE_ENC_RC_EVENT_STOP,
	SDE_ENC_RC_EVENT_EARLY_WAKE_UP,
	SDE_ENC_RC_EVENT_ENTER_IDLE
};
100
/**
 * enum sde_enc_rc_states - states that the resource control maintains
 * @SDE_ENC_RC_STATE_OFF: Resource is in OFF state
 * @SDE_ENC_RC_STATE_ON: Resource is in ON state
 * @SDE_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum sde_enc_rc_states {
	SDE_ENC_RC_STATE_OFF,
	SDE_ENC_RC_STATE_ON,
	SDE_ENC_RC_STATE_IDLE
};
112
/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: Number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared as disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. No.
 *			pingpong blocks can be different than num_phys_encs.
 * @hw_dsc:		Array of DSC block handles used for the display.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data: Data from upper layer for VBLANK notification
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb: callback handler for frame event
 * @crtc_frame_event_cb_data: callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @rsc_client:		rsc client pointer
 * @rsc_state_init:	boolean to indicate rsc config init
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @topology:		topology of the display
 * @mode_set_complete:	flag to indicate modeset completion
 * @rsc_cfg:		rsc configuration
 * @cur_conn_roi:	current connector roi
 * @prv_conn_roi:	previous connector roi to optimize if unchanged
 * @disable_inprogress:	sde encoder disable is in progress.
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;

	struct sde_rsc_client *rsc_client;
	bool rsc_state_init;
	struct msm_display_info disp_info;
	bool misr_enable;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum sde_enc_rc_states rc_state;
	struct delayed_work delayed_off_work;
	struct msm_display_topology topology;
	bool mode_set_complete;

	struct sde_encoder_rsc_config rsc_cfg;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;
	bool disable_inprogress;
};
209
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400210#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700211
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400212bool sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
213
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800214{
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400215 struct sde_encoder_virt *sde_enc;
216 struct msm_compression_info *comp_info;
217
218 if (!drm_enc)
219 return false;
220
221 sde_enc = to_sde_encoder_virt(drm_enc);
222 comp_info = &sde_enc->disp_info.comp_info;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800223
224 return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
225}
226
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400227bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
228{
229 enum sde_rm_topology_name topology;
230 struct sde_encoder_virt *sde_enc;
231 struct drm_connector *drm_conn;
232
233 if (!drm_enc)
234 return false;
235
236 sde_enc = to_sde_encoder_virt(drm_enc);
237 if (!sde_enc->cur_master)
238 return false;
239
240 drm_conn = sde_enc->cur_master->connector;
241 if (!drm_conn)
242 return false;
243
244 topology = sde_connector_get_topology_name(drm_conn);
245 if (topology == SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE)
246 return true;
247
248 return false;
249}
250
/*
 * _sde_encoder_power_enable - vote the SDE power resources on or off for
 *	this encoder through the shared power handle
 * @sde_enc: Pointer to virtual encoder
 * @enable: true to enable the power resources, false to release the vote
 * Returns: result of sde_power_resource_enable, or -EINVAL on bad state
 */
static inline int _sde_encoder_power_enable(struct sde_encoder_virt *sde_enc,
	bool enable)
{
	struct drm_encoder *drm_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;

	if (!sde_enc) {
		SDE_ERROR("invalid sde enc\n");
		return -EINVAL;
	}

	/* validate the whole chain down to the kms before dereferencing */
	drm_enc = &sde_enc->base;
	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		SDE_ERROR("invalid kms\n");
		return -EINVAL;
	}

	sde_kms = to_sde_kms(priv->kms);

	return sde_power_resource_enable(&priv->phandle, sde_kms->core_client,
								enable);
}
280
/**
 * sde_encoder_get_hw_resources - query HW resources needed by this encoder
 * @drm_enc: Pointer to drm encoder structure
 * @hw_res: Output table; zeroed here, then filled in by each physical encoder
 * @conn_state: Connector state the phys encoders use to derive requirements
 *
 * Aggregates the resource requirements of all contained physical encoders;
 * the per-phys results are expected not to overlap.
 */
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}

	hw_res->topology = sde_enc->topology;
}
310
Clarence Ip3649f8b2016-10-31 09:59:44 -0400311void sde_encoder_destroy(struct drm_encoder *drm_enc)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400312{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400313 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400314 int i = 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400315
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400316 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -0400317 SDE_ERROR("invalid encoder\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400318 return;
319 }
320
321 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400322 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400323
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700324 mutex_lock(&sde_enc->enc_lock);
Dhaval Patel020f7e122016-11-15 14:39:18 -0800325 sde_rsc_client_destroy(sde_enc->rsc_client);
326
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700327 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400328 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
329
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -0400330 if (phys && phys->ops.destroy) {
331 phys->ops.destroy(phys);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400332 --sde_enc->num_phys_encs;
333 sde_enc->phys_encs[i] = NULL;
334 }
335 }
336
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700337 if (sde_enc->num_phys_encs)
Clarence Ip19af1362016-09-23 14:57:51 -0400338 SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400339 sde_enc->num_phys_encs);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700340 sde_enc->num_phys_encs = 0;
341 mutex_unlock(&sde_enc->enc_lock);
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400342
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400343 drm_encoder_cleanup(drm_enc);
Dhaval Patel22ef6df2016-10-20 14:42:52 -0700344 mutex_destroy(&sde_enc->enc_lock);
345
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400346 kfree(sde_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700347}
348
/**
 * sde_encoder_helper_split_config - program MDP-top split-pipe/pp-split
 *	registers according to this phys encoder's split role
 * @phys_enc: Pointer to physical encoder structure
 * @interface: interface (intf block) this encoder drives
 *
 * SOLO role disables both split modes; MASTER programs split-pipe; any other
 * (slave) role programs pp-split using the master's pingpong index.
 */
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;

	/*
	 * disable split modes since encoder will be operating in as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		/* cfg is still zeroed here, so these calls disable the mode */
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	/* pp-split slave intf only applies to the PPSPLIT topology */
	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}
413
Jeykumar Sankaraneb49ff32017-04-12 16:33:25 -0700414static void _sde_encoder_adjust_mode(struct drm_connector *connector,
415 struct drm_display_mode *adj_mode)
416{
417 struct drm_display_mode *cur_mode;
418
419 if (!connector || !adj_mode)
420 return;
421
422 list_for_each_entry(cur_mode, &connector->modes, head) {
423 if (cur_mode->vdisplay == adj_mode->vdisplay &&
424 cur_mode->hdisplay == adj_mode->hdisplay &&
425 cur_mode->vrefresh == adj_mode->vrefresh) {
426 adj_mode->private = cur_mode->private;
427 adj_mode->private_flags = cur_mode->private_flags;
428 }
429 }
430}
431
/*
 * sde_encoder_virt_atomic_check - atomic check for the virtual encoder
 * @drm_enc: Pointer to drm encoder structure
 * @crtc_state: new crtc state being checked
 * @conn_state: new connector state being checked
 *
 * Delegates the check to each physical encoder (atomic_check, or mode_fixup
 * as fallback), then reserves resources from the resource manager for the
 * AtomicTest phase when a modeset is pending and the previous mode set
 * completed. Returns 0 on success, negative errno on failure.
 */
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	SDE_EVT32(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_sde_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/* Reserve dynamic resources now. Indicating AtomicTest phase */
	if (!ret) {
		/*
		 * Avoid reserving resources when mode set is pending. Topology
		 * info may not be available to complete reservation.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)
				&& sde_enc->mode_set_complete) {
			ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
				conn_state, true);
			sde_enc->mode_set_complete = false;
		}
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}
508
/*
 * _sde_encoder_dsc_update_pic_dim - validate and store the DSC picture
 *	dimensions
 * @dsc: DSC info to update
 * @pic_width: picture width in pixels
 * @pic_height: picture height in pixels
 *
 * The picture must be an exact multiple of the configured slice geometry.
 * Returns 0 on success, -EINVAL on invalid or misaligned dimensions.
 */
static int _sde_encoder_dsc_update_pic_dim(struct msm_display_dsc_info *dsc,
		int pic_width, int pic_height)
{
	if (!dsc || !pic_width || !pic_height) {
		SDE_ERROR("invalid input: pic_width=%d pic_height=%d\n",
			pic_width, pic_height);
		return -EINVAL;
	}

	if ((pic_width % dsc->slice_width) ||
			(pic_height % dsc->slice_height)) {
		SDE_ERROR("pic_dim=%dx%d has to be multiple of slice=%dx%d\n",
			pic_width, pic_height,
			dsc->slice_width, dsc->slice_height);
		return -EINVAL;
	}

	dsc->pic_width = pic_width;
	dsc->pic_height = pic_height;

	return 0;
}
531
532static void _sde_encoder_dsc_pclk_param_calc(struct msm_display_dsc_info *dsc,
533 int intf_width)
534{
535 int slice_per_pkt, slice_per_intf;
536 int bytes_in_slice, total_bytes_per_intf;
537
538 if (!dsc || !dsc->slice_width || !dsc->slice_per_pkt ||
539 (intf_width < dsc->slice_width)) {
540 SDE_ERROR("invalid input: intf_width=%d slice_width=%d\n",
541 intf_width, dsc ? dsc->slice_width : -1);
542 return;
543 }
544
545 slice_per_pkt = dsc->slice_per_pkt;
546 slice_per_intf = DIV_ROUND_UP(intf_width, dsc->slice_width);
547
548 /*
549 * If slice_per_pkt is greater than slice_per_intf then default to 1.
550 * This can happen during partial update.
551 */
552 if (slice_per_pkt > slice_per_intf)
553 slice_per_pkt = 1;
554
555 bytes_in_slice = DIV_ROUND_UP(dsc->slice_width * dsc->bpp, 8);
556 total_bytes_per_intf = bytes_in_slice * slice_per_intf;
557
558 dsc->eol_byte_num = total_bytes_per_intf % 3;
559 dsc->pclk_per_line = DIV_ROUND_UP(total_bytes_per_intf, 3);
560 dsc->bytes_in_slice = bytes_in_slice;
561 dsc->bytes_per_pkt = bytes_in_slice * slice_per_pkt;
562 dsc->pkt_per_line = slice_per_intf / slice_per_pkt;
563}
564
565static int _sde_encoder_dsc_initial_line_calc(struct msm_display_dsc_info *dsc,
566 int enc_ip_width)
567{
568 int ssm_delay, total_pixels, soft_slice_per_enc;
569
570 soft_slice_per_enc = enc_ip_width / dsc->slice_width;
571
572 /*
573 * minimum number of initial line pixels is a sum of:
574 * 1. sub-stream multiplexer delay (83 groups for 8bpc,
575 * 91 for 10 bpc) * 3
576 * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
577 * 3. the initial xmit delay
578 * 4. total pipeline delay through the "lock step" of encoder (47)
579 * 5. 6 additional pixels as the output of the rate buffer is
580 * 48 bits wide
581 */
582 ssm_delay = ((dsc->bpc < 10) ? 84 : 92);
583 total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
584 if (soft_slice_per_enc > 1)
585 total_pixels += (ssm_delay * 3);
586 dsc->initial_lines = DIV_ROUND_UP(total_pixels, dsc->slice_width);
587 return 0;
588}
589
590static bool _sde_encoder_dsc_ich_reset_override_needed(bool pu_en,
591 struct msm_display_dsc_info *dsc)
592{
593 /*
594 * As per the DSC spec, ICH_RESET can be either end of the slice line
595 * or at the end of the slice. HW internally generates ich_reset at
596 * end of the slice line if DSC_MERGE is used or encoder has two
597 * soft slices. However, if encoder has only 1 soft slice and DSC_MERGE
598 * is not used then it will generate ich_reset at the end of slice.
599 *
600 * Now as per the spec, during one PPS session, position where
601 * ich_reset is generated should not change. Now if full-screen frame
602 * has more than 1 soft slice then HW will automatically generate
603 * ich_reset at the end of slice_line. But for the same panel, if
604 * partial frame is enabled and only 1 encoder is used with 1 slice,
605 * then HW will generate ich_reset at end of the slice. This is a
606 * mismatch. Prevent this by overriding HW's decision.
607 */
608 return pu_en && dsc && (dsc->full_frame_slices > 1) &&
609 (dsc->slice_width == dsc->pic_width);
610}
611
/*
 * _sde_encoder_dsc_pipe_cfg - program or disable one DSC block / pingpong
 *	block pair
 * @hw_dsc: DSC block to program
 * @hw_pp: pingpong block routed to the DSC block
 * @dsc: DSC parameters to program (used only when enabling)
 * @common_mode: DSC_MODE_* flags (video/split-panel/merge)
 * @ich_reset: true to force an ich_reset override
 * @enable: false tears the pipe down, true programs and enables it
 *
 * All HW ops are optional; each is invoked only when the catalog
 * provides it.
 */
static void _sde_encoder_dsc_pipe_cfg(struct sde_hw_dsc *hw_dsc,
		struct sde_hw_pingpong *hw_pp, struct msm_display_dsc_info *dsc,
		u32 common_mode, bool ich_reset, bool enable)
{
	if (!enable) {
		/* disable path only touches the pingpong side */
		if (hw_pp->ops.disable_dsc)
			hw_pp->ops.disable_dsc(hw_pp);
		return;
	}

	if (hw_dsc->ops.dsc_config)
		hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, ich_reset);

	if (hw_dsc->ops.dsc_config_thresh)
		hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);

	if (hw_pp->ops.setup_dsc)
		hw_pp->ops.setup_dsc(hw_pp);

	if (hw_pp->ops.enable_dsc)
		hw_pp->ops.enable_dsc(hw_pp);
}
634
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400635static void _sde_encoder_get_connector_roi(
636 struct sde_encoder_virt *sde_enc,
637 struct sde_rect *merged_conn_roi)
638{
639 struct drm_connector *drm_conn;
640 struct sde_connector_state *c_state;
641
642 if (!sde_enc || !merged_conn_roi)
643 return;
644
645 drm_conn = sde_enc->phys_encs[0]->connector;
646
647 if (!drm_conn || !drm_conn->state)
648 return;
649
650 c_state = to_sde_connector_state(drm_conn->state);
651 sde_kms_rect_merge_rectangles(&c_state->rois, merged_conn_roi);
652}
653
/*
 * _sde_encoder_dsc_1_lm_1_enc_1_intf - configure DSC for the single
 *	layer-mixer / single encoder / single interface topology
 * @sde_enc: Pointer to virtual encoder
 *
 * Derives slice counts and initial lines from the current connector ROI and
 * programs the first DSC/pingpong pair. Returns 0 on success, -EINVAL when
 * the required HW blocks or master encoder are missing.
 */
static int _sde_encoder_dsc_1_lm_1_enc_1_intf(struct sde_encoder_virt *sde_enc)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode = 0;

	struct sde_hw_pingpong *hw_pp = sde_enc->hw_pp[0];
	struct sde_hw_dsc *hw_dsc = sde_enc->hw_dsc[0];
	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct msm_display_dsc_info *dsc =
		&sde_enc->disp_info.comp_info.dsc_info;

	if (dsc == NULL || hw_dsc == NULL || hw_pp == NULL || !enc_master) {
		SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
		return -EINVAL;
	}

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	/* whole ROI is fed through this one encoder */
	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	/* pu_en is false: single-pipe full frame, no override needed check */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(false, dsc);

	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode = DSC_MODE_VIDEO;

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h, dsc_common_mode);

	_sde_encoder_dsc_pipe_cfg(hw_dsc, hw_pp, dsc, dsc_common_mode,
			ich_res, true);

	return 0;
}
/*
 * _sde_encoder_dsc_2_lm_2_enc_2_intf - program DSC for the dual-pipe,
 *	dual-encoder, dual-interface topology (no DSC merge).
 * @sde_enc: virtual encoder whose hw_pp/hw_dsc blocks are programmed
 * @params: kickoff params; affected_displays selects active interfaces
 *
 * Configures one DSC + pingpong pair per channel from the current
 * connector ROI. Each interface carries its own compressed stream, so
 * both DSC structs are copied from the same panel dsc_info and given
 * identical picture dimensions.
 *
 * Return: 0 on success, -EINVAL if any hw block pointer is missing.
 */
static int _sde_encoder_dsc_2_lm_2_enc_2_intf(struct sde_encoder_virt *sde_enc,
	struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info dsc[MAX_CHANNELS_PER_ENC];
	bool half_panel_partial_update;
	int i;

	/* snapshot and validate every per-channel hw block up front */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	/* exactly one display affected => only half the panel updates */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	/* both channels compress the same panel stream parameters */
	memcpy(&dsc[0], &sde_enc->disp_info.comp_info.dsc_info, sizeof(dsc[0]));
	memcpy(&dsc[1], &sde_enc->disp_info.comp_info.dsc_info, sizeof(dsc[1]));

	/*
	 * Since both DSC use same pic dimension, set same pic dimension
	 * to both DSC structures.
	 */
	_sde_encoder_dsc_update_pic_dim(&dsc[0], roi->w, roi->h);
	_sde_encoder_dsc_update_pic_dim(&dsc[1], roi->w, roi->h);

	this_frame_slices = roi->w / dsc[0].slice_width;
	intf_ip_w = this_frame_slices * dsc[0].slice_width;

	/* full-panel update: the two interfaces split the width evenly */
	if (!half_panel_partial_update)
		intf_ip_w /= 2;

	/*
	 * In this topology when both interfaces are active, they have same
	 * load so intf_ip_w will be same.
	 */
	_sde_encoder_dsc_pclk_param_calc(&dsc[0], intf_ip_w);
	_sde_encoder_dsc_pclk_param_calc(&dsc[1], intf_ip_w);

	/*
	 * In this topology, since there is no dsc_merge, uncompressed input
	 * to encoder and interface is same.
	 */
	enc_ip_w = intf_ip_w;
	_sde_encoder_dsc_initial_line_calc(&dsc[0], enc_ip_w);
	_sde_encoder_dsc_initial_line_calc(&dsc[1], enc_ip_w);

	/*
	 * __is_ich_reset_override_needed should be called only after
	 * updating pic dimension, mdss_panel_dsc_update_pic_dim.
	 */
	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, &dsc[0]);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);

	/* program each channel; inactive channels are configured disabled */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		bool active = !!((1 << i) & params->affected_displays);

		SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
				dsc_common_mode, i, active);
		_sde_encoder_dsc_pipe_cfg(hw_dsc[i], hw_pp[i], &dsc[i],
				dsc_common_mode, ich_res, active);
	}

	return 0;
}
781
/*
 * _sde_encoder_dsc_2_lm_2_enc_1_intf - program DSC for the dual-pipe,
 *	dual-encoder, single-interface (DSC merge) topology.
 * @sde_enc: virtual encoder whose hw_pp/hw_dsc blocks are programmed
 * @params: kickoff params; affected_displays selects active channels
 *
 * Two DSC encoders feed one interface, so MULTIPLEX mode is set for
 * full-panel updates and each encoder compresses half the interface
 * width. Both hw_dsc blocks share a single dsc_info (same stream).
 *
 * Return: 0 on success, -EINVAL if any hw block pointer is missing.
 */
static int _sde_encoder_dsc_2_lm_2_enc_1_intf(struct sde_encoder_virt *sde_enc,
	struct sde_encoder_kickoff_params *params)
{
	int this_frame_slices;
	int intf_ip_w, enc_ip_w;
	int ich_res, dsc_common_mode;

	struct sde_encoder_phys *enc_master = sde_enc->cur_master;
	const struct sde_rect *roi = &sde_enc->cur_conn_roi;
	struct sde_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct msm_display_dsc_info *dsc =
		&sde_enc->disp_info.comp_info.dsc_info;
	bool half_panel_partial_update;
	int i;

	/* snapshot and validate every per-channel hw block up front */
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		hw_pp[i] = sde_enc->hw_pp[i];
		hw_dsc[i] = sde_enc->hw_dsc[i];

		if (!hw_pp[i] || !hw_dsc[i]) {
			SDE_ERROR_ENC(sde_enc, "invalid params for DSC\n");
			return -EINVAL;
		}
	}

	/* exactly one display affected => only half the panel updates */
	half_panel_partial_update =
			hweight_long(params->affected_displays) == 1;

	dsc_common_mode = 0;
	if (!half_panel_partial_update)
		dsc_common_mode |= DSC_MODE_SPLIT_PANEL | DSC_MODE_MULTIPLEX;
	if (enc_master->intf_mode == INTF_MODE_VIDEO)
		dsc_common_mode |= DSC_MODE_VIDEO;

	_sde_encoder_dsc_update_pic_dim(dsc, roi->w, roi->h);

	this_frame_slices = roi->w / dsc->slice_width;
	intf_ip_w = this_frame_slices * dsc->slice_width;
	_sde_encoder_dsc_pclk_param_calc(dsc, intf_ip_w);

	/*
	 * dsc merge case: when using 2 encoders for the same stream,
	 * no. of slices need to be same on both the encoders.
	 */
	enc_ip_w = intf_ip_w / 2;
	_sde_encoder_dsc_initial_line_calc(dsc, enc_ip_w);

	ich_res = _sde_encoder_dsc_ich_reset_override_needed(
			half_panel_partial_update, dsc);

	SDE_DEBUG_ENC(sde_enc, "pic_w: %d pic_h: %d mode:%d\n",
			roi->w, roi->h, dsc_common_mode);
	/* NOTE(review): 'i' here is the loop-exit value (MAX_CHANNELS_PER_ENC),
	 * logged only as an event marker, not an index.
	 */
	SDE_EVT32(DRMID(&sde_enc->base), roi->w, roi->h,
			dsc_common_mode, i, params->affected_displays);

	/* channel 0 always runs; channel 1 only for full-panel updates */
	_sde_encoder_dsc_pipe_cfg(hw_dsc[0], hw_pp[0], dsc, dsc_common_mode,
			ich_res, true);
	_sde_encoder_dsc_pipe_cfg(hw_dsc[1], hw_pp[1], dsc, dsc_common_mode,
			ich_res, !half_panel_partial_update);

	return 0;
}
845
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400846static int _sde_encoder_update_roi(struct drm_encoder *drm_enc)
847{
848 struct sde_encoder_virt *sde_enc;
849 struct drm_connector *drm_conn;
850 struct drm_display_mode *adj_mode;
851 struct sde_rect roi;
852
853 if (!drm_enc || !drm_enc->crtc || !drm_enc->crtc->state)
854 return -EINVAL;
855 sde_enc = to_sde_encoder_virt(drm_enc);
856
857 if (!sde_enc->cur_master)
858 return -EINVAL;
859
860 adj_mode = &sde_enc->base.crtc->state->adjusted_mode;
861 drm_conn = sde_enc->cur_master->connector;
862
863 _sde_encoder_get_connector_roi(sde_enc, &roi);
864 if (sde_kms_rect_is_null(&roi)) {
865 roi.w = adj_mode->hdisplay;
866 roi.h = adj_mode->vdisplay;
867 }
868
869 memcpy(&sde_enc->prv_conn_roi, &sde_enc->cur_conn_roi,
870 sizeof(sde_enc->prv_conn_roi));
871 memcpy(&sde_enc->cur_conn_roi, &roi, sizeof(sde_enc->cur_conn_roi));
872
873 return 0;
874}
875
876static int _sde_encoder_dsc_setup(struct sde_encoder_virt *sde_enc,
877 struct sde_encoder_kickoff_params *params)
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800878{
879 enum sde_rm_topology_name topology;
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400880 struct drm_connector *drm_conn;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800881 int ret = 0;
882
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400883 if (!sde_enc || !params || !sde_enc->phys_encs[0] ||
884 !sde_enc->phys_encs[0]->connector)
885 return -EINVAL;
886
887 drm_conn = sde_enc->phys_encs[0]->connector;
888
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800889 topology = sde_connector_get_topology_name(drm_conn);
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700890 if (topology == SDE_RM_TOPOLOGY_NONE) {
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800891 SDE_ERROR_ENC(sde_enc, "topology not set yet\n");
892 return -EINVAL;
893 }
894
895 SDE_DEBUG_ENC(sde_enc, "\n");
896 SDE_EVT32(DRMID(&sde_enc->base));
897
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400898 if (sde_kms_rect_is_equal(&sde_enc->cur_conn_roi,
899 &sde_enc->prv_conn_roi))
900 return ret;
901
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800902 switch (topology) {
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700903 case SDE_RM_TOPOLOGY_SINGLEPIPE_DSC:
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800904 ret = _sde_encoder_dsc_1_lm_1_enc_1_intf(sde_enc);
905 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700906 case SDE_RM_TOPOLOGY_DUALPIPE_DSCMERGE:
Lloyd Atkinson094780d2017-04-24 17:25:08 -0400907 ret = _sde_encoder_dsc_2_lm_2_enc_1_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800908 break;
Jeykumar Sankaran2b098072017-03-16 17:25:59 -0700909 case SDE_RM_TOPOLOGY_DUALPIPE_DSC:
Lloyd Atkinsonb22f9a42017-05-17 17:29:56 -0400910 ret = _sde_encoder_dsc_2_lm_2_enc_2_intf(sde_enc, params);
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800911 break;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -0800912 default:
913 SDE_ERROR_ENC(sde_enc, "No DSC support for topology %d",
914 topology);
915 return -EINVAL;
916 };
917
918 return ret;
919}
920
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700921static int sde_encoder_update_rsc_client(
Alan Kwong56f1a942017-04-04 11:53:42 -0700922 struct drm_encoder *drm_enc,
923 struct sde_encoder_rsc_config *config, bool enable)
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700924{
925 struct sde_encoder_virt *sde_enc;
926 enum sde_rsc_state rsc_state;
927 struct sde_rsc_cmd_config rsc_config;
928 int ret;
929 struct msm_display_info *disp_info;
930
931 if (!drm_enc) {
932 SDE_ERROR("invalid encoder\n");
933 return -EINVAL;
934 }
935
936 sde_enc = to_sde_encoder_virt(drm_enc);
937 disp_info = &sde_enc->disp_info;
938
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700939 if (!sde_enc->rsc_client) {
940 SDE_DEBUG("rsc client not created\n");
941 return 0;
942 }
943
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700944 /**
945 * only primary command mode panel can request CMD state.
946 * all other panels/displays can request for VID state including
947 * secondary command mode panel.
948 */
949 rsc_state = enable ?
950 (((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) &&
951 disp_info->is_primary) ? SDE_RSC_CMD_STATE :
952 SDE_RSC_VID_STATE) : SDE_RSC_IDLE_STATE;
953
Alan Kwong56f1a942017-04-04 11:53:42 -0700954 if (config && memcmp(&sde_enc->rsc_cfg, config,
955 sizeof(sde_enc->rsc_cfg)))
956 sde_enc->rsc_state_init = false;
957
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700958 if (rsc_state != SDE_RSC_IDLE_STATE && !sde_enc->rsc_state_init
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700959 && disp_info->is_primary) {
960 rsc_config.fps = disp_info->frame_rate;
961 rsc_config.vtotal = disp_info->vtotal;
962 rsc_config.prefill_lines = disp_info->prefill_lines;
963 rsc_config.jitter = disp_info->jitter;
Alan Kwong56f1a942017-04-04 11:53:42 -0700964 rsc_config.prefill_lines += config ?
965 config->inline_rotate_prefill : 0;
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700966 /* update it only once */
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -0700967 sde_enc->rsc_state_init = true;
Alan Kwong56f1a942017-04-04 11:53:42 -0700968 if (config)
969 sde_enc->rsc_cfg = *config;
Dhaval Patel30fae8a2017-04-21 18:42:41 -0700970
971 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
972 rsc_state, &rsc_config,
973 drm_enc->crtc ? drm_enc->crtc->index : -1);
974 } else {
975 ret = sde_rsc_client_state_update(sde_enc->rsc_client,
976 rsc_state, NULL,
977 drm_enc->crtc ? drm_enc->crtc->index : -1);
978 }
979
980 if (ret)
981 SDE_ERROR("sde rsc client update failed ret:%d\n", ret);
982
983 return ret;
984}
985
986struct sde_rsc_client *sde_encoder_get_rsc_client(struct drm_encoder *drm_enc)
987{
988 struct sde_encoder_virt *sde_enc;
989 struct msm_display_info *disp_info;
990
991 if (!drm_enc)
992 return NULL;
993
994 sde_enc = to_sde_encoder_virt(drm_enc);
995 disp_info = &sde_enc->disp_info;
996
997 return disp_info->is_primary ? sde_enc->rsc_client : NULL;
998}
999
/*
 * _sde_encoder_resource_control_helper - gate all encoder resources.
 * @drm_enc: encoder whose resources are toggled
 * @enable: true to power up, false to power down
 *
 * Enable order is core clks -> DSI clks -> phys irqs -> RSC; disable
 * runs the exact reverse sequence. Callers (resource_control state
 * machine) guarantee drm_enc/dev/dev_private validity.
 */
static void _sde_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_rsc_config rsc_cfg = { 0 };
	int i;

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "enable:%d\n", enable);
	SDE_EVT32(DRMID(drm_enc), enable);

	/* DSI clk control goes through the master's connector */
	if (!sde_enc->cur_master) {
		SDE_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable SDE core clks */
		sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, true);

		/* enable DSI clks */
		sde_connector_clk_ctrl(sde_enc->cur_master->connector, true);

		/* enable all the irq */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

			if (phys && phys->ops.irq_control)
				phys->ops.irq_control(phys, true);
		}

		/* fold the inline-rotator prefill into the RSC config */
		rsc_cfg.inline_rotate_prefill =
				sde_crtc_get_inline_prefill(drm_enc->crtc);

		/* enable RSC */
		sde_encoder_update_rsc_client(drm_enc, &rsc_cfg, true);

	} else {

		/* disable RSC */
		sde_encoder_update_rsc_client(drm_enc, NULL, false);

		/* disable all the irq */
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			struct sde_encoder_phys *phys =
					sde_enc->phys_encs[i];

			if (phys && phys->ops.irq_control)
				phys->ops.irq_control(phys, false);
		}

		/* disable DSI clks */
		sde_connector_clk_ctrl(sde_enc->cur_master->connector, false);

		/* disable SDE core clks */
		sde_power_resource_enable(&priv->phandle,
				sde_kms->core_client, false);
	}

}
1066
/*
 * sde_encoder_resource_control - idle power-collapse state machine.
 * @drm_enc: encoder receiving the event
 * @sw_event: one of the SDE_ENC_RC_EVENT_* events
 *
 * Drives rc_state between ON / OFF / IDLE under rc_lock. KICKOFF and
 * EARLY_WAKE_UP power resources up; FRAME_DONE arms the delayed-off
 * work that later delivers ENTER_IDLE; STOP powers everything down.
 * When idle power collapse is unsupported, only KICKOFF and STOP are
 * processed (video mode path).
 *
 * Return: 0 on success or ignored event, -EINVAL on bad params or an
 * unexpected state for FRAME_DONE.
 */
static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool schedule_off = false;
	struct sde_encoder_virt *sde_enc;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	/*
	 * when idle_pc is not supported, process only KICKOFF and STOP
	 * event and return early for other events (ie video mode).
	 */
	if (!sde_enc->idle_pc_supported &&
			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
			sw_event != SDE_ENC_RC_EVENT_STOP))
		return 0;

	SDE_DEBUG_ENC(sde_enc, "sw_event:%d, idle_pc_supported:%d\n", sw_event,
			sde_enc->idle_pc_supported);
	SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_ENTRY);

	switch (sw_event) {
	case SDE_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
					sw_event);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* enable all the clks and resources */
		_sde_encoder_resource_control_helper(drm_enc, true);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
		sde_enc->rc_state = SDE_ENC_RC_STATE_ON;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as, the other events
		 * like KICKOFF and STOP does a wait-for-idle before executing
		 * the resource_control
		 */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_ERROR_ENC(sde_enc, "sw_event:%d,rc:%d-unexpected\n",
					sw_event, sde_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (sde_crtc_frame_pending(drm_enc->crtc) > 1) {
			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
			return 0;
		}

		/* schedule delayed off work */
		schedule_delayed_work(&sde_enc->delayed_off_work,
				msecs_to_jiffies(IDLE_TIMEOUT));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;

	case SDE_ENC_RC_EVENT_STOP:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&sde_enc->delayed_off_work))
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&sde_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * disable the clks and resources only if the resource control
		 * is in ON state, otherwise the clks and resources would have
		 * been disabled while going into IDLE state
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON)
			_sde_encoder_resource_control_helper(drm_enc, false);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_OFF, SDE_EVTLOG_FUNC_CASE3);
		sde_enc->rc_state = SDE_ENC_RC_STATE_OFF;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_EARLY_WAKE_UP:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&sde_enc->delayed_off_work)) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work cancelled\n",
					sw_event);
			schedule_off = true;
		}

		mutex_lock(&sde_enc->rc_lock);

		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				schedule_off, SDE_EVTLOG_FUNC_CASE4);

		/* return if the resource control is in OFF state */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
					sw_event);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/*
		 * enable all the clks and resources if resource control is
		 * coming out of IDLE state
		 */
		if (sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
			_sde_encoder_resource_control_helper(drm_enc, true);
			sde_enc->rc_state = SDE_ENC_RC_STATE_ON;
			schedule_off = true;
		}

		/*
		 * schedule off work when there are no frames pending and
		 * 1. early wakeup cancelled off work
		 * 2. early wakeup changed the rc_state to ON - this is to
		 *    handle cases where early wakeup is called but no
		 *    frame updates
		 */
		if (schedule_off && !sde_crtc_frame_pending(drm_enc->crtc)) {
			/* schedule delayed off work */
			schedule_delayed_work(&sde_enc->delayed_off_work,
					msecs_to_jiffies(IDLE_TIMEOUT));
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
					sw_event);
		}

		mutex_unlock(&sde_enc->rc_lock);
		break;

	case SDE_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&sde_enc->rc_lock);

		/* idle entry is only legal from the ON state */
		if (sde_enc->rc_state != SDE_ENC_RC_STATE_ON) {
			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc:%d !ON state\n",
					sw_event, sde_enc->rc_state);
			mutex_unlock(&sde_enc->rc_lock);
			return 0;
		}

		/* disable all the clks and resources */
		_sde_encoder_resource_control_helper(drm_enc, false);
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE5);
		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;

		mutex_unlock(&sde_enc->rc_lock);
		break;

	default:
		SDE_ERROR("unexpected sw_event: %d\n", sw_event);
		break;
	}

	SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->idle_pc_supported,
			sde_enc->rc_state, SDE_EVTLOG_FUNC_EXIT);
	return 0;
}
1259
1260static void sde_encoder_off_work(struct work_struct *work)
1261{
1262 struct delayed_work *dw = to_delayed_work(work);
1263 struct sde_encoder_virt *sde_enc = container_of(dw,
1264 struct sde_encoder_virt, delayed_off_work);
1265
1266 if (!sde_enc) {
1267 SDE_ERROR("invalid sde encoder\n");
1268 return;
1269 }
1270
1271 sde_encoder_resource_control(&sde_enc->base,
1272 SDE_ENC_RC_EVENT_ENTER_IDLE);
1273}
1274
/*
 * sde_encoder_virt_mode_set - drm_encoder_helper mode_set callback.
 * @drm_enc: encoder being configured
 * @mode: requested display mode
 * @adj_mode: adjusted mode after fixup
 *
 * Finds the attached connector, validates the topology for the mode,
 * reserves hw resources from the resource manager, caches the pingpong
 * and DSC blocks per channel, then forwards mode_set to each physical
 * encoder. Sets mode_set_complete on success.
 */
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_connector *sde_conn = NULL;
	struct sde_rm_hw_iter dsc_iter, pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	/* locate the connector currently attached to this encoder */
	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* ask the connector which topology this mode requires */
	sde_conn = to_sde_connector(conn);
	if (sde_conn) {
		ret = sde_conn->ops.get_topology(adj_mode, &sde_enc->topology,
			sde_kms->catalog->max_mixer_width);
		if (ret) {
			SDE_ERROR_ENC(sde_enc,
				"invalid topology for the mode\n");
			return;
		}
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	/* cache the reserved pingpong blocks, one per channel */
	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	/* cache the reserved DSC blocks (may be none for non-DSC modes) */
	sde_rm_init_hw_iter(&dsc_iter, drm_enc->base.id, SDE_HW_BLK_DSC);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_dsc[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &dsc_iter))
			break;
		sde_enc->hw_dsc[i] = (struct sde_hw_dsc *) dsc_iter.hw;
	}

	/* hand each physical encoder its pingpong and propagate mode_set */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	sde_enc->mode_set_complete = true;
}
1368
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07001369static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001370{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001371 struct sde_encoder_virt *sde_enc = NULL;
Clarence Ip35348262017-04-28 16:10:46 -07001372 struct msm_drm_private *priv;
1373 struct sde_kms *sde_kms;
Narendra Muppallad4081e12017-04-20 19:24:08 -07001374 struct sde_hw_mdp *hw_mdptop;
1375 int i = 0;
Narendra Muppallad4081e12017-04-20 19:24:08 -07001376 struct sde_watchdog_te_status te_cfg = { 0 };
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001377
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07001378 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
1379 SDE_ERROR("invalid parameters\n");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001380 return;
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07001381 }
1382 priv = drm_enc->dev->dev_private;
1383
1384 sde_enc = to_sde_encoder_virt(drm_enc);
1385 if (!sde_enc || !sde_enc->cur_master) {
1386 SDE_ERROR("invalid sde encoder/master\n");
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04001387 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001388 }
1389
Clarence Ip35348262017-04-28 16:10:46 -07001390 sde_kms = to_sde_kms(priv->kms);
Narendra Muppallad4081e12017-04-20 19:24:08 -07001391 hw_mdptop = sde_kms->hw_mdp;
1392
1393 if (!hw_mdptop) {
1394 SDE_ERROR("invalid mdptop\n");
1395 return;
1396 }
1397
1398 sde_kms = to_sde_kms(priv->kms);
Clarence Ip35348262017-04-28 16:10:46 -07001399 if (!sde_kms) {
1400 SDE_ERROR("invalid sde_kms\n");
1401 return;
1402 }
Lloyd Atkinson5217336c2016-09-15 18:21:18 -04001403
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07001404 if (sde_enc->cur_master->hw_mdptop &&
1405 sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
1406 sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
1407 sde_enc->cur_master->hw_mdptop,
1408 sde_kms->catalog);
1409
Narendra Muppallad4081e12017-04-20 19:24:08 -07001410 if (hw_mdptop->ops.setup_vsync_sel) {
1411 for (i = 0; i < sde_enc->num_phys_encs; i++)
1412 te_cfg.ppnumber[i] = sde_enc->hw_pp[i]->idx;
1413
1414 te_cfg.pp_count = sde_enc->num_phys_encs;
1415 te_cfg.frame_rate = sde_enc->disp_info.frame_rate;
1416 hw_mdptop->ops.setup_vsync_sel(hw_mdptop, &te_cfg,
1417 sde_enc->disp_info.is_te_using_watchdog_timer);
1418 }
Veera Sundaram Sankaran82916e02017-03-29 18:44:22 -07001419}
1420
1421void sde_encoder_virt_restore(struct drm_encoder *drm_enc)
1422{
1423 struct sde_encoder_virt *sde_enc = NULL;
1424 int i;
1425
1426 if (!drm_enc) {
1427 SDE_ERROR("invalid encoder\n");
1428 return;
1429 }
1430 sde_enc = to_sde_encoder_virt(drm_enc);
1431
1432 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1433 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1434
1435 if (phys && (phys != sde_enc->cur_master) && phys->ops.restore)
1436 phys->ops.restore(phys);
1437 }
1438
1439 if (sde_enc->cur_master && sde_enc->cur_master->ops.restore)
1440 sde_enc->cur_master->ops.restore(sde_enc->cur_master);
1441
1442 _sde_encoder_virt_enable_helper(drm_enc);
1443}
1444
/*
 * sde_encoder_virt_enable - drm_encoder_helper enable callback.
 * @drm_enc: encoder being enabled
 *
 * Elects the master physical encoder, powers resources up via the
 * KICKOFF resource-control event, then enables all slave physical
 * encoders before the master, and finishes with the common enable
 * helper (UBWC reset + TE selection).
 */
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	sde_enc->cur_master = NULL;
	sde_enc->disable_inprogress = false;
	/* elect the first phys encoder that reports itself master */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			SDE_DEBUG_ENC(sde_enc, "master is now idx %d\n", i);
			sde_enc->cur_master = phys;
			break;
		}
	}

	if (!sde_enc->cur_master) {
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	/* power up clks/irqs/RSC before touching any phys encoder hw */
	ret = sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		SDE_ERROR_ENC(sde_enc, "sde resource control failed: %d\n",
				ret);
		return;
	}

	/* enable the slaves first, the master last */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && (phys != sde_enc->cur_master) && phys->ops.enable)
			phys->ops.enable(phys);
	}

	if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);

	_sde_encoder_virt_enable_helper(drm_enc);
}
1495
/*
 * sde_encoder_virt_disable - drm_encoder_helper disable callback.
 * Tears down the virtual encoder: disables all slave physical encoders
 * first, then the master, releases hardware resources and drops the
 * resource-control reference. No return value; errors are logged only.
 */
static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	/* validate the full pointer chain before any dereference */
	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	/* flag consumed by the frame-done path to tag late frame events */
	sde_enc->disable_inprogress = true;

	SDE_EVT32(DRMID(drm_enc));

	/*
	 * Disable slave physical encoders first; the master is disabled
	 * separately below so it can flush any remaining work.
	 * NOTE(review): phys->ops.is_master is called unguarded here —
	 * assumes every phys encoder populates it; confirm against the
	 * phys encoder init code.
	 */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.disable && !phys->ops.is_master(phys)) {
			phys->ops.disable(phys);
			phys->connector = NULL;
		}
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&sde_enc->frame_done_timer);
	}

	/* now shut down the master physical encoder */
	if (sde_enc->cur_master && sde_enc->cur_master->ops.disable)
		sde_enc->cur_master->ops.disable(sde_enc->cur_master);

	/* drop clocks/power via the resource-control state machine */
	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_STOP);

	if (sde_enc->cur_master) {
		sde_enc->cur_master->connector = NULL;
		sde_enc->cur_master = NULL;
	}

	SDE_DEBUG_ENC(sde_enc, "encoder disabled\n");

	/* return all HW blocks reserved for this encoder to the RM pool */
	sde_rm_release(&sde_kms->rm, drm_enc);
}
1552
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001553static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04001554 enum sde_intf_type type, u32 controller_id)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001555{
1556 int i = 0;
1557
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001558 for (i = 0; i < catalog->intf_count; i++) {
1559 if (catalog->intf[i].type == type
Lloyd Atkinson9a840312016-06-26 10:11:08 -04001560 && catalog->intf[i].controller_id == controller_id) {
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001561 return catalog->intf[i].id;
1562 }
1563 }
1564
1565 return INTF_MAX;
1566}
1567
Alan Kwongbb27c092016-07-20 16:41:25 -04001568static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
1569 enum sde_intf_type type, u32 controller_id)
1570{
1571 if (controller_id < catalog->wb_count)
1572 return catalog->wb[controller_id].id;
1573
1574 return WB_MAX;
1575}
1576
Dhaval Patel81e87882016-10-19 21:41:56 -07001577static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
1578 struct sde_encoder_phys *phy_enc)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001579{
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001580 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001581 unsigned long lock_flags;
1582
Dhaval Patel81e87882016-10-19 21:41:56 -07001583 if (!drm_enc || !phy_enc)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001584 return;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001585
Narendra Muppalla77b32932017-05-10 13:53:11 -07001586 SDE_ATRACE_BEGIN("encoder_vblank_callback");
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001587 sde_enc = to_sde_encoder_virt(drm_enc);
1588
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001589 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001590 if (sde_enc->crtc_vblank_cb)
1591 sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001592 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Dhaval Patel81e87882016-10-19 21:41:56 -07001593
1594 atomic_inc(&phy_enc->vsync_cnt);
Narendra Muppalla77b32932017-05-10 13:53:11 -07001595 SDE_ATRACE_END("encoder_vblank_callback");
Dhaval Patel81e87882016-10-19 21:41:56 -07001596}
1597
/*
 * sde_encoder_underrun_callback - underrun notification from a physical
 * encoder. Bumps the per-interface underrun counter (exposed via debugfs)
 * and records the new count in the event log for post-mortem debug.
 */
static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	SDE_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
	SDE_ATRACE_END("encoder_underrun_callback");
}
1609
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001610void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
1611 void (*vbl_cb)(void *), void *vbl_data)
1612{
1613 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
1614 unsigned long lock_flags;
1615 bool enable;
1616 int i;
1617
1618 enable = vbl_cb ? true : false;
1619
Clarence Ip19af1362016-09-23 14:57:51 -04001620 if (!drm_enc) {
1621 SDE_ERROR("invalid encoder\n");
1622 return;
1623 }
1624 SDE_DEBUG_ENC(sde_enc, "\n");
Lloyd Atkinson5d40d312016-09-06 08:34:13 -04001625 SDE_EVT32(DRMID(drm_enc), enable);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001626
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001627 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001628 sde_enc->crtc_vblank_cb = vbl_cb;
1629 sde_enc->crtc_vblank_cb_data = vbl_data;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001630 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001631
1632 for (i = 0; i < sde_enc->num_phys_encs; i++) {
1633 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
1634
1635 if (phys && phys->ops.control_vblank_irq)
1636 phys->ops.control_vblank_irq(phys, enable);
1637 }
1638}
1639
Alan Kwong628d19e2016-10-31 13:50:13 -04001640void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
1641 void (*frame_event_cb)(void *, u32 event),
1642 void *frame_event_cb_data)
1643{
1644 struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
1645 unsigned long lock_flags;
1646 bool enable;
1647
1648 enable = frame_event_cb ? true : false;
1649
1650 if (!drm_enc) {
1651 SDE_ERROR("invalid encoder\n");
1652 return;
1653 }
1654 SDE_DEBUG_ENC(sde_enc, "\n");
1655 SDE_EVT32(DRMID(drm_enc), enable, 0);
1656
1657 spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
1658 sde_enc->crtc_frame_event_cb = frame_event_cb;
1659 sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
1660 spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
1661}
1662
/*
 * sde_encoder_frame_done_callback - a physical encoder finished its frame.
 * Clears the encoder's bit in frame_busy_mask; once the mask is empty the
 * frame-done watchdog is disarmed, the resource-control FRAME_DONE event
 * is sent, and the CRTC's frame-event callback is invoked.
 */
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	/* One of the physical encoders has become idle */
	for (i = 0; i < sde_enc->num_phys_encs; i++)
		if (sde_enc->phys_encs[i] == ready_phys) {
			clear_bit(i, sde_enc->frame_busy_mask);
			SDE_EVT32_VERBOSE(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
		}

	/* only when ALL busy bits are clear is the whole frame done */
	if (!sde_enc->frame_busy_mask[0]) {
		/* disarm the frame-done watchdog armed at kickoff */
		atomic_set(&sde_enc->frame_done_timeout, 0);
		del_timer(&sde_enc->frame_done_timer);

		sde_encoder_resource_control(drm_enc,
				SDE_ENC_RC_EVENT_FRAME_DONE);

		/* tag events arriving while virt_disable is running */
		if (sde_enc->disable_inprogress)
			event |= SDE_ENCODER_FRAME_EVENT_DURING_DISABLE;

		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
				sde_enc->crtc_frame_event_cb_data, event);
	}
}
1693
/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 *
 * Increments the phys encoder's pending-kickoff count, merges any extra
 * flush bits, and writes the CTL flush trigger. Encoders with the SKIP
 * split role are left untouched (they receive no irqs).
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		SDE_ERROR("missing trigger cb\n");
		return;
	}

	/* skipped encoders don't get flushed; they generate no irqs */
	if (phys->split_role == ENC_ROLE_SKIP) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
				"skip flush pp%d ctl%d\n",
				phys->hw_pp->idx - PINGPONG_0,
				ctl->idx - CTL_0);
		return;
	}

	/* count this kickoff before triggering, for frame-done accounting */
	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);

	/* log the final flush mask when the CTL block can report it */
	if (ctl->ops.get_pending_flush)
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx, pending_kickoff_cnt,
			ctl->idx, ctl->ops.get_pending_flush(ctl));
	else
		SDE_EVT32(DRMID(drm_enc), phys->intf_idx, ctl->idx,
			pending_kickoff_cnt);
}
1740
1741/**
1742 * _sde_encoder_trigger_start - trigger start for a physical encoder
1743 * phys: Pointer to physical encoder structure
1744 */
1745static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
1746{
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001747 struct sde_hw_ctl *ctl;
1748
Clarence Ip110d15c2016-08-16 14:44:41 -04001749 if (!phys) {
1750 SDE_ERROR("invalid encoder\n");
1751 return;
1752 }
1753
Lloyd Atkinson73fb8092017-02-08 16:02:55 -05001754 ctl = phys->hw_ctl;
1755 if (phys->split_role == ENC_ROLE_SKIP) {
1756 SDE_DEBUG_ENC(to_sde_encoder_virt(phys->parent),
1757 "skip start pp%d ctl%d\n",
1758 phys->hw_pp->idx - PINGPONG_0,
1759 ctl->idx - CTL_0);
1760 return;
1761 }
Clarence Ip110d15c2016-08-16 14:44:41 -04001762 if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
1763 phys->ops.trigger_start(phys);
1764}
1765
1766void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
1767{
1768 struct sde_hw_ctl *ctl;
Clarence Ip110d15c2016-08-16 14:44:41 -04001769
1770 if (!phys_enc) {
1771 SDE_ERROR("invalid encoder\n");
1772 return;
1773 }
1774
1775 ctl = phys_enc->hw_ctl;
1776 if (ctl && ctl->ops.trigger_start) {
1777 ctl->ops.trigger_start(ctl);
Dhaval Patel6c666622017-03-21 23:02:59 -07001778 SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);
Clarence Ip110d15c2016-08-16 14:44:41 -04001779 }
Clarence Ip110d15c2016-08-16 14:44:41 -04001780}
1781
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05001782int sde_encoder_helper_wait_event_timeout(
1783 int32_t drm_id,
1784 int32_t hw_id,
1785 wait_queue_head_t *wq,
1786 atomic_t *cnt,
1787 s64 timeout_ms)
1788{
1789 int rc = 0;
1790 s64 expected_time = ktime_to_ms(ktime_get()) + timeout_ms;
1791 s64 jiffies = msecs_to_jiffies(timeout_ms);
1792 s64 time;
1793
1794 do {
1795 rc = wait_event_timeout(*wq, atomic_read(cnt) == 0, jiffies);
1796 time = ktime_to_ms(ktime_get());
1797
1798 SDE_EVT32(drm_id, hw_id, rc, time, expected_time,
1799 atomic_read(cnt));
1800 /* If we timed out, counter is valid and time is less, wait again */
1801 } while (atomic_read(cnt) && (rc == 0) && (time < expected_time));
1802
1803 return rc;
1804}
1805
/*
 * sde_encoder_helper_hw_reset - recover a physical encoder after a fatal
 * frame error. On the master, first requests a connector-level soft reset
 * (e.g. panel re-init), then resets the CTL block. Any reset failure dumps
 * debug state via SDE_DBG_DUMP("panic"). On success the phys encoder is
 * returned to the SDE_ENC_ENABLED state.
 */
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	/* nothing to do if this CTL block cannot be reset */
	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n",  ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	/* only the master owns the connector; try a display soft reset */
	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("panic");
			}
		}
	}

	/* reset the CTL path itself */
	rc = ctl->ops.reset(ctl);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "ctl %d reset failure\n",  ctl->idx);
		SDE_DBG_DUMP("panic");
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}
1850
/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		enum sde_rm_topology_name topology = SDE_RM_TOPOLOGY_NONE;

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->connector)
			topology = sde_connector_get_topology_name(
					phys->connector);

		/*
		 * don't wait on ppsplit slaves or skipped encoders because
		 * they dont receive irqs
		 */
		if (!(topology == SDE_RM_TOPOLOGY_PPSPLIT &&
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		/*
		 * flush each encoder individually unless it requests a
		 * single combined flush, in which case accumulate its
		 * pending mask for the master to issue below
		 */
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	/* only the master issues the start trigger */
	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}
1920
/*
 * _sde_encoder_ppsplit_swap_intf_for_right_only_update - for pingpong-split
 * topologies, swap the two physical encoders' interface assignments when
 * only the right half of the display is updated, and swap them back on any
 * other update. Also collapses @affected_displays to BIT(0) when only one
 * phys encoder is active, since only the master generates irqs in ppsplit.
 */
static void _sde_encoder_ppsplit_swap_intf_for_right_only_update(
		struct drm_encoder *drm_enc,
		unsigned long *affected_displays,
		int num_active_phys)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *master;
	enum sde_rm_topology_name topology;
	bool is_right_only;

	if (!drm_enc || !affected_displays)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	master = sde_enc->cur_master;
	if (!master || !master->connector)
		return;

	/* this swap logic only applies to pingpong-split topology */
	topology = sde_connector_get_topology_name(master->connector);
	if (topology != SDE_RM_TOPOLOGY_PPSPLIT)
		return;

	/*
	 * For pingpong split, the slave pingpong won't generate IRQs. For
	 * right-only updates, we can't swap pingpongs, or simply swap the
	 * master/slave assignment, we actually have to swap the interfaces
	 * so that the master physical encoder will use a pingpong/interface
	 * that generates irqs on which to wait.
	 */
	is_right_only = !test_bit(0, affected_displays) &&
			test_bit(1, affected_displays);

	if (is_right_only && !sde_enc->intfs_swapped) {
		/* right-only update swap interfaces */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = true;
	} else if (!is_right_only && sde_enc->intfs_swapped) {
		/* left-only or full update, swap back */
		swap(sde_enc->phys_encs[0]->intf_idx,
				sde_enc->phys_encs[1]->intf_idx);
		sde_enc->intfs_swapped = false;
	}

	SDE_DEBUG_ENC(sde_enc,
			"right_only %d swapped %d phys0->intf%d, phys1->intf%d\n",
			is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0);
	SDE_EVT32(DRMID(drm_enc), is_right_only, sde_enc->intfs_swapped,
			sde_enc->phys_encs[0]->intf_idx - INTF_0,
			sde_enc->phys_encs[1]->intf_idx - INTF_0,
			*affected_displays);

	/* ppsplit always uses master since ppslave invalid for irqs*/
	if (num_active_phys == 1)
		*affected_displays = BIT(0);
}
1979
/*
 * _sde_encoder_update_master - reassign split roles (SOLO/MASTER/SLAVE/
 * SKIP) across physical encoders based on which displays the current
 * frame actually touches, and update sde_enc->cur_master accordingly.
 * The first active phys encoder becomes MASTER (or SOLO if it is the
 * only active one); inactive ones are marked SKIP so flush/start and
 * frame-done waits bypass them.
 */
static void _sde_encoder_update_master(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	int i, num_active_phys;
	bool master_assigned = false;

	if (!drm_enc || !params)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	/* single phys encoder is always its own master; nothing to do */
	if (sde_enc->num_phys_encs <= 1)
		return;

	/* count bits set */
	num_active_phys = hweight_long(params->affected_displays);

	SDE_DEBUG_ENC(sde_enc, "affected_displays 0x%lx num_active_phys %d\n",
			params->affected_displays, num_active_phys);

	/* for left/right only update, ppsplit master switches interface */
	_sde_encoder_ppsplit_swap_intf_for_right_only_update(drm_enc,
			&params->affected_displays, num_active_phys);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		enum sde_enc_split_role prv_role, new_role;
		bool active;

		phys = sde_enc->phys_encs[i];
		if (!phys || !phys->ops.update_split_role)
			continue;

		active = test_bit(i, &params->affected_displays);
		prv_role = phys->split_role;

		if (active && num_active_phys == 1)
			new_role = ENC_ROLE_SOLO;
		else if (active && !master_assigned)
			new_role = ENC_ROLE_MASTER;
		else if (active)
			new_role = ENC_ROLE_SLAVE;
		else
			new_role = ENC_ROLE_SKIP;

		phys->ops.update_split_role(phys, new_role);
		if (new_role == ENC_ROLE_SOLO || new_role == ENC_ROLE_MASTER) {
			sde_enc->cur_master = phys;
			master_assigned = true;
		}

		SDE_DEBUG_ENC(sde_enc, "pp %d role prv %d new %d active %d\n",
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active);
		SDE_EVT32(DRMID(drm_enc), params->affected_displays,
				phys->hw_pp->idx - PINGPONG_0, prv_role,
				phys->split_role, active, num_active_phys);
	}
}
2040
Dhaval Patel0e558f42017-04-30 00:51:40 -07002041void sde_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
2042{
2043 struct sde_encoder_virt *sde_enc;
2044 struct sde_encoder_phys *phys;
2045 unsigned int i;
2046 struct sde_hw_ctl *ctl;
2047 struct msm_display_info *disp_info;
2048
2049 if (!drm_enc) {
2050 SDE_ERROR("invalid encoder\n");
2051 return;
2052 }
2053 sde_enc = to_sde_encoder_virt(drm_enc);
2054 disp_info = &sde_enc->disp_info;
2055
2056 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2057 phys = sde_enc->phys_encs[i];
2058
2059 if (phys && phys->hw_ctl) {
2060 ctl = phys->hw_ctl;
2061 if (ctl->ops.clear_pending_flush)
2062 ctl->ops.clear_pending_flush(ctl);
2063
2064 /* update only for command mode primary ctl */
2065 if ((phys == sde_enc->cur_master) &&
2066 (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
2067 && ctl->ops.trigger_pending)
2068 ctl->ops.trigger_pending(ctl);
2069 }
2070 }
2071}
2072
/*
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for the
 * next frame kickoff. Sequence: per-phys prepare (may block on the prior
 * kickoff), resource-control KICKOFF event, consolidated HW reset if any
 * phys flagged SDE_ENC_ERR_NEEDS_HW_RESET, split-role/master update, ROI
 * update, connector pre-kickoff, and DSC setup when enabled. Errors are
 * logged but do not abort the sequence.
 */
void sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;
	int rc;

	if (!drm_enc || !params) {
		SDE_ERROR("invalid args\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff)
				phys->ops.prepare_for_kickoff(phys, params);
			/* remember if any phys requested a HW reset */
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
		}
	}

	/* make sure clocks/power are up before touching HW below */
	sde_encoder_resource_control(drm_enc, SDE_ENC_RC_EVENT_KICKOFF);

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		SDE_EVT32(DRMID(drm_enc), SDE_EVTLOG_FUNC_CASE1);
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}

	/* reassign split roles based on the displays this frame touches */
	_sde_encoder_update_master(drm_enc, params);

	_sde_encoder_update_roi(drm_enc);

	/* give the connector a chance to program per-frame state */
	if (sde_enc->cur_master && sde_enc->cur_master->connector) {
		rc = sde_connector_pre_kickoff(sde_enc->cur_master->connector);
		if (rc)
			SDE_ERROR_ENC(sde_enc, "kickoff conn%d failed rc %d\n",
					sde_enc->cur_master->connector->base.id,
					rc);
	}

	if (sde_encoder_is_dsc_enabled(drm_enc)) {
		rc = _sde_encoder_dsc_setup(sde_enc, params);
		if (rc)
			SDE_ERROR_ENC(sde_enc, "failed to setup DSC: %d\n", rc);
	}
}
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002132
Alan Kwong628d19e2016-10-31 13:50:13 -04002133void sde_encoder_kickoff(struct drm_encoder *drm_enc)
2134{
2135 struct sde_encoder_virt *sde_enc;
2136 struct sde_encoder_phys *phys;
2137 unsigned int i;
2138
2139 if (!drm_enc) {
2140 SDE_ERROR("invalid encoder\n");
2141 return;
2142 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07002143 SDE_ATRACE_BEGIN("encoder_kickoff");
Alan Kwong628d19e2016-10-31 13:50:13 -04002144 sde_enc = to_sde_encoder_virt(drm_enc);
2145
2146 SDE_DEBUG_ENC(sde_enc, "\n");
2147
2148 atomic_set(&sde_enc->frame_done_timeout,
2149 SDE_ENCODER_FRAME_DONE_TIMEOUT * 1000 /
2150 drm_enc->crtc->state->adjusted_mode.vrefresh);
2151 mod_timer(&sde_enc->frame_done_timer, jiffies +
2152 ((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));
2153
2154 /* All phys encs are ready to go, trigger the kickoff */
Clarence Ip110d15c2016-08-16 14:44:41 -04002155 _sde_encoder_kickoff_phys(sde_enc);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002156
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002157 /* allow phys encs to handle any post-kickoff business */
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002158 for (i = 0; i < sde_enc->num_phys_encs; i++) {
Lloyd Atkinsonaa0dce92016-11-23 20:16:47 -05002159 phys = sde_enc->phys_encs[i];
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002160 if (phys && phys->ops.handle_post_kickoff)
2161 phys->ops.handle_post_kickoff(phys);
2162 }
Narendra Muppalla77b32932017-05-10 13:53:11 -07002163 SDE_ATRACE_END("encoder_kickoff");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002164}
2165
/*
 * sde_encoder_helper_hw_release - release the mixers/CTL owned by a
 * physical encoder: clear pending flush and all blend stages, flush each
 * layer mixer out, and re-enable border color only. When @fb is given,
 * a single mixer is resized to the framebuffer dimensions.
 * Returns 0 on success, -EINVAL on bad args, -EFAULT if no LM was found.
 *
 * NOTE(review): phys_enc->hw_ctl is dereferenced without a NULL check —
 * callers appear expected to guarantee it; confirm against call sites.
 */
int sde_encoder_helper_hw_release(struct sde_encoder_phys *phys_enc,
		struct drm_framebuffer *fb)
{
	struct drm_encoder *drm_enc;
	struct sde_hw_mixer_cfg mixer;
	struct sde_rm_hw_iter lm_iter;
	bool lm_valid = false;

	if (!phys_enc || !phys_enc->parent) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	drm_enc = phys_enc->parent;
	memset(&mixer, 0, sizeof(mixer));

	/* reset associated CTL/LMs */
	if (phys_enc->hw_ctl->ops.clear_pending_flush)
		phys_enc->hw_ctl->ops.clear_pending_flush(phys_enc->hw_ctl);
	if (phys_enc->hw_ctl->ops.clear_all_blendstages)
		phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);

	/* walk every layer mixer the RM reserved for this encoder */
	sde_rm_init_hw_iter(&lm_iter, drm_enc->base.id, SDE_HW_BLK_LM);
	while (sde_rm_get_hw(&phys_enc->sde_kms->rm, &lm_iter)) {
		struct sde_hw_mixer *hw_lm = (struct sde_hw_mixer *)lm_iter.hw;

		if (!hw_lm)
			continue;

		/* need to flush LM to remove it */
		if (phys_enc->hw_ctl->ops.get_bitmask_mixer &&
				phys_enc->hw_ctl->ops.update_pending_flush)
			phys_enc->hw_ctl->ops.update_pending_flush(
					phys_enc->hw_ctl,
					phys_enc->hw_ctl->ops.get_bitmask_mixer(
					phys_enc->hw_ctl, hw_lm->idx));

		if (fb) {
			/* assume a single LM if targeting a frame buffer */
			if (lm_valid)
				continue;

			mixer.out_height = fb->height;
			mixer.out_width = fb->width;

			if (hw_lm->ops.setup_mixer_out)
				hw_lm->ops.setup_mixer_out(hw_lm, &mixer);
		}

		lm_valid = true;

		/* only enable border color on LM */
		if (phys_enc->hw_ctl->ops.setup_blendstage)
			phys_enc->hw_ctl->ops.setup_blendstage(
					phys_enc->hw_ctl,
					hw_lm->idx, 0, 0);
	}

	if (!lm_valid) {
		SDE_DEBUG_ENC(to_sde_encoder_virt(drm_enc), "lm not found\n");
		return -EFAULT;
	}
	return 0;
}
2230
Lloyd Atkinsonc9fb3382017-03-24 08:08:30 -07002231#ifdef CONFIG_DEBUG_FS
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002232static int _sde_encoder_status_show(struct seq_file *s, void *data)
2233{
2234 struct sde_encoder_virt *sde_enc;
2235 int i;
2236
2237 if (!s || !s->private)
2238 return -EINVAL;
2239
2240 sde_enc = s->private;
2241
2242 mutex_lock(&sde_enc->enc_lock);
2243 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2244 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2245
2246 if (!phys)
2247 continue;
2248
2249 seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
2250 phys->intf_idx - INTF_0,
2251 atomic_read(&phys->vsync_cnt),
2252 atomic_read(&phys->underrun_cnt));
2253
2254 switch (phys->intf_mode) {
2255 case INTF_MODE_VIDEO:
2256 seq_puts(s, "mode: video\n");
2257 break;
2258 case INTF_MODE_CMD:
2259 seq_puts(s, "mode: command\n");
2260 break;
2261 case INTF_MODE_WB_BLOCK:
2262 seq_puts(s, "mode: wb block\n");
2263 break;
2264 case INTF_MODE_WB_LINE:
2265 seq_puts(s, "mode: wb line\n");
2266 break;
2267 default:
2268 seq_puts(s, "mode: ???\n");
2269 break;
2270 }
2271 }
2272 mutex_unlock(&sde_enc->enc_lock);
2273
2274 return 0;
2275}
2276
2277static int _sde_encoder_debugfs_status_open(struct inode *inode,
2278 struct file *file)
2279{
2280 return single_open(file, _sde_encoder_status_show, inode->i_private);
2281}
2282
/*
 * _sde_encoder_misr_setup - debugfs write handler for MISR configuration.
 * Parses "<enable> <frame_count>" from userspace, then programs MISR on
 * every physical encoder that supports it, with power enabled around the
 * register writes and enc_lock held across the update.
 * Returns @count on success or a negative errno.
 */
static ssize_t _sde_encoder_misr_setup(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	int i = 0, rc;
	char buf[MISR_BUFF_SIZE + 1];
	size_t buff_copy;
	u32 frame_count, enable;

	if (!file || !file->private_data)
		return -EINVAL;

	sde_enc = file->private_data;

	/* bound the copy so the NUL terminator below stays in range */
	buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
	if (copy_from_user(buf, user_buf, buff_copy))
		return -EINVAL;

	buf[buff_copy] = 0; /* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EINVAL;

	/* MISR registers need power on to be programmed */
	rc = _sde_encoder_power_enable(sde_enc, true);
	if (rc)
		return rc;

	mutex_lock(&sde_enc->enc_lock);
	sde_enc->misr_enable = enable;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->ops.setup_misr)
			continue;

		phys->ops.setup_misr(phys, enable, frame_count);
	}
	mutex_unlock(&sde_enc->enc_lock);
	_sde_encoder_power_enable(sde_enc, false);

	return count;
}
2325
Dhaval Patelf9245d62017-03-28 16:24:00 -07002326static ssize_t _sde_encoder_misr_read(struct file *file,
2327 char __user *user_buff, size_t count, loff_t *ppos)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302328{
2329 struct sde_encoder_virt *sde_enc;
Dhaval Patelf9245d62017-03-28 16:24:00 -07002330 int i = 0, len = 0;
2331 char buf[MISR_BUFF_SIZE + 1] = {'\0'};
2332 int rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302333
2334 if (*ppos)
2335 return 0;
2336
Dhaval Patelf9245d62017-03-28 16:24:00 -07002337 if (!file || !file->private_data)
2338 return -EINVAL;
2339
2340 sde_enc = file->private_data;
2341
2342 rc = _sde_encoder_power_enable(sde_enc, true);
2343 if (rc)
2344 return rc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302345
2346 mutex_lock(&sde_enc->enc_lock);
Dhaval Patelf9245d62017-03-28 16:24:00 -07002347 if (!sde_enc->misr_enable) {
2348 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2349 "disabled\n");
2350 goto buff_check;
2351 } else if (sde_enc->disp_info.capabilities &
2352 ~MSM_DISPLAY_CAP_VID_MODE) {
2353 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2354 "unsupported\n");
2355 goto buff_check;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302356 }
2357
Dhaval Patelf9245d62017-03-28 16:24:00 -07002358 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2359 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2360 if (!phys || !phys->ops.collect_misr)
2361 continue;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302362
Dhaval Patelf9245d62017-03-28 16:24:00 -07002363 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2364 "Intf idx:%d\n", phys->intf_idx - INTF_0);
2365 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
2366 phys->ops.collect_misr(phys));
2367 }
2368
2369buff_check:
2370 if (count <= len) {
2371 len = 0;
2372 goto end;
2373 }
2374
2375 if (copy_to_user(user_buff, buf, len)) {
2376 len = -EFAULT;
2377 goto end;
2378 }
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302379
2380 *ppos += len; /* increase offset */
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302381
Dhaval Patelf9245d62017-03-28 16:24:00 -07002382end:
2383 mutex_unlock(&sde_enc->enc_lock);
2384 _sde_encoder_power_enable(sde_enc, false);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302385 return len;
2386}
2387
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002388static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002389{
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002390 struct sde_encoder_virt *sde_enc;
2391 struct msm_drm_private *priv;
2392 struct sde_kms *sde_kms;
Alan Kwongf2debb02017-04-05 06:19:29 -07002393 int i;
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002394
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002395 static const struct file_operations debugfs_status_fops = {
2396 .open = _sde_encoder_debugfs_status_open,
2397 .read = seq_read,
2398 .llseek = seq_lseek,
2399 .release = single_release,
2400 };
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302401
2402 static const struct file_operations debugfs_misr_fops = {
2403 .open = simple_open,
2404 .read = _sde_encoder_misr_read,
Dhaval Patelf9245d62017-03-28 16:24:00 -07002405 .write = _sde_encoder_misr_setup,
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302406 };
2407
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002408 char name[SDE_NAME_SIZE];
2409
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002410 if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002411 SDE_ERROR("invalid encoder or kms\n");
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002412 return -EINVAL;
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002413 }
2414
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002415 sde_enc = to_sde_encoder_virt(drm_enc);
2416 priv = drm_enc->dev->dev_private;
2417 sde_kms = to_sde_kms(priv->kms);
2418
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002419 snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);
2420
2421 /* create overall sub-directory for the encoder */
2422 sde_enc->debugfs_root = debugfs_create_dir(name,
Lloyd Atkinson09e64bf2017-04-13 14:09:59 -07002423 drm_enc->dev->primary->debugfs_root);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002424 if (!sde_enc->debugfs_root)
2425 return -ENOMEM;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302426
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002427 /* don't error check these */
2428 debugfs_create_file("status", 0644,
2429 sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05302430
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002431 debugfs_create_file("misr_data", 0644,
Dhaval Patelf9245d62017-03-28 16:24:00 -07002432 sde_enc->debugfs_root, sde_enc, &debugfs_misr_fops);
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002433
Alan Kwongf2debb02017-04-05 06:19:29 -07002434 for (i = 0; i < sde_enc->num_phys_encs; i++)
2435 if (sde_enc->phys_encs[i] &&
2436 sde_enc->phys_encs[i]->ops.late_register)
2437 sde_enc->phys_encs[i]->ops.late_register(
2438 sde_enc->phys_encs[i],
2439 sde_enc->debugfs_root);
2440
Lloyd Atkinsonb020e0f2017-03-14 08:05:18 -07002441 return 0;
2442}
2443
2444static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
2445{
2446 struct sde_encoder_virt *sde_enc;
2447
2448 if (!drm_enc)
2449 return;
2450
2451 sde_enc = to_sde_encoder_virt(drm_enc);
2452 debugfs_remove_recursive(sde_enc->debugfs_root);
2453}
2454#else
/* debugfs stub used when CONFIG_DEBUG_FS is disabled: nothing to create */
static int _sde_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}
2459
/* debugfs stub used when CONFIG_DEBUG_FS is disabled: nothing to remove */
static void _sde_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
2463#endif
2464
/*
 * sde_encoder_late_register - drm_encoder_funcs.late_register hook
 * @encoder: encoder being registered
 *
 * Creates the encoder's debugfs entries once DRM registration completes.
 */
static int sde_encoder_late_register(struct drm_encoder *encoder)
{
	return _sde_encoder_init_debugfs(encoder);
}
2469
/*
 * sde_encoder_early_unregister - drm_encoder_funcs.early_unregister hook
 * @encoder: encoder being unregistered
 *
 * Removes the encoder's debugfs entries before DRM unregistration.
 */
static void sde_encoder_early_unregister(struct drm_encoder *encoder)
{
	_sde_encoder_destroy_debugfs(encoder);
}
2474
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002475static int sde_encoder_virt_add_phys_encs(
Clarence Ipa4039322016-07-15 16:23:59 -04002476 u32 display_caps,
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002477 struct sde_encoder_virt *sde_enc,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002478 struct sde_enc_phys_init_params *params)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002479{
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002480 struct sde_encoder_phys *enc = NULL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002481
Clarence Ip19af1362016-09-23 14:57:51 -04002482 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002483
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002484 /*
2485 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
2486 * in this function, check up-front.
2487 */
2488 if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
2489 ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04002490 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002491 sde_enc->num_phys_encs);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002492 return -EINVAL;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002493 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002494
Clarence Ipa4039322016-07-15 16:23:59 -04002495 if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002496 enc = sde_encoder_phys_vid_init(params);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002497
2498 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04002499 SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002500 PTR_ERR(enc));
2501 return enc == 0 ? -EINVAL : PTR_ERR(enc);
2502 }
2503
2504 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
2505 ++sde_enc->num_phys_encs;
2506 }
2507
Clarence Ipa4039322016-07-15 16:23:59 -04002508 if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002509 enc = sde_encoder_phys_cmd_init(params);
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04002510
2511 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04002512 SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
Lloyd Atkinsona59eead2016-05-30 14:37:06 -04002513 PTR_ERR(enc));
2514 return enc == 0 ? -EINVAL : PTR_ERR(enc);
2515 }
2516
2517 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
2518 ++sde_enc->num_phys_encs;
2519 }
2520
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002521 return 0;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002522}
2523
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002524static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
2525 struct sde_enc_phys_init_params *params)
Alan Kwongbb27c092016-07-20 16:41:25 -04002526{
2527 struct sde_encoder_phys *enc = NULL;
Alan Kwongbb27c092016-07-20 16:41:25 -04002528
Clarence Ip19af1362016-09-23 14:57:51 -04002529 if (!sde_enc) {
2530 SDE_ERROR("invalid encoder\n");
2531 return -EINVAL;
2532 }
2533
2534 SDE_DEBUG_ENC(sde_enc, "\n");
Alan Kwongbb27c092016-07-20 16:41:25 -04002535
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002536 if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
Clarence Ip19af1362016-09-23 14:57:51 -04002537 SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04002538 sde_enc->num_phys_encs);
2539 return -EINVAL;
2540 }
2541
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002542 enc = sde_encoder_phys_wb_init(params);
Alan Kwongbb27c092016-07-20 16:41:25 -04002543
2544 if (IS_ERR_OR_NULL(enc)) {
Clarence Ip19af1362016-09-23 14:57:51 -04002545 SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
Alan Kwongbb27c092016-07-20 16:41:25 -04002546 PTR_ERR(enc));
2547 return enc == 0 ? -EINVAL : PTR_ERR(enc);
2548 }
2549
2550 sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
2551 ++sde_enc->num_phys_encs;
2552
2553 return 0;
2554}
2555
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002556static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002557 struct sde_kms *sde_kms,
Clarence Ipa4039322016-07-15 16:23:59 -04002558 struct msm_display_info *disp_info,
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002559 int *drm_enc_mode)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002560{
2561 int ret = 0;
2562 int i = 0;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002563 enum sde_intf_type intf_type;
2564 struct sde_encoder_virt_ops parent_ops = {
2565 sde_encoder_vblank_callback,
Dhaval Patel81e87882016-10-19 21:41:56 -07002566 sde_encoder_underrun_callback,
Alan Kwong628d19e2016-10-31 13:50:13 -04002567 sde_encoder_frame_done_callback,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002568 };
2569 struct sde_enc_phys_init_params phys_params;
2570
Clarence Ip19af1362016-09-23 14:57:51 -04002571 if (!sde_enc || !sde_kms) {
2572 SDE_ERROR("invalid arg(s), enc %d kms %d\n",
2573 sde_enc != 0, sde_kms != 0);
2574 return -EINVAL;
2575 }
2576
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002577 memset(&phys_params, 0, sizeof(phys_params));
2578 phys_params.sde_kms = sde_kms;
2579 phys_params.parent = &sde_enc->base;
2580 phys_params.parent_ops = parent_ops;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002581 phys_params.enc_spinlock = &sde_enc->enc_spinlock;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002582
Clarence Ip19af1362016-09-23 14:57:51 -04002583 SDE_DEBUG("\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002584
Clarence Ipa4039322016-07-15 16:23:59 -04002585 if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002586 *drm_enc_mode = DRM_MODE_ENCODER_DSI;
2587 intf_type = INTF_DSI;
Clarence Ipa4039322016-07-15 16:23:59 -04002588 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002589 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
2590 intf_type = INTF_HDMI;
Padmanabhan Komanduru63758612017-05-23 01:47:18 -07002591 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
2592 *drm_enc_mode = DRM_MODE_ENCODER_TMDS;
2593 intf_type = INTF_DP;
Alan Kwongbb27c092016-07-20 16:41:25 -04002594 } else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
2595 *drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
2596 intf_type = INTF_WB;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002597 } else {
Clarence Ip19af1362016-09-23 14:57:51 -04002598 SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002599 return -EINVAL;
2600 }
2601
Clarence Ip88270a62016-06-26 10:09:34 -04002602 WARN_ON(disp_info->num_of_h_tiles < 1);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002603
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002604 sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
2605
Clarence Ip19af1362016-09-23 14:57:51 -04002606 SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002607
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08002608 phys_params.comp_type = disp_info->comp_info.comp_type;
2609
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002610 if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
2611 sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
2612
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002613 mutex_lock(&sde_enc->enc_lock);
Clarence Ip88270a62016-06-26 10:09:34 -04002614 for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002615 /*
2616 * Left-most tile is at index 0, content is controller id
2617 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
2618 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
2619 */
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002620 u32 controller_id = disp_info->h_tile_instance[i];
2621
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002622 if (disp_info->num_of_h_tiles > 1) {
2623 if (i == 0)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002624 phys_params.split_role = ENC_ROLE_MASTER;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002625 else
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002626 phys_params.split_role = ENC_ROLE_SLAVE;
2627 } else {
2628 phys_params.split_role = ENC_ROLE_SOLO;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002629 }
2630
Clarence Ip19af1362016-09-23 14:57:51 -04002631 SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002632 i, controller_id, phys_params.split_role);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002633
Alan Kwongbb27c092016-07-20 16:41:25 -04002634 if (intf_type == INTF_WB) {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002635 phys_params.intf_idx = INTF_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002636 phys_params.wb_idx = sde_encoder_get_wb(
2637 sde_kms->catalog,
Alan Kwongbb27c092016-07-20 16:41:25 -04002638 intf_type, controller_id);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002639 if (phys_params.wb_idx == WB_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04002640 SDE_ERROR_ENC(sde_enc,
2641 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002642 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04002643 ret = -EINVAL;
2644 }
Alan Kwongbb27c092016-07-20 16:41:25 -04002645 } else {
Lloyd Atkinson11f34442016-08-11 11:19:52 -04002646 phys_params.wb_idx = WB_MAX;
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002647 phys_params.intf_idx = sde_encoder_get_intf(
2648 sde_kms->catalog, intf_type,
2649 controller_id);
2650 if (phys_params.intf_idx == INTF_MAX) {
Clarence Ip19af1362016-09-23 14:57:51 -04002651 SDE_ERROR_ENC(sde_enc,
2652 "could not get wb: type %d, id %d\n",
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002653 intf_type, controller_id);
Alan Kwongbb27c092016-07-20 16:41:25 -04002654 ret = -EINVAL;
2655 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002656 }
2657
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002658 if (!ret) {
Alan Kwongbb27c092016-07-20 16:41:25 -04002659 if (intf_type == INTF_WB)
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002660 ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
2661 &phys_params);
Alan Kwongbb27c092016-07-20 16:41:25 -04002662 else
2663 ret = sde_encoder_virt_add_phys_encs(
2664 disp_info->capabilities,
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04002665 sde_enc,
2666 &phys_params);
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002667 if (ret)
Clarence Ip19af1362016-09-23 14:57:51 -04002668 SDE_ERROR_ENC(sde_enc,
2669 "failed to add phys encs\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002670 }
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002671 }
Dhaval Pateld4e583a2017-03-10 14:46:44 -08002672
2673 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2674 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2675
2676 if (phys) {
2677 atomic_set(&phys->vsync_cnt, 0);
2678 atomic_set(&phys->underrun_cnt, 0);
2679 }
2680 }
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002681 mutex_unlock(&sde_enc->enc_lock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002682
2683 return ret;
2684}
2685
/*
 * sde_encoder_frame_done_timeout - frame_done_timer expiry callback
 * @data: drm_encoder pointer cast to unsigned long (see setup_timer caller)
 *
 * Fires when a committed frame has not signalled frame-done in time.
 * Reports an error frame event to the registered crtc callback unless the
 * timeout is stale (no busy phys encoders, or the pending-timeout flag was
 * already cleared).
 */
static void sde_encoder_frame_done_timeout(unsigned long data)
{
	struct drm_encoder *drm_enc = (struct drm_encoder *) data;
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	struct msm_drm_private *priv;
	u32 event;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}
	/* NOTE(review): priv is assigned but not used in this function */
	priv = drm_enc->dev->dev_private;

	/* stale timeout: nothing in flight or no callback registered */
	if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
		SDE_DEBUG_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), sde_enc->frame_busy_mask[0], 0);
		return;
	} else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		/* flag already consumed: frame-done raced with the timer */
		SDE_ERROR_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), 0, 1);
		return;
	}

	SDE_ERROR_ENC(sde_enc, "frame done timeout\n");

	/* flag the error; also mark it if a disable is concurrently running */
	event = SDE_ENCODER_FRAME_EVENT_ERROR;
	if (sde_enc->disable_inprogress)
		event |= SDE_ENCODER_FRAME_EVENT_DURING_DISABLE;

	SDE_EVT32(DRMID(drm_enc), event);
	sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data, event);
}
2718
/* modeset helper hooks, attached via drm_encoder_helper_add() */
static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};
2725
/*
 * encoder lifetime hooks; late_register/early_unregister create and remove
 * the encoder's debugfs entries
 */
static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
	.late_register = sde_encoder_late_register,
	.early_unregister = sde_encoder_early_unregister,
};
2731
Clarence Ip3649f8b2016-10-31 09:59:44 -04002732struct drm_encoder *sde_encoder_init(
2733 struct drm_device *dev,
2734 struct msm_display_info *disp_info)
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002735{
2736 struct msm_drm_private *priv = dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -04002737 struct sde_kms *sde_kms = to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002738 struct drm_encoder *drm_enc = NULL;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04002739 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002740 int drm_enc_mode = DRM_MODE_ENCODER_NONE;
Dhaval Patel020f7e122016-11-15 14:39:18 -08002741 char name[SDE_NAME_SIZE];
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002742 int ret = 0;
2743
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002744 sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
2745 if (!sde_enc) {
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07002746 ret = -ENOMEM;
2747 goto fail;
2748 }
2749
Dhaval Patel22ef6df2016-10-20 14:42:52 -07002750 mutex_init(&sde_enc->enc_lock);
Lloyd Atkinson9a840312016-06-26 10:11:08 -04002751 ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
2752 &drm_enc_mode);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002753 if (ret)
2754 goto fail;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07002755
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002756 sde_enc->cur_master = NULL;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04002757 spin_lock_init(&sde_enc->enc_spinlock);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002758 drm_enc = &sde_enc->base;
Dhaval Patel04c7e8e2016-09-26 20:14:31 -07002759 drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002760 drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07002761
Alan Kwong628d19e2016-10-31 13:50:13 -04002762 atomic_set(&sde_enc->frame_done_timeout, 0);
2763 setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
2764 (unsigned long) sde_enc);
2765
Dhaval Patel020f7e122016-11-15 14:39:18 -08002766 snprintf(name, SDE_NAME_SIZE, "rsc_enc%u", drm_enc->base.id);
2767 sde_enc->rsc_client = sde_rsc_client_create(SDE_RSC_INDEX, name,
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08002768 disp_info->is_primary);
Dhaval Patel020f7e122016-11-15 14:39:18 -08002769 if (IS_ERR_OR_NULL(sde_enc->rsc_client)) {
Dhaval Patel49ef6d72017-03-26 09:35:53 -07002770 SDE_DEBUG("sde rsc client create failed :%ld\n",
Dhaval Patel020f7e122016-11-15 14:39:18 -08002771 PTR_ERR(sde_enc->rsc_client));
2772 sde_enc->rsc_client = NULL;
2773 }
Dhaval Patel82c8dbc2017-02-18 23:15:10 -08002774
Veera Sundaram Sankaranc9efbec2017-03-29 18:59:05 -07002775 mutex_init(&sde_enc->rc_lock);
2776 INIT_DELAYED_WORK(&sde_enc->delayed_off_work, sde_encoder_off_work);
2777
Dhaval Patel020f7e122016-11-15 14:39:18 -08002778 memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
2779
Clarence Ip19af1362016-09-23 14:57:51 -04002780 SDE_DEBUG_ENC(sde_enc, "created\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002781
2782 return drm_enc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07002783
2784fail:
Clarence Ip19af1362016-09-23 14:57:51 -04002785 SDE_ERROR("failed to create encoder\n");
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002786 if (drm_enc)
2787 sde_encoder_destroy(drm_enc);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07002788
2789 return ERR_PTR(ret);
2790}
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -04002791
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002792int sde_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002793{
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002794 struct sde_encoder_virt *sde_enc = NULL;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002795 int i, ret = 0;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002796
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002797 if (!drm_enc) {
Clarence Ip19af1362016-09-23 14:57:51 -04002798 SDE_ERROR("invalid encoder\n");
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002799 return -EINVAL;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002800 }
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002801 sde_enc = to_sde_encoder_virt(drm_enc);
Clarence Ip19af1362016-09-23 14:57:51 -04002802 SDE_DEBUG_ENC(sde_enc, "\n");
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002803
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002804 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2805 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04002806
Lloyd Atkinson5d722782016-05-30 14:09:41 -04002807 if (phys && phys->ops.wait_for_commit_done) {
2808 ret = phys->ops.wait_for_commit_done(phys);
2809 if (ret)
2810 return ret;
2811 }
2812 }
2813
2814 return ret;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -04002815}
2816
Alan Kwong67a3f792016-11-01 23:16:53 -04002817enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
2818{
2819 struct sde_encoder_virt *sde_enc = NULL;
2820 int i;
2821
2822 if (!encoder) {
2823 SDE_ERROR("invalid encoder\n");
2824 return INTF_MODE_NONE;
2825 }
2826 sde_enc = to_sde_encoder_virt(encoder);
2827
2828 if (sde_enc->cur_master)
2829 return sde_enc->cur_master->intf_mode;
2830
2831 for (i = 0; i < sde_enc->num_phys_encs; i++) {
2832 struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
2833
2834 if (phys)
2835 return phys->intf_mode;
2836 }
2837
2838 return INTF_MODE_NONE;
2839}