/*
 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/seq_file.h>

#include "msm_drv.h"
#include "sde_kms.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"

#include "sde_hwio.h"
#include "sde_hw_catalog.h"
#include "sde_hw_intf.h"
#include "sde_hw_ctl.h"
#include "sde_formats.h"
#include "sde_encoder_phys.h"
#include "sde_color_processing.h"

#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/* timeout in frames waiting for frame done */
#define SDE_ENCODER_FRAME_DONE_TIMEOUT	60

/*
 * Two, to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. A virtual encoder manages one "logical" display. Physical
 *	encoders each manage one intf block, tied to a specific panel/sub-panel.
 *	The virtual encoder defers as much as possible to the physical encoders,
 *	and registers itself with the DRM framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-encoder-wide spinlock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization:
 *			only valid after enable, cleared at disable.
 * @hw_pp:		Handles to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @crtc_kickoff_cb:		Callback into CRTC that will flush & start
 *				all CTL paths
 * @crtc_kickoff_cb_data:	Opaque user data given to crtc_kickoff_cb
 * @debugfs_root:		Debug file system root file node
 * @enc_lock:			Lock around physical encoder create/destroy and
 *				access.
 * @frame_busy_mask:		Bitmask tracking which phys_encs are still busy
 *				processing the current command.
 *				Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @crtc_frame_event:		callback event
 * @frame_done_timeout:		frame done timeout in ms
 * @frame_done_timer:		watchdog timer for frame done event
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;
	u32 crtc_frame_event;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
};

#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)

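/*
 * Aggregate the hardware resources required by every physical encoder into
 * @hw_res for the whole virtual (logical) display.
 */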
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}
}

void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	debugfs_remove_recursive(sde_enc->debugfs_root);
	mutex_destroy(&sde_enc->enc_lock);

	kfree(sde_enc);
}

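/*
 * Program the split-pipe / pingpong-split configuration in the MDP TOP block:
 * the master (or solo) encoder sets up the split pipe, while a slave encoder
 * only programs its pingpong-split index.
 */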
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	cfg.en = phys_enc->split_role != ENC_ROLE_SOLO;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role != ENC_ROLE_SLAVE) {
		/* master/solo encoder */
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}

static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	SDE_EVT32(DRMID(drm_enc));

	/* perform atomic check on each physical encoder, stop on first error */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/* Reserve dynamic resources now. Indicating AtomicTest phase */
	if (!ret)
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
				conn_state, true);

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}

static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_rm_hw_iter pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}
}

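/*
 * Enable path: power up, identify the current master while enabling every
 * other physical encoder, then enable the master last.
 */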
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	sde_power_resource_enable(&priv->phandle, sde_kms->core_client, true);

	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			atomic_set(&phys->vsync_cnt, 0);
			atomic_set(&phys->underrun_cnt, 0);

			if (phys->ops.is_master && phys->ops.is_master(phys)) {
				SDE_DEBUG_ENC(sde_enc,
						"master is now idx %d\n", i);
				sde_enc->cur_master = phys;
			} else if (phys->ops.enable) {
				phys->ops.enable(phys);
			}
		}
	}

	if (!sde_enc->cur_master)
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);
}

static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_EVT32(DRMID(drm_enc));

	if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&sde_enc->frame_done_timer);
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (phys->ops.disable && !phys->ops.is_master(phys))
				phys->ops.disable(phys);
			phys->connector = NULL;
		}
	}

	if (sde_enc->cur_master && sde_enc->cur_master->ops.disable)
		sde_enc->cur_master->ops.disable(sde_enc->cur_master);

	sde_enc->cur_master = NULL;
	SDE_DEBUG_ENC(sde_enc, "cleared master\n");

	sde_rm_release(&sde_kms->rm, drm_enc);

	sde_power_resource_enable(&priv->phandle, sde_kms->core_client, false);
}

static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};

static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
};

static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
		enum sde_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
			&& catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
		enum sde_intf_type type, u32 controller_id)
{
	if (controller_id < catalog->wb_count)
		return catalog->wb[controller_id].id;

	return WB_MAX;
}

static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
}

static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
}

void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
		void (*vbl_cb)(void *), void *vbl_data)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;
	int i;

	enable = vbl_cb ? true : false;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), enable);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	sde_enc->crtc_vblank_cb = vbl_cb;
	sde_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;

	enable = frame_event_cb ? true : false;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), enable, 0);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	sde_enc->crtc_frame_event_cb = frame_event_cb;
	sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}

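/*
 * A physical encoder has signalled frame done: clear its bit in
 * frame_busy_mask and, once every physical encoder is idle, stop the
 * frame-done watchdog and forward the accumulated event flags to the CRTC.
 */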
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	/* One of the physical encoders has become idle */
	for (i = 0; i < sde_enc->num_phys_encs; i++)
		if (sde_enc->phys_encs[i] == ready_phys) {
			clear_bit(i, sde_enc->frame_busy_mask);
			sde_enc->crtc_frame_event |= event;
			SDE_EVT32(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
		}

	if (!sde_enc->frame_busy_mask[0]) {
		atomic_set(&sde_enc->frame_done_timeout, 0);
		del_timer(&sde_enc->frame_done_timer);

		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					sde_enc->crtc_frame_event);
	}
}

/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		SDE_ERROR("missing trigger cb\n");
		return;
	}

	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);
	SDE_EVT32(DRMID(&to_sde_encoder_virt(drm_enc)->base),
			phys->intf_idx, pending_kickoff_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);
	SDE_EVT32(DRMID(drm_enc), ctl->idx);
}

/**
 * _sde_encoder_trigger_start - trigger start for a physical encoder
 * phys: Pointer to physical encoder structure
 */
static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
{
	if (!phys) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
		phys->ops.trigger_start(phys);
}

void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
{
	struct sde_hw_ctl *ctl;
	int ctl_idx = -1;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	ctl = phys_enc->hw_ctl;
	if (ctl && ctl->ops.trigger_start) {
		ctl->ops.trigger_start(ctl);
		ctl_idx = ctl->idx;
	}

	if (phys_enc && phys_enc->parent)
		SDE_EVT32(DRMID(phys_enc->parent), ctl_idx);
}

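/*
 * Wait for an atomic counter to drop to zero. If wait_event_timeout() fires
 * before the expected wall-clock deadline has actually passed while the
 * counter is still nonzero, the wait is re-armed. The return value follows
 * wait_event_timeout() semantics (0 on timeout, remaining jiffies otherwise).
 */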
int sde_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		int32_t hw_id,
		wait_queue_head_t *wq,
		atomic_t *cnt,
		s64 timeout_ms)
{
	int rc = 0;
	s64 expected_time = ktime_to_ms(ktime_get()) + timeout_ms;
	s64 jiffies = msecs_to_jiffies(timeout_ms);
	s64 time;

	do {
		rc = wait_event_timeout(*wq, atomic_read(cnt) == 0, jiffies);
		time = ktime_to_ms(ktime_get());

		SDE_EVT32(drm_id, hw_id, rc, time, expected_time,
				atomic_read(cnt));
		/*
		 * if we timed out but the counter is still pending and the
		 * wall-clock deadline has not passed yet, wait again
		 */
	} while (atomic_read(cnt) && (rc == 0) && (time < expected_time));

	return rc;
}

/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 * Iterate through the physical encoders and perform consolidated flush
 * and/or control start triggering as needed. This is done in the virtual
 * encoder rather than the individual physical ones in order to handle
 * use cases that require visibility into multiple physical encoders at
 * a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;
	sde_enc->crtc_frame_event = 0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		set_bit(i, sde_enc->frame_busy_mask);

		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}

void sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.prepare_for_kickoff)
			phys->ops.prepare_for_kickoff(phys);
	}
}

void sde_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

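	/*
	 * SDE_ENCODER_FRAME_DONE_TIMEOUT is a budget in frames; convert it to
	 * milliseconds using the current refresh rate before arming the
	 * frame-done watchdog timer below.
	 */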
	atomic_set(&sde_enc->frame_done_timeout,
			SDE_ENCODER_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&sde_enc->frame_done_timer, jiffies +
		((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}
}

static int _sde_encoder_status_show(struct seq_file *s, void *data)
{
	struct sde_encoder_virt *sde_enc;
	int i;

	if (!s || !s->private)
		return -EINVAL;

	sde_enc = s->private;

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
				phys->intf_idx - INTF_0,
				atomic_read(&phys->vsync_cnt),
				atomic_read(&phys->underrun_cnt));

		switch (phys->intf_mode) {
		case INTF_MODE_VIDEO:
			seq_puts(s, "mode: video\n");
			break;
		case INTF_MODE_CMD:
			seq_puts(s, "mode: command\n");
			break;
		case INTF_MODE_WB_BLOCK:
			seq_puts(s, "mode: wb block\n");
			break;
		case INTF_MODE_WB_LINE:
			seq_puts(s, "mode: wb line\n");
			break;
		default:
			seq_puts(s, "mode: ???\n");
			break;
		}
	}
	mutex_unlock(&sde_enc->enc_lock);

	return 0;
}

static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}

static void _sde_set_misr_params(struct sde_encoder_phys *phys, u32 enable,
		u32 frame_count)
{
	int j;

	if (!phys->misr_map)
		return;

	phys->misr_map->enable = enable;

	/* clamp the requested frame count to the CRC batch size */
	if (frame_count <= 0)
		phys->misr_map->frame_count = 0;
	else if (frame_count <= SDE_CRC_BATCH_SIZE)
		phys->misr_map->frame_count = frame_count;
	else
		phys->misr_map->frame_count = SDE_CRC_BATCH_SIZE;

	if (!enable) {
		phys->misr_map->last_idx = 0;
		phys->misr_map->frame_count = 0;
		for (j = 0; j < SDE_CRC_BATCH_SIZE; j++)
			phys->misr_map->crc_value[j] = 0;
	}
}

static ssize_t _sde_encoder_misr_set(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_encoder *drm_enc;
	int i = 0;
	char buf[10];
	u32 enable, frame_count;

	drm_enc = file->private_data;
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* leave room for the terminating NUL so buf cannot overflow */
	if (count >= sizeof(buf))
		return -EFAULT;

	if (copy_from_user(buf, user_buf, count))
		return -EFAULT;

	buf[count] = 0;	/* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EFAULT;

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->misr_map || !phys->ops.setup_misr)
			continue;

		_sde_set_misr_params(phys, enable, frame_count);
		phys->ops.setup_misr(phys, phys->misr_map);
	}
	mutex_unlock(&sde_enc->enc_lock);
	return count;
}

static ssize_t _sde_encoder_misr_read(
		struct file *file,
		char __user *buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_encoder *drm_enc;
	int i = 0, j = 0, len = 0;
	char buf[512] = {'\0'};

	if (*ppos)
		return 0;

	drm_enc = file->private_data;
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* snapshot the CRC values under the lock, then drop it before the
	 * user-space copy so early returns cannot leak the mutex
	 */
	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		struct sde_misr_params *misr_map;

		if (!phys || !phys->misr_map)
			continue;

		misr_map = phys->misr_map;

		len += snprintf(buf + len, sizeof(buf) - len, "INTF%d\n", i);
		for (j = 0; j < SDE_CRC_BATCH_SIZE; j++)
			len += snprintf(buf + len, sizeof(buf) - len, "%x\n",
					misr_map->crc_value[j]);
	}
	mutex_unlock(&sde_enc->enc_lock);

	if (len < 0 || len >= sizeof(buf))
		return 0;

	if ((count < sizeof(buf)) || copy_to_user(buff, buf, len))
		return -EFAULT;

	*ppos += len;	/* increase offset */

	return len;
}

static void _sde_encoder_init_debugfs(struct drm_encoder *drm_enc,
		struct sde_encoder_virt *sde_enc, struct sde_kms *sde_kms)
{
	static const struct file_operations debugfs_status_fops = {
		.open =		_sde_encoder_debugfs_status_open,
		.read =		seq_read,
		.llseek =	seq_lseek,
		.release =	single_release,
	};

	static const struct file_operations debugfs_misr_fops = {
		.open = simple_open,
		.read = _sde_encoder_misr_read,
		.write = _sde_encoder_misr_set,
	};

	char name[SDE_NAME_SIZE];

	if (!drm_enc || !sde_enc || !sde_kms) {
		SDE_ERROR("invalid encoder or kms\n");
		return;
	}

	snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);

	/* create overall sub-directory for the encoder */
	sde_enc->debugfs_root = debugfs_create_dir(name,
			sde_debugfs_get_root(sde_kms));
	if (sde_enc->debugfs_root) {
		/* don't error check these */
		debugfs_create_file("status", 0644,
			sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);

		debugfs_create_file("misr_data", 0644,
			sde_enc->debugfs_root, drm_enc, &debugfs_misr_fops);
	}
}

static int sde_encoder_virt_add_phys_encs(
		u32 display_caps,
		struct sde_encoder_virt *sde_enc,
		struct sde_enc_phys_init_params *params)
{
	struct sde_encoder_phys *enc = NULL;

	SDE_DEBUG_ENC(sde_enc, "\n");

	/*
	 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
	 * in this function, check up-front.
	 */
	if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
			ARRAY_SIZE(sde_enc->phys_encs)) {
		SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
			  sde_enc->num_phys_encs);
		return -EINVAL;
	}

	if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
		enc = sde_encoder_phys_vid_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
		++sde_enc->num_phys_encs;
	}

	if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
		enc = sde_encoder_phys_cmd_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
		++sde_enc->num_phys_encs;
	}

	return 0;
}

static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
		struct sde_enc_phys_init_params *params)
{
	struct sde_encoder_phys *enc = NULL;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	SDE_DEBUG_ENC(sde_enc, "\n");

	if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
		SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
			  sde_enc->num_phys_encs);
		return -EINVAL;
	}

	enc = sde_encoder_phys_wb_init(params);

	if (IS_ERR_OR_NULL(enc)) {
		SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
			PTR_ERR(enc));
		return enc == 0 ? -EINVAL : PTR_ERR(enc);
	}

	sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
	++sde_enc->num_phys_encs;

	return 0;
}

static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
		struct sde_kms *sde_kms,
		struct msm_display_info *disp_info,
		int *drm_enc_mode)
{
	int ret = 0;
	int i = 0;
	enum sde_intf_type intf_type;
	struct sde_encoder_virt_ops parent_ops = {
		sde_encoder_vblank_callback,
		sde_encoder_underrun_callback,
		sde_encoder_frame_done_callback,
	};
	struct sde_enc_phys_init_params phys_params;

	if (!sde_enc || !sde_kms) {
		SDE_ERROR("invalid arg(s), enc %d kms %d\n",
				sde_enc != 0, sde_kms != 0);
		return -EINVAL;
	}

	memset(&phys_params, 0, sizeof(phys_params));
	phys_params.sde_kms = sde_kms;
	phys_params.parent = &sde_enc->base;
	phys_params.parent_ops = parent_ops;
	phys_params.enc_spinlock = &sde_enc->enc_spinlock;

	SDE_DEBUG("\n");

	if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
		*drm_enc_mode = DRM_MODE_ENCODER_DSI;
		intf_type = INTF_DSI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_HDMI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
		*drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
		intf_type = INTF_WB;
	} else {
		SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
		return -EINVAL;
	}

	WARN_ON(disp_info->num_of_h_tiles < 1);

	sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;

	SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
		/*
		 * Left-most tile is at index 0, content is controller id
		 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
		 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
		 */
		u32 controller_id = disp_info->h_tile_instance[i];

		if (disp_info->num_of_h_tiles > 1) {
			if (i == 0)
				phys_params.split_role = ENC_ROLE_MASTER;
			else
				phys_params.split_role = ENC_ROLE_SLAVE;
		} else {
			phys_params.split_role = ENC_ROLE_SOLO;
		}

		SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
				i, controller_id, phys_params.split_role);

		if (intf_type == INTF_WB) {
			phys_params.intf_idx = INTF_MAX;
			phys_params.wb_idx = sde_encoder_get_wb(
					sde_kms->catalog,
					intf_type, controller_id);
			if (phys_params.wb_idx == WB_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get wb: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		} else {
			phys_params.wb_idx = WB_MAX;
			phys_params.intf_idx = sde_encoder_get_intf(
					sde_kms->catalog, intf_type,
					controller_id);
			if (phys_params.intf_idx == INTF_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get intf: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		}

		if (!ret) {
			if (intf_type == INTF_WB)
				ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
						&phys_params);
			else
				ret = sde_encoder_virt_add_phys_encs(
						disp_info->capabilities,
						sde_enc,
						&phys_params);
			if (ret)
				SDE_ERROR_ENC(sde_enc,
						"failed to add phys encs\n");
		}
	}
	mutex_unlock(&sde_enc->enc_lock);

	return ret;
}

static void sde_encoder_frame_done_timeout(unsigned long data)
{
	struct drm_encoder *drm_enc = (struct drm_encoder *) data;
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	struct msm_drm_private *priv;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}
	priv = drm_enc->dev->dev_private;

	if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
		SDE_DEBUG("enc%d invalid timeout\n", drm_enc->base.id);
		SDE_EVT32(DRMID(drm_enc),
				sde_enc->frame_busy_mask[0], 0);
		return;
	} else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR("enc%d invalid timeout\n", drm_enc->base.id);
		SDE_EVT32(DRMID(drm_enc), 0, 1);
		return;
	}

	SDE_EVT32(DRMID(drm_enc), 0, 2);
	sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data,
			SDE_ENCODER_FRAME_EVENT_ERROR);
}

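/*
 * Allocate and register the virtual encoder with the DRM core, create the
 * physical encoders described by @disp_info, and set up the frame-done
 * watchdog timer and debugfs entries.
 */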
struct drm_encoder *sde_encoder_init(
		struct drm_device *dev,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct sde_kms *sde_kms = to_sde_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct sde_encoder_virt *sde_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	int ret = 0;

	sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
	if (!sde_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	mutex_init(&sde_enc->enc_lock);
	ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->enc_spinlock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);

	atomic_set(&sde_enc->frame_done_timeout, 0);
	setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
			(unsigned long) sde_enc);

	_sde_encoder_init_debugfs(drm_enc, sde_enc, sde_kms);

	SDE_DEBUG_ENC(sde_enc, "created\n");

	return drm_enc;

fail:
	SDE_ERROR("failed to create encoder\n");
	if (drm_enc)
		sde_encoder_destroy(drm_enc);

	return ERR_PTR(ret);
}

int sde_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.wait_for_commit_done) {
			ret = phys->ops.wait_for_commit_done(phys);
			if (ret)
				return ret;
		}

		if (phys && phys->ops.collect_misr)
			if (phys->misr_map && phys->misr_map->enable)
				phys->ops.collect_misr(phys, phys->misr_map);
	}

	return ret;
}

enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i;

	if (!encoder) {
		SDE_ERROR("invalid encoder\n");
		return INTF_MODE_NONE;
	}
	sde_enc = to_sde_encoder_virt(encoder);

	if (sde_enc->cur_master)
		return sde_enc->cur_master->intf_mode;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys)
			return phys->intf_mode;
	}

	return INTF_MODE_NONE;
}