/*
 * Copyright (c) 2014-2017, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/seq_file.h>

#include "msm_drv.h"
#include "sde_kms.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"

#include "sde_hwio.h"
#include "sde_hw_catalog.h"
#include "sde_hw_intf.h"
#include "sde_hw_ctl.h"
#include "sde_formats.h"
#include "sde_encoder_phys.h"
#include "sde_color_processing.h"

#define SDE_DEBUG_ENC(e, fmt, ...) SDE_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define SDE_ERROR_ENC(e, fmt, ...) SDE_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/* timeout in frames waiting for frame done */
#define SDE_ENCODER_FRAME_DONE_TIMEOUT	60

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

/**
 * struct sde_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-encoder-wide spinlock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: Number of horizontal tiles of the driven display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization:
 *			only valid after enable. Cleared at disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @debugfs_root:		Debug file system root file node
 * @enc_lock:			Lock around physical encoder create/destroy and
 *				access.
 * @frame_busy_mask:		Bitmask tracking which phys_encs are still busy
 *				processing the current command.
 *				Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @crtc_frame_event:		callback event
 * @frame_done_timeout:		frame done timeout in milliseconds
 * @frame_done_timer:		watchdog timer for frame done event
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct sde_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct sde_encoder_phys *cur_master;
	struct sde_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;
	u32 crtc_frame_event;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
};

#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)

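/**
 * sde_encoder_get_hw_resources - query hardware resources in use
 * @drm_enc:	Pointer to drm encoder structure
 * @hw_res:	Resource table to populate; zeroed before the query
 * @conn_state:	Connector state associated with this encoder
 *
 * Clears @hw_res and lets each physical encoder report the resources it
 * occupies; the results are expected not to overlap.
 */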
void sde_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		SDE_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = sde_enc->display_num_of_h_tiles;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}
}

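/**
 * sde_encoder_destroy - destroy a virtual encoder and its physical encoders
 * @drm_enc:	Pointer to drm encoder structure
 *
 * Tears down every physical encoder, removes the debugfs nodes and frees
 * the virtual encoder allocated in sde_encoder_init().
 */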
void sde_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--sde_enc->num_phys_encs;
			sde_enc->phys_encs[i] = NULL;
		}
	}

	if (sde_enc->num_phys_encs)
		SDE_ERROR_ENC(sde_enc, "expected 0 num_phys_encs not %d\n",
				sde_enc->num_phys_encs);
	sde_enc->num_phys_encs = 0;
	mutex_unlock(&sde_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	debugfs_remove_recursive(sde_enc->debugfs_root);
	mutex_destroy(&sde_enc->enc_lock);

	kfree(sde_enc);
}

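/**
 * sde_encoder_helper_split_config - split display configuration helper
 * @phys_enc:	Pointer to physical encoder structure
 * @interface:	Interface block this encoder drives
 *
 * Programs the MDP top block for split-pipe or pingpong-split operation
 * based on the encoder's split role and the connector topology.
 */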
void sde_encoder_helper_split_config(
		struct sde_encoder_phys *phys_enc,
		enum sde_intf interface)
{
	struct sde_encoder_virt *sde_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct sde_hw_mdp *hw_mdptop;
	enum sde_rm_topology_name topology;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		SDE_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	cfg.en = phys_enc->split_role != ENC_ROLE_SOLO;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	topology = sde_connector_get_topology_name(phys_enc->connector);
	if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
		cfg.pp_split_slave = cfg.intf;
	else
		cfg.pp_split_slave = INTF_MAX;

	if (phys_enc->split_role != ENC_ROLE_SLAVE) {
		/* master/solo encoder */
		SDE_DEBUG_ENC(sde_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	} else {
		/*
		 * slave encoder
		 * - determine split index from master index,
		 *   assume master is first pp
		 */
		cfg.pp_split_index = sde_enc->hw_pp[0]->idx - PINGPONG_0;
		SDE_DEBUG_ENC(sde_enc, "master using pp%d\n",
				cfg.pp_split_index);

		if (hw_mdptop->ops.setup_pp_split)
			hw_mdptop->ops.setup_pp_split(hw_mdptop, &cfg);
	}
}

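/**
 * sde_encoder_virt_atomic_check - drm atomic_check callback for the encoder
 * @drm_enc:	Pointer to drm encoder structure
 * @crtc_state:	New crtc state being validated
 * @conn_state:	New connector state being validated
 *
 * Delegates mode validation to the physical encoders and test-reserves the
 * dynamic hardware resources needed for the requested mode.
 */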
static int sde_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		SDE_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	SDE_EVT32(DRMID(drm_enc));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			SDE_ERROR_ENC(sde_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	/* Reserve dynamic resources now. Indicating AtomicTest phase */
	if (!ret)
		ret = sde_rm_reserve(&sde_kms->rm, drm_enc, crtc_state,
				conn_state, true);

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	SDE_EVT32(DRMID(drm_enc), adj_mode->flags, adj_mode->private_flags);

	return ret;
}

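/**
 * sde_encoder_virt_mode_set - drm mode_set callback for the virtual encoder
 * @drm_enc:	Pointer to drm encoder structure
 * @mode:	Requested display mode
 * @adj_mode:	Adjusted display mode to program
 *
 * Reserves hardware resources for the committed mode, caches the assigned
 * pingpong blocks and forwards the mode to each physical encoder.
 */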
static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct sde_rm_hw_iter pp_iter;
	int i = 0, ret;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);
	connector_list = &sde_kms->dev->mode_config.connector_list;

	SDE_EVT32(DRMID(drm_enc));

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		SDE_ERROR_ENC(sde_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		SDE_ERROR_ENC(sde_enc, "invalid connector state\n");
		return;
	}

	/* Reserve dynamic resources now. Indicating non-AtomicTest phase */
	ret = sde_rm_reserve(&sde_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, false);
	if (ret) {
		SDE_ERROR_ENC(sde_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	sde_rm_init_hw_iter(&pp_iter, drm_enc->base.id, SDE_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		sde_enc->hw_pp[i] = NULL;
		if (!sde_rm_get_hw(&sde_kms->rm, &pp_iter))
			break;
		sde_enc->hw_pp[i] = (struct sde_hw_pingpong *) pp_iter.hw;
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (!sde_enc->hw_pp[i]) {
				SDE_ERROR_ENC(sde_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = sde_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}
}

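/**
 * sde_encoder_virt_enable - drm enable callback for the virtual encoder
 * @drm_enc:	Pointer to drm encoder structure
 *
 * Votes for power resources, picks the master physical encoder and enables
 * the slave encoders first, then the master.
 */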
static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	sde_power_resource_enable(&priv->phandle, sde_kms->core_client, true);

	sde_enc->cur_master = NULL;
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			atomic_set(&phys->vsync_cnt, 0);
			atomic_set(&phys->underrun_cnt, 0);

			if (phys->ops.is_master && phys->ops.is_master(phys)) {
				SDE_DEBUG_ENC(sde_enc,
						"master is now idx %d\n", i);
				sde_enc->cur_master = phys;
			} else if (phys->ops.enable) {
				phys->ops.enable(phys);
			}
		}
	}

	if (!sde_enc->cur_master)
		SDE_ERROR("virt encoder has no master! num_phys %d\n", i);
	else if (sde_enc->cur_master->ops.enable)
		sde_enc->cur_master->ops.enable(sde_enc->cur_master);
}

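/**
 * sde_encoder_virt_disable - drm disable callback for the virtual encoder
 * @drm_enc:	Pointer to drm encoder structure
 *
 * Cancels any pending frame done timeout, disables the slave encoders and
 * then the master, releases reserved hardware and drops the power vote.
 */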
static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	struct msm_drm_private *priv;
	struct sde_kms *sde_kms;
	int i = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		SDE_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		SDE_ERROR("invalid dev_private\n");
		return;
	}

	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	priv = drm_enc->dev->dev_private;
	sde_kms = to_sde_kms(priv->kms);

	SDE_EVT32(DRMID(drm_enc));

	if (atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&sde_enc->frame_done_timer);
	}

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys) {
			if (phys->ops.disable && !phys->ops.is_master(phys))
				phys->ops.disable(phys);
			phys->connector = NULL;
		}
	}

	if (sde_enc->cur_master && sde_enc->cur_master->ops.disable)
		sde_enc->cur_master->ops.disable(sde_enc->cur_master);

	sde_enc->cur_master = NULL;
	SDE_DEBUG_ENC(sde_enc, "cleared master\n");

	sde_rm_release(&sde_kms->rm, drm_enc);

	sde_power_resource_enable(&priv->phandle, sde_kms->core_client, false);
}

static const struct drm_encoder_helper_funcs sde_encoder_helper_funcs = {
	.mode_set = sde_encoder_virt_mode_set,
	.disable = sde_encoder_virt_disable,
	.enable = sde_encoder_virt_enable,
	.atomic_check = sde_encoder_virt_atomic_check,
};

static const struct drm_encoder_funcs sde_encoder_funcs = {
	.destroy = sde_encoder_destroy,
};

static enum sde_intf sde_encoder_get_intf(struct sde_mdss_cfg *catalog,
		enum sde_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static enum sde_wb sde_encoder_get_wb(struct sde_mdss_cfg *catalog,
		enum sde_intf_type type, u32 controller_id)
{
	if (controller_id < catalog->wb_count)
		return catalog->wb[controller_id].id;

	return WB_MAX;
}

static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
}

static void sde_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	atomic_inc(&phy_enc->underrun_cnt);
	SDE_EVT32(DRMID(drm_enc), atomic_read(&phy_enc->underrun_cnt));
}

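/**
 * sde_encoder_register_vblank_callback - register upper layer vblank callback
 * @drm_enc:	Pointer to drm encoder structure
 * @vbl_cb:	Callback to invoke on vblank, or NULL to disable notification
 * @vbl_data:	Opaque pointer passed back to @vbl_cb
 *
 * Updates the callback under the encoder spinlock and enables or disables
 * the vblank interrupt on every physical encoder accordingly.
 */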
void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
		void (*vbl_cb)(void *), void *vbl_data)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;
	int i;

	enable = vbl_cb ? true : false;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), enable);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	sde_enc->crtc_vblank_cb = vbl_cb;
	sde_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

void sde_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;

	enable = frame_event_cb ? true : false;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), enable, 0);

	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	sde_enc->crtc_frame_event_cb = frame_event_cb;
	sde_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}

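/**
 * sde_encoder_frame_done_callback - physical encoder frame done notification
 * @drm_enc:	Pointer to drm encoder structure
 * @ready_phys:	Physical encoder that completed its frame
 * @event:	Frame event flags reported by the physical encoder
 *
 * Clears the busy bit for @ready_phys; once all physical encoders are idle,
 * stops the frame done watchdog and notifies the CRTC.
 */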
static void sde_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct sde_encoder_phys *ready_phys, u32 event)
{
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	unsigned int i;

	/* One of the physical encoders has become idle */
	for (i = 0; i < sde_enc->num_phys_encs; i++)
		if (sde_enc->phys_encs[i] == ready_phys) {
			clear_bit(i, sde_enc->frame_busy_mask);
			sde_enc->crtc_frame_event |= event;
			SDE_EVT32(DRMID(drm_enc), i,
					sde_enc->frame_busy_mask[0]);
		}

	if (!sde_enc->frame_busy_mask[0]) {
		atomic_set(&sde_enc->frame_done_timeout, 0);
		del_timer(&sde_enc->frame_done_timer);

		if (sde_enc->crtc_frame_event_cb)
			sde_enc->crtc_frame_event_cb(
					sde_enc->crtc_frame_event_cb_data,
					sde_enc->crtc_frame_event);
	}
}

/**
 * _sde_encoder_trigger_flush - trigger flush for a physical encoder
 * drm_enc: Pointer to drm encoder structure
 * phys: Pointer to physical encoder structure
 * extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _sde_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct sde_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct sde_hw_ctl *ctl;
	int pending_kickoff_cnt;

	if (!drm_enc || !phys) {
		SDE_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		SDE_ERROR("missing trigger cb\n");
		return;
	}

	pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);
	SDE_EVT32(DRMID(&to_sde_encoder_virt(drm_enc)->base),
			phys->intf_idx, pending_kickoff_cnt);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);
	SDE_EVT32(DRMID(drm_enc), ctl->idx);
}

/**
 * _sde_encoder_trigger_start - trigger start for a physical encoder
 * phys: Pointer to physical encoder structure
 */
static inline void _sde_encoder_trigger_start(struct sde_encoder_phys *phys)
{
	if (!phys) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	if (phys->ops.trigger_start && phys->enable_state != SDE_ENC_DISABLED)
		phys->ops.trigger_start(phys);
}

void sde_encoder_helper_trigger_start(struct sde_encoder_phys *phys_enc)
{
	struct sde_hw_ctl *ctl;
	int ctl_idx = -1;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	ctl = phys_enc->hw_ctl;
	if (ctl && ctl->ops.trigger_start) {
		ctl->ops.trigger_start(ctl);
		ctl_idx = ctl->idx;
	}

	if (phys_enc && phys_enc->parent)
		SDE_EVT32(DRMID(phys_enc->parent), ctl_idx);
}

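/**
 * sde_encoder_helper_wait_event_timeout - wait for an atomic counter to clear
 * @drm_id:	drm object id, used only for event logging
 * @hw_id:	hardware block id, used only for event logging
 * @wq:		Wait queue to sleep on
 * @cnt:	Atomic counter polled for zero
 * @timeout_ms:	Overall timeout in milliseconds
 *
 * Repeats the wait if it timed out while @cnt is still non-zero and the full
 * wall-clock timeout has not yet elapsed. Returns the wait_event_timeout()
 * result of the last iteration.
 */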
int sde_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		int32_t hw_id,
		wait_queue_head_t *wq,
		atomic_t *cnt,
		s64 timeout_ms)
{
	int rc = 0;
	s64 expected_time = ktime_to_ms(ktime_get()) + timeout_ms;
	s64 jiffies = msecs_to_jiffies(timeout_ms);
	s64 time;

	do {
		rc = wait_event_timeout(*wq, atomic_read(cnt) == 0, jiffies);
		time = ktime_to_ms(ktime_get());

		SDE_EVT32(drm_id, hw_id, rc, time, expected_time,
				atomic_read(cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(cnt) && (rc == 0) && (time < expected_time));

	return rc;
}

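/**
 * sde_encoder_helper_hw_reset - hard reset the ctl path of a physical encoder
 * @phys_enc:	Pointer to physical encoder structure
 *
 * Issues a connector soft reset on the master encoder when available, then
 * resets the ctl block and marks the physical encoder enabled again.
 */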
void sde_encoder_helper_hw_reset(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_connector *sde_con;
	void *sde_con_disp;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	SDE_DEBUG_ENC(sde_enc, "ctl %d reset\n", ctl->idx);
	SDE_EVT32(DRMID(phys_enc->parent), ctl->idx);

	if (phys_enc->ops.is_master && phys_enc->ops.is_master(phys_enc) &&
			phys_enc->connector) {
		sde_con = to_sde_connector(phys_enc->connector);
		sde_con_disp = sde_connector_get_display(phys_enc->connector);

		if (sde_con->ops.soft_reset) {
			rc = sde_con->ops.soft_reset(sde_con_disp);
			if (rc) {
				SDE_ERROR_ENC(sde_enc,
						"connector soft reset failure\n");
				SDE_DBG_DUMP("panic");
			}
		}
	}

	rc = ctl->ops.reset(ctl);
	if (rc) {
		SDE_ERROR_ENC(sde_enc, "ctl %d reset failure\n", ctl->idx);
		SDE_DBG_DUMP("panic");
	}

	phys_enc->enable_state = SDE_ENC_ENABLED;
}

/**
 * _sde_encoder_kickoff_phys - handle physical encoder kickoff
 *	Iterate through the physical encoders and perform consolidated flush
 *	and/or control start triggering as needed. This is done in the virtual
 *	encoder rather than the individual physical ones in order to handle
 *	use cases that require visibility into multiple physical encoders at
 *	a time.
 * sde_enc: Pointer to virtual encoder structure
 */
static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;
	sde_enc->crtc_frame_event = 0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		set_bit(i, sde_enc->frame_busy_mask);

		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && sde_enc->cur_master) {
		_sde_encoder_trigger_flush(
				&sde_enc->base,
				sde_enc->cur_master,
				pending_flush);
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}

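/**
 * sde_encoder_prepare_for_kickoff - prepare all physical encoders for kickoff
 * @drm_enc:	Pointer to drm encoder structure
 *
 * Gives each physical encoder a chance to finish or wait out the previous
 * kickoff, and performs an ordered hardware reset of all physical encoders
 * if any of them reports that it needs one.
 */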
void sde_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff)
				phys->ops.prepare_for_kickoff(phys);
			if (phys->enable_state == SDE_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
		}
	}

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		for (i = 0; i < sde_enc->num_phys_encs; i++) {
			phys = sde_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}
}

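/**
 * sde_encoder_kickoff - kick off the next frame on all physical encoders
 * @drm_enc:	Pointer to drm encoder structure
 *
 * Arms the frame done watchdog, triggers the consolidated flush/start for
 * all physical encoders and then lets them run any post-kickoff work.
 */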
void sde_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc;
	struct sde_encoder_phys *phys;
	unsigned int i;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);

	SDE_DEBUG_ENC(sde_enc, "\n");

	atomic_set(&sde_enc->frame_done_timeout,
			SDE_ENCODER_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&sde_enc->frame_done_timer, jiffies +
			((atomic_read(&sde_enc->frame_done_timeout) * HZ) / 1000));

	/* All phys encs are ready to go, trigger the kickoff */
	_sde_encoder_kickoff_phys(sde_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		phys = sde_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}
}

static int _sde_encoder_status_show(struct seq_file *s, void *data)
{
	struct sde_encoder_virt *sde_enc;
	int i;

	if (!s || !s->private)
		return -EINVAL;

	sde_enc = s->private;

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys)
			continue;

		seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
				phys->intf_idx - INTF_0,
				atomic_read(&phys->vsync_cnt),
				atomic_read(&phys->underrun_cnt));

		switch (phys->intf_mode) {
		case INTF_MODE_VIDEO:
			seq_puts(s, "mode: video\n");
			break;
		case INTF_MODE_CMD:
			seq_puts(s, "mode: command\n");
			break;
		case INTF_MODE_WB_BLOCK:
			seq_puts(s, "mode: wb block\n");
			break;
		case INTF_MODE_WB_LINE:
			seq_puts(s, "mode: wb line\n");
			break;
		default:
			seq_puts(s, "mode: ???\n");
			break;
		}
	}
	mutex_unlock(&sde_enc->enc_lock);

	return 0;
}

static int _sde_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _sde_encoder_status_show, inode->i_private);
}

static void _sde_set_misr_params(struct sde_encoder_phys *phys, u32 enable,
		u32 frame_count)
{
	int j;

	if (!phys->misr_map)
		return;

	phys->misr_map->enable = enable;

	if (frame_count <= SDE_CRC_BATCH_SIZE)
		phys->misr_map->frame_count = frame_count;
	else if (frame_count <= 0)
		phys->misr_map->frame_count = 0;
	else
		phys->misr_map->frame_count = SDE_CRC_BATCH_SIZE;

	if (!enable) {
		phys->misr_map->last_idx = 0;
		phys->misr_map->frame_count = 0;
		for (j = 0; j < SDE_CRC_BATCH_SIZE; j++)
			phys->misr_map->crc_value[j] = 0;
	}
}

static ssize_t _sde_encoder_misr_set(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_encoder *drm_enc;
	int i = 0;
	char buf[10];
	u32 enable, frame_count;

	drm_enc = file->private_data;
	sde_enc = to_sde_encoder_virt(drm_enc);

	/* reject writes that would overflow buf, leaving room for '\0' */
	if (count >= sizeof(buf))
		return -EFAULT;

	if (copy_from_user(buf, user_buf, count))
		return -EFAULT;

	buf[count] = 0;	/* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EFAULT;

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (!phys || !phys->misr_map || !phys->ops.setup_misr)
			continue;

		_sde_set_misr_params(phys, enable, frame_count);
		phys->ops.setup_misr(phys, phys->misr_map);
	}
	mutex_unlock(&sde_enc->enc_lock);
	return count;
}

static ssize_t _sde_encoder_misr_read(
		struct file *file,
		char __user *buff, size_t count, loff_t *ppos)
{
	struct sde_encoder_virt *sde_enc;
	struct drm_encoder *drm_enc;
	int i = 0, j = 0, len = 0;
	char buf[512] = {'\0'};

	if (*ppos)
		return 0;

	drm_enc = file->private_data;
	sde_enc = to_sde_encoder_virt(drm_enc);

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		struct sde_misr_params *misr_map;

		if (!phys || !phys->misr_map)
			continue;

		misr_map = phys->misr_map;

		len += snprintf(buf + len, sizeof(buf) - len, "INTF%d\n", i);
		for (j = 0; j < SDE_CRC_BATCH_SIZE; j++)
			len += snprintf(buf + len, sizeof(buf) - len, "%x\n",
					misr_map->crc_value[j]);
	}
	mutex_unlock(&sde_enc->enc_lock);

	if (len < 0 || len >= sizeof(buf))
		return 0;

	if ((count < sizeof(buf)) || copy_to_user(buff, buf, len))
		return -EFAULT;

	*ppos += len;	/* increase offset */

	return len;
}

static void _sde_encoder_init_debugfs(struct drm_encoder *drm_enc,
	struct sde_encoder_virt *sde_enc, struct sde_kms *sde_kms)
{
	static const struct file_operations debugfs_status_fops = {
		.open = _sde_encoder_debugfs_status_open,
		.read = seq_read,
		.llseek = seq_lseek,
		.release = single_release,
	};

	static const struct file_operations debugfs_misr_fops = {
		.open = simple_open,
		.read = _sde_encoder_misr_read,
		.write = _sde_encoder_misr_set,
	};

	char name[SDE_NAME_SIZE];

	if (!drm_enc || !sde_enc || !sde_kms) {
		SDE_ERROR("invalid encoder or kms\n");
		return;
	}

	snprintf(name, SDE_NAME_SIZE, "encoder%u", drm_enc->base.id);

	/* create overall sub-directory for the encoder */
	sde_enc->debugfs_root = debugfs_create_dir(name,
			sde_debugfs_get_root(sde_kms));
	if (sde_enc->debugfs_root) {
		/* don't error check these */
		debugfs_create_file("status", 0644,
			sde_enc->debugfs_root, sde_enc, &debugfs_status_fops);

		debugfs_create_file("misr_data", 0644,
			sde_enc->debugfs_root, drm_enc, &debugfs_misr_fops);
	}
}

static int sde_encoder_virt_add_phys_encs(
		u32 display_caps,
		struct sde_encoder_virt *sde_enc,
		struct sde_enc_phys_init_params *params)
{
	struct sde_encoder_phys *enc = NULL;

	SDE_DEBUG_ENC(sde_enc, "\n");

	/*
	 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
	 * in this function, check up-front.
	 */
	if (sde_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
			ARRAY_SIZE(sde_enc->phys_encs)) {
		SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
				sde_enc->num_phys_encs);
		return -EINVAL;
	}

	if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
		enc = sde_encoder_phys_vid_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			SDE_ERROR_ENC(sde_enc, "failed to init vid enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
		++sde_enc->num_phys_encs;
	}

	if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
		enc = sde_encoder_phys_cmd_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			SDE_ERROR_ENC(sde_enc, "failed to init cmd enc: %ld\n",
				PTR_ERR(enc));
			return enc == 0 ? -EINVAL : PTR_ERR(enc);
		}

		sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
		++sde_enc->num_phys_encs;
	}

	return 0;
}

static int sde_encoder_virt_add_phys_enc_wb(struct sde_encoder_virt *sde_enc,
		struct sde_enc_phys_init_params *params)
{
	struct sde_encoder_phys *enc = NULL;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	SDE_DEBUG_ENC(sde_enc, "\n");

	if (sde_enc->num_phys_encs + 1 >= ARRAY_SIZE(sde_enc->phys_encs)) {
		SDE_ERROR_ENC(sde_enc, "too many physical encoders %d\n",
				sde_enc->num_phys_encs);
		return -EINVAL;
	}

	enc = sde_encoder_phys_wb_init(params);

	if (IS_ERR_OR_NULL(enc)) {
		SDE_ERROR_ENC(sde_enc, "failed to init wb enc: %ld\n",
			PTR_ERR(enc));
		return enc == 0 ? -EINVAL : PTR_ERR(enc);
	}

	sde_enc->phys_encs[sde_enc->num_phys_encs] = enc;
	++sde_enc->num_phys_encs;

	return 0;
}

static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
				 struct sde_kms *sde_kms,
				 struct msm_display_info *disp_info,
				 int *drm_enc_mode)
{
	int ret = 0;
	int i = 0;
	enum sde_intf_type intf_type;
	struct sde_encoder_virt_ops parent_ops = {
		sde_encoder_vblank_callback,
		sde_encoder_underrun_callback,
		sde_encoder_frame_done_callback,
	};
	struct sde_enc_phys_init_params phys_params;

	if (!sde_enc || !sde_kms) {
		SDE_ERROR("invalid arg(s), enc %d kms %d\n",
				sde_enc != 0, sde_kms != 0);
		return -EINVAL;
	}

	memset(&phys_params, 0, sizeof(phys_params));
	phys_params.sde_kms = sde_kms;
	phys_params.parent = &sde_enc->base;
	phys_params.parent_ops = parent_ops;
	phys_params.enc_spinlock = &sde_enc->enc_spinlock;

	SDE_DEBUG("\n");

	if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
		*drm_enc_mode = DRM_MODE_ENCODER_DSI;
		intf_type = INTF_DSI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_HDMI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_VIRTUAL) {
		*drm_enc_mode = DRM_MODE_ENCODER_VIRTUAL;
		intf_type = INTF_WB;
	} else {
		SDE_ERROR_ENC(sde_enc, "unsupported display interface type\n");
		return -EINVAL;
	}

	WARN_ON(disp_info->num_of_h_tiles < 1);

	sde_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;

	SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);

	mutex_lock(&sde_enc->enc_lock);
	for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
		/*
		 * Left-most tile is at index 0, content is controller id
		 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
		 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
		 */
		u32 controller_id = disp_info->h_tile_instance[i];

		if (disp_info->num_of_h_tiles > 1) {
			if (i == 0)
				phys_params.split_role = ENC_ROLE_MASTER;
			else
				phys_params.split_role = ENC_ROLE_SLAVE;
		} else {
			phys_params.split_role = ENC_ROLE_SOLO;
		}

		SDE_DEBUG("h_tile_instance %d = %d, split_role %d\n",
				i, controller_id, phys_params.split_role);

		if (intf_type == INTF_WB) {
			phys_params.intf_idx = INTF_MAX;
			phys_params.wb_idx = sde_encoder_get_wb(
					sde_kms->catalog,
					intf_type, controller_id);
			if (phys_params.wb_idx == WB_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get wb: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		} else {
			phys_params.wb_idx = WB_MAX;
			phys_params.intf_idx = sde_encoder_get_intf(
					sde_kms->catalog, intf_type,
					controller_id);
			if (phys_params.intf_idx == INTF_MAX) {
				SDE_ERROR_ENC(sde_enc,
					"could not get intf: type %d, id %d\n",
					intf_type, controller_id);
				ret = -EINVAL;
			}
		}

		if (!ret) {
			if (intf_type == INTF_WB)
				ret = sde_encoder_virt_add_phys_enc_wb(sde_enc,
						&phys_params);
			else
				ret = sde_encoder_virt_add_phys_encs(
						disp_info->capabilities,
						sde_enc,
						&phys_params);
			if (ret)
				SDE_ERROR_ENC(sde_enc,
						"failed to add phys encs\n");
		}
	}
	mutex_unlock(&sde_enc->enc_lock);

	return ret;
}

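/**
 * sde_encoder_frame_done_timeout - frame done watchdog timer callback
 * @data:	Pointer to the drm encoder, cast to unsigned long
 *
 * Fires when a kicked-off frame never signals frame done in time; reports
 * the failure to the CRTC by raising SDE_ENCODER_FRAME_EVENT_ERROR through
 * the registered frame event callback.
 */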
static void sde_encoder_frame_done_timeout(unsigned long data)
{
	struct drm_encoder *drm_enc = (struct drm_encoder *) data;
	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
	struct msm_drm_private *priv;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
		return;
	}
	priv = drm_enc->dev->dev_private;

	if (!sde_enc->frame_busy_mask[0] || !sde_enc->crtc_frame_event_cb) {
		SDE_DEBUG_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), sde_enc->frame_busy_mask[0], 0);
		return;
	} else if (!atomic_xchg(&sde_enc->frame_done_timeout, 0)) {
		SDE_ERROR_ENC(sde_enc, "invalid timeout\n");
		SDE_EVT32(DRMID(drm_enc), 0, 1);
		return;
	}

	SDE_EVT32(DRMID(drm_enc), 2, sde_enc->crtc_frame_event);
	SDE_ERROR_ENC(sde_enc, "frame done timeout, frame_event %d\n",
			sde_enc->crtc_frame_event);

	sde_enc->crtc_frame_event_cb(sde_enc->crtc_frame_event_cb_data,
			sde_enc->crtc_frame_event |
			SDE_ENCODER_FRAME_EVENT_ERROR);
}

struct drm_encoder *sde_encoder_init(
		struct drm_device *dev,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct sde_kms *sde_kms = to_sde_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct sde_encoder_virt *sde_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	int ret = 0;

	sde_enc = kzalloc(sizeof(*sde_enc), GFP_KERNEL);
	if (!sde_enc) {
		ret = -ENOMEM;
		goto fail;
	}

	mutex_init(&sde_enc->enc_lock);
	ret = sde_encoder_setup_display(sde_enc, sde_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->enc_spinlock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode, NULL);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);

	atomic_set(&sde_enc->frame_done_timeout, 0);
	setup_timer(&sde_enc->frame_done_timer, sde_encoder_frame_done_timeout,
			(unsigned long) sde_enc);

	_sde_encoder_init_debugfs(drm_enc, sde_enc, sde_kms);

	SDE_DEBUG_ENC(sde_enc, "created\n");

	return drm_enc;

fail:
	SDE_ERROR("failed to create encoder\n");
	if (drm_enc)
		sde_encoder_destroy(drm_enc);

	return ERR_PTR(ret);
}

int sde_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i, ret = 0;

	if (!drm_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	sde_enc = to_sde_encoder_virt(drm_enc);
	SDE_DEBUG_ENC(sde_enc, "\n");

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys && phys->ops.wait_for_commit_done) {
			ret = phys->ops.wait_for_commit_done(phys);
			if (ret)
				return ret;
		}

		if (phys && phys->ops.collect_misr)
			if (phys->misr_map && phys->misr_map->enable)
				phys->ops.collect_misr(phys, phys->misr_map);
	}

	return ret;
}

enum sde_intf_mode sde_encoder_get_intf_mode(struct drm_encoder *encoder)
{
	struct sde_encoder_virt *sde_enc = NULL;
	int i;

	if (!encoder) {
		SDE_ERROR("invalid encoder\n");
		return INTF_MODE_NONE;
	}
	sde_enc = to_sde_encoder_virt(encoder);

	if (sde_enc->cur_master)
		return sde_enc->cur_master->intf_mode;

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];

		if (phys)
			return phys->intf_mode;
	}

	return INTF_MODE_NONE;
}