/* Copyright (c) 2015-2020, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
12
Clarence Ip19af1362016-09-23 14:57:51 -040013#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
Lloyd Atkinson09fed912016-06-24 18:14:13 -040014#include "sde_encoder_phys.h"
Lloyd Atkinson5d722782016-05-30 14:09:41 -040015#include "sde_hw_interrupts.h"
Alan Kwongf5dd86c2016-08-09 18:08:17 -040016#include "sde_core_irq.h"
Clarence Ipc475b082016-06-26 09:27:23 -040017#include "sde_formats.h"
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -080018#include "dsi_display.h"
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -070019#include "sde_trace.h"
Lloyd Atkinson09fed912016-06-24 18:14:13 -040020
/*
 * Logging helpers that prefix every message with the parent encoder id and
 * the interface index.  Both macros tolerate a NULL or partially initialized
 * encoder by printing -1 for whichever field is unavailable.
 */
#define SDE_DEBUG_VIDENC(e, fmt, ...) SDE_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define SDE_ERROR_VIDENC(e, fmt, ...) SDE_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) && (e)->hw_intf ? \
		(e)->hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

/* downcast from the embedded base phys encoder to the video phys encoder */
#define to_sde_encoder_phys_vid(x) \
	container_of(x, struct sde_encoder_phys_vid, base)

/* maximum number of consecutive kickoff errors */
#define KICKOFF_MAX_ERRORS	2

/* Poll time to do recovery during active region */
#define POLL_TIME_USEC_FOR_LN_CNT 500
#define MAX_POLL_CNT 10
42
Raviteja Tamatam5a10dd12018-06-29 16:19:06 +053043static bool _sde_encoder_phys_is_ppsplit(struct sde_encoder_phys *phys_enc)
44{
45 enum sde_rm_topology_name topology;
46
47 if (!phys_enc)
48 return false;
49
50 topology = sde_connector_get_topology_name(phys_enc->connector);
51 if (topology == SDE_RM_TOPOLOGY_PPSPLIT)
52 return true;
53
54 return false;
55}
56
Ben Chan78647cd2016-06-26 22:02:47 -040057static bool sde_encoder_phys_vid_is_master(
58 struct sde_encoder_phys *phys_enc)
59{
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -040060 bool ret = false;
61
62 if (phys_enc->split_role != ENC_ROLE_SLAVE)
63 ret = true;
Ben Chan78647cd2016-06-26 22:02:47 -040064
65 return ret;
66}
67
/*
 * drm_mode_to_intf_timing_params - translate a DRM display mode into the
 * interface timing structure consumed by the INTF hardware block.
 *
 * @vid_enc: video phys encoder (read for compression type and intf type)
 * @mode:    validated DRM mode to translate
 * @timing:  output; zeroed first, left all-zero if @mode is inconsistent
 */
static void drm_mode_to_intf_timing_params(
		const struct sde_encoder_phys_vid *vid_enc,
		const struct drm_display_mode *mode,
		struct intf_timing_params *timing)
{
	memset(timing, 0, sizeof(*timing));

	/* reject modes whose sync/active ordering is internally inconsistent */
	if ((mode->htotal < mode->hsync_end)
			|| (mode->hsync_start < mode->hdisplay)
			|| (mode->vtotal < mode->vsync_end)
			|| (mode->vsync_start < mode->vdisplay)
			|| (mode->hsync_end < mode->hsync_start)
			|| (mode->vsync_end < mode->vsync_start)) {
		SDE_ERROR(
		    "invalid params - hstart:%d,hend:%d,htot:%d,hdisplay:%d\n",
				mode->hsync_start, mode->hsync_end,
				mode->htotal, mode->hdisplay);
		SDE_ERROR("vstart:%d,vend:%d,vtot:%d,vdisplay:%d\n",
				mode->vsync_start, mode->vsync_end,
				mode->vtotal, mode->vdisplay);
		return;
	}

	/*
	 * https://www.kernel.org/doc/htmldocs/drm/ch02s05.html
	 *  Active Region      Front Porch   Sync   Back Porch
	 * <-----------------><------------><-----><----------->
	 * <- [hv]display --->
	 * <--------- [hv]sync_start ------>
	 * <----------------- [hv]sync_end ------->
	 * <---------------------------- [hv]total ------------->
	 */
	timing->width = mode->hdisplay;	/* active width */
	/* with DSC the interface fetches compressed (1/3 width) data */
	if (vid_enc->base.comp_type == MSM_DISPLAY_COMPRESSION_DSC)
		timing->width = DIV_ROUND_UP(timing->width, 3);

	timing->height = mode->vdisplay;	/* active height */
	timing->xres = timing->width;
	timing->yres = timing->height;
	timing->h_back_porch = mode->htotal - mode->hsync_end;
	timing->h_front_porch = mode->hsync_start - mode->hdisplay;
	timing->v_back_porch = mode->vtotal - mode->vsync_end;
	timing->v_front_porch = mode->vsync_start - mode->vdisplay;
	timing->hsync_pulse_width = mode->hsync_end - mode->hsync_start;
	timing->vsync_pulse_width = mode->vsync_end - mode->vsync_start;
	timing->hsync_polarity = (mode->flags & DRM_MODE_FLAG_NHSYNC) ? 1 : 0;
	timing->vsync_polarity = (mode->flags & DRM_MODE_FLAG_NVSYNC) ? 1 : 0;
	timing->border_clr = 0;
	timing->underflow_clr = 0xff;
	timing->hsync_skew = mode->hskew;
	/* fixed VFP comes from the value cached at timing engine setup */
	timing->v_front_porch_fixed = vid_enc->base.vfp_cached;

	/* DSI controller cannot handle active-low sync signals. */
	if (vid_enc->hw_intf->cap->type == INTF_DSI) {
		timing->hsync_polarity = 0;
		timing->vsync_polarity = 0;
	}

	/*
	 * For edp only:
	 * DISPLAY_V_START = (VBP * HCYCLE) + HBP
	 * DISPLAY_V_END = (VBP + VACTIVE) * HCYCLE - 1 - HFP
	 */
	/*
	 * if (vid_enc->hw->cap->type == INTF_EDP) {
	 * display_v_start += mode->htotal - mode->hsync_start;
	 * display_v_end -= mode->hsync_start - mode->hdisplay;
	 * }
	 */
}
138
139static inline u32 get_horizontal_total(const struct intf_timing_params *timing)
140{
141 u32 active = timing->xres;
142 u32 inactive =
143 timing->h_back_porch + timing->h_front_porch +
144 timing->hsync_pulse_width;
145 return active + inactive;
146}
147
Kalyan Thota6a9f3b72018-01-18 18:00:02 +0530148static inline u32 get_vertical_total(const struct intf_timing_params *timing,
149 bool use_fixed_vfp)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400150{
Kalyan Thota6a9f3b72018-01-18 18:00:02 +0530151 u32 inactive;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400152 u32 active = timing->yres;
Kalyan Thota6a9f3b72018-01-18 18:00:02 +0530153 u32 v_front_porch = use_fixed_vfp ?
154 timing->v_front_porch_fixed : timing->v_front_porch;
155
156 inactive = timing->v_back_porch + v_front_porch +
157 timing->vsync_pulse_width;
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400158 return active + inactive;
159}
160
/*
 * programmable_fetch_get_num_lines:
 * Number of fetch lines in vertical front porch
 * @vid_enc: video phys encoder (read for HW worst-case prefetch lines)
 * @timing: Pointer to the intf timing information for the requested mode
 * @use_fixed_vfp: use the cached/fixed front porch instead of the mode's
 *
 * Returns the number of fetch lines in vertical front porch at which mdp
 * can start fetching the next frame.
 *
 * Number of needed prefetch lines is anything that cannot be absorbed in the
 * start of frame time (back porch + vsync pulse width).
 *
 * Some panels have very large VFP, however we only need a total number of
 * lines based on the chip worst case latencies.
 */
static u32 programmable_fetch_get_num_lines(
		struct sde_encoder_phys_vid *vid_enc,
		const struct intf_timing_params *timing,
		bool use_fixed_vfp)
{
	u32 worst_case_needed_lines =
	    vid_enc->hw_intf->cap->prog_fetch_lines_worst_case;
	u32 start_of_frame_lines =
	    timing->v_back_porch + timing->vsync_pulse_width;
	/* NOTE: may wrap (u32) when vbp+vsw exceeds the worst case, but the
	 * wrapped value is never used in that branch below.
	 */
	u32 needed_vfp_lines = worst_case_needed_lines - start_of_frame_lines;
	u32 actual_vfp_lines = 0;
	u32 v_front_porch = use_fixed_vfp ?
	    timing->v_front_porch_fixed : timing->v_front_porch;

	/* Fetch must be outside active lines, otherwise undefined. */
	if (start_of_frame_lines >= worst_case_needed_lines) {
		SDE_DEBUG_VIDENC(vid_enc,
				"prog fetch is not needed, large vbp+vsw\n");
		actual_vfp_lines = 0;
	} else if (v_front_porch < needed_vfp_lines) {
		/* Warn fetch needed, but not enough porch in panel config */
		pr_warn_once
			("low vbp+vfp may lead to perf issues in some cases\n");
		SDE_DEBUG_VIDENC(vid_enc,
				"less vfp than fetch req, using entire vfp\n");
		actual_vfp_lines = v_front_porch;
	} else {
		SDE_DEBUG_VIDENC(vid_enc, "room in vfp for needed prefetch\n");
		actual_vfp_lines = needed_vfp_lines;
	}

	SDE_DEBUG_VIDENC(vid_enc,
		"v_front_porch %u v_back_porch %u vsync_pulse_width %u\n",
		v_front_porch, timing->v_back_porch,
		timing->vsync_pulse_width);
	SDE_DEBUG_VIDENC(vid_enc,
		"wc_lines %u needed_vfp_lines %u actual_vfp_lines %u\n",
		worst_case_needed_lines, needed_vfp_lines, actual_vfp_lines);

	return actual_vfp_lines;
}
216
/*
 * programmable_fetch_config: Programs HW to prefetch lines by offsetting
 * the start of fetch into the vertical front porch for cases where the
 * vsync pulse width and vertical back porch time is insufficient
 *
 * Gets # of lines to pre-fetch, then calculate VSYNC counter value.
 * HW layer requires VSYNC counter of first pixel of tgt VFP line.
 *
 * @phys_enc: Pointer to the physical encoder being programmed
 * @timing: Pointer to the intf timing information for the requested mode
 */
static void programmable_fetch_config(struct sde_encoder_phys *phys_enc,
				      const struct intf_timing_params *timing)
{
	struct sde_encoder_phys_vid *vid_enc =
		to_sde_encoder_phys_vid(phys_enc);
	struct intf_prog_fetch f = { 0 };
	u32 vfp_fetch_lines = 0;
	u32 horiz_total = 0;
	u32 vert_total = 0;
	u32 vfp_fetch_start_vsync_counter = 0;
	unsigned long lock_flags;

	/* nothing to do on interfaces without a programmable fetch op */
	if (WARN_ON_ONCE(!vid_enc->hw_intf->ops.setup_prg_fetch))
		return;

	/* use_fixed_vfp=true: compute against the cached front porch value */
	vfp_fetch_lines = programmable_fetch_get_num_lines(vid_enc,
			timing, true);
	if (vfp_fetch_lines) {
		vert_total = get_vertical_total(timing, true);
		horiz_total = get_horizontal_total(timing);
		/* vsync counter of the first pixel of the target VFP line */
		vfp_fetch_start_vsync_counter =
		    (vert_total - vfp_fetch_lines) * horiz_total + 1;
		f.enable = 1;
		f.fetch_start = vfp_fetch_start_vsync_counter;
	}

	SDE_DEBUG_VIDENC(vid_enc,
		"vfp_fetch_lines %u vfp_fetch_start_vsync_counter %u\n",
		vfp_fetch_lines, vfp_fetch_start_vsync_counter);

	/* program the fetch point under the encoder spinlock */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_prg_fetch(vid_enc->hw_intf, &f);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
}
261
/*
 * programmable_rot_fetch_config: Programs ROT to prefetch lines by offsetting
 * the start of fetch into the vertical front porch for cases where the
 * vsync pulse width and vertical back porch time is insufficient
 *
 * Gets # of lines to pre-fetch, then calculate VSYNC counter value.
 * HW layer requires VSYNC counter of first pixel of tgt VFP line.
 * @phys_enc: Pointer to physical encoder
 * @rot_fetch_lines: number of line to prefill, or 0 to disable
 * @is_primary: set true if the display is primary display
 */
static void programmable_rot_fetch_config(struct sde_encoder_phys *phys_enc,
		u32 rot_fetch_lines, u32 is_primary)
{
	struct sde_encoder_phys_vid *vid_enc =
		to_sde_encoder_phys_vid(phys_enc);
	struct intf_prog_fetch f = { 0 };
	struct intf_timing_params *timing;
	u32 vfp_fetch_lines = 0;
	u32 horiz_total = 0;
	u32 vert_total = 0;
	u32 rot_fetch_start_vsync_counter = 0;
	u32 flush_mask = 0;
	unsigned long lock_flags;

	/* bail out unless every required op exists and this is the primary */
	if (!phys_enc || !vid_enc->hw_intf || !phys_enc->hw_ctl ||
			!phys_enc->hw_ctl->ops.get_bitmask_intf ||
			!phys_enc->hw_ctl->ops.update_pending_flush ||
			!vid_enc->hw_intf->ops.setup_rot_start ||
			!phys_enc->sde_kms ||
			!is_primary)
		return;

	timing = &vid_enc->timing_params;
	vfp_fetch_lines = programmable_fetch_get_num_lines(vid_enc,
			timing, true);
	if (rot_fetch_lines) {
		vert_total = get_vertical_total(timing, true);
		horiz_total = get_horizontal_total(timing);
		if (vert_total >= (vfp_fetch_lines + rot_fetch_lines)) {
			/* start the ROT fetch rot_fetch_lines before the
			 * programmable fetch point
			 */
			rot_fetch_start_vsync_counter =
			    (vert_total - vfp_fetch_lines - rot_fetch_lines) *
			    horiz_total + 1;
			f.enable = 1;
			f.fetch_start = rot_fetch_start_vsync_counter;
		} else {
			SDE_ERROR_VIDENC(vid_enc,
				"vert_total %u rot_fetch_lines %u vfp_fetch_lines %u\n",
				vert_total, rot_fetch_lines, vfp_fetch_lines);
			SDE_EVT32(DRMID(phys_enc->parent), vert_total,
					rot_fetch_lines, vfp_fetch_lines,
					SDE_EVTLOG_ERROR);
		}
	}

	/* return if rot_fetch does not change since last update */
	if (vid_enc->rot_fetch_valid &&
			!memcmp(&vid_enc->rot_fetch, &f, sizeof(f)))
		return;

	SDE_DEBUG_VIDENC(vid_enc,
		"rot_fetch_lines %u vfp_fetch_lines %u rot_fetch_start_vsync_counter %u\n",
		rot_fetch_lines, vfp_fetch_lines,
		rot_fetch_start_vsync_counter);

	/* skip HW programming entirely while continuous splash is active */
	if (!phys_enc->sde_kms->splash_data.cont_splash_en) {
		SDE_EVT32(DRMID(phys_enc->parent), f.enable, f.fetch_start);

		if (!_sde_encoder_phys_is_ppsplit(phys_enc) ||
				sde_encoder_phys_vid_is_master(phys_enc)) {
			phys_enc->hw_ctl->ops.get_bitmask_intf(
					phys_enc->hw_ctl, &flush_mask,
					vid_enc->hw_intf->idx);
			phys_enc->hw_ctl->ops.update_pending_flush(
					phys_enc->hw_ctl, flush_mask);
		}
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		vid_enc->hw_intf->ops.setup_rot_start(vid_enc->hw_intf, &f);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

		/* cache last programmed value for the early-out above */
		vid_enc->rot_fetch = f;
		vid_enc->rot_fetch_valid = true;
	}
}
346
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400347static bool sde_encoder_phys_vid_mode_fixup(
348 struct sde_encoder_phys *phys_enc,
349 const struct drm_display_mode *mode,
Lloyd Atkinsonaf7952d2016-06-26 22:41:26 -0400350 struct drm_display_mode *adj_mode)
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400351{
Clarence Ip19af1362016-09-23 14:57:51 -0400352 if (phys_enc)
353 SDE_DEBUG_VIDENC(to_sde_encoder_phys_vid(phys_enc), "\n");
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400354
355 /*
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400356 * Modifying mode has consequences when the mode comes back to us
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400357 */
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400358 return true;
359}
360
/*
 * sde_encoder_phys_vid_setup_timing_engine - translate the cached mode into
 * interface timing, cache it on the encoder, and program the timing
 * generator, interface mux and programmable fetch point.  Skips all HW
 * writes while continuous splash is enabled.
 */
static void sde_encoder_phys_vid_setup_timing_engine(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_vid *vid_enc;
	struct drm_display_mode mode;
	struct intf_timing_params timing_params = { 0 };
	const struct sde_format *fmt = NULL;
	u32 fmt_fourcc = DRM_FORMAT_RGB888;
	unsigned long lock_flags;
	struct sde_hw_intf_cfg intf_cfg = { 0 };

	if (!phys_enc || !phys_enc->sde_kms || !phys_enc->hw_ctl ||
			!phys_enc->hw_ctl->ops.setup_intf_cfg) {
		SDE_ERROR("invalid encoder %d\n", phys_enc != 0);
		return;
	}

	/* work on a local copy so split adjustments don't touch the cache */
	mode = phys_enc->cached_mode;
	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf->ops.setup_timing_gen) {
		SDE_ERROR("timing engine setup is not supported\n");
		return;
	}

	SDE_DEBUG_VIDENC(vid_enc, "enabling mode:\n");
	drm_mode_debug_printmodeline(&mode);

	/* each side of a split (or YCbCr420) drives half the horizontal */
	if (phys_enc->split_role != ENC_ROLE_SOLO ||
	    (mode.private_flags & MSM_MODE_FLAG_COLOR_FORMAT_YCBCR420)) {
		mode.hdisplay >>= 1;
		mode.htotal >>= 1;
		mode.hsync_start >>= 1;
		mode.hsync_end >>= 1;
		mode.hskew >>= 1;

		SDE_DEBUG_VIDENC(vid_enc,
			"split_role %d, halve horizontal %d %d %d %d %d\n",
			phys_enc->split_role,
			mode.hdisplay, mode.htotal,
			mode.hsync_start, mode.hsync_end,
			mode.hskew);
	}

	/* cache the panel VFP once; fall back to the mode's own VFP */
	if (!phys_enc->vfp_cached) {
		phys_enc->vfp_cached =
			sde_connector_get_panel_vfp(phys_enc->connector, &mode);
		if (phys_enc->vfp_cached <= 0)
			phys_enc->vfp_cached = mode.vsync_start - mode.vdisplay;
	}

	drm_mode_to_intf_timing_params(vid_enc, &mode, &timing_params);

	vid_enc->timing_params = timing_params;

	if (phys_enc->sde_kms->splash_data.cont_splash_en) {
		SDE_DEBUG_VIDENC(vid_enc,
			"skipping intf programming since cont splash is enabled\n");
		return;
	}

	fmt = sde_get_sde_format(fmt_fourcc);
	SDE_DEBUG_VIDENC(vid_enc, "fmt_fourcc 0x%X\n", fmt_fourcc);

	intf_cfg.intf = vid_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = SDE_CTL_MODE_SEL_VID;
	intf_cfg.stream_sel = 0; /* Don't care value for video mode */
	intf_cfg.mode_3d = sde_encoder_helper_get_3d_blend_mode(phys_enc);

	/* program timing gen and intf mux atomically w.r.t. the IRQ path */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_timing_gen(vid_enc->hw_intf,
			&timing_params, fmt);
	phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	programmable_fetch_config(phys_enc, &timing_params);
}
436
Ben Chan78647cd2016-06-26 22:02:47 -0400437static void sde_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400438{
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500439 struct sde_encoder_phys *phys_enc = arg;
440 struct sde_encoder_phys_vid *vid_enc =
441 to_sde_encoder_phys_vid(phys_enc);
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700442 struct sde_hw_ctl *hw_ctl;
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400443 unsigned long lock_flags;
Clarence Ipa1b52e72018-01-15 17:04:16 -0500444 u32 flush_register = ~0;
Clarence Ip75fe4a12017-12-22 12:14:51 -0500445 u32 reset_status = 0;
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700446 int new_cnt = -1, old_cnt = -1;
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -0700447 u32 event = 0;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400448
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500449 if (!phys_enc)
Clarence Ip19af1362016-09-23 14:57:51 -0400450 return;
451
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700452 hw_ctl = phys_enc->hw_ctl;
Lloyd Atkinson6a5359d2017-06-21 10:18:08 -0400453 if (!hw_ctl)
454 return;
455
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -0700456 SDE_ATRACE_BEGIN("vblank_irq");
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700457
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700458 /*
459 * only decrement the pending flush count if we've actually flushed
460 * hardware. due to sw irq latency, vblank may have already happened
461 * so we need to double-check with hw that it accepted the flush bits
462 */
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400463 spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
Clarence Ipa1b52e72018-01-15 17:04:16 -0500464
465 old_cnt = atomic_read(&phys_enc->pending_kickoff_cnt);
466
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700467 if (hw_ctl && hw_ctl->ops.get_flush_register)
468 flush_register = hw_ctl->ops.get_flush_register(hw_ctl);
469
Clarence Ipa1b52e72018-01-15 17:04:16 -0500470 if (flush_register)
471 goto not_flushed;
Clarence Ip75fe4a12017-12-22 12:14:51 -0500472
Clarence Ipa1b52e72018-01-15 17:04:16 -0500473 new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
474
475 /* signal only for master, where there is a pending kickoff */
476 if (sde_encoder_phys_vid_is_master(phys_enc)) {
477 if (atomic_add_unless(&phys_enc->pending_retire_fence_cnt,
478 -1, 0))
479 event |= SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE |
480 SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE;
481 }
482
483not_flushed:
Clarence Ip75fe4a12017-12-22 12:14:51 -0500484 if (hw_ctl && hw_ctl->ops.get_reset)
485 reset_status = hw_ctl->ops.get_reset(hw_ctl);
486
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400487 spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
488
Clarence Ipa1b52e72018-01-15 17:04:16 -0500489 if (event && phys_enc->parent_ops.handle_frame_done)
490 phys_enc->parent_ops.handle_frame_done(phys_enc->parent,
491 phys_enc, event);
492
493 if (phys_enc->parent_ops.handle_vblank_virt)
494 phys_enc->parent_ops.handle_vblank_virt(phys_enc->parent,
495 phys_enc);
496
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700497 SDE_EVT32_IRQ(DRMID(phys_enc->parent), vid_enc->hw_intf->idx - INTF_0,
Clarence Ip75fe4a12017-12-22 12:14:51 -0500498 old_cnt, new_cnt, reset_status ? SDE_EVTLOG_ERROR : 0,
499 flush_register, event);
Lloyd Atkinson6340a372017-04-05 13:04:22 -0700500
Lloyd Atkinson7d070942016-07-26 18:35:12 -0400501 /* Signal any waiting atomic commit thread */
502 wake_up_all(&phys_enc->pending_kickoff_wq);
Veera Sundaram Sankarana90e1392017-07-06 15:00:09 -0700503 SDE_ATRACE_END("vblank_irq");
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400504}
505
Dhaval Patel81e87882016-10-19 21:41:56 -0700506static void sde_encoder_phys_vid_underrun_irq(void *arg, int irq_idx)
507{
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500508 struct sde_encoder_phys *phys_enc = arg;
Dhaval Patel81e87882016-10-19 21:41:56 -0700509
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500510 if (!phys_enc)
Dhaval Patel81e87882016-10-19 21:41:56 -0700511 return;
512
Dhaval Patel81e87882016-10-19 21:41:56 -0700513 if (phys_enc->parent_ops.handle_underrun_virt)
514 phys_enc->parent_ops.handle_underrun_virt(phys_enc->parent,
515 phys_enc);
516}
517
Ingrid Gallardo1afd2c12017-08-17 16:24:35 -0700518static bool _sde_encoder_phys_is_dual_ctl(struct sde_encoder_phys *phys_enc)
519{
520 enum sde_rm_topology_name topology;
521
522 if (!phys_enc)
523 return false;
524
525 topology = sde_connector_get_topology_name(phys_enc->connector);
526 if ((topology == SDE_RM_TOPOLOGY_DUALPIPE_DSC) ||
Kalyan Thota27ec06c2019-03-18 13:19:59 +0530527 (topology == SDE_RM_TOPOLOGY_DUALPIPE) ||
528 (topology == SDE_RM_TOPOLOGY_QUADPIPE_3DMERGE) ||
529 (topology == SDE_RM_TOPOLOGY_QUADPIPE_DSCMERGE) ||
530 (topology == SDE_RM_TOPOLOGY_QUADPIPE_3DMERGE_DSC))
Ingrid Gallardo1afd2c12017-08-17 16:24:35 -0700531 return true;
532
533 return false;
534}
535
Clarence Ip8e69ad02016-12-09 09:43:57 -0500536static bool sde_encoder_phys_vid_needs_single_flush(
Clarence Ip110d15c2016-08-16 14:44:41 -0400537 struct sde_encoder_phys *phys_enc)
538{
Ingrid Gallardo72cd1632018-02-28 15:26:37 -0800539 return phys_enc && (
540 phys_enc->cont_splash_settings ?
541 phys_enc->cont_splash_single_flush :
542 (_sde_encoder_phys_is_ppsplit(phys_enc) ||
543 _sde_encoder_phys_is_dual_ctl(phys_enc)));
Clarence Ip110d15c2016-08-16 14:44:41 -0400544}
545
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500546static void _sde_encoder_phys_vid_setup_irq_hw_idx(
547 struct sde_encoder_phys *phys_enc)
Ben Chan78647cd2016-06-26 22:02:47 -0400548{
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500549 struct sde_encoder_irq *irq;
Ben Chan78647cd2016-06-26 22:02:47 -0400550
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530551 /*
552 * Initialize irq->hw_idx only when irq is not registered.
553 * Prevent invalidating irq->irq_idx as modeset may be
554 * called many times during dfps.
555 */
556
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500557 irq = &phys_enc->irq[INTR_IDX_VSYNC];
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530558 if (irq->irq_idx < 0)
559 irq->hw_idx = phys_enc->intf_idx;
Clarence Ip19af1362016-09-23 14:57:51 -0400560
Lloyd Atkinson05ef8232017-03-08 16:35:36 -0500561 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
Raviteja Tamatam68892de2017-06-20 04:47:19 +0530562 if (irq->irq_idx < 0)
563 irq->hw_idx = phys_enc->intf_idx;
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400564}
565
Chandan Uddaraju3f2cf422017-06-15 15:37:39 -0700566static void sde_encoder_phys_vid_cont_splash_mode_set(
567 struct sde_encoder_phys *phys_enc,
568 struct drm_display_mode *adj_mode)
569{
570 if (!phys_enc || !adj_mode) {
571 SDE_ERROR("invalid args\n");
572 return;
573 }
574
575 phys_enc->cached_mode = *adj_mode;
576 phys_enc->enable_state = SDE_ENC_ENABLED;
577
578 _sde_encoder_phys_vid_setup_irq_hw_idx(phys_enc);
579}
580
/*
 * sde_encoder_phys_vid_mode_set - cache the adjusted mode and re-acquire
 * the CTL (mandatory) and CDM (optional) hardware blocks previously
 * reserved for this encoder from the resource manager.
 */
static void sde_encoder_phys_vid_mode_set(
		struct sde_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_rm *rm;
	struct sde_rm_hw_iter iter;
	int i, instance;
	struct sde_encoder_phys_vid *vid_enc;

	if (!phys_enc || !phys_enc->sde_kms) {
		SDE_ERROR("invalid encoder/kms\n");
		return;
	}

	/* drop stale block handles before re-iterating the RM */
	phys_enc->hw_ctl = NULL;
	phys_enc->hw_cdm = NULL;

	rm = &phys_enc->sde_kms->rm;
	vid_enc = to_sde_encoder_phys_vid(phys_enc);

	if (adj_mode) {
		phys_enc->cached_mode = *adj_mode;
		drm_mode_debug_printmodeline(adj_mode);
		SDE_DEBUG_VIDENC(vid_enc, "caching mode:\n");
	}

	/* slave encoders consume the second instance of each block type */
	instance = phys_enc->split_role == ENC_ROLE_SLAVE ? 1 : 0;

	/* Retrieve previously allocated HW Resources. Shouldn't fail */
	sde_rm_init_hw_iter(&iter, phys_enc->parent->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i <= instance; i++) {
		if (sde_rm_get_hw(rm, &iter))
			phys_enc->hw_ctl = (struct sde_hw_ctl *)iter.hw;
	}
	if (IS_ERR_OR_NULL(phys_enc->hw_ctl)) {
		SDE_ERROR_VIDENC(vid_enc, "failed to init ctl, %ld\n",
				PTR_ERR(phys_enc->hw_ctl));
		phys_enc->hw_ctl = NULL;
		return;
	}

	_sde_encoder_phys_vid_setup_irq_hw_idx(phys_enc);

	/* CDM is optional */
	sde_rm_init_hw_iter(&iter, phys_enc->parent->base.id, SDE_HW_BLK_CDM);
	for (i = 0; i <= instance; i++) {
		sde_rm_get_hw(rm, &iter);
		if (i == instance)
			phys_enc->hw_cdm = (struct sde_hw_cdm *) iter.hw;
	}

	if (IS_ERR(phys_enc->hw_cdm)) {
		SDE_ERROR("CDM required but not allocated: %ld\n",
				PTR_ERR(phys_enc->hw_cdm));
		phys_enc->hw_cdm = NULL;
	}
}
639
/*
 * sde_encoder_phys_vid_control_vblank_irq - reference-counted vblank IRQ ctl
 * @phys_enc: physical encoder structure
 * @enable: true to take a vblank reference, false to release one
 *
 * The VSYNC interrupt is registered when vblank_refcount goes 0 -> 1 and
 * unregistered when it drops back to 0; all transitions are serialized
 * under vblank_ctl_lock. Slave encoders are a no-op (they don't report
 * vblank). Returns 0 on success or a negative error code.
 */
static int sde_encoder_phys_vid_control_vblank_irq(
		struct sde_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	struct sde_encoder_phys_vid *vid_enc;
	int refcount;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	/* serialize refcount transitions against concurrent enable/disable */
	mutex_lock(phys_enc->vblank_ctl_lock);
	refcount = atomic_read(&phys_enc->vblank_refcount);
	vid_enc = to_sde_encoder_phys_vid(phys_enc);

	/* Slave encoders don't report vblank */
	if (!sde_encoder_phys_vid_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	SDE_DEBUG_VIDENC(vid_enc, "[%pS] enable=%d/%d\n",
			__builtin_return_address(0),
			enable, atomic_read(&phys_enc->vblank_refcount));

	SDE_EVT32(DRMID(phys_enc->parent), enable,
			atomic_read(&phys_enc->vblank_refcount));

	/* register on first reference; roll the count back if it fails */
	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1) {
		ret = sde_encoder_helper_register_irq(phys_enc, INTR_IDX_VSYNC);
		if (ret)
			atomic_dec_return(&phys_enc->vblank_refcount);
	} else if (!enable &&
			atomic_dec_return(&phys_enc->vblank_refcount) == 0) {
		/* last reference dropped: tear the IRQ down, undo on failure */
		ret = sde_encoder_helper_unregister_irq(phys_enc,
				INTR_IDX_VSYNC);
		if (ret)
			atomic_inc_return(&phys_enc->vblank_refcount);
	}

end:
	if (ret) {
		SDE_ERROR_VIDENC(vid_enc,
				"control vblank irq error %d, enable %d\n",
				ret, enable);
		SDE_EVT32(DRMID(phys_enc->parent),
				vid_enc->hw_intf->idx - INTF_0,
				enable, refcount, SDE_EVTLOG_ERROR);
	}
	mutex_unlock(phys_enc->vblank_ctl_lock);
	return ret;
}
698
Ingrid Gallardo61210ea2017-10-17 17:29:31 -0700699static bool sde_encoder_phys_vid_wait_dma_trigger(
700 struct sde_encoder_phys *phys_enc)
701{
702 struct sde_encoder_phys_vid *vid_enc;
703 struct sde_hw_intf *intf;
704 struct sde_hw_ctl *ctl;
705 struct intf_status status;
706
707 if (!phys_enc) {
708 SDE_ERROR("invalid encoder\n");
709 return false;
710 }
711
712 vid_enc = to_sde_encoder_phys_vid(phys_enc);
713 intf = vid_enc->hw_intf;
714 ctl = phys_enc->hw_ctl;
715 if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
716 SDE_ERROR("invalid hw_intf %d hw_ctl %d\n",
717 vid_enc->hw_intf != NULL, phys_enc->hw_ctl != NULL);
718 return false;
719 }
720
721 if (!intf->ops.get_status)
722 return false;
723
724 intf->ops.get_status(intf, &status);
725
726 /* if interface is not enabled, return true to wait for dma trigger */
727 return status.is_en ? false : true;
728}
729
/*
 * sde_encoder_phys_vid_enable - stage the video encoder for enabling
 * @phys_enc: physical encoder structure
 *
 * Programs split config and the timing engine, optionally sets up CDM for
 * YUV 4:2:0 / 4:2:2 output modes, and stages the interface (and CDM) flush
 * bits. The actual ctl flush and timing engine enable are triggered later
 * by the framework (see handle_post_kickoff).
 */
static void sde_encoder_phys_vid_enable(struct sde_encoder_phys *phys_enc)
{
	struct msm_drm_private *priv;
	struct sde_encoder_phys_vid *vid_enc;
	struct sde_hw_intf *intf;
	struct sde_hw_ctl *ctl;
	struct sde_hw_cdm *hw_cdm = NULL;
	struct drm_display_mode mode;
	const struct sde_format *fmt = NULL;
	u32 flush_mask = 0;

	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->dev ||
			!phys_enc->parent->dev->dev_private ||
			!phys_enc->sde_kms) {
		SDE_ERROR("invalid encoder/device\n");
		return;
	}
	hw_cdm = phys_enc->hw_cdm;
	priv = phys_enc->parent->dev->dev_private;
	mode = phys_enc->cached_mode;

	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	intf = vid_enc->hw_intf;
	ctl = phys_enc->hw_ctl;
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		SDE_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	SDE_DEBUG_VIDENC(vid_enc, "\n");

	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	/* reset state variables until after first update */
	vid_enc->rot_fetch_valid = false;

	/* during cont. splash handoff the bootloader already configured split */
	if (!phys_enc->sde_kms->splash_data.cont_splash_en)
		sde_encoder_helper_split_config(phys_enc,
				vid_enc->hw_intf->idx);

	sde_encoder_phys_vid_setup_timing_engine(phys_enc);

	/*
	 * For pp-split, skip setting the flush bit for the slave intf,
	 * since both intfs use same ctl and HW will only flush the master.
	 */
	if (_sde_encoder_phys_is_ppsplit(phys_enc) &&
			!sde_encoder_phys_vid_is_master(phys_enc))
		goto skip_flush;

	/**
	 * skip flushing intf during cont. splash handoff since bootloader
	 * has already enabled the hardware and is single buffered.
	 */

	if (phys_enc->sde_kms->splash_data.cont_splash_en) {
		SDE_DEBUG_VIDENC(vid_enc,
			"skipping intf flush bit set as cont. splash is enabled\n");
		goto skip_flush;
	}

	/* pick a CDM input format when the mode requests subsampled YUV out */
	if (mode.private_flags & MSM_MODE_FLAG_COLOR_FORMAT_YCBCR420)
		fmt = sde_get_sde_format(DRM_FORMAT_YUV420);
	else if (mode.private_flags & MSM_MODE_FLAG_COLOR_FORMAT_YCBCR422)
		fmt = sde_get_sde_format(DRM_FORMAT_NV61);

	if (fmt) {
		struct sde_rect hdmi_roi;

		/* CDM downscale region covers the full active area */
		hdmi_roi.w = mode.hdisplay;
		hdmi_roi.h = mode.vdisplay;
		sde_encoder_phys_setup_cdm(phys_enc, fmt,
				CDM_CDWN_OUTPUT_HDMI, &hdmi_roi);
	}

	/* stage intf (and CDM, when used) bits in the pending flush mask */
	ctl->ops.get_bitmask_intf(ctl, &flush_mask, intf->idx);
	if (ctl->ops.get_bitmask_cdm && hw_cdm)
		ctl->ops.get_bitmask_cdm(ctl, &flush_mask, hw_cdm->idx);
	ctl->ops.update_pending_flush(ctl, flush_mask);

skip_flush:
	SDE_DEBUG_VIDENC(vid_enc, "update pending flush ctl %d flush_mask %x\n",
			ctl->idx - CTL_0, flush_mask);

	/* ctl_flush & timing engine enable will be triggered by framework */
	if (phys_enc->enable_state == SDE_ENC_DISABLED)
		phys_enc->enable_state = SDE_ENC_ENABLING;

	return;
}
822
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400823static void sde_encoder_phys_vid_destroy(struct sde_encoder_phys *phys_enc)
824{
Dhaval Patel79613522016-10-25 23:18:48 -0700825 struct sde_encoder_phys_vid *vid_enc;
Lloyd Atkinson5d722782016-05-30 14:09:41 -0400826
Clarence Ip19af1362016-09-23 14:57:51 -0400827 if (!phys_enc) {
828 SDE_ERROR("invalid encoder\n");
829 return;
830 }
Dhaval Patel79613522016-10-25 23:18:48 -0700831
832 vid_enc = to_sde_encoder_phys_vid(phys_enc);
Clarence Ip19af1362016-09-23 14:57:51 -0400833 SDE_DEBUG_VIDENC(vid_enc, "\n");
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400834 kfree(vid_enc);
835}
836
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -0400837static void sde_encoder_phys_vid_get_hw_resources(
838 struct sde_encoder_phys *phys_enc,
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400839 struct sde_encoder_hw_resources *hw_res,
840 struct drm_connector_state *conn_state)
Lloyd Atkinson09fed912016-06-24 18:14:13 -0400841{
Dhaval Patel79613522016-10-25 23:18:48 -0700842 struct sde_encoder_phys_vid *vid_enc;
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530843 struct sde_mdss_cfg *vid_catalog;
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400844
Dhaval Patel79613522016-10-25 23:18:48 -0700845 if (!phys_enc || !hw_res) {
Clarence Ip19af1362016-09-23 14:57:51 -0400846 SDE_ERROR("invalid arg(s), enc %d hw_res %d conn_state %d\n",
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530847 phys_enc != NULL, hw_res != NULL, conn_state != NULL);
Clarence Ip19af1362016-09-23 14:57:51 -0400848 return;
849 }
Dhaval Patel79613522016-10-25 23:18:48 -0700850
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530851 vid_catalog = phys_enc->sde_kms->catalog;
Dhaval Patel79613522016-10-25 23:18:48 -0700852 vid_enc = to_sde_encoder_phys_vid(phys_enc);
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530853 if (!vid_enc->hw_intf || !vid_catalog) {
854 SDE_ERROR("invalid arg(s), hw_intf %d vid_catalog %d\n",
855 vid_enc->hw_intf != NULL, vid_catalog != NULL);
Dhaval Patel79613522016-10-25 23:18:48 -0700856 return;
857 }
858
Clarence Ip19af1362016-09-23 14:57:51 -0400859 SDE_DEBUG_VIDENC(vid_enc, "\n");
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530860 if (vid_enc->hw_intf->idx > INTF_MAX) {
861 SDE_ERROR("invalid arg(s), idx %d\n",
862 vid_enc->hw_intf->idx);
863 return;
864 }
Lloyd Atkinson11f34442016-08-11 11:19:52 -0400865 hw_res->intfs[vid_enc->hw_intf->idx - INTF_0] = INTF_MODE_VIDEO;
Narender Ankamc7ce0b02020-03-16 17:40:34 +0530866
867 if (vid_catalog->intf[vid_enc->hw_intf->idx - INTF_0].type
868 == INTF_DP)
869 hw_res->needs_cdm = true;
Chirag Khuranaed859f52019-11-20 18:18:12 +0530870 SDE_DEBUG_DRIVER("[vid] needs_cdm=%d\n", hw_res->needs_cdm);
Abhijit Kulkarni40e38162016-06-26 22:12:09 -0400871}
872
/*
 * _sde_encoder_phys_vid_wait_for_vblank - wait for pending kickoff to clear
 * @phys_enc: physical encoder structure
 * @notify: true to send a frame-done event to the parent on completion
 *
 * Only the master encoder actually waits on the VSYNC interrupt; a slave
 * just signals frame-done (unless it is pp-split, which shares the master's
 * ctl). Returns 0 on success or a negative error code (e.g. -ETIMEDOUT).
 */
static int _sde_encoder_phys_vid_wait_for_vblank(
		struct sde_encoder_phys *phys_enc, bool notify)
{
	struct sde_encoder_wait_info wait_info;
	int ret = 0;
	u32 event = 0;

	if (!phys_enc) {
		pr_err("invalid encoder\n");
		return -EINVAL;
	}

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	if (!sde_encoder_phys_vid_is_master(phys_enc)) {
		/* signal done for slave video encoder, unless it is pp-split */
		if (!_sde_encoder_phys_is_ppsplit(phys_enc) && notify) {
			event = SDE_ENCODER_FRAME_EVENT_DONE;
			goto end;
		}
		return 0;
	}

	/* Wait for kickoff to complete */
	ret = sde_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_VSYNC,
			&wait_info);

	/* on timeout, release all fences and flag the frame as errored */
	if (ret == -ETIMEDOUT)
		event = SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE
			| SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE
			| SDE_ENCODER_FRAME_EVENT_ERROR;
	else if (!ret && notify)
		event = SDE_ENCODER_FRAME_EVENT_DONE;

end:
	SDE_EVT32(DRMID(phys_enc->parent), event, notify, ret,
			ret ? SDE_EVTLOG_FATAL : 0);
	if (phys_enc->parent_ops.handle_frame_done && event)
		phys_enc->parent_ops.handle_frame_done(
				phys_enc->parent, phys_enc,
				event);
	return ret;
}
918
/* wait for vblank and notify frame-done to the parent encoder */
static int sde_encoder_phys_vid_wait_for_vblank(
		struct sde_encoder_phys *phys_enc)
{
	return _sde_encoder_phys_vid_wait_for_vblank(phys_enc, true);
}
924
/*
 * sde_encoder_phys_vid_prepare_for_kickoff - pre-kickoff ctl reset handling
 * @phys_enc: physical encoder structure
 * @params: kickoff parameters (rotator prefill, primary flag)
 *
 * Waits for any hardware-initiated ctl reset to complete before a new frame
 * is kicked off, tracks consecutive reset failures, and programs the
 * rotator fetch configuration. Returns 0 on success or the reset-wait
 * error code.
 */
static int sde_encoder_phys_vid_prepare_for_kickoff(
		struct sde_encoder_phys *phys_enc,
		struct sde_encoder_kickoff_params *params)
{
	struct sde_encoder_phys_vid *vid_enc;
	struct sde_hw_ctl *ctl;
	int rc;

	if (!phys_enc || !params || !phys_enc->hw_ctl) {
		SDE_ERROR("invalid encoder/parameters\n");
		return -EINVAL;
	}
	vid_enc = to_sde_encoder_phys_vid(phys_enc);

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.wait_reset_status)
		return 0;

	/*
	 * hw supports hardware initiated ctl reset, so before we kickoff a new
	 * frame, need to check and wait for hw initiated ctl reset completion
	 */
	rc = ctl->ops.wait_reset_status(ctl);
	if (rc) {
		SDE_ERROR_VIDENC(vid_enc, "ctl %d reset failure: %d\n",
				ctl->idx, rc);

		/* clamp the counter; dump debug state on repeated failures */
		++vid_enc->error_count;
		if (vid_enc->error_count >= KICKOFF_MAX_ERRORS) {
			vid_enc->error_count = KICKOFF_MAX_ERRORS;

			SDE_DBG_DUMP("panic");
		} else if (vid_enc->error_count == 1) {
			SDE_EVT32(DRMID(phys_enc->parent), SDE_EVTLOG_FATAL);
		}

		/* request a ctl reset before the next flush */
		phys_enc->enable_state = SDE_ENC_ERR_NEEDS_HW_RESET;
	} else {
		/* a clean wait resets the consecutive-error tracking */
		vid_enc->error_count = 0;
	}

	programmable_rot_fetch_config(phys_enc,
			params->inline_rotate_prefill, params->is_primary);

	return rc;
}
972
/*
 * sde_encoder_phys_vid_disable - disable the video mode timing engine
 * @phys_enc: physical encoder structure
 *
 * Disables the interface timing engine under the encoder spinlock, then
 * (master only) waits for a vsync so the disable actually latches in
 * hardware before the vsync source goes away, and finally disables any
 * active CDM block.
 */
static void sde_encoder_phys_vid_disable(struct sde_encoder_phys *phys_enc)
{
	struct msm_drm_private *priv;
	struct sde_encoder_phys_vid *vid_enc;
	unsigned long lock_flags;
	int ret;

	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->dev ||
			!phys_enc->parent->dev->dev_private) {
		SDE_ERROR("invalid encoder/device\n");
		return;
	}
	priv = phys_enc->parent->dev->dev_private;

	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		SDE_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	SDE_DEBUG_VIDENC(vid_enc, "\n");

	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR("already disabled\n");
		return;
	}

	/* turn the timing engine off; master also accounts a pending frame */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 0);
	if (sde_encoder_phys_vid_is_master(phys_enc))
		sde_encoder_phys_inc_pending(phys_enc);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	if (!sde_encoder_phys_vid_is_master(phys_enc))
		goto exit;

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before
	 * the (connector) source of the vsync's gets disabled,
	 * otherwise we end up in a funny state if we re-enable
	 * before the disable latches, which results that some of
	 * the settings changes for the new modeset (like new
	 * scanout buffer) don't latch properly..
	 */
	ret = sde_encoder_phys_vid_control_vblank_irq(phys_enc, true);
	if (ret) {
		SDE_ERROR_VIDENC(vid_enc,
				"failed to enable vblank irq: %d\n",
				ret);
		SDE_EVT32(DRMID(phys_enc->parent),
				vid_enc->hw_intf->idx - INTF_0, ret,
				SDE_EVTLOG_FUNC_CASE1,
				SDE_EVTLOG_ERROR);
	} else {
		ret = _sde_encoder_phys_vid_wait_for_vblank(phys_enc, false);
		if (ret) {
			/* clear the stuck pending count so we aren't wedged */
			atomic_set(&phys_enc->pending_kickoff_cnt, 0);
			SDE_ERROR_VIDENC(vid_enc,
					"failure waiting for disable: %d\n",
					ret);
			SDE_EVT32(DRMID(phys_enc->parent),
					vid_enc->hw_intf->idx - INTF_0, ret,
					SDE_EVTLOG_FUNC_CASE2,
					SDE_EVTLOG_ERROR);
		}
		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
	}

	/* tear down CDM if it was set up for YUV output */
	if (phys_enc->hw_cdm && phys_enc->hw_cdm->ops.disable) {
		SDE_DEBUG_DRIVER("[cdm_disable]\n");
		phys_enc->hw_cdm->ops.disable(phys_enc->hw_cdm);
	}
exit:
	phys_enc->vfp_cached = 0;
	phys_enc->enable_state = SDE_ENC_DISABLED;
}
1053
/*
 * sde_encoder_phys_vid_handle_post_kickoff - enable timing after first flush
 * @phys_enc: physical encoder structure
 *
 * Completes the deferred part of enable: once the first ctl flush has been
 * kicked off, turn the interface timing engine on and mark the encoder
 * fully enabled.
 */
static void sde_encoder_phys_vid_handle_post_kickoff(
		struct sde_encoder_phys *phys_enc)
{
	unsigned long lock_flags;
	struct sde_encoder_phys_vid *vid_enc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	SDE_DEBUG_VIDENC(vid_enc, "enable_state %d\n", phys_enc->enable_state);

	/*
	 * Video mode must flush CTL before enabling timing engine
	 * Video encoders need to turn on their interfaces now
	 */
	if (phys_enc->enable_state == SDE_ENC_ENABLING) {
		SDE_EVT32(DRMID(phys_enc->parent),
				vid_enc->hw_intf->idx - INTF_0);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 1);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
		phys_enc->enable_state = SDE_ENC_ENABLED;
	}
}
1081
Dhaval Patele17e0ee2017-08-23 18:01:42 -07001082static void sde_encoder_phys_vid_irq_control(struct sde_encoder_phys *phys_enc,
1083 bool enable)
1084{
1085 struct sde_encoder_phys_vid *vid_enc;
1086 int ret;
1087
1088 if (!phys_enc)
1089 return;
1090
1091 vid_enc = to_sde_encoder_phys_vid(phys_enc);
1092
1093 SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx - INTF_0,
1094 enable, atomic_read(&phys_enc->vblank_refcount));
1095
1096 if (enable) {
1097 ret = sde_encoder_phys_vid_control_vblank_irq(phys_enc, true);
1098 if (ret)
1099 return;
1100
1101 sde_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
1102 } else {
1103 sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
1104 sde_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
1105 }
1106}
1107
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301108static void sde_encoder_phys_vid_setup_misr(struct sde_encoder_phys *phys_enc,
Dhaval Patelf9245d62017-03-28 16:24:00 -07001109 bool enable, u32 frame_count)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301110{
Dhaval Patelf9245d62017-03-28 16:24:00 -07001111 struct sde_encoder_phys_vid *vid_enc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301112
Dhaval Patelf9245d62017-03-28 16:24:00 -07001113 if (!phys_enc)
1114 return;
1115 vid_enc = to_sde_encoder_phys_vid(phys_enc);
1116
1117 if (vid_enc->hw_intf && vid_enc->hw_intf->ops.setup_misr)
1118 vid_enc->hw_intf->ops.setup_misr(vid_enc->hw_intf,
1119 enable, frame_count);
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301120}
1121
Dhaval Patelf9245d62017-03-28 16:24:00 -07001122static u32 sde_encoder_phys_vid_collect_misr(struct sde_encoder_phys *phys_enc)
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301123{
Dhaval Patelf9245d62017-03-28 16:24:00 -07001124 struct sde_encoder_phys_vid *vid_enc;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301125
Dhaval Patelf9245d62017-03-28 16:24:00 -07001126 if (!phys_enc)
1127 return 0;
1128 vid_enc = to_sde_encoder_phys_vid(phys_enc);
1129
1130 return vid_enc->hw_intf && vid_enc->hw_intf->ops.collect_misr ?
1131 vid_enc->hw_intf->ops.collect_misr(vid_enc->hw_intf) : 0;
Jayant Shekhar1d50ed22016-11-04 18:41:12 +05301132}
1133
Benjamin Chan9cd866d2017-08-15 14:56:34 -04001134static int sde_encoder_phys_vid_get_line_count(
1135 struct sde_encoder_phys *phys_enc)
1136{
1137 struct sde_encoder_phys_vid *vid_enc;
1138
1139 if (!phys_enc)
1140 return -EINVAL;
1141
1142 if (!sde_encoder_phys_vid_is_master(phys_enc))
1143 return -EINVAL;
1144
1145 vid_enc = to_sde_encoder_phys_vid(phys_enc);
1146 if (!vid_enc->hw_intf || !vid_enc->hw_intf->ops.get_line_count)
1147 return -EINVAL;
1148
1149 return vid_enc->hw_intf->ops.get_line_count(vid_enc->hw_intf);
1150}
1151
Sandeep Panda11b20d82017-06-19 12:57:27 +05301152static int sde_encoder_phys_vid_wait_for_active(
1153 struct sde_encoder_phys *phys_enc)
1154{
1155 struct drm_display_mode mode;
1156 struct sde_encoder_phys_vid *vid_enc;
1157 u32 ln_cnt, min_ln_cnt, active_lns_cnt;
1158 u32 clk_period, time_of_line;
1159 u32 delay, retry = MAX_POLL_CNT;
1160
1161 vid_enc = to_sde_encoder_phys_vid(phys_enc);
1162
1163 if (!vid_enc->hw_intf || !vid_enc->hw_intf->ops.get_line_count) {
1164 SDE_ERROR_VIDENC(vid_enc, "invalid vid_enc params\n");
1165 return -EINVAL;
1166 }
1167
1168 mode = phys_enc->cached_mode;
1169
1170 /*
1171 * calculate clk_period as pico second to maintain good
1172 * accuracy with high pclk rate and this number is in 17 bit
1173 * range.
1174 */
1175 clk_period = DIV_ROUND_UP_ULL(1000000000, mode.clock);
1176 if (!clk_period) {
1177 SDE_ERROR_VIDENC(vid_enc, "Unable to calculate clock period\n");
1178 return -EINVAL;
1179 }
1180
1181 min_ln_cnt = (mode.vtotal - mode.vsync_start) +
1182 (mode.vsync_end - mode.vsync_start);
1183 active_lns_cnt = mode.vdisplay;
1184 time_of_line = mode.htotal * clk_period;
1185
1186 /* delay in micro seconds */
1187 delay = (time_of_line * (min_ln_cnt +
1188 (mode.vsync_start - mode.vdisplay))) / 1000000;
1189
1190 /*
1191 * Wait for max delay before
1192 * polling to check active region
1193 */
1194 if (delay > POLL_TIME_USEC_FOR_LN_CNT)
1195 delay = POLL_TIME_USEC_FOR_LN_CNT;
1196
1197 while (retry) {
1198 ln_cnt = vid_enc->hw_intf->ops.get_line_count(vid_enc->hw_intf);
1199
1200 if ((ln_cnt >= min_ln_cnt) &&
1201 (ln_cnt < (active_lns_cnt + min_ln_cnt))) {
1202 SDE_DEBUG_VIDENC(vid_enc,
1203 "Needed lines left line_cnt=%d\n",
1204 ln_cnt);
1205 return 0;
1206 }
1207
1208 SDE_ERROR_VIDENC(vid_enc, "line count is less. line_cnt = %d\n",
1209 ln_cnt);
1210 /* Add delay so that line count is in active region */
1211 udelay(delay);
1212 retry--;
1213 }
1214
1215 return -EINVAL;
1216}
1217
/*
 * sde_encoder_phys_vid_init_ops - populate the video phys encoder ops table
 * @ops: function table to fill in
 */
static void sde_encoder_phys_vid_init_ops(struct sde_encoder_phys_ops *ops)
{
	ops->is_master = sde_encoder_phys_vid_is_master;
	ops->mode_set = sde_encoder_phys_vid_mode_set;
	ops->cont_splash_mode_set = sde_encoder_phys_vid_cont_splash_mode_set;
	ops->mode_fixup = sde_encoder_phys_vid_mode_fixup;
	ops->enable = sde_encoder_phys_vid_enable;
	ops->disable = sde_encoder_phys_vid_disable;
	ops->destroy = sde_encoder_phys_vid_destroy;
	ops->get_hw_resources = sde_encoder_phys_vid_get_hw_resources;
	ops->control_vblank_irq = sde_encoder_phys_vid_control_vblank_irq;
	/* all three waits map to the same vblank wait in video mode */
	ops->wait_for_commit_done = sde_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_vblank = sde_encoder_phys_vid_wait_for_vblank;
	ops->wait_for_tx_complete = sde_encoder_phys_vid_wait_for_vblank;
	ops->irq_control = sde_encoder_phys_vid_irq_control;
	ops->prepare_for_kickoff = sde_encoder_phys_vid_prepare_for_kickoff;
	ops->handle_post_kickoff = sde_encoder_phys_vid_handle_post_kickoff;
	ops->needs_single_flush = sde_encoder_phys_vid_needs_single_flush;
	ops->setup_misr = sde_encoder_phys_vid_setup_misr;
	ops->collect_misr = sde_encoder_phys_vid_collect_misr;
	/* generic helpers shared with other phys encoder types */
	ops->trigger_flush = sde_encoder_helper_trigger_flush;
	ops->hw_reset = sde_encoder_helper_hw_reset;
	ops->get_line_count = sde_encoder_phys_vid_get_line_count;
	ops->get_wr_line_count = sde_encoder_phys_vid_get_line_count;
	ops->wait_dma_trigger = sde_encoder_phys_vid_wait_dma_trigger;
	ops->wait_for_active = sde_encoder_phys_vid_wait_for_active;
}
1245
Lloyd Atkinsonf30546e2016-06-26 10:08:25 -04001246struct sde_encoder_phys *sde_encoder_phys_vid_init(
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04001247 struct sde_enc_phys_init_params *p)
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001248{
1249 struct sde_encoder_phys *phys_enc = NULL;
1250 struct sde_encoder_phys_vid *vid_enc = NULL;
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001251 struct sde_rm_hw_iter iter;
1252 struct sde_hw_mdp *hw_mdp;
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05001253 struct sde_encoder_irq *irq;
Alan Kwonga172ef52016-09-27 00:29:10 -04001254 int i, ret = 0;
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001255
Clarence Ip19af1362016-09-23 14:57:51 -04001256 if (!p) {
1257 ret = -EINVAL;
1258 goto fail;
1259 }
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001260
1261 vid_enc = kzalloc(sizeof(*vid_enc), GFP_KERNEL);
1262 if (!vid_enc) {
1263 ret = -ENOMEM;
1264 goto fail;
1265 }
Ben Chan78647cd2016-06-26 22:02:47 -04001266
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001267 phys_enc = &vid_enc->base;
1268
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001269 hw_mdp = sde_rm_get_mdp(&p->sde_kms->rm);
1270 if (IS_ERR_OR_NULL(hw_mdp)) {
1271 ret = PTR_ERR(hw_mdp);
1272 SDE_ERROR("failed to get mdptop\n");
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04001273 goto fail;
1274 }
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001275 phys_enc->hw_mdptop = hw_mdp;
Dhaval Patel81e87882016-10-19 21:41:56 -07001276 phys_enc->intf_idx = p->intf_idx;
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04001277
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001278 /**
1279 * hw_intf resource permanently assigned to this encoder
1280 * Other resources allocated at atomic commit time by use case
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001281 */
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001282 sde_rm_init_hw_iter(&iter, 0, SDE_HW_BLK_INTF);
1283 while (sde_rm_get_hw(&p->sde_kms->rm, &iter)) {
1284 struct sde_hw_intf *hw_intf = (struct sde_hw_intf *)iter.hw;
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001285
Lloyd Atkinson11f34442016-08-11 11:19:52 -04001286 if (hw_intf->idx == p->intf_idx) {
1287 vid_enc->hw_intf = hw_intf;
1288 break;
1289 }
1290 }
1291
1292 if (!vid_enc->hw_intf) {
1293 ret = -EINVAL;
Clarence Ip19af1362016-09-23 14:57:51 -04001294 SDE_ERROR("failed to get hw_intf\n");
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001295 goto fail;
1296 }
1297
Clarence Ip19af1362016-09-23 14:57:51 -04001298 SDE_DEBUG_VIDENC(vid_enc, "\n");
1299
Lloyd Atkinsone5c2c0b2016-07-05 12:23:29 -04001300 sde_encoder_phys_vid_init_ops(&phys_enc->ops);
Lloyd Atkinson6ef6cb52016-07-06 11:49:18 -04001301 phys_enc->parent = p->parent;
1302 phys_enc->parent_ops = p->parent_ops;
1303 phys_enc->sde_kms = p->sde_kms;
1304 phys_enc->split_role = p->split_role;
Clarence Ip03521982016-08-26 10:49:47 -04001305 phys_enc->intf_mode = INTF_MODE_VIDEO;
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001306 phys_enc->enc_spinlock = p->enc_spinlock;
Raviteja Tamatam3ea60b82018-04-27 15:41:18 +05301307 phys_enc->vblank_ctl_lock = p->vblank_ctl_lock;
Jeykumar Sankaran5c2f0702017-03-09 18:03:15 -08001308 phys_enc->comp_type = p->comp_type;
Lloyd Atkinson05ef8232017-03-08 16:35:36 -05001309 for (i = 0; i < INTR_IDX_MAX; i++) {
1310 irq = &phys_enc->irq[i];
1311 INIT_LIST_HEAD(&irq->cb.list);
1312 irq->irq_idx = -EINVAL;
1313 irq->hw_idx = -EINVAL;
1314 irq->cb.arg = phys_enc;
1315 }
1316
1317 irq = &phys_enc->irq[INTR_IDX_VSYNC];
1318 irq->name = "vsync_irq";
1319 irq->intr_type = SDE_IRQ_TYPE_INTF_VSYNC;
1320 irq->intr_idx = INTR_IDX_VSYNC;
1321 irq->cb.func = sde_encoder_phys_vid_vblank_irq;
1322
1323 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
1324 irq->name = "underrun";
1325 irq->intr_type = SDE_IRQ_TYPE_INTF_UNDER_RUN;
1326 irq->intr_idx = INTR_IDX_UNDERRUN;
1327 irq->cb.func = sde_encoder_phys_vid_underrun_irq;
1328
Alan Kwongbaa56352016-10-04 09:38:11 -04001329 atomic_set(&phys_enc->vblank_refcount, 0);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001330 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
Veera Sundaram Sankaran675ff622017-06-21 21:44:46 -07001331 atomic_set(&phys_enc->pending_retire_fence_cnt, 0);
Lloyd Atkinson7d070942016-07-26 18:35:12 -04001332 init_waitqueue_head(&phys_enc->pending_kickoff_wq);
Lloyd Atkinson5d722782016-05-30 14:09:41 -04001333 phys_enc->enable_state = SDE_ENC_DISABLED;
1334
Dhaval Patel81e87882016-10-19 21:41:56 -07001335 SDE_DEBUG_VIDENC(vid_enc, "created intf idx:%d\n", p->intf_idx);
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001336
1337 return phys_enc;
1338
1339fail:
Clarence Ip19af1362016-09-23 14:57:51 -04001340 SDE_ERROR("failed to create encoder\n");
Lloyd Atkinson09fed912016-06-24 18:14:13 -04001341 if (vid_enc)
1342 sde_encoder_phys_vid_destroy(phys_enc);
1343
1344 return ERR_PTR(ret);
1345}