/*
 * Copyright (c) 2015-2017 The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */

#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__

#include <linux/jiffies.h>
#include <linux/debugfs.h>

#include "sde_encoder_phys.h"
#include "sde_formats.h"
#include "sde_hw_top.h"
#include "sde_hw_interrupts.h"
#include "sde_core_irq.h"
#include "sde_wb.h"
#include "sde_vbif.h"

/* wait for at most 2 vsync for lowest refresh rate (24hz) */
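/* 2 frames at 24 fps: 2 * 1000 / 24 ms ~= 83.3 ms, rounded up to 84 */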
#define WAIT_TIMEOUT_MSEC 84

#define to_sde_encoder_phys_wb(x) \
	container_of(x, struct sde_encoder_phys_wb, base)

#define DEV(phy_enc) (phy_enc->parent->dev)

/**
 * sde_encoder_phys_wb_is_master - report wb always as master encoder
 */
static bool sde_encoder_phys_wb_is_master(struct sde_encoder_phys *phys_enc)
{
	return true;
}

/**
 * sde_encoder_phys_wb_get_intr_type - get interrupt type based on block mode
 * @hw_wb: Pointer to h/w writeback driver
 */
static enum sde_intr_type sde_encoder_phys_wb_get_intr_type(
		struct sde_hw_wb *hw_wb)
{
	return (hw_wb->caps->features & BIT(SDE_WB_BLOCK_MODE)) ?
			SDE_IRQ_TYPE_WB_ROT_COMP : SDE_IRQ_TYPE_WB_WFD_COMP;
}

/**
 * sde_encoder_phys_wb_set_ot_limit - set OT limit for writeback interface
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_set_ot_limit(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct sde_vbif_set_ot_params ot_params;

	memset(&ot_params, 0, sizeof(ot_params));
	ot_params.xin_id = hw_wb->caps->xin_id;
	ot_params.num = hw_wb->idx - WB_0;
	ot_params.width = wb_enc->wb_roi.w;
	ot_params.height = wb_enc->wb_roi.h;
	ot_params.is_wfd = true;
	ot_params.frame_rate = phys_enc->cached_mode.vrefresh;
	ot_params.vbif_idx = hw_wb->caps->vbif_idx;
	ot_params.clk_ctrl = hw_wb->caps->clk_ctrl;
	ot_params.rd = false;

	sde_vbif_set_ot_limit(phys_enc->sde_kms, &ot_params);
}

/**
 * sde_encoder_phys_wb_set_traffic_shaper - set traffic shaper for writeback
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_set_traffic_shaper(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb_cfg *wb_cfg = &wb_enc->wb_cfg;

	/* traffic shaper is only enabled for rotator */
	wb_cfg->ts_cfg.en = false;
}

/**
 * sde_encoder_phys_setup_cdm - setup chroma down block
 * @phys_enc: Pointer to physical encoder
 * @fb: Pointer to output framebuffer
 * @format: Output format
 * @wb_roi: Pointer to output region of interest
 */
void sde_encoder_phys_setup_cdm(struct sde_encoder_phys *phys_enc,
		struct drm_framebuffer *fb, const struct sde_format *format,
		struct sde_rect *wb_roi)
{
	struct sde_hw_cdm *hw_cdm = phys_enc->hw_cdm;
	struct sde_hw_cdm_cfg *cdm_cfg = &phys_enc->cdm_cfg;
	int ret;

	if (!SDE_FORMAT_IS_YUV(format)) {
		SDE_DEBUG("[cdm_disable fmt:%x]\n",
				format->base.pixel_format);

		if (hw_cdm && hw_cdm->ops.disable)
			hw_cdm->ops.disable(hw_cdm);

		return;
	}

	memset(cdm_cfg, 0, sizeof(struct sde_hw_cdm_cfg));

	cdm_cfg->output_width = wb_roi->w;
	cdm_cfg->output_height = wb_roi->h;
	cdm_cfg->output_fmt = format;
	cdm_cfg->output_type = CDM_CDWN_OUTPUT_WB;
	cdm_cfg->output_bit_depth = CDM_CDWN_OUTPUT_8BIT;

	/* enable 10 bit logic */
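	/*
	 * Pick the chroma downsample method from the output format's chroma
	 * siting: RGB needs no downsampling, H2V1 only needs horizontal
	 * cosite filtering, and 4:2:0 needs both directions.
	 */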
	switch (cdm_cfg->output_fmt->chroma_sample) {
	case SDE_CHROMA_RGB:
		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	case SDE_CHROMA_H2V1:
		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	case SDE_CHROMA_420:
		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_OFFSITE;
		break;
	case SDE_CHROMA_H1V2:
	default:
		SDE_ERROR("unsupported chroma sampling type\n");
		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
		break;
	}

	SDE_DEBUG("[cdm_enable:%d,%d,%X,%d,%d,%d,%d]\n",
			cdm_cfg->output_width,
			cdm_cfg->output_height,
			cdm_cfg->output_fmt->base.pixel_format,
			cdm_cfg->output_type,
			cdm_cfg->output_bit_depth,
			cdm_cfg->h_cdwn_type,
			cdm_cfg->v_cdwn_type);

	if (hw_cdm && hw_cdm->ops.setup_cdwn) {
		ret = hw_cdm->ops.setup_cdwn(hw_cdm, cdm_cfg);
		if (ret < 0) {
			SDE_ERROR("failed to setup CDM %d\n", ret);
			return;
		}
	}

	if (hw_cdm && hw_cdm->ops.enable) {
		ret = hw_cdm->ops.enable(hw_cdm, cdm_cfg);
		if (ret < 0) {
			SDE_ERROR("failed to enable CDM %d\n", ret);
			return;
		}
	}
}

/**
 * sde_encoder_phys_wb_setup_fb - setup output framebuffer
 * @phys_enc: Pointer to physical encoder
 * @fb: Pointer to output framebuffer
 * @wb_roi: Pointer to output region of interest
 */
static void sde_encoder_phys_wb_setup_fb(struct sde_encoder_phys *phys_enc,
		struct drm_framebuffer *fb, struct sde_rect *wb_roi)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb;
	struct sde_hw_wb_cfg *wb_cfg;
	const struct msm_format *format;
	int ret, mmu_id;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	hw_wb = wb_enc->hw_wb;
	wb_cfg = &wb_enc->wb_cfg;
	memset(wb_cfg, 0, sizeof(struct sde_hw_wb_cfg));

	wb_cfg->intf_mode = phys_enc->intf_mode;
	wb_cfg->is_secure = (fb->flags & DRM_MODE_FB_SECURE) ? true : false;
	mmu_id = (wb_cfg->is_secure) ?
			wb_enc->mmu_id[SDE_IOMMU_DOMAIN_SECURE] :
			wb_enc->mmu_id[SDE_IOMMU_DOMAIN_UNSECURE];

	SDE_DEBUG("[fb_secure:%d]\n", wb_cfg->is_secure);

	format = msm_framebuffer_format(fb);
	if (!format) {
		SDE_DEBUG("invalid format for fb\n");
		return;
	}

	wb_cfg->dest.format = sde_get_sde_format_ext(
			format->pixel_format,
			fb->modifier,
			drm_format_num_planes(fb->pixel_format));
	if (!wb_cfg->dest.format) {
		/* this error should be detected during atomic_check */
		SDE_ERROR("failed to get format %x\n", format->pixel_format);
		return;
	}
	wb_cfg->roi = *wb_roi;

	if (hw_wb->caps->features & BIT(SDE_WB_XY_ROI_OFFSET)) {
		ret = sde_format_populate_layout(mmu_id, fb, &wb_cfg->dest);
		if (ret) {
			SDE_DEBUG("failed to populate layout %d\n", ret);
			return;
		}
		wb_cfg->dest.width = fb->width;
		wb_cfg->dest.height = fb->height;
		wb_cfg->dest.num_planes = wb_cfg->dest.format->num_planes;
	} else {
		ret = sde_format_populate_layout_with_roi(mmu_id, fb, wb_roi,
				&wb_cfg->dest);
		if (ret) {
			/* this error should be detected during atomic_check */
			SDE_DEBUG("failed to populate layout %d\n", ret);
			return;
		}
	}

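	/*
	 * For planar YUV output where Cb is the first chroma component, swap
	 * the two chroma plane addresses before programming the writeback
	 * destination.
	 */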
	if ((wb_cfg->dest.format->fetch_planes == SDE_PLANE_PLANAR) &&
			(wb_cfg->dest.format->element[0] == C1_B_Cb))
		swap(wb_cfg->dest.plane_addr[1], wb_cfg->dest.plane_addr[2]);

	SDE_DEBUG("[fb_offset:%8.8x,%8.8x,%8.8x,%8.8x]\n",
			wb_cfg->dest.plane_addr[0],
			wb_cfg->dest.plane_addr[1],
			wb_cfg->dest.plane_addr[2],
			wb_cfg->dest.plane_addr[3]);
	SDE_DEBUG("[fb_stride:%8.8x,%8.8x,%8.8x,%8.8x]\n",
			wb_cfg->dest.plane_pitch[0],
			wb_cfg->dest.plane_pitch[1],
			wb_cfg->dest.plane_pitch[2],
			wb_cfg->dest.plane_pitch[3]);

	if (hw_wb->ops.setup_roi)
		hw_wb->ops.setup_roi(hw_wb, wb_cfg);

	if (hw_wb->ops.setup_outformat)
		hw_wb->ops.setup_outformat(hw_wb, wb_cfg);

	if (hw_wb->ops.setup_outaddress)
		hw_wb->ops.setup_outaddress(hw_wb, wb_cfg);
}

/**
 * sde_encoder_phys_wb_setup_cdp - setup chroma down prefetch block
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_setup_cdp(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct sde_hw_intf_cfg *intf_cfg = &wb_enc->intf_cfg;

	memset(intf_cfg, 0, sizeof(struct sde_hw_intf_cfg));

	intf_cfg->intf = SDE_NONE;
	intf_cfg->wb = hw_wb->idx;

	if (phys_enc->hw_ctl && phys_enc->hw_ctl->ops.setup_intf_cfg)
		phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl,
				intf_cfg);
}

/**
 * sde_encoder_phys_wb_atomic_check - verify and fixup given atomic states
 * @phys_enc: Pointer to physical encoder
 * @crtc_state: Pointer to CRTC atomic state
 * @conn_state: Pointer to connector atomic state
 */
static int sde_encoder_phys_wb_atomic_check(
		struct sde_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	const struct sde_wb_cfg *wb_cfg = hw_wb->caps;
	struct drm_framebuffer *fb;
	const struct sde_format *fmt;
	struct sde_rect wb_roi;
	const struct drm_display_mode *mode = &crtc_state->mode;
	int rc;

	SDE_DEBUG("[atomic_check:%d,%d,\"%s\",%d,%d]\n",
			hw_wb->idx - WB_0, mode->base.id, mode->name,
			mode->hdisplay, mode->vdisplay);

	if (!conn_state || !conn_state->connector) {
		SDE_ERROR("invalid connector state\n");
		return -EINVAL;
	} else if (conn_state->connector->status !=
			connector_status_connected) {
		SDE_ERROR("connector not connected %d\n",
				conn_state->connector->status);
		return -EINVAL;
	}

	memset(&wb_roi, 0, sizeof(struct sde_rect));

	rc = sde_wb_connector_state_get_output_roi(conn_state, &wb_roi);
	if (rc) {
		SDE_ERROR("failed to get roi %d\n", rc);
		return rc;
	}

	SDE_DEBUG("[roi:%u,%u,%u,%u]\n", wb_roi.x, wb_roi.y,
			wb_roi.w, wb_roi.h);

	fb = sde_wb_connector_state_get_output_fb(conn_state);
	if (!fb) {
		SDE_ERROR("no output framebuffer\n");
		return -EINVAL;
	}

	SDE_DEBUG("[fb_id:%u][fb:%u,%u]\n", fb->base.id,
			fb->width, fb->height);

	fmt = sde_get_sde_format_ext(fb->pixel_format, fb->modifier,
			drm_format_num_planes(fb->pixel_format));
	if (!fmt) {
		SDE_ERROR("unsupported output pixel format:%x\n",
				fb->pixel_format);
		return -EINVAL;
	}

	SDE_DEBUG("[fb_fmt:%x,%llx]\n", fb->pixel_format,
			fb->modifier[0]);

	if (SDE_FORMAT_IS_YUV(fmt) &&
			!(wb_cfg->features & BIT(SDE_WB_YUV_CONFIG))) {
		SDE_ERROR("invalid output format %x\n", fmt->base.pixel_format);
		return -EINVAL;
	}

	if (SDE_FORMAT_IS_UBWC(fmt) &&
			!(wb_cfg->features & BIT(SDE_WB_UBWC_1_0))) {
		SDE_ERROR("invalid output format %x\n", fmt->base.pixel_format);
		return -EINVAL;
	}

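	/*
	 * A YUV output needs the CDM block while an RGB output does not; if
	 * that requirement differs from what is currently allocated, force a
	 * full modeset so the hardware resources can be re-reserved.
	 */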
	if (SDE_FORMAT_IS_YUV(fmt) != !!phys_enc->hw_cdm)
		crtc_state->mode_changed = true;

	if (wb_roi.w && wb_roi.h) {
		if (wb_roi.w != mode->hdisplay) {
			SDE_ERROR("invalid roi w=%d, mode w=%d\n", wb_roi.w,
					mode->hdisplay);
			return -EINVAL;
		} else if (wb_roi.h != mode->vdisplay) {
			SDE_ERROR("invalid roi h=%d, mode h=%d\n", wb_roi.h,
					mode->vdisplay);
			return -EINVAL;
		} else if (wb_roi.x + wb_roi.w > fb->width) {
			SDE_ERROR("invalid roi x=%d, w=%d, fb w=%d\n",
					wb_roi.x, wb_roi.w, fb->width);
			return -EINVAL;
		} else if (wb_roi.y + wb_roi.h > fb->height) {
			SDE_ERROR("invalid roi y=%d, h=%d, fb h=%d\n",
					wb_roi.y, wb_roi.h, fb->height);
			return -EINVAL;
		} else if (wb_roi.w > wb_cfg->sblk->maxlinewidth) {
			SDE_ERROR("invalid roi w=%d, maxlinewidth=%u\n",
					wb_roi.w, wb_cfg->sblk->maxlinewidth);
			return -EINVAL;
		}
	} else {
		if (wb_roi.x || wb_roi.y) {
			SDE_ERROR("invalid roi x=%d, y=%d\n",
					wb_roi.x, wb_roi.y);
			return -EINVAL;
		} else if (fb->width != mode->hdisplay) {
			SDE_ERROR("invalid fb w=%d, mode w=%d\n", fb->width,
					mode->hdisplay);
			return -EINVAL;
		} else if (fb->height != mode->vdisplay) {
			SDE_ERROR("invalid fb h=%d, mode h=%d\n", fb->height,
					mode->vdisplay);
			return -EINVAL;
		} else if (fb->width > wb_cfg->sblk->maxlinewidth) {
			SDE_ERROR("invalid fb w=%d, maxlinewidth=%u\n",
					fb->width, wb_cfg->sblk->maxlinewidth);
			return -EINVAL;
		}
	}

	return 0;
}

/**
 * sde_encoder_phys_wb_flush - flush hardware update
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_flush(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct sde_hw_ctl *hw_ctl = phys_enc->hw_ctl;
	struct sde_hw_cdm *hw_cdm = phys_enc->hw_cdm;
	u32 flush_mask = 0;

	SDE_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

	if (!hw_ctl) {
		SDE_DEBUG("[wb:%d] no ctl assigned\n", hw_wb->idx - WB_0);
		return;
	}

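	/*
	 * Accumulate the WB and (when present) CDM flush bits into the CTL's
	 * pending flush mask; the CTL applies them on the next flush trigger.
	 */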
	if (hw_ctl->ops.get_bitmask_wb)
		hw_ctl->ops.get_bitmask_wb(hw_ctl, &flush_mask, hw_wb->idx);

	if (hw_ctl->ops.get_bitmask_cdm && hw_cdm)
		hw_ctl->ops.get_bitmask_cdm(hw_ctl, &flush_mask, hw_cdm->idx);

	if (hw_ctl->ops.update_pending_flush)
		hw_ctl->ops.update_pending_flush(hw_ctl, flush_mask);

	SDE_DEBUG("Flushing CTL_ID %d, flush_mask %x, WB %d\n",
			hw_ctl->idx - CTL_0, flush_mask, hw_wb->idx - WB_0);
}

/**
 * sde_encoder_phys_wb_setup - setup writeback encoder
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_setup(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct drm_display_mode mode = phys_enc->cached_mode;
	struct drm_framebuffer *fb;
	struct sde_rect *wb_roi = &wb_enc->wb_roi;

	SDE_DEBUG("[mode_set:%d,%d,\"%s\",%d,%d]\n",
			hw_wb->idx - WB_0, mode.base.id, mode.name,
			mode.hdisplay, mode.vdisplay);

	memset(wb_roi, 0, sizeof(struct sde_rect));

	fb = sde_wb_get_output_fb(wb_enc->wb_dev);
	if (!fb) {
		SDE_DEBUG("no output framebuffer\n");
		return;
	}

	SDE_DEBUG("[fb_id:%u][fb:%u,%u]\n", fb->base.id,
			fb->width, fb->height);

	sde_wb_get_output_roi(wb_enc->wb_dev, wb_roi);
	if (wb_roi->w == 0 || wb_roi->h == 0) {
		wb_roi->x = 0;
		wb_roi->y = 0;
		wb_roi->w = fb->width;
		wb_roi->h = fb->height;
	}

	SDE_DEBUG("[roi:%u,%u,%u,%u]\n", wb_roi->x, wb_roi->y,
			wb_roi->w, wb_roi->h);

	wb_enc->wb_fmt = sde_get_sde_format_ext(fb->pixel_format, fb->modifier,
			drm_format_num_planes(fb->pixel_format));
	if (!wb_enc->wb_fmt) {
		SDE_ERROR("unsupported output pixel format: %d\n",
				fb->pixel_format);
		return;
	}

	SDE_DEBUG("[fb_fmt:%x,%llx]\n", fb->pixel_format,
			fb->modifier[0]);

	sde_encoder_phys_wb_set_ot_limit(phys_enc);

	sde_encoder_phys_wb_set_traffic_shaper(phys_enc);

	sde_encoder_phys_setup_cdm(phys_enc, fb, wb_enc->wb_fmt, wb_roi);

	sde_encoder_phys_wb_setup_fb(phys_enc, fb, wb_roi);

	sde_encoder_phys_wb_setup_cdp(phys_enc);
}

/**
 * sde_encoder_phys_wb_unregister_irq - unregister writeback interrupt handler
 * @phys_enc: Pointer to physical encoder
 */
static int sde_encoder_phys_wb_unregister_irq(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;

	if (wb_enc->bypass_irqreg)
		return 0;

	sde_core_irq_disable(phys_enc->sde_kms, &wb_enc->irq_idx, 1);
	sde_core_irq_register_callback(phys_enc->sde_kms, wb_enc->irq_idx,
			NULL);

	SDE_DEBUG("un-register IRQ for wb %d, irq_idx=%d\n",
			hw_wb->idx - WB_0,
			wb_enc->irq_idx);

	return 0;
}

/**
 * sde_encoder_phys_wb_done_irq - writeback interrupt handler
 * @arg: Pointer to writeback encoder
 * @irq_idx: interrupt index
 */
static void sde_encoder_phys_wb_done_irq(void *arg, int irq_idx)
{
	struct sde_encoder_phys_wb *wb_enc = arg;
	struct sde_encoder_phys *phys_enc = &wb_enc->base;
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;

	SDE_DEBUG("[wb:%d,%u]\n", hw_wb->idx - WB_0,
			wb_enc->frame_count);

	complete_all(&wb_enc->wbdone_complete);

	phys_enc->parent_ops.handle_vblank_virt(phys_enc->parent);
}

/**
 * sde_encoder_phys_wb_register_irq - register writeback interrupt handler
 * @phys_enc: Pointer to physical encoder
 */
static int sde_encoder_phys_wb_register_irq(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct sde_irq_callback irq_cb;
	enum sde_intr_type intr_type;
	int ret = 0;

	if (wb_enc->bypass_irqreg)
		return 0;

	intr_type = sde_encoder_phys_wb_get_intr_type(hw_wb);
	wb_enc->irq_idx = sde_core_irq_idx_lookup(phys_enc->sde_kms,
			intr_type, hw_wb->idx);
	if (wb_enc->irq_idx < 0) {
		SDE_ERROR(
			"failed to lookup IRQ index for WB_DONE with wb=%d\n",
			hw_wb->idx - WB_0);
		return -EINVAL;
	}

	irq_cb.func = sde_encoder_phys_wb_done_irq;
	irq_cb.arg = wb_enc;
	ret = sde_core_irq_register_callback(phys_enc->sde_kms,
			wb_enc->irq_idx, &irq_cb);
	if (ret) {
		SDE_ERROR("failed to register IRQ callback WB_DONE\n");
		return ret;
	}

	ret = sde_core_irq_enable(phys_enc->sde_kms, &wb_enc->irq_idx, 1);
	if (ret) {
		SDE_ERROR(
			"failed to enable IRQ for WB_DONE, wb %d, irq_idx=%d\n",
			hw_wb->idx - WB_0,
			wb_enc->irq_idx);

		/* Unregister callback on IRQ enable failure */
		sde_core_irq_register_callback(phys_enc->sde_kms,
				wb_enc->irq_idx, NULL);
		wb_enc->irq_idx = -EINVAL;
		return ret;
	}

	SDE_DEBUG("registered IRQ for wb %d, irq_idx=%d\n",
			hw_wb->idx - WB_0,
			wb_enc->irq_idx);

	return ret;
}

/**
 * sde_encoder_phys_wb_mode_set - set display mode
 * @phys_enc: Pointer to physical encoder
 * @mode: Pointer to requested display mode
 * @adj_mode: Pointer to adjusted display mode
 */
static void sde_encoder_phys_wb_mode_set(
		struct sde_encoder_phys *phys_enc,
		struct drm_display_mode *mode,
		struct drm_display_mode *adj_mode)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_rm *rm = &phys_enc->sde_kms->rm;
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct sde_rm_hw_iter iter;
	int i, instance;

	phys_enc->cached_mode = *adj_mode;
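	/* in split mode, a slave encoder uses the second reserved instance */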
	instance = phys_enc->split_role == ENC_ROLE_SLAVE ? 1 : 0;

	SDE_DEBUG("[mode_set_cache:%d,%d,\"%s\",%d,%d]\n",
			hw_wb->idx - WB_0, mode->base.id,
			mode->name, mode->hdisplay, mode->vdisplay);

	phys_enc->hw_ctl = NULL;
	phys_enc->hw_cdm = NULL;

	/* Retrieve previously allocated HW Resources. CTL shouldn't fail */
	sde_rm_init_hw_iter(&iter, phys_enc->parent->base.id, SDE_HW_BLK_CTL);
	for (i = 0; i <= instance; i++) {
		sde_rm_get_hw(rm, &iter);
		if (i == instance)
			phys_enc->hw_ctl = (struct sde_hw_ctl *) iter.hw;
	}

	if (IS_ERR_OR_NULL(phys_enc->hw_ctl)) {
		SDE_ERROR("failed init ctl: %ld\n", PTR_ERR(phys_enc->hw_ctl));
		phys_enc->hw_ctl = NULL;
		return;
	}

	/* CDM is optional */
	sde_rm_init_hw_iter(&iter, phys_enc->parent->base.id, SDE_HW_BLK_CDM);
	for (i = 0; i <= instance; i++) {
		sde_rm_get_hw(rm, &iter);
		if (i == instance)
			phys_enc->hw_cdm = (struct sde_hw_cdm *) iter.hw;
	}

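	/*
	 * A NULL hw_cdm simply means no CDM was reserved (RGB output); an
	 * error pointer means a required CDM could not be allocated.
	 */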
	if (IS_ERR(phys_enc->hw_cdm)) {
		SDE_ERROR("CDM required but not allocated: %ld\n",
				PTR_ERR(phys_enc->hw_cdm));
		phys_enc->hw_ctl = NULL;
	}
}

/**
 * sde_encoder_phys_wb_wait_for_commit_done - wait until request is committed
 * @phys_enc: Pointer to physical encoder
 */
static int sde_encoder_phys_wb_wait_for_commit_done(
		struct sde_encoder_phys *phys_enc)
{
	unsigned long ret;
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	u32 irq_status;
	u64 wb_time = 0;
	int rc = 0;

	/* Return EWOULDBLOCK since we know the wait isn't necessary */
	if (WARN_ON(phys_enc->enable_state != SDE_ENC_ENABLED))
		return -EWOULDBLOCK;

	MSM_EVT(DEV(phys_enc), wb_enc->frame_count, 0);

	ret = wait_for_completion_timeout(&wb_enc->wbdone_complete,
			msecs_to_jiffies(wb_enc->wbdone_timeout));

	if (!ret) {
		MSM_EVT(DEV(phys_enc), wb_enc->frame_count, 0);

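		/*
		 * Timed out waiting for the done completion: check the raw
		 * interrupt status in case the frame finished but the IRQ was
		 * missed, and run the done handler manually if so.
		 */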
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				wb_enc->irq_idx, true);
		if (irq_status) {
			SDE_DEBUG("wb:%d done but irq not triggered\n",
					wb_enc->wb_dev->wb_idx - WB_0);
			sde_encoder_phys_wb_done_irq(wb_enc, wb_enc->irq_idx);
		} else {
			SDE_ERROR("wb:%d kickoff timed out\n",
					wb_enc->wb_dev->wb_idx - WB_0);
			rc = -ETIMEDOUT;
		}
	}

	sde_encoder_phys_wb_unregister_irq(phys_enc);

	if (!rc)
		wb_enc->end_time = ktime_get();

	/* once operation is done, disable traffic shaper */
	if (wb_enc->wb_cfg.ts_cfg.en && wb_enc->hw_wb &&
			wb_enc->hw_wb->ops.setup_trafficshaper) {
		wb_enc->wb_cfg.ts_cfg.en = false;
		wb_enc->hw_wb->ops.setup_trafficshaper(
				wb_enc->hw_wb, &wb_enc->wb_cfg);
	}

	/* remove vote for iommu/clk/bus */
	wb_enc->frame_count++;

	if (!rc) {
		wb_time = (u64)ktime_to_us(wb_enc->end_time) -
				(u64)ktime_to_us(wb_enc->start_time);
		SDE_DEBUG("wb:%d took %llu us\n",
				wb_enc->wb_dev->wb_idx - WB_0, wb_time);
	}

	MSM_EVT(DEV(phys_enc), wb_enc->frame_count, wb_time);

	return rc;
}

/**
 * sde_encoder_phys_wb_prepare_for_kickoff - pre-kickoff processing
 * @phys_enc: Pointer to physical encoder
 * @need_to_wait: Wait for next submission
 */
static void sde_encoder_phys_wb_prepare_for_kickoff(
		struct sde_encoder_phys *phys_enc,
		bool *need_to_wait)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	int ret;

	SDE_DEBUG("[wb:%d,%u]\n", wb_enc->hw_wb->idx - WB_0,
			wb_enc->kickoff_count);

	*need_to_wait = false;

	reinit_completion(&wb_enc->wbdone_complete);

	ret = sde_encoder_phys_wb_register_irq(phys_enc);
	if (ret) {
		SDE_ERROR("failed to register irq %d\n", ret);
		return;
	}

	wb_enc->kickoff_count++;

	/* set OT limit & enable traffic shaper */
	sde_encoder_phys_wb_setup(phys_enc);

	sde_encoder_phys_wb_flush(phys_enc);

	/* vote for iommu/clk/bus */
	wb_enc->start_time = ktime_get();

	MSM_EVT(DEV(phys_enc), *need_to_wait, wb_enc->kickoff_count);
}

/**
 * sde_encoder_phys_wb_handle_post_kickoff - post-kickoff processing
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_handle_post_kickoff(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);

	SDE_DEBUG("[wb:%d]\n", wb_enc->hw_wb->idx - WB_0);

	MSM_EVT(DEV(phys_enc), 0, 0);
}

/**
 * sde_encoder_phys_wb_enable - enable writeback encoder
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_enable(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;
	struct drm_device *dev;
	struct drm_connector *connector;

	SDE_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

	if (!wb_enc->base.parent || !wb_enc->base.parent->dev) {
		SDE_ERROR("invalid drm device\n");
		return;
	}
	dev = wb_enc->base.parent->dev;

	/* find associated writeback connector */
	mutex_lock(&dev->mode_config.mutex);
	drm_for_each_connector(connector, phys_enc->parent->dev) {
		if (connector->encoder == phys_enc->parent)
			break;
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (!connector || connector->encoder != phys_enc->parent) {
		SDE_ERROR("failed to find writeback connector\n");
		return;
	}
	wb_enc->wb_dev = sde_wb_connector_get_wb(connector);

	phys_enc->enable_state = SDE_ENC_ENABLED;
}

/**
 * sde_encoder_phys_wb_disable - disable writeback encoder
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_disable(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb = wb_enc->hw_wb;

	SDE_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR("encoder is already disabled\n");
		return;
	}

	if (wb_enc->frame_count != wb_enc->kickoff_count) {
		SDE_DEBUG("[wait_for_done: wb:%d, frame:%u, kickoff:%u]\n",
				hw_wb->idx - WB_0, wb_enc->frame_count,
				wb_enc->kickoff_count);
		sde_encoder_phys_wb_wait_for_commit_done(phys_enc);
	}

	if (phys_enc->hw_cdm && phys_enc->hw_cdm->ops.disable) {
		SDE_DEBUG_DRIVER("[cdm_disable]\n");
		phys_enc->hw_cdm->ops.disable(phys_enc->hw_cdm);
	}

	phys_enc->enable_state = SDE_ENC_DISABLED;
}

/**
 * sde_encoder_phys_wb_get_hw_resources - get hardware resources
 * @phys_enc: Pointer to physical encoder
 * @hw_res: Pointer to encoder resources
 * @conn_state: Pointer to connector atomic state
 */
static void sde_encoder_phys_wb_get_hw_resources(
		struct sde_encoder_phys *phys_enc,
		struct sde_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);
	struct sde_hw_wb *hw_wb;
	struct drm_framebuffer *fb;
	const struct sde_format *fmt;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	fb = sde_wb_connector_state_get_output_fb(conn_state);
	if (!fb) {
		SDE_ERROR("no output framebuffer\n");
		return;
	}

	fmt = sde_get_sde_format_ext(fb->pixel_format, fb->modifier,
			drm_format_num_planes(fb->pixel_format));
	if (!fmt) {
		SDE_ERROR("unsupported output pixel format:%d\n",
				fb->pixel_format);
		return;
	}

	hw_wb = wb_enc->hw_wb;
	hw_res->wbs[hw_wb->idx - WB_0] = phys_enc->intf_mode;
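	/* a YUV output format requires the chroma down (CDM) block */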
	hw_res->needs_cdm = SDE_FORMAT_IS_YUV(fmt);
	SDE_DEBUG("[wb:%d] intf_mode=%d needs_cdm=%d\n", hw_wb->idx - WB_0,
			hw_res->wbs[hw_wb->idx - WB_0],
			hw_res->needs_cdm);
}

#ifdef CONFIG_DEBUG_FS
/**
 * sde_encoder_phys_wb_init_debugfs - initialize writeback encoder debugfs
 * @phys_enc: Pointer to physical encoder
 * @kms: Pointer to SDE KMS object
 */
static int sde_encoder_phys_wb_init_debugfs(
		struct sde_encoder_phys *phys_enc, struct sde_kms *kms)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);

	if (!phys_enc || !kms || !wb_enc->hw_wb)
		return -EINVAL;

	snprintf(wb_enc->wb_name, ARRAY_SIZE(wb_enc->wb_name), "encoder_wb%d",
			wb_enc->hw_wb->idx - WB_0);

	wb_enc->debugfs_root =
		debugfs_create_dir(wb_enc->wb_name,
				sde_debugfs_get_root(kms));
	if (!wb_enc->debugfs_root) {
		SDE_ERROR("failed to create debugfs\n");
		return -ENOMEM;
	}

	if (!debugfs_create_u32("wbdone_timeout", 0644,
			wb_enc->debugfs_root, &wb_enc->wbdone_timeout)) {
		SDE_ERROR("failed to create debugfs/wbdone_timeout\n");
		return -ENOMEM;
	}

	if (!debugfs_create_u32("bypass_irqreg", 0644,
			wb_enc->debugfs_root, &wb_enc->bypass_irqreg)) {
		SDE_ERROR("failed to create debugfs/bypass_irqreg\n");
		return -ENOMEM;
	}

	return 0;
}

/**
 * sde_encoder_phys_wb_destroy_debugfs - destroy writeback encoder debugfs
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_destroy_debugfs(
		struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);

	if (!phys_enc)
		return;

	debugfs_remove_recursive(wb_enc->debugfs_root);
}
#else
static int sde_encoder_phys_wb_init_debugfs(
		struct sde_encoder_phys *phys_enc, struct sde_kms *kms)
{
	return 0;
}
static void sde_encoder_phys_wb_destroy_debugfs(
		struct sde_encoder_phys *phys_enc)
{
}
#endif

/**
 * sde_encoder_phys_wb_destroy - destroy writeback encoder
 * @phys_enc: Pointer to physical encoder
 */
static void sde_encoder_phys_wb_destroy(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_wb *wb_enc = to_sde_encoder_phys_wb(phys_enc);

	if (!phys_enc)
		return;

	SDE_DEBUG("[wb:%d]\n", wb_enc->hw_wb->idx - WB_0);

	sde_encoder_phys_wb_destroy_debugfs(phys_enc);

	kfree(wb_enc);
}

/**
 * sde_encoder_phys_wb_init_ops - initialize writeback operations
 * @ops: Pointer to encoder operation table
 */
static void sde_encoder_phys_wb_init_ops(struct sde_encoder_phys_ops *ops)
{
	ops->is_master = sde_encoder_phys_wb_is_master;
	ops->mode_set = sde_encoder_phys_wb_mode_set;
	ops->enable = sde_encoder_phys_wb_enable;
	ops->disable = sde_encoder_phys_wb_disable;
	ops->destroy = sde_encoder_phys_wb_destroy;
	ops->atomic_check = sde_encoder_phys_wb_atomic_check;
	ops->get_hw_resources = sde_encoder_phys_wb_get_hw_resources;
	ops->wait_for_commit_done = sde_encoder_phys_wb_wait_for_commit_done;
	ops->prepare_for_kickoff = sde_encoder_phys_wb_prepare_for_kickoff;
	ops->handle_post_kickoff = sde_encoder_phys_wb_handle_post_kickoff;
	ops->trigger_start = sde_encoder_helper_trigger_start;
}

/**
 * sde_encoder_phys_wb_init - initialize writeback encoder
 * @p: Pointer to init info structure with initialization params
 */
struct sde_encoder_phys *sde_encoder_phys_wb_init(
		struct sde_enc_phys_init_params *p)
{
	struct sde_encoder_phys *phys_enc;
	struct sde_encoder_phys_wb *wb_enc;
	struct sde_hw_mdp *hw_mdp;
	int ret = 0;

	SDE_DEBUG("\n");

	wb_enc = kzalloc(sizeof(*wb_enc), GFP_KERNEL);
	if (!wb_enc) {
		ret = -ENOMEM;
		goto fail_alloc;
	}
	wb_enc->irq_idx = -EINVAL;
	wb_enc->wbdone_timeout = WAIT_TIMEOUT_MSEC;
	init_completion(&wb_enc->wbdone_complete);

	phys_enc = &wb_enc->base;

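	/*
	 * Prefer the non-realtime SMMU domains when a dedicated NRT VBIF
	 * exists; otherwise fall back to the default domains.
	 */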
	if (p->sde_kms->vbif[VBIF_NRT]) {
		wb_enc->mmu_id[SDE_IOMMU_DOMAIN_UNSECURE] =
			p->sde_kms->mmu_id[MSM_SMMU_DOMAIN_NRT_UNSECURE];
		wb_enc->mmu_id[SDE_IOMMU_DOMAIN_SECURE] =
			p->sde_kms->mmu_id[MSM_SMMU_DOMAIN_NRT_SECURE];
	} else {
		wb_enc->mmu_id[SDE_IOMMU_DOMAIN_UNSECURE] =
			p->sde_kms->mmu_id[MSM_SMMU_DOMAIN_UNSECURE];
		wb_enc->mmu_id[SDE_IOMMU_DOMAIN_SECURE] =
			p->sde_kms->mmu_id[MSM_SMMU_DOMAIN_SECURE];
	}

	hw_mdp = sde_rm_get_mdp(&p->sde_kms->rm);
	if (IS_ERR_OR_NULL(hw_mdp)) {
		ret = PTR_ERR(hw_mdp);
		SDE_ERROR("failed to init hw_top: %d\n", ret);
		goto fail_mdp_init;
	}
	phys_enc->hw_mdptop = hw_mdp;

	/**
	 * hw_wb resource permanently assigned to this encoder
	 * Other resources allocated at atomic commit time by use case
	 */
	if (p->wb_idx != SDE_NONE) {
		struct sde_rm_hw_iter iter;

		sde_rm_init_hw_iter(&iter, 0, SDE_HW_BLK_WB);
		while (sde_rm_get_hw(&p->sde_kms->rm, &iter)) {
			struct sde_hw_wb *hw_wb = (struct sde_hw_wb *)iter.hw;

			if (hw_wb->idx == p->wb_idx) {
				wb_enc->hw_wb = hw_wb;
				break;
			}
		}

		if (!wb_enc->hw_wb) {
			ret = -EINVAL;
			SDE_ERROR("failed to init hw_wb%d\n", p->wb_idx - WB_0);
			goto fail_wb_init;
		}
	} else {
		ret = -EINVAL;
		SDE_ERROR("invalid wb_idx\n");
		goto fail_wb_check;
	}

	sde_encoder_phys_wb_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->parent_ops = p->parent_ops;
	phys_enc->sde_kms = p->sde_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_WB_LINE;
	spin_lock_init(&phys_enc->spin_lock);

	ret = sde_encoder_phys_wb_init_debugfs(phys_enc, p->sde_kms);
	if (ret) {
		SDE_ERROR("failed to init debugfs %d\n", ret);
		goto fail_debugfs_init;
	}

	SDE_DEBUG("Created sde_encoder_phys_wb for wb %d\n",
			wb_enc->hw_wb->idx - WB_0);

	return phys_enc;

fail_debugfs_init:
fail_wb_init:
fail_wb_check:
fail_mdp_init:
	kfree(wb_enc);
fail_alloc:
	return ERR_PTR(ret);
}