blob: 8c7a7fd6a4b3e274f607c9d35eeb197436f030f0 [file] [log] [blame]
Narendra Muppalla1b0b3352015-09-29 10:16:51 -07001/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
2 *
3 * This program is free software; you can redistribute it and/or modify
4 * it under the terms of the GNU General Public License version 2 and
5 * only version 2 as published by the Free Software Foundation.
6 *
7 * This program is distributed in the hope that it will be useful,
8 * but WITHOUT ANY WARRANTY; without even the implied warranty of
9 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 * GNU General Public License for more details.
11 */
12
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040013#include <linux/sort.h>
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070014#include <drm/drm_mode.h>
15#include <drm/drm_crtc.h>
16#include <drm/drm_crtc_helper.h>
17#include <drm/drm_flip_work.h>
18
19#include "sde_kms.h"
20#include "sde_hw_lm.h"
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040021#include "sde_hw_mdp_ctl.h"
22
23#define CRTC_DUAL_MIXERS 2
24#define PENDING_FLIP 2
25
26#define CRTC_HW_MIXER_MAXSTAGES(c, idx) ((c)->mixer[idx].sblk->maxblendstages)
27
/*
 * struct sde_crtc_mixer - one virtual pipeline (LM + CTL pair) owned by a CRTC
 * @hw_dspp:    DSPP HW driver context (not populated anywhere in this file)
 * @hw_lm:      layer mixer HW driver context
 * @hw_ctl:     CTL path HW driver context used to flush this mixer
 * @flush_mask: accumulated CTL flush bits for the planes/LM staged here
 */
struct sde_crtc_mixer {
	struct sde_hw_dspp *hw_dspp;
	struct sde_hw_mixer *hw_lm;
	struct sde_hw_ctl *hw_ctl;
	u32 flush_mask;
};
Narendra Muppalla1b0b3352015-09-29 10:16:51 -070034
/*
 * struct sde_crtc - SDE-specific CRTC state, wrapping struct drm_crtc
 * @base:       base drm_crtc object (use to_sde_crtc() to recover this struct)
 * @name:       short identity string used in debug prints
 * @plane:      primary plane bound at init time
 * @planes:     plane slots (not populated anywhere in this file)
 * @encoder:    encoder this CRTC drives; used when reserving HW resources
 * @id:         CRTC instance index supplied by sde_crtc_init()
 * @enabled:    enable state flag (not updated anywhere in this file)
 * @lm_lock:    protects layer-mixer register programming
 * @num_ctls:   number of CTL paths reserved for this CRTC
 * @num_mixers: number of layer mixers reserved for this CRTC
 * @mixer:      per-pipeline HW contexts, valid up to @num_mixers entries
 * @event:      pending vblank/flip completion event, NULL when none
 */
struct sde_crtc {
	struct drm_crtc base;
	char name[8];
	struct drm_plane *plane;
	struct drm_plane *planes[8];
	struct drm_encoder *encoder;
	int id;
	bool enabled;

	spinlock_t lm_lock;	/* protect registers */

	/* HW Resources reserved for the crtc */
	u32 num_ctls;
	u32 num_mixers;
	struct sde_crtc_mixer mixer[CRTC_DUAL_MIXERS];

	/*if there is a pending flip, these will be non-null */
	struct drm_pending_vblank_event *event;
};
54
55#define to_sde_crtc(x) container_of(x, struct sde_crtc, base)
56
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040057static struct sde_kms *get_kms(struct drm_crtc *crtc)
58{
59 struct msm_drm_private *priv = crtc->dev->dev_private;
Ben Chan78647cd2016-06-26 22:02:47 -040060 return to_sde_kms(priv->kms);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -040061}
62
63static inline struct sde_hw_ctl *sde_crtc_rm_get_ctl_path(enum sde_ctl idx,
64 void __iomem *addr,
65 struct sde_mdss_cfg *m)
66{
67 /*
68 * This module keeps track of the requested hw resources state,
69 * if the requested resource is being used it returns NULL,
70 * otherwise it returns the hw driver struct
71 */
72 return sde_hw_ctl_init(idx, addr, m);
73}
74
75static inline struct sde_hw_mixer *sde_crtc_rm_get_mixer(enum sde_lm idx,
76 void __iomem *addr,
77 struct sde_mdss_cfg *m)
78{
79 /*
80 * This module keeps track of the requested hw resources state,
81 * if the requested resource is being used it returns NULL,
82 * otherwise it returns the hw driver struct
83 */
84 return sde_hw_lm_init(idx, addr, m);
85}
86
87static int sde_crtc_reserve_hw_resources(struct drm_crtc *crtc,
88 struct drm_encoder *encoder)
89{
90 /*
91 * Assign CRTC resources
92 * num_ctls;
93 * num_mixers;
94 * sde_lm mixer[CRTC_MAX_PIPES];
95 * sde_ctl ctl[CRTC_MAX_PIPES];
96 */
97 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
98 struct sde_kms *kms = get_kms(crtc);
99 enum sde_lm lm_id[CRTC_DUAL_MIXERS];
100 enum sde_ctl ctl_id[CRTC_DUAL_MIXERS];
101 int i;
102
103 if (!kms) {
104 DBG("[%s] invalid kms\n", __func__);
105 return -EINVAL;
106 }
107
108 if (!kms->mmio)
109 return -EINVAL;
110
111 /*
112 * simple check validate against catalog
113 */
114 sde_crtc->num_ctls = 1;
115 sde_crtc->num_mixers = 1;
116 ctl_id[0] = CTL_0;
117 lm_id[0] = LM_0;
118
119 /*
120 * need to also enable MDP core clock and AHB CLK
121 * before touching HW driver
122 */
123 DBG("%s Enable clocks\n", __func__);
124 sde_enable(kms);
125 for (i = 0; i < sde_crtc->num_ctls; i++) {
126 sde_crtc->mixer[i].hw_ctl = sde_crtc_rm_get_ctl_path(ctl_id[i],
127 kms->mmio, kms->catalog);
128 if (!sde_crtc->mixer[i].hw_ctl) {
129 DBG("[%s], Invalid ctl_path", __func__);
130 return -EACCES;
131 }
132 }
133
134 for (i = 0; i < sde_crtc->num_mixers; i++) {
135 sde_crtc->mixer[i].hw_lm = sde_crtc_rm_get_mixer(lm_id[i],
136 kms->mmio, kms->catalog);
137 if (!sde_crtc->mixer[i].hw_lm) {
138 DBG("[%s], Invalid ctl_path", __func__);
139 return -EACCES;
140 }
141 }
142 /*
143 * need to disable MDP core clock and AHB CLK
144 */
145 sde_disable(kms);
146 return 0;
147}
148
/* Tear down the CRTC: release DRM core state, then free our wrapper. */
static void sde_crtc_destroy(struct drm_crtc *crtc)
{
	struct sde_crtc *sde_crtc;

	DBG("");
	sde_crtc = to_sde_crtc(crtc);
	drm_crtc_cleanup(crtc);
	kfree(sde_crtc);
}
157
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700158static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
159 const struct drm_display_mode *mode,
160 struct drm_display_mode *adjusted_mode)
161{
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400162 DBG("");
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700163 return true;
164}
165
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400166static void sde_crtc_mode_set_nofb(struct drm_crtc *crtc)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700167{
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400168 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
169 struct sde_crtc_mixer *mixer = sde_crtc->mixer;
170 struct drm_device *dev = crtc->dev;
171 struct sde_hw_mixer *lm;
172 unsigned long flags;
173 struct drm_display_mode *mode;
174 struct sde_hw_mixer_cfg cfg;
175 u32 mixer_width;
176 int i;
177 int rc;
178
179 DBG("");
180 if (WARN_ON(!crtc->state))
181 return;
182
183 mode = &crtc->state->adjusted_mode;
184
185 DBG("%s: set mode: %d:\"%s\" %d %d %d %d %d %d %d %d %d %d 0x%x 0x%x",
186 sde_crtc->name, mode->base.id, mode->name,
187 mode->vrefresh, mode->clock,
188 mode->hdisplay, mode->hsync_start,
189 mode->hsync_end, mode->htotal,
190 mode->vdisplay, mode->vsync_start,
191 mode->vsync_end, mode->vtotal,
192 mode->type, mode->flags);
193
194 /*
195 * reserve mixer(s) if not already avaialable
196 * if dual mode, mixer_width = half mode width
197 * program mode configuration on mixer(s)
198 */
199 if ((sde_crtc->num_ctls == 0) ||
200 (sde_crtc->num_mixers == 0)) {
201 rc = sde_crtc_reserve_hw_resources(crtc, sde_crtc->encoder);
202 if (rc) {
203 dev_err(dev->dev, " error reserving HW resource for this CRTC\n");
204 return;
205 }
206 }
207
208 if (sde_crtc->num_mixers == CRTC_DUAL_MIXERS)
209 mixer_width = mode->hdisplay >> 1;
210 else
211 mixer_width = mode->hdisplay;
212
213 spin_lock_irqsave(&sde_crtc->lm_lock, flags);
214
215 for (i = 0; i < sde_crtc->num_mixers; i++) {
216 lm = mixer[i].hw_lm;
217 cfg.out_width = mixer_width;
218 cfg.out_height = mode->vdisplay;
219 cfg.right_mixer = (i == 0) ? false : true;
220 cfg.flags = 0;
221 lm->ops.setup_mixer_out(lm, &cfg);
222 }
223
224 spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
225}
226
/*
 * sde_crtc_get_blend_cfg - derive HW blend settings for one staged plane
 * @cfg: blend config to fill in; caller is expected to supply it zeroed,
 *       since only the fields relevant to the chosen mode are written
 * @pstate: plane state providing fb format, constant alpha and premultiply
 *
 * Three cases, selected from the format's alpha capability:
 *  - opaque format: plain constant-alpha blend (fg = alpha, bg = 0xFF-alpha)
 *  - premultiplied pixel alpha: fg stays constant, bg keys off fg pixel alpha
 *  - straight pixel alpha: both sides key off fg pixel alpha
 * In the pixel-alpha cases an additional constant-alpha != 0xFF modulates
 * the blend via the mod/inv flags.
 */
static void sde_crtc_get_blend_cfg(struct sde_hw_blend_cfg *cfg,
		struct sde_plane_state *pstate)
{
	const struct mdp_format *format;
	struct drm_plane *plane;

	format = to_mdp_format(
			msm_framebuffer_format(pstate->base.fb));
	/* NOTE(review): 'plane' is assigned but never used in this function */
	plane = pstate->base.plane;

	/* default: constant-alpha blend for formats without an alpha channel */
	cfg->fg.alpha_sel = ALPHA_FG_CONST;
	cfg->bg.alpha_sel = ALPHA_BG_CONST;
	cfg->fg.const_alpha = pstate->alpha;
	cfg->bg.const_alpha = 0xFF - pstate->alpha;

	if (format->alpha_enable && pstate->premultiplied) {
		/* premultiplied: fg keeps const alpha, bg uses inverted
		 * fg pixel alpha (ALPHA_FG_PIXEL on bg is intentional) */
		cfg->fg.alpha_sel = ALPHA_FG_CONST;
		cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
		if (pstate->alpha != 0xff) {
			cfg->bg.const_alpha = pstate->alpha;
			cfg->bg.inv_alpha_sel = 1;
			cfg->bg.mod_alpha = 1;
		} else {
			cfg->bg.inv_mode_alpha = 1;
		}
	} else if (format->alpha_enable) {
		/* straight pixel alpha on both foreground and background */
		cfg->fg.alpha_sel = ALPHA_FG_PIXEL;
		cfg->bg.alpha_sel = ALPHA_FG_PIXEL;
		if (pstate->alpha != 0xff) {
			/* modulate pixel alpha by the plane's const alpha */
			cfg->bg.const_alpha = pstate->alpha;
			cfg->fg.mod_alpha = 1;
			cfg->bg.inv_alpha_sel = 1;
			cfg->bg.mod_alpha = 1;
			cfg->bg.inv_mode_alpha = 1;
		} else {
			cfg->bg.inv_mode_alpha = 1;
		}
	}
}
266
267static void blend_setup(struct drm_crtc *crtc)
268{
269 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
270 struct sde_crtc_mixer *mixer = sde_crtc->mixer;
271 struct drm_plane *plane;
272 struct sde_plane_state *pstate, *pstates[SDE_STAGE_MAX] = {0};
273 struct sde_hw_stage_cfg stage_cfg;
274 struct sde_hw_blend_cfg blend;
275 struct sde_hw_ctl *ctl;
276 struct sde_hw_mixer *lm;
277 u32 flush_mask = 0;
278 unsigned long flags;
279 int i, j, plane_cnt = 0;
280
281 spin_lock_irqsave(&sde_crtc->lm_lock, flags);
282
283 /* ctl could be reserved already */
284 if (!sde_crtc->num_ctls)
285 goto out;
286
287 /* initialize stage cfg */
288 memset(&stage_cfg, 0, sizeof(stage_cfg));
289 memset(&blend, 0, sizeof(blend));
290
291 /* Collect all plane information */
292 drm_atomic_crtc_for_each_plane(plane, crtc) {
293 pstate = to_sde_plane_state(plane->state);
294 pstates[pstate->stage] = pstate;
295 plane_cnt++;
296 for (i = 0; i < sde_crtc->num_mixers; i++) {
297 stage_cfg.stage[pstate->stage][i] =
298 sde_plane_pipe(plane);
299
300 /* Cache the flushmask for this layer
301 * sourcesplit is always enabled, so this layer will
302 * be staged on both the mixers
303 */
304 ctl = mixer[i].hw_ctl;
305 ctl->ops.get_bitmask_sspp(ctl, &flush_mask,
306 sde_plane_pipe(plane));
307 }
308 }
309
310 /*
311 * If there is no base layer, enable border color.
312 * currently border color is always black
313 */
314 if ((stage_cfg.stage[SDE_STAGE_BASE][0] == SSPP_NONE) &&
315 plane_cnt) {
316 stage_cfg.border_enable = 1;
317 DBG("Border Color is enabled\n");
318 }
319
320 /* Program hw */
321 for (i = 0; i < sde_crtc->num_mixers; i++) {
322 if (!mixer[i].hw_lm)
323 continue;
324
325 if (!mixer[i].hw_ctl)
326 continue;
327
328 ctl = mixer[i].hw_ctl;
329 lm = mixer[i].hw_lm;
330
331 /* stage config */
332 ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
333 &stage_cfg);
334 /* stage config flush mask */
335 mixer[i].flush_mask = flush_mask;
336 /* get the flush mask for mixer */
337 ctl->ops.get_bitmask_mixer(ctl, &mixer[i].flush_mask,
338 mixer[i].hw_lm->idx);
339
340 /* blend config */
341 for (j = SDE_STAGE_0; j < SDE_STAGE_MAX; j++) {
342 if (!pstates[j])
343 continue;
344 sde_crtc_get_blend_cfg(&blend, pstates[j]);
345 blend.fg.alpha_sel = ALPHA_FG_CONST;
346 blend.bg.alpha_sel = ALPHA_BG_CONST;
347 blend.fg.const_alpha = pstate->alpha;
348 blend.bg.const_alpha = 0xFF - pstate->alpha;
349 lm->ops.setup_blend_config(lm, j, &blend);
350 }
351 }
352out:
353 spin_unlock_irqrestore(&sde_crtc->lm_lock, flags);
354}
355
/*
 * request_pending - placeholder for marking a pending event on the CRTC
 * @crtc: DRM CRTC
 * @pending: pending-event bit (e.g. PENDING_FLIP)
 *
 * Currently only logs; no pending-state tracking is implemented yet.
 */
static void request_pending(struct drm_crtc *crtc, u32 pending)
{
	DBG("");
}
360/**
361 * Flush the CTL PATH
362 */
363static u32 crtc_flush_all(struct drm_crtc *crtc)
364{
365 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
366 struct sde_hw_ctl *ctl;
367 int i;
368
369 DBG("");
370
371 for (i = 0; i < sde_crtc->num_ctls; i++) {
372 /*
373 * Query flush_mask from encoder
374 * and append to the ctl_path flush_mask
375 */
376 ctl = sde_crtc->mixer[i].hw_ctl;
377 ctl->ops.get_bitmask_intf(ctl,
378 &(sde_crtc->mixer[i].flush_mask),
379 INTF_1);
380 ctl->ops.setup_flush(ctl,
381 sde_crtc->mixer[i].flush_mask);
382 }
383
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700384 return 0;
385}
386
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400387static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
388 struct drm_crtc_state *old_crtc_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700389{
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400390 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
391 struct drm_device *dev = crtc->dev;
392 unsigned long flags;
393
394 DBG("");
395
396 WARN_ON(sde_crtc->event);
397
398 spin_lock_irqsave(&dev->event_lock, flags);
399 sde_crtc->event = crtc->state->event;
400 spin_unlock_irqrestore(&dev->event_lock, flags);
401
402 /*
403 * If no CTL has been allocated in sde_crtc_atomic_check(),
404 * it means we are trying to flush a CRTC whose state is disabled:
405 * nothing else needs to be done.
406 */
407 if (unlikely(!sde_crtc->num_ctls))
408 return;
409
410 blend_setup(crtc);
411
412 /*
413 * PP_DONE irq is only used by command mode for now.
414 * It is better to request pending before FLUSH and START trigger
415 * to make sure no pp_done irq missed.
416 * This is safe because no pp_done will happen before SW trigger
417 * in command mode.
418 */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700419}
420
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400421static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
422 struct drm_crtc_state *old_crtc_state)
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700423{
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400424 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
425 struct drm_device *dev = crtc->dev;
426 unsigned long flags;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700427
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400428 DBG("");
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700429
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400430 WARN_ON(sde_crtc->event);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700431
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400432 spin_lock_irqsave(&dev->event_lock, flags);
433 sde_crtc->event = crtc->state->event;
434 spin_unlock_irqrestore(&dev->event_lock, flags);
435
436 /*
437 * If no CTL has been allocated in sde_crtc_atomic_check(),
438 * it means we are trying to flush a CRTC whose state is disabled:
439 * nothing else needs to be done.
440 */
441 if (unlikely(!sde_crtc->num_ctls))
442 return;
443
444 crtc_flush_all(crtc);
445
446 request_pending(crtc, PENDING_FLIP);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700447}
448
/*
 * sde_crtc_set_property - CRTC property setter
 *
 * No CRTC properties are installed yet (see sde_crtc_install_properties),
 * so every request is rejected.
 */
static int sde_crtc_set_property(struct drm_crtc *crtc,
		struct drm_property *property, uint64_t val)
{
	return -EINVAL;
}
454
/*
 * sde_crtc_cursor_set - cursor buffer hook; not implemented, reports success
 * so userspace cursor updates are silently ignored rather than failed.
 */
static int sde_crtc_cursor_set(struct drm_crtc *crtc,
		struct drm_file *file, uint32_t handle,
		uint32_t width, uint32_t height)
{
	return 0;
}
461
/* Cursor move hook; not implemented, reports success (no-op). */
static int sde_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	return 0;
}
466
/* CRTC disable helper hook; power-down sequencing not implemented yet. */
static void sde_crtc_disable(struct drm_crtc *crtc)
{
	DBG("");
}
471
/* CRTC enable helper hook; power-up sequencing not implemented yet. */
static void sde_crtc_enable(struct drm_crtc *crtc)
{
	DBG("");
}
476
/*
 * struct plane_state - scratch pairing of a plane with its SDE state,
 * used only while sorting planes by zpos in sde_crtc_atomic_check().
 */
struct plane_state {
	struct drm_plane *plane;
	struct sde_plane_state *state;
};
481
482static int pstate_cmp(const void *a, const void *b)
483{
484 struct plane_state *pa = (struct plane_state *)a;
485 struct plane_state *pb = (struct plane_state *)b;
486
487 return pa->state->zpos - pb->state->zpos;
488}
489
490static int sde_crtc_atomic_check(struct drm_crtc *crtc,
491 struct drm_crtc_state *state)
492{
493 struct sde_crtc *sde_crtc = to_sde_crtc(crtc);
494 struct sde_kms *sde_kms = get_kms(crtc);
495 struct drm_plane *plane;
496 struct drm_device *dev = crtc->dev;
497 struct plane_state pstates[SDE_STAGE_MAX];
498 int max_stages = CRTC_HW_MIXER_MAXSTAGES(sde_kms->catalog, 0);
499 int cnt = 0, i;
500
501 DBG("%s: check", sde_crtc->name);
502
503 /* verify that there are not too many planes attached to crtc
504 * and that we don't have conflicting mixer stages:
505 */
506 drm_atomic_crtc_state_for_each_plane(plane, state) {
507 struct drm_plane_state *pstate;
508
509 if (cnt >= (max_stages)) {
510 dev_err(dev->dev, "too many planes!\n");
511 return -EINVAL;
512 }
513
514 pstate = state->state->plane_states[drm_plane_index(plane)];
515
516 /* plane might not have changed, in which case take
517 * current state:
518 */
519 if (!pstate)
520 pstate = plane->state;
521 pstates[cnt].plane = plane;
522 pstates[cnt].state = to_sde_plane_state(pstate);
523
524 cnt++;
525 }
526
527 /* assign a stage based on sorted zpos property */
528 sort(pstates, cnt, sizeof(pstates[0]), pstate_cmp, NULL);
529
530 for (i = 0; i < cnt; i++) {
531 pstates[i].state->stage = SDE_STAGE_0 + i;
532 DBG("%s: assign pipe %d on stage=%d", sde_crtc->name,
533 sde_plane_pipe(pstates[i].plane),
534 pstates[i].state->stage);
535 }
536
537 return 0;
538}
539
/* Core CRTC ops: atomic helpers for config/flip/state, local stubs for
 * destroy, properties and cursor. */
static const struct drm_crtc_funcs sde_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = sde_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = sde_crtc_set_property,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.cursor_set = sde_crtc_cursor_set,
	.cursor_move = sde_crtc_cursor_move,
};
551
/* Atomic-modeset helper ops wiring the check/begin/flush pipeline above. */
static const struct drm_crtc_helper_funcs sde_crtc_helper_funcs = {
	.mode_fixup = sde_crtc_mode_fixup,
	.mode_set_nofb = sde_crtc_mode_set_nofb,
	.disable = sde_crtc_disable,
	.enable = sde_crtc_enable,
	.atomic_check = sde_crtc_atomic_check,
	.atomic_begin = sde_crtc_atomic_begin,
	.atomic_flush = sde_crtc_atomic_flush,
};
561
/* Vblank enable/count hook; not implemented yet, always returns 0. */
uint32_t sde_crtc_vblank(struct drm_crtc *crtc)
{
	return 0;
}
566
/* Cancel any flip pending for @file on this CRTC; no-op — no pending-flip
 * tracking is implemented yet (see request_pending()). */
void sde_crtc_cancel_pending_flip(struct drm_crtc *crtc, struct drm_file *file)
{
}
570
/* Attach CRTC-specific DRM properties; none are defined yet, so this is a
 * no-op and sde_crtc_set_property() rejects all requests. */
static void sde_crtc_install_properties(struct drm_crtc *crtc,
		struct drm_mode_object *obj)
{
}
575
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700576
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400577/* initialize crtc */
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700578struct drm_crtc *sde_crtc_init(struct drm_device *dev,
579 struct drm_encoder *encoder,
580 struct drm_plane *plane, int id)
581{
582 struct drm_crtc *crtc = NULL;
583 struct sde_crtc *sde_crtc;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400584 int rc;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700585
586 sde_crtc = kzalloc(sizeof(*sde_crtc), GFP_KERNEL);
587 if (!sde_crtc)
588 return ERR_PTR(-ENOMEM);
589
590 crtc = &sde_crtc->base;
591
592 sde_crtc->id = id;
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400593 sde_crtc->encoder = encoder;
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700594
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400595 sde_crtc_install_properties(crtc, &crtc->base);
596
597 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &sde_crtc_funcs);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700598
599 drm_crtc_helper_add(crtc, &sde_crtc_helper_funcs);
Abhijit Kulkarni3e3e0d22016-06-24 17:56:13 -0400600 plane->crtc = crtc;
601
602 rc = sde_crtc_reserve_hw_resources(crtc, encoder);
603 if (rc) {
604 dev_err(dev->dev, " error reserving HW resource for this CRTC\n");
605 return ERR_PTR(-EINVAL);
606 }
607
608 DBG("%s: Successfully initialized crtc\n", __func__);
Narendra Muppalla1b0b3352015-09-29 10:16:51 -0700609 return crtc;
610}