/* Copyright (c) 2015-2017, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/delay.h>
#include "sde_hwio.h"
#include "sde_hw_ctl.h"

#define CTL_LAYER(lm) \
	(((lm) == LM_5) ? (0x024) : (((lm) - LM_0) * 0x004))
#define CTL_LAYER_EXT(lm) \
	(0x40 + (((lm) - LM_0) * 0x004))
#define CTL_LAYER_EXT2(lm) \
	(0x70 + (((lm) - LM_0) * 0x004))
#define CTL_LAYER_EXT3(lm) \
	(0xA0 + (((lm) - LM_0) * 0x004))
#define CTL_TOP                       0x014
#define CTL_FLUSH                     0x018
#define CTL_START                     0x01C
#define CTL_SW_RESET                  0x030
#define CTL_LAYER_EXTN_OFFSET         0x40

#define SDE_REG_RESET_TIMEOUT_COUNT   20

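/**
 * _ctl_offset() - look up a CTL block in the hardware catalog
 * @ctl:  CTL block enumeration id
 * @m:    pointer to the MDSS hardware catalog
 * @addr: mapped register I/O base address
 * @b:    register map to populate for the matching CTL block
 *
 * Returns the catalog entry for @ctl, or ERR_PTR(-ENOMEM) if the id is
 * not present in the catalog.
 */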
static struct sde_ctl_cfg *_ctl_offset(enum sde_ctl ctl,
		struct sde_mdss_cfg *m,
		void __iomem *addr,
		struct sde_hw_blk_reg_map *b)
{
	int i;

	for (i = 0; i < m->ctl_count; i++) {
		if (ctl == m->ctl[i].id) {
			b->base_off = addr;
			b->blk_off = m->ctl[i].base;
			b->hwversion = m->hwversion;
			b->log_mask = SDE_DBG_MASK_CTL;
			return &m->ctl[i];
		}
	}
	return ERR_PTR(-ENOMEM);
}

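/**
 * _mixer_stages() - look up the maximum blend stages of a layer mixer
 * @mixer: array of layer mixer catalog entries
 * @count: number of entries in @mixer
 * @lm:    layer mixer enumeration id to look up
 *
 * Returns the maximum number of blend stages supported by @lm, or
 * -EINVAL if the mixer is not found in the catalog.
 */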
static int _mixer_stages(const struct sde_lm_cfg *mixer, int count,
		enum sde_lm lm)
{
	int i;
	int stages = -EINVAL;

	for (i = 0; i < count; i++) {
		if (lm == mixer[i].id) {
			stages = mixer[i].sblk->maxblendstages;
			break;
		}
	}

	return stages;
}

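/*
 * Flush programming model: callers accumulate block flush bits in
 * ctx->pending_flush_mask via update_pending_flush(), then commit the
 * accumulated mask to the CTL_FLUSH register with trigger_flush().
 * trigger_start() kicks off the programmed CTL path via CTL_START.
 */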
static inline void sde_hw_ctl_trigger_start(struct sde_hw_ctl *ctx)
{
	SDE_REG_WRITE(&ctx->hw, CTL_START, 0x1);
}

static inline void sde_hw_ctl_clear_pending_flush(struct sde_hw_ctl *ctx)
{
	ctx->pending_flush_mask = 0x0;
}

static inline void sde_hw_ctl_update_pending_flush(struct sde_hw_ctl *ctx,
		u32 flushbits)
{
	ctx->pending_flush_mask |= flushbits;
}

static u32 sde_hw_ctl_get_pending_flush(struct sde_hw_ctl *ctx)
{
	if (!ctx)
		return 0x0;

	return ctx->pending_flush_mask;
}

static inline void sde_hw_ctl_trigger_flush(struct sde_hw_ctl *ctx)
{
	SDE_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}

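/*
 * The get_bitmask_* helpers translate hardware block ids into their
 * CTL_FLUSH register bits: the sspp/mixer variants return the bits
 * directly, while the dspp/intf/wb/cdm variants OR them into a
 * caller-provided mask and return 0 or -EINVAL.
 */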
static inline uint32_t sde_hw_ctl_get_bitmask_sspp(struct sde_hw_ctl *ctx,
	enum sde_sspp sspp)
{
	uint32_t flushbits = 0;

	switch (sspp) {
	case SSPP_VIG0:
		flushbits = BIT(0);
		break;
	case SSPP_VIG1:
		flushbits = BIT(1);
		break;
	case SSPP_VIG2:
		flushbits = BIT(2);
		break;
	case SSPP_VIG3:
		flushbits = BIT(18);
		break;
	case SSPP_RGB0:
		flushbits = BIT(3);
		break;
	case SSPP_RGB1:
		flushbits = BIT(4);
		break;
	case SSPP_RGB2:
		flushbits = BIT(5);
		break;
	case SSPP_RGB3:
		flushbits = BIT(19);
		break;
	case SSPP_DMA0:
		flushbits = BIT(11);
		break;
	case SSPP_DMA1:
		flushbits = BIT(12);
		break;
	case SSPP_DMA2:
		flushbits = BIT(24);
		break;
	case SSPP_DMA3:
		flushbits = BIT(25);
		break;
	case SSPP_CURSOR0:
		flushbits = BIT(22);
		break;
	case SSPP_CURSOR1:
		flushbits = BIT(23);
		break;
	default:
		break;
	}

	return flushbits;
}

static inline uint32_t sde_hw_ctl_get_bitmask_mixer(struct sde_hw_ctl *ctx,
	enum sde_lm lm)
{
	uint32_t flushbits = 0;

	switch (lm) {
	case LM_0:
		flushbits = BIT(6);
		break;
	case LM_1:
		flushbits = BIT(7);
		break;
	case LM_2:
		flushbits = BIT(8);
		break;
	case LM_3:
		flushbits = BIT(9);
		break;
	case LM_4:
		flushbits = BIT(10);
		break;
	case LM_5:
		flushbits = BIT(20);
		break;
	default:
		return -EINVAL;
	}

	flushbits |= BIT(17); /* CTL */

	return flushbits;
}

static inline int sde_hw_ctl_get_bitmask_dspp(struct sde_hw_ctl *ctx,
		u32 *flushbits, enum sde_dspp dspp)
{
	switch (dspp) {
	case DSPP_0:
		*flushbits |= BIT(13);
		break;
	case DSPP_1:
		*flushbits |= BIT(14);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_intf(struct sde_hw_ctl *ctx,
		u32 *flushbits, enum sde_intf intf)
{
	switch (intf) {
	case INTF_0:
		*flushbits |= BIT(31);
		break;
	case INTF_1:
		*flushbits |= BIT(30);
		break;
	case INTF_2:
		*flushbits |= BIT(29);
		break;
	case INTF_3:
		*flushbits |= BIT(28);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_wb(struct sde_hw_ctl *ctx,
		u32 *flushbits, enum sde_wb wb)
{
	switch (wb) {
	case WB_0:
	case WB_1:
	case WB_2:
		*flushbits |= BIT(16);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_cdm(struct sde_hw_ctl *ctx,
		u32 *flushbits, enum sde_cdm cdm)
{
	switch (cdm) {
	case CDM_0:
		*flushbits |= BIT(26);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

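/**
 * sde_hw_ctl_reset_control() - issue a CTL path software reset
 * @ctx: CTL path context
 *
 * Writes CTL_SW_RESET and polls, with a short sleep between reads, until
 * the hardware clears the bit or SDE_REG_RESET_TIMEOUT_COUNT attempts
 * have elapsed.
 *
 * Returns 0 on success, -EINVAL if the reset did not complete in time.
 */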
static int sde_hw_ctl_reset_control(struct sde_hw_ctl *ctx)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	int count = SDE_REG_RESET_TIMEOUT_COUNT;
	int reset;

	SDE_REG_WRITE(c, CTL_SW_RESET, 0x1);

	for (; count > 0; count--) {
		/* insert small delay to avoid spinning the cpu while waiting */
		usleep_range(20, 50);
		reset = SDE_REG_READ(c, CTL_SW_RESET);
		if (reset == 0)
			return 0;
	}

	return -EINVAL;
}

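/**
 * sde_hw_ctl_clear_all_blendstages() - detach all pipes from this CTL path
 * @ctx: CTL path context
 *
 * Clears the CTL_LAYER and CTL_LAYER_EXT registers of every mixer owned
 * by this CTL path.
 */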
static void sde_hw_ctl_clear_all_blendstages(struct sde_hw_ctl *ctx)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	int i;

	for (i = 0; i < ctx->mixer_count; i++) {
		SDE_REG_WRITE(c, CTL_LAYER(LM_0 + i), 0);
		SDE_REG_WRITE(c, CTL_LAYER_EXT(LM_0 + i), 0);
	}
}

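/**
 * sde_hw_ctl_setup_blendstage() - program the blend stages of one mixer
 * @ctx:       CTL path context
 * @lm:        layer mixer to program
 * @stage_cfg: per-stage pipe assignment for the CRTC
 * @index:     mixer index within the CRTC (must be less than
 *             CRTC_DUAL_MIXERS)
 *
 * Packs the pipe-to-stage assignment into the CTL_LAYER and
 * CTL_LAYER_EXT/EXT2/EXT3 registers for @lm. Pipes using the second
 * multirect rectangle (SDE_SSPP_RECT_1) are placed in the EXT2/EXT3
 * registers; BORDER_OUT is always enabled.
 */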
static void sde_hw_ctl_setup_blendstage(struct sde_hw_ctl *ctx,
	enum sde_lm lm, struct sde_hw_stage_cfg *stage_cfg, u32 index)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	u32 mixercfg = 0, mixercfg_ext = 0, mix, ext;
	u32 mixercfg_ext2 = 0, mixercfg_ext3 = 0;
	int i, j;
	int stages;	/* signed: _mixer_stages() can return -EINVAL */
	int pipes_per_stage;

	if (index >= CRTC_DUAL_MIXERS)
		return;

	stages = _mixer_stages(ctx->mixer_hw_caps, ctx->mixer_count, lm);
	if (stages < 0)
		return;

	if (test_bit(SDE_MIXER_SOURCESPLIT,
			&ctx->mixer_hw_caps->features))
		pipes_per_stage = PIPES_PER_STAGE;
	else
		pipes_per_stage = 1;

	mixercfg = BIT(24); /* always set BORDER_OUT */

	for (i = 0; i <= stages; i++) {
		/* overflow to ext register if 'i + 1 > 7' */
		mix = (i + 1) & 0x7;
		ext = i >= 7;

		for (j = 0; j < pipes_per_stage; j++) {
			enum sde_sspp_multirect_index rect_index =
				stage_cfg->multirect_index[index][i][j];

			switch (stage_cfg->stage[index][i][j]) {
			case SSPP_VIG0:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 0;
				} else {
					mixercfg |= mix << 0;
					mixercfg_ext |= ext << 0;
				}
				break;
			case SSPP_VIG1:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 4;
				} else {
					mixercfg |= mix << 3;
					mixercfg_ext |= ext << 2;
				}
				break;
			case SSPP_VIG2:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 6;
					mixercfg_ext |= ext << 4;
				}
				break;
			case SSPP_VIG3:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext3 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 26;
					mixercfg_ext |= ext << 6;
				}
				break;
			case SSPP_RGB0:
				mixercfg |= mix << 9;
				mixercfg_ext |= ext << 8;
				break;
			case SSPP_RGB1:
				mixercfg |= mix << 12;
				mixercfg_ext |= ext << 10;
				break;
			case SSPP_RGB2:
				mixercfg |= mix << 15;
				mixercfg_ext |= ext << 12;
				break;
			case SSPP_RGB3:
				mixercfg |= mix << 29;
				mixercfg_ext |= ext << 14;
				break;
			case SSPP_DMA0:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 8;
				} else {
					mixercfg |= mix << 18;
					mixercfg_ext |= ext << 16;
				}
				break;
			case SSPP_DMA1:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 12;
				} else {
					mixercfg |= mix << 21;
					mixercfg_ext |= ext << 18;
				}
				break;
			case SSPP_DMA2:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 16;
				} else {
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 0;
				}
				break;
			case SSPP_DMA3:
				if (rect_index == SDE_SSPP_RECT_1) {
					mixercfg_ext2 |= ((i + 1) & 0xF) << 20;
				} else {
					mix |= (i + 1) & 0xF;
					mixercfg_ext2 |= mix << 4;
				}
				break;
			case SSPP_CURSOR0:
				mixercfg_ext |= ((i + 1) & 0xF) << 20;
				break;
			case SSPP_CURSOR1:
				mixercfg_ext |= ((i + 1) & 0xF) << 26;
				break;
			default:
				break;
			}
		}
	}

	SDE_REG_WRITE(c, CTL_LAYER(lm), mixercfg);
	SDE_REG_WRITE(c, CTL_LAYER_EXT(lm), mixercfg_ext);
	SDE_REG_WRITE(c, CTL_LAYER_EXT2(lm), mixercfg_ext2);
	SDE_REG_WRITE(c, CTL_LAYER_EXT3(lm), mixercfg_ext3);
}

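/**
 * sde_hw_ctl_intf_cfg() - program the CTL_TOP register
 * @ctx: CTL path context
 * @cfg: interface, writeback, 3D mux and mode selection settings
 *
 * Selects the output interface or writeback, the 3D merge mode and
 * video vs. command mode operation for this CTL path.
 */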
static void sde_hw_ctl_intf_cfg(struct sde_hw_ctl *ctx,
		struct sde_hw_intf_cfg *cfg)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_cfg = 0;

	intf_cfg |= (cfg->intf & 0xF) << 4;

	if (cfg->wb)
		intf_cfg |= (cfg->wb & 0x3) + 2;

	if (cfg->mode_3d) {
		intf_cfg |= BIT(19);
		intf_cfg |= (cfg->mode_3d - 0x1) << 20;
	}

	switch (cfg->intf_mode_sel) {
	case SDE_CTL_MODE_SEL_VID:
		intf_cfg &= ~BIT(17);
		intf_cfg &= ~(0x3 << 15);
		break;
	case SDE_CTL_MODE_SEL_CMD:
		intf_cfg |= BIT(17);
		intf_cfg |= ((cfg->stream_sel & 0x3) << 15);
		break;
	default:
		pr_err("unknown interface type %d\n", cfg->intf_mode_sel);
		return;
	}

	SDE_REG_WRITE(c, CTL_TOP, intf_cfg);
}

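/*
 * _setup_ctl_ops() - populate the function table exposed to the rest of
 * the driver. The capability mask is not consulted here; all ops are
 * installed unconditionally.
 */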
static void _setup_ctl_ops(struct sde_hw_ctl_ops *ops,
		unsigned long cap)
{
	ops->clear_pending_flush = sde_hw_ctl_clear_pending_flush;
	ops->update_pending_flush = sde_hw_ctl_update_pending_flush;
	ops->get_pending_flush = sde_hw_ctl_get_pending_flush;
	ops->trigger_flush = sde_hw_ctl_trigger_flush;
	ops->trigger_start = sde_hw_ctl_trigger_start;
	ops->setup_intf_cfg = sde_hw_ctl_intf_cfg;
	ops->reset = sde_hw_ctl_reset_control;
	ops->clear_all_blendstages = sde_hw_ctl_clear_all_blendstages;
	ops->setup_blendstage = sde_hw_ctl_setup_blendstage;
	ops->get_bitmask_sspp = sde_hw_ctl_get_bitmask_sspp;
	ops->get_bitmask_mixer = sde_hw_ctl_get_bitmask_mixer;
	ops->get_bitmask_dspp = sde_hw_ctl_get_bitmask_dspp;
	ops->get_bitmask_intf = sde_hw_ctl_get_bitmask_intf;
	ops->get_bitmask_cdm = sde_hw_ctl_get_bitmask_cdm;
	ops->get_bitmask_wb = sde_hw_ctl_get_bitmask_wb;
}

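/**
 * sde_hw_ctl_init() - allocate and initialize a CTL path context
 * @idx:  CTL block to initialize
 * @addr: mapped register I/O base address
 * @m:    pointer to the MDSS hardware catalog
 *
 * Returns the new context on success, or an ERR_PTR on failure.
 */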
struct sde_hw_ctl *sde_hw_ctl_init(enum sde_ctl idx,
		void __iomem *addr,
		struct sde_mdss_cfg *m)
{
	struct sde_hw_ctl *c;
	struct sde_ctl_cfg *cfg;

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _ctl_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		pr_err("failed to create sde_hw_ctl %d\n", idx);
		return ERR_PTR(-EINVAL);
	}

	c->caps = cfg;
	_setup_ctl_ops(&c->ops, c->caps->features);
	c->idx = idx;
	c->mixer_count = m->mixer_count;
	c->mixer_hw_caps = m->mixer;

	return c;
}

void sde_hw_ctl_destroy(struct sde_hw_ctl *ctx)
{
	kfree(ctx);
}