/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/delay.h>
#include "sde_hwio.h"
#include "sde_hw_ctl.h"

#define CTL_LAYER(lm) \
	(((lm) == LM_5) ? (0x024) : (((lm) - LM_0) * 0x004))
#define CTL_LAYER_EXT(lm) \
	(0x40 + (((lm) - LM_0) * 0x004))
#define CTL_TOP 0x014
#define CTL_FLUSH 0x018
#define CTL_START 0x01C
#define CTL_SW_RESET 0x030
#define CTL_LAYER_EXTN_OFFSET 0x40

#define SDE_REG_RESET_TIMEOUT_COUNT 20

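/* Look up the CTL block matching 'ctl' in the hardware catalog and fill in
 * its register map; returns the catalog entry, or an ERR_PTR if not found.
 */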
static struct sde_ctl_cfg *_ctl_offset(enum sde_ctl ctl,
		struct sde_mdss_cfg *m,
		void __iomem *addr,
		struct sde_hw_blk_reg_map *b)
{
	int i;

	for (i = 0; i < m->ctl_count; i++) {
		if (ctl == m->ctl[i].id) {
			b->base_off = addr;
			b->blk_off = m->ctl[i].base;
			b->hwversion = m->hwversion;
			b->log_mask = SDE_DBG_MASK_CTL;
			return &m->ctl[i];
		}
	}
	return ERR_PTR(-ENOMEM);
}

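/* Return the maximum number of blend stages supported by layer mixer 'lm',
 * or -EINVAL if the mixer is not present in the capability list.
 */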
static int _mixer_stages(const struct sde_lm_cfg *mixer, int count,
		enum sde_lm lm)
{
	int i;
	int stages = -EINVAL;

	for (i = 0; i < count; i++) {
		if (lm == mixer[i].id) {
			stages = mixer[i].sblk->maxblendstages;
			break;
		}
	}

	return stages;
}

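/* Trigger/flush helpers: kick off the CTL start, and manage the
 * software-tracked pending flush mask that trigger_flush() finally
 * writes to the CTL_FLUSH register.
 */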
static inline void sde_hw_ctl_trigger_start(struct sde_hw_ctl *ctx)
{
	SDE_REG_WRITE(&ctx->hw, CTL_START, 0x1);
}

static inline void sde_hw_ctl_clear_pending_flush(struct sde_hw_ctl *ctx)
{
	ctx->pending_flush_mask = 0x0;
}

static inline void sde_hw_ctl_update_pending_flush(struct sde_hw_ctl *ctx,
		u32 flushbits)
{
	ctx->pending_flush_mask |= flushbits;
}

static u32 sde_hw_ctl_get_pending_flush(struct sde_hw_ctl *ctx)
{
	if (!ctx)
		return 0x0;

	return ctx->pending_flush_mask;
}

static inline void sde_hw_ctl_trigger_flush(struct sde_hw_ctl *ctx)
{
	SDE_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}

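/* Translate a source pipe id into its bit position in the CTL_FLUSH register. */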
static inline uint32_t sde_hw_ctl_get_bitmask_sspp(struct sde_hw_ctl *ctx,
	enum sde_sspp sspp)
{
	uint32_t flushbits = 0;

	switch (sspp) {
	case SSPP_VIG0:
		flushbits = BIT(0);
		break;
	case SSPP_VIG1:
		flushbits = BIT(1);
		break;
	case SSPP_VIG2:
		flushbits = BIT(2);
		break;
	case SSPP_VIG3:
		flushbits = BIT(18);
		break;
	case SSPP_RGB0:
		flushbits = BIT(3);
		break;
	case SSPP_RGB1:
		flushbits = BIT(4);
		break;
	case SSPP_RGB2:
		flushbits = BIT(5);
		break;
	case SSPP_RGB3:
		flushbits = BIT(19);
		break;
	case SSPP_DMA0:
		flushbits = BIT(11);
		break;
	case SSPP_DMA1:
		flushbits = BIT(12);
		break;
	case SSPP_CURSOR0:
		flushbits = BIT(22);
		break;
	case SSPP_CURSOR1:
		flushbits = BIT(23);
		break;
	default:
		break;
	}

	return flushbits;
}

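/* Translate a layer mixer id into its CTL_FLUSH bit; the CTL bit (BIT(17))
 * is always included so the control path itself is flushed along with it.
 */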
static inline uint32_t sde_hw_ctl_get_bitmask_mixer(struct sde_hw_ctl *ctx,
	enum sde_lm lm)
{
	uint32_t flushbits = 0;

	switch (lm) {
	case LM_0:
		flushbits = BIT(6);
		break;
	case LM_1:
		flushbits = BIT(7);
		break;
	case LM_2:
		flushbits = BIT(8);
		break;
	case LM_3:
		flushbits = BIT(9);
		break;
	case LM_4:
		flushbits = BIT(10);
		break;
	case LM_5:
		flushbits = BIT(20);
		break;
	default:
		return -EINVAL;
	}

	flushbits |= BIT(17); /* CTL */

	return flushbits;
}

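/* The remaining bitmask helpers accumulate the flush bit for the given
 * block into *flushbits and return 0, or -EINVAL for an unknown block.
 */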
static inline int sde_hw_ctl_get_bitmask_dspp(struct sde_hw_ctl *ctx,
	u32 *flushbits, enum sde_dspp dspp)
{
	switch (dspp) {
	case DSPP_0:
		*flushbits |= BIT(13);
		break;
	case DSPP_1:
		*flushbits |= BIT(14);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_intf(struct sde_hw_ctl *ctx,
	u32 *flushbits, enum sde_intf intf)
{
	switch (intf) {
	case INTF_0:
		*flushbits |= BIT(31);
		break;
	case INTF_1:
		*flushbits |= BIT(30);
		break;
	case INTF_2:
		*flushbits |= BIT(29);
		break;
	case INTF_3:
		*flushbits |= BIT(28);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_wb(struct sde_hw_ctl *ctx,
	u32 *flushbits, enum sde_wb wb)
{
	switch (wb) {
	case WB_0:
	case WB_1:
	case WB_2:
		*flushbits |= BIT(16);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

static inline int sde_hw_ctl_get_bitmask_cdm(struct sde_hw_ctl *ctx,
	u32 *flushbits, enum sde_cdm cdm)
{
	switch (cdm) {
	case CDM_0:
		*flushbits |= BIT(26);
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

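/* Request a CTL software reset and poll until the hardware clears the
 * CTL_SW_RESET bit; returns -EINVAL if the reset does not complete in time.
 */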
static int sde_hw_ctl_reset_control(struct sde_hw_ctl *ctx)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	int count = SDE_REG_RESET_TIMEOUT_COUNT;
	int reset;

	SDE_REG_WRITE(c, CTL_SW_RESET, 0x1);

	for (; count > 0; count--) {
		/* insert small delay to avoid spinning the cpu while waiting */
		usleep_range(20, 50);
		reset = SDE_REG_READ(c, CTL_SW_RESET);
		if (reset == 0)
			return 0;
	}

	return -EINVAL;
}

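/* Program the CTL_LAYER and CTL_LAYER_EXT registers for the given layer
 * mixer, mapping each pipe in stage_cfg to its blend stage. BORDER_OUT is
 * always enabled; stages above 7 spill over into the extension register.
 */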
static void sde_hw_ctl_setup_blendstage(struct sde_hw_ctl *ctx,
	enum sde_lm lm, struct sde_hw_stage_cfg *stage_cfg, u32 index)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	u32 mixercfg, mixercfg_ext, mix, ext;
	int i, j;
	int stages;
	int pipes_per_stage;

	if (index >= CRTC_DUAL_MIXERS)
		return;

	stages = _mixer_stages(ctx->mixer_hw_caps, ctx->mixer_count, lm);
	if (stages < 0)
		return;

	if (test_bit(SDE_MIXER_SOURCESPLIT,
		&ctx->mixer_hw_caps->features))
		pipes_per_stage = PIPES_PER_STAGE;
	else
		pipes_per_stage = 1;

	mixercfg = BIT(24); /* always set BORDER_OUT */
	mixercfg_ext = 0;

	for (i = 0; i <= stages; i++) {
		/* overflow to ext register if 'i + 1 > 7' */
		mix = (i + 1) & 0x7;
		ext = i >= 7;

		for (j = 0 ; j < pipes_per_stage; j++) {
			switch (stage_cfg->stage[index][i][j]) {
			case SSPP_VIG0:
				mixercfg |= mix << 0;
				mixercfg_ext |= ext << 0;
				break;
			case SSPP_VIG1:
				mixercfg |= mix << 3;
				mixercfg_ext |= ext << 2;
				break;
			case SSPP_VIG2:
				mixercfg |= mix << 6;
				mixercfg_ext |= ext << 4;
				break;
			case SSPP_VIG3:
				mixercfg |= mix << 26;
				mixercfg_ext |= ext << 6;
				break;
			case SSPP_RGB0:
				mixercfg |= mix << 9;
				mixercfg_ext |= ext << 8;
				break;
			case SSPP_RGB1:
				mixercfg |= mix << 12;
				mixercfg_ext |= ext << 10;
				break;
			case SSPP_RGB2:
				mixercfg |= mix << 15;
				mixercfg_ext |= ext << 12;
				break;
			case SSPP_RGB3:
				mixercfg |= mix << 29;
				mixercfg_ext |= ext << 14;
				break;
			case SSPP_DMA0:
				mixercfg |= mix << 18;
				mixercfg_ext |= ext << 16;
				break;
			case SSPP_DMA1:
				mixercfg |= mix << 21;
				mixercfg_ext |= ext << 18;
				break;
			case SSPP_CURSOR0:
				mixercfg_ext |= ((i + 1) & 0xF) << 20;
				break;
			case SSPP_CURSOR1:
				mixercfg_ext |= ((i + 1) & 0xF) << 26;
				break;
			default:
				break;
			}
		}
	}

	SDE_REG_WRITE(c, CTL_LAYER(lm), mixercfg);
	SDE_REG_WRITE(c, CTL_LAYER_EXT(lm), mixercfg_ext);
}

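/* Program CTL_TOP: select the physical interface or writeback target,
 * the 3D merge mode, and whether the path runs in video or command mode.
 */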
static void sde_hw_ctl_intf_cfg(struct sde_hw_ctl *ctx,
		struct sde_hw_intf_cfg *cfg)
{
	struct sde_hw_blk_reg_map *c = &ctx->hw;
	u32 intf_cfg = 0;

	intf_cfg |= (cfg->intf & 0xF) << 4;

	if (cfg->wb)
		intf_cfg |= (cfg->wb & 0x3) + 2;

	if (cfg->mode_3d) {
		intf_cfg |= BIT(19);
		intf_cfg |= (cfg->mode_3d - 0x1) << 20;
	}

	switch (cfg->intf_mode_sel) {
	case SDE_CTL_MODE_SEL_VID:
		intf_cfg &= ~BIT(17);
		intf_cfg &= ~(0x3 << 15);
		break;
	case SDE_CTL_MODE_SEL_CMD:
		intf_cfg |= BIT(17);
		intf_cfg |= ((cfg->stream_sel & 0x3) << 15);
		break;
	default:
		pr_err("unknown interface type %d\n", cfg->intf_mode_sel);
		return;
	}

	SDE_REG_WRITE(c, CTL_TOP, intf_cfg);
}

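/* Populate the ops table exported to callers of sde_hw_ctl_init(). */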
static void _setup_ctl_ops(struct sde_hw_ctl_ops *ops,
		unsigned long cap)
{
	ops->clear_pending_flush = sde_hw_ctl_clear_pending_flush;
	ops->update_pending_flush = sde_hw_ctl_update_pending_flush;
	ops->get_pending_flush = sde_hw_ctl_get_pending_flush;
	ops->trigger_flush = sde_hw_ctl_trigger_flush;
	ops->trigger_start = sde_hw_ctl_trigger_start;
	ops->setup_intf_cfg = sde_hw_ctl_intf_cfg;
	ops->reset = sde_hw_ctl_reset_control;
	ops->setup_blendstage = sde_hw_ctl_setup_blendstage;
	ops->get_bitmask_sspp = sde_hw_ctl_get_bitmask_sspp;
	ops->get_bitmask_mixer = sde_hw_ctl_get_bitmask_mixer;
	ops->get_bitmask_dspp = sde_hw_ctl_get_bitmask_dspp;
	ops->get_bitmask_intf = sde_hw_ctl_get_bitmask_intf;
	ops->get_bitmask_cdm = sde_hw_ctl_get_bitmask_cdm;
	ops->get_bitmask_wb = sde_hw_ctl_get_bitmask_wb;
}

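/* Allocate and initialize a CTL context for the block 'idx' described in
 * the catalog 'm'; the caller releases it with sde_hw_ctl_destroy().
 */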
struct sde_hw_ctl *sde_hw_ctl_init(enum sde_ctl idx,
		void __iomem *addr,
		struct sde_mdss_cfg *m)
{
	struct sde_hw_ctl *c;
	struct sde_ctl_cfg *cfg;

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _ctl_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		pr_err("failed to create sde_hw_ctl %d\n", idx);
		return ERR_PTR(-EINVAL);
	}

	c->caps = cfg;
	_setup_ctl_ops(&c->ops, c->caps->features);
	c->idx = idx;
	c->mixer_count = m->mixer_count;
	c->mixer_hw_caps = m->mixer;

	return c;
}

void sde_hw_ctl_destroy(struct sde_hw_ctl *ctx)
{
	kfree(ctx);
}