/*
 * drivers/media/platform/samsung/mfc5/s5p_mfc_opr.c
 *
 * Samsung MFC (Multi Function Codec - FIMV) driver
 * This file contains hw related functions.
 *
 * Kamil Debski, Copyright (c) 2011 Samsung Electronics
 * http://www.samsung.com/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include "regs-mfc.h"
#include "s5p_mfc_cmd_v5.h"
#include "s5p_mfc_common.h"
#include "s5p_mfc_ctrl.h"
#include "s5p_mfc_debug.h"
#include "s5p_mfc_intr.h"
#include "s5p_mfc_opr_v5.h"
#include "s5p_mfc_pm.h"
#include <asm/cacheflush.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/firmware.h>
#include <linux/io.h>
#include <linux/jiffies.h>
#include <linux/mm.h>
#include <linux/sched.h>

#define OFFSETA(x)	(((x) - dev->bank1) >> MFC_OFFSET_SHIFT)
#define OFFSETB(x)	(((x) - dev->bank2) >> MFC_OFFSET_SHIFT)
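/*
 * A worked example of the port offset mapping (values are hypothetical and
 * only illustrate how the macros scale an address): the MFC v5 firmware
 * addresses its two memory ports with shifted offsets relative to the bank
 * base addresses, not with full bus addresses.  Assuming MFC_OFFSET_SHIFT
 * is 11 on this IP, a buffer located at dev->bank1 + 0x80000 would be
 * programmed as
 *
 *	mfc_write(dev, OFFSETA(dev->bank1 + 0x80000), SOME_ADR_REG);
 *	// OFFSETA() evaluates to 0x80000 >> 11 == 0x100
 *
 * SOME_ADR_REG stands in for one of the S5P_FIMV_*_ADR registers used below.
 */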

/* Allocate temporary buffers for decoding */
int s5p_mfc_alloc_dec_temp_buffers(struct s5p_mfc_ctx *ctx)
{
	void *desc_virt;
	struct s5p_mfc_dev *dev = ctx->dev;

	ctx->desc_buf = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], DESC_BUF_SIZE);
	if (IS_ERR_VALUE((int)ctx->desc_buf)) {
		ctx->desc_buf = NULL;
		mfc_err("Allocating DESC buffer failed\n");
		return -ENOMEM;
	}
	ctx->desc_phys = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->desc_buf);
	BUG_ON(ctx->desc_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	desc_virt = vb2_dma_contig_memops.vaddr(ctx->desc_buf);
	if (desc_virt == NULL) {
		vb2_dma_contig_memops.put(ctx->desc_buf);
		ctx->desc_phys = 0;
		ctx->desc_buf = NULL;
		mfc_err("Remapping DESC buffer failed\n");
		return -ENOMEM;
	}
	memset(desc_virt, 0, DESC_BUF_SIZE);
	wmb();
	return 0;
}

/* Release temporary buffers for decoding */
void s5p_mfc_release_dec_desc_buffer(struct s5p_mfc_ctx *ctx)
{
	if (ctx->desc_phys) {
		vb2_dma_contig_memops.put(ctx->desc_buf);
		ctx->desc_phys = 0;
		ctx->desc_buf = NULL;
	}
}

/* Allocate codec buffers */
int s5p_mfc_alloc_codec_buffers(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int enc_ref_y_size = 0;
	unsigned int enc_ref_c_size = 0;
	unsigned int guard_width, guard_height;

	if (ctx->type == MFCINST_DECODER) {
		mfc_debug(2, "Luma size:%d Chroma size:%d MV size:%d\n",
			  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
		mfc_debug(2, "Total bufs: %d\n", ctx->total_dpb_count);
	} else if (ctx->type == MFCINST_ENCODER) {
		enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
		enc_ref_y_size = ALIGN(enc_ref_y_size, S5P_FIMV_NV12MT_SALIGN);

		if (ctx->codec_mode == S5P_FIMV_CODEC_H264_ENC) {
			enc_ref_c_size = ALIGN(ctx->img_width,
						S5P_FIMV_NV12MT_HALIGN)
				* ALIGN(ctx->img_height >> 1,
						S5P_FIMV_NV12MT_VALIGN);
			enc_ref_c_size = ALIGN(enc_ref_c_size,
						S5P_FIMV_NV12MT_SALIGN);
		} else {
			guard_width = ALIGN(ctx->img_width + 16,
						S5P_FIMV_NV12MT_HALIGN);
			guard_height = ALIGN((ctx->img_height >> 1) + 4,
						S5P_FIMV_NV12MT_VALIGN);
			enc_ref_c_size = ALIGN(guard_width * guard_height,
						S5P_FIMV_NV12MT_SALIGN);
		}
		mfc_debug(2, "recon luma size: %d chroma size: %d\n",
			  enc_ref_y_size, enc_ref_c_size);
	} else {
		return -EINVAL;
	}
	/* Codecs have different memory requirements */
	switch (ctx->codec_mode) {
	case S5P_FIMV_CODEC_H264_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_NB_IP_SIZE +
					S5P_FIMV_DEC_VERT_NB_MV_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = ctx->total_dpb_count * ctx->mv_size;
		break;
	case S5P_FIMV_CODEC_MPEG4_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_NB_DCAC_SIZE +
					S5P_FIMV_DEC_UPNB_MV_SIZE +
					S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
					S5P_FIMV_DEC_STX_PARSER_SIZE +
					S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_FIMV_CODEC_VC1RCV_DEC:
	case S5P_FIMV_CODEC_VC1_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE +
				S5P_FIMV_DEC_UPNB_MV_SIZE +
				S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
				S5P_FIMV_DEC_NB_DCAC_SIZE +
				3 * S5P_FIMV_DEC_VC1_BITPLANE_SIZE,
				S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_FIMV_CODEC_MPEG2_DEC:
		ctx->bank1_size = 0;
		ctx->bank2_size = 0;
		break;
	case S5P_FIMV_CODEC_H263_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE +
				S5P_FIMV_DEC_UPNB_MV_SIZE +
				S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
				S5P_FIMV_DEC_NB_DCAC_SIZE,
				S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_FIMV_CODEC_H264_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
			S5P_FIMV_ENC_UPMV_SIZE +
			S5P_FIMV_ENC_COLFLG_SIZE +
			S5P_FIMV_ENC_INTRAMD_SIZE +
			S5P_FIMV_ENC_NBORINFO_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
			(enc_ref_c_size * 4) +
			S5P_FIMV_ENC_INTRAPRED_SIZE;
		break;
	case S5P_FIMV_CODEC_MPEG4_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
			S5P_FIMV_ENC_UPMV_SIZE +
			S5P_FIMV_ENC_COLFLG_SIZE +
			S5P_FIMV_ENC_ACDCCOEF_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
			(enc_ref_c_size * 4);
		break;
	case S5P_FIMV_CODEC_H263_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
			S5P_FIMV_ENC_UPMV_SIZE +
			S5P_FIMV_ENC_ACDCCOEF_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
			(enc_ref_c_size * 4);
		break;
	default:
		break;
	}
	/* Allocate only if memory from bank 1 is necessary */
	if (ctx->bank1_size > 0) {
		ctx->bank1_buf = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->bank1_size);
		if (IS_ERR(ctx->bank1_buf)) {
			ctx->bank1_buf = NULL;
			printk(KERN_ERR
			       "Buf alloc for decoding failed (port A)\n");
			return -ENOMEM;
		}
		ctx->bank1_phys = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->bank1_buf);
		BUG_ON(ctx->bank1_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	}
	/* Allocate only if memory from bank 2 is necessary */
	if (ctx->bank2_size > 0) {
		ctx->bank2_buf = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK2_ALLOC_CTX], ctx->bank2_size);
		if (IS_ERR(ctx->bank2_buf)) {
			ctx->bank2_buf = NULL;
			mfc_err("Buf alloc for decoding failed (port B)\n");
			return -ENOMEM;
		}
		ctx->bank2_phys = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK2_ALLOC_CTX], ctx->bank2_buf);
		BUG_ON(ctx->bank2_phys & ((1 << MFC_BANK2_ALIGN_ORDER) - 1));
	}
	return 0;
}

/* Release buffers allocated for codec */
void s5p_mfc_release_codec_buffers(struct s5p_mfc_ctx *ctx)
{
	if (ctx->bank1_buf) {
		vb2_dma_contig_memops.put(ctx->bank1_buf);
		ctx->bank1_buf = NULL;
		ctx->bank1_phys = 0;
		ctx->bank1_size = 0;
	}
	if (ctx->bank2_buf) {
		vb2_dma_contig_memops.put(ctx->bank2_buf);
		ctx->bank2_buf = NULL;
		ctx->bank2_phys = 0;
		ctx->bank2_size = 0;
	}
}

/* Allocate memory for instance data buffer */
int s5p_mfc_alloc_instance_buffer(struct s5p_mfc_ctx *ctx)
{
	void *context_virt;
	struct s5p_mfc_dev *dev = ctx->dev;

	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC ||
	    ctx->codec_mode == S5P_FIMV_CODEC_H264_ENC)
		ctx->ctx_size = MFC_H264_CTX_BUF_SIZE;
	else
		ctx->ctx_size = MFC_CTX_BUF_SIZE;
	ctx->ctx_buf = vb2_dma_contig_memops.alloc(
		dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx_size);
	if (IS_ERR(ctx->ctx_buf)) {
		mfc_err("Allocating context buffer failed\n");
		ctx->ctx_phys = 0;
		ctx->ctx_buf = NULL;
		return -ENOMEM;
	}
	ctx->ctx_phys = s5p_mfc_mem_cookie(
		dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx_buf);
	BUG_ON(ctx->ctx_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	ctx->ctx_ofs = OFFSETA(ctx->ctx_phys);
	context_virt = vb2_dma_contig_memops.vaddr(ctx->ctx_buf);
	if (context_virt == NULL) {
		mfc_err("Remapping instance buffer failed\n");
		vb2_dma_contig_memops.put(ctx->ctx_buf);
		ctx->ctx_phys = 0;
		ctx->ctx_buf = NULL;
		return -ENOMEM;
	}
	/* Zero content of the allocated memory */
	memset(context_virt, 0, ctx->ctx_size);
	wmb();

	/* Initialize shared memory */
	ctx->shm_alloc = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], SHARED_BUF_SIZE);
	if (IS_ERR(ctx->shm_alloc)) {
		mfc_err("failed to allocate shared memory\n");
		return PTR_ERR(ctx->shm_alloc);
	}
	/* shared memory offset only keeps the offset from base (port a) */
	ctx->shm_ofs = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->shm_alloc)
			- dev->bank1;
	BUG_ON(ctx->shm_ofs & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));

	ctx->shm = vb2_dma_contig_memops.vaddr(ctx->shm_alloc);
	if (!ctx->shm) {
		vb2_dma_contig_memops.put(ctx->shm_alloc);
		ctx->shm_ofs = 0;
		ctx->shm_alloc = NULL;
		mfc_err("failed to get virtual address of shared memory\n");
		return -ENOMEM;
	}
	memset((void *)ctx->shm, 0, SHARED_BUF_SIZE);
	wmb();
	return 0;
}

/* Release instance buffer */
void s5p_mfc_release_instance_buffer(struct s5p_mfc_ctx *ctx)
{
	if (ctx->ctx_buf) {
		vb2_dma_contig_memops.put(ctx->ctx_buf);
		ctx->ctx_phys = 0;
		ctx->ctx_buf = NULL;
	}
	if (ctx->shm_alloc) {
		vb2_dma_contig_memops.put(ctx->shm_alloc);
		ctx->shm_alloc = NULL;
		ctx->shm = NULL;
	}
}

void s5p_mfc_write_info_v5(struct s5p_mfc_ctx *ctx, unsigned int data,
			   unsigned int ofs)
{
	writel(data, (ctx->shm + ofs));
	wmb();
}

unsigned int s5p_mfc_read_info_v5(struct s5p_mfc_ctx *ctx,
				  unsigned int ofs)
{
	rmb();
	return readl(ctx->shm + ofs);
}
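
/*
 * Illustrative usage sketch: the driver and the firmware exchange
 * per-context parameters through the shared memory buffer allocated in
 * s5p_mfc_alloc_instance_buffer().  The typical pattern, used throughout
 * this file, is a read-modify-write of one 32-bit word:
 *
 *	unsigned int shm;
 *
 *	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
 *	shm &= ~(0x3 << 1);		// clear frame skip mode field
 *	shm |= (new_skip_mode << 1);	// new_skip_mode is a hypothetical value
 *	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
 *
 * The rmb()/wmb() barriers in the accessors order these CPU accesses
 * against the firmware reading or updating the same buffer.
 */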

/* Set registers for decoding temporary buffers */
void s5p_mfc_set_dec_desc_buffer(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	mfc_write(dev, OFFSETA(ctx->desc_phys), S5P_FIMV_SI_CH0_DESC_ADR);
	mfc_write(dev, DESC_BUF_SIZE, S5P_FIMV_SI_CH0_DESC_SIZE);
}

/* Set registers for shared buffer */
static void s5p_mfc_set_shared_buffer(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, ctx->shm_ofs, S5P_FIMV_SI_CH0_HOST_WR_ADR);
}

/* Set registers for decoding stream buffer */
int s5p_mfc_set_dec_stream_buffer(struct s5p_mfc_ctx *ctx, int buf_addr,
		unsigned int start_num_byte, unsigned int buf_size)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	mfc_write(dev, OFFSETA(buf_addr), S5P_FIMV_SI_CH0_SB_ST_ADR);
	mfc_write(dev, ctx->dec_src_buf_size, S5P_FIMV_SI_CH0_CPB_SIZE);
	mfc_write(dev, buf_size, S5P_FIMV_SI_CH0_SB_FRM_SIZE);
	s5p_mfc_write_info_v5(ctx, start_num_byte, START_BYTE_NUM);
	return 0;
}

/* Set decoding frame buffer */
int s5p_mfc_set_dec_frame_buffer(struct s5p_mfc_ctx *ctx)
{
	unsigned int frame_size, i;
	unsigned int frame_size_ch, frame_size_mv;
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int dpb;
	size_t buf_addr1, buf_addr2;
	int buf_size1, buf_size2;

	buf_addr1 = ctx->bank1_phys;
	buf_size1 = ctx->bank1_size;
	buf_addr2 = ctx->bank2_phys;
	buf_size2 = ctx->bank2_size;
	dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) &
						~S5P_FIMV_DPB_COUNT_MASK;
	mfc_write(dev, ctx->total_dpb_count | dpb,
						S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
	s5p_mfc_set_shared_buffer(ctx);
	switch (ctx->codec_mode) {
	case S5P_FIMV_CODEC_H264_DEC:
		mfc_write(dev, OFFSETA(buf_addr1),
						S5P_FIMV_H264_VERT_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_VERT_NB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VERT_NB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H264_NB_IP_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_IP_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_IP_SIZE;
		break;
	case S5P_FIMV_CODEC_MPEG4_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_SP_ADR);
		buf_addr1 += S5P_FIMV_DEC_STX_PARSER_SIZE;
		buf_size1 -= S5P_FIMV_DEC_STX_PARSER_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		break;
	case S5P_FIMV_CODEC_H263_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		break;
	case S5P_FIMV_CODEC_VC1_DEC:
	case S5P_FIMV_CODEC_VC1RCV_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE3_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE2_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE1_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		break;
	case S5P_FIMV_CODEC_MPEG2_DEC:
		break;
	default:
		mfc_err("Unknown codec for decoding (%x)\n",
			ctx->codec_mode);
		return -EINVAL;
		break;
	}
	frame_size = ctx->luma_size;
	frame_size_ch = ctx->chroma_size;
	frame_size_mv = ctx->mv_size;
	mfc_debug(2, "Frm size: %d ch: %d mv: %d\n", frame_size, frame_size_ch,
								frame_size_mv);
	for (i = 0; i < ctx->total_dpb_count; i++) {
		/* Bank2 */
		mfc_debug(2, "Luma %d: %x\n", i,
					ctx->dst_bufs[i].cookie.raw.luma);
		mfc_write(dev, OFFSETB(ctx->dst_bufs[i].cookie.raw.luma),
						S5P_FIMV_DEC_LUMA_ADR + i * 4);
		mfc_debug(2, "\tChroma %d: %x\n", i,
					ctx->dst_bufs[i].cookie.raw.chroma);
		mfc_write(dev, OFFSETA(ctx->dst_bufs[i].cookie.raw.chroma),
					S5P_FIMV_DEC_CHROMA_ADR + i * 4);
		if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC) {
			mfc_debug(2, "\tBuf2: %x, size: %d\n",
							buf_addr2, buf_size2);
			mfc_write(dev, OFFSETB(buf_addr2),
						S5P_FIMV_H264_MV_ADR + i * 4);
			buf_addr2 += frame_size_mv;
			buf_size2 -= frame_size_mv;
		}
	}
	mfc_debug(2, "Buf1: %u, buf_size1: %d\n", buf_addr1, buf_size1);
	mfc_debug(2, "Buf 1/2 size after: %d/%d (frames %d)\n",
			buf_size1, buf_size2, ctx->total_dpb_count);
	if (buf_size1 < 0 || buf_size2 < 0) {
		mfc_debug(2, "Not enough memory has been allocated\n");
		return -ENOMEM;
	}
	s5p_mfc_write_info_v5(ctx, frame_size, ALLOC_LUMA_DPB_SIZE);
	s5p_mfc_write_info_v5(ctx, frame_size_ch, ALLOC_CHROMA_DPB_SIZE);
	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC)
		s5p_mfc_write_info_v5(ctx, frame_size_mv, ALLOC_MV_SIZE);
	mfc_write(dev, ((S5P_FIMV_CH_INIT_BUFS & S5P_FIMV_CH_MASK)
					<< S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
						S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}

/* Set registers for encoding stream buffer */
int s5p_mfc_set_enc_stream_buffer(struct s5p_mfc_ctx *ctx,
		unsigned long addr, unsigned int size)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	mfc_write(dev, OFFSETA(addr), S5P_FIMV_ENC_SI_CH0_SB_ADR);
	mfc_write(dev, size, S5P_FIMV_ENC_SI_CH0_SB_SIZE);
	return 0;
}

void s5p_mfc_set_enc_frame_buffer(struct s5p_mfc_ctx *ctx,
		unsigned long y_addr, unsigned long c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	mfc_write(dev, OFFSETB(y_addr), S5P_FIMV_ENC_SI_CH0_CUR_Y_ADR);
	mfc_write(dev, OFFSETB(c_addr), S5P_FIMV_ENC_SI_CH0_CUR_C_ADR);
}

void s5p_mfc_get_enc_frame_buffer(struct s5p_mfc_ctx *ctx,
		unsigned long *y_addr, unsigned long *c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	*y_addr = dev->bank2 + (mfc_read(dev, S5P_FIMV_ENCODED_Y_ADDR)
							<< MFC_OFFSET_SHIFT);
	*c_addr = dev->bank2 + (mfc_read(dev, S5P_FIMV_ENCODED_C_ADDR)
							<< MFC_OFFSET_SHIFT);
}
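
/*
 * Note: s5p_mfc_get_enc_frame_buffer() performs the inverse of the
 * OFFSETB() mapping defined at the top of this file - the register holds a
 * shifted offset and the bank 2 base is added back to recover a bus
 * address.  With the same hypothetical numbers as the OFFSETA() example
 * (offset 0x100, MFC_OFFSET_SHIFT assumed to be 11), the returned luma
 * address would be dev->bank2 + (0x100 << 11) == dev->bank2 + 0x80000.
 */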

/* Set encoding ref & codec buffer */
int s5p_mfc_set_enc_ref_buffer(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	size_t buf_addr1, buf_addr2;
	size_t buf_size1, buf_size2;
	unsigned int enc_ref_y_size, enc_ref_c_size;
	unsigned int guard_width, guard_height;
	int i;

	buf_addr1 = ctx->bank1_phys;
	buf_size1 = ctx->bank1_size;
	buf_addr2 = ctx->bank2_phys;
	buf_size2 = ctx->bank2_size;
	enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
		* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
	enc_ref_y_size = ALIGN(enc_ref_y_size, S5P_FIMV_NV12MT_SALIGN);
	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_ENC) {
		enc_ref_c_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12MT_VALIGN);
		enc_ref_c_size = ALIGN(enc_ref_c_size, S5P_FIMV_NV12MT_SALIGN);
	} else {
		guard_width = ALIGN(ctx->img_width + 16,
							S5P_FIMV_NV12MT_HALIGN);
		guard_height = ALIGN((ctx->img_height >> 1) + 4,
							S5P_FIMV_NV12MT_VALIGN);
		enc_ref_c_size = ALIGN(guard_width * guard_height,
							S5P_FIMV_NV12MT_SALIGN);
	}
	mfc_debug(2, "buf_size1: %d, buf_size2: %d\n", buf_size1, buf_size2);
	switch (ctx->codec_mode) {
	case S5P_FIMV_CODEC_H264_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;

			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H264_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_COZERO_FLAG_ADR);
		buf_addr1 += S5P_FIMV_ENC_COLFLG_SIZE;
		buf_size1 -= S5P_FIMV_ENC_COLFLG_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_UP_INTRA_MD_ADR);
		buf_addr1 += S5P_FIMV_ENC_INTRAMD_SIZE;
		buf_size1 -= S5P_FIMV_ENC_INTRAMD_SIZE;
		mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_H264_UP_INTRA_PRED_ADR);
		buf_addr2 += S5P_FIMV_ENC_INTRAPRED_SIZE;
		buf_size2 -= S5P_FIMV_ENC_INTRAPRED_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_NBOR_INFO_ADR);
		buf_addr1 += S5P_FIMV_ENC_NBORINFO_SIZE;
		buf_size1 -= S5P_FIMV_ENC_NBORINFO_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	case S5P_FIMV_CODEC_MPEG4_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_MPEG4_COZERO_FLAG_ADR);
		buf_addr1 += S5P_FIMV_ENC_COLFLG_SIZE;
		buf_size1 -= S5P_FIMV_ENC_COLFLG_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_MPEG4_ACDC_COEF_ADR);
		buf_addr1 += S5P_FIMV_ENC_ACDCCOEF_SIZE;
		buf_size1 -= S5P_FIMV_ENC_ACDCCOEF_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	case S5P_FIMV_CODEC_H263_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_ACDC_COEF_ADR);
		buf_addr1 += S5P_FIMV_ENC_ACDCCOEF_SIZE;
		buf_size1 -= S5P_FIMV_ENC_ACDCCOEF_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	default:
		mfc_err("Unknown codec set for encoding: %d\n",
			ctx->codec_mode);
		return -EINVAL;
	}
	return 0;
}
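
/*
 * Summary of the layout programmed above, derived from the code rather
 * than from separate documentation: luma reference frames 0/1 plus the
 * per-codec metadata areas (up MV, co-located/zero flags, intra MD, AC/DC
 * coefficients, neighbour info) are carved out of bank 1, while luma
 * references 2/3, all four chroma references and, for H.264, the intra
 * prediction buffer come from bank 2.  The running buf_size1/buf_size2
 * values only feed the debug prints here; unlike in
 * s5p_mfc_set_dec_frame_buffer(), they are not range-checked.
 */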

static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	unsigned int reg;
	unsigned int shm;

	/* width */
	mfc_write(dev, ctx->img_width, S5P_FIMV_ENC_HSIZE_PX);
	/* height */
	mfc_write(dev, ctx->img_height, S5P_FIMV_ENC_VSIZE_PX);
	/* pictype : enable, IDR period */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	reg |= (1 << 18);
	reg &= ~(0xFFFF);
	reg |= p->gop_size;
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	mfc_write(dev, 0, S5P_FIMV_ENC_B_RECON_WRITE_ON);
	/* multi-slice control */
	/* multi-slice MB number or bit size */
	mfc_write(dev, p->slice_mode, S5P_FIMV_ENC_MSLICE_CTRL);
	if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB) {
		mfc_write(dev, p->slice_mb, S5P_FIMV_ENC_MSLICE_MB);
	} else if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES) {
		mfc_write(dev, p->slice_bit, S5P_FIMV_ENC_MSLICE_BIT);
	} else {
		mfc_write(dev, 0, S5P_FIMV_ENC_MSLICE_MB);
		mfc_write(dev, 0, S5P_FIMV_ENC_MSLICE_BIT);
	}
	/* cyclic intra refresh */
	mfc_write(dev, p->intra_refresh_mb, S5P_FIMV_ENC_CIR_CTRL);
	/* memory structure cur. frame */
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
		mfc_write(dev, 0, S5P_FIMV_ENC_MAP_FOR_CUR);
	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
		mfc_write(dev, 3, S5P_FIMV_ENC_MAP_FOR_CUR);
	/* padding control & value */
	reg = mfc_read(dev, S5P_FIMV_ENC_PADDING_CTRL);
	if (p->pad) {
		/** enable */
		reg |= (1 << 31);
		/** cr value */
		reg &= ~(0xFF << 16);
		reg |= (p->pad_cr << 16);
		/** cb value */
		reg &= ~(0xFF << 8);
		reg |= (p->pad_cb << 8);
		/** y value */
		reg &= ~(0xFF);
		reg |= (p->pad_luma);
	} else {
		/** disable & all value clear */
		reg = 0;
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_PADDING_CTRL);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/** frame-level rate control */
	reg &= ~(0x1 << 9);
	reg |= (p->rc_frame << 9);
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* bit rate */
	if (p->rc_frame)
		mfc_write(dev, p->rc_bitrate,
			S5P_FIMV_ENC_RC_BIT_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_BIT_RATE);
	/* reaction coefficient */
	if (p->rc_frame)
		mfc_write(dev, p->rc_reaction_coeff, S5P_FIMV_ENC_RC_RPARA);
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* seq header ctrl */
	shm &= ~(0x1 << 3);
	shm |= (p->seq_hdr_mode << 3);
	/* frame skip mode */
	shm &= ~(0x3 << 1);
	shm |= (p->frame_skip_mode << 1);
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	/* fixed target bit */
	s5p_mfc_write_info_v5(ctx, p->fixed_target_bit, RC_CONTROL_CONFIG);
	return 0;
}
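
/*
 * A sketch of where these parameters come from (values are hypothetical):
 * by the time this helper runs, the fields of ctx->enc_params have been
 * filled in from V4L2 controls by the encoder front end.  A user-space
 * sequence such as
 *
 *	struct v4l2_control ctrl = {
 *		.id = V4L2_CID_MPEG_VIDEO_BITRATE,
 *		.value = 1000000,	// 1 Mbit/s, example value only
 *	};
 *	ioctl(fd, VIDIOC_S_CTRL, &ctrl);
 *
 * is assumed to end up as p->rc_bitrate above and is written to
 * S5P_FIMV_ENC_RC_BIT_RATE when frame-level rate control (p->rc_frame)
 * is enabled.
 */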

static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_h264_enc_params *p_264 = &p->codec.h264;
	unsigned int reg;
	unsigned int shm;

	s5p_mfc_set_enc_params(ctx);
	/* pictype : number of B */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* num_b_frame - 0 ~ 2 */
	reg &= ~(0x3 << 16);
	reg |= (p->num_b_frame << 16);
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* profile & level */
	reg = mfc_read(dev, S5P_FIMV_ENC_PROFILE);
	/* level */
	reg &= ~(0xFF << 8);
	reg |= (p_264->level << 8);
	/* profile - 0 ~ 2 */
	reg &= ~(0x3F);
	reg |= p_264->profile;
	mfc_write(dev, reg, S5P_FIMV_ENC_PROFILE);
	/* interlace */
	mfc_write(dev, p->interlace, S5P_FIMV_ENC_PIC_STRUCT);
	/* height */
	if (p->interlace)
		mfc_write(dev, ctx->img_height >> 1, S5P_FIMV_ENC_VSIZE_PX);
	/* loopfilter ctrl */
	mfc_write(dev, p_264->loop_filter_mode, S5P_FIMV_ENC_LF_CTRL);
	/* loopfilter alpha offset */
	if (p_264->loop_filter_alpha < 0) {
		reg = 0x10;
		reg |= (0xFF - p_264->loop_filter_alpha) + 1;
	} else {
		reg = 0x00;
		reg |= (p_264->loop_filter_alpha & 0xF);
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_ALPHA_OFF);
	/* loopfilter beta offset */
	if (p_264->loop_filter_beta < 0) {
		reg = 0x10;
		reg |= (0xFF - p_264->loop_filter_beta) + 1;
	} else {
		reg = 0x00;
		reg |= (p_264->loop_filter_beta & 0xF);
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_BETA_OFF);
	/* entropy coding mode */
	if (p_264->entropy_mode == V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC)
		mfc_write(dev, 1, S5P_FIMV_ENC_H264_ENTROPY_MODE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_H264_ENTROPY_MODE);
	/* number of ref. picture */
	reg = mfc_read(dev, S5P_FIMV_ENC_H264_NUM_OF_REF);
	/* num of ref. pictures of P */
	reg &= ~(0x3 << 5);
	reg |= (p_264->num_ref_pic_4p << 5);
	/* max number of ref. pictures */
	reg &= ~(0x1F);
	reg |= p_264->max_ref_pic;
	mfc_write(dev, reg, S5P_FIMV_ENC_H264_NUM_OF_REF);
	/* 8x8 transform enable */
	mfc_write(dev, p_264->_8x8_transform, S5P_FIMV_ENC_H264_TRANS_FLAG);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= (p_264->rc_mb << 8);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_264->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* frame rate */
	if (p->rc_frame && p->rc_framerate_denom)
		mfc_write(dev, p->rc_framerate_num * 1000
			/ p->rc_framerate_denom, S5P_FIMV_ENC_RC_FRAME_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_264->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_264->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* macroblock adaptive scaling features */
	if (p_264->rc_mb) {
		reg = mfc_read(dev, S5P_FIMV_ENC_RC_MB_CTRL);
		/* dark region */
		reg &= ~(0x1 << 3);
		reg |= (p_264->rc_mb_dark << 3);
		/* smooth region */
		reg &= ~(0x1 << 2);
		reg |= (p_264->rc_mb_smooth << 2);
		/* static region */
		reg &= ~(0x1 << 1);
		reg |= (p_264->rc_mb_static << 1);
		/* high activity region */
		reg &= ~(0x1);
		reg |= p_264->rc_mb_activity;
		mfc_write(dev, reg, S5P_FIMV_ENC_RC_MB_CTRL);
	}
	if (!p->rc_frame && !p_264->rc_mb) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= ((p_264->rc_b_frame_qp & 0x3F) << 6);
		shm |= (p_264->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* AR VUI control */
	shm &= ~(0x1 << 15);
	shm |= (p_264->vui_sar << 1);
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	if (p_264->vui_sar) {
		/* aspect ratio IDC */
		shm = s5p_mfc_read_info_v5(ctx, SAMPLE_ASPECT_RATIO_IDC);
		shm &= ~(0xFF);
		shm |= p_264->vui_sar_idc;
		s5p_mfc_write_info_v5(ctx, shm, SAMPLE_ASPECT_RATIO_IDC);
		if (p_264->vui_sar_idc == 0xFF) {
			/* sample AR info */
			shm = s5p_mfc_read_info_v5(ctx, EXTENDED_SAR);
			shm &= ~(0xFFFFFFFF);
			shm |= p_264->vui_ext_sar_width << 16;
			shm |= p_264->vui_ext_sar_height;
			s5p_mfc_write_info_v5(ctx, shm, EXTENDED_SAR);
		}
	}
	/* intra picture period for H.264 */
	shm = s5p_mfc_read_info_v5(ctx, H264_I_PERIOD);
	/* control */
	shm &= ~(0x1 << 16);
	shm |= (p_264->open_gop << 16);
	/* value */
	if (p_264->open_gop) {
		shm &= ~(0xFFFF);
		shm |= p_264->open_gop_size;
	}
	s5p_mfc_write_info_v5(ctx, shm, H264_I_PERIOD);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p_264->cpb_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}

static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_mpeg4 = &p->codec.mpeg4;
	unsigned int reg;
	unsigned int shm;
	unsigned int framerate;

	s5p_mfc_set_enc_params(ctx);
	/* pictype : number of B */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* num_b_frame - 0 ~ 2 */
	reg &= ~(0x3 << 16);
	reg |= (p->num_b_frame << 16);
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* profile & level */
	reg = mfc_read(dev, S5P_FIMV_ENC_PROFILE);
	/* level */
	reg &= ~(0xFF << 8);
	reg |= (p_mpeg4->level << 8);
	/* profile - 0 ~ 2 */
	reg &= ~(0x3F);
	reg |= p_mpeg4->profile;
	mfc_write(dev, reg, S5P_FIMV_ENC_PROFILE);
	/* quarter_pixel */
	mfc_write(dev, p_mpeg4->quarter_pixel, S5P_FIMV_ENC_MPEG4_QUART_PXL);
	/* qp */
	if (!p->rc_frame) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= ((p_mpeg4->rc_b_frame_qp & 0x3F) << 6);
		shm |= (p_mpeg4->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* frame rate */
	if (p->rc_frame) {
		if (p->rc_framerate_denom > 0) {
			framerate = p->rc_framerate_num * 1000 /
						p->rc_framerate_denom;
			mfc_write(dev, framerate,
					S5P_FIMV_ENC_RC_FRAME_RATE);
			shm = s5p_mfc_read_info_v5(ctx, RC_VOP_TIMING);
			shm &= ~(0xFFFFFFFF);
			shm |= (1 << 31);
			shm |= ((p->rc_framerate_num & 0x7FFF) << 16);
			shm |= (p->rc_framerate_denom & 0xFFFF);
			s5p_mfc_write_info_v5(ctx, shm, RC_VOP_TIMING);
		}
	} else {
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	}
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_mpeg4->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_mpeg4->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_mpeg4->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p->vbv_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}

static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_h263 = &p->codec.mpeg4;
	unsigned int reg;
	unsigned int shm;

	s5p_mfc_set_enc_params(ctx);
	/* qp */
	if (!p->rc_frame) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= (p_h263->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* frame rate */
	if (p->rc_frame && p->rc_framerate_denom)
		mfc_write(dev, p->rc_framerate_num * 1000
			/ p->rc_framerate_denom, S5P_FIMV_ENC_RC_FRAME_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_h263->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_h263->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_h263->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p->vbv_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}

/* Initialize decoding */
int s5p_mfc_init_decode(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	s5p_mfc_set_shared_buffer(ctx);
	/* Setup loop filter, for decoding this is only valid for MPEG4 */
	if (ctx->codec_mode == S5P_FIMV_CODEC_MPEG4_DEC)
		mfc_write(dev, ctx->loop_filter_mpeg4, S5P_FIMV_ENC_LF_CTRL);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_LF_CTRL);
	mfc_write(dev, ((ctx->slice_interface & S5P_FIMV_SLICE_INT_MASK) <<
		S5P_FIMV_SLICE_INT_SHIFT) | (ctx->display_delay_enable <<
		S5P_FIMV_DDELAY_ENA_SHIFT) | ((ctx->display_delay &
		S5P_FIMV_DDELAY_VAL_MASK) << S5P_FIMV_DDELAY_VAL_SHIFT),
		S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
	mfc_write(dev,
		((S5P_FIMV_CH_SEQ_HEADER & S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT)
		| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}

static void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int dpb;

	if (flush)
		dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) | (
			S5P_FIMV_DPB_FLUSH_MASK << S5P_FIMV_DPB_FLUSH_SHIFT);
	else
		dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) &
			~(S5P_FIMV_DPB_FLUSH_MASK << S5P_FIMV_DPB_FLUSH_SHIFT);
	mfc_write(dev, dpb, S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
}

/* Decode a single frame */
int s5p_mfc_decode_one_frame(struct s5p_mfc_ctx *ctx,
					enum s5p_mfc_decode_arg last_frame)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	mfc_write(dev, ctx->dec_dst_flag, S5P_FIMV_SI_CH0_RELEASE_BUF);
	s5p_mfc_set_shared_buffer(ctx);
	s5p_mfc_set_flush(ctx, ctx->dpb_flush_flag);
	/* Issue different commands to the instance based on whether
	 * it is the last frame or not. */
	switch (last_frame) {
	case MFC_DEC_FRAME:
		mfc_write(dev, ((S5P_FIMV_CH_FRAME_START & S5P_FIMV_CH_MASK) <<
		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
		break;
	case MFC_DEC_LAST_FRAME:
		mfc_write(dev, ((S5P_FIMV_CH_LAST_FRAME & S5P_FIMV_CH_MASK) <<
		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
		break;
	case MFC_DEC_RES_CHANGE:
		mfc_write(dev, ((S5P_FIMV_CH_FRAME_START_REALLOC &
		S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
		S5P_FIMV_SI_CH0_INST_ID);
		break;
	}
	mfc_debug(2, "Decoding a usual frame\n");
	return 0;
}
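
/*
 * Note on the command word: operations are issued to a context by writing
 * a single word to S5P_FIMV_SI_CH0_INST_ID that combines the channel
 * command and the firmware instance number.  As s5p_mfc_init_encode()
 * below makes explicit with its literal masks, the command occupies a
 * 3-bit field at bit 16, so for a hypothetical instance number of 1 the
 * value is
 *
 *	((cmd & S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT) | 1
 *
 * with only bits 16..18 carrying the command code.
 */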

int s5p_mfc_init_encode(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_ENC)
		s5p_mfc_set_enc_params_h264(ctx);
	else if (ctx->codec_mode == S5P_FIMV_CODEC_MPEG4_ENC)
		s5p_mfc_set_enc_params_mpeg4(ctx);
	else if (ctx->codec_mode == S5P_FIMV_CODEC_H263_ENC)
		s5p_mfc_set_enc_params_h263(ctx);
	else {
		mfc_err("Unknown codec for encoding (%x)\n",
			ctx->codec_mode);
		return -EINVAL;
	}
	s5p_mfc_set_shared_buffer(ctx);
	mfc_write(dev, ((S5P_FIMV_CH_SEQ_HEADER << 16) & 0x70000) |
		(ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}

/* Encode a single frame */
int s5p_mfc_encode_one_frame(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	int cmd;
	/* memory structure cur. frame */
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
		mfc_write(dev, 0, S5P_FIMV_ENC_MAP_FOR_CUR);
	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
		mfc_write(dev, 3, S5P_FIMV_ENC_MAP_FOR_CUR);
	s5p_mfc_set_shared_buffer(ctx);

	if (ctx->state == MFCINST_FINISHING)
		cmd = S5P_FIMV_CH_LAST_FRAME;
	else
		cmd = S5P_FIMV_CH_FRAME_START;
	mfc_write(dev, ((cmd & S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT)
		| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);

	return 0;
}

static int s5p_mfc_get_new_ctx(struct s5p_mfc_dev *dev)
{
	unsigned long flags;
	int new_ctx;
	int cnt;

	spin_lock_irqsave(&dev->condlock, flags);
	new_ctx = (dev->curr_ctx + 1) % MFC_NUM_CONTEXTS;
	cnt = 0;
	while (!test_bit(new_ctx, &dev->ctx_work_bits)) {
		new_ctx = (new_ctx + 1) % MFC_NUM_CONTEXTS;
		if (++cnt > MFC_NUM_CONTEXTS) {
			/* No contexts to run */
			spin_unlock_irqrestore(&dev->condlock, flags);
			return -EAGAIN;
		}
	}
	spin_unlock_irqrestore(&dev->condlock, flags);
	return new_ctx;
}
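
/*
 * Worked example of the scheduling above (hypothetical state): assuming
 * MFC_NUM_CONTEXTS == 4, curr_ctx == 1 and work pending only on contexts
 * 0 and 3 (ctx_work_bits == 0b1001), the loop starts at context 2, finds
 * its bit clear, advances to 3 and returns 3 - i.e. selection is
 * round-robin beginning just after the context that ran last.  When
 * ctx_work_bits is 0 the counter exceeds MFC_NUM_CONTEXTS and -EAGAIN is
 * returned instead.
 */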

static void s5p_mfc_run_res_change(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	s5p_mfc_set_dec_stream_buffer(ctx, 0, 0, 0);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_decode_one_frame(ctx, MFC_DEC_RES_CHANGE);
}

static int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx, int last_frame)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf *temp_vb;
	unsigned long flags;
	unsigned int index;

	spin_lock_irqsave(&dev->irqlock, flags);
	/* Frames are being decoded */
	if (list_empty(&ctx->src_queue)) {
		mfc_debug(2, "No src buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	/* Get the next source buffer */
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	temp_vb->flags |= MFC_BUF_FLAG_USED;
	s5p_mfc_set_dec_stream_buffer(ctx,
		vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
		ctx->consumed_stream, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	index = temp_vb->b->v4l2_buf.index;
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	if (temp_vb->b->v4l2_planes[0].bytesused == 0) {
		last_frame = MFC_DEC_LAST_FRAME;
		mfc_debug(2, "Setting ctx->state to FINISHING\n");
		ctx->state = MFCINST_FINISHING;
	}
	s5p_mfc_decode_one_frame(ctx, last_frame);
	return 0;
}

static int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	struct s5p_mfc_buf *src_mb;
	unsigned long src_y_addr, src_c_addr, dst_addr;
	unsigned int dst_size;

	spin_lock_irqsave(&dev->irqlock, flags);
	if (list_empty(&ctx->src_queue) && ctx->state != MFCINST_FINISHING) {
		mfc_debug(2, "no src buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	if (list_empty(&ctx->dst_queue)) {
		mfc_debug(2, "no dst buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	if (list_empty(&ctx->src_queue)) {
		/* send null frame */
		s5p_mfc_set_enc_frame_buffer(ctx, dev->bank2, dev->bank2);
		src_mb = NULL;
	} else {
		src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf,
									list);
		src_mb->flags |= MFC_BUF_FLAG_USED;
		if (src_mb->b->v4l2_planes[0].bytesused == 0) {
			/* send null frame */
			s5p_mfc_set_enc_frame_buffer(ctx, dev->bank2,
								dev->bank2);
			ctx->state = MFCINST_FINISHING;
		} else {
			src_y_addr = vb2_dma_contig_plane_dma_addr(src_mb->b,
									0);
			src_c_addr = vb2_dma_contig_plane_dma_addr(src_mb->b,
									1);
			s5p_mfc_set_enc_frame_buffer(ctx, src_y_addr,
								src_c_addr);
			if (src_mb->flags & MFC_BUF_FLAG_EOS)
				ctx->state = MFCINST_FINISHING;
		}
	}
	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_mb->flags |= MFC_BUF_FLAG_USED;
	dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
	dst_size = vb2_plane_size(dst_mb->b, 0);
	s5p_mfc_set_enc_stream_buffer(ctx, dst_addr, dst_size);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	mfc_debug(2, "encoding buffer with index=%d state=%d",
			src_mb ? src_mb->b->v4l2_buf.index : -1, ctx->state);
	s5p_mfc_encode_one_frame(ctx);
	return 0;
}

static void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *temp_vb;

	/* Initializing decoding - parsing header */
	spin_lock_irqsave(&dev->irqlock, flags);
	mfc_debug(2, "Preparing to init decoding\n");
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	s5p_mfc_set_dec_desc_buffer(ctx);
	mfc_debug(2, "Header size: %d\n", temp_vb->b->v4l2_planes[0].bytesused);
	s5p_mfc_set_dec_stream_buffer(ctx,
			vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
			0, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_init_decode(ctx);
}

static void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	unsigned long dst_addr;
	unsigned int dst_size;

	s5p_mfc_set_enc_ref_buffer(ctx);
	spin_lock_irqsave(&dev->irqlock, flags);
	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
	dst_size = vb2_plane_size(dst_mb->b, 0);
	s5p_mfc_set_enc_stream_buffer(ctx, dst_addr, dst_size);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_init_encode(ctx);
}

static int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *temp_vb;
	int ret;

	/*
	 * Header was parsed now starting processing
	 * First set the output frame buffers
	 */
	if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
		mfc_err("It seems that not all destination buffers were "
			"mmaped\nMFC requires that all destination are mmaped "
			"before starting processing\n");
		return -EAGAIN;
	}
	spin_lock_irqsave(&dev->irqlock, flags);
	if (list_empty(&ctx->src_queue)) {
		mfc_err("Header has been deallocated in the middle of"
			" initialization\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EIO;
	}
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	mfc_debug(2, "Header size: %d\n", temp_vb->b->v4l2_planes[0].bytesused);
	s5p_mfc_set_dec_stream_buffer(ctx,
			vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
			0, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	ret = s5p_mfc_set_dec_frame_buffer(ctx);
	if (ret) {
		mfc_err("Failed to alloc frame mem\n");
		ctx->state = MFCINST_ERROR;
	}
	return ret;
}

/* Try running an operation on hardware */
void s5p_mfc_try_run(struct s5p_mfc_dev *dev)
{
	struct s5p_mfc_ctx *ctx;
	int new_ctx;
	unsigned int ret = 0;

	if (test_bit(0, &dev->enter_suspend)) {
		mfc_debug(1, "Entering suspend so do not schedule any jobs\n");
		return;
	}
	/* Check whether hardware is not running */
	if (test_and_set_bit(0, &dev->hw_lock) != 0) {
		/* This is perfectly ok, the scheduled ctx should wait */
		mfc_debug(1, "Couldn't lock HW\n");
		return;
	}
	/* Choose the context to run */
	new_ctx = s5p_mfc_get_new_ctx(dev);
	if (new_ctx < 0) {
		/* No contexts to run */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0) {
			mfc_err("Failed to unlock hardware\n");
			return;
		}
		mfc_debug(1, "No ctx is scheduled to be run\n");
		return;
	}
	ctx = dev->ctx[new_ctx];
	/* Got context to run in ctx */
	/*
	 * Last frame has already been sent to MFC.
	 * Now obtaining frames from MFC buffer
	 */
	s5p_mfc_clock_on();
	if (ctx->type == MFCINST_DECODER) {
		s5p_mfc_set_dec_desc_buffer(ctx);
		switch (ctx->state) {
		case MFCINST_FINISHING:
			s5p_mfc_run_dec_frame(ctx, MFC_DEC_LAST_FRAME);
			break;
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
			break;
		case MFCINST_INIT:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_open_inst_cmd(ctx);
			break;
		case MFCINST_RETURN_INST:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_close_inst_cmd(ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_dec(ctx);
			break;
		case MFCINST_HEAD_PARSED:
			ret = s5p_mfc_run_init_dec_buffers(ctx);
			mfc_debug(1, "head parsed\n");
			break;
		case MFCINST_RES_CHANGE_INIT:
			s5p_mfc_run_res_change(ctx);
			break;
		case MFCINST_RES_CHANGE_FLUSH:
			s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
			break;
		case MFCINST_RES_CHANGE_END:
			mfc_debug(2, "Finished remaining frames after resolution change\n");
			ctx->capture_state = QUEUE_FREE;
			mfc_debug(2, "Will re-init the codec\n");
			s5p_mfc_run_init_dec(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else if (ctx->type == MFCINST_ENCODER) {
		switch (ctx->state) {
		case MFCINST_FINISHING:
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_enc_frame(ctx);
			break;
		case MFCINST_INIT:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_open_inst_cmd(ctx);
			break;
		case MFCINST_RETURN_INST:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_close_inst_cmd(ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_enc(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else {
		mfc_err("Invalid context type: %d\n", ctx->type);
		ret = -EAGAIN;
	}

	if (ret) {
		/* Free hardware lock */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0)
			mfc_err("Failed to unlock hardware\n");

		/* This is indeed important: as no operation has been
		 * scheduled, reduce the clock count since no one else will
		 * ever do it, because no interrupt related to this try_run
		 * will ever come from hardware. */
		s5p_mfc_clock_off();
	}
}
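
/*
 * Note on the locking/clock protocol, as implied by the code above:
 * s5p_mfc_try_run() takes bit 0 of dev->hw_lock and calls
 * s5p_mfc_clock_on() before touching the hardware; on success both are
 * expected to be released later, once the issued operation completes,
 * from the interrupt path outside this file.  Only the error path here
 * drops them itself.  A caller therefore never pairs try_run() with an
 * explicit unlock - it simply re-invokes it when new work is queued, e.g.
 *
 *	set_bit(ctx->num, &dev->ctx_work_bits);
 *	s5p_mfc_try_run(dev);
 *
 * (a minimal sketch of how the rest of the driver is assumed to schedule
 * work; the actual call sites live outside this file).
 */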


void s5p_mfc_cleanup_queue(struct list_head *lh, struct vb2_queue *vq)
{
	struct s5p_mfc_buf *b;
	int i;

	while (!list_empty(lh)) {
		b = list_entry(lh->next, struct s5p_mfc_buf, list);
		for (i = 0; i < b->b->num_planes; i++)
			vb2_set_plane_payload(b->b, i, 0);
		vb2_buffer_done(b->b, VB2_BUF_STATE_ERROR);
		list_del(&b->list);
	}
}
