/*
 * sh-mobile VEU mem2mem driver
 *
 * Copyright (C) 2012 Renesas Electronics Corporation
 * Author: Guennadi Liakhovetski, <g.liakhovetski@gmx.de>
 * Copyright (C) 2008 Magnus Damm
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the version 2 of the GNU General Public License as
 * published by the Free Software Foundation
 */

#include <linux/err.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/videodev2.h>

#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-image-sizes.h>
#include <media/videobuf2-dma-contig.h>

#define VEU_STR 0x00		/* start register */
#define VEU_SWR 0x10		/* src: line length */
#define VEU_SSR 0x14		/* src: image size */
#define VEU_SAYR 0x18		/* src: y/rgb plane address */
#define VEU_SACR 0x1c		/* src: c plane address */
#define VEU_BSSR 0x20		/* bundle mode register */
#define VEU_EDWR 0x30		/* dst: line length */
#define VEU_DAYR 0x34		/* dst: y/rgb plane address */
#define VEU_DACR 0x38		/* dst: c plane address */
#define VEU_TRCR 0x50		/* transform control */
#define VEU_RFCR 0x54		/* resize scale */
#define VEU_RFSR 0x58		/* resize clip */
#define VEU_ENHR 0x5c		/* enhance */
#define VEU_FMCR 0x70		/* filter mode */
#define VEU_VTCR 0x74		/* lowpass vertical */
#define VEU_HTCR 0x78		/* lowpass horizontal */
#define VEU_APCR 0x80		/* color match */
#define VEU_ECCR 0x84		/* color replace */
#define VEU_AFXR 0x90		/* fixed mode */
#define VEU_SWPR 0x94		/* swap */
#define VEU_EIER 0xa0		/* interrupt mask */
#define VEU_EVTR 0xa4		/* interrupt event */
#define VEU_STAR 0xb0		/* status */
#define VEU_BSRR 0xb4		/* reset */

#define VEU_MCR00 0x200		/* color conversion matrix coefficient 00 */
#define VEU_MCR01 0x204		/* color conversion matrix coefficient 01 */
#define VEU_MCR02 0x208		/* color conversion matrix coefficient 02 */
#define VEU_MCR10 0x20c		/* color conversion matrix coefficient 10 */
#define VEU_MCR11 0x210		/* color conversion matrix coefficient 11 */
#define VEU_MCR12 0x214		/* color conversion matrix coefficient 12 */
#define VEU_MCR20 0x218		/* color conversion matrix coefficient 20 */
#define VEU_MCR21 0x21c		/* color conversion matrix coefficient 21 */
#define VEU_MCR22 0x220		/* color conversion matrix coefficient 22 */
#define VEU_COFFR 0x224		/* color conversion offset */
#define VEU_CBR 0x228		/* color conversion clip */

/*
 * 4092x4092 max size is the normal case. In some cases it can be reduced to
 * 2048x2048, in other cases it can be 4092x8188 or even 8188x8188.
 */
#define MAX_W 4092
#define MAX_H 4092
#define MIN_W 8
#define MIN_H 8
#define ALIGN_W 4

/* 3 buffers of 2048 x 1536 - 3 megapixels @ 16bpp */
#define VIDEO_MEM_LIMIT ALIGN(2048 * 1536 * 2 * 3, 1024 * 1024)

#define MEM2MEM_DEF_TRANSLEN 1

struct sh_veu_dev;

struct sh_veu_file {
	struct sh_veu_dev *veu_dev;
	bool cfg_needed;
};

struct sh_veu_format {
	char *name;
	u32 fourcc;
	unsigned int depth;
	unsigned int ydepth;
};

/* video data format */
struct sh_veu_vfmt {
	/* Replace with v4l2_rect */
	struct v4l2_rect frame;
	unsigned int bytesperline;
	unsigned int offset_y;
	unsigned int offset_c;
	const struct sh_veu_format *fmt;
};

struct sh_veu_dev {
	struct v4l2_device v4l2_dev;
	struct video_device vdev;
	struct v4l2_m2m_dev *m2m_dev;
	struct device *dev;
	struct v4l2_m2m_ctx *m2m_ctx;
	struct sh_veu_vfmt vfmt_out;
	struct sh_veu_vfmt vfmt_in;
	/* Only single user per direction so far */
	struct sh_veu_file *capture;
	struct sh_veu_file *output;
	struct mutex fop_lock;
	void __iomem *base;
	struct vb2_alloc_ctx *alloc_ctx;
	spinlock_t lock;
	bool is_2h;
	unsigned int xaction;
	bool aborting;
};

enum sh_veu_fmt_idx {
	SH_VEU_FMT_NV12,
	SH_VEU_FMT_NV16,
	SH_VEU_FMT_NV24,
	SH_VEU_FMT_RGB332,
	SH_VEU_FMT_RGB444,
	SH_VEU_FMT_RGB565,
	SH_VEU_FMT_RGB666,
	SH_VEU_FMT_RGB24,
};

#define DEFAULT_IN_WIDTH	VGA_WIDTH
#define DEFAULT_IN_HEIGHT	VGA_HEIGHT
#define DEFAULT_IN_FMTIDX	SH_VEU_FMT_NV12
#define DEFAULT_OUT_WIDTH	VGA_WIDTH
#define DEFAULT_OUT_HEIGHT	VGA_HEIGHT
#define DEFAULT_OUT_FMTIDX	SH_VEU_FMT_RGB565

/*
 * Alignment: Y-plane should be 4-byte aligned for NV12 and NV16, and 8-byte
 * aligned for NV24.
 */
static const struct sh_veu_format sh_veu_fmt[] = {
	[SH_VEU_FMT_NV12] = { .ydepth = 8, .depth = 12, .name = "NV12", .fourcc = V4L2_PIX_FMT_NV12 },
	[SH_VEU_FMT_NV16] = { .ydepth = 8, .depth = 16, .name = "NV16", .fourcc = V4L2_PIX_FMT_NV16 },
	[SH_VEU_FMT_NV24] = { .ydepth = 8, .depth = 24, .name = "NV24", .fourcc = V4L2_PIX_FMT_NV24 },
	[SH_VEU_FMT_RGB332] = { .ydepth = 8, .depth = 8, .name = "RGB332", .fourcc = V4L2_PIX_FMT_RGB332 },
	[SH_VEU_FMT_RGB444] = { .ydepth = 16, .depth = 16, .name = "RGB444", .fourcc = V4L2_PIX_FMT_RGB444 },
	[SH_VEU_FMT_RGB565] = { .ydepth = 16, .depth = 16, .name = "RGB565", .fourcc = V4L2_PIX_FMT_RGB565 },
	[SH_VEU_FMT_RGB666] = { .ydepth = 32, .depth = 32, .name = "BGR666", .fourcc = V4L2_PIX_FMT_BGR666 },
	[SH_VEU_FMT_RGB24] = { .ydepth = 24, .depth = 24, .name = "RGB24", .fourcc = V4L2_PIX_FMT_RGB24 },
};

#define DEFAULT_IN_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_IN_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_IN_FMTIDX], \
}

#define DEFAULT_OUT_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_OUT_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_OUT_FMTIDX], \
}

/*
 * TODO: add support for further output formats:
 * SH_VEU_FMT_NV12,
 * SH_VEU_FMT_NV16,
 * SH_VEU_FMT_NV24,
 * SH_VEU_FMT_RGB332,
 * SH_VEU_FMT_RGB444,
 * SH_VEU_FMT_RGB666,
 * SH_VEU_FMT_RGB24,
 */

static const int sh_veu_fmt_out[] = {
	SH_VEU_FMT_RGB565,
};

/*
 * TODO: add support for further input formats:
 * SH_VEU_FMT_NV16,
 * SH_VEU_FMT_NV24,
 * SH_VEU_FMT_RGB565,
 * SH_VEU_FMT_RGB666,
 * SH_VEU_FMT_RGB24,
 */
static const int sh_veu_fmt_in[] = {
	SH_VEU_FMT_NV12,
};

static enum v4l2_colorspace sh_veu_4cc2cspace(u32 fourcc)
{
	switch (fourcc) {
	default:
		BUG();
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		return V4L2_COLORSPACE_SMPTE170M;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		return V4L2_COLORSPACE_SRGB;
	}
}

static u32 sh_veu_reg_read(struct sh_veu_dev *veu, unsigned int reg)
{
	return ioread32(veu->base + reg);
}

static void sh_veu_reg_write(struct sh_veu_dev *veu, unsigned int reg,
			     u32 value)
{
	iowrite32(value, veu->base + reg);
}

	/* ========== mem2mem callbacks ========== */

static void sh_veu_job_abort(void *priv)
{
	struct sh_veu_dev *veu = priv;

	/* Will cancel the transaction in the next interrupt handler */
	veu->aborting = true;
}

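/*
 * sh_veu_process() only programs the per-buffer Y- and C-plane addresses and
 * kicks off the engine; the static geometry, scaling and colour-conversion
 * setup is done once per STREAMON in sh_veu_configure().
 */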
static void sh_veu_process(struct sh_veu_dev *veu,
			   struct vb2_buffer *src_buf,
			   struct vb2_buffer *dst_buf)
{
	dma_addr_t addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);

	sh_veu_reg_write(veu, VEU_DAYR, addr + veu->vfmt_out.offset_y);
	sh_veu_reg_write(veu, VEU_DACR, veu->vfmt_out.offset_c ?
			 addr + veu->vfmt_out.offset_c : 0);
	dev_dbg(veu->dev, "%s(): dst base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_out.offset_y, veu->vfmt_out.offset_c);

	addr = vb2_dma_contig_plane_dma_addr(src_buf, 0);
	sh_veu_reg_write(veu, VEU_SAYR, addr + veu->vfmt_in.offset_y);
	sh_veu_reg_write(veu, VEU_SACR, veu->vfmt_in.offset_c ?
			 addr + veu->vfmt_in.offset_c : 0);
	dev_dbg(veu->dev, "%s(): src base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_in.offset_y, veu->vfmt_in.offset_c);

	sh_veu_reg_write(veu, VEU_STR, 1);

	sh_veu_reg_write(veu, VEU_EIER, 1);	/* enable interrupt in VEU */
}

/**
 * sh_veu_device_run() - prepares and starts the device
 *
 * This will be called by the framework when it decides to schedule a particular
 * instance.
 */
static void sh_veu_device_run(void *priv)
{
	struct sh_veu_dev *veu = priv;
	struct vb2_buffer *src_buf, *dst_buf;

	src_buf = v4l2_m2m_next_src_buf(veu->m2m_ctx);
	dst_buf = v4l2_m2m_next_dst_buf(veu->m2m_ctx);

	if (src_buf && dst_buf)
		sh_veu_process(veu, src_buf, dst_buf);
}

	/* ========== video ioctls ========== */

static bool sh_veu_is_streamer(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			       enum v4l2_buf_type type)
{
	return (type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
		veu_file == veu->capture) ||
		(type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
		 veu_file == veu->output);
}

static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq);

/*
 * It is not unusual to have video nodes open()ed multiple times. While some
 * V4L2 operations are non-intrusive, like querying formats and various
 * parameters, others, like setting formats, starting and stopping streaming,
 * queuing and dequeuing buffers, directly affect hardware configuration and /
 * or execution. This function verifies availability of the requested interface
 * and, if available, reserves it for the requesting user.
 */
static int sh_veu_stream_init(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			      enum v4l2_buf_type type)
{
	struct sh_veu_file **stream;

	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		stream = &veu->capture;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		stream = &veu->output;
		break;
	default:
		return -EINVAL;
	}

	if (*stream == veu_file)
		return 0;

	if (*stream)
		return -EBUSY;

	*stream = veu_file;

	return 0;
}

static int sh_veu_context_init(struct sh_veu_dev *veu)
{
	if (veu->m2m_ctx)
		return 0;

	veu->m2m_ctx = v4l2_m2m_ctx_init(veu->m2m_dev, veu,
					 sh_veu_queue_init);

	return PTR_ERR_OR_ZERO(veu->m2m_ctx);
}

static int sh_veu_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strlcpy(cap->driver, "sh-veu", sizeof(cap->driver));
	strlcpy(cap->card, "sh-mobile VEU", sizeof(cap->card));
	strlcpy(cap->bus_info, "platform:sh-veu", sizeof(cap->bus_info));
	cap->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;
	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;

	return 0;
}

static int sh_veu_enum_fmt(struct v4l2_fmtdesc *f, const int *fmt, int fmt_num)
{
	if (f->index >= fmt_num)
		return -EINVAL;

	strlcpy(f->description, sh_veu_fmt[fmt[f->index]].name, sizeof(f->description));
	f->pixelformat = sh_veu_fmt[fmt[f->index]].fourcc;
	return 0;
}

static int sh_veu_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_out, ARRAY_SIZE(sh_veu_fmt_out));
}

static int sh_veu_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_in, ARRAY_SIZE(sh_veu_fmt_in));
}

static struct sh_veu_vfmt *sh_veu_get_vfmt(struct sh_veu_dev *veu,
					   enum v4l2_buf_type type)
{
	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		return &veu->vfmt_out;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		return &veu->vfmt_in;
	default:
		return NULL;
	}
}

static int sh_veu_g_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;

	vfmt = sh_veu_get_vfmt(veu, f->type);

	pix->width = vfmt->frame.width;
	pix->height = vfmt->frame.height;
	pix->field = V4L2_FIELD_NONE;
	pix->pixelformat = vfmt->fmt->fourcc;
	pix->colorspace = sh_veu_4cc2cspace(pix->pixelformat);
	pix->bytesperline = vfmt->bytesperline;
	pix->sizeimage = vfmt->bytesperline * pix->height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;
	dev_dbg(veu->dev, "%s(): type: %d, size %u @ %ux%u, fmt %x\n", __func__,
		f->type, pix->sizeimage, pix->width, pix->height, pix->pixelformat);

	return 0;
}

static int sh_veu_g_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}

static int sh_veu_g_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}

static int sh_veu_try_fmt(struct v4l2_format *f, const struct sh_veu_format *fmt)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	unsigned int y_bytes_used;

	/*
	 * The V4L2 specification suggests that the driver should correct the
	 * format struct if any of the dimensions is unsupported
	 */
	switch (pix->field) {
	default:
	case V4L2_FIELD_ANY:
		pix->field = V4L2_FIELD_NONE;
		/* fall through: continue handling V4L2_FIELD_NONE */
	case V4L2_FIELD_NONE:
		break;
	}

	v4l_bound_align_image(&pix->width, MIN_W, MAX_W, ALIGN_W,
			      &pix->height, MIN_H, MAX_H, 0, 0);

	y_bytes_used = (pix->width * fmt->ydepth) >> 3;

	if (pix->bytesperline < y_bytes_used)
		pix->bytesperline = y_bytes_used;
	pix->sizeimage = pix->height * pix->bytesperline * fmt->depth / fmt->ydepth;

	pix->pixelformat = fmt->fourcc;
	pix->colorspace = sh_veu_4cc2cspace(pix->pixelformat);

	pr_debug("%s(): type: %d, size %u\n", __func__, f->type, pix->sizeimage);

	return 0;
}

static const struct sh_veu_format *sh_veu_find_fmt(const struct v4l2_format *f)
{
	const int *fmt;
	int i, n, dflt;

	pr_debug("%s(%d;%d)\n", __func__, f->type, f->fmt.pix.field);

	switch (f->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		fmt = sh_veu_fmt_out;
		n = ARRAY_SIZE(sh_veu_fmt_out);
		dflt = DEFAULT_OUT_FMTIDX;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
	default:
		fmt = sh_veu_fmt_in;
		n = ARRAY_SIZE(sh_veu_fmt_in);
		dflt = DEFAULT_IN_FMTIDX;
		break;
	}

	for (i = 0; i < n; i++)
		if (sh_veu_fmt[fmt[i]].fourcc == f->fmt.pix.pixelformat)
			return &sh_veu_fmt[fmt[i]];

	return &sh_veu_fmt[dflt];
}

static int sh_veu_try_fmt_vid_cap(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);
	if (!fmt)
		/* wrong buffer type */
		return -EINVAL;

	return sh_veu_try_fmt(f, fmt);
}

static int sh_veu_try_fmt_vid_out(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);
	if (!fmt)
		/* wrong buffer type */
		return -EINVAL;

	return sh_veu_try_fmt(f, fmt);
}

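/*
 * Calculate the byte offsets of the Y and C planes inside the buffer for the
 * current frame rectangle; sh_veu_process() adds them to the DMA address of
 * each queued buffer.
 */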
static void sh_veu_colour_offset(struct sh_veu_dev *veu, struct sh_veu_vfmt *vfmt)
{
	/* dst_left and dst_top validity will be verified in CROP / COMPOSE */
	unsigned int left = vfmt->frame.left & ~0x03;
	unsigned int top = vfmt->frame.top;
	dma_addr_t offset = ((left * veu->vfmt_out.fmt->depth) >> 3) +
		top * veu->vfmt_out.bytesperline;
	unsigned int y_line;

	vfmt->offset_y = offset;

	switch (vfmt->fmt->fourcc) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		y_line = ALIGN(vfmt->frame.width, 16);
		vfmt->offset_c = offset + y_line * vfmt->frame.height;
		break;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		vfmt->offset_c = 0;
		break;
	default:
		BUG();
	}
}

static int sh_veu_s_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;
	struct vb2_queue *vq;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	vq = v4l2_m2m_get_vq(veu->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&veu_file->veu_dev->v4l2_dev, "%s queue busy\n", __func__);
		return -EBUSY;
	}

	vfmt = sh_veu_get_vfmt(veu, f->type);
	/* called after try_fmt(), hence vfmt != NULL. Implicit BUG_ON() below */

	vfmt->fmt = sh_veu_find_fmt(f);
	/* vfmt->fmt != NULL following the same argument as above */
	vfmt->frame.width = pix->width;
	vfmt->frame.height = pix->height;
	vfmt->bytesperline = pix->bytesperline;

	sh_veu_colour_offset(veu, vfmt);

	/*
	 * We could also verify and require configuration only if any parameters
	 * actually have changed, but it is unlikely that the user requests the
	 * same configuration several times without closing the device.
	 */
	veu_file->cfg_needed = true;

	dev_dbg(veu->dev,
		"Setting format for type %d, wxh: %dx%d, fmt: %x\n",
		f->type, pix->width, pix->height, vfmt->fmt->fourcc);

	return 0;
}

static int sh_veu_s_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_cap(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}

static int sh_veu_s_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_out(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}

static int sh_veu_reqbufs(struct file *file, void *priv,
			  struct v4l2_requestbuffers *reqbufs)
{
	struct sh_veu_file *veu_file = priv;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	ret = sh_veu_stream_init(veu, veu_file, reqbufs->type);
	if (ret < 0)
		return ret;

	return v4l2_m2m_reqbufs(file, veu->m2m_ctx, reqbufs);
}

static int sh_veu_querybuf(struct file *file, void *priv,
			   struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_querybuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

static int sh_veu_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_qbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

static int sh_veu_dqbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_dqbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

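/*
 * The resize factor written to VEU_RFCR by sh_veu_scale_v() and
 * sh_veu_scale_h() below is a 4.12 fixed-point value: the integer part
 * ("mant") occupies bits 15:12 and the 1/4096 fraction ("frac") bits 11:0 of
 * the respective half-word, approximating size_in / size_out. The "rep"
 * (repeat) value is only used for the special VEU2H up-scaling ratios.
 */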
static void sh_veu_calc_scale(struct sh_veu_dev *veu,
			      int size_in, int size_out, int crop_out,
			      u32 *mant, u32 *frac, u32 *rep)
{
	u32 fixpoint;

	/* calculate FRAC and MANT */
	*rep = *mant = *frac = 0;

	if (size_in == size_out) {
		if (crop_out != size_out)
			*mant = 1;	/* needed for cropping */
		return;
	}

	/* VEU2H special upscale */
	if (veu->is_2h && size_out > size_in) {
		u32 fixpoint = (4096 * size_in) / size_out;
		*mant = fixpoint / 4096;
		*frac = (fixpoint - (*mant * 4096)) & ~0x07;

		switch (*frac) {
		case 0x800:
			*rep = 1;
			break;
		case 0x400:
			*rep = 3;
			break;
		case 0x200:
			*rep = 7;
			break;
		}
		if (*rep)
			return;
	}

	fixpoint = (4096 * (size_in - 1)) / (size_out + 1);
	*mant = fixpoint / 4096;
	*frac = fixpoint - (*mant * 4096);

	if (*frac & 0x07) {
		/*
		 * FIXME: do we really have to round down twice in the
		 * up-scaling case?
		 */
		*frac &= ~0x07;
		if (size_out > size_in)
			*frac -= 8;	/* round down if scaling up */
		else
			*frac += 8;	/* round up if scaling down */
	}
}

static unsigned long sh_veu_scale_v(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff0000) |
		(((mant << 12) | frac) << 16);

	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff0000) |
		(((rep << 12) | crop_out) << 16);

	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}

static unsigned long sh_veu_scale_h(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff) |
		(mant << 12) | frac;

	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff) |
		(rep << 12) | crop_out;

	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}

static void sh_veu_configure(struct sh_veu_dev *veu)
{
	u32 src_width, src_stride, src_height;
	u32 dst_width, dst_stride, dst_height;
	u32 real_w, real_h;

	/* reset VEU */
	sh_veu_reg_write(veu, VEU_BSRR, 0x100);

	src_width = veu->vfmt_in.frame.width;
	src_height = veu->vfmt_in.frame.height;
	src_stride = ALIGN(veu->vfmt_in.frame.width, 16);

	dst_width = real_w = veu->vfmt_out.frame.width;
	dst_height = real_h = veu->vfmt_out.frame.height;
	/* The datasheet is unclear on whether this is always a number of bytes */
	dst_stride = veu->vfmt_out.bytesperline;

	/*
	 * So far real_w == dst_width && real_h == dst_height, but it wasn't
	 * necessarily the case in the original vidix driver, so it may change
	 * here in the future too.
	 */
	src_width = sh_veu_scale_h(veu, src_width, real_w, dst_width);
	src_height = sh_veu_scale_v(veu, src_height, real_h, dst_height);

	sh_veu_reg_write(veu, VEU_SWR, src_stride);
	sh_veu_reg_write(veu, VEU_SSR, src_width | (src_height << 16));
	sh_veu_reg_write(veu, VEU_BSSR, 0);	/* not using bundle mode */

	sh_veu_reg_write(veu, VEU_EDWR, dst_stride);
	sh_veu_reg_write(veu, VEU_DACR, 0);	/* unused for RGB */

	sh_veu_reg_write(veu, VEU_SWPR, 0x67);
	sh_veu_reg_write(veu, VEU_TRCR, (6 << 16) | (0 << 14) | 2 | 4);

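	/*
	 * On VEU2H the YCbCr -> RGB colour-conversion matrix coefficients and
	 * offset are loaded explicitly (the MCRxx registers hold fixed-point
	 * matrix coefficients); other VEU variants presumably rely on their
	 * hardware defaults.
	 */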
	if (veu->is_2h) {
		sh_veu_reg_write(veu, VEU_MCR00, 0x0cc5);
		sh_veu_reg_write(veu, VEU_MCR01, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR02, 0x0000);

		sh_veu_reg_write(veu, VEU_MCR10, 0x397f);
		sh_veu_reg_write(veu, VEU_MCR11, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR12, 0x3ccd);

		sh_veu_reg_write(veu, VEU_MCR20, 0x0000);
		sh_veu_reg_write(veu, VEU_MCR21, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR22, 0x1023);

		sh_veu_reg_write(veu, VEU_COFFR, 0x00800010);
	}
}

static int sh_veu_streamon(struct file *file, void *priv,
			   enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	if (veu_file->cfg_needed) {
		struct sh_veu_dev *veu = veu_file->veu_dev;
		veu_file->cfg_needed = false;
		sh_veu_configure(veu_file->veu_dev);
		veu->xaction = 0;
		veu->aborting = false;
	}

	return v4l2_m2m_streamon(file, veu_file->veu_dev->m2m_ctx, type);
}

static int sh_veu_streamoff(struct file *file, void *priv,
			    enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	return v4l2_m2m_streamoff(file, veu_file->veu_dev->m2m_ctx, type);
}

static const struct v4l2_ioctl_ops sh_veu_ioctl_ops = {
	.vidioc_querycap = sh_veu_querycap,

	.vidioc_enum_fmt_vid_cap = sh_veu_enum_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap = sh_veu_g_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap = sh_veu_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap = sh_veu_s_fmt_vid_cap,

	.vidioc_enum_fmt_vid_out = sh_veu_enum_fmt_vid_out,
	.vidioc_g_fmt_vid_out = sh_veu_g_fmt_vid_out,
	.vidioc_try_fmt_vid_out = sh_veu_try_fmt_vid_out,
	.vidioc_s_fmt_vid_out = sh_veu_s_fmt_vid_out,

	.vidioc_reqbufs = sh_veu_reqbufs,
	.vidioc_querybuf = sh_veu_querybuf,

	.vidioc_qbuf = sh_veu_qbuf,
	.vidioc_dqbuf = sh_veu_dqbuf,

	.vidioc_streamon = sh_veu_streamon,
	.vidioc_streamoff = sh_veu_streamoff,
};

	/* ========== Queue operations ========== */

static int sh_veu_queue_setup(struct vb2_queue *vq,
			      const void *parg,
			      unsigned int *nbuffers, unsigned int *nplanes,
			      unsigned int sizes[], void *alloc_ctxs[])
{
	const struct v4l2_format *f = parg;
	struct sh_veu_dev *veu = vb2_get_drv_priv(vq);
	struct sh_veu_vfmt *vfmt;
	unsigned int size, count = *nbuffers;

	if (f) {
		const struct v4l2_pix_format *pix = &f->fmt.pix;
		const struct sh_veu_format *fmt = sh_veu_find_fmt(f);
		struct v4l2_format ftmp = *f;

		if (fmt->fourcc != pix->pixelformat)
			return -EINVAL;
		sh_veu_try_fmt(&ftmp, fmt);
		if (ftmp.fmt.pix.width != pix->width ||
		    ftmp.fmt.pix.height != pix->height)
			return -EINVAL;
		size = pix->bytesperline ? pix->bytesperline * pix->height * fmt->depth / fmt->ydepth :
			pix->width * pix->height * fmt->depth / fmt->ydepth;
	} else {
		vfmt = sh_veu_get_vfmt(veu, vq->type);
		size = vfmt->bytesperline * vfmt->frame.height * vfmt->fmt->depth / vfmt->fmt->ydepth;
	}

	if (count < 2)
		*nbuffers = count = 2;

	if (size * count > VIDEO_MEM_LIMIT) {
		count = VIDEO_MEM_LIMIT / size;
		*nbuffers = count;
	}

	*nplanes = 1;
	sizes[0] = size;
	alloc_ctxs[0] = veu->alloc_ctx;

	dev_dbg(veu->dev, "get %d buffer(s) of size %d each.\n", count, size);

	return 0;
}

static int sh_veu_buf_prepare(struct vb2_buffer *vb)
{
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	struct sh_veu_vfmt *vfmt;
	unsigned int sizeimage;

	vfmt = sh_veu_get_vfmt(veu, vb->vb2_queue->type);
	sizeimage = vfmt->bytesperline * vfmt->frame.height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;

	if (vb2_plane_size(vb, 0) < sizeimage) {
		dev_dbg(veu->dev, "%s data will not fit into plane (%lu < %u)\n",
			__func__, vb2_plane_size(vb, 0), sizeimage);
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, sizeimage);

	return 0;
}

static void sh_veu_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	dev_dbg(veu->dev, "%s(%d)\n", __func__, vb->type);
	v4l2_m2m_buf_queue(veu->m2m_ctx, vbuf);
}

static const struct vb2_ops sh_veu_qops = {
	.queue_setup = sh_veu_queue_setup,
	.buf_prepare = sh_veu_buf_prepare,
	.buf_queue = sh_veu_buf_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
};

static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq)
{
	struct sh_veu_dev *veu = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	src_vq->drv_priv = veu;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &sh_veu_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->lock = &veu->fop_lock;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	ret = vb2_queue_init(src_vq);
	if (ret < 0)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	dst_vq->drv_priv = veu;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &sh_veu_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->lock = &veu->fop_lock;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	return vb2_queue_init(dst_vq);
}

	/* ========== File operations ========== */

static int sh_veu_open(struct file *file)
{
	struct sh_veu_dev *veu = video_drvdata(file);
	struct sh_veu_file *veu_file;

	veu_file = kzalloc(sizeof(*veu_file), GFP_KERNEL);
	if (!veu_file)
		return -ENOMEM;

	veu_file->veu_dev = veu;
	veu_file->cfg_needed = true;

	file->private_data = veu_file;

	pm_runtime_get_sync(veu->dev);

	dev_dbg(veu->dev, "Created instance %p\n", veu_file);

	return 0;
}

static int sh_veu_release(struct file *file)
{
	struct sh_veu_dev *veu = video_drvdata(file);
	struct sh_veu_file *veu_file = file->private_data;

	dev_dbg(veu->dev, "Releasing instance %p\n", veu_file);

	if (veu_file == veu->capture) {
		veu->capture = NULL;
		vb2_queue_release(v4l2_m2m_get_vq(veu->m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE));
	}

	if (veu_file == veu->output) {
		veu->output = NULL;
		vb2_queue_release(v4l2_m2m_get_vq(veu->m2m_ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT));
	}

	if (!veu->output && !veu->capture && veu->m2m_ctx) {
		v4l2_m2m_ctx_release(veu->m2m_ctx);
		veu->m2m_ctx = NULL;
	}

	pm_runtime_put(veu->dev);

	kfree(veu_file);

	return 0;
}

static unsigned int sh_veu_poll(struct file *file,
				struct poll_table_struct *wait)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_poll(file, veu_file->veu_dev->m2m_ctx, wait);
}

static int sh_veu_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_mmap(file, veu_file->veu_dev->m2m_ctx, vma);
}

static const struct v4l2_file_operations sh_veu_fops = {
	.owner = THIS_MODULE,
	.open = sh_veu_open,
	.release = sh_veu_release,
	.poll = sh_veu_poll,
	.unlocked_ioctl = video_ioctl2,
	.mmap = sh_veu_mmap,
};

static const struct video_device sh_veu_videodev = {
	.name = "sh-veu",
	.fops = &sh_veu_fops,
	.ioctl_ops = &sh_veu_ioctl_ops,
	.minor = -1,
	.release = video_device_release_empty,
	.vfl_dir = VFL_DIR_M2M,
};

static const struct v4l2_m2m_ops sh_veu_m2m_ops = {
	.device_run = sh_veu_device_run,
	.job_abort = sh_veu_job_abort,
};

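/*
 * Interrupt handling is split in two: sh_veu_isr() is the hard IRQ handler,
 * which halts the engine, acknowledges the event and marks the current
 * buffers as done, while sh_veu_bh() runs as the threaded handler and either
 * finishes the mem2mem job or, if the transaction is not yet complete, starts
 * the next run.
 */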
static irqreturn_t sh_veu_bh(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;

	if (veu->xaction == MEM2MEM_DEF_TRANSLEN || veu->aborting) {
		v4l2_m2m_job_finish(veu->m2m_dev, veu->m2m_ctx);
		veu->xaction = 0;
	} else {
		sh_veu_device_run(veu);
	}

	return IRQ_HANDLED;
}

static irqreturn_t sh_veu_isr(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;
	struct vb2_v4l2_buffer *dst;
	struct vb2_v4l2_buffer *src;
	u32 status = sh_veu_reg_read(veu, VEU_EVTR);

	/* bundle read mode not used */
	if (!(status & 1))
		return IRQ_NONE;

	/* disable interrupt in VEU */
	sh_veu_reg_write(veu, VEU_EIER, 0);
	/* halt operation */
	sh_veu_reg_write(veu, VEU_STR, 0);
	/* ack int, write 0 to clear bits */
	sh_veu_reg_write(veu, VEU_EVTR, status & ~1);

	/* conversion completed */
	dst = v4l2_m2m_dst_buf_remove(veu->m2m_ctx);
	src = v4l2_m2m_src_buf_remove(veu->m2m_ctx);
	if (!src || !dst)
		return IRQ_NONE;

	dst->timestamp = src->timestamp;
	dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->flags |=
		src->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->timecode = src->timecode;

	spin_lock(&veu->lock);
	v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
	v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
	spin_unlock(&veu->lock);

	veu->xaction++;

	return IRQ_WAKE_THREAD;
}

static int sh_veu_probe(struct platform_device *pdev)
{
	struct sh_veu_dev *veu;
	struct resource *reg_res;
	struct video_device *vdev;
	int irq, ret;

	reg_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	irq = platform_get_irq(pdev, 0);

	if (!reg_res || irq <= 0) {
		dev_err(&pdev->dev, "Insufficient VEU platform information.\n");
		return -ENODEV;
	}

	veu = devm_kzalloc(&pdev->dev, sizeof(*veu), GFP_KERNEL);
	if (!veu)
		return -ENOMEM;

	veu->is_2h = resource_size(reg_res) == 0x22c;

	veu->base = devm_ioremap_resource(&pdev->dev, reg_res);
	if (IS_ERR(veu->base))
		return PTR_ERR(veu->base);

	ret = devm_request_threaded_irq(&pdev->dev, irq, sh_veu_isr, sh_veu_bh,
					0, "veu", veu);
	if (ret < 0)
		return ret;

	ret = v4l2_device_register(&pdev->dev, &veu->v4l2_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "Error registering v4l2 device\n");
		return ret;
	}

	vdev = &veu->vdev;

	veu->alloc_ctx = vb2_dma_contig_init_ctx(&pdev->dev);
	if (IS_ERR(veu->alloc_ctx)) {
		ret = PTR_ERR(veu->alloc_ctx);
		goto einitctx;
	}

	*vdev = sh_veu_videodev;
	vdev->v4l2_dev = &veu->v4l2_dev;
	spin_lock_init(&veu->lock);
	mutex_init(&veu->fop_lock);
	vdev->lock = &veu->fop_lock;

	video_set_drvdata(vdev, veu);

	veu->dev = &pdev->dev;
	veu->vfmt_out = DEFAULT_OUT_VFMT;
	veu->vfmt_in = DEFAULT_IN_VFMT;

	veu->m2m_dev = v4l2_m2m_init(&sh_veu_m2m_ops);
	if (IS_ERR(veu->m2m_dev)) {
		ret = PTR_ERR(veu->m2m_dev);
		v4l2_err(&veu->v4l2_dev, "Failed to init mem2mem device: %d\n", ret);
		goto em2minit;
	}

	pm_runtime_enable(&pdev->dev);
	pm_runtime_resume(&pdev->dev);

	ret = video_register_device(vdev, VFL_TYPE_GRABBER, -1);
	pm_runtime_suspend(&pdev->dev);
	if (ret < 0)
		goto evidreg;

	return ret;

evidreg:
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
em2minit:
	vb2_dma_contig_cleanup_ctx(veu->alloc_ctx);
einitctx:
	v4l2_device_unregister(&veu->v4l2_dev);
	return ret;
}

static int sh_veu_remove(struct platform_device *pdev)
{
	struct v4l2_device *v4l2_dev = platform_get_drvdata(pdev);
	struct sh_veu_dev *veu = container_of(v4l2_dev,
					      struct sh_veu_dev, v4l2_dev);

	video_unregister_device(&veu->vdev);
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
	vb2_dma_contig_cleanup_ctx(veu->alloc_ctx);
	v4l2_device_unregister(&veu->v4l2_dev);

	return 0;
}

static struct platform_driver __refdata sh_veu_pdrv = {
	.remove = sh_veu_remove,
	.driver = {
		.name = "sh_veu",
	},
};

module_platform_driver_probe(sh_veu_pdrv, sh_veu_probe);

MODULE_DESCRIPTION("sh-mobile VEU mem2mem driver");
MODULE_AUTHOR("Guennadi Liakhovetski, <g.liakhovetski@gmx.de>");
MODULE_LICENSE("GPL v2");