/*
 * sh-mobile VEU mem2mem driver
 *
 * Copyright (C) 2012 Renesas Electronics Corporation
 * Author: Guennadi Liakhovetski, <g.liakhovetski@gmx.de>
 * Copyright (C) 2008 Magnus Damm
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the version 2 of the GNU General Public License as
 * published by the Free Software Foundation
 */

#include <linux/err.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/videodev2.h>

#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-image-sizes.h>
#include <media/videobuf2-dma-contig.h>

#define VEU_STR		0x00		/* start register */
#define VEU_SWR		0x10		/* src: line length */
#define VEU_SSR		0x14		/* src: image size */
#define VEU_SAYR	0x18		/* src: y/rgb plane address */
#define VEU_SACR	0x1c		/* src: c plane address */
#define VEU_BSSR	0x20		/* bundle mode register */
#define VEU_EDWR	0x30		/* dst: line length */
#define VEU_DAYR	0x34		/* dst: y/rgb plane address */
#define VEU_DACR	0x38		/* dst: c plane address */
#define VEU_TRCR	0x50		/* transform control */
#define VEU_RFCR	0x54		/* resize scale */
#define VEU_RFSR	0x58		/* resize clip */
#define VEU_ENHR	0x5c		/* enhance */
#define VEU_FMCR	0x70		/* filter mode */
#define VEU_VTCR	0x74		/* lowpass vertical */
#define VEU_HTCR	0x78		/* lowpass horizontal */
#define VEU_APCR	0x80		/* color match */
#define VEU_ECCR	0x84		/* color replace */
#define VEU_AFXR	0x90		/* fixed mode */
#define VEU_SWPR	0x94		/* swap */
#define VEU_EIER	0xa0		/* interrupt mask */
#define VEU_EVTR	0xa4		/* interrupt event */
#define VEU_STAR	0xb0		/* status */
#define VEU_BSRR	0xb4		/* reset */

#define VEU_MCR00	0x200		/* color conversion matrix coefficient 00 */
#define VEU_MCR01	0x204		/* color conversion matrix coefficient 01 */
#define VEU_MCR02	0x208		/* color conversion matrix coefficient 02 */
#define VEU_MCR10	0x20c		/* color conversion matrix coefficient 10 */
#define VEU_MCR11	0x210		/* color conversion matrix coefficient 11 */
#define VEU_MCR12	0x214		/* color conversion matrix coefficient 12 */
#define VEU_MCR20	0x218		/* color conversion matrix coefficient 20 */
#define VEU_MCR21	0x21c		/* color conversion matrix coefficient 21 */
#define VEU_MCR22	0x220		/* color conversion matrix coefficient 22 */
#define VEU_COFFR	0x224		/* color conversion offset */
#define VEU_CBR		0x228		/* color conversion clip */

/*
 * 4092x4092 max size is the normal case. In some cases it can be reduced to
 * 2048x2048, in other cases it can be 4092x8188 or even 8188x8188.
 */
#define MAX_W		4092
#define MAX_H		4092
#define MIN_W		8
#define MIN_H		8
#define ALIGN_W		4

/* 3 buffers of 2048 x 1536 - 3 megapixels @ 16bpp */
#define VIDEO_MEM_LIMIT	ALIGN(2048 * 1536 * 2 * 3, 1024 * 1024)
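/*
 * This works out to 18 MiB; sh_veu_queue_setup() below caps the number of
 * buffers so that a queue never exceeds this limit.
 */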

#define MEM2MEM_DEF_TRANSLEN	1

struct sh_veu_dev;

struct sh_veu_file {
	struct sh_veu_dev *veu_dev;
	bool cfg_needed;
};

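/*
 * Note on the two depth fields below: "ydepth" is the bit depth of the Y (or
 * packed RGB) plane alone and is what bytesperline is derived from, while
 * "depth" is the total number of bits per pixel over all planes, so image
 * sizes are computed as bytesperline * height * depth / ydepth.
 */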
struct sh_veu_format {
	char *name;
	u32 fourcc;
	unsigned int depth;
	unsigned int ydepth;
};

/* video data format */
struct sh_veu_vfmt {
	/* frame size and position; left/top are used as the crop offset */
	struct v4l2_rect frame;
	unsigned int bytesperline;
	unsigned int offset_y;
	unsigned int offset_c;
	const struct sh_veu_format *fmt;
};

struct sh_veu_dev {
	struct v4l2_device v4l2_dev;
	struct video_device vdev;
	struct v4l2_m2m_dev *m2m_dev;
	struct device *dev;
	struct v4l2_m2m_ctx *m2m_ctx;
	struct sh_veu_vfmt vfmt_out;
	struct sh_veu_vfmt vfmt_in;
	/* Only single user per direction so far */
	struct sh_veu_file *capture;
	struct sh_veu_file *output;
	struct mutex fop_lock;
	void __iomem *base;
	spinlock_t lock;
	bool is_2h;
	unsigned int xaction;
	bool aborting;
};

enum sh_veu_fmt_idx {
	SH_VEU_FMT_NV12,
	SH_VEU_FMT_NV16,
	SH_VEU_FMT_NV24,
	SH_VEU_FMT_RGB332,
	SH_VEU_FMT_RGB444,
	SH_VEU_FMT_RGB565,
	SH_VEU_FMT_RGB666,
	SH_VEU_FMT_RGB24,
};

#define DEFAULT_IN_WIDTH	VGA_WIDTH
#define DEFAULT_IN_HEIGHT	VGA_HEIGHT
#define DEFAULT_IN_FMTIDX	SH_VEU_FMT_NV12
#define DEFAULT_OUT_WIDTH	VGA_WIDTH
#define DEFAULT_OUT_HEIGHT	VGA_HEIGHT
#define DEFAULT_OUT_FMTIDX	SH_VEU_FMT_RGB565

/*
 * Alignment: Y-plane should be 4-byte aligned for NV12 and NV16, and 8-byte
 * aligned for NV24.
 */
static const struct sh_veu_format sh_veu_fmt[] = {
	[SH_VEU_FMT_NV12]   = { .ydepth = 8, .depth = 12, .name = "NV12", .fourcc = V4L2_PIX_FMT_NV12 },
	[SH_VEU_FMT_NV16]   = { .ydepth = 8, .depth = 16, .name = "NV16", .fourcc = V4L2_PIX_FMT_NV16 },
	[SH_VEU_FMT_NV24]   = { .ydepth = 8, .depth = 24, .name = "NV24", .fourcc = V4L2_PIX_FMT_NV24 },
	[SH_VEU_FMT_RGB332] = { .ydepth = 8, .depth = 8, .name = "RGB332", .fourcc = V4L2_PIX_FMT_RGB332 },
	[SH_VEU_FMT_RGB444] = { .ydepth = 16, .depth = 16, .name = "RGB444", .fourcc = V4L2_PIX_FMT_RGB444 },
	[SH_VEU_FMT_RGB565] = { .ydepth = 16, .depth = 16, .name = "RGB565", .fourcc = V4L2_PIX_FMT_RGB565 },
	[SH_VEU_FMT_RGB666] = { .ydepth = 32, .depth = 32, .name = "BGR666", .fourcc = V4L2_PIX_FMT_BGR666 },
	[SH_VEU_FMT_RGB24]  = { .ydepth = 24, .depth = 24, .name = "RGB24", .fourcc = V4L2_PIX_FMT_RGB24 },
};

#define DEFAULT_IN_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_IN_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_IN_FMTIDX], \
}

#define DEFAULT_OUT_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_OUT_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_OUT_FMTIDX], \
}
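/*
 * With the VGA defaults above this gives bytesperline = 640 for the NV12
 * input (ydepth 8) and 1280 for the RGB565 output (ydepth 16).
 */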

/*
 * TODO: add support for further output formats:
 *	SH_VEU_FMT_NV12,
 *	SH_VEU_FMT_NV16,
 *	SH_VEU_FMT_NV24,
 *	SH_VEU_FMT_RGB332,
 *	SH_VEU_FMT_RGB444,
 *	SH_VEU_FMT_RGB666,
 *	SH_VEU_FMT_RGB24,
 */

static const int sh_veu_fmt_out[] = {
	SH_VEU_FMT_RGB565,
};

/*
 * TODO: add support for further input formats:
 *	SH_VEU_FMT_NV16,
 *	SH_VEU_FMT_NV24,
 *	SH_VEU_FMT_RGB565,
 *	SH_VEU_FMT_RGB666,
 *	SH_VEU_FMT_RGB24,
 */
static const int sh_veu_fmt_in[] = {
	SH_VEU_FMT_NV12,
};

static enum v4l2_colorspace sh_veu_4cc2cspace(u32 fourcc)
{
	switch (fourcc) {
	default:
		BUG();
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		return V4L2_COLORSPACE_SMPTE170M;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		return V4L2_COLORSPACE_SRGB;
	}
}

static u32 sh_veu_reg_read(struct sh_veu_dev *veu, unsigned int reg)
{
	return ioread32(veu->base + reg);
}

static void sh_veu_reg_write(struct sh_veu_dev *veu, unsigned int reg,
			     u32 value)
{
	iowrite32(value, veu->base + reg);
}

	/* ========== mem2mem callbacks ========== */

static void sh_veu_job_abort(void *priv)
{
	struct sh_veu_dev *veu = priv;

	/* Will cancel the transaction in the next interrupt handler */
	veu->aborting = true;
}

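/*
 * Program the Y/RGB and C plane addresses of one source/destination buffer
 * pair, then start the transfer and enable the completion interrupt.
 */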
static void sh_veu_process(struct sh_veu_dev *veu,
			   struct vb2_buffer *src_buf,
			   struct vb2_buffer *dst_buf)
{
	dma_addr_t addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);

	sh_veu_reg_write(veu, VEU_DAYR, addr + veu->vfmt_out.offset_y);
	sh_veu_reg_write(veu, VEU_DACR, veu->vfmt_out.offset_c ?
			 addr + veu->vfmt_out.offset_c : 0);
	dev_dbg(veu->dev, "%s(): dst base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_out.offset_y, veu->vfmt_out.offset_c);

	addr = vb2_dma_contig_plane_dma_addr(src_buf, 0);
	sh_veu_reg_write(veu, VEU_SAYR, addr + veu->vfmt_in.offset_y);
	sh_veu_reg_write(veu, VEU_SACR, veu->vfmt_in.offset_c ?
			 addr + veu->vfmt_in.offset_c : 0);
	dev_dbg(veu->dev, "%s(): src base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_in.offset_y, veu->vfmt_in.offset_c);

	sh_veu_reg_write(veu, VEU_STR, 1);

	sh_veu_reg_write(veu, VEU_EIER, 1);	/* enable interrupt in VEU */
}

/**
 * sh_veu_device_run() - prepares and starts the device
 *
 * This will be called by the framework when it decides to schedule a
 * particular instance.
 */
static void sh_veu_device_run(void *priv)
{
	struct sh_veu_dev *veu = priv;
	struct vb2_buffer *src_buf, *dst_buf;

	src_buf = v4l2_m2m_next_src_buf(veu->m2m_ctx);
	dst_buf = v4l2_m2m_next_dst_buf(veu->m2m_ctx);

	if (src_buf && dst_buf)
		sh_veu_process(veu, src_buf, dst_buf);
}

	/* ========== video ioctls ========== */

static bool sh_veu_is_streamer(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			       enum v4l2_buf_type type)
{
	return (type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
		veu_file == veu->capture) ||
		(type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
		 veu_file == veu->output);
}

static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq);

/*
 * It is not unusual to have video nodes open()ed multiple times. While some
 * V4L2 operations are non-intrusive, like querying formats and various
 * parameters, others, like setting formats, starting and stopping streaming,
 * or queuing and dequeuing buffers, directly affect hardware configuration
 * and/or execution. This function verifies availability of the requested
 * interface and, if available, reserves it for the requesting user.
 */
static int sh_veu_stream_init(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			      enum v4l2_buf_type type)
{
	struct sh_veu_file **stream;

	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		stream = &veu->capture;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		stream = &veu->output;
		break;
	default:
		return -EINVAL;
	}

	if (*stream == veu_file)
		return 0;

	if (*stream)
		return -EBUSY;

	*stream = veu_file;

	return 0;
}

static int sh_veu_context_init(struct sh_veu_dev *veu)
{
	if (veu->m2m_ctx)
		return 0;

	veu->m2m_ctx = v4l2_m2m_ctx_init(veu->m2m_dev, veu,
					 sh_veu_queue_init);

	return PTR_ERR_OR_ZERO(veu->m2m_ctx);
}

static int sh_veu_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strlcpy(cap->driver, "sh-veu", sizeof(cap->driver));
	strlcpy(cap->card, "sh-mobile VEU", sizeof(cap->card));
	strlcpy(cap->bus_info, "platform:sh-veu", sizeof(cap->bus_info));
	cap->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;
	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;

	return 0;
}

static int sh_veu_enum_fmt(struct v4l2_fmtdesc *f, const int *fmt, int fmt_num)
{
	if (f->index >= fmt_num)
		return -EINVAL;

	strlcpy(f->description, sh_veu_fmt[fmt[f->index]].name, sizeof(f->description));
	f->pixelformat = sh_veu_fmt[fmt[f->index]].fourcc;
	return 0;
}

static int sh_veu_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_out, ARRAY_SIZE(sh_veu_fmt_out));
}

static int sh_veu_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_in, ARRAY_SIZE(sh_veu_fmt_in));
}

static struct sh_veu_vfmt *sh_veu_get_vfmt(struct sh_veu_dev *veu,
					   enum v4l2_buf_type type)
{
	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		return &veu->vfmt_out;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		return &veu->vfmt_in;
	default:
		return NULL;
	}
}

static int sh_veu_g_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;

	vfmt = sh_veu_get_vfmt(veu, f->type);

	pix->width = vfmt->frame.width;
	pix->height = vfmt->frame.height;
	pix->field = V4L2_FIELD_NONE;
	pix->pixelformat = vfmt->fmt->fourcc;
	pix->colorspace = sh_veu_4cc2cspace(pix->pixelformat);
	pix->bytesperline = vfmt->bytesperline;
	pix->sizeimage = vfmt->bytesperline * pix->height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;
	dev_dbg(veu->dev, "%s(): type: %d, size %u @ %ux%u, fmt %x\n", __func__,
		f->type, pix->sizeimage, pix->width, pix->height, pix->pixelformat);

	return 0;
}

static int sh_veu_g_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}

static int sh_veu_g_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}

static int sh_veu_try_fmt(struct v4l2_format *f, const struct sh_veu_format *fmt)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	unsigned int y_bytes_used;

	/*
	 * The V4L2 specification suggests that the driver should correct the
	 * format struct if any of the dimensions is unsupported.
	 */
	switch (pix->field) {
	default:
	case V4L2_FIELD_ANY:
		pix->field = V4L2_FIELD_NONE;
		/* fall through: continue handling V4L2_FIELD_NONE */
	case V4L2_FIELD_NONE:
		break;
	}

	v4l_bound_align_image(&pix->width, MIN_W, MAX_W, ALIGN_W,
			      &pix->height, MIN_H, MAX_H, 0, 0);

	y_bytes_used = (pix->width * fmt->ydepth) >> 3;

	if (pix->bytesperline < y_bytes_used)
		pix->bytesperline = y_bytes_used;
	pix->sizeimage = pix->height * pix->bytesperline * fmt->depth / fmt->ydepth;
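	/* e.g. for NV12 (depth 12, ydepth 8) this is 1.5 times the Y-plane size */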

	pix->pixelformat = fmt->fourcc;
	pix->colorspace = sh_veu_4cc2cspace(pix->pixelformat);

	pr_debug("%s(): type: %d, size %u\n", __func__, f->type, pix->sizeimage);

	return 0;
}

static const struct sh_veu_format *sh_veu_find_fmt(const struct v4l2_format *f)
{
	const int *fmt;
	int i, n, dflt;

	pr_debug("%s(%d;%d)\n", __func__, f->type, f->fmt.pix.field);

	switch (f->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		fmt = sh_veu_fmt_out;
		n = ARRAY_SIZE(sh_veu_fmt_out);
		dflt = DEFAULT_OUT_FMTIDX;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
	default:
		fmt = sh_veu_fmt_in;
		n = ARRAY_SIZE(sh_veu_fmt_in);
		dflt = DEFAULT_IN_FMTIDX;
		break;
	}

	for (i = 0; i < n; i++)
		if (sh_veu_fmt[fmt[i]].fourcc == f->fmt.pix.pixelformat)
			return &sh_veu_fmt[fmt[i]];

	return &sh_veu_fmt[dflt];
}

static int sh_veu_try_fmt_vid_cap(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);
	if (!fmt)
		/* wrong buffer type */
		return -EINVAL;

	return sh_veu_try_fmt(f, fmt);
}

static int sh_veu_try_fmt_vid_out(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);
	if (!fmt)
		/* wrong buffer type */
		return -EINVAL;

	return sh_veu_try_fmt(f, fmt);
}

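/*
 * For the semiplanar NV formats the CbCr plane immediately follows the
 * Y plane, so offset_c is offset_y plus the size of the Y plane, computed
 * with the line length rounded up to a multiple of 16 (matching the stride
 * programmed in sh_veu_configure()). Packed RGB formats have no C plane.
 */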
static void sh_veu_colour_offset(struct sh_veu_dev *veu, struct sh_veu_vfmt *vfmt)
{
	/* dst_left and dst_top validity will be verified in CROP / COMPOSE */
	unsigned int left = vfmt->frame.left & ~0x03;
	unsigned int top = vfmt->frame.top;
	dma_addr_t offset = ((left * veu->vfmt_out.fmt->depth) >> 3) +
		top * veu->vfmt_out.bytesperline;
	unsigned int y_line;

	vfmt->offset_y = offset;

	switch (vfmt->fmt->fourcc) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		y_line = ALIGN(vfmt->frame.width, 16);
		vfmt->offset_c = offset + y_line * vfmt->frame.height;
		break;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		vfmt->offset_c = 0;
		break;
	default:
		BUG();
	}
}

static int sh_veu_s_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;
	struct vb2_queue *vq;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	vq = v4l2_m2m_get_vq(veu->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&veu_file->veu_dev->v4l2_dev, "%s queue busy\n", __func__);
		return -EBUSY;
	}

	vfmt = sh_veu_get_vfmt(veu, f->type);
	/* called after try_fmt(), hence vfmt != NULL. Implicit BUG_ON() below */

	vfmt->fmt = sh_veu_find_fmt(f);
	/* vfmt->fmt != NULL following the same argument as above */
	vfmt->frame.width = pix->width;
	vfmt->frame.height = pix->height;
	vfmt->bytesperline = pix->bytesperline;

	sh_veu_colour_offset(veu, vfmt);

	/*
	 * We could verify this and only require reconfiguration if any
	 * parameters have actually changed, but it is unlikely that the user
	 * requests the same configuration several times without closing the
	 * device.
	 */
	veu_file->cfg_needed = true;

	dev_dbg(veu->dev,
		"Setting format for type %d, wxh: %dx%d, fmt: %x\n",
		f->type, pix->width, pix->height, vfmt->fmt->fourcc);

	return 0;
}

static int sh_veu_s_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_cap(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}

static int sh_veu_s_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_out(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}

static int sh_veu_reqbufs(struct file *file, void *priv,
			  struct v4l2_requestbuffers *reqbufs)
{
	struct sh_veu_file *veu_file = priv;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	ret = sh_veu_stream_init(veu, veu_file, reqbufs->type);
	if (ret < 0)
		return ret;

	return v4l2_m2m_reqbufs(file, veu->m2m_ctx, reqbufs);
}

static int sh_veu_querybuf(struct file *file, void *priv,
			   struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_querybuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

static int sh_veu_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_qbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

static int sh_veu_dqbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_dqbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

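/*
 * The resize ratio is programmed as a 4.12 fixed-point value: MANT holds the
 * integer part and FRAC the 12-bit fraction, rounded to a multiple of 8.
 * Worked example (illustration only, not from the datasheet): downscaling
 * 640 -> 320 pixels gives fixpoint = 4096 * 639 / 321 = 8153, i.e. MANT = 1
 * and FRAC = 0xfd9, which the rounding below turns into 0xfe0. REP is
 * apparently only set on VEU2H, for the exact 2x, 4x and 8x upscale ratios.
 */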
static void sh_veu_calc_scale(struct sh_veu_dev *veu,
			      int size_in, int size_out, int crop_out,
			      u32 *mant, u32 *frac, u32 *rep)
{
	u32 fixpoint;

	/* calculate FRAC and MANT */
	*rep = *mant = *frac = 0;

	if (size_in == size_out) {
		if (crop_out != size_out)
			*mant = 1;	/* needed for cropping */
		return;
	}

	/* VEU2H special upscale */
	if (veu->is_2h && size_out > size_in) {
		fixpoint = (4096 * size_in) / size_out;
		*mant = fixpoint / 4096;
		*frac = (fixpoint - (*mant * 4096)) & ~0x07;

		switch (*frac) {
		case 0x800:
			*rep = 1;
			break;
		case 0x400:
			*rep = 3;
			break;
		case 0x200:
			*rep = 7;
			break;
		}
		if (*rep)
			return;
	}

	fixpoint = (4096 * (size_in - 1)) / (size_out + 1);
	*mant = fixpoint / 4096;
	*frac = fixpoint - (*mant * 4096);

	if (*frac & 0x07) {
		/*
		 * FIXME: do we really have to round down twice in the
		 * up-scaling case?
		 */
		*frac &= ~0x07;
		if (size_out > size_in)
			*frac -= 8;	/* round down if scaling up */
		else
			*frac += 8;	/* round up if scaling down */
	}
}

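/*
 * RFCR (scale) and RFSR (clip) each hold the vertical parameters in their
 * upper 16 bits and the horizontal ones in the lower 16 bits; within each
 * half, bits 15..12 carry MANT (RFCR) or the repeat count (RFSR) and bits
 * 11..0 carry FRAC (RFCR) or the clip size (RFSR), as the shifts below show.
 */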
static unsigned long sh_veu_scale_v(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff0000) |
		(((mant << 12) | frac) << 16);

	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff0000) |
		(((rep << 12) | crop_out) << 16);

	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}

static unsigned long sh_veu_scale_h(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff) |
		(mant << 12) | frac;

	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff) |
		(rep << 12) | crop_out;

	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}

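/*
 * One-off setup performed on the first STREAMON after a new format has been
 * set: reset the block, program the geometry, strides and resize unit, select
 * the fixed swap and transform modes and, on VEU2H only, load the colour
 * conversion matrix and offset, presumably for the YCbCr -> RGB output path.
 */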
static void sh_veu_configure(struct sh_veu_dev *veu)
{
	u32 src_width, src_stride, src_height;
	u32 dst_width, dst_stride, dst_height;
	u32 real_w, real_h;

	/* reset VEU */
	sh_veu_reg_write(veu, VEU_BSRR, 0x100);

	src_width = veu->vfmt_in.frame.width;
	src_height = veu->vfmt_in.frame.height;
	src_stride = ALIGN(veu->vfmt_in.frame.width, 16);

	dst_width = real_w = veu->vfmt_out.frame.width;
	dst_height = real_h = veu->vfmt_out.frame.height;
	/* The datasheet is unclear on whether this is always a byte count */
	dst_stride = veu->vfmt_out.bytesperline;

	/*
	 * So far real_w == dst_width && real_h == dst_height, but this wasn't
	 * necessarily the case in the original vidix driver, so it may change
	 * here in the future too.
	 */
	src_width = sh_veu_scale_h(veu, src_width, real_w, dst_width);
	src_height = sh_veu_scale_v(veu, src_height, real_h, dst_height);

	sh_veu_reg_write(veu, VEU_SWR, src_stride);
	sh_veu_reg_write(veu, VEU_SSR, src_width | (src_height << 16));
	sh_veu_reg_write(veu, VEU_BSSR, 0);	/* not using bundle mode */

	sh_veu_reg_write(veu, VEU_EDWR, dst_stride);
	sh_veu_reg_write(veu, VEU_DACR, 0);	/* unused for RGB */

	sh_veu_reg_write(veu, VEU_SWPR, 0x67);
	sh_veu_reg_write(veu, VEU_TRCR, (6 << 16) | (0 << 14) | 2 | 4);

	if (veu->is_2h) {
		sh_veu_reg_write(veu, VEU_MCR00, 0x0cc5);
		sh_veu_reg_write(veu, VEU_MCR01, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR02, 0x0000);

		sh_veu_reg_write(veu, VEU_MCR10, 0x397f);
		sh_veu_reg_write(veu, VEU_MCR11, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR12, 0x3ccd);

		sh_veu_reg_write(veu, VEU_MCR20, 0x0000);
		sh_veu_reg_write(veu, VEU_MCR21, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR22, 0x1023);

		sh_veu_reg_write(veu, VEU_COFFR, 0x00800010);
	}
}

static int sh_veu_streamon(struct file *file, void *priv,
			   enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	if (veu_file->cfg_needed) {
		struct sh_veu_dev *veu = veu_file->veu_dev;
		veu_file->cfg_needed = false;
		sh_veu_configure(veu_file->veu_dev);
		veu->xaction = 0;
		veu->aborting = false;
	}

	return v4l2_m2m_streamon(file, veu_file->veu_dev->m2m_ctx, type);
}

static int sh_veu_streamoff(struct file *file, void *priv,
			    enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	return v4l2_m2m_streamoff(file, veu_file->veu_dev->m2m_ctx, type);
}

static const struct v4l2_ioctl_ops sh_veu_ioctl_ops = {
	.vidioc_querycap	= sh_veu_querycap,

	.vidioc_enum_fmt_vid_cap	= sh_veu_enum_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap	= sh_veu_g_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap	= sh_veu_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap	= sh_veu_s_fmt_vid_cap,

	.vidioc_enum_fmt_vid_out	= sh_veu_enum_fmt_vid_out,
	.vidioc_g_fmt_vid_out	= sh_veu_g_fmt_vid_out,
	.vidioc_try_fmt_vid_out	= sh_veu_try_fmt_vid_out,
	.vidioc_s_fmt_vid_out	= sh_veu_s_fmt_vid_out,

	.vidioc_reqbufs		= sh_veu_reqbufs,
	.vidioc_querybuf	= sh_veu_querybuf,

	.vidioc_qbuf		= sh_veu_qbuf,
	.vidioc_dqbuf		= sh_veu_dqbuf,

	.vidioc_streamon	= sh_veu_streamon,
	.vidioc_streamoff	= sh_veu_streamoff,
};

	/* ========== Queue operations ========== */

static int sh_veu_queue_setup(struct vb2_queue *vq,
			      unsigned int *nbuffers, unsigned int *nplanes,
			      unsigned int sizes[], struct device *alloc_devs[])
{
	struct sh_veu_dev *veu = vb2_get_drv_priv(vq);
	struct sh_veu_vfmt *vfmt = sh_veu_get_vfmt(veu, vq->type);
	unsigned int count = *nbuffers;
	unsigned int size = vfmt->bytesperline * vfmt->frame.height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;

	if (count < 2)
		*nbuffers = count = 2;

	if (size * count > VIDEO_MEM_LIMIT) {
		count = VIDEO_MEM_LIMIT / size;
		*nbuffers = count;
	}

	if (*nplanes)
		return sizes[0] < size ? -EINVAL : 0;

	*nplanes = 1;
	sizes[0] = size;

	dev_dbg(veu->dev, "get %d buffer(s) of size %d each.\n", count, size);

	return 0;
}

static int sh_veu_buf_prepare(struct vb2_buffer *vb)
{
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	struct sh_veu_vfmt *vfmt;
	unsigned int sizeimage;

	vfmt = sh_veu_get_vfmt(veu, vb->vb2_queue->type);
	sizeimage = vfmt->bytesperline * vfmt->frame.height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;

	if (vb2_plane_size(vb, 0) < sizeimage) {
		dev_dbg(veu->dev, "%s data will not fit into plane (%lu < %u)\n",
			__func__, vb2_plane_size(vb, 0), sizeimage);
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, sizeimage);

	return 0;
}

static void sh_veu_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	dev_dbg(veu->dev, "%s(%d)\n", __func__, vb->type);
	v4l2_m2m_buf_queue(veu->m2m_ctx, vbuf);
}

static const struct vb2_ops sh_veu_qops = {
	.queue_setup	= sh_veu_queue_setup,
	.buf_prepare	= sh_veu_buf_prepare,
	.buf_queue	= sh_veu_buf_queue,
	.wait_prepare	= vb2_ops_wait_prepare,
	.wait_finish	= vb2_ops_wait_finish,
};

static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq)
{
	struct sh_veu_dev *veu = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	src_vq->drv_priv = veu;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &sh_veu_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->lock = &veu->fop_lock;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->dev = veu->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret < 0)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	dst_vq->drv_priv = veu;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &sh_veu_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->lock = &veu->fop_lock;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->dev = veu->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}

	/* ========== File operations ========== */

static int sh_veu_open(struct file *file)
{
	struct sh_veu_dev *veu = video_drvdata(file);
	struct sh_veu_file *veu_file;

	veu_file = kzalloc(sizeof(*veu_file), GFP_KERNEL);
	if (!veu_file)
		return -ENOMEM;

	veu_file->veu_dev = veu;
	veu_file->cfg_needed = true;

	file->private_data = veu_file;

	pm_runtime_get_sync(veu->dev);

	dev_dbg(veu->dev, "Created instance %p\n", veu_file);

	return 0;
}

static int sh_veu_release(struct file *file)
{
	struct sh_veu_dev *veu = video_drvdata(file);
	struct sh_veu_file *veu_file = file->private_data;

	dev_dbg(veu->dev, "Releasing instance %p\n", veu_file);

	if (veu_file == veu->capture) {
		veu->capture = NULL;
		vb2_queue_release(v4l2_m2m_get_vq(veu->m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE));
	}

	if (veu_file == veu->output) {
		veu->output = NULL;
		vb2_queue_release(v4l2_m2m_get_vq(veu->m2m_ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT));
	}

	if (!veu->output && !veu->capture && veu->m2m_ctx) {
		v4l2_m2m_ctx_release(veu->m2m_ctx);
		veu->m2m_ctx = NULL;
	}

	pm_runtime_put(veu->dev);

	kfree(veu_file);

	return 0;
}

static unsigned int sh_veu_poll(struct file *file,
				struct poll_table_struct *wait)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_poll(file, veu_file->veu_dev->m2m_ctx, wait);
}

static int sh_veu_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_mmap(file, veu_file->veu_dev->m2m_ctx, vma);
}

static const struct v4l2_file_operations sh_veu_fops = {
	.owner		= THIS_MODULE,
	.open		= sh_veu_open,
	.release	= sh_veu_release,
	.poll		= sh_veu_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= sh_veu_mmap,
};

static const struct video_device sh_veu_videodev = {
	.name		= "sh-veu",
	.fops		= &sh_veu_fops,
	.ioctl_ops	= &sh_veu_ioctl_ops,
	.minor		= -1,
	.release	= video_device_release_empty,
	.vfl_dir	= VFL_DIR_M2M,
};

static const struct v4l2_m2m_ops sh_veu_m2m_ops = {
	.device_run	= sh_veu_device_run,
	.job_abort	= sh_veu_job_abort,
};

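/*
 * Interrupt handling is split in two: sh_veu_isr() runs in hard interrupt
 * context, acknowledges the event, copies timestamp and timecode to the
 * destination buffer and marks both buffers done, then returns
 * IRQ_WAKE_THREAD so that sh_veu_bh() runs as the threaded handler and
 * either finishes the mem2mem job (after MEM2MEM_DEF_TRANSLEN buffer pairs,
 * or when aborting) or kicks off the next transaction.
 */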
static irqreturn_t sh_veu_bh(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;

	if (veu->xaction == MEM2MEM_DEF_TRANSLEN || veu->aborting) {
		v4l2_m2m_job_finish(veu->m2m_dev, veu->m2m_ctx);
		veu->xaction = 0;
	} else {
		sh_veu_device_run(veu);
	}

	return IRQ_HANDLED;
}

static irqreturn_t sh_veu_isr(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;
	struct vb2_v4l2_buffer *dst;
	struct vb2_v4l2_buffer *src;
	u32 status = sh_veu_reg_read(veu, VEU_EVTR);

	/* bundle read mode not used */
	if (!(status & 1))
		return IRQ_NONE;

	/* disable interrupt in VEU */
	sh_veu_reg_write(veu, VEU_EIER, 0);
	/* halt operation */
	sh_veu_reg_write(veu, VEU_STR, 0);
	/* ack int, write 0 to clear bits */
	sh_veu_reg_write(veu, VEU_EVTR, status & ~1);

	/* conversion completed */
	dst = v4l2_m2m_dst_buf_remove(veu->m2m_ctx);
	src = v4l2_m2m_src_buf_remove(veu->m2m_ctx);
	if (!src || !dst)
		return IRQ_NONE;

	dst->vb2_buf.timestamp = src->vb2_buf.timestamp;
	dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->flags |=
		src->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->timecode = src->timecode;

	spin_lock(&veu->lock);
	v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
	v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
	spin_unlock(&veu->lock);

	veu->xaction++;

	return IRQ_WAKE_THREAD;
}

static int sh_veu_probe(struct platform_device *pdev)
{
	struct sh_veu_dev *veu;
	struct resource *reg_res;
	struct video_device *vdev;
	int irq, ret;

	reg_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	irq = platform_get_irq(pdev, 0);

	if (!reg_res || irq <= 0) {
		dev_err(&pdev->dev, "Insufficient VEU platform information.\n");
		return -ENODEV;
	}

	veu = devm_kzalloc(&pdev->dev, sizeof(*veu), GFP_KERNEL);
	if (!veu)
		return -ENOMEM;

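	/*
	 * A 0x22c byte register window identifies the VEU2H variant, which
	 * gets the special upscale handling and the colour-matrix setup.
	 */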
	veu->is_2h = resource_size(reg_res) == 0x22c;

	veu->base = devm_ioremap_resource(&pdev->dev, reg_res);
	if (IS_ERR(veu->base))
		return PTR_ERR(veu->base);

	ret = devm_request_threaded_irq(&pdev->dev, irq, sh_veu_isr, sh_veu_bh,
					0, "veu", veu);
	if (ret < 0)
		return ret;

	ret = v4l2_device_register(&pdev->dev, &veu->v4l2_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "Error registering v4l2 device\n");
		return ret;
	}

	vdev = &veu->vdev;

	*vdev = sh_veu_videodev;
	vdev->v4l2_dev = &veu->v4l2_dev;
	spin_lock_init(&veu->lock);
	mutex_init(&veu->fop_lock);
	vdev->lock = &veu->fop_lock;

	video_set_drvdata(vdev, veu);

	veu->dev	= &pdev->dev;
	veu->vfmt_out	= DEFAULT_OUT_VFMT;
	veu->vfmt_in	= DEFAULT_IN_VFMT;

	veu->m2m_dev = v4l2_m2m_init(&sh_veu_m2m_ops);
	if (IS_ERR(veu->m2m_dev)) {
		ret = PTR_ERR(veu->m2m_dev);
		v4l2_err(&veu->v4l2_dev, "Failed to init mem2mem device: %d\n", ret);
		goto em2minit;
	}

	pm_runtime_enable(&pdev->dev);
	pm_runtime_resume(&pdev->dev);

	ret = video_register_device(vdev, VFL_TYPE_GRABBER, -1);
	pm_runtime_suspend(&pdev->dev);
	if (ret < 0)
		goto evidreg;

	return ret;

evidreg:
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
em2minit:
	v4l2_device_unregister(&veu->v4l2_dev);
	return ret;
}

static int sh_veu_remove(struct platform_device *pdev)
{
	struct v4l2_device *v4l2_dev = platform_get_drvdata(pdev);
	struct sh_veu_dev *veu = container_of(v4l2_dev,
					      struct sh_veu_dev, v4l2_dev);

	video_unregister_device(&veu->vdev);
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
	v4l2_device_unregister(&veu->v4l2_dev);

	return 0;
}

static struct platform_driver __refdata sh_veu_pdrv = {
	.remove		= sh_veu_remove,
	.driver		= {
		.name	= "sh_veu",
	},
};

module_platform_driver_probe(sh_veu_pdrv, sh_veu_probe);

MODULE_DESCRIPTION("sh-mobile VEU mem2mem driver");
MODULE_AUTHOR("Guennadi Liakhovetski, <g.liakhovetski@gmx.de>");
MODULE_LICENSE("GPL v2");