blob: 9f03b791b711cdaef78beb4bacf268a009a607bb [file] [log] [blame]
Sungchun Kang5d718332012-07-31 10:44:05 -03001/*
2 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
3 * http://www.samsung.com
4 *
5 * Samsung EXYNOS5 SoC series G-Scaler driver
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published
9 * by the Free Software Foundation, either version 2 of the License,
10 * or (at your option) any later version.
11 */
12
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/poll.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"
30
31static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
32{
33 struct gsc_ctx *curr_ctx;
34 struct gsc_dev *gsc = ctx->gsc_dev;
35 int ret;
36
37 curr_ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
38 if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
39 return 0;
40
41 gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
42 ret = wait_event_timeout(gsc->irq_queue,
43 !gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
44 GSC_SHUTDOWN_TIMEOUT);
45
46 return ret == 0 ? -ETIMEDOUT : ret;
47}
48
Shaik Ameer Bashad9315162013-09-20 03:26:17 -030049static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
50{
51 int ret;
52
53 ret = gsc_m2m_ctx_stop_req(ctx);
54 if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
55 gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
56 gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
57 }
58}
59
Sungchun Kang5d718332012-07-31 10:44:05 -030060static int gsc_m2m_start_streaming(struct vb2_queue *q, unsigned int count)
61{
62 struct gsc_ctx *ctx = q->drv_priv;
63 int ret;
64
65 ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
66 return ret > 0 ? 0 : ret;
67}
68
Hans Verkuile37559b2014-04-17 02:47:21 -030069static void gsc_m2m_stop_streaming(struct vb2_queue *q)
Sungchun Kang5d718332012-07-31 10:44:05 -030070{
71 struct gsc_ctx *ctx = q->drv_priv;
Sungchun Kang5d718332012-07-31 10:44:05 -030072
Shaik Ameer Bashad9315162013-09-20 03:26:17 -030073 __gsc_m2m_job_abort(ctx);
Sungchun Kang5d718332012-07-31 10:44:05 -030074
75 pm_runtime_put(&ctx->gsc_dev->pdev->dev);
Sungchun Kang5d718332012-07-31 10:44:05 -030076}
77
78void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
79{
Junghak Sung2d700712015-09-22 10:30:30 -030080 struct vb2_v4l2_buffer *src_vb, *dst_vb;
Sungchun Kang5d718332012-07-31 10:44:05 -030081
82 if (!ctx || !ctx->m2m_ctx)
83 return;
84
85 src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
86 dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
87
88 if (src_vb && dst_vb) {
Junghak Sungd6dd6452015-11-03 08:16:37 -020089 dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
Junghak Sung2d700712015-09-22 10:30:30 -030090 dst_vb->timecode = src_vb->timecode;
91 dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
92 dst_vb->flags |=
93 src_vb->flags
Sakari Ailus309f4d62014-02-08 14:21:35 -030094 & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
Kamil Debski9c303ec2013-04-24 10:50:55 -030095
Sungchun Kang5d718332012-07-31 10:44:05 -030096 v4l2_m2m_buf_done(src_vb, vb_state);
97 v4l2_m2m_buf_done(dst_vb, vb_state);
98
99 v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
100 ctx->m2m_ctx);
101 }
102}
103
/* m2m framework job_abort callback; @priv is the driver context. */
static void gsc_m2m_job_abort(void *priv)
{
	struct gsc_ctx *ctx = priv;

	__gsc_m2m_job_abort(ctx);
}
108
Shaik Ameer Bashaf60e1602012-11-22 02:25:06 -0300109static int gsc_get_bufs(struct gsc_ctx *ctx)
Sungchun Kang5d718332012-07-31 10:44:05 -0300110{
111 struct gsc_frame *s_frame, *d_frame;
Junghak Sung2d700712015-09-22 10:30:30 -0300112 struct vb2_v4l2_buffer *src_vb, *dst_vb;
Sungchun Kang5d718332012-07-31 10:44:05 -0300113 int ret;
114
115 s_frame = &ctx->s_frame;
116 d_frame = &ctx->d_frame;
117
Shaik Ameer Bashaf60e1602012-11-22 02:25:06 -0300118 src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
Junghak Sung2d700712015-09-22 10:30:30 -0300119 ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
Sungchun Kang5d718332012-07-31 10:44:05 -0300120 if (ret)
121 return ret;
122
Shaik Ameer Bashaf60e1602012-11-22 02:25:06 -0300123 dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
Junghak Sung2d700712015-09-22 10:30:30 -0300124 ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
Shaik Ameer Bashaf60e1602012-11-22 02:25:06 -0300125 if (ret)
126 return ret;
127
Junghak Sungd6dd6452015-11-03 08:16:37 -0200128 dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
Shaik Ameer Bashaf60e1602012-11-22 02:25:06 -0300129
130 return 0;
Sungchun Kang5d718332012-07-31 10:44:05 -0300131}
132
/*
 * m2m framework device_run callback: program the G-Scaler with the
 * next source/destination buffers and start processing.
 *
 * The whole sequence runs under gsc->slock (IRQ-safe) to serialize
 * against the frame-done interrupt handler.
 */
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	/* Mark an m2m job as pending on the device. */
	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
				gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	/*
	 * A stop was requested (see gsc_m2m_ctx_stop_req()): acknowledge
	 * it by converting it to an abort and waking the waiter instead
	 * of starting a new job.
	 */
	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	/* Resolve DMA addresses of the next source/destination buffers. */
	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	/* Full reprogram only when format/scaling parameters changed. */
	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);

		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);

		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	/* Kick off the hardware; completion is signalled via interrupt. */
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}
213
214static int gsc_m2m_queue_setup(struct vb2_queue *vq,
Sungchun Kang5d718332012-07-31 10:44:05 -0300215 unsigned int *num_buffers, unsigned int *num_planes,
Hans Verkuil36c0f8b2016-04-15 09:15:05 -0300216 unsigned int sizes[], struct device *alloc_devs[])
Sungchun Kang5d718332012-07-31 10:44:05 -0300217{
218 struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
219 struct gsc_frame *frame;
220 int i;
221
222 frame = ctx_get_frame(ctx, vq->type);
223 if (IS_ERR(frame))
224 return PTR_ERR(frame);
225
226 if (!frame->fmt)
227 return -EINVAL;
228
229 *num_planes = frame->fmt->num_planes;
Hans Verkuilc781e4a2016-02-15 14:25:09 -0200230 for (i = 0; i < frame->fmt->num_planes; i++)
Sungchun Kang5d718332012-07-31 10:44:05 -0300231 sizes[i] = frame->payload[i];
Sungchun Kang5d718332012-07-31 10:44:05 -0300232 return 0;
233}
234
235static int gsc_m2m_buf_prepare(struct vb2_buffer *vb)
236{
237 struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
238 struct gsc_frame *frame;
239 int i;
240
241 frame = ctx_get_frame(ctx, vb->vb2_queue->type);
242 if (IS_ERR(frame))
243 return PTR_ERR(frame);
244
245 if (!V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
246 for (i = 0; i < frame->fmt->num_planes; i++)
247 vb2_set_plane_payload(vb, i, frame->payload[i]);
248 }
249
250 return 0;
251}
252
253static void gsc_m2m_buf_queue(struct vb2_buffer *vb)
254{
Junghak Sung2d700712015-09-22 10:30:30 -0300255 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
Sungchun Kang5d718332012-07-31 10:44:05 -0300256 struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
257
258 pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);
259
260 if (ctx->m2m_ctx)
Junghak Sung2d700712015-09-22 10:30:30 -0300261 v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
Sungchun Kang5d718332012-07-31 10:44:05 -0300262}
263
/* vb2 queue operations shared by the source and destination queues. */
static const struct vb2_ops gsc_m2m_qops = {
	.queue_setup	 = gsc_m2m_queue_setup,
	.buf_prepare	 = gsc_m2m_buf_prepare,
	.buf_queue	 = gsc_m2m_buf_queue,
	.wait_prepare	 = vb2_ops_wait_prepare,
	.wait_finish	 = vb2_ops_wait_finish,
	.stop_streaming	 = gsc_m2m_stop_streaming,
	.start_streaming = gsc_m2m_start_streaming,
};
273
/* VIDIOC_QUERYCAP handler: identify the driver and report device caps. */
static int gsc_m2m_querycap(struct file *file, void *fh,
			   struct v4l2_capability *cap)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;

	strlcpy(cap->driver, GSC_MODULE_NAME, sizeof(cap->driver));
	strlcpy(cap->card, GSC_MODULE_NAME " gscaler", sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(&gsc->pdev->dev));
	/* Mem-to-mem node: both capture and output mplane caps are set. */
	cap->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE |
		V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
	return 0;
}
290
/* VIDIOC_ENUM_FMT handler, shared by the capture and output queues. */
static int gsc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt_mplane(f);
}
296
/* VIDIOC_G_FMT handler: delegate to the core helper for this context. */
static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
			     struct v4l2_format *f)
{
	return gsc_g_fmt_mplane(fh_to_ctx(fh), f);
}
304
/* VIDIOC_TRY_FMT handler: delegate to the core helper for this context. */
static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
			       struct v4l2_format *f)
{
	return gsc_try_fmt_mplane(fh_to_ctx(fh), f);
}
312
313static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
314 struct v4l2_format *f)
315{
316 struct gsc_ctx *ctx = fh_to_ctx(fh);
317 struct vb2_queue *vq;
318 struct gsc_frame *frame;
319 struct v4l2_pix_format_mplane *pix;
320 int i, ret = 0;
321
322 ret = gsc_m2m_try_fmt_mplane(file, fh, f);
323 if (ret)
324 return ret;
325
326 vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
327
328 if (vb2_is_streaming(vq)) {
329 pr_err("queue (%d) busy", f->type);
330 return -EBUSY;
331 }
332
333 if (V4L2_TYPE_IS_OUTPUT(f->type))
334 frame = &ctx->s_frame;
335 else
336 frame = &ctx->d_frame;
337
338 pix = &f->fmt.pix_mp;
339 frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
340 frame->colorspace = pix->colorspace;
341 if (!frame->fmt)
342 return -EINVAL;
343
344 for (i = 0; i < frame->fmt->num_planes; i++)
345 frame->payload[i] = pix->plane_fmt[i].sizeimage;
346
347 gsc_set_frame_size(frame, pix->width, pix->height);
348
349 if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
350 gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
351 else
352 gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);
353
354 pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);
355
356 return 0;
357}
358
359static int gsc_m2m_reqbufs(struct file *file, void *fh,
360 struct v4l2_requestbuffers *reqbufs)
361{
362 struct gsc_ctx *ctx = fh_to_ctx(fh);
363 struct gsc_dev *gsc = ctx->gsc_dev;
Sungchun Kang5d718332012-07-31 10:44:05 -0300364 u32 max_cnt;
365
366 max_cnt = (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
367 gsc->variant->in_buf_cnt : gsc->variant->out_buf_cnt;
368 if (reqbufs->count > max_cnt) {
369 return -EINVAL;
370 } else if (reqbufs->count == 0) {
371 if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
372 gsc_ctx_state_lock_clear(GSC_SRC_FMT, ctx);
373 else
374 gsc_ctx_state_lock_clear(GSC_DST_FMT, ctx);
375 }
376
Sungchun Kang5d718332012-07-31 10:44:05 -0300377 return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
378}
379
Shaik Ameer Basha371a6642012-12-07 08:28:55 -0300380static int gsc_m2m_expbuf(struct file *file, void *fh,
381 struct v4l2_exportbuffer *eb)
382{
383 struct gsc_ctx *ctx = fh_to_ctx(fh);
384 return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
385}
386
Sungchun Kang5d718332012-07-31 10:44:05 -0300387static int gsc_m2m_querybuf(struct file *file, void *fh,
388 struct v4l2_buffer *buf)
389{
390 struct gsc_ctx *ctx = fh_to_ctx(fh);
391 return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
392}
393
394static int gsc_m2m_qbuf(struct file *file, void *fh,
395 struct v4l2_buffer *buf)
396{
397 struct gsc_ctx *ctx = fh_to_ctx(fh);
398 return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
399}
400
401static int gsc_m2m_dqbuf(struct file *file, void *fh,
402 struct v4l2_buffer *buf)
403{
404 struct gsc_ctx *ctx = fh_to_ctx(fh);
405 return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
406}
407
408static int gsc_m2m_streamon(struct file *file, void *fh,
409 enum v4l2_buf_type type)
410{
411 struct gsc_ctx *ctx = fh_to_ctx(fh);
412
413 /* The source and target color format need to be set */
414 if (V4L2_TYPE_IS_OUTPUT(type)) {
415 if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
416 return -EINVAL;
417 } else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
418 return -EINVAL;
419 }
420
421 return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
422}
423
424static int gsc_m2m_streamoff(struct file *file, void *fh,
425 enum v4l2_buf_type type)
426{
427 struct gsc_ctx *ctx = fh_to_ctx(fh);
428 return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
429}
430
431/* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
432static int is_rectangle_enclosed(struct v4l2_rect *a, struct v4l2_rect *b)
433{
434 if (a->left < b->left || a->top < b->top)
435 return 0;
436
437 if (a->left + a->width > b->left + b->width)
438 return 0;
439
440 if (a->top + a->height > b->top + b->height)
441 return 0;
442
443 return 1;
444}
445
446static int gsc_m2m_g_selection(struct file *file, void *fh,
447 struct v4l2_selection *s)
448{
449 struct gsc_frame *frame;
450 struct gsc_ctx *ctx = fh_to_ctx(fh);
451
452 if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
453 (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
454 return -EINVAL;
455
456 frame = ctx_get_frame(ctx, s->type);
457 if (IS_ERR(frame))
458 return PTR_ERR(frame);
459
460 switch (s->target) {
461 case V4L2_SEL_TGT_COMPOSE_DEFAULT:
462 case V4L2_SEL_TGT_COMPOSE_BOUNDS:
463 case V4L2_SEL_TGT_CROP_BOUNDS:
464 case V4L2_SEL_TGT_CROP_DEFAULT:
465 s->r.left = 0;
466 s->r.top = 0;
467 s->r.width = frame->f_width;
468 s->r.height = frame->f_height;
469 return 0;
470
471 case V4L2_SEL_TGT_COMPOSE:
472 case V4L2_SEL_TGT_CROP:
473 s->r.left = frame->crop.left;
474 s->r.top = frame->crop.top;
475 s->r.width = frame->crop.width;
476 s->r.height = frame->crop.height;
477 return 0;
478 }
479
480 return -EINVAL;
481}
482
/*
 * VIDIOC_S_SELECTION handler: validate and apply a crop/compose
 * rectangle. Compose targets are stored on the source frame, crop
 * targets on the destination frame. When both queue formats are
 * configured, the resulting scaling ratio is also checked against the
 * hardware variant's limits.
 */
static int gsc_m2m_s_selection(struct file *file, void *fh,
				struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct v4l2_crop cr;
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	int ret;

	cr.type = s->type;
	cr.c = s->r;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
		return -EINVAL;

	/* Let the hardware constraints adjust the requested rectangle. */
	ret = gsc_try_crop(ctx, &cr);
	if (ret)
		return ret;

	/* Honour the caller's "not larger" / "not smaller" flags. */
	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&cr.c, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &cr.c))
		return -ERANGE;

	s->r = cr.c;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, cr.c.width,
				cr.c.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, cr.c.width,
				cr.c.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = cr.c;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}
556
/* ioctl dispatch table for the mem2mem video node. */
static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops = {
	.vidioc_querycap		= gsc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_enum_fmt_vid_out_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_g_fmt_vid_cap_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_reqbufs			= gsc_m2m_reqbufs,
	.vidioc_expbuf                  = gsc_m2m_expbuf,
	.vidioc_querybuf		= gsc_m2m_querybuf,
	.vidioc_qbuf			= gsc_m2m_qbuf,
	.vidioc_dqbuf			= gsc_m2m_dqbuf,
	.vidioc_streamon		= gsc_m2m_streamon,
	.vidioc_streamoff		= gsc_m2m_streamoff,
	.vidioc_g_selection		= gsc_m2m_g_selection,
	.vidioc_s_selection		= gsc_m2m_s_selection
};
577
578static int queue_init(void *priv, struct vb2_queue *src_vq,
579 struct vb2_queue *dst_vq)
580{
581 struct gsc_ctx *ctx = priv;
582 int ret;
583
584 memset(src_vq, 0, sizeof(*src_vq));
585 src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
Shaik Ameer Basha371a6642012-12-07 08:28:55 -0300586 src_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
Sungchun Kang5d718332012-07-31 10:44:05 -0300587 src_vq->drv_priv = ctx;
588 src_vq->ops = &gsc_m2m_qops;
589 src_vq->mem_ops = &vb2_dma_contig_memops;
590 src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
Sakari Ailusade48682014-02-25 19:12:19 -0300591 src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
Prabhakar Lad0637f0542014-11-26 19:42:26 -0300592 src_vq->lock = &ctx->gsc_dev->lock;
Hans Verkuilc781e4a2016-02-15 14:25:09 -0200593 src_vq->dev = &ctx->gsc_dev->pdev->dev;
Sungchun Kang5d718332012-07-31 10:44:05 -0300594
595 ret = vb2_queue_init(src_vq);
596 if (ret)
597 return ret;
598
599 memset(dst_vq, 0, sizeof(*dst_vq));
600 dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
Shaik Ameer Basha371a6642012-12-07 08:28:55 -0300601 dst_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
Sungchun Kang5d718332012-07-31 10:44:05 -0300602 dst_vq->drv_priv = ctx;
603 dst_vq->ops = &gsc_m2m_qops;
604 dst_vq->mem_ops = &vb2_dma_contig_memops;
605 dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
Sakari Ailusade48682014-02-25 19:12:19 -0300606 dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
Prabhakar Lad0637f0542014-11-26 19:42:26 -0300607 dst_vq->lock = &ctx->gsc_dev->lock;
Hans Verkuilc781e4a2016-02-15 14:25:09 -0200608 dst_vq->dev = &ctx->gsc_dev->pdev->dev;
Sungchun Kang5d718332012-07-31 10:44:05 -0300609
610 return vb2_queue_init(dst_vq);
611}
612
/*
 * open file operation: allocate a per-file-handle context, install a
 * per-handle control handler, set default formats and create the m2m
 * context. On failure, previously acquired resources are unwound in
 * reverse order via the error labels.
 */
static int gsc_m2m_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_ctx *ctx = NULL;
	int ret;

	pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto unlock;
	}

	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
	ret = gsc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->gsc_dev = gsc;
	/* Default color format */
	ctx->s_frame.fmt = get_format(0);
	ctx->d_frame.fmt = get_format(0);
	/* Setup the device context for mem2mem mode. */
	ctx->state = GSC_CTX_M2M;
	ctx->flags = 0;
	ctx->in_path = GSC_DMA;
	ctx->out_path = GSC_DMA;

	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		pr_err("Failed to initialize m2m context");
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_ctrls;
	}

	/* First opener flips the device-wide "m2m in use" state bit. */
	if (gsc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_OPEN, &gsc->state);

	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);

	mutex_unlock(&gsc->lock);
	return 0;

error_ctrls:
	gsc_ctrls_delete(ctx);
error_fh:
	/*
	 * NOTE(review): this label is also reached when gsc_ctrls_create()
	 * fails, i.e. before v4l2_fh_add() ran — verify v4l2_fh_del() is
	 * safe on an fh that was only initialized, never added.
	 */
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
unlock:
	mutex_unlock(&gsc->lock);
	return ret;
}
675
/*
 * release file operation: tear down the per-file-handle context in the
 * reverse order of gsc_m2m_open() and drop the open reference count.
 */
static int gsc_m2m_release(struct file *file)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;

	pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
		task_pid_nr(current), gsc->state, gsc->m2m.refcnt);

	mutex_lock(&gsc->lock);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	/* Last closer clears the device-wide "m2m in use" state bit. */
	if (--gsc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_OPEN, &gsc->state);
	kfree(ctx);

	mutex_unlock(&gsc->lock);
	return 0;
}
698
699static unsigned int gsc_m2m_poll(struct file *file,
700 struct poll_table_struct *wait)
701{
702 struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
703 struct gsc_dev *gsc = ctx->gsc_dev;
Markus Elfringd5441ea2015-12-19 12:28:37 -0200704 unsigned int ret;
Sungchun Kang5d718332012-07-31 10:44:05 -0300705
706 if (mutex_lock_interruptible(&gsc->lock))
707 return -ERESTARTSYS;
708
709 ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
710 mutex_unlock(&gsc->lock);
711
712 return ret;
713}
714
715static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
716{
717 struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
718 struct gsc_dev *gsc = ctx->gsc_dev;
719 int ret;
720
721 if (mutex_lock_interruptible(&gsc->lock))
722 return -ERESTARTSYS;
723
724 ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
725 mutex_unlock(&gsc->lock);
726
727 return ret;
728}
729
/* V4L2 file operations for the mem2mem video node. */
static const struct v4l2_file_operations gsc_m2m_fops = {
	.owner		= THIS_MODULE,
	.open		= gsc_m2m_open,
	.release	= gsc_m2m_release,
	.poll		= gsc_m2m_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= gsc_m2m_mmap,
};
738
/* Callbacks invoked by the v4l2-mem2mem job scheduler. */
static struct v4l2_m2m_ops gsc_m2m_ops = {
	.device_run	= gsc_m2m_device_run,
	.job_abort	= gsc_m2m_job_abort,
};
743
744int gsc_register_m2m_device(struct gsc_dev *gsc)
745{
746 struct platform_device *pdev;
747 int ret;
748
749 if (!gsc)
750 return -ENODEV;
751
752 pdev = gsc->pdev;
753
754 gsc->vdev.fops = &gsc_m2m_fops;
755 gsc->vdev.ioctl_ops = &gsc_m2m_ioctl_ops;
756 gsc->vdev.release = video_device_release_empty;
757 gsc->vdev.lock = &gsc->lock;
Sylwester Nawrocki24fc6812012-11-10 19:57:56 -0300758 gsc->vdev.vfl_dir = VFL_DIR_M2M;
Arun Kumar Kd0b1c312013-07-26 07:28:01 -0300759 gsc->vdev.v4l2_dev = &gsc->v4l2_dev;
Sungchun Kang5d718332012-07-31 10:44:05 -0300760 snprintf(gsc->vdev.name, sizeof(gsc->vdev.name), "%s.%d:m2m",
761 GSC_MODULE_NAME, gsc->id);
762
763 video_set_drvdata(&gsc->vdev, gsc);
764
765 gsc->m2m.vfd = &gsc->vdev;
766 gsc->m2m.m2m_dev = v4l2_m2m_init(&gsc_m2m_ops);
767 if (IS_ERR(gsc->m2m.m2m_dev)) {
768 dev_err(&pdev->dev, "failed to initialize v4l2-m2m device\n");
769 ret = PTR_ERR(gsc->m2m.m2m_dev);
770 goto err_m2m_r1;
771 }
772
773 ret = video_register_device(&gsc->vdev, VFL_TYPE_GRABBER, -1);
774 if (ret) {
775 dev_err(&pdev->dev,
776 "%s(): failed to register video device\n", __func__);
777 goto err_m2m_r2;
778 }
779
780 pr_debug("gsc m2m driver registered as /dev/video%d", gsc->vdev.num);
781 return 0;
782
783err_m2m_r2:
784 v4l2_m2m_release(gsc->m2m.m2m_dev);
785err_m2m_r1:
786 video_device_release(gsc->m2m.vfd);
787
788 return ret;
789}
790
/*
 * Release the v4l2-m2m framework resources for @gsc.
 * NOTE(review): the video node itself is not unregistered here —
 * presumably video_unregister_device() is done by the core driver on
 * removal; verify against the caller.
 */
void gsc_unregister_m2m_device(struct gsc_dev *gsc)
{
	if (gsc)
		v4l2_m2m_release(gsc->m2m.m2m_dev);
}