drivers/staging/media/omap4iss/iss_video.c
1/*
2 * TI OMAP4 ISS V4L2 Driver - Generic video node
3 *
4 * Copyright (C) 2012 Texas Instruments, Inc.
5 *
6 * Author: Sergio Aguirre <sergio.a.aguirre@gmail.com>
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
12 */
13
14#include <asm/cacheflush.h>
15#include <linux/clk.h>
16#include <linux/mm.h>
17#include <linux/pagemap.h>
18#include <linux/sched.h>
19#include <linux/slab.h>
20#include <linux/vmalloc.h>
21#include <linux/module.h>
22#include <media/v4l2-dev.h>
23#include <media/v4l2-ioctl.h>
24
25#include "iss_video.h"
26#include "iss.h"
27
28
29/* -----------------------------------------------------------------------------
30 * Helper functions
31 */
32
33static struct iss_format_info formats[] = {
34 { V4L2_MBUS_FMT_Y8_1X8, V4L2_MBUS_FMT_Y8_1X8,
35 V4L2_MBUS_FMT_Y8_1X8, V4L2_MBUS_FMT_Y8_1X8,
36 V4L2_PIX_FMT_GREY, 8, },
37 { V4L2_MBUS_FMT_Y10_1X10, V4L2_MBUS_FMT_Y10_1X10,
38 V4L2_MBUS_FMT_Y10_1X10, V4L2_MBUS_FMT_Y8_1X8,
39 V4L2_PIX_FMT_Y10, 10, },
40 { V4L2_MBUS_FMT_Y12_1X12, V4L2_MBUS_FMT_Y10_1X10,
41 V4L2_MBUS_FMT_Y12_1X12, V4L2_MBUS_FMT_Y8_1X8,
42 V4L2_PIX_FMT_Y12, 12, },
43 { V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_MBUS_FMT_SBGGR8_1X8,
44 V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_MBUS_FMT_SBGGR8_1X8,
45 V4L2_PIX_FMT_SBGGR8, 8, },
46 { V4L2_MBUS_FMT_SGBRG8_1X8, V4L2_MBUS_FMT_SGBRG8_1X8,
47 V4L2_MBUS_FMT_SGBRG8_1X8, V4L2_MBUS_FMT_SGBRG8_1X8,
48 V4L2_PIX_FMT_SGBRG8, 8, },
49 { V4L2_MBUS_FMT_SGRBG8_1X8, V4L2_MBUS_FMT_SGRBG8_1X8,
50 V4L2_MBUS_FMT_SGRBG8_1X8, V4L2_MBUS_FMT_SGRBG8_1X8,
51 V4L2_PIX_FMT_SGRBG8, 8, },
52 { V4L2_MBUS_FMT_SRGGB8_1X8, V4L2_MBUS_FMT_SRGGB8_1X8,
53 V4L2_MBUS_FMT_SRGGB8_1X8, V4L2_MBUS_FMT_SRGGB8_1X8,
54 V4L2_PIX_FMT_SRGGB8, 8, },
55 { V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8, V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8,
56 V4L2_MBUS_FMT_SGRBG10_1X10, 0,
57 V4L2_PIX_FMT_SGRBG10DPCM8, 8, },
58 { V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_MBUS_FMT_SBGGR10_1X10,
59 V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_MBUS_FMT_SBGGR8_1X8,
60 V4L2_PIX_FMT_SBGGR10, 10, },
61 { V4L2_MBUS_FMT_SGBRG10_1X10, V4L2_MBUS_FMT_SGBRG10_1X10,
62 V4L2_MBUS_FMT_SGBRG10_1X10, V4L2_MBUS_FMT_SGBRG8_1X8,
63 V4L2_PIX_FMT_SGBRG10, 10, },
64 { V4L2_MBUS_FMT_SGRBG10_1X10, V4L2_MBUS_FMT_SGRBG10_1X10,
65 V4L2_MBUS_FMT_SGRBG10_1X10, V4L2_MBUS_FMT_SGRBG8_1X8,
66 V4L2_PIX_FMT_SGRBG10, 10, },
67 { V4L2_MBUS_FMT_SRGGB10_1X10, V4L2_MBUS_FMT_SRGGB10_1X10,
68 V4L2_MBUS_FMT_SRGGB10_1X10, V4L2_MBUS_FMT_SRGGB8_1X8,
69 V4L2_PIX_FMT_SRGGB10, 10, },
70 { V4L2_MBUS_FMT_SBGGR12_1X12, V4L2_MBUS_FMT_SBGGR10_1X10,
71 V4L2_MBUS_FMT_SBGGR12_1X12, V4L2_MBUS_FMT_SBGGR8_1X8,
72 V4L2_PIX_FMT_SBGGR12, 12, },
73 { V4L2_MBUS_FMT_SGBRG12_1X12, V4L2_MBUS_FMT_SGBRG10_1X10,
74 V4L2_MBUS_FMT_SGBRG12_1X12, V4L2_MBUS_FMT_SGBRG8_1X8,
75 V4L2_PIX_FMT_SGBRG12, 12, },
76 { V4L2_MBUS_FMT_SGRBG12_1X12, V4L2_MBUS_FMT_SGRBG10_1X10,
77 V4L2_MBUS_FMT_SGRBG12_1X12, V4L2_MBUS_FMT_SGRBG8_1X8,
78 V4L2_PIX_FMT_SGRBG12, 12, },
79 { V4L2_MBUS_FMT_SRGGB12_1X12, V4L2_MBUS_FMT_SRGGB10_1X10,
80 V4L2_MBUS_FMT_SRGGB12_1X12, V4L2_MBUS_FMT_SRGGB8_1X8,
81 V4L2_PIX_FMT_SRGGB12, 12, },
82 { V4L2_MBUS_FMT_UYVY8_1X16, V4L2_MBUS_FMT_UYVY8_1X16,
83 V4L2_MBUS_FMT_UYVY8_1X16, 0,
84 V4L2_PIX_FMT_UYVY, 16, },
85 { V4L2_MBUS_FMT_YUYV8_1X16, V4L2_MBUS_FMT_YUYV8_1X16,
86 V4L2_MBUS_FMT_YUYV8_1X16, 0,
87 V4L2_PIX_FMT_YUYV, 16, },
88 { V4L2_MBUS_FMT_YUYV8_1_5X8, V4L2_MBUS_FMT_YUYV8_1_5X8,
89 V4L2_MBUS_FMT_YUYV8_1_5X8, 0,
90 V4L2_PIX_FMT_NV12, 8, },
91};
92
93const struct iss_format_info *
94omap4iss_video_format_info(enum v4l2_mbus_pixelcode code)
95{
96 unsigned int i;
97
98 for (i = 0; i < ARRAY_SIZE(formats); ++i) {
99 if (formats[i].code == code)
100 return &formats[i];
101 }
102
103 return NULL;
104}
105
106/*
107 * iss_video_mbus_to_pix - Convert v4l2_mbus_framefmt to v4l2_pix_format
108 * @video: ISS video instance
109 * @mbus: v4l2_mbus_framefmt format (input)
110 * @pix: v4l2_pix_format format (output)
111 *
112 * Fill the output pix structure with information from the input mbus format.
113 * The bytesperline and sizeimage fields are computed from the requested bytes
114 * per line value in the pix format and information from the video instance.
115 *
116 * Return the number of padding bytes at the end of the line.
117 */
118static unsigned int iss_video_mbus_to_pix(const struct iss_video *video,
119 const struct v4l2_mbus_framefmt *mbus,
120 struct v4l2_pix_format *pix)
121{
122 unsigned int bpl = pix->bytesperline;
123 unsigned int min_bpl;
124 unsigned int i;
125
126 memset(pix, 0, sizeof(*pix));
127 pix->width = mbus->width;
128 pix->height = mbus->height;
129
130 /* Skip the last format in the loop so that it will be selected if no
131 * match is found.
132 */
133 for (i = 0; i < ARRAY_SIZE(formats) - 1; ++i) {
134 if (formats[i].code == mbus->code)
135 break;
136 }
137
138 min_bpl = pix->width * ALIGN(formats[i].bpp, 8) / 8;
139
140 /* Clamp the requested bytes per line value. If the maximum bytes per
141 * line value is zero, the module doesn't support user configurable line
142 * sizes. Override the requested value with the minimum in that case.
143 */
144 if (video->bpl_max)
145 bpl = clamp(bpl, min_bpl, video->bpl_max);
146 else
147 bpl = min_bpl;
148
149 if (!video->bpl_zero_padding || bpl != min_bpl)
150 bpl = ALIGN(bpl, video->bpl_alignment);
151
152 pix->pixelformat = formats[i].pixelformat;
153 pix->bytesperline = bpl;
154 pix->sizeimage = pix->bytesperline * pix->height;
155 pix->colorspace = mbus->colorspace;
156 pix->field = mbus->field;
157
158 /* FIXME: Special case for NV12! We should make this nicer... */
159 if (pix->pixelformat == V4L2_PIX_FMT_NV12)
160 pix->sizeimage += (pix->bytesperline * pix->height) / 2;
161
162 return bpl - min_bpl;
163}
164
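/*
 * iss_video_pix_to_mbus - Convert v4l2_pix_format to v4l2_mbus_framefmt
 * @pix: v4l2_pix_format format (input)
 * @mbus: v4l2_mbus_framefmt format (output)
 *
 * Fill the output mbus structure with the width, height, colorspace and field
 * from the input pix format, and look up the media bus code matching the pixel
 * format in the formats table.
 */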
165static void iss_video_pix_to_mbus(const struct v4l2_pix_format *pix,
166 struct v4l2_mbus_framefmt *mbus)
167{
168 unsigned int i;
169
170 memset(mbus, 0, sizeof(*mbus));
171 mbus->width = pix->width;
172 mbus->height = pix->height;
173
174 for (i = 0; i < ARRAY_SIZE(formats); ++i) {
175 if (formats[i].pixelformat == pix->pixelformat)
176 break;
177 }
178
179 if (WARN_ON(i == ARRAY_SIZE(formats)))
180 return;
181
182 mbus->code = formats[i].code;
183 mbus->colorspace = pix->colorspace;
184 mbus->field = pix->field;
185}
186
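/*
 * iss_video_remote_subdev - Return the subdev connected to the video node
 * @video: ISS video instance
 * @pad: optional output, filled with the index of the remote subdev pad
 *
 * Return the v4l2_subdev at the remote end of the link connected to the video
 * node's pad, or NULL if the node isn't connected to a subdev.
 */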
187static struct v4l2_subdev *
188iss_video_remote_subdev(struct iss_video *video, u32 *pad)
189{
190 struct media_pad *remote;
191
192 remote = media_entity_remote_pad(&video->pad);
193
194 if (remote == NULL ||
195 media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
196 return NULL;
197
198 if (pad)
199 *pad = remote->index;
200
201 return media_entity_to_v4l2_subdev(remote->entity);
202}
203
204/* Return a pointer to the ISS video instance at the far end of the pipeline. */
205static struct iss_video *
206iss_video_far_end(struct iss_video *video)
207{
208 struct media_entity_graph graph;
209 struct media_entity *entity = &video->video.entity;
210 struct media_device *mdev = entity->parent;
211 struct iss_video *far_end = NULL;
212
213 mutex_lock(&mdev->graph_mutex);
214 media_entity_graph_walk_start(&graph, entity);
215
216 while ((entity = media_entity_graph_walk_next(&graph))) {
217 if (entity == &video->video.entity)
218 continue;
219
220 if (media_entity_type(entity) != MEDIA_ENT_T_DEVNODE)
221 continue;
222
223 far_end = to_iss_video(media_entity_to_video_device(entity));
224 if (far_end->type != video->type)
225 break;
226
227 far_end = NULL;
228 }
229
230 mutex_unlock(&mdev->graph_mutex);
231 return far_end;
232}
233
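/*
 * __iss_video_get_format - Query the active format on the remote subdev pad
 * connected to the video node and convert it to a V4L2 pix format. On success
 * return the number of padding bytes at the end of the line, or a negative
 * error code if the node isn't connected to a subdev or the query fails.
 */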
234static int
235__iss_video_get_format(struct iss_video *video, struct v4l2_format *format)
236{
237 struct v4l2_subdev_format fmt;
238 struct v4l2_subdev *subdev;
239 u32 pad;
240 int ret;
241
242 subdev = iss_video_remote_subdev(video, &pad);
243 if (subdev == NULL)
244 return -EINVAL;
245
246 fmt.pad = pad;
247 fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
248
249 mutex_lock(&video->mutex);
250 ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
251 mutex_unlock(&video->mutex);
252
253 if (ret)
254 return ret;
255
256 format->type = video->type;
257 return iss_video_mbus_to_pix(video, &fmt.format, &format->fmt.pix);
258}
259
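/*
 * iss_video_check_format - Compare the format stored in the file handle with
 * the format currently configured on the connected subdev (pixel format,
 * width, height, bytesperline, sizeimage). Return the line padding reported
 * by __iss_video_get_format() on match, or -EINVAL on mismatch.
 */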
260static int
261iss_video_check_format(struct iss_video *video, struct iss_video_fh *vfh)
262{
263 struct v4l2_format format;
264 int ret;
265
266 memcpy(&format, &vfh->format, sizeof(format));
267 ret = __iss_video_get_format(video, &format);
268 if (ret < 0)
269 return ret;
270
271 if (vfh->format.fmt.pix.pixelformat != format.fmt.pix.pixelformat ||
272 vfh->format.fmt.pix.height != format.fmt.pix.height ||
273 vfh->format.fmt.pix.width != format.fmt.pix.width ||
274 vfh->format.fmt.pix.bytesperline != format.fmt.pix.bytesperline ||
275 vfh->format.fmt.pix.sizeimage != format.fmt.pix.sizeimage)
276 return -EINVAL;
277
278 return ret;
279}
280
281/* -----------------------------------------------------------------------------
282 * Video queue operations
283 */
284
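/*
 * vb2 queue_setup operation: buffers use a single plane sized from the
 * currently configured pix format. The buffer count is capped so that the
 * total allocation fits within the memory reserved for capture (capture_mem).
 */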
285static int iss_video_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
286 unsigned int *count, unsigned int *num_planes,
287 unsigned int sizes[], void *alloc_ctxs[])
288{
289 struct iss_video_fh *vfh = vb2_get_drv_priv(vq);
290 struct iss_video *video = vfh->video;
291
292 /* Revisit multi-planar support for NV12 */
293 *num_planes = 1;
294
295 sizes[0] = vfh->format.fmt.pix.sizeimage;
296 if (sizes[0] == 0)
297 return -EINVAL;
298
299 alloc_ctxs[0] = video->alloc_ctx;
300
301 *count = min(*count, (unsigned int)(video->capture_mem / PAGE_ALIGN(sizes[0])));
302
303 return 0;
304}
305
306static void iss_video_buf_cleanup(struct vb2_buffer *vb)
307{
308 struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
309
310 if (buffer->iss_addr)
311 buffer->iss_addr = 0;
312}
313
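/*
 * vb2 buf_prepare operation: verify that the plane is large enough for the
 * configured image size and that the DMA address meets the 32-byte alignment
 * the driver requires, then record the address for the queue handler.
 */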
314static int iss_video_buf_prepare(struct vb2_buffer *vb)
315{
316 struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
317 struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
318 struct iss_video *video = vfh->video;
319 unsigned long size = vfh->format.fmt.pix.sizeimage;
320 dma_addr_t addr;
321
322 if (vb2_plane_size(vb, 0) < size)
323 return -ENOBUFS;
324
325 addr = vb2_dma_contig_plane_dma_addr(vb, 0);
326 if (!IS_ALIGNED(addr, 32)) {
 327 dev_dbg(video->iss->dev,
 328 "Buffer address must be aligned to a 32-byte boundary\n");
329 return -EINVAL;
330 }
331
332 vb2_set_plane_payload(vb, 0, size);
333 buffer->iss_addr = addr;
334 return 0;
335}
336
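/*
 * vb2 buf_queue operation: add the buffer to the DMA queue. If the queue was
 * empty, hand the buffer to the hardware immediately and, if the pipeline is
 * ready, start it in single-shot mode.
 */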
337static void iss_video_buf_queue(struct vb2_buffer *vb)
338{
339 struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
340 struct iss_video *video = vfh->video;
341 struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
342 struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
343 unsigned int empty;
344 unsigned long flags;
345
346 spin_lock_irqsave(&video->qlock, flags);
347 empty = list_empty(&video->dmaqueue);
348 list_add_tail(&buffer->list, &video->dmaqueue);
349 spin_unlock_irqrestore(&video->qlock, flags);
350
351 if (empty) {
352 enum iss_pipeline_state state;
353 unsigned int start;
354
355 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
356 state = ISS_PIPELINE_QUEUE_OUTPUT;
357 else
358 state = ISS_PIPELINE_QUEUE_INPUT;
359
360 spin_lock_irqsave(&pipe->lock, flags);
361 pipe->state |= state;
362 video->ops->queue(video, buffer);
363 video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_QUEUED;
364
365 start = iss_pipeline_ready(pipe);
366 if (start)
367 pipe->state |= ISS_PIPELINE_STREAM;
368 spin_unlock_irqrestore(&pipe->lock, flags);
369
370 if (start)
371 omap4iss_pipeline_set_stream(pipe,
372 ISS_PIPELINE_STREAM_SINGLESHOT);
373 }
374}
375
376static struct vb2_ops iss_video_vb2ops = {
377 .queue_setup = iss_video_queue_setup,
378 .buf_prepare = iss_video_buf_prepare,
379 .buf_queue = iss_video_buf_queue,
380 .buf_cleanup = iss_video_buf_cleanup,
381};
382
383/*
384 * omap4iss_video_buffer_next - Complete the current buffer and return the next
385 * @video: ISS video object
386 *
387 * Remove the current video buffer from the DMA queue and fill its timestamp,
388 * field count and state fields before waking up its completion handler.
389 *
390 * For capture video nodes, the buffer state is set to VB2_BUF_STATE_DONE if no
391 * error has been flagged in the pipeline, or to VB2_BUF_STATE_ERROR otherwise.
392 *
393 * The DMA queue is expected to contain at least one buffer.
394 *
395 * Return a pointer to the next buffer in the DMA queue, or NULL if the queue is
396 * empty.
397 */
398struct iss_buffer *omap4iss_video_buffer_next(struct iss_video *video)
399{
400 struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
401 enum iss_pipeline_state state;
402 struct iss_buffer *buf;
403 unsigned long flags;
404 struct timespec ts;
405
406 spin_lock_irqsave(&video->qlock, flags);
407 if (WARN_ON(list_empty(&video->dmaqueue))) {
408 spin_unlock_irqrestore(&video->qlock, flags);
409 return NULL;
410 }
411
412 buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
413 list);
414 list_del(&buf->list);
415 spin_unlock_irqrestore(&video->qlock, flags);
416
417 ktime_get_ts(&ts);
418 buf->vb.v4l2_buf.timestamp.tv_sec = ts.tv_sec;
419 buf->vb.v4l2_buf.timestamp.tv_usec = ts.tv_nsec / NSEC_PER_USEC;
420
421 /* Do frame number propagation only if this is the output video node.
422 * Frame number either comes from the CSI receivers or it gets
423 * incremented here if H3A is not active.
424 * Note: There is no guarantee that the output buffer will finish
425 * first, so the input number might lag behind by 1 in some cases.
426 */
427 if (video == pipe->output && !pipe->do_propagation)
428 buf->vb.v4l2_buf.sequence = atomic_inc_return(&pipe->frame_number);
429 else
430 buf->vb.v4l2_buf.sequence = atomic_read(&pipe->frame_number);
431
432 vb2_buffer_done(&buf->vb, pipe->error ? VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
433 pipe->error = false;
434
435 spin_lock_irqsave(&video->qlock, flags);
436 if (list_empty(&video->dmaqueue)) {
437 spin_unlock_irqrestore(&video->qlock, flags);
438 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
439 state = ISS_PIPELINE_QUEUE_OUTPUT
440 | ISS_PIPELINE_STREAM;
441 else
442 state = ISS_PIPELINE_QUEUE_INPUT
443 | ISS_PIPELINE_STREAM;
444
445 spin_lock_irqsave(&pipe->lock, flags);
446 pipe->state &= ~state;
447 if (video->pipe.stream_state == ISS_PIPELINE_STREAM_CONTINUOUS)
448 video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
449 spin_unlock_irqrestore(&pipe->lock, flags);
450 return NULL;
451 }
452
453 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && pipe->input != NULL) {
454 spin_lock_irqsave(&pipe->lock, flags);
455 pipe->state &= ~ISS_PIPELINE_STREAM;
456 spin_unlock_irqrestore(&pipe->lock, flags);
457 }
458
459 buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
460 list);
461 spin_unlock_irqrestore(&video->qlock, flags);
462 buf->vb.state = VB2_BUF_STATE_ACTIVE;
463 return buf;
464}
465
466/* -----------------------------------------------------------------------------
467 * V4L2 ioctls
468 */
469
470static int
471iss_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
472{
473 struct iss_video *video = video_drvdata(file);
474
475 strlcpy(cap->driver, ISS_VIDEO_DRIVER_NAME, sizeof(cap->driver));
476 strlcpy(cap->card, video->video.name, sizeof(cap->card));
477 strlcpy(cap->bus_info, "media", sizeof(cap->bus_info));
478
479 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
480 cap->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;
481 else
482 cap->device_caps = V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING;
483
484 cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
485 | V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT;
486
487 return 0;
488}
489
490static int
491iss_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
492{
493 struct iss_video_fh *vfh = to_iss_video_fh(fh);
494 struct iss_video *video = video_drvdata(file);
495
496 if (format->type != video->type)
497 return -EINVAL;
498
499 mutex_lock(&video->mutex);
500 *format = vfh->format;
501 mutex_unlock(&video->mutex);
502
503 return 0;
504}
505
506static int
507iss_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
508{
509 struct iss_video_fh *vfh = to_iss_video_fh(fh);
510 struct iss_video *video = video_drvdata(file);
511 struct v4l2_mbus_framefmt fmt;
512
513 if (format->type != video->type)
514 return -EINVAL;
515
516 mutex_lock(&video->mutex);
517
518 /* Fill the bytesperline and sizeimage fields by converting to media bus
519 * format and back to pixel format.
520 */
521 iss_video_pix_to_mbus(&format->fmt.pix, &fmt);
522 iss_video_mbus_to_pix(video, &fmt, &format->fmt.pix);
523
524 vfh->format = *format;
525
526 mutex_unlock(&video->mutex);
527 return 0;
528}
529
530static int
531iss_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
532{
533 struct iss_video *video = video_drvdata(file);
534 struct v4l2_subdev_format fmt;
535 struct v4l2_subdev *subdev;
536 u32 pad;
537 int ret;
538
539 if (format->type != video->type)
540 return -EINVAL;
541
542 subdev = iss_video_remote_subdev(video, &pad);
543 if (subdev == NULL)
544 return -EINVAL;
545
546 iss_video_pix_to_mbus(&format->fmt.pix, &fmt.format);
547
548 fmt.pad = pad;
549 fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
550 ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
551 if (ret)
552 return ret;
553
554 iss_video_mbus_to_pix(video, &fmt.format, &format->fmt.pix);
555 return 0;
556}
557
558static int
559iss_video_cropcap(struct file *file, void *fh, struct v4l2_cropcap *cropcap)
560{
561 struct iss_video *video = video_drvdata(file);
562 struct v4l2_subdev *subdev;
563 int ret;
564
565 subdev = iss_video_remote_subdev(video, NULL);
566 if (subdev == NULL)
567 return -EINVAL;
568
569 mutex_lock(&video->mutex);
570 ret = v4l2_subdev_call(subdev, video, cropcap, cropcap);
571 mutex_unlock(&video->mutex);
572
573 return ret == -ENOIOCTLCMD ? -ENOTTY : ret;
574}
575
576static int
577iss_video_get_crop(struct file *file, void *fh, struct v4l2_crop *crop)
578{
579 struct iss_video *video = video_drvdata(file);
580 struct v4l2_subdev_format format;
581 struct v4l2_subdev *subdev;
582 u32 pad;
583 int ret;
584
585 subdev = iss_video_remote_subdev(video, &pad);
586 if (subdev == NULL)
587 return -EINVAL;
588
589 /* Try the get crop operation first and fallback to get format if not
590 * implemented.
591 */
592 ret = v4l2_subdev_call(subdev, video, g_crop, crop);
593 if (ret != -ENOIOCTLCMD)
594 return ret;
595
596 format.pad = pad;
597 format.which = V4L2_SUBDEV_FORMAT_ACTIVE;
598 ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &format);
599 if (ret < 0)
600 return ret == -ENOIOCTLCMD ? -ENOTTY : ret;
601
602 crop->c.left = 0;
603 crop->c.top = 0;
604 crop->c.width = format.format.width;
605 crop->c.height = format.format.height;
606
607 return 0;
608}
609
610static int
611iss_video_set_crop(struct file *file, void *fh, const struct v4l2_crop *crop)
612{
613 struct iss_video *video = video_drvdata(file);
614 struct v4l2_subdev *subdev;
615 int ret;
616
617 subdev = iss_video_remote_subdev(video, NULL);
618 if (subdev == NULL)
619 return -EINVAL;
620
621 mutex_lock(&video->mutex);
622 ret = v4l2_subdev_call(subdev, video, s_crop, crop);
623 mutex_unlock(&video->mutex);
624
625 return ret == -ENOIOCTLCMD ? -ENOTTY : ret;
626}
627
628static int
629iss_video_get_param(struct file *file, void *fh, struct v4l2_streamparm *a)
630{
631 struct iss_video_fh *vfh = to_iss_video_fh(fh);
632 struct iss_video *video = video_drvdata(file);
633
634 if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
635 video->type != a->type)
636 return -EINVAL;
637
638 memset(a, 0, sizeof(*a));
639 a->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
640 a->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
641 a->parm.output.timeperframe = vfh->timeperframe;
642
643 return 0;
644}
645
646static int
647iss_video_set_param(struct file *file, void *fh, struct v4l2_streamparm *a)
648{
649 struct iss_video_fh *vfh = to_iss_video_fh(fh);
650 struct iss_video *video = video_drvdata(file);
651
652 if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
653 video->type != a->type)
654 return -EINVAL;
655
656 if (a->parm.output.timeperframe.denominator == 0)
657 a->parm.output.timeperframe.denominator = 1;
658
659 vfh->timeperframe = a->parm.output.timeperframe;
660
661 return 0;
662}
663
664static int
665iss_video_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *rb)
666{
667 struct iss_video_fh *vfh = to_iss_video_fh(fh);
668
669 return vb2_reqbufs(&vfh->queue, rb);
670}
671
672static int
673iss_video_querybuf(struct file *file, void *fh, struct v4l2_buffer *b)
674{
675 struct iss_video_fh *vfh = to_iss_video_fh(fh);
676
677 return vb2_querybuf(&vfh->queue, b);
678}
679
680static int
681iss_video_qbuf(struct file *file, void *fh, struct v4l2_buffer *b)
682{
683 struct iss_video_fh *vfh = to_iss_video_fh(fh);
684
685 return vb2_qbuf(&vfh->queue, b);
686}
687
688static int
689iss_video_dqbuf(struct file *file, void *fh, struct v4l2_buffer *b)
690{
691 struct iss_video_fh *vfh = to_iss_video_fh(fh);
692
693 return vb2_dqbuf(&vfh->queue, b, file->f_flags & O_NONBLOCK);
694}
695
696/*
697 * Stream management
698 *
699 * Every ISS pipeline has a single input and a single output. The input can be
700 * either a sensor or a video node. The output is always a video node.
701 *
702 * As every pipeline has an output video node, the ISS video object at the
703 * pipeline output stores the pipeline state. It tracks the streaming state of
704 * both the input and output, as well as the availability of buffers.
705 *
706 * In sensor-to-memory mode, frames are always available at the pipeline input.
707 * Starting the sensor usually requires I2C transfers and must be done in
708 * interruptible context. The pipeline is started and stopped synchronously
709 * to the stream on/off commands. All modules in the pipeline will get their
710 * subdev set stream handler called. The module at the end of the pipeline must
711 * delay starting the hardware until buffers are available at its output.
712 *
713 * In memory-to-memory mode, starting/stopping the stream requires
714 * synchronization between the input and output. ISS modules can't be stopped
715 * in the middle of a frame, and at least some of the modules seem to become
716 * busy as soon as they're started, even if they don't receive a frame start
717 * event. For that reason frames need to be processed in single-shot mode. The
718 * driver needs to wait until a frame is completely processed and written to
719 * memory before restarting the pipeline for the next frame. Pipelined
720 * processing might be possible but requires more testing.
721 *
722 * Stream start must be delayed until buffers are available at both the input
723 * and output. The pipeline must be started in the videobuf queue callback with
724 * the buffer queue spinlock held. The modules' subdev set stream operation must
725 * not sleep.
726 */
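/*
 * For reference, these handlers are typically driven from userspace with the
 * standard V4L2 capture sequence (a sketch, not part of the driver):
 *
 *   VIDIOC_S_FMT     -> iss_video_set_format
 *   VIDIOC_REQBUFS   -> iss_video_reqbufs
 *   VIDIOC_QBUF      -> iss_video_qbuf
 *   VIDIOC_STREAMON  -> iss_video_streamon
 *   VIDIOC_DQBUF     -> iss_video_dqbuf (in a loop)
 *   VIDIOC_STREAMOFF -> iss_video_streamoff
 */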
727static int
728iss_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
729{
730 struct iss_video_fh *vfh = to_iss_video_fh(fh);
731 struct iss_video *video = video_drvdata(file);
732 enum iss_pipeline_state state;
733 struct iss_pipeline *pipe;
734 struct iss_video *far_end;
735 unsigned long flags;
736 int ret;
737
738 if (type != video->type)
739 return -EINVAL;
740
741 mutex_lock(&video->stream_lock);
742
743 /* Start streaming on the pipeline. No link touching an entity in the
744 * pipeline can be activated or deactivated once streaming is started.
745 */
746 pipe = video->video.entity.pipe
747 ? to_iss_pipeline(&video->video.entity) : &video->pipe;
748 pipe->external = NULL;
749 pipe->external_rate = 0;
750 pipe->external_bpp = 0;
751
752 if (video->iss->pdata->set_constraints)
753 video->iss->pdata->set_constraints(video->iss, true);
754
755 ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
756 if (ret < 0)
757 goto err_media_entity_pipeline_start;
758
759 /* Verify that the currently configured format matches the output of
760 * the connected subdev.
761 */
762 ret = iss_video_check_format(video, vfh);
763 if (ret < 0)
764 goto err_iss_video_check_format;
765
766 video->bpl_padding = ret;
767 video->bpl_value = vfh->format.fmt.pix.bytesperline;
768
769 /* Find the ISS video node connected at the far end of the pipeline and
770 * update the pipeline.
771 */
772 far_end = iss_video_far_end(video);
773
774 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
775 state = ISS_PIPELINE_STREAM_OUTPUT | ISS_PIPELINE_IDLE_OUTPUT;
776 pipe->input = far_end;
777 pipe->output = video;
778 } else {
779 if (far_end == NULL) {
780 ret = -EPIPE;
781 goto err_iss_video_check_format;
782 }
783
784 state = ISS_PIPELINE_STREAM_INPUT | ISS_PIPELINE_IDLE_INPUT;
785 pipe->input = video;
786 pipe->output = far_end;
787 }
788
789 spin_lock_irqsave(&pipe->lock, flags);
790 pipe->state &= ~ISS_PIPELINE_STREAM;
791 pipe->state |= state;
792 spin_unlock_irqrestore(&pipe->lock, flags);
793
794 /* Set the maximum time per frame as the value requested by userspace.
795 * This is a soft limit that can be overridden if the hardware doesn't
796 * support the requested limit.
797 */
798 if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
799 pipe->max_timeperframe = vfh->timeperframe;
800
801 video->queue = &vfh->queue;
802 INIT_LIST_HEAD(&video->dmaqueue);
803 spin_lock_init(&video->qlock);
804 atomic_set(&pipe->frame_number, -1);
805
806 ret = vb2_streamon(&vfh->queue, type);
807 if (ret < 0)
808 goto err_iss_video_check_format;
809
810 /* In sensor-to-memory mode, the stream can be started synchronously
811 * to the stream on command. In memory-to-memory mode, it will be
812 * started when buffers are queued on both the input and output.
813 */
814 if (pipe->input == NULL) {
815 unsigned long flags;
816 ret = omap4iss_pipeline_set_stream(pipe,
817 ISS_PIPELINE_STREAM_CONTINUOUS);
818 if (ret < 0)
819 goto err_omap4iss_set_stream;
820 spin_lock_irqsave(&video->qlock, flags);
821 if (list_empty(&video->dmaqueue))
822 video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
823 spin_unlock_irqrestore(&video->qlock, flags);
824 }
825
826 mutex_unlock(&video->stream_lock);
827 return 0;
828
829err_omap4iss_set_stream:
830 vb2_streamoff(&vfh->queue, type);
831err_iss_video_check_format:
832 media_entity_pipeline_stop(&video->video.entity);
833err_media_entity_pipeline_start:
834 if (video->iss->pdata->set_constraints)
835 video->iss->pdata->set_constraints(video->iss, false);
836 video->queue = NULL;
837
838 mutex_unlock(&video->stream_lock);
839 return ret;
840}
841
842static int
843iss_video_streamoff(struct file *file, void *fh, enum v4l2_buf_type type)
844{
845 struct iss_video_fh *vfh = to_iss_video_fh(fh);
846 struct iss_video *video = video_drvdata(file);
847 struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
848 enum iss_pipeline_state state;
849 unsigned long flags;
850
851 if (type != video->type)
852 return -EINVAL;
853
854 mutex_lock(&video->stream_lock);
855
856 if (!vb2_is_streaming(&vfh->queue))
857 goto done;
858
859 /* Update the pipeline state. */
860 if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
861 state = ISS_PIPELINE_STREAM_OUTPUT
862 | ISS_PIPELINE_QUEUE_OUTPUT;
863 else
864 state = ISS_PIPELINE_STREAM_INPUT
865 | ISS_PIPELINE_QUEUE_INPUT;
866
867 spin_lock_irqsave(&pipe->lock, flags);
868 pipe->state &= ~state;
869 spin_unlock_irqrestore(&pipe->lock, flags);
870
871 /* Stop the stream. */
872 omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_STOPPED);
873 vb2_streamoff(&vfh->queue, type);
874 video->queue = NULL;
875
876 if (video->iss->pdata->set_constraints)
877 video->iss->pdata->set_constraints(video->iss, false);
878 media_entity_pipeline_stop(&video->video.entity);
879
880done:
881 mutex_unlock(&video->stream_lock);
882 return 0;
883}
884
885static int
886iss_video_enum_input(struct file *file, void *fh, struct v4l2_input *input)
887{
888 if (input->index > 0)
889 return -EINVAL;
890
891 strlcpy(input->name, "camera", sizeof(input->name));
892 input->type = V4L2_INPUT_TYPE_CAMERA;
893
894 return 0;
895}
896
897static int
898iss_video_g_input(struct file *file, void *fh, unsigned int *input)
899{
900 *input = 0;
901
902 return 0;
903}
904
905static int
906iss_video_s_input(struct file *file, void *fh, unsigned int input)
907{
908 return input == 0 ? 0 : -EINVAL;
909}
910
911static const struct v4l2_ioctl_ops iss_video_ioctl_ops = {
912 .vidioc_querycap = iss_video_querycap,
913 .vidioc_g_fmt_vid_cap = iss_video_get_format,
914 .vidioc_s_fmt_vid_cap = iss_video_set_format,
915 .vidioc_try_fmt_vid_cap = iss_video_try_format,
916 .vidioc_g_fmt_vid_out = iss_video_get_format,
917 .vidioc_s_fmt_vid_out = iss_video_set_format,
918 .vidioc_try_fmt_vid_out = iss_video_try_format,
919 .vidioc_cropcap = iss_video_cropcap,
920 .vidioc_g_crop = iss_video_get_crop,
921 .vidioc_s_crop = iss_video_set_crop,
922 .vidioc_g_parm = iss_video_get_param,
923 .vidioc_s_parm = iss_video_set_param,
924 .vidioc_reqbufs = iss_video_reqbufs,
925 .vidioc_querybuf = iss_video_querybuf,
926 .vidioc_qbuf = iss_video_qbuf,
927 .vidioc_dqbuf = iss_video_dqbuf,
928 .vidioc_streamon = iss_video_streamon,
929 .vidioc_streamoff = iss_video_streamoff,
930 .vidioc_enum_input = iss_video_enum_input,
931 .vidioc_g_input = iss_video_g_input,
932 .vidioc_s_input = iss_video_s_input,
933};
934
935/* -----------------------------------------------------------------------------
936 * V4L2 file operations
937 */
938
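/*
 * Open a video device node: allocate a file handle, power up the ISS and the
 * entities in the pipeline, create a dma-contig allocation context and
 * initialise the MMAP-only vb2 queue attached to the handle.
 */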
939static int iss_video_open(struct file *file)
940{
941 struct iss_video *video = video_drvdata(file);
942 struct iss_video_fh *handle;
943 struct vb2_queue *q;
944 int ret = 0;
945
946 handle = kzalloc(sizeof(*handle), GFP_KERNEL);
947 if (handle == NULL)
948 return -ENOMEM;
949
950 v4l2_fh_init(&handle->vfh, &video->video);
951 v4l2_fh_add(&handle->vfh);
952
953 /* If this is the first user, initialise the pipeline. */
954 if (omap4iss_get(video->iss) == NULL) {
955 ret = -EBUSY;
956 goto done;
957 }
958
959 ret = omap4iss_pipeline_pm_use(&video->video.entity, 1);
960 if (ret < 0) {
961 omap4iss_put(video->iss);
962 goto done;
963 }
964
965 video->alloc_ctx = vb2_dma_contig_init_ctx(video->iss->dev);
966 if (IS_ERR(video->alloc_ctx)) {
967 ret = PTR_ERR(video->alloc_ctx);
968 omap4iss_put(video->iss);
969 goto done;
970 }
971
972 q = &handle->queue;
973
974 q->type = video->type;
975 q->io_modes = VB2_MMAP;
976 q->drv_priv = handle;
977 q->ops = &iss_video_vb2ops;
978 q->mem_ops = &vb2_dma_contig_memops;
979 q->buf_struct_size = sizeof(struct iss_buffer);
980 q->timestamp_type = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
981
982 ret = vb2_queue_init(q);
983 if (ret) {
984 omap4iss_put(video->iss);
985 goto done;
986 }
987
988 memset(&handle->format, 0, sizeof(handle->format));
989 handle->format.type = video->type;
990 handle->timeperframe.denominator = 1;
991
992 handle->video = video;
993 file->private_data = &handle->vfh;
994
995done:
996 if (ret < 0) {
997 v4l2_fh_del(&handle->vfh);
998 kfree(handle);
999 }
1000
1001 return ret;
1002}
1003
1004static int iss_video_release(struct file *file)
1005{
1006 struct iss_video *video = video_drvdata(file);
1007 struct v4l2_fh *vfh = file->private_data;
1008 struct iss_video_fh *handle = to_iss_video_fh(vfh);
1009
1010 /* Disable streaming and free the buffers queue resources. */
1011 iss_video_streamoff(file, vfh, video->type);
1012
1013 omap4iss_pipeline_pm_use(&video->video.entity, 0);
1014
1015 /* Release the videobuf2 queue */
1016 vb2_queue_release(&handle->queue);
1017
1018 /* Release the file handle. */
1019 v4l2_fh_del(vfh);
1020 kfree(handle);
1021 file->private_data = NULL;
1022
1023 omap4iss_put(video->iss);
1024
1025 return 0;
1026}
1027
1028static unsigned int iss_video_poll(struct file *file, poll_table *wait)
1029{
1030 struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);
1031
1032 return vb2_poll(&vfh->queue, file, wait);
1033}
1034
1035static int iss_video_mmap(struct file *file, struct vm_area_struct *vma)
1036{
1037 struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);
1038
 1039 return vb2_mmap(&vfh->queue, vma);
1040}
1041
1042static struct v4l2_file_operations iss_video_fops = {
1043 .owner = THIS_MODULE,
1044 .unlocked_ioctl = video_ioctl2,
1045 .open = iss_video_open,
1046 .release = iss_video_release,
1047 .poll = iss_video_poll,
1048 .mmap = iss_video_mmap,
1049};
1050
1051/* -----------------------------------------------------------------------------
1052 * ISS video core
1053 */
1054
1055static const struct iss_video_operations iss_video_dummy_ops = {
1056};
1057
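/*
 * omap4iss_video_init - Initialise an ISS video node
 * @video: ISS video instance
 * @name: name used to build the video device name (typically the owning
 *        module name)
 *
 * Set up the media entity pad (sink for capture nodes, source for output
 * nodes), the locks and the video_device fields. The device node itself is
 * registered separately by omap4iss_video_register().
 */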
1058int omap4iss_video_init(struct iss_video *video, const char *name)
1059{
1060 const char *direction;
1061 int ret;
1062
1063 switch (video->type) {
1064 case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1065 direction = "output";
1066 video->pad.flags = MEDIA_PAD_FL_SINK;
1067 break;
1068 case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1069 direction = "input";
1070 video->pad.flags = MEDIA_PAD_FL_SOURCE;
1071 break;
1072
1073 default:
1074 return -EINVAL;
1075 }
1076
1077 ret = media_entity_init(&video->video.entity, 1, &video->pad, 0);
1078 if (ret < 0)
1079 return ret;
1080
1081 mutex_init(&video->mutex);
1082 atomic_set(&video->active, 0);
1083
1084 spin_lock_init(&video->pipe.lock);
1085 mutex_init(&video->stream_lock);
1086
1087 /* Initialize the video device. */
1088 if (video->ops == NULL)
1089 video->ops = &iss_video_dummy_ops;
1090
1091 video->video.fops = &iss_video_fops;
1092 snprintf(video->video.name, sizeof(video->video.name),
1093 "OMAP4 ISS %s %s", name, direction);
1094 video->video.vfl_type = VFL_TYPE_GRABBER;
1095 video->video.release = video_device_release_empty;
1096 video->video.ioctl_ops = &iss_video_ioctl_ops;
1097 video->pipe.stream_state = ISS_PIPELINE_STREAM_STOPPED;
1098
1099 video_set_drvdata(&video->video, video);
1100
1101 return 0;
1102}
1103
1104void omap4iss_video_cleanup(struct iss_video *video)
1105{
1106 media_entity_cleanup(&video->video.entity);
1107 mutex_destroy(&video->stream_lock);
1108 mutex_destroy(&video->mutex);
1109}
1110
1111int omap4iss_video_register(struct iss_video *video, struct v4l2_device *vdev)
1112{
1113 int ret;
1114
1115 video->video.v4l2_dev = vdev;
1116
1117 ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
1118 if (ret < 0)
1119 printk(KERN_ERR "%s: could not register video device (%d)\n",
1120 __func__, ret);
1121
1122 return ret;
1123}
1124
1125void omap4iss_video_unregister(struct iss_video *video)
1126{
1127 video_unregister_device(&video->video);
1128}