/*
 * TI OMAP4 ISS V4L2 Driver - Generic video node
 *
 * Copyright (C) 2012 Texas Instruments, Inc.
 *
 * Author: Sergio Aguirre <sergio.a.aguirre@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
14 #include <asm/cacheflush.h>
15 #include <linux/clk.h>
17 #include <linux/pagemap.h>
18 #include <linux/sched.h>
19 #include <linux/slab.h>
20 #include <linux/vmalloc.h>
21 #include <linux/module.h>
23 #include <media/v4l2-dev.h>
24 #include <media/v4l2-ioctl.h>
25 #include <media/v4l2-mc.h>
27 #include "iss_video.h"
30 /* -----------------------------------------------------------------------------
34 static struct iss_format_info formats
[] = {
35 { MEDIA_BUS_FMT_Y8_1X8
, MEDIA_BUS_FMT_Y8_1X8
,
36 MEDIA_BUS_FMT_Y8_1X8
, MEDIA_BUS_FMT_Y8_1X8
,
37 V4L2_PIX_FMT_GREY
, 8, "Greyscale 8 bpp", },
38 { MEDIA_BUS_FMT_Y10_1X10
, MEDIA_BUS_FMT_Y10_1X10
,
39 MEDIA_BUS_FMT_Y10_1X10
, MEDIA_BUS_FMT_Y8_1X8
,
40 V4L2_PIX_FMT_Y10
, 10, "Greyscale 10 bpp", },
41 { MEDIA_BUS_FMT_Y12_1X12
, MEDIA_BUS_FMT_Y10_1X10
,
42 MEDIA_BUS_FMT_Y12_1X12
, MEDIA_BUS_FMT_Y8_1X8
,
43 V4L2_PIX_FMT_Y12
, 12, "Greyscale 12 bpp", },
44 { MEDIA_BUS_FMT_SBGGR8_1X8
, MEDIA_BUS_FMT_SBGGR8_1X8
,
45 MEDIA_BUS_FMT_SBGGR8_1X8
, MEDIA_BUS_FMT_SBGGR8_1X8
,
46 V4L2_PIX_FMT_SBGGR8
, 8, "BGGR Bayer 8 bpp", },
47 { MEDIA_BUS_FMT_SGBRG8_1X8
, MEDIA_BUS_FMT_SGBRG8_1X8
,
48 MEDIA_BUS_FMT_SGBRG8_1X8
, MEDIA_BUS_FMT_SGBRG8_1X8
,
49 V4L2_PIX_FMT_SGBRG8
, 8, "GBRG Bayer 8 bpp", },
50 { MEDIA_BUS_FMT_SGRBG8_1X8
, MEDIA_BUS_FMT_SGRBG8_1X8
,
51 MEDIA_BUS_FMT_SGRBG8_1X8
, MEDIA_BUS_FMT_SGRBG8_1X8
,
52 V4L2_PIX_FMT_SGRBG8
, 8, "GRBG Bayer 8 bpp", },
53 { MEDIA_BUS_FMT_SRGGB8_1X8
, MEDIA_BUS_FMT_SRGGB8_1X8
,
54 MEDIA_BUS_FMT_SRGGB8_1X8
, MEDIA_BUS_FMT_SRGGB8_1X8
,
55 V4L2_PIX_FMT_SRGGB8
, 8, "RGGB Bayer 8 bpp", },
56 { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8
, MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8
,
57 MEDIA_BUS_FMT_SGRBG10_1X10
, 0,
58 V4L2_PIX_FMT_SGRBG10DPCM8
, 8, "GRBG Bayer 10 bpp DPCM8", },
59 { MEDIA_BUS_FMT_SBGGR10_1X10
, MEDIA_BUS_FMT_SBGGR10_1X10
,
60 MEDIA_BUS_FMT_SBGGR10_1X10
, MEDIA_BUS_FMT_SBGGR8_1X8
,
61 V4L2_PIX_FMT_SBGGR10
, 10, "BGGR Bayer 10 bpp", },
62 { MEDIA_BUS_FMT_SGBRG10_1X10
, MEDIA_BUS_FMT_SGBRG10_1X10
,
63 MEDIA_BUS_FMT_SGBRG10_1X10
, MEDIA_BUS_FMT_SGBRG8_1X8
,
64 V4L2_PIX_FMT_SGBRG10
, 10, "GBRG Bayer 10 bpp", },
65 { MEDIA_BUS_FMT_SGRBG10_1X10
, MEDIA_BUS_FMT_SGRBG10_1X10
,
66 MEDIA_BUS_FMT_SGRBG10_1X10
, MEDIA_BUS_FMT_SGRBG8_1X8
,
67 V4L2_PIX_FMT_SGRBG10
, 10, "GRBG Bayer 10 bpp", },
68 { MEDIA_BUS_FMT_SRGGB10_1X10
, MEDIA_BUS_FMT_SRGGB10_1X10
,
69 MEDIA_BUS_FMT_SRGGB10_1X10
, MEDIA_BUS_FMT_SRGGB8_1X8
,
70 V4L2_PIX_FMT_SRGGB10
, 10, "RGGB Bayer 10 bpp", },
71 { MEDIA_BUS_FMT_SBGGR12_1X12
, MEDIA_BUS_FMT_SBGGR10_1X10
,
72 MEDIA_BUS_FMT_SBGGR12_1X12
, MEDIA_BUS_FMT_SBGGR8_1X8
,
73 V4L2_PIX_FMT_SBGGR12
, 12, "BGGR Bayer 12 bpp", },
74 { MEDIA_BUS_FMT_SGBRG12_1X12
, MEDIA_BUS_FMT_SGBRG10_1X10
,
75 MEDIA_BUS_FMT_SGBRG12_1X12
, MEDIA_BUS_FMT_SGBRG8_1X8
,
76 V4L2_PIX_FMT_SGBRG12
, 12, "GBRG Bayer 12 bpp", },
77 { MEDIA_BUS_FMT_SGRBG12_1X12
, MEDIA_BUS_FMT_SGRBG10_1X10
,
78 MEDIA_BUS_FMT_SGRBG12_1X12
, MEDIA_BUS_FMT_SGRBG8_1X8
,
79 V4L2_PIX_FMT_SGRBG12
, 12, "GRBG Bayer 12 bpp", },
80 { MEDIA_BUS_FMT_SRGGB12_1X12
, MEDIA_BUS_FMT_SRGGB10_1X10
,
81 MEDIA_BUS_FMT_SRGGB12_1X12
, MEDIA_BUS_FMT_SRGGB8_1X8
,
82 V4L2_PIX_FMT_SRGGB12
, 12, "RGGB Bayer 12 bpp", },
83 { MEDIA_BUS_FMT_UYVY8_1X16
, MEDIA_BUS_FMT_UYVY8_1X16
,
84 MEDIA_BUS_FMT_UYVY8_1X16
, 0,
85 V4L2_PIX_FMT_UYVY
, 16, "YUV 4:2:2 (UYVY)", },
86 { MEDIA_BUS_FMT_YUYV8_1X16
, MEDIA_BUS_FMT_YUYV8_1X16
,
87 MEDIA_BUS_FMT_YUYV8_1X16
, 0,
88 V4L2_PIX_FMT_YUYV
, 16, "YUV 4:2:2 (YUYV)", },
89 { MEDIA_BUS_FMT_YUYV8_1_5X8
, MEDIA_BUS_FMT_YUYV8_1_5X8
,
90 MEDIA_BUS_FMT_YUYV8_1_5X8
, 0,
91 V4L2_PIX_FMT_NV12
, 8, "YUV 4:2:0 (NV12)", },
94 const struct iss_format_info
*
95 omap4iss_video_format_info(u32 code
)
99 for (i
= 0; i
< ARRAY_SIZE(formats
); ++i
) {
100 if (formats
[i
].code
== code
)
108 * iss_video_mbus_to_pix - Convert v4l2_mbus_framefmt to v4l2_pix_format
109 * @video: ISS video instance
110 * @mbus: v4l2_mbus_framefmt format (input)
111 * @pix: v4l2_pix_format format (output)
113 * Fill the output pix structure with information from the input mbus format.
114 * The bytesperline and sizeimage fields are computed from the requested bytes
115 * per line value in the pix format and information from the video instance.
117 * Return the number of padding bytes at end of line.
119 static unsigned int iss_video_mbus_to_pix(const struct iss_video
*video
,
120 const struct v4l2_mbus_framefmt
*mbus
,
121 struct v4l2_pix_format
*pix
)
123 unsigned int bpl
= pix
->bytesperline
;
124 unsigned int min_bpl
;
127 memset(pix
, 0, sizeof(*pix
));
128 pix
->width
= mbus
->width
;
129 pix
->height
= mbus
->height
;
131 /* Skip the last format in the loop so that it will be selected if no
134 for (i
= 0; i
< ARRAY_SIZE(formats
) - 1; ++i
) {
135 if (formats
[i
].code
== mbus
->code
)
139 min_bpl
= pix
->width
* ALIGN(formats
[i
].bpp
, 8) / 8;
141 /* Clamp the requested bytes per line value. If the maximum bytes per
142 * line value is zero, the module doesn't support user configurable line
143 * sizes. Override the requested value with the minimum in that case.
146 bpl
= clamp(bpl
, min_bpl
, video
->bpl_max
);
150 if (!video
->bpl_zero_padding
|| bpl
!= min_bpl
)
151 bpl
= ALIGN(bpl
, video
->bpl_alignment
);
153 pix
->pixelformat
= formats
[i
].pixelformat
;
154 pix
->bytesperline
= bpl
;
155 pix
->sizeimage
= pix
->bytesperline
* pix
->height
;
156 pix
->colorspace
= mbus
->colorspace
;
157 pix
->field
= mbus
->field
;
159 /* FIXME: Special case for NV12! We should make this nicer... */
160 if (pix
->pixelformat
== V4L2_PIX_FMT_NV12
)
161 pix
->sizeimage
+= (pix
->bytesperline
* pix
->height
) / 2;
163 return bpl
- min_bpl
;
166 static void iss_video_pix_to_mbus(const struct v4l2_pix_format
*pix
,
167 struct v4l2_mbus_framefmt
*mbus
)
171 memset(mbus
, 0, sizeof(*mbus
));
172 mbus
->width
= pix
->width
;
173 mbus
->height
= pix
->height
;
175 /* Skip the last format in the loop so that it will be selected if no
178 for (i
= 0; i
< ARRAY_SIZE(formats
) - 1; ++i
) {
179 if (formats
[i
].pixelformat
== pix
->pixelformat
)
183 mbus
->code
= formats
[i
].code
;
184 mbus
->colorspace
= pix
->colorspace
;
185 mbus
->field
= pix
->field
;
188 static struct v4l2_subdev
*
189 iss_video_remote_subdev(struct iss_video
*video
, u32
*pad
)
191 struct media_pad
*remote
;
193 remote
= media_entity_remote_pad(&video
->pad
);
195 if (!remote
|| !is_media_entity_v4l2_subdev(remote
->entity
))
199 *pad
= remote
->index
;
201 return media_entity_to_v4l2_subdev(remote
->entity
);
204 /* Return a pointer to the ISS video instance at the far end of the pipeline. */
205 static struct iss_video
*
206 iss_video_far_end(struct iss_video
*video
)
208 struct media_graph graph
;
209 struct media_entity
*entity
= &video
->video
.entity
;
210 struct media_device
*mdev
= entity
->graph_obj
.mdev
;
211 struct iss_video
*far_end
= NULL
;
213 mutex_lock(&mdev
->graph_mutex
);
215 if (media_graph_walk_init(&graph
, mdev
)) {
216 mutex_unlock(&mdev
->graph_mutex
);
220 media_graph_walk_start(&graph
, entity
);
222 while ((entity
= media_graph_walk_next(&graph
))) {
223 if (entity
== &video
->video
.entity
)
226 if (!is_media_entity_v4l2_video_device(entity
))
229 far_end
= to_iss_video(media_entity_to_video_device(entity
));
230 if (far_end
->type
!= video
->type
)
236 mutex_unlock(&mdev
->graph_mutex
);
238 media_graph_walk_cleanup(&graph
);
244 __iss_video_get_format(struct iss_video
*video
,
245 struct v4l2_mbus_framefmt
*format
)
247 struct v4l2_subdev_format fmt
;
248 struct v4l2_subdev
*subdev
;
252 subdev
= iss_video_remote_subdev(video
, &pad
);
256 memset(&fmt
, 0, sizeof(fmt
));
258 fmt
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
260 mutex_lock(&video
->mutex
);
261 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &fmt
);
262 mutex_unlock(&video
->mutex
);
267 *format
= fmt
.format
;
272 iss_video_check_format(struct iss_video
*video
, struct iss_video_fh
*vfh
)
274 struct v4l2_mbus_framefmt format
;
275 struct v4l2_pix_format pixfmt
;
278 ret
= __iss_video_get_format(video
, &format
);
282 pixfmt
.bytesperline
= 0;
283 ret
= iss_video_mbus_to_pix(video
, &format
, &pixfmt
);
285 if (vfh
->format
.fmt
.pix
.pixelformat
!= pixfmt
.pixelformat
||
286 vfh
->format
.fmt
.pix
.height
!= pixfmt
.height
||
287 vfh
->format
.fmt
.pix
.width
!= pixfmt
.width
||
288 vfh
->format
.fmt
.pix
.bytesperline
!= pixfmt
.bytesperline
||
289 vfh
->format
.fmt
.pix
.sizeimage
!= pixfmt
.sizeimage
)
295 /* -----------------------------------------------------------------------------
296 * Video queue operations
299 static int iss_video_queue_setup(struct vb2_queue
*vq
,
300 unsigned int *count
, unsigned int *num_planes
,
301 unsigned int sizes
[], struct device
*alloc_devs
[])
303 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vq
);
304 struct iss_video
*video
= vfh
->video
;
306 /* Revisit multi-planar support for NV12 */
309 sizes
[0] = vfh
->format
.fmt
.pix
.sizeimage
;
313 *count
= min(*count
, video
->capture_mem
/ PAGE_ALIGN(sizes
[0]));
318 static void iss_video_buf_cleanup(struct vb2_buffer
*vb
)
320 struct vb2_v4l2_buffer
*vbuf
= to_vb2_v4l2_buffer(vb
);
321 struct iss_buffer
*buffer
= container_of(vbuf
, struct iss_buffer
, vb
);
323 if (buffer
->iss_addr
)
324 buffer
->iss_addr
= 0;
327 static int iss_video_buf_prepare(struct vb2_buffer
*vb
)
329 struct vb2_v4l2_buffer
*vbuf
= to_vb2_v4l2_buffer(vb
);
330 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vb
->vb2_queue
);
331 struct iss_buffer
*buffer
= container_of(vbuf
, struct iss_buffer
, vb
);
332 struct iss_video
*video
= vfh
->video
;
333 unsigned long size
= vfh
->format
.fmt
.pix
.sizeimage
;
336 if (vb2_plane_size(vb
, 0) < size
)
339 addr
= vb2_dma_contig_plane_dma_addr(vb
, 0);
340 if (!IS_ALIGNED(addr
, 32)) {
341 dev_dbg(video
->iss
->dev
,
342 "Buffer address must be aligned to 32 bytes boundary.\n");
346 vb2_set_plane_payload(vb
, 0, size
);
347 buffer
->iss_addr
= addr
;
351 static void iss_video_buf_queue(struct vb2_buffer
*vb
)
353 struct vb2_v4l2_buffer
*vbuf
= to_vb2_v4l2_buffer(vb
);
354 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vb
->vb2_queue
);
355 struct iss_video
*video
= vfh
->video
;
356 struct iss_buffer
*buffer
= container_of(vbuf
, struct iss_buffer
, vb
);
357 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
361 spin_lock_irqsave(&video
->qlock
, flags
);
363 /* Mark the buffer is faulty and give it back to the queue immediately
364 * if the video node has registered an error. vb2 will perform the same
365 * check when preparing the buffer, but that is inherently racy, so we
366 * need to handle the race condition with an authoritative check here.
368 if (unlikely(video
->error
)) {
369 vb2_buffer_done(vb
, VB2_BUF_STATE_ERROR
);
370 spin_unlock_irqrestore(&video
->qlock
, flags
);
374 empty
= list_empty(&video
->dmaqueue
);
375 list_add_tail(&buffer
->list
, &video
->dmaqueue
);
377 spin_unlock_irqrestore(&video
->qlock
, flags
);
380 enum iss_pipeline_state state
;
383 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
384 state
= ISS_PIPELINE_QUEUE_OUTPUT
;
386 state
= ISS_PIPELINE_QUEUE_INPUT
;
388 spin_lock_irqsave(&pipe
->lock
, flags
);
389 pipe
->state
|= state
;
390 video
->ops
->queue(video
, buffer
);
391 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_QUEUED
;
393 start
= iss_pipeline_ready(pipe
);
395 pipe
->state
|= ISS_PIPELINE_STREAM
;
396 spin_unlock_irqrestore(&pipe
->lock
, flags
);
399 omap4iss_pipeline_set_stream(pipe
,
400 ISS_PIPELINE_STREAM_SINGLESHOT
);
404 static const struct vb2_ops iss_video_vb2ops
= {
405 .queue_setup
= iss_video_queue_setup
,
406 .buf_prepare
= iss_video_buf_prepare
,
407 .buf_queue
= iss_video_buf_queue
,
408 .buf_cleanup
= iss_video_buf_cleanup
,
412 * omap4iss_video_buffer_next - Complete the current buffer and return the next
413 * @video: ISS video object
415 * Remove the current video buffer from the DMA queue and fill its timestamp,
416 * field count and state fields before waking up its completion handler.
418 * For capture video nodes, the buffer state is set to VB2_BUF_STATE_DONE if no
419 * error has been flagged in the pipeline, or to VB2_BUF_STATE_ERROR otherwise.
421 * The DMA queue is expected to contain at least one buffer.
423 * Return a pointer to the next buffer in the DMA queue, or NULL if the queue is
426 struct iss_buffer
*omap4iss_video_buffer_next(struct iss_video
*video
)
428 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
429 enum iss_pipeline_state state
;
430 struct iss_buffer
*buf
;
433 spin_lock_irqsave(&video
->qlock
, flags
);
434 if (WARN_ON(list_empty(&video
->dmaqueue
))) {
435 spin_unlock_irqrestore(&video
->qlock
, flags
);
439 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
441 list_del(&buf
->list
);
442 spin_unlock_irqrestore(&video
->qlock
, flags
);
444 buf
->vb
.vb2_buf
.timestamp
= ktime_get_ns();
446 /* Do frame number propagation only if this is the output video node.
447 * Frame number either comes from the CSI receivers or it gets
448 * incremented here if H3A is not active.
449 * Note: There is no guarantee that the output buffer will finish
450 * first, so the input number might lag behind by 1 in some cases.
452 if (video
== pipe
->output
&& !pipe
->do_propagation
)
454 atomic_inc_return(&pipe
->frame_number
);
456 buf
->vb
.sequence
= atomic_read(&pipe
->frame_number
);
458 vb2_buffer_done(&buf
->vb
.vb2_buf
, pipe
->error
?
459 VB2_BUF_STATE_ERROR
: VB2_BUF_STATE_DONE
);
462 spin_lock_irqsave(&video
->qlock
, flags
);
463 if (list_empty(&video
->dmaqueue
)) {
464 spin_unlock_irqrestore(&video
->qlock
, flags
);
465 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
466 state
= ISS_PIPELINE_QUEUE_OUTPUT
467 | ISS_PIPELINE_STREAM
;
469 state
= ISS_PIPELINE_QUEUE_INPUT
470 | ISS_PIPELINE_STREAM
;
472 spin_lock_irqsave(&pipe
->lock
, flags
);
473 pipe
->state
&= ~state
;
474 if (video
->pipe
.stream_state
== ISS_PIPELINE_STREAM_CONTINUOUS
)
475 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_UNDERRUN
;
476 spin_unlock_irqrestore(&pipe
->lock
, flags
);
480 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
&& pipe
->input
) {
481 spin_lock(&pipe
->lock
);
482 pipe
->state
&= ~ISS_PIPELINE_STREAM
;
483 spin_unlock(&pipe
->lock
);
486 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
488 spin_unlock_irqrestore(&video
->qlock
, flags
);
489 buf
->vb
.vb2_buf
.state
= VB2_BUF_STATE_ACTIVE
;
494 * omap4iss_video_cancel_stream - Cancel stream on a video node
495 * @video: ISS video object
497 * Cancelling a stream mark all buffers on the video node as erroneous and makes
498 * sure no new buffer can be queued.
500 void omap4iss_video_cancel_stream(struct iss_video
*video
)
504 spin_lock_irqsave(&video
->qlock
, flags
);
506 while (!list_empty(&video
->dmaqueue
)) {
507 struct iss_buffer
*buf
;
509 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
511 list_del(&buf
->list
);
512 vb2_buffer_done(&buf
->vb
.vb2_buf
, VB2_BUF_STATE_ERROR
);
515 vb2_queue_error(video
->queue
);
518 spin_unlock_irqrestore(&video
->qlock
, flags
);
521 /* -----------------------------------------------------------------------------
526 iss_video_querycap(struct file
*file
, void *fh
, struct v4l2_capability
*cap
)
528 struct iss_video
*video
= video_drvdata(file
);
530 strlcpy(cap
->driver
, ISS_VIDEO_DRIVER_NAME
, sizeof(cap
->driver
));
531 strlcpy(cap
->card
, video
->video
.name
, sizeof(cap
->card
));
532 strlcpy(cap
->bus_info
, "media", sizeof(cap
->bus_info
));
534 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
535 cap
->device_caps
= V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING
;
537 cap
->device_caps
= V4L2_CAP_VIDEO_OUTPUT
| V4L2_CAP_STREAMING
;
539 cap
->capabilities
= V4L2_CAP_DEVICE_CAPS
| V4L2_CAP_STREAMING
540 | V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_VIDEO_OUTPUT
;
546 iss_video_enum_format(struct file
*file
, void *fh
, struct v4l2_fmtdesc
*f
)
548 struct iss_video
*video
= video_drvdata(file
);
549 struct v4l2_mbus_framefmt format
;
550 unsigned int index
= f
->index
;
554 if (f
->type
!= video
->type
)
557 ret
= __iss_video_get_format(video
, &format
);
561 for (i
= 0; i
< ARRAY_SIZE(formats
); ++i
) {
562 const struct iss_format_info
*info
= &formats
[i
];
564 if (format
.code
!= info
->code
)
568 f
->pixelformat
= info
->pixelformat
;
569 strlcpy(f
->description
, info
->description
,
570 sizeof(f
->description
));
581 iss_video_get_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
583 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
584 struct iss_video
*video
= video_drvdata(file
);
586 if (format
->type
!= video
->type
)
589 mutex_lock(&video
->mutex
);
590 *format
= vfh
->format
;
591 mutex_unlock(&video
->mutex
);
597 iss_video_set_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
599 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
600 struct iss_video
*video
= video_drvdata(file
);
601 struct v4l2_mbus_framefmt fmt
;
603 if (format
->type
!= video
->type
)
606 mutex_lock(&video
->mutex
);
608 /* Fill the bytesperline and sizeimage fields by converting to media bus
609 * format and back to pixel format.
611 iss_video_pix_to_mbus(&format
->fmt
.pix
, &fmt
);
612 iss_video_mbus_to_pix(video
, &fmt
, &format
->fmt
.pix
);
614 vfh
->format
= *format
;
616 mutex_unlock(&video
->mutex
);
621 iss_video_try_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
623 struct iss_video
*video
= video_drvdata(file
);
624 struct v4l2_subdev_format fmt
;
625 struct v4l2_subdev
*subdev
;
629 if (format
->type
!= video
->type
)
632 subdev
= iss_video_remote_subdev(video
, &pad
);
636 iss_video_pix_to_mbus(&format
->fmt
.pix
, &fmt
.format
);
639 fmt
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
640 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &fmt
);
644 iss_video_mbus_to_pix(video
, &fmt
.format
, &format
->fmt
.pix
);
649 iss_video_get_selection(struct file
*file
, void *fh
, struct v4l2_selection
*sel
)
651 struct iss_video
*video
= video_drvdata(file
);
652 struct v4l2_subdev_format format
;
653 struct v4l2_subdev
*subdev
;
654 struct v4l2_subdev_selection sdsel
= {
655 .which
= V4L2_SUBDEV_FORMAT_ACTIVE
,
656 .target
= sel
->target
,
661 switch (sel
->target
) {
662 case V4L2_SEL_TGT_CROP
:
663 case V4L2_SEL_TGT_CROP_BOUNDS
:
664 case V4L2_SEL_TGT_CROP_DEFAULT
:
665 if (video
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT
)
668 case V4L2_SEL_TGT_COMPOSE
:
669 case V4L2_SEL_TGT_COMPOSE_BOUNDS
:
670 case V4L2_SEL_TGT_COMPOSE_DEFAULT
:
671 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
677 subdev
= iss_video_remote_subdev(video
, &pad
);
681 /* Try the get selection operation first and fallback to get format if not
685 ret
= v4l2_subdev_call(subdev
, pad
, get_selection
, NULL
, &sdsel
);
688 if (ret
!= -ENOIOCTLCMD
)
692 format
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
693 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &format
);
695 return ret
== -ENOIOCTLCMD
? -ENOTTY
: ret
;
699 sel
->r
.width
= format
.format
.width
;
700 sel
->r
.height
= format
.format
.height
;
706 iss_video_set_selection(struct file
*file
, void *fh
, struct v4l2_selection
*sel
)
708 struct iss_video
*video
= video_drvdata(file
);
709 struct v4l2_subdev
*subdev
;
710 struct v4l2_subdev_selection sdsel
= {
711 .which
= V4L2_SUBDEV_FORMAT_ACTIVE
,
712 .target
= sel
->target
,
719 switch (sel
->target
) {
720 case V4L2_SEL_TGT_CROP
:
721 if (video
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT
)
724 case V4L2_SEL_TGT_COMPOSE
:
725 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
731 subdev
= iss_video_remote_subdev(video
, &pad
);
736 mutex_lock(&video
->mutex
);
737 ret
= v4l2_subdev_call(subdev
, pad
, set_selection
, NULL
, &sdsel
);
738 mutex_unlock(&video
->mutex
);
742 return ret
== -ENOIOCTLCMD
? -ENOTTY
: ret
;
746 iss_video_get_param(struct file
*file
, void *fh
, struct v4l2_streamparm
*a
)
748 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
749 struct iss_video
*video
= video_drvdata(file
);
751 if (video
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT
||
752 video
->type
!= a
->type
)
755 memset(a
, 0, sizeof(*a
));
756 a
->type
= V4L2_BUF_TYPE_VIDEO_OUTPUT
;
757 a
->parm
.output
.capability
= V4L2_CAP_TIMEPERFRAME
;
758 a
->parm
.output
.timeperframe
= vfh
->timeperframe
;
764 iss_video_set_param(struct file
*file
, void *fh
, struct v4l2_streamparm
*a
)
766 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
767 struct iss_video
*video
= video_drvdata(file
);
769 if (video
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT
||
770 video
->type
!= a
->type
)
773 if (a
->parm
.output
.timeperframe
.denominator
== 0)
774 a
->parm
.output
.timeperframe
.denominator
= 1;
776 vfh
->timeperframe
= a
->parm
.output
.timeperframe
;
782 iss_video_reqbufs(struct file
*file
, void *fh
, struct v4l2_requestbuffers
*rb
)
784 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
786 return vb2_reqbufs(&vfh
->queue
, rb
);
790 iss_video_querybuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
792 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
794 return vb2_querybuf(&vfh
->queue
, b
);
798 iss_video_qbuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
800 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
802 return vb2_qbuf(&vfh
->queue
, b
);
806 iss_video_expbuf(struct file
*file
, void *fh
, struct v4l2_exportbuffer
*e
)
808 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
810 return vb2_expbuf(&vfh
->queue
, e
);
814 iss_video_dqbuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
816 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
818 return vb2_dqbuf(&vfh
->queue
, b
, file
->f_flags
& O_NONBLOCK
);
/*
 * Stream management
 *
 * Every ISS pipeline has a single input and a single output. The input can be
 * either a sensor or a video node. The output is always a video node.
 *
 * As every pipeline has an output video node, the ISS video objects at the
 * pipeline output stores the pipeline state. It tracks the streaming state of
 * both the input and output, as well as the availability of buffers.
 *
 * In sensor-to-memory mode, frames are always available at the pipeline input.
 * Starting the sensor usually requires I2C transfers and must be done in
 * interruptible context. The pipeline is started and stopped synchronously
 * to the stream on/off commands. All modules in the pipeline will get their
 * subdev set stream handler called. The module at the end of the pipeline must
 * delay starting the hardware until buffers are available at its output.
 *
 * In memory-to-memory mode, starting/stopping the stream requires
 * synchronization between the input and output. ISS modules can't be stopped
 * in the middle of a frame, and at least some of the modules seem to become
 * busy as soon as they're started, even if they don't receive a frame start
 * event. For that reason frames need to be processed in single-shot mode. The
 * driver needs to wait until a frame is completely processed and written to
 * memory before restarting the pipeline for the next frame. Pipelined
 * processing might be possible but requires more testing.
 *
 * Stream start must be delayed until buffers are available at both the input
 * and output. The pipeline must be started in the videobuf queue callback with
 * the buffers queue spinlock held. The modules subdev set stream operation must
 * not sleep.
 */
853 iss_video_streamon(struct file
*file
, void *fh
, enum v4l2_buf_type type
)
855 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
856 struct iss_video
*video
= video_drvdata(file
);
857 struct media_graph graph
;
858 struct media_entity
*entity
= &video
->video
.entity
;
859 enum iss_pipeline_state state
;
860 struct iss_pipeline
*pipe
;
861 struct iss_video
*far_end
;
865 if (type
!= video
->type
)
868 mutex_lock(&video
->stream_lock
);
870 /* Start streaming on the pipeline. No link touching an entity in the
871 * pipeline can be activated or deactivated once streaming is started.
874 ? to_iss_pipeline(entity
) : &video
->pipe
;
875 pipe
->external
= NULL
;
876 pipe
->external_rate
= 0;
877 pipe
->external_bpp
= 0;
879 ret
= media_entity_enum_init(&pipe
->ent_enum
, entity
->graph_obj
.mdev
);
881 goto err_graph_walk_init
;
883 ret
= media_graph_walk_init(&graph
, entity
->graph_obj
.mdev
);
885 goto err_graph_walk_init
;
887 if (video
->iss
->pdata
->set_constraints
)
888 video
->iss
->pdata
->set_constraints(video
->iss
, true);
890 ret
= media_pipeline_start(entity
, &pipe
->pipe
);
892 goto err_media_pipeline_start
;
894 media_graph_walk_start(&graph
, entity
);
895 while ((entity
= media_graph_walk_next(&graph
)))
896 media_entity_enum_set(&pipe
->ent_enum
, entity
);
898 /* Verify that the currently configured format matches the output of
899 * the connected subdev.
901 ret
= iss_video_check_format(video
, vfh
);
903 goto err_iss_video_check_format
;
905 video
->bpl_padding
= ret
;
906 video
->bpl_value
= vfh
->format
.fmt
.pix
.bytesperline
;
908 /* Find the ISS video node connected at the far end of the pipeline and
909 * update the pipeline.
911 far_end
= iss_video_far_end(video
);
913 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
) {
914 state
= ISS_PIPELINE_STREAM_OUTPUT
| ISS_PIPELINE_IDLE_OUTPUT
;
915 pipe
->input
= far_end
;
916 pipe
->output
= video
;
920 goto err_iss_video_check_format
;
923 state
= ISS_PIPELINE_STREAM_INPUT
| ISS_PIPELINE_IDLE_INPUT
;
925 pipe
->output
= far_end
;
928 spin_lock_irqsave(&pipe
->lock
, flags
);
929 pipe
->state
&= ~ISS_PIPELINE_STREAM
;
930 pipe
->state
|= state
;
931 spin_unlock_irqrestore(&pipe
->lock
, flags
);
933 /* Set the maximum time per frame as the value requested by userspace.
934 * This is a soft limit that can be overridden if the hardware doesn't
935 * support the request limit.
937 if (video
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT
)
938 pipe
->max_timeperframe
= vfh
->timeperframe
;
940 video
->queue
= &vfh
->queue
;
941 INIT_LIST_HEAD(&video
->dmaqueue
);
942 video
->error
= false;
943 atomic_set(&pipe
->frame_number
, -1);
945 ret
= vb2_streamon(&vfh
->queue
, type
);
947 goto err_iss_video_check_format
;
949 /* In sensor-to-memory mode, the stream can be started synchronously
950 * to the stream on command. In memory-to-memory mode, it will be
951 * started when buffers are queued on both the input and output.
956 ret
= omap4iss_pipeline_set_stream(pipe
,
957 ISS_PIPELINE_STREAM_CONTINUOUS
);
959 goto err_omap4iss_set_stream
;
960 spin_lock_irqsave(&video
->qlock
, flags
);
961 if (list_empty(&video
->dmaqueue
))
962 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_UNDERRUN
;
963 spin_unlock_irqrestore(&video
->qlock
, flags
);
966 media_graph_walk_cleanup(&graph
);
968 mutex_unlock(&video
->stream_lock
);
972 err_omap4iss_set_stream
:
973 vb2_streamoff(&vfh
->queue
, type
);
974 err_iss_video_check_format
:
975 media_pipeline_stop(&video
->video
.entity
);
976 err_media_pipeline_start
:
977 if (video
->iss
->pdata
->set_constraints
)
978 video
->iss
->pdata
->set_constraints(video
->iss
, false);
981 media_graph_walk_cleanup(&graph
);
984 media_entity_enum_cleanup(&pipe
->ent_enum
);
986 mutex_unlock(&video
->stream_lock
);
992 iss_video_streamoff(struct file
*file
, void *fh
, enum v4l2_buf_type type
)
994 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
995 struct iss_video
*video
= video_drvdata(file
);
996 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
997 enum iss_pipeline_state state
;
1000 if (type
!= video
->type
)
1003 mutex_lock(&video
->stream_lock
);
1005 if (!vb2_is_streaming(&vfh
->queue
))
1008 /* Update the pipeline state. */
1009 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
1010 state
= ISS_PIPELINE_STREAM_OUTPUT
1011 | ISS_PIPELINE_QUEUE_OUTPUT
;
1013 state
= ISS_PIPELINE_STREAM_INPUT
1014 | ISS_PIPELINE_QUEUE_INPUT
;
1016 spin_lock_irqsave(&pipe
->lock
, flags
);
1017 pipe
->state
&= ~state
;
1018 spin_unlock_irqrestore(&pipe
->lock
, flags
);
1020 /* Stop the stream. */
1021 omap4iss_pipeline_set_stream(pipe
, ISS_PIPELINE_STREAM_STOPPED
);
1022 vb2_streamoff(&vfh
->queue
, type
);
1023 video
->queue
= NULL
;
1025 media_entity_enum_cleanup(&pipe
->ent_enum
);
1027 if (video
->iss
->pdata
->set_constraints
)
1028 video
->iss
->pdata
->set_constraints(video
->iss
, false);
1029 media_pipeline_stop(&video
->video
.entity
);
1032 mutex_unlock(&video
->stream_lock
);
1037 iss_video_enum_input(struct file
*file
, void *fh
, struct v4l2_input
*input
)
1039 if (input
->index
> 0)
1042 strlcpy(input
->name
, "camera", sizeof(input
->name
));
1043 input
->type
= V4L2_INPUT_TYPE_CAMERA
;
/* VIDIOC_G_INPUT handler: the only input is always selected. */
static int
iss_video_g_input(struct file *file, void *fh, unsigned int *input)
{
	*input = 0;

	return 0;
}
1057 iss_video_s_input(struct file
*file
, void *fh
, unsigned int input
)
1059 return input
== 0 ? 0 : -EINVAL
;
1062 static const struct v4l2_ioctl_ops iss_video_ioctl_ops
= {
1063 .vidioc_querycap
= iss_video_querycap
,
1064 .vidioc_enum_fmt_vid_cap
= iss_video_enum_format
,
1065 .vidioc_g_fmt_vid_cap
= iss_video_get_format
,
1066 .vidioc_s_fmt_vid_cap
= iss_video_set_format
,
1067 .vidioc_try_fmt_vid_cap
= iss_video_try_format
,
1068 .vidioc_g_fmt_vid_out
= iss_video_get_format
,
1069 .vidioc_s_fmt_vid_out
= iss_video_set_format
,
1070 .vidioc_try_fmt_vid_out
= iss_video_try_format
,
1071 .vidioc_g_selection
= iss_video_get_selection
,
1072 .vidioc_s_selection
= iss_video_set_selection
,
1073 .vidioc_g_parm
= iss_video_get_param
,
1074 .vidioc_s_parm
= iss_video_set_param
,
1075 .vidioc_reqbufs
= iss_video_reqbufs
,
1076 .vidioc_querybuf
= iss_video_querybuf
,
1077 .vidioc_qbuf
= iss_video_qbuf
,
1078 .vidioc_expbuf
= iss_video_expbuf
,
1079 .vidioc_dqbuf
= iss_video_dqbuf
,
1080 .vidioc_streamon
= iss_video_streamon
,
1081 .vidioc_streamoff
= iss_video_streamoff
,
1082 .vidioc_enum_input
= iss_video_enum_input
,
1083 .vidioc_g_input
= iss_video_g_input
,
1084 .vidioc_s_input
= iss_video_s_input
,
1087 /* -----------------------------------------------------------------------------
1088 * V4L2 file operations
1091 static int iss_video_open(struct file
*file
)
1093 struct iss_video
*video
= video_drvdata(file
);
1094 struct iss_video_fh
*handle
;
1095 struct vb2_queue
*q
;
1098 handle
= kzalloc(sizeof(*handle
), GFP_KERNEL
);
1102 v4l2_fh_init(&handle
->vfh
, &video
->video
);
1103 v4l2_fh_add(&handle
->vfh
);
1105 /* If this is the first user, initialise the pipeline. */
1106 if (!omap4iss_get(video
->iss
)) {
1111 ret
= v4l2_pipeline_pm_use(&video
->video
.entity
, 1);
1113 omap4iss_put(video
->iss
);
1119 q
->type
= video
->type
;
1120 q
->io_modes
= VB2_MMAP
| VB2_DMABUF
;
1121 q
->drv_priv
= handle
;
1122 q
->ops
= &iss_video_vb2ops
;
1123 q
->mem_ops
= &vb2_dma_contig_memops
;
1124 q
->buf_struct_size
= sizeof(struct iss_buffer
);
1125 q
->timestamp_flags
= V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC
;
1126 q
->dev
= video
->iss
->dev
;
1128 ret
= vb2_queue_init(q
);
1130 omap4iss_put(video
->iss
);
1134 memset(&handle
->format
, 0, sizeof(handle
->format
));
1135 handle
->format
.type
= video
->type
;
1136 handle
->timeperframe
.denominator
= 1;
1138 handle
->video
= video
;
1139 file
->private_data
= &handle
->vfh
;
1143 v4l2_fh_del(&handle
->vfh
);
1144 v4l2_fh_exit(&handle
->vfh
);
1151 static int iss_video_release(struct file
*file
)
1153 struct iss_video
*video
= video_drvdata(file
);
1154 struct v4l2_fh
*vfh
= file
->private_data
;
1155 struct iss_video_fh
*handle
= to_iss_video_fh(vfh
);
1157 /* Disable streaming and free the buffers queue resources. */
1158 iss_video_streamoff(file
, vfh
, video
->type
);
1160 v4l2_pipeline_pm_use(&video
->video
.entity
, 0);
1162 /* Release the videobuf2 queue */
1163 vb2_queue_release(&handle
->queue
);
1168 file
->private_data
= NULL
;
1170 omap4iss_put(video
->iss
);
1175 static unsigned int iss_video_poll(struct file
*file
, poll_table
*wait
)
1177 struct iss_video_fh
*vfh
= to_iss_video_fh(file
->private_data
);
1179 return vb2_poll(&vfh
->queue
, file
, wait
);
1182 static int iss_video_mmap(struct file
*file
, struct vm_area_struct
*vma
)
1184 struct iss_video_fh
*vfh
= to_iss_video_fh(file
->private_data
);
1186 return vb2_mmap(&vfh
->queue
, vma
);
1189 static struct v4l2_file_operations iss_video_fops
= {
1190 .owner
= THIS_MODULE
,
1191 .unlocked_ioctl
= video_ioctl2
,
1192 .open
= iss_video_open
,
1193 .release
= iss_video_release
,
1194 .poll
= iss_video_poll
,
1195 .mmap
= iss_video_mmap
,
1198 /* -----------------------------------------------------------------------------
1202 static const struct iss_video_operations iss_video_dummy_ops
= {
1205 int omap4iss_video_init(struct iss_video
*video
, const char *name
)
1207 const char *direction
;
1210 switch (video
->type
) {
1211 case V4L2_BUF_TYPE_VIDEO_CAPTURE
:
1212 direction
= "output";
1213 video
->pad
.flags
= MEDIA_PAD_FL_SINK
;
1215 case V4L2_BUF_TYPE_VIDEO_OUTPUT
:
1216 direction
= "input";
1217 video
->pad
.flags
= MEDIA_PAD_FL_SOURCE
;
1224 ret
= media_entity_pads_init(&video
->video
.entity
, 1, &video
->pad
);
1228 spin_lock_init(&video
->qlock
);
1229 mutex_init(&video
->mutex
);
1230 atomic_set(&video
->active
, 0);
1232 spin_lock_init(&video
->pipe
.lock
);
1233 mutex_init(&video
->stream_lock
);
1235 /* Initialize the video device. */
1237 video
->ops
= &iss_video_dummy_ops
;
1239 video
->video
.fops
= &iss_video_fops
;
1240 snprintf(video
->video
.name
, sizeof(video
->video
.name
),
1241 "OMAP4 ISS %s %s", name
, direction
);
1242 video
->video
.vfl_type
= VFL_TYPE_GRABBER
;
1243 video
->video
.release
= video_device_release_empty
;
1244 video
->video
.ioctl_ops
= &iss_video_ioctl_ops
;
1245 video
->pipe
.stream_state
= ISS_PIPELINE_STREAM_STOPPED
;
1247 video_set_drvdata(&video
->video
, video
);
1252 void omap4iss_video_cleanup(struct iss_video
*video
)
1254 media_entity_cleanup(&video
->video
.entity
);
1255 mutex_destroy(&video
->stream_lock
);
1256 mutex_destroy(&video
->mutex
);
1259 int omap4iss_video_register(struct iss_video
*video
, struct v4l2_device
*vdev
)
1263 video
->video
.v4l2_dev
= vdev
;
1265 ret
= video_register_device(&video
->video
, VFL_TYPE_GRABBER
, -1);
1267 dev_err(video
->iss
->dev
,
1268 "could not register video device (%d)\n", ret
);
1273 void omap4iss_video_unregister(struct iss_video
*video
)
1275 video_unregister_device(&video
->video
);