/*
 * TI OMAP4 ISS V4L2 Driver - Generic video node
 *
 * Copyright (C) 2012 Texas Instruments, Inc.
 *
 * Author: Sergio Aguirre <sergio.a.aguirre@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
14 #include <asm/cacheflush.h>
15 #include <linux/clk.h>
17 #include <linux/pagemap.h>
18 #include <linux/sched.h>
19 #include <linux/slab.h>
20 #include <linux/vmalloc.h>
21 #include <linux/module.h>
22 #include <media/v4l2-dev.h>
23 #include <media/v4l2-ioctl.h>
25 #include "iss_video.h"
29 /* -----------------------------------------------------------------------------
33 static struct iss_format_info formats
[] = {
34 { MEDIA_BUS_FMT_Y8_1X8
, MEDIA_BUS_FMT_Y8_1X8
,
35 MEDIA_BUS_FMT_Y8_1X8
, MEDIA_BUS_FMT_Y8_1X8
,
36 V4L2_PIX_FMT_GREY
, 8, "Greyscale 8 bpp", },
37 { MEDIA_BUS_FMT_Y10_1X10
, MEDIA_BUS_FMT_Y10_1X10
,
38 MEDIA_BUS_FMT_Y10_1X10
, MEDIA_BUS_FMT_Y8_1X8
,
39 V4L2_PIX_FMT_Y10
, 10, "Greyscale 10 bpp", },
40 { MEDIA_BUS_FMT_Y12_1X12
, MEDIA_BUS_FMT_Y10_1X10
,
41 MEDIA_BUS_FMT_Y12_1X12
, MEDIA_BUS_FMT_Y8_1X8
,
42 V4L2_PIX_FMT_Y12
, 12, "Greyscale 12 bpp", },
43 { MEDIA_BUS_FMT_SBGGR8_1X8
, MEDIA_BUS_FMT_SBGGR8_1X8
,
44 MEDIA_BUS_FMT_SBGGR8_1X8
, MEDIA_BUS_FMT_SBGGR8_1X8
,
45 V4L2_PIX_FMT_SBGGR8
, 8, "BGGR Bayer 8 bpp", },
46 { MEDIA_BUS_FMT_SGBRG8_1X8
, MEDIA_BUS_FMT_SGBRG8_1X8
,
47 MEDIA_BUS_FMT_SGBRG8_1X8
, MEDIA_BUS_FMT_SGBRG8_1X8
,
48 V4L2_PIX_FMT_SGBRG8
, 8, "GBRG Bayer 8 bpp", },
49 { MEDIA_BUS_FMT_SGRBG8_1X8
, MEDIA_BUS_FMT_SGRBG8_1X8
,
50 MEDIA_BUS_FMT_SGRBG8_1X8
, MEDIA_BUS_FMT_SGRBG8_1X8
,
51 V4L2_PIX_FMT_SGRBG8
, 8, "GRBG Bayer 8 bpp", },
52 { MEDIA_BUS_FMT_SRGGB8_1X8
, MEDIA_BUS_FMT_SRGGB8_1X8
,
53 MEDIA_BUS_FMT_SRGGB8_1X8
, MEDIA_BUS_FMT_SRGGB8_1X8
,
54 V4L2_PIX_FMT_SRGGB8
, 8, "RGGB Bayer 8 bpp", },
55 { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8
, MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8
,
56 MEDIA_BUS_FMT_SGRBG10_1X10
, 0,
57 V4L2_PIX_FMT_SGRBG10DPCM8
, 8, "GRBG Bayer 10 bpp DPCM8", },
58 { MEDIA_BUS_FMT_SBGGR10_1X10
, MEDIA_BUS_FMT_SBGGR10_1X10
,
59 MEDIA_BUS_FMT_SBGGR10_1X10
, MEDIA_BUS_FMT_SBGGR8_1X8
,
60 V4L2_PIX_FMT_SBGGR10
, 10, "BGGR Bayer 10 bpp", },
61 { MEDIA_BUS_FMT_SGBRG10_1X10
, MEDIA_BUS_FMT_SGBRG10_1X10
,
62 MEDIA_BUS_FMT_SGBRG10_1X10
, MEDIA_BUS_FMT_SGBRG8_1X8
,
63 V4L2_PIX_FMT_SGBRG10
, 10, "GBRG Bayer 10 bpp", },
64 { MEDIA_BUS_FMT_SGRBG10_1X10
, MEDIA_BUS_FMT_SGRBG10_1X10
,
65 MEDIA_BUS_FMT_SGRBG10_1X10
, MEDIA_BUS_FMT_SGRBG8_1X8
,
66 V4L2_PIX_FMT_SGRBG10
, 10, "GRBG Bayer 10 bpp", },
67 { MEDIA_BUS_FMT_SRGGB10_1X10
, MEDIA_BUS_FMT_SRGGB10_1X10
,
68 MEDIA_BUS_FMT_SRGGB10_1X10
, MEDIA_BUS_FMT_SRGGB8_1X8
,
69 V4L2_PIX_FMT_SRGGB10
, 10, "RGGB Bayer 10 bpp", },
70 { MEDIA_BUS_FMT_SBGGR12_1X12
, MEDIA_BUS_FMT_SBGGR10_1X10
,
71 MEDIA_BUS_FMT_SBGGR12_1X12
, MEDIA_BUS_FMT_SBGGR8_1X8
,
72 V4L2_PIX_FMT_SBGGR12
, 12, "BGGR Bayer 12 bpp", },
73 { MEDIA_BUS_FMT_SGBRG12_1X12
, MEDIA_BUS_FMT_SGBRG10_1X10
,
74 MEDIA_BUS_FMT_SGBRG12_1X12
, MEDIA_BUS_FMT_SGBRG8_1X8
,
75 V4L2_PIX_FMT_SGBRG12
, 12, "GBRG Bayer 12 bpp", },
76 { MEDIA_BUS_FMT_SGRBG12_1X12
, MEDIA_BUS_FMT_SGRBG10_1X10
,
77 MEDIA_BUS_FMT_SGRBG12_1X12
, MEDIA_BUS_FMT_SGRBG8_1X8
,
78 V4L2_PIX_FMT_SGRBG12
, 12, "GRBG Bayer 12 bpp", },
79 { MEDIA_BUS_FMT_SRGGB12_1X12
, MEDIA_BUS_FMT_SRGGB10_1X10
,
80 MEDIA_BUS_FMT_SRGGB12_1X12
, MEDIA_BUS_FMT_SRGGB8_1X8
,
81 V4L2_PIX_FMT_SRGGB12
, 12, "RGGB Bayer 12 bpp", },
82 { MEDIA_BUS_FMT_UYVY8_1X16
, MEDIA_BUS_FMT_UYVY8_1X16
,
83 MEDIA_BUS_FMT_UYVY8_1X16
, 0,
84 V4L2_PIX_FMT_UYVY
, 16, "YUV 4:2:2 (UYVY)", },
85 { MEDIA_BUS_FMT_YUYV8_1X16
, MEDIA_BUS_FMT_YUYV8_1X16
,
86 MEDIA_BUS_FMT_YUYV8_1X16
, 0,
87 V4L2_PIX_FMT_YUYV
, 16, "YUV 4:2:2 (YUYV)", },
88 { MEDIA_BUS_FMT_YUYV8_1_5X8
, MEDIA_BUS_FMT_YUYV8_1_5X8
,
89 MEDIA_BUS_FMT_YUYV8_1_5X8
, 0,
90 V4L2_PIX_FMT_NV12
, 8, "YUV 4:2:0 (NV12)", },
93 const struct iss_format_info
*
94 omap4iss_video_format_info(u32 code
)
98 for (i
= 0; i
< ARRAY_SIZE(formats
); ++i
) {
99 if (formats
[i
].code
== code
)
107 * iss_video_mbus_to_pix - Convert v4l2_mbus_framefmt to v4l2_pix_format
108 * @video: ISS video instance
109 * @mbus: v4l2_mbus_framefmt format (input)
110 * @pix: v4l2_pix_format format (output)
112 * Fill the output pix structure with information from the input mbus format.
113 * The bytesperline and sizeimage fields are computed from the requested bytes
114 * per line value in the pix format and information from the video instance.
116 * Return the number of padding bytes at end of line.
118 static unsigned int iss_video_mbus_to_pix(const struct iss_video
*video
,
119 const struct v4l2_mbus_framefmt
*mbus
,
120 struct v4l2_pix_format
*pix
)
122 unsigned int bpl
= pix
->bytesperline
;
123 unsigned int min_bpl
;
126 memset(pix
, 0, sizeof(*pix
));
127 pix
->width
= mbus
->width
;
128 pix
->height
= mbus
->height
;
130 /* Skip the last format in the loop so that it will be selected if no
133 for (i
= 0; i
< ARRAY_SIZE(formats
) - 1; ++i
) {
134 if (formats
[i
].code
== mbus
->code
)
138 min_bpl
= pix
->width
* ALIGN(formats
[i
].bpp
, 8) / 8;
140 /* Clamp the requested bytes per line value. If the maximum bytes per
141 * line value is zero, the module doesn't support user configurable line
142 * sizes. Override the requested value with the minimum in that case.
145 bpl
= clamp(bpl
, min_bpl
, video
->bpl_max
);
149 if (!video
->bpl_zero_padding
|| bpl
!= min_bpl
)
150 bpl
= ALIGN(bpl
, video
->bpl_alignment
);
152 pix
->pixelformat
= formats
[i
].pixelformat
;
153 pix
->bytesperline
= bpl
;
154 pix
->sizeimage
= pix
->bytesperline
* pix
->height
;
155 pix
->colorspace
= mbus
->colorspace
;
156 pix
->field
= mbus
->field
;
158 /* FIXME: Special case for NV12! We should make this nicer... */
159 if (pix
->pixelformat
== V4L2_PIX_FMT_NV12
)
160 pix
->sizeimage
+= (pix
->bytesperline
* pix
->height
) / 2;
162 return bpl
- min_bpl
;
165 static void iss_video_pix_to_mbus(const struct v4l2_pix_format
*pix
,
166 struct v4l2_mbus_framefmt
*mbus
)
170 memset(mbus
, 0, sizeof(*mbus
));
171 mbus
->width
= pix
->width
;
172 mbus
->height
= pix
->height
;
174 for (i
= 0; i
< ARRAY_SIZE(formats
); ++i
) {
175 if (formats
[i
].pixelformat
== pix
->pixelformat
)
179 if (WARN_ON(i
== ARRAY_SIZE(formats
)))
182 mbus
->code
= formats
[i
].code
;
183 mbus
->colorspace
= pix
->colorspace
;
184 mbus
->field
= pix
->field
;
187 static struct v4l2_subdev
*
188 iss_video_remote_subdev(struct iss_video
*video
, u32
*pad
)
190 struct media_pad
*remote
;
192 remote
= media_entity_remote_pad(&video
->pad
);
194 if (remote
== NULL
||
195 media_entity_type(remote
->entity
) != MEDIA_ENT_T_V4L2_SUBDEV
)
199 *pad
= remote
->index
;
201 return media_entity_to_v4l2_subdev(remote
->entity
);
204 /* Return a pointer to the ISS video instance at the far end of the pipeline. */
205 static struct iss_video
*
206 iss_video_far_end(struct iss_video
*video
)
208 struct media_entity_graph graph
;
209 struct media_entity
*entity
= &video
->video
.entity
;
210 struct media_device
*mdev
= entity
->parent
;
211 struct iss_video
*far_end
= NULL
;
213 mutex_lock(&mdev
->graph_mutex
);
214 media_entity_graph_walk_start(&graph
, entity
);
216 while ((entity
= media_entity_graph_walk_next(&graph
))) {
217 if (entity
== &video
->video
.entity
)
220 if (media_entity_type(entity
) != MEDIA_ENT_T_DEVNODE
)
223 far_end
= to_iss_video(media_entity_to_video_device(entity
));
224 if (far_end
->type
!= video
->type
)
230 mutex_unlock(&mdev
->graph_mutex
);
235 __iss_video_get_format(struct iss_video
*video
,
236 struct v4l2_mbus_framefmt
*format
)
238 struct v4l2_subdev_format fmt
;
239 struct v4l2_subdev
*subdev
;
243 subdev
= iss_video_remote_subdev(video
, &pad
);
247 memset(&fmt
, 0, sizeof(fmt
));
249 fmt
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
251 mutex_lock(&video
->mutex
);
252 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &fmt
);
253 mutex_unlock(&video
->mutex
);
258 *format
= fmt
.format
;
263 iss_video_check_format(struct iss_video
*video
, struct iss_video_fh
*vfh
)
265 struct v4l2_mbus_framefmt format
;
266 struct v4l2_pix_format pixfmt
;
269 ret
= __iss_video_get_format(video
, &format
);
273 pixfmt
.bytesperline
= 0;
274 ret
= iss_video_mbus_to_pix(video
, &format
, &pixfmt
);
276 if (vfh
->format
.fmt
.pix
.pixelformat
!= pixfmt
.pixelformat
||
277 vfh
->format
.fmt
.pix
.height
!= pixfmt
.height
||
278 vfh
->format
.fmt
.pix
.width
!= pixfmt
.width
||
279 vfh
->format
.fmt
.pix
.bytesperline
!= pixfmt
.bytesperline
||
280 vfh
->format
.fmt
.pix
.sizeimage
!= pixfmt
.sizeimage
)
286 /* -----------------------------------------------------------------------------
287 * Video queue operations
290 static int iss_video_queue_setup(struct vb2_queue
*vq
,
291 const struct v4l2_format
*fmt
,
292 unsigned int *count
, unsigned int *num_planes
,
293 unsigned int sizes
[], void *alloc_ctxs
[])
295 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vq
);
296 struct iss_video
*video
= vfh
->video
;
298 /* Revisit multi-planar support for NV12 */
301 sizes
[0] = vfh
->format
.fmt
.pix
.sizeimage
;
305 alloc_ctxs
[0] = video
->alloc_ctx
;
307 *count
= min(*count
, video
->capture_mem
/ PAGE_ALIGN(sizes
[0]));
312 static void iss_video_buf_cleanup(struct vb2_buffer
*vb
)
314 struct iss_buffer
*buffer
= container_of(vb
, struct iss_buffer
, vb
);
316 if (buffer
->iss_addr
)
317 buffer
->iss_addr
= 0;
320 static int iss_video_buf_prepare(struct vb2_buffer
*vb
)
322 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vb
->vb2_queue
);
323 struct iss_buffer
*buffer
= container_of(vb
, struct iss_buffer
, vb
);
324 struct iss_video
*video
= vfh
->video
;
325 unsigned long size
= vfh
->format
.fmt
.pix
.sizeimage
;
328 if (vb2_plane_size(vb
, 0) < size
)
331 addr
= vb2_dma_contig_plane_dma_addr(vb
, 0);
332 if (!IS_ALIGNED(addr
, 32)) {
333 dev_dbg(video
->iss
->dev
,
334 "Buffer address must be aligned to 32 bytes boundary.\n");
338 vb2_set_plane_payload(vb
, 0, size
);
339 buffer
->iss_addr
= addr
;
343 static void iss_video_buf_queue(struct vb2_buffer
*vb
)
345 struct iss_video_fh
*vfh
= vb2_get_drv_priv(vb
->vb2_queue
);
346 struct iss_video
*video
= vfh
->video
;
347 struct iss_buffer
*buffer
= container_of(vb
, struct iss_buffer
, vb
);
348 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
352 spin_lock_irqsave(&video
->qlock
, flags
);
354 /* Mark the buffer is faulty and give it back to the queue immediately
355 * if the video node has registered an error. vb2 will perform the same
356 * check when preparing the buffer, but that is inherently racy, so we
357 * need to handle the race condition with an authoritative check here.
359 if (unlikely(video
->error
)) {
360 vb2_buffer_done(vb
, VB2_BUF_STATE_ERROR
);
361 spin_unlock_irqrestore(&video
->qlock
, flags
);
365 empty
= list_empty(&video
->dmaqueue
);
366 list_add_tail(&buffer
->list
, &video
->dmaqueue
);
368 spin_unlock_irqrestore(&video
->qlock
, flags
);
371 enum iss_pipeline_state state
;
374 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
375 state
= ISS_PIPELINE_QUEUE_OUTPUT
;
377 state
= ISS_PIPELINE_QUEUE_INPUT
;
379 spin_lock_irqsave(&pipe
->lock
, flags
);
380 pipe
->state
|= state
;
381 video
->ops
->queue(video
, buffer
);
382 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_QUEUED
;
384 start
= iss_pipeline_ready(pipe
);
386 pipe
->state
|= ISS_PIPELINE_STREAM
;
387 spin_unlock_irqrestore(&pipe
->lock
, flags
);
390 omap4iss_pipeline_set_stream(pipe
,
391 ISS_PIPELINE_STREAM_SINGLESHOT
);
395 static const struct vb2_ops iss_video_vb2ops
= {
396 .queue_setup
= iss_video_queue_setup
,
397 .buf_prepare
= iss_video_buf_prepare
,
398 .buf_queue
= iss_video_buf_queue
,
399 .buf_cleanup
= iss_video_buf_cleanup
,
403 * omap4iss_video_buffer_next - Complete the current buffer and return the next
404 * @video: ISS video object
406 * Remove the current video buffer from the DMA queue and fill its timestamp,
407 * field count and state fields before waking up its completion handler.
409 * For capture video nodes, the buffer state is set to VB2_BUF_STATE_DONE if no
410 * error has been flagged in the pipeline, or to VB2_BUF_STATE_ERROR otherwise.
412 * The DMA queue is expected to contain at least one buffer.
414 * Return a pointer to the next buffer in the DMA queue, or NULL if the queue is
417 struct iss_buffer
*omap4iss_video_buffer_next(struct iss_video
*video
)
419 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
420 enum iss_pipeline_state state
;
421 struct iss_buffer
*buf
;
425 spin_lock_irqsave(&video
->qlock
, flags
);
426 if (WARN_ON(list_empty(&video
->dmaqueue
))) {
427 spin_unlock_irqrestore(&video
->qlock
, flags
);
431 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
433 list_del(&buf
->list
);
434 spin_unlock_irqrestore(&video
->qlock
, flags
);
437 buf
->vb
.v4l2_buf
.timestamp
.tv_sec
= ts
.tv_sec
;
438 buf
->vb
.v4l2_buf
.timestamp
.tv_usec
= ts
.tv_nsec
/ NSEC_PER_USEC
;
440 /* Do frame number propagation only if this is the output video node.
441 * Frame number either comes from the CSI receivers or it gets
442 * incremented here if H3A is not active.
443 * Note: There is no guarantee that the output buffer will finish
444 * first, so the input number might lag behind by 1 in some cases.
446 if (video
== pipe
->output
&& !pipe
->do_propagation
)
447 buf
->vb
.v4l2_buf
.sequence
=
448 atomic_inc_return(&pipe
->frame_number
);
450 buf
->vb
.v4l2_buf
.sequence
= atomic_read(&pipe
->frame_number
);
452 vb2_buffer_done(&buf
->vb
, pipe
->error
?
453 VB2_BUF_STATE_ERROR
: VB2_BUF_STATE_DONE
);
456 spin_lock_irqsave(&video
->qlock
, flags
);
457 if (list_empty(&video
->dmaqueue
)) {
458 spin_unlock_irqrestore(&video
->qlock
, flags
);
459 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
460 state
= ISS_PIPELINE_QUEUE_OUTPUT
461 | ISS_PIPELINE_STREAM
;
463 state
= ISS_PIPELINE_QUEUE_INPUT
464 | ISS_PIPELINE_STREAM
;
466 spin_lock_irqsave(&pipe
->lock
, flags
);
467 pipe
->state
&= ~state
;
468 if (video
->pipe
.stream_state
== ISS_PIPELINE_STREAM_CONTINUOUS
)
469 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_UNDERRUN
;
470 spin_unlock_irqrestore(&pipe
->lock
, flags
);
474 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
&& pipe
->input
!= NULL
) {
475 spin_lock(&pipe
->lock
);
476 pipe
->state
&= ~ISS_PIPELINE_STREAM
;
477 spin_unlock(&pipe
->lock
);
480 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
482 spin_unlock_irqrestore(&video
->qlock
, flags
);
483 buf
->vb
.state
= VB2_BUF_STATE_ACTIVE
;
488 * omap4iss_video_cancel_stream - Cancel stream on a video node
489 * @video: ISS video object
491 * Cancelling a stream mark all buffers on the video node as erroneous and makes
492 * sure no new buffer can be queued.
494 void omap4iss_video_cancel_stream(struct iss_video
*video
)
498 spin_lock_irqsave(&video
->qlock
, flags
);
500 while (!list_empty(&video
->dmaqueue
)) {
501 struct iss_buffer
*buf
;
503 buf
= list_first_entry(&video
->dmaqueue
, struct iss_buffer
,
505 list_del(&buf
->list
);
506 vb2_buffer_done(&buf
->vb
, VB2_BUF_STATE_ERROR
);
509 vb2_queue_error(video
->queue
);
512 spin_unlock_irqrestore(&video
->qlock
, flags
);
515 /* -----------------------------------------------------------------------------
520 iss_video_querycap(struct file
*file
, void *fh
, struct v4l2_capability
*cap
)
522 struct iss_video
*video
= video_drvdata(file
);
524 strlcpy(cap
->driver
, ISS_VIDEO_DRIVER_NAME
, sizeof(cap
->driver
));
525 strlcpy(cap
->card
, video
->video
.name
, sizeof(cap
->card
));
526 strlcpy(cap
->bus_info
, "media", sizeof(cap
->bus_info
));
528 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
529 cap
->device_caps
= V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING
;
531 cap
->device_caps
= V4L2_CAP_VIDEO_OUTPUT
| V4L2_CAP_STREAMING
;
533 cap
->capabilities
= V4L2_CAP_DEVICE_CAPS
| V4L2_CAP_STREAMING
534 | V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_VIDEO_OUTPUT
;
540 iss_video_enum_format(struct file
*file
, void *fh
, struct v4l2_fmtdesc
*f
)
542 struct iss_video
*video
= video_drvdata(file
);
543 struct v4l2_mbus_framefmt format
;
544 unsigned int index
= f
->index
;
548 if (f
->type
!= video
->type
)
551 ret
= __iss_video_get_format(video
, &format
);
555 for (i
= 0; i
< ARRAY_SIZE(formats
); ++i
) {
556 const struct iss_format_info
*info
= &formats
[i
];
558 if (format
.code
!= info
->code
)
562 f
->pixelformat
= info
->pixelformat
;
563 strlcpy(f
->description
, info
->description
,
564 sizeof(f
->description
));
575 iss_video_get_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
577 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
578 struct iss_video
*video
= video_drvdata(file
);
580 if (format
->type
!= video
->type
)
583 mutex_lock(&video
->mutex
);
584 *format
= vfh
->format
;
585 mutex_unlock(&video
->mutex
);
591 iss_video_set_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
593 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
594 struct iss_video
*video
= video_drvdata(file
);
595 struct v4l2_mbus_framefmt fmt
;
597 if (format
->type
!= video
->type
)
600 mutex_lock(&video
->mutex
);
602 /* Fill the bytesperline and sizeimage fields by converting to media bus
603 * format and back to pixel format.
605 iss_video_pix_to_mbus(&format
->fmt
.pix
, &fmt
);
606 iss_video_mbus_to_pix(video
, &fmt
, &format
->fmt
.pix
);
608 vfh
->format
= *format
;
610 mutex_unlock(&video
->mutex
);
615 iss_video_try_format(struct file
*file
, void *fh
, struct v4l2_format
*format
)
617 struct iss_video
*video
= video_drvdata(file
);
618 struct v4l2_subdev_format fmt
;
619 struct v4l2_subdev
*subdev
;
623 if (format
->type
!= video
->type
)
626 subdev
= iss_video_remote_subdev(video
, &pad
);
630 iss_video_pix_to_mbus(&format
->fmt
.pix
, &fmt
.format
);
633 fmt
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
634 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &fmt
);
638 iss_video_mbus_to_pix(video
, &fmt
.format
, &format
->fmt
.pix
);
643 iss_video_cropcap(struct file
*file
, void *fh
, struct v4l2_cropcap
*cropcap
)
645 struct iss_video
*video
= video_drvdata(file
);
646 struct v4l2_subdev
*subdev
;
649 subdev
= iss_video_remote_subdev(video
, NULL
);
653 mutex_lock(&video
->mutex
);
654 ret
= v4l2_subdev_call(subdev
, video
, cropcap
, cropcap
);
655 mutex_unlock(&video
->mutex
);
657 return ret
== -ENOIOCTLCMD
? -ENOTTY
: ret
;
661 iss_video_get_crop(struct file
*file
, void *fh
, struct v4l2_crop
*crop
)
663 struct iss_video
*video
= video_drvdata(file
);
664 struct v4l2_subdev_format format
;
665 struct v4l2_subdev
*subdev
;
669 subdev
= iss_video_remote_subdev(video
, &pad
);
673 /* Try the get crop operation first and fallback to get format if not
676 ret
= v4l2_subdev_call(subdev
, video
, g_crop
, crop
);
677 if (ret
!= -ENOIOCTLCMD
)
681 format
.which
= V4L2_SUBDEV_FORMAT_ACTIVE
;
682 ret
= v4l2_subdev_call(subdev
, pad
, get_fmt
, NULL
, &format
);
684 return ret
== -ENOIOCTLCMD
? -ENOTTY
: ret
;
688 crop
->c
.width
= format
.format
.width
;
689 crop
->c
.height
= format
.format
.height
;
695 iss_video_set_crop(struct file
*file
, void *fh
, const struct v4l2_crop
*crop
)
697 struct iss_video
*video
= video_drvdata(file
);
698 struct v4l2_subdev
*subdev
;
701 subdev
= iss_video_remote_subdev(video
, NULL
);
705 mutex_lock(&video
->mutex
);
706 ret
= v4l2_subdev_call(subdev
, video
, s_crop
, crop
);
707 mutex_unlock(&video
->mutex
);
709 return ret
== -ENOIOCTLCMD
? -ENOTTY
: ret
;
713 iss_video_get_param(struct file
*file
, void *fh
, struct v4l2_streamparm
*a
)
715 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
716 struct iss_video
*video
= video_drvdata(file
);
718 if (video
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT
||
719 video
->type
!= a
->type
)
722 memset(a
, 0, sizeof(*a
));
723 a
->type
= V4L2_BUF_TYPE_VIDEO_OUTPUT
;
724 a
->parm
.output
.capability
= V4L2_CAP_TIMEPERFRAME
;
725 a
->parm
.output
.timeperframe
= vfh
->timeperframe
;
731 iss_video_set_param(struct file
*file
, void *fh
, struct v4l2_streamparm
*a
)
733 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
734 struct iss_video
*video
= video_drvdata(file
);
736 if (video
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT
||
737 video
->type
!= a
->type
)
740 if (a
->parm
.output
.timeperframe
.denominator
== 0)
741 a
->parm
.output
.timeperframe
.denominator
= 1;
743 vfh
->timeperframe
= a
->parm
.output
.timeperframe
;
749 iss_video_reqbufs(struct file
*file
, void *fh
, struct v4l2_requestbuffers
*rb
)
751 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
753 return vb2_reqbufs(&vfh
->queue
, rb
);
757 iss_video_querybuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
759 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
761 return vb2_querybuf(&vfh
->queue
, b
);
765 iss_video_qbuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
767 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
769 return vb2_qbuf(&vfh
->queue
, b
);
773 iss_video_expbuf(struct file
*file
, void *fh
, struct v4l2_exportbuffer
*e
)
775 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
777 return vb2_expbuf(&vfh
->queue
, e
);
781 iss_video_dqbuf(struct file
*file
, void *fh
, struct v4l2_buffer
*b
)
783 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
785 return vb2_dqbuf(&vfh
->queue
, b
, file
->f_flags
& O_NONBLOCK
);
791 * Every ISS pipeline has a single input and a single output. The input can be
792 * either a sensor or a video node. The output is always a video node.
794 * As every pipeline has an output video node, the ISS video objects at the
795 * pipeline output stores the pipeline state. It tracks the streaming state of
796 * both the input and output, as well as the availability of buffers.
798 * In sensor-to-memory mode, frames are always available at the pipeline input.
799 * Starting the sensor usually requires I2C transfers and must be done in
800 * interruptible context. The pipeline is started and stopped synchronously
801 * to the stream on/off commands. All modules in the pipeline will get their
802 * subdev set stream handler called. The module at the end of the pipeline must
803 * delay starting the hardware until buffers are available at its output.
805 * In memory-to-memory mode, starting/stopping the stream requires
806 * synchronization between the input and output. ISS modules can't be stopped
807 * in the middle of a frame, and at least some of the modules seem to become
808 * busy as soon as they're started, even if they don't receive a frame start
809 * event. For that reason frames need to be processed in single-shot mode. The
810 * driver needs to wait until a frame is completely processed and written to
811 * memory before restarting the pipeline for the next frame. Pipelined
812 * processing might be possible but requires more testing.
814 * Stream start must be delayed until buffers are available at both the input
815 * and output. The pipeline must be started in the videobuf queue callback with
816 * the buffers queue spinlock held. The modules subdev set stream operation must
820 iss_video_streamon(struct file
*file
, void *fh
, enum v4l2_buf_type type
)
822 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
823 struct iss_video
*video
= video_drvdata(file
);
824 struct media_entity_graph graph
;
825 struct media_entity
*entity
;
826 enum iss_pipeline_state state
;
827 struct iss_pipeline
*pipe
;
828 struct iss_video
*far_end
;
832 if (type
!= video
->type
)
835 mutex_lock(&video
->stream_lock
);
837 /* Start streaming on the pipeline. No link touching an entity in the
838 * pipeline can be activated or deactivated once streaming is started.
840 pipe
= video
->video
.entity
.pipe
841 ? to_iss_pipeline(&video
->video
.entity
) : &video
->pipe
;
842 pipe
->external
= NULL
;
843 pipe
->external_rate
= 0;
844 pipe
->external_bpp
= 0;
847 if (video
->iss
->pdata
->set_constraints
)
848 video
->iss
->pdata
->set_constraints(video
->iss
, true);
850 ret
= media_entity_pipeline_start(&video
->video
.entity
, &pipe
->pipe
);
852 goto err_media_entity_pipeline_start
;
854 entity
= &video
->video
.entity
;
855 media_entity_graph_walk_start(&graph
, entity
);
856 while ((entity
= media_entity_graph_walk_next(&graph
)))
857 pipe
->entities
|= 1 << entity
->id
;
859 /* Verify that the currently configured format matches the output of
860 * the connected subdev.
862 ret
= iss_video_check_format(video
, vfh
);
864 goto err_iss_video_check_format
;
866 video
->bpl_padding
= ret
;
867 video
->bpl_value
= vfh
->format
.fmt
.pix
.bytesperline
;
869 /* Find the ISS video node connected at the far end of the pipeline and
870 * update the pipeline.
872 far_end
= iss_video_far_end(video
);
874 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
) {
875 state
= ISS_PIPELINE_STREAM_OUTPUT
| ISS_PIPELINE_IDLE_OUTPUT
;
876 pipe
->input
= far_end
;
877 pipe
->output
= video
;
879 if (far_end
== NULL
) {
881 goto err_iss_video_check_format
;
884 state
= ISS_PIPELINE_STREAM_INPUT
| ISS_PIPELINE_IDLE_INPUT
;
886 pipe
->output
= far_end
;
889 spin_lock_irqsave(&pipe
->lock
, flags
);
890 pipe
->state
&= ~ISS_PIPELINE_STREAM
;
891 pipe
->state
|= state
;
892 spin_unlock_irqrestore(&pipe
->lock
, flags
);
894 /* Set the maximum time per frame as the value requested by userspace.
895 * This is a soft limit that can be overridden if the hardware doesn't
896 * support the request limit.
898 if (video
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT
)
899 pipe
->max_timeperframe
= vfh
->timeperframe
;
901 video
->queue
= &vfh
->queue
;
902 INIT_LIST_HEAD(&video
->dmaqueue
);
903 video
->error
= false;
904 atomic_set(&pipe
->frame_number
, -1);
906 ret
= vb2_streamon(&vfh
->queue
, type
);
908 goto err_iss_video_check_format
;
910 /* In sensor-to-memory mode, the stream can be started synchronously
911 * to the stream on command. In memory-to-memory mode, it will be
912 * started when buffers are queued on both the input and output.
914 if (pipe
->input
== NULL
) {
917 ret
= omap4iss_pipeline_set_stream(pipe
,
918 ISS_PIPELINE_STREAM_CONTINUOUS
);
920 goto err_omap4iss_set_stream
;
921 spin_lock_irqsave(&video
->qlock
, flags
);
922 if (list_empty(&video
->dmaqueue
))
923 video
->dmaqueue_flags
|= ISS_VIDEO_DMAQUEUE_UNDERRUN
;
924 spin_unlock_irqrestore(&video
->qlock
, flags
);
927 mutex_unlock(&video
->stream_lock
);
930 err_omap4iss_set_stream
:
931 vb2_streamoff(&vfh
->queue
, type
);
932 err_iss_video_check_format
:
933 media_entity_pipeline_stop(&video
->video
.entity
);
934 err_media_entity_pipeline_start
:
935 if (video
->iss
->pdata
->set_constraints
)
936 video
->iss
->pdata
->set_constraints(video
->iss
, false);
939 mutex_unlock(&video
->stream_lock
);
944 iss_video_streamoff(struct file
*file
, void *fh
, enum v4l2_buf_type type
)
946 struct iss_video_fh
*vfh
= to_iss_video_fh(fh
);
947 struct iss_video
*video
= video_drvdata(file
);
948 struct iss_pipeline
*pipe
= to_iss_pipeline(&video
->video
.entity
);
949 enum iss_pipeline_state state
;
952 if (type
!= video
->type
)
955 mutex_lock(&video
->stream_lock
);
957 if (!vb2_is_streaming(&vfh
->queue
))
960 /* Update the pipeline state. */
961 if (video
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE
)
962 state
= ISS_PIPELINE_STREAM_OUTPUT
963 | ISS_PIPELINE_QUEUE_OUTPUT
;
965 state
= ISS_PIPELINE_STREAM_INPUT
966 | ISS_PIPELINE_QUEUE_INPUT
;
968 spin_lock_irqsave(&pipe
->lock
, flags
);
969 pipe
->state
&= ~state
;
970 spin_unlock_irqrestore(&pipe
->lock
, flags
);
972 /* Stop the stream. */
973 omap4iss_pipeline_set_stream(pipe
, ISS_PIPELINE_STREAM_STOPPED
);
974 vb2_streamoff(&vfh
->queue
, type
);
977 if (video
->iss
->pdata
->set_constraints
)
978 video
->iss
->pdata
->set_constraints(video
->iss
, false);
979 media_entity_pipeline_stop(&video
->video
.entity
);
982 mutex_unlock(&video
->stream_lock
);
987 iss_video_enum_input(struct file
*file
, void *fh
, struct v4l2_input
*input
)
989 if (input
->index
> 0)
992 strlcpy(input
->name
, "camera", sizeof(input
->name
));
993 input
->type
= V4L2_INPUT_TYPE_CAMERA
;
/* The only input is index 0. */
static int
iss_video_g_input(struct file *file, void *fh, unsigned int *input)
{
	*input = 0;

	return 0;
}
1007 iss_video_s_input(struct file
*file
, void *fh
, unsigned int input
)
1009 return input
== 0 ? 0 : -EINVAL
;
1012 static const struct v4l2_ioctl_ops iss_video_ioctl_ops
= {
1013 .vidioc_querycap
= iss_video_querycap
,
1014 .vidioc_enum_fmt_vid_cap
= iss_video_enum_format
,
1015 .vidioc_g_fmt_vid_cap
= iss_video_get_format
,
1016 .vidioc_s_fmt_vid_cap
= iss_video_set_format
,
1017 .vidioc_try_fmt_vid_cap
= iss_video_try_format
,
1018 .vidioc_g_fmt_vid_out
= iss_video_get_format
,
1019 .vidioc_s_fmt_vid_out
= iss_video_set_format
,
1020 .vidioc_try_fmt_vid_out
= iss_video_try_format
,
1021 .vidioc_cropcap
= iss_video_cropcap
,
1022 .vidioc_g_crop
= iss_video_get_crop
,
1023 .vidioc_s_crop
= iss_video_set_crop
,
1024 .vidioc_g_parm
= iss_video_get_param
,
1025 .vidioc_s_parm
= iss_video_set_param
,
1026 .vidioc_reqbufs
= iss_video_reqbufs
,
1027 .vidioc_querybuf
= iss_video_querybuf
,
1028 .vidioc_qbuf
= iss_video_qbuf
,
1029 .vidioc_expbuf
= iss_video_expbuf
,
1030 .vidioc_dqbuf
= iss_video_dqbuf
,
1031 .vidioc_streamon
= iss_video_streamon
,
1032 .vidioc_streamoff
= iss_video_streamoff
,
1033 .vidioc_enum_input
= iss_video_enum_input
,
1034 .vidioc_g_input
= iss_video_g_input
,
1035 .vidioc_s_input
= iss_video_s_input
,
1038 /* -----------------------------------------------------------------------------
1039 * V4L2 file operations
1042 static int iss_video_open(struct file
*file
)
1044 struct iss_video
*video
= video_drvdata(file
);
1045 struct iss_video_fh
*handle
;
1046 struct vb2_queue
*q
;
1049 handle
= kzalloc(sizeof(*handle
), GFP_KERNEL
);
1053 v4l2_fh_init(&handle
->vfh
, &video
->video
);
1054 v4l2_fh_add(&handle
->vfh
);
1056 /* If this is the first user, initialise the pipeline. */
1057 if (omap4iss_get(video
->iss
) == NULL
) {
1062 ret
= omap4iss_pipeline_pm_use(&video
->video
.entity
, 1);
1064 omap4iss_put(video
->iss
);
1068 video
->alloc_ctx
= vb2_dma_contig_init_ctx(video
->iss
->dev
);
1069 if (IS_ERR(video
->alloc_ctx
)) {
1070 ret
= PTR_ERR(video
->alloc_ctx
);
1071 omap4iss_put(video
->iss
);
1077 q
->type
= video
->type
;
1078 q
->io_modes
= VB2_MMAP
| VB2_DMABUF
;
1079 q
->drv_priv
= handle
;
1080 q
->ops
= &iss_video_vb2ops
;
1081 q
->mem_ops
= &vb2_dma_contig_memops
;
1082 q
->buf_struct_size
= sizeof(struct iss_buffer
);
1083 q
->timestamp_flags
= V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC
;
1085 ret
= vb2_queue_init(q
);
1087 omap4iss_put(video
->iss
);
1091 memset(&handle
->format
, 0, sizeof(handle
->format
));
1092 handle
->format
.type
= video
->type
;
1093 handle
->timeperframe
.denominator
= 1;
1095 handle
->video
= video
;
1096 file
->private_data
= &handle
->vfh
;
1100 v4l2_fh_del(&handle
->vfh
);
1107 static int iss_video_release(struct file
*file
)
1109 struct iss_video
*video
= video_drvdata(file
);
1110 struct v4l2_fh
*vfh
= file
->private_data
;
1111 struct iss_video_fh
*handle
= to_iss_video_fh(vfh
);
1113 /* Disable streaming and free the buffers queue resources. */
1114 iss_video_streamoff(file
, vfh
, video
->type
);
1116 omap4iss_pipeline_pm_use(&video
->video
.entity
, 0);
1118 /* Release the videobuf2 queue */
1119 vb2_queue_release(&handle
->queue
);
1121 /* Release the file handle. */
1124 file
->private_data
= NULL
;
1126 omap4iss_put(video
->iss
);
1131 static unsigned int iss_video_poll(struct file
*file
, poll_table
*wait
)
1133 struct iss_video_fh
*vfh
= to_iss_video_fh(file
->private_data
);
1135 return vb2_poll(&vfh
->queue
, file
, wait
);
1138 static int iss_video_mmap(struct file
*file
, struct vm_area_struct
*vma
)
1140 struct iss_video_fh
*vfh
= to_iss_video_fh(file
->private_data
);
1142 return vb2_mmap(&vfh
->queue
, vma
);
1145 static struct v4l2_file_operations iss_video_fops
= {
1146 .owner
= THIS_MODULE
,
1147 .unlocked_ioctl
= video_ioctl2
,
1148 .open
= iss_video_open
,
1149 .release
= iss_video_release
,
1150 .poll
= iss_video_poll
,
1151 .mmap
= iss_video_mmap
,
1154 /* -----------------------------------------------------------------------------
1158 static const struct iss_video_operations iss_video_dummy_ops
= {
1161 int omap4iss_video_init(struct iss_video
*video
, const char *name
)
1163 const char *direction
;
1166 switch (video
->type
) {
1167 case V4L2_BUF_TYPE_VIDEO_CAPTURE
:
1168 direction
= "output";
1169 video
->pad
.flags
= MEDIA_PAD_FL_SINK
;
1171 case V4L2_BUF_TYPE_VIDEO_OUTPUT
:
1172 direction
= "input";
1173 video
->pad
.flags
= MEDIA_PAD_FL_SOURCE
;
1180 ret
= media_entity_init(&video
->video
.entity
, 1, &video
->pad
, 0);
1184 spin_lock_init(&video
->qlock
);
1185 mutex_init(&video
->mutex
);
1186 atomic_set(&video
->active
, 0);
1188 spin_lock_init(&video
->pipe
.lock
);
1189 mutex_init(&video
->stream_lock
);
1191 /* Initialize the video device. */
1192 if (video
->ops
== NULL
)
1193 video
->ops
= &iss_video_dummy_ops
;
1195 video
->video
.fops
= &iss_video_fops
;
1196 snprintf(video
->video
.name
, sizeof(video
->video
.name
),
1197 "OMAP4 ISS %s %s", name
, direction
);
1198 video
->video
.vfl_type
= VFL_TYPE_GRABBER
;
1199 video
->video
.release
= video_device_release_empty
;
1200 video
->video
.ioctl_ops
= &iss_video_ioctl_ops
;
1201 video
->pipe
.stream_state
= ISS_PIPELINE_STREAM_STOPPED
;
1203 video_set_drvdata(&video
->video
, video
);
1208 void omap4iss_video_cleanup(struct iss_video
*video
)
1210 media_entity_cleanup(&video
->video
.entity
);
1211 mutex_destroy(&video
->stream_lock
);
1212 mutex_destroy(&video
->mutex
);
1215 int omap4iss_video_register(struct iss_video
*video
, struct v4l2_device
*vdev
)
1219 video
->video
.v4l2_dev
= vdev
;
1221 ret
= video_register_device(&video
->video
, VFL_TYPE_GRABBER
, -1);
1223 dev_err(video
->iss
->dev
,
1224 "%s: could not register video device (%d)\n",
1230 void omap4iss_video_unregister(struct iss_video
*video
)
1232 video_unregister_device(&video
->video
);