drivers/media/platform/marvell-ccic/mcam-core.c
1 /*
2 * The Marvell camera core. This device appears in a number of settings,
3 * so it needs platform-specific support outside of the core.
4 *
5 * Copyright 2011 Jonathan Corbet corbet@lwn.net
6 */
7 #include <linux/kernel.h>
8 #include <linux/module.h>
9 #include <linux/fs.h>
10 #include <linux/mm.h>
11 #include <linux/i2c.h>
12 #include <linux/interrupt.h>
13 #include <linux/spinlock.h>
14 #include <linux/slab.h>
15 #include <linux/device.h>
16 #include <linux/wait.h>
17 #include <linux/list.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/delay.h>
20 #include <linux/vmalloc.h>
21 #include <linux/io.h>
22 #include <linux/clk.h>
23 #include <linux/videodev2.h>
24 #include <media/v4l2-device.h>
25 #include <media/v4l2-ioctl.h>
26 #include <media/v4l2-ctrls.h>
27 #include <media/v4l2-event.h>
28 #include <media/i2c/ov7670.h>
29 #include <media/videobuf2-vmalloc.h>
30 #include <media/videobuf2-dma-contig.h>
31 #include <media/videobuf2-dma-sg.h>
32
33 #include "mcam-core.h"
34
35 #ifdef MCAM_MODE_VMALLOC
36 /*
37 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
38 * we must have physically contiguous buffers to bring frames into.
39 * These parameters control how many buffers we use, whether we
40 * allocate them at load time (better chance of success, but nails down
41 * memory) or when somebody tries to use the camera (riskier), and,
42 * for load-time allocation, how big they should be.
43 *
44 * The controller can cycle through three buffers. We could use
45 * more by flipping pointers around, but it probably makes little
46 * sense.
47 */
48
49 static bool alloc_bufs_at_read;
50 module_param(alloc_bufs_at_read, bool, 0444);
51 MODULE_PARM_DESC(alloc_bufs_at_read,
52 "Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time. This saves memory, but decreases the chances of successfully getting those buffers. This parameter is only used in the vmalloc buffer mode");
53
54 static int n_dma_bufs = 3;
55 module_param(n_dma_bufs, uint, 0644);
56 MODULE_PARM_DESC(n_dma_bufs,
57 "The number of DMA buffers to allocate. Can be either two (saves memory, makes timing tighter) or three.");
58
59 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
60 module_param(dma_buf_size, uint, 0444);
61 MODULE_PARM_DESC(dma_buf_size,
62 "The size of the allocated DMA buffers. If actual operating parameters require larger buffers, an attempt to reallocate will be made.");
63 #else /* MCAM_MODE_VMALLOC */
64 static const bool alloc_bufs_at_read;
65 static const int n_dma_bufs = 3; /* Used by S/G_PARM */
66 #endif /* MCAM_MODE_VMALLOC */
67
68 static bool flip;
69 module_param(flip, bool, 0444);
70 MODULE_PARM_DESC(flip,
71 "If set, the sensor will be instructed to flip the image vertically.");
72
73 static int buffer_mode = -1;
74 module_param(buffer_mode, int, 0444);
75 MODULE_PARM_DESC(buffer_mode,
76 "Set the buffer mode to be used; default is to go with what the platform driver asks for. Set to 0 for vmalloc, 1 for DMA contiguous.");
77
78 /*
79 * Status flags. Always manipulated with bit operations.
80 */
81 #define CF_BUF0_VALID 0 /* Buffers valid - first three */
82 #define CF_BUF1_VALID 1
83 #define CF_BUF2_VALID 2
84 #define CF_DMA_ACTIVE 3 /* A frame is incoming */
85 #define CF_CONFIG_NEEDED 4 /* Must configure hardware */
86 #define CF_SINGLE_BUFFER 5 /* Running with a single buffer */
87 #define CF_SG_RESTART 6 /* SG restart needed */
88 #define CF_FRAME_SOF0 7 /* Frame 0 started */
89 #define CF_FRAME_SOF1 8
90 #define CF_FRAME_SOF2 9
91
92 #define sensor_call(cam, o, f, args...) \
93 v4l2_subdev_call(cam->sensor, o, f, ##args)
94
95 static struct mcam_format_struct {
96 __u8 *desc;
97 __u32 pixelformat;
98 int bpp; /* Bytes per pixel */
99 bool planar;
100 u32 mbus_code;
101 } mcam_formats[] = {
102 {
103 .desc = "YUYV 4:2:2",
104 .pixelformat = V4L2_PIX_FMT_YUYV,
105 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
106 .bpp = 2,
107 .planar = false,
108 },
109 {
110 .desc = "YVYU 4:2:2",
111 .pixelformat = V4L2_PIX_FMT_YVYU,
112 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
113 .bpp = 2,
114 .planar = false,
115 },
116 {
117 .desc = "YUV 4:2:0 PLANAR",
118 .pixelformat = V4L2_PIX_FMT_YUV420,
119 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
120 .bpp = 1,
121 .planar = true,
122 },
123 {
124 .desc = "YVU 4:2:0 PLANAR",
125 .pixelformat = V4L2_PIX_FMT_YVU420,
126 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
127 .bpp = 1,
128 .planar = true,
129 },
130 {
131 .desc = "XRGB 444",
132 .pixelformat = V4L2_PIX_FMT_XRGB444,
133 .mbus_code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
134 .bpp = 2,
135 .planar = false,
136 },
137 {
138 .desc = "RGB 565",
139 .pixelformat = V4L2_PIX_FMT_RGB565,
140 .mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE,
141 .bpp = 2,
142 .planar = false,
143 },
144 {
145 .desc = "Raw RGB Bayer",
146 .pixelformat = V4L2_PIX_FMT_SBGGR8,
147 .mbus_code = MEDIA_BUS_FMT_SBGGR8_1X8,
148 .bpp = 1,
149 .planar = false,
150 },
151 };
152 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
153
154 static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
155 {
156 unsigned i;
157
158 for (i = 0; i < N_MCAM_FMTS; i++)
159 if (mcam_formats[i].pixelformat == pixelformat)
160 return mcam_formats + i;
161 /* Not found? Then return the first format. */
162 return mcam_formats;
163 }
164
165 /*
166 * The default format we use until somebody says otherwise.
167 */
168 static const struct v4l2_pix_format mcam_def_pix_format = {
169 .width = VGA_WIDTH,
170 .height = VGA_HEIGHT,
171 .pixelformat = V4L2_PIX_FMT_YUYV,
172 .field = V4L2_FIELD_NONE,
173 .bytesperline = VGA_WIDTH*2,
174 .sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
175 .colorspace = V4L2_COLORSPACE_SRGB,
176 };
177
178 static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
179
180
181 /*
182 * The two-word DMA descriptor format used by the Armada 610 and the like.
183 * There is a three-word format as well (set C1_DESC_3WORD) where the third
184 * word is a pointer to the next descriptor, but we don't use it. Two-word
185 * descriptors have to be contiguous in memory.
186 */
187 struct mcam_dma_desc {
188 u32 dma_addr;
189 u32 segment_len;
190 };
191
192 /*
193 * Our buffer type for working with videobuf2. Note that the vb2
194 * developers have decreed that struct vb2_v4l2_buffer must be at the
195 * beginning of this structure.
196 */
197 struct mcam_vb_buffer {
198 struct vb2_v4l2_buffer vb_buf;
199 struct list_head queue;
200 struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
201 dma_addr_t dma_desc_pa; /* Descriptor physical address */
202 int dma_desc_nent; /* Number of mapped descriptors */
203 };
204
205 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
206 {
207 return container_of(vb, struct mcam_vb_buffer, vb_buf);
208 }
209
210 /*
211 * Hand a completed buffer back to user space.
212 */
213 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
214 struct vb2_v4l2_buffer *vbuf)
215 {
216 vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
217 vbuf->sequence = cam->buf_seq[frame];
218 vbuf->field = V4L2_FIELD_NONE;
219 vbuf->vb2_buf.timestamp = ktime_get_ns();
220 vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
221 vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
222 }
223
224
225
226 /*
227 * Debugging and related.
228 */
229 #define cam_err(cam, fmt, arg...) \
230 dev_err((cam)->dev, fmt, ##arg)
231 #define cam_warn(cam, fmt, arg...) \
232 dev_warn((cam)->dev, fmt, ##arg)
233 #define cam_dbg(cam, fmt, arg...) \
234 dev_dbg((cam)->dev, fmt, ##arg)
235
236
237 /*
238 * Flag manipulation helpers
239 */
240 static void mcam_reset_buffers(struct mcam_camera *cam)
241 {
242 int i;
243
244 cam->next_buf = -1;
245 for (i = 0; i < cam->nbufs; i++) {
246 clear_bit(i, &cam->flags);
247 clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
248 }
249 }
250
251 static inline int mcam_needs_config(struct mcam_camera *cam)
252 {
253 return test_bit(CF_CONFIG_NEEDED, &cam->flags);
254 }
255
256 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
257 {
258 if (needed)
259 set_bit(CF_CONFIG_NEEDED, &cam->flags);
260 else
261 clear_bit(CF_CONFIG_NEEDED, &cam->flags);
262 }
263
264 /* ------------------------------------------------------------------- */
265 /*
266 * Make the controller start grabbing images. Everything must
267 * be set up before doing this.
268 */
269 static void mcam_ctlr_start(struct mcam_camera *cam)
270 {
271 /* set_bit performs a read, so no other barrier should be
272 needed here */
273 mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
274 }
275
276 static void mcam_ctlr_stop(struct mcam_camera *cam)
277 {
278 mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
279 }
280
281 static void mcam_enable_mipi(struct mcam_camera *mcam)
282 {
283 /* The bus is in MIPI mode; program the DPHY registers and enable MIPI */
284 cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
285 mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
286 mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
287 mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
288 mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
289
290 if (!mcam->mipi_enabled) {
291 if (mcam->lane > 4 || mcam->lane <= 0) {
292 cam_warn(mcam, "lane number error\n");
293 mcam->lane = 1; /* set the default value */
294 }
295 /*
296 * 0x41 activates 1 lane
297 * 0x43 activates 2 lanes
298 * 0x45 activates 3 lanes (never happens)
299 * 0x47 activates 4 lanes
300 */
301 mcam_reg_write(mcam, REG_CSI2_CTRL0,
302 CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
303 mcam_reg_write(mcam, REG_CLKCTRL,
304 (mcam->mclk_src << 29) | mcam->mclk_div);
305
306 mcam->mipi_enabled = true;
307 }
308 }
309
310 static void mcam_disable_mipi(struct mcam_camera *mcam)
311 {
312 /* The bus is in parallel mode, or MIPI is being shut down; disable MIPI */
313 mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
314 mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
315 mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
316 mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
317 mcam->mipi_enabled = false;
318 }
319
320 static bool mcam_fmt_is_planar(__u32 pfmt)
321 {
322 struct mcam_format_struct *f;
323
324 f = mcam_find_format(pfmt);
325 return f->planar;
326 }
327
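/*
 * Program the Y/U/V base-address registers for one frame. For the
 * planar YUV420/YVU420 formats the U and V planes follow the Y plane
 * in the same buffer, so their addresses are derived from "base".
 */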
328 static void mcam_write_yuv_bases(struct mcam_camera *cam,
329 unsigned frame, dma_addr_t base)
330 {
331 struct v4l2_pix_format *fmt = &cam->pix_format;
332 u32 pixel_count = fmt->width * fmt->height;
333 dma_addr_t y, u = 0, v = 0;
334
335 y = base;
336
337 switch (fmt->pixelformat) {
338 case V4L2_PIX_FMT_YUV420:
339 u = y + pixel_count;
340 v = u + pixel_count / 4;
341 break;
342 case V4L2_PIX_FMT_YVU420:
343 v = y + pixel_count;
344 u = v + pixel_count / 4;
345 break;
346 default:
347 break;
348 }
349
350 mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
351 if (mcam_fmt_is_planar(fmt->pixelformat)) {
352 mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
353 mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
354 }
355 }
356
357 /* ------------------------------------------------------------------- */
358
359 #ifdef MCAM_MODE_VMALLOC
360 /*
361 * Code specific to the vmalloc buffer mode.
362 */
363
364 /*
365 * Allocate in-kernel DMA buffers for vmalloc mode.
366 */
367 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
368 {
369 int i;
370
371 mcam_set_config_needed(cam, 1);
372 if (loadtime)
373 cam->dma_buf_size = dma_buf_size;
374 else
375 cam->dma_buf_size = cam->pix_format.sizeimage;
376 if (n_dma_bufs > 3)
377 n_dma_bufs = 3;
378
379 cam->nbufs = 0;
380 for (i = 0; i < n_dma_bufs; i++) {
381 cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
382 cam->dma_buf_size, cam->dma_handles + i,
383 GFP_KERNEL);
384 if (cam->dma_bufs[i] == NULL) {
385 cam_warn(cam, "Failed to allocate DMA buffer\n");
386 break;
387 }
388 (cam->nbufs)++;
389 }
390
391 switch (cam->nbufs) {
392 case 1:
393 dma_free_coherent(cam->dev, cam->dma_buf_size,
394 cam->dma_bufs[0], cam->dma_handles[0]);
395 cam->nbufs = 0;
396 /* fall-through */
397 case 0:
398 cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
399 return -ENOMEM;
400
401 case 2:
402 if (n_dma_bufs > 2)
403 cam_warn(cam, "Will limp along with only 2 buffers\n");
404 break;
405 }
406 return 0;
407 }
408
409 static void mcam_free_dma_bufs(struct mcam_camera *cam)
410 {
411 int i;
412
413 for (i = 0; i < cam->nbufs; i++) {
414 dma_free_coherent(cam->dev, cam->dma_buf_size,
415 cam->dma_bufs[i], cam->dma_handles[i]);
416 cam->dma_bufs[i] = NULL;
417 }
418 cam->nbufs = 0;
419 }
420
421
422 /*
423 * Set up DMA buffers when operating in vmalloc mode
424 */
425 static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
426 {
427 /*
428 * Store the first two YUV buffers. Then either
429 * set the third if it exists, or tell the controller
430 * to just use two.
431 */
432 mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
433 mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
434 if (cam->nbufs > 2) {
435 mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
436 mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
437 } else
438 mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
439 if (cam->chip_id == MCAM_CAFE)
440 mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
441 }
442
443 /*
444 * Copy data out to user space in the vmalloc case
445 */
446 static void mcam_frame_tasklet(unsigned long data)
447 {
448 struct mcam_camera *cam = (struct mcam_camera *) data;
449 int i;
450 unsigned long flags;
451 struct mcam_vb_buffer *buf;
452
453 spin_lock_irqsave(&cam->dev_lock, flags);
454 for (i = 0; i < cam->nbufs; i++) {
455 int bufno = cam->next_buf;
456
457 if (cam->state != S_STREAMING || bufno < 0)
458 break; /* I/O got stopped */
459 if (++(cam->next_buf) >= cam->nbufs)
460 cam->next_buf = 0;
461 if (!test_bit(bufno, &cam->flags))
462 continue;
463 if (list_empty(&cam->buffers)) {
464 cam->frame_state.singles++;
465 break; /* Leave it valid, hope for better later */
466 }
467 cam->frame_state.delivered++;
468 clear_bit(bufno, &cam->flags);
469 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
470 queue);
471 list_del_init(&buf->queue);
472 /*
473 * Drop the lock during the big copy. This *should* be safe...
474 */
475 spin_unlock_irqrestore(&cam->dev_lock, flags);
476 memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
477 cam->dma_bufs[bufno],
478 cam->pix_format.sizeimage);
479 mcam_buffer_done(cam, bufno, &buf->vb_buf);
480 spin_lock_irqsave(&cam->dev_lock, flags);
481 }
482 spin_unlock_irqrestore(&cam->dev_lock, flags);
483 }
484
485
486 /*
487 * Make sure our allocated buffers are up to the task.
488 */
489 static int mcam_check_dma_buffers(struct mcam_camera *cam)
490 {
491 if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
492 mcam_free_dma_bufs(cam);
493 if (cam->nbufs == 0)
494 return mcam_alloc_dma_bufs(cam, 0);
495 return 0;
496 }
497
498 static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
499 {
500 tasklet_schedule(&cam->s_tasklet);
501 }
502
503 #else /* MCAM_MODE_VMALLOC */
504
505 static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
506 {
507 return 0;
508 }
509
510 static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
511 {
512 return;
513 }
514
515 static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
516 {
517 return 0;
518 }
519
520
521
522 #endif /* MCAM_MODE_VMALLOC */
523
524
525 #ifdef MCAM_MODE_DMA_CONTIG
526 /* ---------------------------------------------------------------------- */
527 /*
528 * DMA-contiguous code.
529 */
530
531 /*
532 * Set up a contiguous buffer for the given frame. Here also is where
533 * the underrun strategy is set: if there is no buffer available, reuse
534 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
535 * keep the interrupt handler from giving that buffer back to user
536 * space. In this way, we always have a buffer to DMA to and don't
537 * have to try to play games stopping and restarting the controller.
538 */
539 static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
540 {
541 struct mcam_vb_buffer *buf;
542 dma_addr_t dma_handle;
543 struct vb2_v4l2_buffer *vb;
544
545 /*
546 * If there are no available buffers, go into single mode
547 */
548 if (list_empty(&cam->buffers)) {
549 buf = cam->vb_bufs[frame ^ 0x1];
550 set_bit(CF_SINGLE_BUFFER, &cam->flags);
551 cam->frame_state.singles++;
552 } else {
553 /*
554 * OK, we have a buffer we can use.
555 */
556 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
557 queue);
558 list_del_init(&buf->queue);
559 clear_bit(CF_SINGLE_BUFFER, &cam->flags);
560 }
561
562 cam->vb_bufs[frame] = buf;
563 vb = &buf->vb_buf;
564
565 dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
566 mcam_write_yuv_bases(cam, frame, dma_handle);
567 }
568
569 /*
570 * Initial B_DMA_contig setup.
571 */
572 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
573 {
574 mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
575 cam->nbufs = 2;
576 mcam_set_contig_buffer(cam, 0);
577 mcam_set_contig_buffer(cam, 1);
578 }
579
580 /*
581 * Frame completion handling.
582 */
583 static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
584 {
585 struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
586
587 if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
588 cam->frame_state.delivered++;
589 cam->vb_bufs[frame] = NULL;
590 mcam_buffer_done(cam, frame, &buf->vb_buf);
591 }
592 mcam_set_contig_buffer(cam, frame);
593 }
594
595 #endif /* MCAM_MODE_DMA_CONTIG */
596
597 #ifdef MCAM_MODE_DMA_SG
598 /* ---------------------------------------------------------------------- */
599 /*
600 * Scatter/gather-specific code.
601 */
602
603 /*
604 * Set up the next buffer for S/G I/O; caller should be sure that
605 * the controller is stopped and a buffer is available.
606 */
607 static void mcam_sg_next_buffer(struct mcam_camera *cam)
608 {
609 struct mcam_vb_buffer *buf;
610
611 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
612 list_del_init(&buf->queue);
613 /*
614 * Very Bad Not Good Things happen if you don't clear
615 * C1_DESC_ENA before making any descriptor changes.
616 */
617 mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
618 mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
619 mcam_reg_write(cam, REG_DESC_LEN_Y,
620 buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
621 mcam_reg_write(cam, REG_DESC_LEN_U, 0);
622 mcam_reg_write(cam, REG_DESC_LEN_V, 0);
623 mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
624 cam->vb_bufs[0] = buf;
625 }
626
627 /*
628 * Initial B_DMA_sg setup
629 */
630 static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
631 {
632 /*
633 * The list-empty condition can hit us at resume time
634 * if the buffer list was empty when the system was suspended.
635 */
636 if (list_empty(&cam->buffers)) {
637 set_bit(CF_SG_RESTART, &cam->flags);
638 return;
639 }
640
641 mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
642 mcam_sg_next_buffer(cam);
643 cam->nbufs = 3;
644 }
645
646
647 /*
648 * Frame completion with S/G is trickier. We can't muck with
649 * a descriptor chain on the fly, since the controller buffers it
650 * internally. So we have to actually stop and restart; Marvell
651 * says this is the way to do it.
652 *
653 * Of course, stopping is easier said than done; experience shows
654 * that the controller can start a frame *after* C0_ENABLE has been
655 * cleared. So when running in S/G mode, the controller is "stopped"
656 * on receipt of the start-of-frame interrupt. That means we can
657 * safely change the DMA descriptor array here and restart things
658 * (assuming there's another buffer waiting to go).
659 */
660 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
661 {
662 struct mcam_vb_buffer *buf = cam->vb_bufs[0];
663
664 /*
665 * If we're no longer supposed to be streaming, don't do anything.
666 */
667 if (cam->state != S_STREAMING)
668 return;
669 /*
670 * If we have another buffer available, put it in and
671 * restart the engine.
672 */
673 if (!list_empty(&cam->buffers)) {
674 mcam_sg_next_buffer(cam);
675 mcam_ctlr_start(cam);
676 /*
677 * Otherwise set CF_SG_RESTART and the controller will
678 * be restarted once another buffer shows up.
679 */
680 } else {
681 set_bit(CF_SG_RESTART, &cam->flags);
682 cam->frame_state.singles++;
683 cam->vb_bufs[0] = NULL;
684 }
685 /*
686 * Now we can give the completed frame back to user space.
687 */
688 cam->frame_state.delivered++;
689 mcam_buffer_done(cam, frame, &buf->vb_buf);
690 }
691
692
693 /*
694 * Scatter/gather mode requires stopping the controller between
695 * frames so we can put in a new DMA descriptor array. If no new
696 * buffer exists at frame completion, the controller is left stopped;
697 * this function is charged with gettig things going again.
698 */
699 static void mcam_sg_restart(struct mcam_camera *cam)
700 {
701 mcam_ctlr_dma_sg(cam);
702 mcam_ctlr_start(cam);
703 clear_bit(CF_SG_RESTART, &cam->flags);
704 }
705
706 #else /* MCAM_MODE_DMA_SG */
707
708 static inline void mcam_sg_restart(struct mcam_camera *cam)
709 {
710 return;
711 }
712
713 #endif /* MCAM_MODE_DMA_SG */
714
715 /* ---------------------------------------------------------------------- */
716 /*
717 * Buffer-mode-independent controller code.
718 */
719
720 /*
721 * Image format setup
722 */
723 static void mcam_ctlr_image(struct mcam_camera *cam)
724 {
725 struct v4l2_pix_format *fmt = &cam->pix_format;
726 u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
727
728 cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
729 fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
730 imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
731 imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
732
733 switch (fmt->pixelformat) {
734 case V4L2_PIX_FMT_YUYV:
735 case V4L2_PIX_FMT_YVYU:
736 widthy = fmt->width * 2;
737 widthuv = 0;
738 break;
739 case V4L2_PIX_FMT_YUV420:
740 case V4L2_PIX_FMT_YVU420:
741 widthy = fmt->width;
742 widthuv = fmt->width / 2;
743 break;
744 default:
745 widthy = fmt->bytesperline;
746 widthuv = 0;
747 break;
748 }
749
750 mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
751 IMGP_YP_MASK | IMGP_UVP_MASK);
752 mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
753 mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
754
755 /*
756 * Tell the controller about the image format we are using.
757 */
758 switch (fmt->pixelformat) {
759 case V4L2_PIX_FMT_YUV420:
760 case V4L2_PIX_FMT_YVU420:
761 mcam_reg_write_mask(cam, REG_CTRL0,
762 C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
763 break;
764 case V4L2_PIX_FMT_YUYV:
765 mcam_reg_write_mask(cam, REG_CTRL0,
766 C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
767 break;
768 case V4L2_PIX_FMT_YVYU:
769 mcam_reg_write_mask(cam, REG_CTRL0,
770 C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
771 break;
772 case V4L2_PIX_FMT_XRGB444:
773 mcam_reg_write_mask(cam, REG_CTRL0,
774 C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
775 break;
776 case V4L2_PIX_FMT_RGB565:
777 mcam_reg_write_mask(cam, REG_CTRL0,
778 C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
779 break;
780 case V4L2_PIX_FMT_SBGGR8:
781 mcam_reg_write_mask(cam, REG_CTRL0,
782 C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
783 break;
784 default:
785 cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
786 break;
787 }
788
789 /*
790 * Make sure it knows we want to use hsync/vsync.
791 */
792 mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
793 /*
794 * These bits control the generation of EOF (DVP only)
795 */
796 if (cam->bus_type != V4L2_MBUS_CSI2)
797 mcam_reg_set_bit(cam, REG_CTRL0,
798 C0_EOF_VSYNC | C0_VEDGE_CTRL);
799 }
800
801
802 /*
803 * Configure the controller for operation; caller holds the
804 * device mutex.
805 */
806 static int mcam_ctlr_configure(struct mcam_camera *cam)
807 {
808 unsigned long flags;
809
810 spin_lock_irqsave(&cam->dev_lock, flags);
811 clear_bit(CF_SG_RESTART, &cam->flags);
812 cam->dma_setup(cam);
813 mcam_ctlr_image(cam);
814 mcam_set_config_needed(cam, 0);
815 spin_unlock_irqrestore(&cam->dev_lock, flags);
816 return 0;
817 }
818
819 static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
820 {
821 /*
822 * Clear any pending interrupts, since we do not
823 * expect to have I/O active prior to enabling.
824 */
825 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
826 mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
827 }
828
829 static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
830 {
831 mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
832 }
833
834
835
836 static void mcam_ctlr_init(struct mcam_camera *cam)
837 {
838 unsigned long flags;
839
840 spin_lock_irqsave(&cam->dev_lock, flags);
841 /*
842 * Make sure it's not powered down.
843 */
844 mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
845 /*
846 * Turn off the enable bit. It sure should be off anyway,
847 * but it's good to be sure.
848 */
849 mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
850 /*
851 * Clock the sensor appropriately. Controller clock should
852 * be 48MHz, sensor "typical" value is half that.
853 */
854 mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
855 spin_unlock_irqrestore(&cam->dev_lock, flags);
856 }
857
858
859 /*
860 * Stop the controller, and don't return until we're really sure that no
861 * further DMA is going on.
862 */
863 static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
864 {
865 unsigned long flags;
866
867 /*
868 * Theory: stop the camera controller (whether it is operating
869 * or not). Delay briefly just in case we race with the SOF
870 * interrupt, then wait until no DMA is active.
871 */
872 spin_lock_irqsave(&cam->dev_lock, flags);
873 clear_bit(CF_SG_RESTART, &cam->flags);
874 mcam_ctlr_stop(cam);
875 cam->state = S_IDLE;
876 spin_unlock_irqrestore(&cam->dev_lock, flags);
877 /*
878 * This is a brutally long sleep, but experience shows that
879 * it can take the controller a while to get the message that
880 * it needs to stop grabbing frames. In particular, we can
881 * sometimes (on mmp) get a frame at the end WITHOUT the
882 * start-of-frame indication.
883 */
884 msleep(150);
885 if (test_bit(CF_DMA_ACTIVE, &cam->flags))
886 cam_err(cam, "Timeout waiting for DMA to end\n");
887 /* This would be bad news - what now? */
888 spin_lock_irqsave(&cam->dev_lock, flags);
889 mcam_ctlr_irq_disable(cam);
890 spin_unlock_irqrestore(&cam->dev_lock, flags);
891 }
892
893 /*
894 * Power up and down.
895 */
896 static int mcam_ctlr_power_up(struct mcam_camera *cam)
897 {
898 unsigned long flags;
899 int ret;
900
901 spin_lock_irqsave(&cam->dev_lock, flags);
902 ret = cam->plat_power_up(cam);
903 if (ret) {
904 spin_unlock_irqrestore(&cam->dev_lock, flags);
905 return ret;
906 }
907 mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
908 spin_unlock_irqrestore(&cam->dev_lock, flags);
909 msleep(5); /* Just to be sure */
910 return 0;
911 }
912
913 static void mcam_ctlr_power_down(struct mcam_camera *cam)
914 {
915 unsigned long flags;
916
917 spin_lock_irqsave(&cam->dev_lock, flags);
918 /*
919 * School of hard knocks department: be sure we do any register
920 * twiddling on the controller *before* calling the platform
921 * power down routine.
922 */
923 mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
924 cam->plat_power_down(cam);
925 spin_unlock_irqrestore(&cam->dev_lock, flags);
926 }
927
928 /* -------------------------------------------------------------------- */
929 /*
930 * Communications with the sensor.
931 */
932
933 static int __mcam_cam_reset(struct mcam_camera *cam)
934 {
935 return sensor_call(cam, core, reset, 0);
936 }
937
938 /*
939 * We have found the sensor on the i2c. Let's try to have a
940 * conversation.
941 */
942 static int mcam_cam_init(struct mcam_camera *cam)
943 {
944 int ret;
945
946 if (cam->state != S_NOTREADY)
947 cam_warn(cam, "Cam init with device in funky state %d",
948 cam->state);
949 ret = __mcam_cam_reset(cam);
950 /* Get/set parameters? */
951 cam->state = S_IDLE;
952 mcam_ctlr_power_down(cam);
953 return ret;
954 }
955
956 /*
957 * Configure the sensor to match the parameters we have. Caller should
958 * hold s_mutex
959 */
960 static int mcam_cam_set_flip(struct mcam_camera *cam)
961 {
962 struct v4l2_control ctrl;
963
964 memset(&ctrl, 0, sizeof(ctrl));
965 ctrl.id = V4L2_CID_VFLIP;
966 ctrl.value = flip;
967 return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
968 }
969
970
971 static int mcam_cam_configure(struct mcam_camera *cam)
972 {
973 struct v4l2_subdev_format format = {
974 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
975 };
976 int ret;
977
978 v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
979 ret = sensor_call(cam, core, init, 0);
980 if (ret == 0)
981 ret = sensor_call(cam, pad, set_fmt, NULL, &format);
982 /*
983 * OV7670 does weird things if flip is set *before* format...
984 */
985 ret += mcam_cam_set_flip(cam);
986 return ret;
987 }
988
989 /*
990 * Get everything ready, and start grabbing frames.
991 */
992 static int mcam_read_setup(struct mcam_camera *cam)
993 {
994 int ret;
995 unsigned long flags;
996
997 /*
998 * Configuration. If we still don't have DMA buffers,
999 * make one last, desperate attempt.
1000 */
1001 if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1002 mcam_alloc_dma_bufs(cam, 0))
1003 return -ENOMEM;
1004
1005 if (mcam_needs_config(cam)) {
1006 mcam_cam_configure(cam);
1007 ret = mcam_ctlr_configure(cam);
1008 if (ret)
1009 return ret;
1010 }
1011
1012 /*
1013 * Turn it loose.
1014 */
1015 spin_lock_irqsave(&cam->dev_lock, flags);
1016 clear_bit(CF_DMA_ACTIVE, &cam->flags);
1017 mcam_reset_buffers(cam);
1018 /*
1019 * Update CSI2_DPHY value
1020 */
1021 if (cam->calc_dphy)
1022 cam->calc_dphy(cam);
1023 cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
1024 cam->dphy[0], cam->dphy[1], cam->dphy[2]);
1025 if (cam->bus_type == V4L2_MBUS_CSI2)
1026 mcam_enable_mipi(cam);
1027 else
1028 mcam_disable_mipi(cam);
1029 mcam_ctlr_irq_enable(cam);
1030 cam->state = S_STREAMING;
1031 if (!test_bit(CF_SG_RESTART, &cam->flags))
1032 mcam_ctlr_start(cam);
1033 spin_unlock_irqrestore(&cam->dev_lock, flags);
1034 return 0;
1035 }
1036
1037 /* ----------------------------------------------------------------------- */
1038 /*
1039 * Videobuf2 interface code.
1040 */
1041
1042 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1043 unsigned int *nbufs,
1044 unsigned int *num_planes, unsigned int sizes[],
1045 struct device *alloc_devs[])
1046 {
1047 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1048 int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1049 unsigned size = cam->pix_format.sizeimage;
1050
1051 if (*nbufs < minbufs)
1052 *nbufs = minbufs;
1053
1054 if (*num_planes)
1055 return sizes[0] < size ? -EINVAL : 0;
1056 sizes[0] = size;
1057 *num_planes = 1; /* Someday we have to support planar formats... */
1058 return 0;
1059 }
1060
1061
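/*
 * vb2 .buf_queue callback: put the buffer on our ready list, restart
 * S/G DMA if it stalled waiting for a buffer, and resume setup if we
 * were waiting in S_BUFWAIT.
 */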
1062 static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1063 {
1064 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1065 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1066 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1067 unsigned long flags;
1068 int start;
1069
1070 spin_lock_irqsave(&cam->dev_lock, flags);
1071 start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1072 list_add(&mvb->queue, &cam->buffers);
1073 if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1074 mcam_sg_restart(cam);
1075 spin_unlock_irqrestore(&cam->dev_lock, flags);
1076 if (start)
1077 mcam_read_setup(cam);
1078 }
1079
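/*
 * Hand every buffer we are holding - both the queued ones and those
 * in the DMA engine - back to vb2 with the given state. Used when
 * streaming fails to start or is being shut down.
 */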
1080 static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
1081 enum vb2_buffer_state state)
1082 {
1083 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1084 struct mcam_vb_buffer *buf, *node;
1085 unsigned long flags;
1086 unsigned i;
1087
1088 spin_lock_irqsave(&cam->dev_lock, flags);
1089 list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
1090 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1091 list_del(&buf->queue);
1092 }
1093 for (i = 0; i < MAX_DMA_BUFS; i++) {
1094 buf = cam->vb_bufs[i];
1095
1096 if (buf) {
1097 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1098 cam->vb_bufs[i] = NULL;
1099 }
1100 }
1101 spin_unlock_irqrestore(&cam->dev_lock, flags);
1102 }
1103
1104 /*
1105 * These are called from vb2 with the queue mutex held
1106 */
1107 static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1108 {
1109 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1110 unsigned int frame;
1111 int ret;
1112
1113 if (cam->state != S_IDLE) {
1114 mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1115 return -EINVAL;
1116 }
1117 cam->frame_state.frames = 0;
1118 cam->frame_state.singles = 0;
1119 cam->frame_state.delivered = 0;
1120 cam->sequence = 0;
1121 /*
1122 * Videobuf2 sneakily hoards all the buffers and won't
1123 * give them to us until *after* streaming starts. But
1124 * we can't actually start streaming until we have a
1125 * destination. So go into a wait state and hope they
1126 * give us buffers soon.
1127 */
1128 if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1129 cam->state = S_BUFWAIT;
1130 return 0;
1131 }
1132
1133 /*
1134 * Make sure any leftover frame flags are cleared
1135 * before we really start streaming
1136 */
1137 for (frame = 0; frame < cam->nbufs; frame++)
1138 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1139
1140 ret = mcam_read_setup(cam);
1141 if (ret)
1142 mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1143 return ret;
1144 }
1145
1146 static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1147 {
1148 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1149
1150 cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
1151 cam->frame_state.frames, cam->frame_state.singles,
1152 cam->frame_state.delivered);
1153 if (cam->state == S_BUFWAIT) {
1154 /* They never gave us buffers */
1155 cam->state = S_IDLE;
1156 return;
1157 }
1158 if (cam->state != S_STREAMING)
1159 return;
1160 mcam_ctlr_stop_dma(cam);
1161 /*
1162 * Reset the CCIC PHY after stopping streaming;
1163 * otherwise the CCIC may be unstable.
1164 */
1165 if (cam->ctlr_reset)
1166 cam->ctlr_reset(cam);
1167 /*
1168 * VB2 reclaims the buffers, so we need to forget
1169 * about them.
1170 */
1171 mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
1172 }
1173
1174
1175 static const struct vb2_ops mcam_vb2_ops = {
1176 .queue_setup = mcam_vb_queue_setup,
1177 .buf_queue = mcam_vb_buf_queue,
1178 .start_streaming = mcam_vb_start_streaming,
1179 .stop_streaming = mcam_vb_stop_streaming,
1180 .wait_prepare = vb2_ops_wait_prepare,
1181 .wait_finish = vb2_ops_wait_finish,
1182 };
1183
1184
1185 #ifdef MCAM_MODE_DMA_SG
1186 /*
1187 * Scatter/gather mode uses all of the above functions plus a
1188 * few extras to deal with DMA mapping.
1189 */
1190 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1191 {
1192 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1193 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1194 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1195 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1196
1197 mvb->dma_desc = dma_alloc_coherent(cam->dev,
1198 ndesc * sizeof(struct mcam_dma_desc),
1199 &mvb->dma_desc_pa, GFP_KERNEL);
1200 if (mvb->dma_desc == NULL) {
1201 cam_err(cam, "Unable to get DMA descriptor array\n");
1202 return -ENOMEM;
1203 }
1204 return 0;
1205 }
1206
1207 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1208 {
1209 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1210 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1211 struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1212 struct mcam_dma_desc *desc = mvb->dma_desc;
1213 struct scatterlist *sg;
1214 int i;
1215
1216 for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1217 desc->dma_addr = sg_dma_address(sg);
1218 desc->segment_len = sg_dma_len(sg);
1219 desc++;
1220 }
1221 return 0;
1222 }
1223
1224 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1225 {
1226 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1227 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1228 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1229 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1230
1231 dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1232 mvb->dma_desc, mvb->dma_desc_pa);
1233 }
1234
1235
1236 static const struct vb2_ops mcam_vb2_sg_ops = {
1237 .queue_setup = mcam_vb_queue_setup,
1238 .buf_init = mcam_vb_sg_buf_init,
1239 .buf_prepare = mcam_vb_sg_buf_prepare,
1240 .buf_queue = mcam_vb_buf_queue,
1241 .buf_cleanup = mcam_vb_sg_buf_cleanup,
1242 .start_streaming = mcam_vb_start_streaming,
1243 .stop_streaming = mcam_vb_stop_streaming,
1244 .wait_prepare = vb2_ops_wait_prepare,
1245 .wait_finish = vb2_ops_wait_finish,
1246 };
1247
1248 #endif /* MCAM_MODE_DMA_SG */
1249
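/*
 * Set up the vb2 queue for the configured buffer mode: choose the
 * matching vb2/memory ops and the DMA setup and frame-completion
 * handlers used by the interrupt path.
 */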
1250 static int mcam_setup_vb2(struct mcam_camera *cam)
1251 {
1252 struct vb2_queue *vq = &cam->vb_queue;
1253
1254 memset(vq, 0, sizeof(*vq));
1255 vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1256 vq->drv_priv = cam;
1257 vq->lock = &cam->s_mutex;
1258 vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1259 vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
1260 vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1261 vq->dev = cam->dev;
1262 INIT_LIST_HEAD(&cam->buffers);
1263 switch (cam->buffer_mode) {
1264 case B_DMA_contig:
1265 #ifdef MCAM_MODE_DMA_CONTIG
1266 vq->ops = &mcam_vb2_ops;
1267 vq->mem_ops = &vb2_dma_contig_memops;
1268 cam->dma_setup = mcam_ctlr_dma_contig;
1269 cam->frame_complete = mcam_dma_contig_done;
1270 #endif
1271 break;
1272 case B_DMA_sg:
1273 #ifdef MCAM_MODE_DMA_SG
1274 vq->ops = &mcam_vb2_sg_ops;
1275 vq->mem_ops = &vb2_dma_sg_memops;
1276 cam->dma_setup = mcam_ctlr_dma_sg;
1277 cam->frame_complete = mcam_dma_sg_done;
1278 #endif
1279 break;
1280 case B_vmalloc:
1281 #ifdef MCAM_MODE_VMALLOC
1282 tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
1283 (unsigned long) cam);
1284 vq->ops = &mcam_vb2_ops;
1285 vq->mem_ops = &vb2_vmalloc_memops;
1286 cam->dma_setup = mcam_ctlr_dma_vmalloc;
1287 cam->frame_complete = mcam_vmalloc_done;
1288 #endif
1289 break;
1290 }
1291 return vb2_queue_init(vq);
1292 }
1293
1294
1295 /* ---------------------------------------------------------------------- */
1296 /*
1297 * The long list of V4L2 ioctl() operations.
1298 */
1299
1300 static int mcam_vidioc_querycap(struct file *file, void *priv,
1301 struct v4l2_capability *cap)
1302 {
1303 struct mcam_camera *cam = video_drvdata(file);
1304
1305 strcpy(cap->driver, "marvell_ccic");
1306 strcpy(cap->card, "marvell_ccic");
1307 strlcpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1308 cap->device_caps = V4L2_CAP_VIDEO_CAPTURE |
1309 V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1310 cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
1311 return 0;
1312 }
1313
1314
1315 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1316 void *priv, struct v4l2_fmtdesc *fmt)
1317 {
1318 if (fmt->index >= N_MCAM_FMTS)
1319 return -EINVAL;
1320 strlcpy(fmt->description, mcam_formats[fmt->index].desc,
1321 sizeof(fmt->description));
1322 fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1323 return 0;
1324 }
1325
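/*
 * TRY_FMT: negotiate the format with the sensor (using a TRY pad
 * config) and fill in the line stride and image size the controller
 * will produce for it.
 */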
1326 static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1327 struct v4l2_format *fmt)
1328 {
1329 struct mcam_camera *cam = video_drvdata(filp);
1330 struct mcam_format_struct *f;
1331 struct v4l2_pix_format *pix = &fmt->fmt.pix;
1332 struct v4l2_subdev_pad_config pad_cfg;
1333 struct v4l2_subdev_format format = {
1334 .which = V4L2_SUBDEV_FORMAT_TRY,
1335 };
1336 int ret;
1337
1338 f = mcam_find_format(pix->pixelformat);
1339 pix->pixelformat = f->pixelformat;
1340 v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
1341 ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
1342 v4l2_fill_pix_format(pix, &format.format);
1343 pix->bytesperline = pix->width * f->bpp;
1344 switch (f->pixelformat) {
1345 case V4L2_PIX_FMT_YUV420:
1346 case V4L2_PIX_FMT_YVU420:
1347 pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
1348 break;
1349 default:
1350 pix->sizeimage = pix->height * pix->bytesperline;
1351 break;
1352 }
1353 pix->colorspace = V4L2_COLORSPACE_SRGB;
1354 return ret;
1355 }
1356
1357 static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1358 struct v4l2_format *fmt)
1359 {
1360 struct mcam_camera *cam = video_drvdata(filp);
1361 struct mcam_format_struct *f;
1362 int ret;
1363
1364 /*
1365 * Can't do anything if the device is not idle
1366 * Also can't if there are streaming buffers in place.
1367 */
1368 if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
1369 return -EBUSY;
1370
1371 f = mcam_find_format(fmt->fmt.pix.pixelformat);
1372
1373 /*
1374 * See if the formatting works in principle.
1375 */
1376 ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1377 if (ret)
1378 return ret;
1379 /*
1380 * Now we start to change things for real, so let's do it
1381 * under lock.
1382 */
1383 cam->pix_format = fmt->fmt.pix;
1384 cam->mbus_code = f->mbus_code;
1385
1386 /*
1387 * Make sure we have appropriate DMA buffers.
1388 */
1389 if (cam->buffer_mode == B_vmalloc) {
1390 ret = mcam_check_dma_buffers(cam);
1391 if (ret)
1392 goto out;
1393 }
1394 mcam_set_config_needed(cam, 1);
1395 out:
1396 return ret;
1397 }
1398
1399 /*
1400 * Return our stored notion of how the camera is/should be configured.
1401 * The V4L2 spec wants us to be smarter, and actually get this from
1402 * the camera (and not mess with it at open time). Someday.
1403 */
1404 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1405 struct v4l2_format *f)
1406 {
1407 struct mcam_camera *cam = video_drvdata(filp);
1408
1409 f->fmt.pix = cam->pix_format;
1410 return 0;
1411 }
1412
1413 /*
1414 * We only have one input - the sensor - so minimize the nonsense here.
1415 */
1416 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1417 struct v4l2_input *input)
1418 {
1419 if (input->index != 0)
1420 return -EINVAL;
1421
1422 input->type = V4L2_INPUT_TYPE_CAMERA;
1423 strcpy(input->name, "Camera");
1424 return 0;
1425 }
1426
1427 static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1428 {
1429 *i = 0;
1430 return 0;
1431 }
1432
1433 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1434 {
1435 if (i != 0)
1436 return -EINVAL;
1437 return 0;
1438 }
1439
1440 /*
1441 * G/S_PARM. Most of this is done by the sensor, but we are
1442 * the level which controls the number of read buffers.
1443 */
1444 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1445 struct v4l2_streamparm *parms)
1446 {
1447 struct mcam_camera *cam = video_drvdata(filp);
1448 int ret;
1449
1450 ret = sensor_call(cam, video, g_parm, parms);
1451 parms->parm.capture.readbuffers = n_dma_bufs;
1452 return ret;
1453 }
1454
1455 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1456 struct v4l2_streamparm *parms)
1457 {
1458 struct mcam_camera *cam = video_drvdata(filp);
1459 int ret;
1460
1461 ret = sensor_call(cam, video, s_parm, parms);
1462 parms->parm.capture.readbuffers = n_dma_bufs;
1463 return ret;
1464 }
1465
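/*
 * Frame size enumeration is passed through to the sensor; a single
 * fixed size is reported as DISCRETE, a range as CONTINUOUS.
 */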
1466 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1467 struct v4l2_frmsizeenum *sizes)
1468 {
1469 struct mcam_camera *cam = video_drvdata(filp);
1470 struct mcam_format_struct *f;
1471 struct v4l2_subdev_frame_size_enum fse = {
1472 .index = sizes->index,
1473 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1474 };
1475 int ret;
1476
1477 f = mcam_find_format(sizes->pixel_format);
1478 if (f->pixelformat != sizes->pixel_format)
1479 return -EINVAL;
1480 fse.code = f->mbus_code;
1481 ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
1482 if (ret)
1483 return ret;
1484 if (fse.min_width == fse.max_width &&
1485 fse.min_height == fse.max_height) {
1486 sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1487 sizes->discrete.width = fse.min_width;
1488 sizes->discrete.height = fse.min_height;
1489 return 0;
1490 }
1491 sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
1492 sizes->stepwise.min_width = fse.min_width;
1493 sizes->stepwise.max_width = fse.max_width;
1494 sizes->stepwise.min_height = fse.min_height;
1495 sizes->stepwise.max_height = fse.max_height;
1496 sizes->stepwise.step_width = 1;
1497 sizes->stepwise.step_height = 1;
1498 return 0;
1499 }
1500
1501 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1502 struct v4l2_frmivalenum *interval)
1503 {
1504 struct mcam_camera *cam = video_drvdata(filp);
1505 struct mcam_format_struct *f;
1506 struct v4l2_subdev_frame_interval_enum fie = {
1507 .index = interval->index,
1508 .width = interval->width,
1509 .height = interval->height,
1510 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1511 };
1512 int ret;
1513
1514 f = mcam_find_format(interval->pixel_format);
1515 if (f->pixelformat != interval->pixel_format)
1516 return -EINVAL;
1517 fie.code = f->mbus_code;
1518 ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1519 if (ret)
1520 return ret;
1521 interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1522 interval->discrete = fie.interval;
1523 return 0;
1524 }
1525
1526 #ifdef CONFIG_VIDEO_ADV_DEBUG
1527 static int mcam_vidioc_g_register(struct file *file, void *priv,
1528 struct v4l2_dbg_register *reg)
1529 {
1530 struct mcam_camera *cam = video_drvdata(file);
1531
1532 if (reg->reg > cam->regs_size - 4)
1533 return -EINVAL;
1534 reg->val = mcam_reg_read(cam, reg->reg);
1535 reg->size = 4;
1536 return 0;
1537 }
1538
1539 static int mcam_vidioc_s_register(struct file *file, void *priv,
1540 const struct v4l2_dbg_register *reg)
1541 {
1542 struct mcam_camera *cam = video_drvdata(file);
1543
1544 if (reg->reg > cam->regs_size - 4)
1545 return -EINVAL;
1546 mcam_reg_write(cam, reg->reg, reg->val);
1547 return 0;
1548 }
1549 #endif
1550
1551 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1552 .vidioc_querycap = mcam_vidioc_querycap,
1553 .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1554 .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
1555 .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
1556 .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
1557 .vidioc_enum_input = mcam_vidioc_enum_input,
1558 .vidioc_g_input = mcam_vidioc_g_input,
1559 .vidioc_s_input = mcam_vidioc_s_input,
1560 .vidioc_reqbufs = vb2_ioctl_reqbufs,
1561 .vidioc_create_bufs = vb2_ioctl_create_bufs,
1562 .vidioc_querybuf = vb2_ioctl_querybuf,
1563 .vidioc_qbuf = vb2_ioctl_qbuf,
1564 .vidioc_dqbuf = vb2_ioctl_dqbuf,
1565 .vidioc_expbuf = vb2_ioctl_expbuf,
1566 .vidioc_streamon = vb2_ioctl_streamon,
1567 .vidioc_streamoff = vb2_ioctl_streamoff,
1568 .vidioc_g_parm = mcam_vidioc_g_parm,
1569 .vidioc_s_parm = mcam_vidioc_s_parm,
1570 .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1571 .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1572 .vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
1573 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1574 #ifdef CONFIG_VIDEO_ADV_DEBUG
1575 .vidioc_g_register = mcam_vidioc_g_register,
1576 .vidioc_s_register = mcam_vidioc_s_register,
1577 #endif
1578 };
1579
1580 /* ---------------------------------------------------------------------- */
1581 /*
1582 * Our various file operations.
1583 */
1584 static int mcam_v4l_open(struct file *filp)
1585 {
1586 struct mcam_camera *cam = video_drvdata(filp);
1587 int ret;
1588
1589 mutex_lock(&cam->s_mutex);
1590 ret = v4l2_fh_open(filp);
1591 if (ret)
1592 goto out;
1593 if (v4l2_fh_is_singular_file(filp)) {
1594 ret = mcam_ctlr_power_up(cam);
1595 if (ret)
1596 goto out;
1597 __mcam_cam_reset(cam);
1598 mcam_set_config_needed(cam, 1);
1599 }
1600 out:
1601 mutex_unlock(&cam->s_mutex);
1602 if (ret)
1603 v4l2_fh_release(filp);
1604 return ret;
1605 }
1606
1607
1608 static int mcam_v4l_release(struct file *filp)
1609 {
1610 struct mcam_camera *cam = video_drvdata(filp);
1611 bool last_open;
1612
1613 mutex_lock(&cam->s_mutex);
1614 last_open = v4l2_fh_is_singular_file(filp);
1615 _vb2_fop_release(filp, NULL);
1616 if (last_open) {
1617 mcam_disable_mipi(cam);
1618 mcam_ctlr_power_down(cam);
1619 if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1620 mcam_free_dma_bufs(cam);
1621 }
1622
1623 mutex_unlock(&cam->s_mutex);
1624 return 0;
1625 }
1626
1627 static const struct v4l2_file_operations mcam_v4l_fops = {
1628 .owner = THIS_MODULE,
1629 .open = mcam_v4l_open,
1630 .release = mcam_v4l_release,
1631 .read = vb2_fop_read,
1632 .poll = vb2_fop_poll,
1633 .mmap = vb2_fop_mmap,
1634 .unlocked_ioctl = video_ioctl2,
1635 };
1636
1637
1638 /*
1639 * This template device holds all of those v4l2 methods; we
1640 * clone it for specific real devices.
1641 */
1642 static const struct video_device mcam_v4l_template = {
1643 .name = "mcam",
1644 .fops = &mcam_v4l_fops,
1645 .ioctl_ops = &mcam_v4l_ioctl_ops,
1646 .release = video_device_release_empty,
1647 };
1648
1649 /* ---------------------------------------------------------------------- */
1650 /*
1651 * Interrupt handler stuff
1652 */
1653 static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1654 {
1655 /*
1656 * Basic frame housekeeping.
1657 */
1658 set_bit(frame, &cam->flags);
1659 clear_bit(CF_DMA_ACTIVE, &cam->flags);
1660 cam->next_buf = frame;
1661 cam->buf_seq[frame] = cam->sequence++;
1662 cam->frame_state.frames++;
1663 /*
1664 * "This should never happen"
1665 */
1666 if (cam->state != S_STREAMING)
1667 return;
1668 /*
1669 * Process the frame and set up the next one.
1670 */
1671 cam->frame_complete(cam, frame);
1672 }
1673
1674
1675 /*
1676 * The interrupt handler; this needs to be called from the
1677 * platform irq handler with the lock held.
1678 */
1679 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1680 {
1681 unsigned int frame, handled = 0;
1682
1683 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1684 /*
1685 * Handle any frame completions. There really should
1686 * not be more than one of these, or we have fallen
1687 * far behind.
1688 *
1689 * When running in S/G mode, the frame number lacks any
1690 * real meaning - there's only one descriptor array - but
1691 * the controller still picks a different one to signal
1692 * each time.
1693 */
1694 for (frame = 0; frame < cam->nbufs; frame++)
1695 if (irqs & (IRQ_EOF0 << frame) &&
1696 test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1697 mcam_frame_complete(cam, frame);
1698 handled = 1;
1699 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1700 if (cam->buffer_mode == B_DMA_sg)
1701 break;
1702 }
1703 /*
1704 * If a frame starts, note that we have DMA active. This
1705 * code assumes that we won't get multiple frame interrupts
1706 * at once; may want to rethink that.
1707 */
1708 for (frame = 0; frame < cam->nbufs; frame++) {
1709 if (irqs & (IRQ_SOF0 << frame)) {
1710 set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1711 handled = IRQ_HANDLED;
1712 }
1713 }
1714
1715 if (handled == IRQ_HANDLED) {
1716 set_bit(CF_DMA_ACTIVE, &cam->flags);
1717 if (cam->buffer_mode == B_DMA_sg)
1718 mcam_ctlr_stop(cam);
1719 }
1720 return handled;
1721 }
1722
1723 /* ---------------------------------------------------------------------- */
1724 /*
1725 * Registration and such.
1726 */
1727 static struct ov7670_config sensor_cfg = {
1728 /*
1729 * Exclude QCIF mode, because it only captures a tiny portion
1730 * of the sensor FOV
1731 */
1732 .min_width = 320,
1733 .min_height = 240,
1734 };
1735
1736
1737 int mccic_register(struct mcam_camera *cam)
1738 {
1739 struct i2c_board_info ov7670_info = {
1740 .type = "ov7670",
1741 .addr = 0x42 >> 1,
1742 .platform_data = &sensor_cfg,
1743 };
1744 int ret;
1745
1746 /*
1747 * Validate the requested buffer mode.
1748 */
1749 if (buffer_mode >= 0)
1750 cam->buffer_mode = buffer_mode;
1751 if (cam->buffer_mode == B_DMA_sg &&
1752 cam->chip_id == MCAM_CAFE) {
1753 printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
1754 cam->buffer_mode = B_vmalloc;
1755 }
1756 if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
1757 printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
1758 cam->buffer_mode);
1759 return -EINVAL;
1760 }
1761 /*
1762 * Register with V4L
1763 */
1764 ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
1765 if (ret)
1766 return ret;
1767
1768 mutex_init(&cam->s_mutex);
1769 cam->state = S_NOTREADY;
1770 mcam_set_config_needed(cam, 1);
1771 cam->pix_format = mcam_def_pix_format;
1772 cam->mbus_code = mcam_def_mbus_code;
1773 mcam_ctlr_init(cam);
1774
1775 /*
1776 * Get the v4l2 setup done.
1777 */
1778 ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1779 if (ret)
1780 goto out_unregister;
1781 cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1782
1783 /*
1784 * Try to find the sensor.
1785 */
1786 sensor_cfg.clock_speed = cam->clock_speed;
1787 sensor_cfg.use_smbus = cam->use_smbus;
1788 cam->sensor_addr = ov7670_info.addr;
1789 cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
1790 cam->i2c_adapter, &ov7670_info, NULL);
1791 if (cam->sensor == NULL) {
1792 ret = -ENODEV;
1793 goto out_unregister;
1794 }
1795
1796 ret = mcam_cam_init(cam);
1797 if (ret)
1798 goto out_unregister;
1799
1800 ret = mcam_setup_vb2(cam);
1801 if (ret)
1802 goto out_unregister;
1803
1804 mutex_lock(&cam->s_mutex);
1805 cam->vdev = mcam_v4l_template;
1806 cam->vdev.v4l2_dev = &cam->v4l2_dev;
1807 cam->vdev.lock = &cam->s_mutex;
1808 cam->vdev.queue = &cam->vb_queue;
1809 video_set_drvdata(&cam->vdev, cam);
1810 ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
1811 if (ret) {
1812 mutex_unlock(&cam->s_mutex);
1813 goto out_unregister;
1814 }
1815
1816 /*
1817 * If so requested, try to get our DMA buffers now.
1818 */
1819 if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
1820 if (mcam_alloc_dma_bufs(cam, 1))
1821 cam_warn(cam, "Unable to alloc DMA buffers at load will try again later.");
1822 }
1823
1824 mutex_unlock(&cam->s_mutex);
1825 return 0;
1826
1827 out_unregister:
1828 v4l2_ctrl_handler_free(&cam->ctrl_handler);
1829 v4l2_device_unregister(&cam->v4l2_dev);
1830 return ret;
1831 }
1832
1833
1834 void mccic_shutdown(struct mcam_camera *cam)
1835 {
1836 /*
1837 * If we have no users (and we really, really should have no
1838 * users) the device will already be powered down. Trying to
1839 * take it down again will wedge the machine, which is frowned
1840 * upon.
1841 */
1842 if (!list_empty(&cam->vdev.fh_list)) {
1843 cam_warn(cam, "Removing a device with users!\n");
1844 mcam_ctlr_power_down(cam);
1845 }
1846 if (cam->buffer_mode == B_vmalloc)
1847 mcam_free_dma_bufs(cam);
1848 video_unregister_device(&cam->vdev);
1849 v4l2_ctrl_handler_free(&cam->ctrl_handler);
1850 v4l2_device_unregister(&cam->v4l2_dev);
1851 }
1852
1853 /*
1854 * Power management
1855 */
1856 #ifdef CONFIG_PM
1857
1858 void mccic_suspend(struct mcam_camera *cam)
1859 {
1860 mutex_lock(&cam->s_mutex);
1861 if (!list_empty(&cam->vdev.fh_list)) {
1862 enum mcam_state cstate = cam->state;
1863
1864 mcam_ctlr_stop_dma(cam);
1865 mcam_ctlr_power_down(cam);
1866 cam->state = cstate;
1867 }
1868 mutex_unlock(&cam->s_mutex);
1869 }
1870
1871 int mccic_resume(struct mcam_camera *cam)
1872 {
1873 int ret = 0;
1874
1875 mutex_lock(&cam->s_mutex);
1876 if (!list_empty(&cam->vdev.fh_list)) {
1877 ret = mcam_ctlr_power_up(cam);
1878 if (ret) {
1879 mutex_unlock(&cam->s_mutex);
1880 return ret;
1881 }
1882 __mcam_cam_reset(cam);
1883 } else {
1884 mcam_ctlr_power_down(cam);
1885 }
1886 mutex_unlock(&cam->s_mutex);
1887
1888 set_bit(CF_CONFIG_NEEDED, &cam->flags);
1889 if (cam->state == S_STREAMING) {
1890 /*
1891 * If there was a buffer in the DMA engine at suspend
1892 * time, put it back on the queue or we'll forget about it.
1893 */
1894 if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
1895 list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
1896 ret = mcam_read_setup(cam);
1897 }
1898 return ret;
1899 }
1900 #endif /* CONFIG_PM */