1/*
2 * The Marvell camera core.  This device appears in a number of settings,
3 * so it needs platform-specific support outside of the core.
4 *
5 * Copyright 2011 Jonathan Corbet corbet@lwn.net
6 */
7#include <linux/kernel.h>
8#include <linux/module.h>
9#include <linux/fs.h>
10#include <linux/mm.h>
11#include <linux/i2c.h>
12#include <linux/interrupt.h>
13#include <linux/spinlock.h>
14#include <linux/slab.h>
15#include <linux/device.h>
16#include <linux/wait.h>
17#include <linux/list.h>
18#include <linux/dma-mapping.h>
19#include <linux/delay.h>
20#include <linux/vmalloc.h>
21#include <linux/io.h>
22#include <linux/clk.h>
23#include <linux/videodev2.h>
24#include <media/v4l2-device.h>
25#include <media/v4l2-ioctl.h>
26#include <media/v4l2-ctrls.h>
27#include <media/ov7670.h>
28#include <media/videobuf2-vmalloc.h>
29#include <media/videobuf2-dma-contig.h>
30#include <media/videobuf2-dma-sg.h>
31
32#include "mcam-core.h"
33
34#ifdef MCAM_MODE_VMALLOC
35/*
36 * Internal DMA buffer management.  Since the controller cannot do S/G I/O,
37 * we must have physically contiguous buffers to bring frames into.
38 * These parameters control how many buffers we use, whether we
39 * allocate them at load time (better chance of success, but nails down
40 * memory) or when somebody tries to use the camera (riskier), and,
41 * for load-time allocation, how big they should be.
42 *
43 * The controller can cycle through three buffers.  We could use
44 * more by flipping pointers around, but it probably makes little
45 * sense.
46 */
47
48static bool alloc_bufs_at_read;
49module_param(alloc_bufs_at_read, bool, 0444);
50MODULE_PARM_DESC(alloc_bufs_at_read,
51		"Non-zero value causes DMA buffers to be allocated when the "
52		"video capture device is read, rather than at module load "
53		"time.  This saves memory, but decreases the chances of "
54		"successfully getting those buffers.  This parameter is "
55		"only used in the vmalloc buffer mode");
56
static uint n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
59MODULE_PARM_DESC(n_dma_bufs,
60		"The number of DMA buffers to allocate.  Can be either two "
61		"(saves memory, makes timing tighter) or three.");
62
static uint dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
module_param(dma_buf_size, uint, 0444);
65MODULE_PARM_DESC(dma_buf_size,
66		"The size of the allocated DMA buffers.  If actual operating "
67		"parameters require larger buffers, an attempt to reallocate "
68		"will be made.");
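
/*
 * For a sense of scale: with the usual 640x480 VGA geometry from
 * mcam-core.h, the load-time default above works out to
 * 640 * 480 * 2 = 614400 bytes (600 KiB) per buffer, or roughly 1.8MB
 * of physically contiguous memory for the default three buffers.  A
 * hypothetical override (the module name depends on which platform
 * wrapper this core is built into, e.g. cafe_ccic) might look like:
 *
 *	modprobe cafe_ccic n_dma_bufs=2 dma_buf_size=614400
 */
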
69#else /* MCAM_MODE_VMALLOC */
70static const bool alloc_bufs_at_read;
static const uint n_dma_bufs = 3;  /* Used by VIDIOC_G/S_PARM */
72#endif /* MCAM_MODE_VMALLOC */
73
74static bool flip;
75module_param(flip, bool, 0444);
76MODULE_PARM_DESC(flip,
77		"If set, the sensor will be instructed to flip the image "
78		"vertically.");
79
80static int buffer_mode = -1;
81module_param(buffer_mode, int, 0444);
82MODULE_PARM_DESC(buffer_mode,
83		"Set the buffer mode to be used; default is to go with what "
84		"the platform driver asks for.  Set to 0 for vmalloc, 1 for "
85		"DMA contiguous.");
86
87/*
88 * Status flags.  Always manipulated with bit operations.
89 */
90#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
91#define CF_BUF1_VALID	 1
92#define CF_BUF2_VALID	 2
93#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
94#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
95#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
96#define CF_SG_RESTART	 6	/* SG restart needed */
97#define CF_FRAME_SOF0	 7	/* Frame 0 started */
98#define CF_FRAME_SOF1	 8
99#define CF_FRAME_SOF2	 9
100
101#define sensor_call(cam, o, f, args...) \
102	v4l2_subdev_call(cam->sensor, o, f, ##args)
103
104static struct mcam_format_struct {
105	__u8 *desc;
106	__u32 pixelformat;
107	int bpp;   /* Bytes per pixel */
108	bool planar;
109	u32 mbus_code;
110} mcam_formats[] = {
111	{
112		.desc		= "YUYV 4:2:2",
113		.pixelformat	= V4L2_PIX_FMT_YUYV,
114		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
115		.bpp		= 2,
116		.planar		= false,
117	},
118	{
119		.desc		= "YVYU 4:2:2",
120		.pixelformat	= V4L2_PIX_FMT_YVYU,
121		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
122		.bpp		= 2,
123		.planar		= false,
124	},
125	{
126		.desc		= "YUV 4:2:2 PLANAR",
127		.pixelformat	= V4L2_PIX_FMT_YUV422P,
128		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
129		.bpp		= 2,
130		.planar		= true,
131	},
132	{
133		.desc		= "YUV 4:2:0 PLANAR",
134		.pixelformat	= V4L2_PIX_FMT_YUV420,
135		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
136		.bpp		= 2,
137		.planar		= true,
138	},
139	{
140		.desc		= "YVU 4:2:0 PLANAR",
141		.pixelformat	= V4L2_PIX_FMT_YVU420,
142		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
143		.bpp		= 2,
144		.planar		= true,
145	},
146	{
147		.desc		= "RGB 444",
148		.pixelformat	= V4L2_PIX_FMT_RGB444,
149		.mbus_code	= MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
150		.bpp		= 2,
151		.planar		= false,
152	},
153	{
154		.desc		= "RGB 565",
155		.pixelformat	= V4L2_PIX_FMT_RGB565,
156		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
157		.bpp		= 2,
158		.planar		= false,
159	},
160	{
161		.desc		= "Raw RGB Bayer",
162		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
163		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
164		.bpp		= 1,
165		.planar		= false,
166	},
167};
168#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
169
170static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
171{
172	unsigned i;
173
174	for (i = 0; i < N_MCAM_FMTS; i++)
175		if (mcam_formats[i].pixelformat == pixelformat)
176			return mcam_formats + i;
177	/* Not found? Then return the first format. */
178	return mcam_formats;
179}
180
181/*
182 * The default format we use until somebody says otherwise.
183 */
184static const struct v4l2_pix_format mcam_def_pix_format = {
185	.width		= VGA_WIDTH,
186	.height		= VGA_HEIGHT,
187	.pixelformat	= V4L2_PIX_FMT_YUYV,
188	.field		= V4L2_FIELD_NONE,
189	.bytesperline	= VGA_WIDTH*2,
190	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
191};
192
193static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
194
195
196/*
 * The two-word DMA descriptor format used by the Armada 610 and the like.
 * There is a three-word format as well (set C1_DESC_3WORD) where the third
199 * word is a pointer to the next descriptor, but we don't use it.  Two-word
200 * descriptors have to be contiguous in memory.
201 */
202struct mcam_dma_desc {
203	u32 dma_addr;
204	u32 segment_len;
205};
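
/*
 * Rough numbers, as a sketch: assuming 4KiB pages, a 640x480 YUYV
 * frame (614400 bytes) splits into at most 614400/4096 = 150 page-sized
 * segments; the S/G buf_init code below allocates sizeimage/PAGE_SIZE + 1
 * = 151 of these descriptors, i.e. a little over 1.2KB (151 * 8 bytes)
 * of coherent memory per buffer.
 */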
206
207struct yuv_pointer_t {
208	dma_addr_t y;
209	dma_addr_t u;
210	dma_addr_t v;
211};
212
213/*
214 * Our buffer type for working with videobuf2.  Note that the vb2
215 * developers have decreed that struct vb2_buffer must be at the
216 * beginning of this structure.
217 */
218struct mcam_vb_buffer {
219	struct vb2_buffer vb_buf;
220	struct list_head queue;
221	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
222	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
223	int dma_desc_nent;		/* Number of mapped descriptors */
224	struct yuv_pointer_t yuv_p;
225};
226
227static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
228{
229	return container_of(vb, struct mcam_vb_buffer, vb_buf);
230}
231
232/*
233 * Hand a completed buffer back to user space.
234 */
235static void mcam_buffer_done(struct mcam_camera *cam, int frame,
236		struct vb2_buffer *vbuf)
237{
238	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
239	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
240	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
241	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
242}
243
244
245
246/*
247 * Debugging and related.
248 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg)
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg)
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg)
255
256
257/*
258 * Flag manipulation helpers
259 */
260static void mcam_reset_buffers(struct mcam_camera *cam)
261{
262	int i;
263
264	cam->next_buf = -1;
265	for (i = 0; i < cam->nbufs; i++) {
266		clear_bit(i, &cam->flags);
267		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
268	}
269}
270
271static inline int mcam_needs_config(struct mcam_camera *cam)
272{
273	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
274}
275
276static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
277{
278	if (needed)
279		set_bit(CF_CONFIG_NEEDED, &cam->flags);
280	else
281		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
282}
283
284/* ------------------------------------------------------------------- */
285/*
286 * Make the controller start grabbing images.  Everything must
287 * be set up before doing this.
288 */
289static void mcam_ctlr_start(struct mcam_camera *cam)
290{
	/*
	 * set_bit performs a read, so no other barrier should be
	 * needed here.
	 */
293	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
294}
295
296static void mcam_ctlr_stop(struct mcam_camera *cam)
297{
298	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
299}
300
301static void mcam_enable_mipi(struct mcam_camera *mcam)
302{
	/* Using MIPI mode: program the DPHY and enable the MIPI block */
304	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
305			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
306	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
307	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
308	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
309
310	if (!mcam->mipi_enabled) {
311		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "invalid lane count, using 1\n");
313			mcam->lane = 1;	/* set the default value */
314		}
315		/*
		 * 0x41 activates 1 lane
		 * 0x43 activates 2 lanes
		 * 0x45 activates 3 lanes (should never happen)
		 * 0x47 activates 4 lanes
320		 */
321		mcam_reg_write(mcam, REG_CSI2_CTRL0,
322			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
323		mcam_reg_write(mcam, REG_CLKCTRL,
324			(mcam->mclk_src << 29) | mcam->mclk_div);
325
326		mcam->mipi_enabled = true;
327	}
328}
329
330static void mcam_disable_mipi(struct mcam_camera *mcam)
331{
	/* Called when using parallel mode, or to disable MIPI */
333	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
334	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
335	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
336	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
337	mcam->mipi_enabled = false;
338}
339
340/* ------------------------------------------------------------------- */
341
342#ifdef MCAM_MODE_VMALLOC
343/*
344 * Code specific to the vmalloc buffer mode.
345 */
346
347/*
348 * Allocate in-kernel DMA buffers for vmalloc mode.
349 */
350static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
351{
352	int i;
353
354	mcam_set_config_needed(cam, 1);
355	if (loadtime)
356		cam->dma_buf_size = dma_buf_size;
357	else
358		cam->dma_buf_size = cam->pix_format.sizeimage;
359	if (n_dma_bufs > 3)
360		n_dma_bufs = 3;
361
362	cam->nbufs = 0;
363	for (i = 0; i < n_dma_bufs; i++) {
364		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
365				cam->dma_buf_size, cam->dma_handles + i,
366				GFP_KERNEL);
367		if (cam->dma_bufs[i] == NULL) {
368			cam_warn(cam, "Failed to allocate DMA buffer\n");
369			break;
370		}
371		(cam->nbufs)++;
372	}
373
374	switch (cam->nbufs) {
375	case 1:
376		dma_free_coherent(cam->dev, cam->dma_buf_size,
377				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		/* fall through */
	case 0:
380		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
381		return -ENOMEM;
382
383	case 2:
384		if (n_dma_bufs > 2)
385			cam_warn(cam, "Will limp along with only 2 buffers\n");
386		break;
387	}
388	return 0;
389}
390
391static void mcam_free_dma_bufs(struct mcam_camera *cam)
392{
393	int i;
394
395	for (i = 0; i < cam->nbufs; i++) {
396		dma_free_coherent(cam->dev, cam->dma_buf_size,
397				cam->dma_bufs[i], cam->dma_handles[i]);
398		cam->dma_bufs[i] = NULL;
399	}
400	cam->nbufs = 0;
401}
402
403
404/*
405 * Set up DMA buffers when operating in vmalloc mode
406 */
407static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
408{
409	/*
410	 * Store the first two Y buffers (we aren't supporting
411	 * planar formats for now, so no UV bufs).  Then either
412	 * set the third if it exists, or tell the controller
413	 * to just use two.
414	 */
415	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
416	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
417	if (cam->nbufs > 2) {
418		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
419		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
420	} else
421		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
422	if (cam->chip_id == MCAM_CAFE)
423		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
424}
425
426/*
427 * Copy data out to user space in the vmalloc case
428 */
429static void mcam_frame_tasklet(unsigned long data)
430{
431	struct mcam_camera *cam = (struct mcam_camera *) data;
432	int i;
433	unsigned long flags;
434	struct mcam_vb_buffer *buf;
435
436	spin_lock_irqsave(&cam->dev_lock, flags);
437	for (i = 0; i < cam->nbufs; i++) {
438		int bufno = cam->next_buf;
439
440		if (cam->state != S_STREAMING || bufno < 0)
441			break;  /* I/O got stopped */
442		if (++(cam->next_buf) >= cam->nbufs)
443			cam->next_buf = 0;
444		if (!test_bit(bufno, &cam->flags))
445			continue;
446		if (list_empty(&cam->buffers)) {
447			cam->frame_state.singles++;
448			break;  /* Leave it valid, hope for better later */
449		}
450		cam->frame_state.delivered++;
451		clear_bit(bufno, &cam->flags);
452		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
453				queue);
454		list_del_init(&buf->queue);
455		/*
456		 * Drop the lock during the big copy.  This *should* be safe...
457		 */
458		spin_unlock_irqrestore(&cam->dev_lock, flags);
459		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
460				cam->pix_format.sizeimage);
461		mcam_buffer_done(cam, bufno, &buf->vb_buf);
462		spin_lock_irqsave(&cam->dev_lock, flags);
463	}
464	spin_unlock_irqrestore(&cam->dev_lock, flags);
465}
466
467
468/*
469 * Make sure our allocated buffers are up to the task.
470 */
471static int mcam_check_dma_buffers(struct mcam_camera *cam)
472{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
475	if (cam->nbufs == 0)
476		return mcam_alloc_dma_bufs(cam, 0);
477	return 0;
478}
479
480static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
481{
482	tasklet_schedule(&cam->s_tasklet);
483}
484
485#else /* MCAM_MODE_VMALLOC */
486
487static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
488{
489	return 0;
490}
491
492static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
493{
494	return;
495}
496
497static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
498{
499	return 0;
500}
501
502
503
504#endif /* MCAM_MODE_VMALLOC */
505
506
507#ifdef MCAM_MODE_DMA_CONTIG
508/* ---------------------------------------------------------------------- */
509/*
510 * DMA-contiguous code.
511 */
512
513static bool mcam_fmt_is_planar(__u32 pfmt)
514{
515	struct mcam_format_struct *f;
516
517	f = mcam_find_format(pfmt);
518	return f->planar;
519}
520
521/*
522 * Set up a contiguous buffer for the given frame.  Here also is where
523 * the underrun strategy is set: if there is no buffer available, reuse
524 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
525 * keep the interrupt handler from giving that buffer back to user
526 * space.  In this way, we always have a buffer to DMA to and don't
527 * have to try to play games stopping and restarting the controller.
528 */
529static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
530{
531	struct mcam_vb_buffer *buf;
532	struct v4l2_pix_format *fmt = &cam->pix_format;
533	dma_addr_t dma_handle;
534	u32 pixel_count = fmt->width * fmt->height;
535	struct vb2_buffer *vb;
536
537	/*
538	 * If there are no available buffers, go into single mode
539	 */
540	if (list_empty(&cam->buffers)) {
541		buf = cam->vb_bufs[frame ^ 0x1];
542		set_bit(CF_SINGLE_BUFFER, &cam->flags);
543		cam->frame_state.singles++;
544	} else {
545		/*
546		 * OK, we have a buffer we can use.
547		 */
548		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
549					queue);
550		list_del_init(&buf->queue);
551		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
552	}
553
554	cam->vb_bufs[frame] = buf;
555	vb = &buf->vb_buf;
556
557	dma_handle = vb2_dma_contig_plane_dma_addr(vb, 0);
558	buf->yuv_p.y = dma_handle;
559
560	switch (cam->pix_format.pixelformat) {
561	case V4L2_PIX_FMT_YUV422P:
562		buf->yuv_p.u = buf->yuv_p.y + pixel_count;
563		buf->yuv_p.v = buf->yuv_p.u + pixel_count / 2;
564		break;
565	case V4L2_PIX_FMT_YUV420:
566		buf->yuv_p.u = buf->yuv_p.y + pixel_count;
567		buf->yuv_p.v = buf->yuv_p.u + pixel_count / 4;
568		break;
569	case V4L2_PIX_FMT_YVU420:
570		buf->yuv_p.v = buf->yuv_p.y + pixel_count;
571		buf->yuv_p.u = buf->yuv_p.v + pixel_count / 4;
572		break;
573	default:
574		break;
575	}
576
577	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR, buf->yuv_p.y);
578	if (mcam_fmt_is_planar(fmt->pixelformat)) {
579		mcam_reg_write(cam, frame == 0 ?
580					REG_U0BAR : REG_U1BAR, buf->yuv_p.u);
581		mcam_reg_write(cam, frame == 0 ?
582					REG_V0BAR : REG_V1BAR, buf->yuv_p.v);
583	}
584}
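
/*
 * A worked example of the plane arithmetic above (a sketch for one
 * format only): with 640x480 V4L2_PIX_FMT_YUV420, pixel_count is
 * 307200, so the U plane starts 307200 bytes into the buffer and the
 * V plane 76800 bytes after that, for 460800 bytes total - matching
 * the width*height*3/2 sizeimage computed in
 * mcam_vidioc_try_fmt_vid_cap().
 */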
585
586/*
587 * Initial B_DMA_contig setup.
588 */
589static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
590{
591	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
592	cam->nbufs = 2;
593	mcam_set_contig_buffer(cam, 0);
594	mcam_set_contig_buffer(cam, 1);
595}
596
597/*
598 * Frame completion handling.
599 */
600static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
601{
602	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
603
604	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
605		cam->frame_state.delivered++;
606		mcam_buffer_done(cam, frame, &buf->vb_buf);
607	}
608	mcam_set_contig_buffer(cam, frame);
609}
610
611#endif /* MCAM_MODE_DMA_CONTIG */
612
613#ifdef MCAM_MODE_DMA_SG
614/* ---------------------------------------------------------------------- */
615/*
616 * Scatter/gather-specific code.
617 */
618
619/*
620 * Set up the next buffer for S/G I/O; caller should be sure that
621 * the controller is stopped and a buffer is available.
622 */
623static void mcam_sg_next_buffer(struct mcam_camera *cam)
624{
625	struct mcam_vb_buffer *buf;
626
627	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
628	list_del_init(&buf->queue);
629	/*
630	 * Very Bad Not Good Things happen if you don't clear
631	 * C1_DESC_ENA before making any descriptor changes.
632	 */
633	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
634	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
635	mcam_reg_write(cam, REG_DESC_LEN_Y,
636			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
637	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
638	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
639	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
640	cam->vb_bufs[0] = buf;
641}
642
643/*
644 * Initial B_DMA_sg setup
645 */
646static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
647{
648	/*
649	 * The list-empty condition can hit us at resume time
650	 * if the buffer list was empty when the system was suspended.
651	 */
652	if (list_empty(&cam->buffers)) {
653		set_bit(CF_SG_RESTART, &cam->flags);
654		return;
655	}
656
657	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
658	mcam_sg_next_buffer(cam);
659	cam->nbufs = 3;
660}
661
662
663/*
664 * Frame completion with S/G is trickier.  We can't muck with
665 * a descriptor chain on the fly, since the controller buffers it
666 * internally.  So we have to actually stop and restart; Marvell
667 * says this is the way to do it.
668 *
669 * Of course, stopping is easier said than done; experience shows
670 * that the controller can start a frame *after* C0_ENABLE has been
671 * cleared.  So when running in S/G mode, the controller is "stopped"
672 * on receipt of the start-of-frame interrupt.  That means we can
673 * safely change the DMA descriptor array here and restart things
674 * (assuming there's another buffer waiting to go).
675 */
676static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
677{
678	struct mcam_vb_buffer *buf = cam->vb_bufs[0];
679
680	/*
681	 * If we're no longer supposed to be streaming, don't do anything.
682	 */
683	if (cam->state != S_STREAMING)
684		return;
685	/*
686	 * If we have another buffer available, put it in and
687	 * restart the engine.
688	 */
689	if (!list_empty(&cam->buffers)) {
690		mcam_sg_next_buffer(cam);
691		mcam_ctlr_start(cam);
692	/*
693	 * Otherwise set CF_SG_RESTART and the controller will
694	 * be restarted once another buffer shows up.
695	 */
696	} else {
697		set_bit(CF_SG_RESTART, &cam->flags);
698		cam->frame_state.singles++;
699		cam->vb_bufs[0] = NULL;
700	}
701	/*
702	 * Now we can give the completed frame back to user space.
703	 */
704	cam->frame_state.delivered++;
705	mcam_buffer_done(cam, frame, &buf->vb_buf);
706}
707
708
709/*
710 * Scatter/gather mode requires stopping the controller between
711 * frames so we can put in a new DMA descriptor array.  If no new
712 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
714 */
715static void mcam_sg_restart(struct mcam_camera *cam)
716{
717	mcam_ctlr_dma_sg(cam);
718	mcam_ctlr_start(cam);
719	clear_bit(CF_SG_RESTART, &cam->flags);
720}
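
/*
 * To recap the S/G dance described above, one frame goes roughly like
 * this (dev_lock held throughout):
 *
 *	SOF irq  ->  mccic_irq() "stops" the controller (B_DMA_sg only)
 *	EOF irq  ->  mcam_dma_sg_done()
 *	               buffer queued: mcam_sg_next_buffer(), mcam_ctlr_start()
 *	               list empty:    set CF_SG_RESTART and wait
 *	QBUF     ->  mcam_vb_buf_queue() sees CF_SG_RESTART -> mcam_sg_restart()
 */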
721
722#else /* MCAM_MODE_DMA_SG */
723
724static inline void mcam_sg_restart(struct mcam_camera *cam)
725{
726	return;
727}
728
729#endif /* MCAM_MODE_DMA_SG */
730
731/* ---------------------------------------------------------------------- */
732/*
733 * Buffer-mode-independent controller code.
734 */
735
736/*
737 * Image format setup
738 */
739static void mcam_ctlr_image(struct mcam_camera *cam)
740{
741	struct v4l2_pix_format *fmt = &cam->pix_format;
742	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
743
744	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
745		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
746	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
747	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
748
749	switch (fmt->pixelformat) {
750	case V4L2_PIX_FMT_YUYV:
751	case V4L2_PIX_FMT_YVYU:
752		widthy = fmt->width * 2;
753		widthuv = 0;
754		break;
755	case V4L2_PIX_FMT_JPEG:
756		imgsz_h = (fmt->sizeimage / fmt->bytesperline) << IMGSZ_V_SHIFT;
757		widthy = fmt->bytesperline;
758		widthuv = 0;
759		break;
760	case V4L2_PIX_FMT_YUV422P:
761	case V4L2_PIX_FMT_YUV420:
762	case V4L2_PIX_FMT_YVU420:
763		widthy = fmt->width;
764		widthuv = fmt->width / 2;
765		break;
766	default:
767		widthy = fmt->bytesperline;
768		widthuv = 0;
769	}
770
771	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
772			IMGP_YP_MASK | IMGP_UVP_MASK);
773	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
774	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
775
776	/*
777	 * Tell the controller about the image format we are using.
778	 */
779	switch (fmt->pixelformat) {
780	case V4L2_PIX_FMT_YUV422P:
781		mcam_reg_write_mask(cam, REG_CTRL0,
782			C0_DF_YUV | C0_YUV_PLANAR | C0_YUVE_YVYU, C0_DF_MASK);
783		break;
784	case V4L2_PIX_FMT_YUV420:
785	case V4L2_PIX_FMT_YVU420:
786		mcam_reg_write_mask(cam, REG_CTRL0,
787			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
788		break;
789	case V4L2_PIX_FMT_YUYV:
790		mcam_reg_write_mask(cam, REG_CTRL0,
791			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
792		break;
793	case V4L2_PIX_FMT_YVYU:
794		mcam_reg_write_mask(cam, REG_CTRL0,
795			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
796		break;
797	case V4L2_PIX_FMT_JPEG:
798		mcam_reg_write_mask(cam, REG_CTRL0,
799			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
800		break;
801	case V4L2_PIX_FMT_RGB444:
802		mcam_reg_write_mask(cam, REG_CTRL0,
803			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XRGB, C0_DF_MASK);
804		/* Alpha value? */
805		break;
806	case V4L2_PIX_FMT_RGB565:
807		mcam_reg_write_mask(cam, REG_CTRL0,
808			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
809		break;
810	default:
811		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
812		break;
813	}
814
815	/*
816	 * Make sure it knows we want to use hsync/vsync.
817	 */
818	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
819	/*
	 * This field controls the generation of EOF (DVP only).
821	 */
822	if (cam->bus_type != V4L2_MBUS_CSI2)
823		mcam_reg_set_bit(cam, REG_CTRL0,
824				C0_EOF_VSYNC | C0_VEDGE_CTRL);
825}
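
/*
 * By way of illustration (packed-YUV case only): a 640x480 YUYV frame
 * ends up with widthy = 1280 and widthuv = 0, so REG_IMGPITCH gets a
 * 1280-byte Y pitch and no UV pitch, while REG_IMGSIZE is loaded with
 * the 480-line height (shifted per IMGSZ_V_SHIFT) alongside the
 * 1280-byte line width.
 */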
826
827
828/*
829 * Configure the controller for operation; caller holds the
830 * device mutex.
831 */
832static int mcam_ctlr_configure(struct mcam_camera *cam)
833{
834	unsigned long flags;
835
836	spin_lock_irqsave(&cam->dev_lock, flags);
837	clear_bit(CF_SG_RESTART, &cam->flags);
838	cam->dma_setup(cam);
839	mcam_ctlr_image(cam);
840	mcam_set_config_needed(cam, 0);
841	spin_unlock_irqrestore(&cam->dev_lock, flags);
842	return 0;
843}
844
845static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
846{
847	/*
848	 * Clear any pending interrupts, since we do not
849	 * expect to have I/O active prior to enabling.
850	 */
851	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
852	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
853}
854
855static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
856{
857	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
858}
859
860
861
862static void mcam_ctlr_init(struct mcam_camera *cam)
863{
864	unsigned long flags;
865
866	spin_lock_irqsave(&cam->dev_lock, flags);
867	/*
868	 * Make sure it's not powered down.
869	 */
870	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
871	/*
872	 * Turn off the enable bit.  It sure should be off anyway,
873	 * but it's good to be sure.
874	 */
875	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
876	/*
877	 * Clock the sensor appropriately.  Controller clock should
878	 * be 48MHz, sensor "typical" value is half that.
879	 */
880	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
881	spin_unlock_irqrestore(&cam->dev_lock, flags);
882}
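
/*
 * In other words: with the divider of 2 written above, the nominal
 * 48MHz controller clock comes out as the 24MHz "typical" sensor clock
 * mentioned in the comment.
 */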
883
884
885/*
886 * Stop the controller, and don't return until we're really sure that no
887 * further DMA is going on.
888 */
889static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
890{
891	unsigned long flags;
892
893	/*
894	 * Theory: stop the camera controller (whether it is operating
895	 * or not).  Delay briefly just in case we race with the SOF
896	 * interrupt, then wait until no DMA is active.
897	 */
898	spin_lock_irqsave(&cam->dev_lock, flags);
899	clear_bit(CF_SG_RESTART, &cam->flags);
900	mcam_ctlr_stop(cam);
901	cam->state = S_IDLE;
902	spin_unlock_irqrestore(&cam->dev_lock, flags);
903	/*
904	 * This is a brutally long sleep, but experience shows that
905	 * it can take the controller a while to get the message that
906	 * it needs to stop grabbing frames.  In particular, we can
907	 * sometimes (on mmp) get a frame at the end WITHOUT the
908	 * start-of-frame indication.
909	 */
910	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		/* This would be bad news - what now? */
		cam_err(cam, "Timeout waiting for DMA to end\n");
914	spin_lock_irqsave(&cam->dev_lock, flags);
915	mcam_ctlr_irq_disable(cam);
916	spin_unlock_irqrestore(&cam->dev_lock, flags);
917}
918
919/*
920 * Power up and down.
921 */
922static int mcam_ctlr_power_up(struct mcam_camera *cam)
923{
924	unsigned long flags;
925	int ret;
926
927	spin_lock_irqsave(&cam->dev_lock, flags);
928	ret = cam->plat_power_up(cam);
929	if (ret) {
930		spin_unlock_irqrestore(&cam->dev_lock, flags);
931		return ret;
932	}
933	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
934	spin_unlock_irqrestore(&cam->dev_lock, flags);
935	msleep(5); /* Just to be sure */
936	return 0;
937}
938
939static void mcam_ctlr_power_down(struct mcam_camera *cam)
940{
941	unsigned long flags;
942
943	spin_lock_irqsave(&cam->dev_lock, flags);
944	/*
945	 * School of hard knocks department: be sure we do any register
946	 * twiddling on the controller *before* calling the platform
947	 * power down routine.
948	 */
949	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
950	cam->plat_power_down(cam);
951	spin_unlock_irqrestore(&cam->dev_lock, flags);
952}
953
954/* -------------------------------------------------------------------- */
955/*
956 * Communications with the sensor.
957 */
958
959static int __mcam_cam_reset(struct mcam_camera *cam)
960{
961	return sensor_call(cam, core, reset, 0);
962}
963
964/*
965 * We have found the sensor on the i2c.  Let's try to have a
966 * conversation.
967 */
968static int mcam_cam_init(struct mcam_camera *cam)
969{
970	int ret;
971
972	mutex_lock(&cam->s_mutex);
973	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d\n",
975				cam->state);
976	ret = __mcam_cam_reset(cam);
977	/* Get/set parameters? */
978	cam->state = S_IDLE;
979	mcam_ctlr_power_down(cam);
980	mutex_unlock(&cam->s_mutex);
981	return ret;
982}
983
984/*
985 * Configure the sensor to match the parameters we have.  Caller should
986 * hold s_mutex
987 */
988static int mcam_cam_set_flip(struct mcam_camera *cam)
989{
990	struct v4l2_control ctrl;
991
992	memset(&ctrl, 0, sizeof(ctrl));
993	ctrl.id = V4L2_CID_VFLIP;
994	ctrl.value = flip;
995	return sensor_call(cam, core, s_ctrl, &ctrl);
996}
997
998
999static int mcam_cam_configure(struct mcam_camera *cam)
1000{
1001	struct v4l2_mbus_framefmt mbus_fmt;
1002	int ret;
1003
1004	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
1005	ret = sensor_call(cam, core, init, 0);
1006	if (ret == 0)
1007		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
1008	/*
1009	 * OV7670 does weird things if flip is set *before* format...
1010	 */
1011	ret += mcam_cam_set_flip(cam);
1012	return ret;
1013}
1014
1015/*
1016 * Get everything ready, and start grabbing frames.
1017 */
1018static int mcam_read_setup(struct mcam_camera *cam)
1019{
1020	int ret;
1021	unsigned long flags;
1022
1023	/*
1024	 * Configuration.  If we still don't have DMA buffers,
1025	 * make one last, desperate attempt.
1026	 */
1027	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1028			mcam_alloc_dma_bufs(cam, 0))
1029		return -ENOMEM;
1030
1031	if (mcam_needs_config(cam)) {
1032		mcam_cam_configure(cam);
1033		ret = mcam_ctlr_configure(cam);
1034		if (ret)
1035			return ret;
1036	}
1037
1038	/*
1039	 * Turn it loose.
1040	 */
1041	spin_lock_irqsave(&cam->dev_lock, flags);
1042	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1043	mcam_reset_buffers(cam);
1044	/*
1045	 * Update CSI2_DPHY value
1046	 */
1047	if (cam->calc_dphy)
1048		cam->calc_dphy(cam);
1049	cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
1050			cam->dphy[0], cam->dphy[1], cam->dphy[2]);
1051	if (cam->bus_type == V4L2_MBUS_CSI2)
1052		mcam_enable_mipi(cam);
1053	else
1054		mcam_disable_mipi(cam);
1055	mcam_ctlr_irq_enable(cam);
1056	cam->state = S_STREAMING;
1057	if (!test_bit(CF_SG_RESTART, &cam->flags))
1058		mcam_ctlr_start(cam);
1059	spin_unlock_irqrestore(&cam->dev_lock, flags);
1060	return 0;
1061}
1062
1063/* ----------------------------------------------------------------------- */
1064/*
1065 * Videobuf2 interface code.
1066 */
1067
1068static int mcam_vb_queue_setup(struct vb2_queue *vq,
1069		const struct v4l2_format *fmt, unsigned int *nbufs,
1070		unsigned int *num_planes, unsigned int sizes[],
1071		void *alloc_ctxs[])
1072{
1073	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1074	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1075
1076	sizes[0] = cam->pix_format.sizeimage;
	*num_planes = 1; /* Someday we will have to support planar formats... */
1078	if (*nbufs < minbufs)
1079		*nbufs = minbufs;
1080	if (cam->buffer_mode == B_DMA_contig)
1081		alloc_ctxs[0] = cam->vb_alloc_ctx;
1082	else if (cam->buffer_mode == B_DMA_sg)
1083		alloc_ctxs[0] = cam->vb_alloc_ctx_sg;
1084	return 0;
1085}
1086
1087
1088static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1089{
1090	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1091	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1092	unsigned long flags;
1093	int start;
1094
1095	spin_lock_irqsave(&cam->dev_lock, flags);
1096	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1097	list_add(&mvb->queue, &cam->buffers);
1098	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1099		mcam_sg_restart(cam);
1100	spin_unlock_irqrestore(&cam->dev_lock, flags);
1101	if (start)
1102		mcam_read_setup(cam);
1103}
1104
1105/*
1106 * These need to be called with the mutex held from vb2
1107 */
1108static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1109{
1110	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1111	unsigned int frame;
1112
1113	if (cam->state != S_IDLE) {
1114		INIT_LIST_HEAD(&cam->buffers);
1115		return -EINVAL;
1116	}
1117	cam->sequence = 0;
1118	/*
1119	 * Videobuf2 sneakily hoards all the buffers and won't
1120	 * give them to us until *after* streaming starts.  But
1121	 * we can't actually start streaming until we have a
1122	 * destination.  So go into a wait state and hope they
1123	 * give us buffers soon.
1124	 */
1125	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1126		cam->state = S_BUFWAIT;
1127		return 0;
1128	}
1129
1130	/*
	 * Make sure we clear any leftover frame flags
	 * before we really start streaming.
1133	 */
1134	for (frame = 0; frame < cam->nbufs; frame++)
1135		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1136
1137	return mcam_read_setup(cam);
1138}
1139
1140static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1141{
1142	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1143	unsigned long flags;
1144
1145	if (cam->state == S_BUFWAIT) {
1146		/* They never gave us buffers */
1147		cam->state = S_IDLE;
1148		return;
1149	}
1150	if (cam->state != S_STREAMING)
1151		return;
1152	mcam_ctlr_stop_dma(cam);
1153	/*
1154	 * Reset the CCIC PHY after stopping streaming,
1155	 * otherwise, the CCIC may be unstable.
1156	 */
1157	if (cam->ctlr_reset)
1158		cam->ctlr_reset(cam);
1159	/*
1160	 * VB2 reclaims the buffers, so we need to forget
1161	 * about them.
1162	 */
1163	spin_lock_irqsave(&cam->dev_lock, flags);
1164	INIT_LIST_HEAD(&cam->buffers);
1165	spin_unlock_irqrestore(&cam->dev_lock, flags);
1166}
1167
1168
1169static const struct vb2_ops mcam_vb2_ops = {
1170	.queue_setup		= mcam_vb_queue_setup,
1171	.buf_queue		= mcam_vb_buf_queue,
1172	.start_streaming	= mcam_vb_start_streaming,
1173	.stop_streaming		= mcam_vb_stop_streaming,
1174	.wait_prepare		= vb2_ops_wait_prepare,
1175	.wait_finish		= vb2_ops_wait_finish,
1176};
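
/*
 * For orientation, a rough sketch of how vb2 ends up driving these
 * callbacks (the framework, not this driver, dictates the exact
 * ordering and batching):
 *
 *	VIDIOC_REQBUFS   -> queue_setup (and, for the S/G variant below,
 *	                    buf_init for each buffer)
 *	VIDIOC_QBUF      -> buf_prepare/buf_queue (queued buffers land on
 *	                    cam->buffers)
 *	VIDIOC_STREAMON  -> start_streaming -> mcam_read_setup()
 *	VIDIOC_STREAMOFF -> stop_streaming -> mcam_ctlr_stop_dma()
 */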
1177
1178
1179#ifdef MCAM_MODE_DMA_SG
1180/*
1181 * Scatter/gather mode uses all of the above functions plus a
1182 * few extras to deal with DMA mapping.
1183 */
1184static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1185{
1186	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1187	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1188	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1189
1190	mvb->dma_desc = dma_alloc_coherent(cam->dev,
1191			ndesc * sizeof(struct mcam_dma_desc),
1192			&mvb->dma_desc_pa, GFP_KERNEL);
1193	if (mvb->dma_desc == NULL) {
1194		cam_err(cam, "Unable to get DMA descriptor array\n");
1195		return -ENOMEM;
1196	}
1197	return 0;
1198}
1199
1200static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1201{
1202	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1203	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1204	struct mcam_dma_desc *desc = mvb->dma_desc;
1205	struct scatterlist *sg;
1206	int i;
1207
1208	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1209		desc->dma_addr = sg_dma_address(sg);
1210		desc->segment_len = sg_dma_len(sg);
1211		desc++;
1212	}
1213	return 0;
1214}
1215
1216static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1217{
1218	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1219	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1220	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1221
1222	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1223			mvb->dma_desc, mvb->dma_desc_pa);
1224}
1225
1226
1227static const struct vb2_ops mcam_vb2_sg_ops = {
1228	.queue_setup		= mcam_vb_queue_setup,
1229	.buf_init		= mcam_vb_sg_buf_init,
1230	.buf_prepare		= mcam_vb_sg_buf_prepare,
1231	.buf_queue		= mcam_vb_buf_queue,
1232	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
1233	.start_streaming	= mcam_vb_start_streaming,
1234	.stop_streaming		= mcam_vb_stop_streaming,
1235	.wait_prepare		= vb2_ops_wait_prepare,
1236	.wait_finish		= vb2_ops_wait_finish,
1237};
1238
1239#endif /* MCAM_MODE_DMA_SG */
1240
1241static int mcam_setup_vb2(struct mcam_camera *cam)
1242{
1243	struct vb2_queue *vq = &cam->vb_queue;
1244
1245	memset(vq, 0, sizeof(*vq));
1246	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1247	vq->drv_priv = cam;
1248	vq->lock = &cam->s_mutex;
1249	INIT_LIST_HEAD(&cam->buffers);
1250	switch (cam->buffer_mode) {
1251	case B_DMA_contig:
1252#ifdef MCAM_MODE_DMA_CONTIG
1253		vq->ops = &mcam_vb2_ops;
1254		vq->mem_ops = &vb2_dma_contig_memops;
1255		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1256		vq->io_modes = VB2_MMAP | VB2_USERPTR;
1257		cam->dma_setup = mcam_ctlr_dma_contig;
1258		cam->frame_complete = mcam_dma_contig_done;
1259		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
1260		if (IS_ERR(cam->vb_alloc_ctx))
1261			return PTR_ERR(cam->vb_alloc_ctx);
1262#endif
1263		break;
1264	case B_DMA_sg:
1265#ifdef MCAM_MODE_DMA_SG
1266		vq->ops = &mcam_vb2_sg_ops;
1267		vq->mem_ops = &vb2_dma_sg_memops;
1268		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1269		vq->io_modes = VB2_MMAP | VB2_USERPTR;
1270		cam->dma_setup = mcam_ctlr_dma_sg;
1271		cam->frame_complete = mcam_dma_sg_done;
1272		cam->vb_alloc_ctx_sg = vb2_dma_sg_init_ctx(cam->dev);
1273		if (IS_ERR(cam->vb_alloc_ctx_sg))
1274			return PTR_ERR(cam->vb_alloc_ctx_sg);
1275#endif
1276		break;
1277	case B_vmalloc:
1278#ifdef MCAM_MODE_VMALLOC
1279		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
1280				(unsigned long) cam);
1281		vq->ops = &mcam_vb2_ops;
1282		vq->mem_ops = &vb2_vmalloc_memops;
1283		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1284		vq->io_modes = VB2_MMAP;
1285		cam->dma_setup = mcam_ctlr_dma_vmalloc;
1286		cam->frame_complete = mcam_vmalloc_done;
1287#endif
1288		break;
1289	}
1290	return vb2_queue_init(vq);
1291}
1292
1293static void mcam_cleanup_vb2(struct mcam_camera *cam)
1294{
1295	vb2_queue_release(&cam->vb_queue);
1296#ifdef MCAM_MODE_DMA_CONTIG
1297	if (cam->buffer_mode == B_DMA_contig)
1298		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
1299#endif
1300#ifdef MCAM_MODE_DMA_SG
1301	if (cam->buffer_mode == B_DMA_sg)
1302		vb2_dma_sg_cleanup_ctx(cam->vb_alloc_ctx_sg);
1303#endif
1304}
1305
1306
1307/* ---------------------------------------------------------------------- */
1308/*
1309 * The long list of V4L2 ioctl() operations.
1310 */
1311
1312static int mcam_vidioc_streamon(struct file *filp, void *priv,
1313		enum v4l2_buf_type type)
1314{
1315	struct mcam_camera *cam = filp->private_data;
1316	int ret;
1317
1318	mutex_lock(&cam->s_mutex);
1319	ret = vb2_streamon(&cam->vb_queue, type);
1320	mutex_unlock(&cam->s_mutex);
1321	return ret;
1322}
1323
1324
1325static int mcam_vidioc_streamoff(struct file *filp, void *priv,
1326		enum v4l2_buf_type type)
1327{
1328	struct mcam_camera *cam = filp->private_data;
1329	int ret;
1330
1331	mutex_lock(&cam->s_mutex);
1332	ret = vb2_streamoff(&cam->vb_queue, type);
1333	mutex_unlock(&cam->s_mutex);
1334	return ret;
1335}
1336
1337
1338static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
1339		struct v4l2_requestbuffers *req)
1340{
1341	struct mcam_camera *cam = filp->private_data;
1342	int ret;
1343
1344	mutex_lock(&cam->s_mutex);
1345	ret = vb2_reqbufs(&cam->vb_queue, req);
1346	mutex_unlock(&cam->s_mutex);
1347	return ret;
1348}
1349
1350
1351static int mcam_vidioc_querybuf(struct file *filp, void *priv,
1352		struct v4l2_buffer *buf)
1353{
1354	struct mcam_camera *cam = filp->private_data;
1355	int ret;
1356
1357	mutex_lock(&cam->s_mutex);
1358	ret = vb2_querybuf(&cam->vb_queue, buf);
1359	mutex_unlock(&cam->s_mutex);
1360	return ret;
1361}
1362
1363static int mcam_vidioc_qbuf(struct file *filp, void *priv,
1364		struct v4l2_buffer *buf)
1365{
1366	struct mcam_camera *cam = filp->private_data;
1367	int ret;
1368
1369	mutex_lock(&cam->s_mutex);
1370	ret = vb2_qbuf(&cam->vb_queue, buf);
1371	mutex_unlock(&cam->s_mutex);
1372	return ret;
1373}
1374
1375static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
1376		struct v4l2_buffer *buf)
1377{
1378	struct mcam_camera *cam = filp->private_data;
1379	int ret;
1380
1381	mutex_lock(&cam->s_mutex);
1382	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
1383	mutex_unlock(&cam->s_mutex);
1384	return ret;
1385}
1386
1387static int mcam_vidioc_querycap(struct file *file, void *priv,
1388		struct v4l2_capability *cap)
1389{
	strlcpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
	strlcpy(cap->card, "marvell_ccic", sizeof(cap->card));
1392	cap->device_caps = V4L2_CAP_VIDEO_CAPTURE |
1393		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1394	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
1395	return 0;
1396}
1397
1398
1399static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1400		void *priv, struct v4l2_fmtdesc *fmt)
1401{
1402	if (fmt->index >= N_MCAM_FMTS)
1403		return -EINVAL;
1404	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
1405			sizeof(fmt->description));
1406	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1407	return 0;
1408}
1409
1410static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1411		struct v4l2_format *fmt)
1412{
1413	struct mcam_camera *cam = priv;
1414	struct mcam_format_struct *f;
1415	struct v4l2_pix_format *pix = &fmt->fmt.pix;
1416	struct v4l2_mbus_framefmt mbus_fmt;
1417	int ret;
1418
1419	f = mcam_find_format(pix->pixelformat);
1420	pix->pixelformat = f->pixelformat;
1421	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
1422	mutex_lock(&cam->s_mutex);
1423	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
1424	mutex_unlock(&cam->s_mutex);
1425	v4l2_fill_pix_format(pix, &mbus_fmt);
1426	switch (f->pixelformat) {
1427	case V4L2_PIX_FMT_YUV420:
1428	case V4L2_PIX_FMT_YVU420:
1429		pix->bytesperline = pix->width * 3 / 2;
1430		break;
1431	default:
1432		pix->bytesperline = pix->width * f->bpp;
1433		break;
1434	}
1435	pix->sizeimage = pix->height * pix->bytesperline;
1436	return ret;
1437}
1438
1439static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1440		struct v4l2_format *fmt)
1441{
1442	struct mcam_camera *cam = priv;
1443	struct mcam_format_struct *f;
1444	int ret;
1445
1446	/*
	 * Can't do anything if the device is not idle.
1448	 * Also can't if there are streaming buffers in place.
1449	 */
1450	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
1451		return -EBUSY;
1452
1453	f = mcam_find_format(fmt->fmt.pix.pixelformat);
1454
1455	/*
1456	 * See if the formatting works in principle.
1457	 */
1458	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1459	if (ret)
1460		return ret;
1461	/*
1462	 * Now we start to change things for real, so let's do it
1463	 * under lock.
1464	 */
1465	mutex_lock(&cam->s_mutex);
1466	cam->pix_format = fmt->fmt.pix;
1467	cam->mbus_code = f->mbus_code;
1468
1469	/*
1470	 * Make sure we have appropriate DMA buffers.
1471	 */
1472	if (cam->buffer_mode == B_vmalloc) {
1473		ret = mcam_check_dma_buffers(cam);
1474		if (ret)
1475			goto out;
1476	}
1477	mcam_set_config_needed(cam, 1);
1478out:
1479	mutex_unlock(&cam->s_mutex);
1480	return ret;
1481}
1482
1483/*
1484 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
1486 * the camera (and not mess with it at open time).  Someday.
1487 */
1488static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1489		struct v4l2_format *f)
1490{
1491	struct mcam_camera *cam = priv;
1492
1493	f->fmt.pix = cam->pix_format;
1494	return 0;
1495}
1496
1497/*
1498 * We only have one input - the sensor - so minimize the nonsense here.
1499 */
1500static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1501		struct v4l2_input *input)
1502{
1503	if (input->index != 0)
1504		return -EINVAL;
1505
1506	input->type = V4L2_INPUT_TYPE_CAMERA;
1507	input->std = V4L2_STD_ALL; /* Not sure what should go here */
1508	strcpy(input->name, "Camera");
1509	return 0;
1510}
1511
1512static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1513{
1514	*i = 0;
1515	return 0;
1516}
1517
1518static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1519{
1520	if (i != 0)
1521		return -EINVAL;
1522	return 0;
1523}
1524
1525/* from vivi.c */
1526static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
1527{
1528	return 0;
1529}
1530
1531static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
1532{
1533	*a = V4L2_STD_NTSC_M;
1534	return 0;
1535}
1536
1537/*
1538 * G/S_PARM.  Most of this is done by the sensor, but we are
1539 * the level which controls the number of read buffers.
1540 */
1541static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1542		struct v4l2_streamparm *parms)
1543{
1544	struct mcam_camera *cam = priv;
1545	int ret;
1546
1547	mutex_lock(&cam->s_mutex);
1548	ret = sensor_call(cam, video, g_parm, parms);
1549	mutex_unlock(&cam->s_mutex);
1550	parms->parm.capture.readbuffers = n_dma_bufs;
1551	return ret;
1552}
1553
1554static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1555		struct v4l2_streamparm *parms)
1556{
1557	struct mcam_camera *cam = priv;
1558	int ret;
1559
1560	mutex_lock(&cam->s_mutex);
1561	ret = sensor_call(cam, video, s_parm, parms);
1562	mutex_unlock(&cam->s_mutex);
1563	parms->parm.capture.readbuffers = n_dma_bufs;
1564	return ret;
1565}
1566
1567static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1568		struct v4l2_frmsizeenum *sizes)
1569{
1570	struct mcam_camera *cam = priv;
1571	struct mcam_format_struct *f;
1572	struct v4l2_subdev_frame_size_enum fse = {
1573		.index = sizes->index,
1574		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1575	};
1576	int ret;
1577
1578	f = mcam_find_format(sizes->pixel_format);
1579	if (f->pixelformat != sizes->pixel_format)
1580		return -EINVAL;
1581	fse.code = f->mbus_code;
1582	mutex_lock(&cam->s_mutex);
1583	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
1584	mutex_unlock(&cam->s_mutex);
1585	if (ret)
1586		return ret;
1587	if (fse.min_width == fse.max_width &&
1588	    fse.min_height == fse.max_height) {
1589		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1590		sizes->discrete.width = fse.min_width;
1591		sizes->discrete.height = fse.min_height;
1592		return 0;
1593	}
1594	sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
1595	sizes->stepwise.min_width = fse.min_width;
1596	sizes->stepwise.max_width = fse.max_width;
1597	sizes->stepwise.min_height = fse.min_height;
1598	sizes->stepwise.max_height = fse.max_height;
1599	sizes->stepwise.step_width = 1;
1600	sizes->stepwise.step_height = 1;
1601	return 0;
1602}
1603
1604static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1605		struct v4l2_frmivalenum *interval)
1606{
1607	struct mcam_camera *cam = priv;
1608	struct mcam_format_struct *f;
1609	struct v4l2_subdev_frame_interval_enum fie = {
1610		.index = interval->index,
1611		.width = interval->width,
1612		.height = interval->height,
1613		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1614	};
1615	int ret;
1616
1617	f = mcam_find_format(interval->pixel_format);
1618	if (f->pixelformat != interval->pixel_format)
1619		return -EINVAL;
1620	fie.code = f->mbus_code;
1621	mutex_lock(&cam->s_mutex);
1622	ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1623	mutex_unlock(&cam->s_mutex);
1624	if (ret)
1625		return ret;
1626	interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1627	interval->discrete = fie.interval;
1628	return 0;
1629}
1630
1631#ifdef CONFIG_VIDEO_ADV_DEBUG
1632static int mcam_vidioc_g_register(struct file *file, void *priv,
1633		struct v4l2_dbg_register *reg)
1634{
1635	struct mcam_camera *cam = priv;
1636
1637	if (reg->reg > cam->regs_size - 4)
1638		return -EINVAL;
1639	reg->val = mcam_reg_read(cam, reg->reg);
1640	reg->size = 4;
1641	return 0;
1642}
1643
1644static int mcam_vidioc_s_register(struct file *file, void *priv,
1645		const struct v4l2_dbg_register *reg)
1646{
1647	struct mcam_camera *cam = priv;
1648
1649	if (reg->reg > cam->regs_size - 4)
1650		return -EINVAL;
1651	mcam_reg_write(cam, reg->reg, reg->val);
1652	return 0;
1653}
1654#endif
1655
1656static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1657	.vidioc_querycap	= mcam_vidioc_querycap,
1658	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1659	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
1660	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
1661	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
1662	.vidioc_enum_input	= mcam_vidioc_enum_input,
1663	.vidioc_g_input		= mcam_vidioc_g_input,
1664	.vidioc_s_input		= mcam_vidioc_s_input,
1665	.vidioc_s_std		= mcam_vidioc_s_std,
1666	.vidioc_g_std		= mcam_vidioc_g_std,
1667	.vidioc_reqbufs		= mcam_vidioc_reqbufs,
1668	.vidioc_querybuf	= mcam_vidioc_querybuf,
1669	.vidioc_qbuf		= mcam_vidioc_qbuf,
1670	.vidioc_dqbuf		= mcam_vidioc_dqbuf,
1671	.vidioc_streamon	= mcam_vidioc_streamon,
1672	.vidioc_streamoff	= mcam_vidioc_streamoff,
1673	.vidioc_g_parm		= mcam_vidioc_g_parm,
1674	.vidioc_s_parm		= mcam_vidioc_s_parm,
1675	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1676	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1677#ifdef CONFIG_VIDEO_ADV_DEBUG
1678	.vidioc_g_register	= mcam_vidioc_g_register,
1679	.vidioc_s_register	= mcam_vidioc_s_register,
1680#endif
1681};
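
/*
 * A minimal sketch of the userspace side of all this - plain V4L2,
 * nothing driver specific; the /dev/video0 node name is whatever the
 * system assigns, and error handling is left out:
 *
 *	int fd = open("/dev/video0", O_RDWR);
 *	struct v4l2_format fmt = {
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.fmt.pix = { .width = 640, .height = 480,
 *			     .pixelformat = V4L2_PIX_FMT_YUYV },
 *	};
 *	struct v4l2_requestbuffers req = {
 *		.count = 3, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.memory = V4L2_MEMORY_MMAP,
 *	};
 *
 *	ioctl(fd, VIDIOC_S_FMT, &fmt);
 *	ioctl(fd, VIDIOC_REQBUFS, &req);
 *	(QUERYBUF + mmap() each buffer, then QBUF them all)
 *	ioctl(fd, VIDIOC_STREAMON, &fmt.type);
 *	(then loop: DQBUF, consume, QBUF)
 */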
1682
1683/* ---------------------------------------------------------------------- */
1684/*
1685 * Our various file operations.
1686 */
1687static int mcam_v4l_open(struct file *filp)
1688{
1689	struct mcam_camera *cam = video_drvdata(filp);
1690	int ret = 0;
1691
1692	filp->private_data = cam;
1693
1694	cam->frame_state.frames = 0;
1695	cam->frame_state.singles = 0;
1696	cam->frame_state.delivered = 0;
1697	mutex_lock(&cam->s_mutex);
1698	if (cam->users == 0) {
1699		ret = mcam_setup_vb2(cam);
1700		if (ret)
1701			goto out;
1702		ret = mcam_ctlr_power_up(cam);
1703		if (ret)
1704			goto out;
1705		__mcam_cam_reset(cam);
1706		mcam_set_config_needed(cam, 1);
1707	}
1708	(cam->users)++;
1709out:
1710	mutex_unlock(&cam->s_mutex);
1711	return ret;
1712}
1713
1714
1715static int mcam_v4l_release(struct file *filp)
1716{
1717	struct mcam_camera *cam = filp->private_data;
1718
1719	cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
1720			cam->frame_state.frames, cam->frame_state.singles,
1721			cam->frame_state.delivered);
1722	mutex_lock(&cam->s_mutex);
1723	(cam->users)--;
1724	if (cam->users == 0) {
1725		mcam_ctlr_stop_dma(cam);
1726		mcam_cleanup_vb2(cam);
1727		mcam_disable_mipi(cam);
1728		mcam_ctlr_power_down(cam);
1729		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1730			mcam_free_dma_bufs(cam);
1731	}
1732
1733	mutex_unlock(&cam->s_mutex);
1734	return 0;
1735}
1736
1737static ssize_t mcam_v4l_read(struct file *filp,
1738		char __user *buffer, size_t len, loff_t *pos)
1739{
1740	struct mcam_camera *cam = filp->private_data;
1741	int ret;
1742
1743	mutex_lock(&cam->s_mutex);
1744	ret = vb2_read(&cam->vb_queue, buffer, len, pos,
1745			filp->f_flags & O_NONBLOCK);
1746	mutex_unlock(&cam->s_mutex);
1747	return ret;
1748}
1749
1750
1751
1752static unsigned int mcam_v4l_poll(struct file *filp,
1753		struct poll_table_struct *pt)
1754{
1755	struct mcam_camera *cam = filp->private_data;
1756	int ret;
1757
1758	mutex_lock(&cam->s_mutex);
1759	ret = vb2_poll(&cam->vb_queue, filp, pt);
1760	mutex_unlock(&cam->s_mutex);
1761	return ret;
1762}
1763
1764
1765static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
1766{
1767	struct mcam_camera *cam = filp->private_data;
1768	int ret;
1769
1770	mutex_lock(&cam->s_mutex);
1771	ret = vb2_mmap(&cam->vb_queue, vma);
1772	mutex_unlock(&cam->s_mutex);
1773	return ret;
1774}
1775
1776
1777
1778static const struct v4l2_file_operations mcam_v4l_fops = {
1779	.owner = THIS_MODULE,
1780	.open = mcam_v4l_open,
1781	.release = mcam_v4l_release,
1782	.read = mcam_v4l_read,
1783	.poll = mcam_v4l_poll,
1784	.mmap = mcam_v4l_mmap,
1785	.unlocked_ioctl = video_ioctl2,
1786};
1787
1788
1789/*
1790 * This template device holds all of those v4l2 methods; we
1791 * clone it for specific real devices.
1792 */
1793static struct video_device mcam_v4l_template = {
1794	.name = "mcam",
1795	.tvnorms = V4L2_STD_NTSC_M,
1796
1797	.fops = &mcam_v4l_fops,
1798	.ioctl_ops = &mcam_v4l_ioctl_ops,
1799	.release = video_device_release_empty,
1800};
1801
1802/* ---------------------------------------------------------------------- */
1803/*
1804 * Interrupt handler stuff
1805 */
1806static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1807{
1808	/*
1809	 * Basic frame housekeeping.
1810	 */
1811	set_bit(frame, &cam->flags);
1812	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1813	cam->next_buf = frame;
1814	cam->buf_seq[frame] = ++(cam->sequence);
1815	cam->frame_state.frames++;
1816	/*
1817	 * "This should never happen"
1818	 */
1819	if (cam->state != S_STREAMING)
1820		return;
1821	/*
1822	 * Process the frame and set up the next one.
1823	 */
1824	cam->frame_complete(cam, frame);
1825}
1826
1827
1828/*
1829 * The interrupt handler; this needs to be called from the
1830 * platform irq handler with the lock held.
1831 */
1832int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1833{
1834	unsigned int frame, handled = 0;
1835
1836	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1837	/*
1838	 * Handle any frame completions.  There really should
1839	 * not be more than one of these, or we have fallen
1840	 * far behind.
1841	 *
1842	 * When running in S/G mode, the frame number lacks any
1843	 * real meaning - there's only one descriptor array - but
1844	 * the controller still picks a different one to signal
1845	 * each time.
1846	 */
1847	for (frame = 0; frame < cam->nbufs; frame++)
1848		if (irqs & (IRQ_EOF0 << frame) &&
1849			test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1850			mcam_frame_complete(cam, frame);
1851			handled = 1;
1852			clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1853			if (cam->buffer_mode == B_DMA_sg)
1854				break;
1855		}
1856	/*
1857	 * If a frame starts, note that we have DMA active.  This
1858	 * code assumes that we won't get multiple frame interrupts
1859	 * at once; may want to rethink that.
1860	 */
1861	for (frame = 0; frame < cam->nbufs; frame++) {
1862		if (irqs & (IRQ_SOF0 << frame)) {
1863			set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1864			handled = IRQ_HANDLED;
1865		}
1866	}
1867
1868	if (handled == IRQ_HANDLED) {
1869		set_bit(CF_DMA_ACTIVE, &cam->flags);
1870		if (cam->buffer_mode == B_DMA_sg)
1871			mcam_ctlr_stop(cam);
1872	}
1873	return handled;
1874}
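
/*
 * A sketch of what the platform glue's interrupt handler is expected
 * to look like (not a copy of either real wrapper driver; the status
 * read, ack and any extra wrapper IRQs are the glue's business):
 *
 *	static irqreturn_t my_platform_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *mcam = data;
 *		unsigned int irqs, handled;
 *
 *		spin_lock(&mcam->dev_lock);
 *		irqs = mcam_reg_read(mcam, REG_IRQSTAT);
 *		handled = mccic_irq(mcam, irqs);
 *		spin_unlock(&mcam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */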
1875
1876/* ---------------------------------------------------------------------- */
1877/*
1878 * Registration and such.
1879 */
1880static struct ov7670_config sensor_cfg = {
1881	/*
1882	 * Exclude QCIF mode, because it only captures a tiny portion
1883	 * of the sensor FOV
1884	 */
1885	.min_width = 320,
1886	.min_height = 240,
1887};
1888
1889
1890int mccic_register(struct mcam_camera *cam)
1891{
1892	struct i2c_board_info ov7670_info = {
1893		.type = "ov7670",
1894		.addr = 0x42 >> 1,
1895		.platform_data = &sensor_cfg,
1896	};
1897	int ret;
1898
1899	/*
1900	 * Validate the requested buffer mode.
1901	 */
1902	if (buffer_mode >= 0)
1903		cam->buffer_mode = buffer_mode;
1904	if (cam->buffer_mode == B_DMA_sg &&
1905			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
1908		cam->buffer_mode = B_vmalloc;
1909	}
1910	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
1911		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
1912				cam->buffer_mode);
1913		return -EINVAL;
1914	}
1915	/*
1916	 * Register with V4L
1917	 */
1918	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
1919	if (ret)
1920		return ret;
1921
1922	mutex_init(&cam->s_mutex);
1923	cam->state = S_NOTREADY;
1924	mcam_set_config_needed(cam, 1);
1925	cam->pix_format = mcam_def_pix_format;
1926	cam->mbus_code = mcam_def_mbus_code;
1927	INIT_LIST_HEAD(&cam->buffers);
1928	mcam_ctlr_init(cam);
1929
1930	/*
1931	 * Try to find the sensor.
1932	 */
1933	sensor_cfg.clock_speed = cam->clock_speed;
1934	sensor_cfg.use_smbus = cam->use_smbus;
1935	cam->sensor_addr = ov7670_info.addr;
1936	cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
1937			cam->i2c_adapter, &ov7670_info, NULL);
1938	if (cam->sensor == NULL) {
1939		ret = -ENODEV;
1940		goto out_unregister;
1941	}
1942
1943	ret = mcam_cam_init(cam);
1944	if (ret)
1945		goto out_unregister;
1946	/*
1947	 * Get the v4l2 setup done.
1948	 */
1949	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1950	if (ret)
1951		goto out_unregister;
1952	cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1953
1954	mutex_lock(&cam->s_mutex);
1955	cam->vdev = mcam_v4l_template;
1956	cam->vdev.v4l2_dev = &cam->v4l2_dev;
1957	video_set_drvdata(&cam->vdev, cam);
1958	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
1959	if (ret)
1960		goto out;
1961
1962	/*
1963	 * If so requested, try to get our DMA buffers now.
1964	 */
1965	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
1966		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam,
				 "Unable to alloc DMA buffers at load; will try again later\n");
1969	}
1970
out:
	if (ret)
		v4l2_ctrl_handler_free(&cam->ctrl_handler);
1973	mutex_unlock(&cam->s_mutex);
1974	return ret;
1975out_unregister:
1976	v4l2_device_unregister(&cam->v4l2_dev);
1977	return ret;
1978}
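
/*
 * For reference, a platform wrapper is expected to have filled in the
 * mcam_camera structure before calling mccic_register().  A sketch
 * (only fields used by this file are shown; pdev and the my_*() hooks
 * stand in for the wrapper's own device and code):
 *
 *	cam->dev = &pdev->dev;
 *	cam->chip_id = MCAM_CAFE;
 *	cam->buffer_mode = B_vmalloc;
 *	cam->plat_power_up = my_power_up;
 *	cam->plat_power_down = my_power_down;
 *	cam->i2c_adapter = my_i2c_adapter;
 *	ret = mccic_register(cam);
 *
 * with mccic_shutdown(cam) undoing it at teardown time.
 */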
1979
1980
1981void mccic_shutdown(struct mcam_camera *cam)
1982{
1983	/*
1984	 * If we have no users (and we really, really should have no
1985	 * users) the device will already be powered down.  Trying to
1986	 * take it down again will wedge the machine, which is frowned
1987	 * upon.
1988	 */
1989	if (cam->users > 0) {
1990		cam_warn(cam, "Removing a device with users!\n");
1991		mcam_ctlr_power_down(cam);
1992	}
1993	vb2_queue_release(&cam->vb_queue);
1994	if (cam->buffer_mode == B_vmalloc)
1995		mcam_free_dma_bufs(cam);
1996	video_unregister_device(&cam->vdev);
1997	v4l2_ctrl_handler_free(&cam->ctrl_handler);
1998	v4l2_device_unregister(&cam->v4l2_dev);
1999}
2000
2001/*
2002 * Power management
2003 */
2004#ifdef CONFIG_PM
2005
2006void mccic_suspend(struct mcam_camera *cam)
2007{
2008	mutex_lock(&cam->s_mutex);
2009	if (cam->users > 0) {
2010		enum mcam_state cstate = cam->state;
2011
2012		mcam_ctlr_stop_dma(cam);
2013		mcam_ctlr_power_down(cam);
2014		cam->state = cstate;
2015	}
2016	mutex_unlock(&cam->s_mutex);
2017}
2018
2019int mccic_resume(struct mcam_camera *cam)
2020{
2021	int ret = 0;
2022
2023	mutex_lock(&cam->s_mutex);
2024	if (cam->users > 0) {
2025		ret = mcam_ctlr_power_up(cam);
2026		if (ret) {
2027			mutex_unlock(&cam->s_mutex);
2028			return ret;
2029		}
2030		__mcam_cam_reset(cam);
2031	} else {
2032		mcam_ctlr_power_down(cam);
2033	}
2034	mutex_unlock(&cam->s_mutex);
2035
2036	set_bit(CF_CONFIG_NEEDED, &cam->flags);
2037	if (cam->state == S_STREAMING) {
2038		/*
2039		 * If there was a buffer in the DMA engine at suspend
2040		 * time, put it back on the queue or we'll forget about it.
2041		 */
2042		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
2043			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
2044		ret = mcam_read_setup(cam);
2045	}
2046	return ret;
2047}
2048#endif /* CONFIG_PM */
2049