Example #1
static int cpia2_dqbuf(struct file *file, void *fh, struct v4l2_buffer *buf)
{
	struct camera_data *cam = video_drvdata(file);
	int frame;

	if(buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
	   buf->memory != V4L2_MEMORY_MMAP)
		return -EINVAL;

	frame = find_earliest_filled_buffer(cam);

	if(frame < 0 && file->f_flags&O_NONBLOCK)
		return -EAGAIN;

	if(frame < 0) {
		/* Wait for a frame to become available */
		struct framebuf *cb=cam->curbuff;
		mutex_unlock(&cam->v4l2_lock);
		wait_event_interruptible(cam->wq_stream,
					 !video_is_registered(&cam->vdev) ||
					 (cb=cam->curbuff)->status == FRAME_READY);
		mutex_lock(&cam->v4l2_lock);
		if (signal_pending(current))
			return -ERESTARTSYS;
		if (!video_is_registered(&cam->vdev))
			return -ENOTTY;
		frame = cb->num;
	}


	buf->index = frame;
	buf->bytesused = cam->buffers[buf->index].length;
	buf->flags = V4L2_BUF_FLAG_MAPPED | V4L2_BUF_FLAG_DONE;
	buf->field = V4L2_FIELD_NONE;
	buf->timestamp = cam->buffers[buf->index].timestamp;
	buf->sequence = cam->buffers[buf->index].seq;
	buf->m.offset = cam->buffers[buf->index].data - cam->frame_buffer;
	buf->length = cam->frame_size;
	memset(&buf->timecode, 0, sizeof(buf->timecode));

	DBG("DQBUF #%d status:%d seq:%d length:%d\n", buf->index,
	    cam->buffers[buf->index].status, buf->sequence, buf->bytesused);

	return 0;
}
Example #2
static int cpia2_dqbuf(struct file *file, void *fh, struct v4l2_buffer *buf)
{
	struct camera_data *cam = video_drvdata(file);
	int frame;

	if(buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
	   buf->memory != V4L2_MEMORY_MMAP)
		return -EINVAL;

	frame = find_earliest_filled_buffer(cam);

	if(frame < 0 && file->f_flags&O_NONBLOCK)
		return -EAGAIN;

	if(frame < 0) {
		/* Wait for a frame to become available */
		struct framebuf *cb=cam->curbuff;
		mutex_unlock(&cam->v4l2_lock);
		wait_event_interruptible(cam->wq_stream,
					 !video_is_registered(&cam->vdev) ||
					 (cb=cam->curbuff)->status == FRAME_READY);
		mutex_lock(&cam->v4l2_lock);
		if (signal_pending(current))
			return -ERESTARTSYS;
		if (!video_is_registered(&cam->vdev))
			return -ENOTTY;
		frame = cb->num;
	}


	buf->index = frame;
	buf->bytesused = cam->buffers[buf->index].length;
	buf->flags = V4L2_BUF_FLAG_MAPPED | V4L2_BUF_FLAG_DONE
		| V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	buf->field = V4L2_FIELD_NONE;
	buf->timestamp = ns_to_timeval(cam->buffers[buf->index].ts);
	buf->sequence = cam->buffers[buf->index].seq;
	buf->m.offset = cam->buffers[buf->index].data - cam->frame_buffer;
	buf->length = cam->frame_size;
	buf->reserved2 = 0;
	buf->request_fd = 0;
	memset(&buf->timecode, 0, sizeof(buf->timecode));

	DBG("DQBUF #%d status:%d seq:%d length:%d\n", buf->index,
	    cam->buffers[buf->index].status, buf->sequence, buf->bytesused);

	return 0;
}
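
The handler above is the driver side of the VIDIOC_DQBUF ioctl: it validates the buffer type and memory mode, optionally blocks until a frame is ready, then fills in the v4l2_buffer. As a rough sketch of the userspace call it services (it assumes the usual VIDIOC_REQBUFS, VIDIOC_QBUF and VIDIOC_STREAMON setup has already been done, and the helper name is made up for illustration):

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hypothetical helper: dequeue one filled capture buffer from an fd that was
 * opened with O_NONBLOCK. Returns 1 on success, 0 if no frame is ready yet
 * (EAGAIN, matching the driver's non-blocking path), -1 on error. */
static int dequeue_one(int fd)
{
	struct v4l2_buffer buf;

	memset(&buf, 0, sizeof(buf));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;	/* must match the driver's check */
	buf.memory = V4L2_MEMORY_MMAP;

	if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
		if (errno == EAGAIN)
			return 0;
		perror("VIDIOC_DQBUF");
		return -1;
	}

	printf("buffer %u: %u bytes, seq %u\n",
	       buf.index, buf.bytesused, buf.sequence);
	return 1;
}
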
Example #3
static int sync(struct camera_data *cam, int frame_nr)
{
	struct framebuf *frame = &cam->buffers[frame_nr];

	while (1) {
		if (frame->status == FRAME_READY)
			return 0;

		if (!cam->streaming) {
			frame->status = FRAME_READY;
			frame->length = 0;
			return 0;
		}

		mutex_unlock(&cam->v4l2_lock);
		wait_event_interruptible(cam->wq_stream,
					 !cam->streaming ||
					 frame->status == FRAME_READY);
		mutex_lock(&cam->v4l2_lock);
		if (signal_pending(current))
			return -ERESTARTSYS;
		if (!video_is_registered(&cam->vdev))
			return -ENOTTY;
	}
}
Example #4
static void vsp1_histogram_cleanup(struct vsp1_histogram *histo)
{
	if (video_is_registered(&histo->video))
		video_unregister_device(&histo->video);

	media_entity_cleanup(&histo->video.entity);
}
Example #5
void isp_video_unregister(struct isp_video *video)
{
	if (video_is_registered(&video->video)) {
		media_entity_cleanup(&video->video.entity);
		video_unregister_device(&video->video);
	}
}
Example #6
void atomisp_video_unregister(struct atomisp_video_pipe *video)
{
	if (video_is_registered(&video->vdev)) {
		media_entity_cleanup(&video->vdev.entity);
		video_unregister_device(&video->vdev);
	}
}
Example #7
static void omap24xxcam_device_unregister(struct v4l2_int_device *s)
{
	struct omap24xxcam_device *cam = s->u.slave->master->priv;

	omap24xxcam_sensor_exit(cam);

	if (cam->vfd) {
		if (!video_is_registered(cam->vfd)) {
			/*
			 * The device was never registered, so release the
			 * video_device struct directly.
			 */
			video_device_release(cam->vfd);
		} else {
			/*
			 * The unregister function will release the
			 * video_device struct as well as
			 * unregistering it.
			 */
			video_unregister_device(cam->vfd);
		}
		cam->vfd = NULL;
	}

	device_remove_file(cam->dev, &dev_attr_streaming);

	cam->sdev = NULL;
}
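
The two branches above encode a common ownership rule for dynamically allocated video_device structures: if the device was never registered the driver must free it itself, otherwise unregistering lets the device's release callback free it once the last user is gone. A distilled sketch of the same decision (the function name is hypothetical, and it assumes the release callback is video_device_release, as in this driver):

static void example_put_vdev(struct video_device *vfd)
{
	if (!vfd)
		return;

	if (!video_is_registered(vfd))
		video_device_release(vfd);	/* never reached userspace, free directly */
	else
		video_unregister_device(vfd);	/* release callback frees it later */
}
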
Example #8
void rvin_v4l2_unregister(struct rvin_dev *vin)
{
	if (!video_is_registered(&vin->vdev))
		return;

	v4l2_info(&vin->v4l2_dev, "Removing %s\n",
		  video_device_node_name(&vin->vdev));

	/* Checks internally if vdev has been initialized or not */
	video_unregister_device(&vin->vdev);
}
Example #9
static void fimc_lite_subdev_unregistered(struct v4l2_subdev *sd)
{
	struct fimc_lite *fimc = v4l2_get_subdevdata(sd);

	if (fimc == NULL)
		return;

	if (video_is_registered(&fimc->vfd)) {
		video_unregister_device(&fimc->vfd);
		media_entity_cleanup(&fimc->vfd.entity);
	}
}
Example #10
void fimc_unregister_m2m_device(struct fimc_dev *fimc)
{
	if (!fimc)
		return;

	if (fimc->m2m.m2m_dev)
		v4l2_m2m_release(fimc->m2m.m2m_dev);

	if (video_is_registered(&fimc->m2m.vfd)) {
		video_unregister_device(&fimc->m2m.vfd);
		media_entity_cleanup(&fimc->m2m.vfd.entity);
	}
}
Example #11
void xvip_dma_cleanup(struct xvip_dma *dma)
{
	if (video_is_registered(&dma->video))
		video_unregister_device(&dma->video);

	if (dma->dma)
		dma_release_channel(dma->dma);

	if (!IS_ERR_OR_NULL(dma->alloc_ctx))
		vb2_dma_contig_cleanup_ctx(dma->alloc_ctx);

	media_entity_cleanup(&dma->video.entity);

	mutex_destroy(&dma->lock);
	mutex_destroy(&dma->pipe.lock);
}
Example #12
static void fimc_lite_subdev_unregistered(struct v4l2_subdev *sd)
{
	struct fimc_lite *fimc = v4l2_get_subdevdata(sd);

	if (fimc == NULL)
		return;

	mutex_lock(&fimc->lock);

	if (video_is_registered(&fimc->ve.vdev)) {
		video_unregister_device(&fimc->ve.vdev);
		media_entity_cleanup(&fimc->ve.vdev.entity);
		fimc->ve.pipe = NULL;
	}

	mutex_unlock(&fimc->lock);
}
Example #13
void fimc_isp_video_device_unregister(struct fimc_isp *isp,
				      enum v4l2_buf_type type)
{
	struct exynos_video_entity *ve;

	if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		ve = &isp->video_capture.ve;
	else
		return;

	mutex_lock(&isp->video_lock);

	if (video_is_registered(&ve->vdev)) {
		video_unregister_device(&ve->vdev);
		media_entity_cleanup(&ve->vdev.entity);
		ve->pipe = NULL;
	}

	mutex_unlock(&isp->video_lock);
}
Example #14
/******************************************************************************
 *
 *  cpia2_close
 *
 *****************************************************************************/
static int cpia2_close(struct file *file)
{
	struct video_device *dev = video_devdata(file);
	struct camera_data *cam = video_get_drvdata(dev);

	if (video_is_registered(&cam->vdev) && v4l2_fh_is_singular_file(file)) {
		cpia2_usb_stream_stop(cam);

		/* save camera state for later open */
		cpia2_save_camera_state(cam);

		cpia2_set_low_power(cam);
		cpia2_free_buffers(cam);
	}

	if (cam->stream_fh == file->private_data) {
		cam->stream_fh = NULL;
		cam->mmapped = 0;
	}
	return v4l2_fh_release(file);
}
Example #15
static int vivid_fop_release(struct file *file)
{
	struct vivid_dev *dev = video_drvdata(file);
	struct video_device *vdev = video_devdata(file);

	mutex_lock(&dev->mutex);
	if (!no_error_inj && v4l2_fh_is_singular_file(file) &&
	    !video_is_registered(vdev) && vivid_is_last_user(dev)) {
		/*
		 * I am the last user of this driver, and a disconnect
		 * was forced (since this video_device is unregistered),
		 * so re-register all video_device's again.
		 */
		v4l2_info(&dev->v4l2_dev, "reconnect\n");
		set_bit(V4L2_FL_REGISTERED, &dev->vid_cap_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->vid_out_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->vbi_cap_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->vbi_out_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->sdr_cap_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->radio_rx_dev.flags);
		set_bit(V4L2_FL_REGISTERED, &dev->radio_tx_dev.flags);
	}
	mutex_unlock(&dev->mutex);
	if (file->private_data == dev->overlay_cap_owner)
		dev->overlay_cap_owner = NULL;
	if (file->private_data == dev->radio_rx_rds_owner) {
		dev->radio_rx_rds_last_block = 0;
		dev->radio_rx_rds_owner = NULL;
	}
	if (file->private_data == dev->radio_tx_rds_owner) {
		dev->radio_tx_rds_last_block = 0;
		dev->radio_tx_rds_owner = NULL;
	}
	if (vdev->queue)
		return vb2_fop_release(file);
	return v4l2_fh_release(file);
}
Example #16
void vivid_send_source_change(struct vivid_dev *dev, unsigned type)
{
	struct v4l2_event ev = {
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
	};
	unsigned i;

	for (i = 0; i < dev->num_inputs; i++) {
		ev.id = i;
		if (dev->input_type[i] == type) {
			if (video_is_registered(&dev->vid_cap_dev) && dev->has_vid_cap)
				v4l2_event_queue(&dev->vid_cap_dev, &ev);
			if (video_is_registered(&dev->vbi_cap_dev) && dev->has_vbi_cap)
				v4l2_event_queue(&dev->vbi_cap_dev, &ev);
		}
	}
}
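
v4l2_event_queue() above only delivers the event to file handles that have subscribed to it. A hedged sketch of the matching userspace side (the helper name is invented; real applications usually poll for POLLPRI before dequeuing):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Invented helper: subscribe to source-change events for input 0 and dequeue
 * one. Returns 1 if the resolution changed, 0 otherwise, -1 on error.
 * VIDIOC_DQEVENT blocks here because the fd is assumed to be blocking. */
static int wait_source_change(int fd)
{
	struct v4l2_event_subscription sub;
	struct v4l2_event ev;

	memset(&sub, 0, sizeof(sub));
	sub.type = V4L2_EVENT_SOURCE_CHANGE;
	sub.id = 0;	/* matches ev.id, the input index set above */
	if (ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0)
		return -1;

	if (ioctl(fd, VIDIOC_DQEVENT, &ev) < 0)
		return -1;

	return (ev.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) ? 1 : 0;
}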

/*
 * Conversion function that converts a single-planar format to a
 * single-plane multiplanar format.
 */
void fmt_sp2mp(const struct v4l2_format *sp_fmt, struct v4l2_format *mp_fmt)
{
	struct v4l2_pix_format_mplane *mp = &mp_fmt->fmt.pix_mp;
	struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
	const struct v4l2_pix_format *pix = &sp_fmt->fmt.pix;
	bool is_out = sp_fmt->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;

	memset(mp->reserved, 0, sizeof(mp->reserved));
	mp_fmt->type = is_out ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE :
			   V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	mp->width = pix->width;
	mp->height = pix->height;
	mp->pixelformat = pix->pixelformat;
	mp->field = pix->field;
	mp->colorspace = pix->colorspace;
	mp->xfer_func = pix->xfer_func;
	mp->ycbcr_enc = pix->ycbcr_enc;
	mp->quantization = pix->quantization;
	mp->num_planes = 1;
	mp->flags = pix->flags;
	ppix->sizeimage = pix->sizeimage;
	ppix->bytesperline = pix->bytesperline;
	memset(ppix->reserved, 0, sizeof(ppix->reserved));
}

int fmt_sp2mp_func(struct file *file, void *priv,
		struct v4l2_format *f, fmtfunc func)
{
	struct v4l2_format fmt;
	struct v4l2_pix_format_mplane *mp = &fmt.fmt.pix_mp;
	struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
	struct v4l2_pix_format *pix = &f->fmt.pix;
	int ret;

	/* Converts to a mplane format */
	fmt_sp2mp(f, &fmt);
	/* Passes it to the generic mplane format function */
	ret = func(file, priv, &fmt);
	/* Copies back the mplane data to the single plane format */
	pix->width = mp->width;
	pix->height = mp->height;
	pix->pixelformat = mp->pixelformat;
	pix->field = mp->field;
	pix->colorspace = mp->colorspace;
	pix->xfer_func = mp->xfer_func;
	pix->ycbcr_enc = mp->ycbcr_enc;
	pix->quantization = mp->quantization;
	pix->sizeimage = ppix->sizeimage;
	pix->bytesperline = ppix->bytesperline;
	pix->flags = mp->flags;
	return ret;
}
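
fmt_sp2mp_func() is a thin adapter: it lets a single-planar ioctl handler reuse an existing single-plane multiplanar implementation. A hedged usage sketch follows; my_g_fmt_vid_cap_mplane is a placeholder for whatever mplane handler the driver already provides, and the wrapper name is invented:

static int my_g_fmt_vid_cap(struct file *file, void *priv,
			    struct v4l2_format *f)
{
	/* Delegate the single-planar G_FMT to the driver's existing
	 * V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE implementation. */
	return fmt_sp2mp_func(file, priv, f, my_g_fmt_vid_cap_mplane);
}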

int vivid_vid_adjust_sel(unsigned flags, struct v4l2_rect *r)
{
	unsigned w = r->width;
	unsigned h = r->height;

	/* sanitize w and h in case someone passes ~0 as the value */
	w &= 0xffff;
	h &= 0xffff;
	if (!(flags & V4L2_SEL_FLAG_LE)) {
		w++;
		h++;
		if (w < 2)
			w = 2;
		if (h < 2)
			h = 2;
	}
	if (!(flags & V4L2_SEL_FLAG_GE)) {
		if (w > MAX_WIDTH)
			w = MAX_WIDTH;
		if (h > MAX_HEIGHT)
			h = MAX_HEIGHT;
	}
	w = w & ~1;
	h = h & ~1;
	if (w < 2 || h < 2)
		return -ERANGE;
	if (w > MAX_WIDTH || h > MAX_HEIGHT)
		return -ERANGE;
	if (r->top < 0)
		r->top = 0;
	if (r->left < 0)
		r->left = 0;
	/* sanitize left and top in case someone passes ~0 as the value */
	r->left &= 0xfffe;
	r->top &= 0xfffe;
	if (r->left + w > MAX_WIDTH)
		r->left = MAX_WIDTH - w;
	if (r->top + h > MAX_HEIGHT)
		r->top = MAX_HEIGHT - h;
	if ((flags & (V4L2_SEL_FLAG_GE | V4L2_SEL_FLAG_LE)) ==
			(V4L2_SEL_FLAG_GE | V4L2_SEL_FLAG_LE) &&
	    (r->width != w || r->height != h))
		return -ERANGE;
	r->width = w;
	r->height = h;
	return 0;
}
Example #17
/******************************************************************************
 *
 * configure_transfer_mode
 *
 *****************************************************************************/
static int configure_transfer_mode(struct camera_data *cam, unsigned int alt)
{
	static unsigned char iso_regs[8][4] = {
		{0x00, 0x00, 0x00, 0x00},
		{0x00, 0x00, 0x00, 0x00},
		{0xB9, 0x00, 0x00, 0x7E},
		{0xB9, 0x00, 0x01, 0x7E},
		{0xB9, 0x00, 0x02, 0x7E},
		{0xB9, 0x00, 0x02, 0xFE},
		{0xB9, 0x00, 0x03, 0x7E},
		{0xB9, 0x00, 0x03, 0xFD}
	};
	struct cpia2_command cmd;
	unsigned char reg;

	if (!video_is_registered(&cam->vdev))
		return -ENODEV;

	/***
	 * Write the isoc registers according to the alternate selected
	 ***/
	cmd.direction = TRANSFER_WRITE;
	cmd.buffer.block_data[0] = iso_regs[alt][0];
	cmd.buffer.block_data[1] = iso_regs[alt][1];
	cmd.buffer.block_data[2] = iso_regs[alt][2];
	cmd.buffer.block_data[3] = iso_regs[alt][3];
	cmd.req_mode = CAMERAACCESS_TYPE_BLOCK | CAMERAACCESS_VC;
	cmd.start = CPIA2_VC_USB_ISOLIM;
	cmd.reg_count = 4;
	cpia2_send_command(cam, &cmd);

	/***
	 * Enable relevant streams before starting polling.
	 * First read USB Stream Config Register.
	 ***/
	cmd.direction = TRANSFER_READ;
	cmd.req_mode = CAMERAACCESS_TYPE_BLOCK | CAMERAACCESS_VC;
	cmd.start = CPIA2_VC_USB_STRM;
	cmd.reg_count = 1;
	cpia2_send_command(cam, &cmd);
	reg = cmd.buffer.block_data[0];

	/* Clear iso, bulk, and int */
	reg &= ~(CPIA2_VC_USB_STRM_BLK_ENABLE |
		 CPIA2_VC_USB_STRM_ISO_ENABLE |
		 CPIA2_VC_USB_STRM_INT_ENABLE);

	if (alt == USBIF_BULK) {
		DBG("Enabling bulk xfer\n");
		reg |= CPIA2_VC_USB_STRM_BLK_ENABLE;	/* Enable Bulk */
		cam->xfer_mode = XFER_BULK;
	} else if (alt >= USBIF_ISO_1) {
		DBG("Enabling ISOC xfer\n");
		reg |= CPIA2_VC_USB_STRM_ISO_ENABLE;
		cam->xfer_mode = XFER_ISOC;
	}

	cmd.buffer.block_data[0] = reg;
	cmd.direction = TRANSFER_WRITE;
	cmd.start = CPIA2_VC_USB_STRM;
	cmd.reg_count = 1;
	cmd.req_mode = CAMERAACCESS_TYPE_BLOCK | CAMERAACCESS_VC;
	cpia2_send_command(cam, &cmd);

	return 0;
}
Example #18
/******************************************************************************
 *
 *  cpia2_usb_complete
 *
 *  callback when incoming packet is received
 *****************************************************************************/
static void cpia2_usb_complete(struct urb *urb)
{
	int i;
	unsigned char *cdata;
	static bool frame_ready = false;
	struct camera_data *cam = (struct camera_data *) urb->context;

	if (urb->status!=0) {
		if (!(urb->status == -ENOENT ||
		      urb->status == -ECONNRESET ||
		      urb->status == -ESHUTDOWN))
		{
			DBG("urb->status = %d!\n", urb->status);
		}
		DBG("Stopping streaming\n");
		return;
	}

	if (!cam->streaming || !video_is_registered(&cam->vdev)) {
		LOG("Will now stop the streaming: streaming = %d, present=%d\n",
		    cam->streaming, video_is_registered(&cam->vdev));
		return;
	}

	/***
	 * Packet collater
	 ***/
	//DBG("Collating %d packets\n", urb->number_of_packets);
	for (i = 0; i < urb->number_of_packets; i++) {
		u16 checksum, iso_checksum;
		int j;
		int n = urb->iso_frame_desc[i].actual_length;
		int st = urb->iso_frame_desc[i].status;

		if(cam->workbuff->status == FRAME_READY) {
			struct framebuf *ptr;
			/* Try to find an available buffer */
			DBG("workbuff full, searching\n");
			for (ptr = cam->workbuff->next;
			     ptr != cam->workbuff;
			     ptr = ptr->next)
			{
				if (ptr->status == FRAME_EMPTY) {
					ptr->status = FRAME_READING;
					ptr->length = 0;
					break;
				}
			}
			if (ptr == cam->workbuff)
				break; /* No READING or EMPTY buffers left */

			cam->workbuff = ptr;
		}

		if (cam->workbuff->status == FRAME_EMPTY ||
		    cam->workbuff->status == FRAME_ERROR) {
			cam->workbuff->status = FRAME_READING;
			cam->workbuff->length = 0;
		}

		//DBG("   Packet %d length = %d, status = %d\n", i, n, st);
		cdata = urb->transfer_buffer + urb->iso_frame_desc[i].offset;

		if (st) {
			LOG("cpia2 data error: [%d] len=%d, status = %d\n",
			    i, n, st);
			if(!ALLOW_CORRUPT)
				cam->workbuff->status = FRAME_ERROR;
			continue;
		}

		if(n<=2)
			continue;

		checksum = 0;
		for(j=0; j<n-2; ++j)
			checksum += cdata[j];
		iso_checksum = cdata[j] + cdata[j+1]*256;
		if(checksum != iso_checksum) {
			LOG("checksum mismatch: [%d] len=%d, calculated = %x, checksum = %x\n",
			    i, n, (int)checksum, (int)iso_checksum);
			if(!ALLOW_CORRUPT) {
				cam->workbuff->status = FRAME_ERROR;
				continue;
			}
		}
		n -= 2;

		if(cam->workbuff->status != FRAME_READING) {
			if((0xFF == cdata[0] && 0xD8 == cdata[1]) ||
			   (0xD8 == cdata[0] && 0xFF == cdata[1] &&
			    0 != cdata[2])) {
				/* frame is skipped, but increment total
				 * frame count anyway */
				cam->frame_count++;
			}
			DBG("workbuff not reading, status=%d\n",
			    cam->workbuff->status);
			continue;
		}

		if (cam->frame_size < cam->workbuff->length + n) {
			ERR("buffer overflow! length: %d, n: %d\n",
			    cam->workbuff->length, n);
			cam->workbuff->status = FRAME_ERROR;
			if(cam->workbuff->length > cam->workbuff->max_length)
				cam->workbuff->max_length =
					cam->workbuff->length;
			continue;
		}

		if (cam->workbuff->length == 0) {
			int data_offset;
			if ((0xD8 == cdata[0]) && (0xFF == cdata[1])) {
				data_offset = 1;
			} else if((0xFF == cdata[0]) && (0xD8 == cdata[1])
				  && (0xFF == cdata[2])) {
				data_offset = 2;
			} else {
				DBG("Ignoring packet, not beginning!\n");
				continue;
			}
			DBG("Start of frame pattern found\n");
			v4l2_get_timestamp(&cam->workbuff->timestamp);
			cam->workbuff->seq = cam->frame_count++;
			cam->workbuff->data[0] = 0xFF;
			cam->workbuff->data[1] = 0xD8;
			cam->workbuff->length = 2;
			add_APPn(cam);
			add_COM(cam);
			memcpy(cam->workbuff->data+cam->workbuff->length,
			       cdata+data_offset, n-data_offset);
			cam->workbuff->length += n-data_offset;
		} else if (cam->workbuff->length > 0) {
			memcpy(cam->workbuff->data + cam->workbuff->length,
			       cdata, n);
			cam->workbuff->length += n;
		}

		if ((cam->workbuff->length >= 3) &&
		    (cam->workbuff->data[cam->workbuff->length - 3] == 0xFF) &&
		    (cam->workbuff->data[cam->workbuff->length - 2] == 0xD9) &&
		    (cam->workbuff->data[cam->workbuff->length - 1] == 0xFF)) {
			frame_ready = true;
			cam->workbuff->data[cam->workbuff->length - 1] = 0;
			cam->workbuff->length -= 1;
		} else if ((cam->workbuff->length >= 2) &&
		   (cam->workbuff->data[cam->workbuff->length - 2] == 0xFF) &&
		   (cam->workbuff->data[cam->workbuff->length - 1] == 0xD9)) {
			frame_ready = true;
		}

		if (frame_ready) {
			DBG("Workbuff image size = %d\n",cam->workbuff->length);
			process_frame(cam);

			frame_ready = false;

			if (waitqueue_active(&cam->wq_stream))
				wake_up_interruptible(&cam->wq_stream);
		}
	}

	if(cam->streaming) {
		/* resubmit */
		urb->dev = cam->dev;
		if ((i = usb_submit_urb(urb, GFP_ATOMIC)) != 0)
			ERR("%s: usb_submit_urb ret %d!\n", __func__, i);
	}
}
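
The packet collator above treats the last two bytes of each isochronous packet as a little-endian 16-bit sum of the preceding payload bytes. A standalone restatement of that check (the helper name is invented for illustration):

#include <linux/types.h>

static bool cpia2_iso_checksum_ok(const u8 *data, int len)
{
	u16 sum = 0;
	int i;

	if (len <= 2)
		return false;

	/* Sum every payload byte, excluding the two trailing checksum bytes. */
	for (i = 0; i < len - 2; i++)
		sum += data[i];

	/* Trailer is stored low byte first, as in the collator above. */
	return sum == (u16)(data[len - 2] + data[len - 1] * 256);
}
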
Example #19
void vivid_send_source_change(struct vivid_dev *dev, unsigned type)
{
	struct v4l2_event ev = {
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
	};
	unsigned i;

	for (i = 0; i < dev->num_inputs; i++) {
		ev.id = i;
		if (dev->input_type[i] == type) {
			if (video_is_registered(&dev->vid_cap_dev) && dev->has_vid_cap)
				v4l2_event_queue(&dev->vid_cap_dev, &ev);
			if (video_is_registered(&dev->vbi_cap_dev) && dev->has_vbi_cap)
				v4l2_event_queue(&dev->vbi_cap_dev, &ev);
		}
	}
}

/*
 * Conversion function that converts a single-planar format to a
 * single-plane multiplanar format.
 */
void fmt_sp2mp(const struct v4l2_format *sp_fmt, struct v4l2_format *mp_fmt)
{
	struct v4l2_pix_format_mplane *mp = &mp_fmt->fmt.pix_mp;
	struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
	const struct v4l2_pix_format *pix = &sp_fmt->fmt.pix;
	bool is_out = sp_fmt->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;

	memset(mp->reserved, 0, sizeof(mp->reserved));
	mp_fmt->type = is_out ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE :
			   V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	mp->width = pix->width;
	mp->height = pix->height;
	mp->pixelformat = pix->pixelformat;
	mp->field = pix->field;
	mp->colorspace = pix->colorspace;
	mp->ycbcr_enc = pix->ycbcr_enc;
	mp->quantization = pix->quantization;
	mp->num_planes = 1;
	mp->flags = pix->flags;
	ppix->sizeimage = pix->sizeimage;
	ppix->bytesperline = pix->bytesperline;
	memset(ppix->reserved, 0, sizeof(ppix->reserved));
}

int fmt_sp2mp_func(struct file *file, void *priv,
		struct v4l2_format *f, fmtfunc func)
{
	struct v4l2_format fmt;
	struct v4l2_pix_format_mplane *mp = &fmt.fmt.pix_mp;
	struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
	struct v4l2_pix_format *pix = &f->fmt.pix;
	int ret;

	/* Converts to a mplane format */
	fmt_sp2mp(f, &fmt);
	/* Passes it to the generic mplane format function */
	ret = func(file, priv, &fmt);
	/* Copies back the mplane data to the single plane format */
	pix->width = mp->width;
	pix->height = mp->height;
	pix->pixelformat = mp->pixelformat;
	pix->field = mp->field;
	pix->colorspace = mp->colorspace;
	pix->ycbcr_enc = mp->ycbcr_enc;
	pix->quantization = mp->quantization;
	pix->sizeimage = ppix->sizeimage;
	pix->bytesperline = ppix->bytesperline;
	pix->flags = mp->flags;
	return ret;
}

/* v4l2_rect helper function: copy the width/height values */
void rect_set_size_to(struct v4l2_rect *r, const struct v4l2_rect *size)
{
	r->width = size->width;
	r->height = size->height;
}