/*
 * si470x_fops_open - file open
 */
static int si470x_fops_open(struct inode *inode, struct file *file)
{
        struct si470x_device *radio = video_get_drvdata(video_devdata(file));
        int retval;

        radio->users++;
        retval = usb_autopm_get_interface(radio->intf);
        if (retval < 0) {
                radio->users--;
                return -EIO;
        }

        if (radio->users == 1) {
                retval = si470x_start(radio);
                if (retval < 0)
                        usb_autopm_put_interface(radio->intf);
                return retval;
        }

        return 0;
}
/* release the camera */
static int zr364xx_release(struct inode *inode, struct file *file)
{
        struct video_device *vdev = video_devdata(file);
        struct zr364xx_camera *cam;
        struct usb_device *udev;
        int i, err;

        DBG("zr364xx_release");

        if (vdev == NULL)
                return -ENODEV;
        cam = video_get_drvdata(vdev);

        udev = cam->udev;

        mutex_lock(&cam->lock);
        for (i = 0; i < 2; i++) {
                err = send_control_msg(udev, 1, init[cam->method][i].value,
                                       0, init[cam->method][i].bytes,
                                       init[cam->method][i].size);
                if (err < 0) {
                        info("error during release sequence");
                        mutex_unlock(&cam->lock);
                        return err;
                }
        }

        file->private_data = NULL;
        video_exclusive_release(inode, file);

        /* Added some delay here, since opening/closing the camera quickly,
         * like Ekiga does during its startup, can crash the webcam */
        mdelay(100);

        mutex_unlock(&cam->lock);
        return 0;
}
/*
 * si470x_vidioc_s_tuner - set tuner attributes
 */
static int si470x_vidioc_s_tuner(struct file *file, void *priv,
                struct v4l2_tuner *tuner)
{
        struct si470x_device *radio = video_get_drvdata(video_devdata(file));
        int retval;

        if (radio->disconnected)
                return -EIO;
        if (tuner->index > 0)
                return -EINVAL;

        if (tuner->audmode == V4L2_TUNER_MODE_MONO)
                radio->registers[POWERCFG] |= POWERCFG_MONO;  /* force mono */
        else
                radio->registers[POWERCFG] &= ~POWERCFG_MONO; /* try stereo */

        retval = si470x_set_register(radio, POWERCFG);
        if (retval < 0)
                printk(KERN_WARNING DRIVER_NAME
                        ": set tuner failed with %d\n", retval);

        return retval;
}
static int uvc_v4l2_release(struct file *file)
{
        struct video_device *vdev = video_devdata(file);
        struct uvc_device *uvc = video_get_drvdata(vdev);
        struct uvc_file_handle *handle = to_uvc_file_handle(file->private_data);
        struct uvc_video *video = handle->device;

        uvc_function_disconnect(uvc);

        uvc_video_enable(video, 0);
        mutex_lock(&video->queue.mutex);
        if (uvc_free_buffers(&video->queue) < 0)
                printk(KERN_ERR "uvc_v4l2_release: Unable to free "
                                "buffers.\n");
        mutex_unlock(&video->queue.mutex);

        file->private_data = NULL;
        v4l2_fh_del(&handle->vfh);
        v4l2_fh_exit(&handle->vfh);
        kfree(handle);
        return 0;
}
static int si4703_close(struct inode *inode, struct file *file)
{
        struct si4703_device *chip = video_get_drvdata(video_devdata(file));

        unset_radio_dvfm_constraint();

        if (!chip) {
                disable_oscc_tout_s0();
                return -ENODEV;
        }

        if (si4703_power_down(chip))
                printk(KERN_ERR "Radio did not shut down properly\n");

        chip->users = 0;

        if (chip->removed)
                kfree(chip);

        /* disable clock */
        disable_oscc_tout_s0();

        return 0;
}
/*
 * camif_release()
 */
static int camif_release(struct file *file)
{
        struct video_device *vdev = file->private_data;
        s3c2440camif_dev *pcam = (s3c2440camif_dev *)video_get_drvdata(vdev);
        int ret = 0;

        if (pcam->state == CAMIF_STATE_PREVIEWING ||
            pcam->state == CAMIF_STATE_CODECING) {
                pcam->cmdcode = CAMIF_CMD_STOP;
                ret = wait_event_interruptible(pcam->cmdqueue,
                                               pcam->cmdcode == CAMIF_CMD_NONE);
        }

        clk_disable(pcam->clk);                 /* stop camif clock */

        free_irq(IRQ_S3C2440_CAM_P, pcam);      /* free camif IRQs */
        free_irq(IRQ_S3C2440_CAM_C, pcam);

        if (!static_allocate)
                s3c2440camif_deallocate_frame_buf(pcam, S3C2440_FRAME_NUM);

        return ret;
}
/*
 * si470x_fops_release - file release
 */
static int si470x_fops_release(struct inode *inode, struct file *file)
{
        struct si470x_device *radio = video_get_drvdata(video_devdata(file));
        int retval;

        if (!radio)
                return -ENODEV;

        radio->users--;
        if (radio->users == 0) {
                /* stop rds reception */
                cancel_delayed_work_sync(&radio->work);

                /* cancel read processes */
                wake_up_interruptible(&radio->read_queue);

                retval = si470x_stop(radio);
                usb_autopm_put_interface(radio->intf);
                return retval;
        }

        return 0;
}
/******************************************************************************
 *
 *  cpia2_close
 *
 *****************************************************************************/
static int cpia2_close(struct file *file)
{
        struct video_device *dev = video_devdata(file);
        struct camera_data *cam = video_get_drvdata(dev);

        mutex_lock(&cam->v4l2_lock);
        if (video_is_registered(&cam->vdev) && v4l2_fh_is_singular_file(file)) {
                cpia2_usb_stream_stop(cam);

                /* save camera state for later open */
                cpia2_save_camera_state(cam);

                cpia2_set_low_power(cam);
                cpia2_free_buffers(cam);
        }

        if (cam->stream_fh == file->private_data) {
                cam->stream_fh = NULL;
                cam->mmapped = 0;
        }
        mutex_unlock(&cam->v4l2_lock);
        return v4l2_fh_release(file);
}
/*
 * media_entity_operations
 */
static int nxp_link_setup(struct media_entity *entity,
                          const struct media_pad *local,
                          const struct media_pad *remote, u32 flags)
{
        struct video_device *vdev = media_entity_to_video_device(entity);
        struct nxp_video *me = video_get_drvdata(vdev);

        pr_debug("%s: me type(%d)\n", __func__, me->type);

        switch (local->index | media_entity_type(remote->entity)) {
        case 0 | MEDIA_ENT_T_V4L2_SUBDEV:
                /* capture, m2m : sink
                 * video : source */
                pr_debug("local %d, link subdev\n", local->index);
                break;
        case 0 | MEDIA_ENT_T_DEVNODE:
                pr_debug("local %d, link videodev\n", local->index);
                break;
        case 1 | MEDIA_ENT_T_V4L2_SUBDEV:
                pr_debug("local %d, link subdev\n", local->index);
                break;
        case 1 | MEDIA_ENT_T_DEVNODE:
                pr_debug("local %d, link videodev\n", local->index);
                break;
        }

        if (flags & MEDIA_LNK_FL_ENABLED)
                pr_debug("====> linked\n");
        else
                pr_debug("====> unlinked\n");

        return 0;
}
static int atomisp_g_fmt_file(struct file *file, void *fh,
                              struct v4l2_format *f)
{
        struct video_device *vdev = video_devdata(file);
        struct atomisp_device *isp = video_get_drvdata(vdev);
        struct atomisp_video_pipe *pipe = atomisp_to_video_pipe(vdev);
        int ret = 0;

        if (f->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) {
                v4l2_err(&atomisp_dev, "unsupported v4l2 buf type\n");
                return -EINVAL;
        }

        memset(f, 0, sizeof(struct v4l2_format));
        f->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

        switch (isp->sw_contex.output_mode) {
        case OUTPUT_MODE_FILE:
                f->fmt.pix.width = pipe->out_fmt->width;
                f->fmt.pix.height = pipe->out_fmt->height;
                f->fmt.pix.pixelformat = pipe->out_fmt->pixelformat;
                f->fmt.pix.bytesperline = pipe->out_fmt->bytesperline;
                f->fmt.pix.sizeimage = pipe->out_fmt->imagesize;
                break;
        case OUTPUT_MODE_TEXT:
                f->fmt.pix.sizeimage = pipe->out_fmt->framesize;
                break;
        default:
                v4l2_err(&atomisp_dev, "Unsupported output mode\n");
                ret = -EINVAL;
                break;
        }

        return ret;
}
static int at91sam9x5_video_vidioc_s_fmt_vid_overlay(struct file *filp,
                void *fh, struct v4l2_format *f)
{
        struct video_device *vdev = filp->private_data;
        struct at91sam9x5_video_priv *priv = video_get_drvdata(vdev);
        struct v4l2_window *win = &f->fmt.win;
        unsigned long flags;

        if (f->type != V4L2_BUF_TYPE_VIDEO_OVERLAY)
                return -EINVAL;

        debug("rect=(%d,%d)+(%d,%d)\n", win->w.left, win->w.top,
              win->w.width, win->w.height);

        spin_lock_irqsave(&priv->lock, flags);

        priv->fmt_vid_overlay = *win;
        at91sam9x5_video_update_config(priv, 1);

        spin_unlock_irqrestore(&priv->lock, flags);

        return 0;
}
static int zr364xx_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
                                          struct v4l2_format *f)
{
        struct video_device *vdev = video_devdata(file);
        struct zr364xx_camera *cam;

        if (vdev == NULL)
                return -ENODEV;
        cam = video_get_drvdata(vdev);

        if (f->fmt.pix.pixelformat != V4L2_PIX_FMT_JPEG)
                return -EINVAL;
        if (f->fmt.pix.field != V4L2_FIELD_ANY &&
            f->fmt.pix.field != V4L2_FIELD_NONE)
                return -EINVAL;
        f->fmt.pix.field = V4L2_FIELD_NONE;
        f->fmt.pix.width = cam->width;
        f->fmt.pix.height = cam->height;
        f->fmt.pix.bytesperline = f->fmt.pix.width * 2;
        f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
        f->fmt.pix.colorspace = 0;
        f->fmt.pix.priv = 0;
        return 0;
}
static int vidioc_s_ctrl(struct file *file, void *priv,
                         struct v4l2_control *ctrl)
{
        struct si4703_device *chip = video_get_drvdata(video_devdata(file));

        switch (ctrl->id) {
        case V4L2_CID_AUDIO_MUTE:
                if (ctrl->value) {
                        if (si4703_mute(chip))
                                printk(KERN_ERR "si4703: no response");
                } else {
                        if (si4703_unmute(chip))
                                printk(KERN_ERR "si4703: no response");
                }
                break;
        case V4L2_CID_AUDIO_VOLUME:
                si4703_setvol(chip, ctrl->value);
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
static int uvc_v4l2_open(struct file *file)
{
        struct video_device *vdev = video_devdata(file);
        struct uvc_device *uvc = video_get_drvdata(vdev);
        struct uvc_file_handle *handle;
        int ret;

        handle = kzalloc(sizeof(*handle), GFP_KERNEL);
        if (handle == NULL)
                return -ENOMEM;

        ret = v4l2_fh_init(&handle->vfh, vdev);
        if (ret < 0)
                goto error;

        ret = v4l2_event_init(&handle->vfh);
        if (ret < 0)
                goto error;

        ret = v4l2_event_alloc(&handle->vfh, 8);
        if (ret < 0)
                goto error;

        v4l2_fh_add(&handle->vfh);

        handle->device = &uvc->video;
        file->private_data = &handle->vfh;

        uvc_function_connect(uvc);
        return 0;

error:
        v4l2_fh_exit(&handle->vfh);
        kfree(handle);  /* don't leak the handle on a failed open */
        return ret;
}
static int zr364xx_vidioc_g_fmt_cap(struct file *file, void *priv,
                                    struct v4l2_format *f)
{
        struct video_device *vdev = video_devdata(file);
        struct zr364xx_camera *cam;

        if (vdev == NULL)
                return -ENODEV;
        cam = video_get_drvdata(vdev);

        if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
                return -EINVAL;

        memset(&f->fmt.pix, 0, sizeof(struct v4l2_pix_format));
        f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        f->fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
        f->fmt.pix.field = V4L2_FIELD_NONE;
        f->fmt.pix.width = cam->width;
        f->fmt.pix.height = cam->height;
        f->fmt.pix.bytesperline = f->fmt.pix.width * 2;
        f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
        f->fmt.pix.colorspace = 0;
        f->fmt.pix.priv = 0;
        return 0;
}
/*
 * This ioctl allows applications to enumerate all frame sizes that the
 * device supports for the given pixel format.
 * For the discrete type, applications should increase the index until
 * -EINVAL is returned.
 * For the stepwise type, applications only need to set pixel_format; the
 * driver then returns the maximum and minimum frame size supported and the
 * step size.
 */
static int atomisp_enum_framesizes(struct file *file, void *fh,
                                   struct v4l2_frmsizeenum *arg)
{
        struct video_device *vdev = video_devdata(file);
        struct atomisp_device *isp = video_get_drvdata(vdev);
        struct v4l2_mbus_framefmt snr_mbus_fmt;
        struct v4l2_streamparm sensor_parm;
        struct atomisp_input_subdev *input;
        unsigned int padding_w, padding_h;
        int max_width, max_height, min_width, min_height;
        int ret;
        bool same_type;
        u32 pixel_format;

        if (arg->index != 0)
                return -EINVAL;

        if (!atomisp_is_pixelformat_supported(arg->pixel_format))
                return -EINVAL;

        input = &isp->inputs[isp->input_curr];

        if (input->type != SOC_CAMERA && input->type != RAW_CAMERA)
                return -EINVAL;

        pixel_format = input->frame_size.pixel_format;
        /*
         * only check whether the pixel format and the previous pixel format
         * are of the same type
         */
        same_type = is_pixelformat_raw(pixel_format) ==
                    is_pixelformat_raw(arg->pixel_format);
        /*
         * if the frame size was requested previously, we can get the value
         * quickly from the cache.
         */
        if (input->frame_size.pixel_format != 0 && same_type) {
                memcpy(arg, &input->frame_size, sizeof(input->frame_size));
                return 0;
        }

        /* get padding value via subdev type and requested isp pixelformat */
        if (input->type == SOC_CAMERA || (input->type == RAW_CAMERA &&
            is_pixelformat_raw(arg->pixel_format))) {
                padding_h = 0;
                padding_w = 0;
        } else {
                padding_h = pad_h;
                padding_w = pad_w;
        }

        /* setting run mode to the sensor */
        memset(&sensor_parm, 0, sizeof(sensor_parm));
        sensor_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        sensor_parm.parm.capture.capturemode = CI_MODE_STILL_CAPTURE;
        v4l2_subdev_call(input->camera, video, s_parm, &sensor_parm);

        /* get the sensor max resolution supported */
        snr_mbus_fmt.height = ATOM_ISP_MAX_HEIGHT;
        snr_mbus_fmt.width = ATOM_ISP_MAX_WIDTH;

        ret = v4l2_subdev_call(input->camera, video, try_mbus_fmt,
                               &snr_mbus_fmt);
        if (ret < 0)
                return ret;

        max_width = snr_mbus_fmt.width - padding_w;
        max_height = snr_mbus_fmt.height - padding_h;

        /* app vs isp */
        max_width = max_width - max_width % ATOM_ISP_STEP_WIDTH;
        max_height = max_height - max_height % ATOM_ISP_STEP_HEIGHT;
        max_width = clamp(max_width, ATOM_ISP_MIN_WIDTH, ATOM_ISP_MAX_WIDTH);
        max_height = clamp(max_height, ATOM_ISP_MIN_HEIGHT,
                           ATOM_ISP_MAX_HEIGHT);

        /* set the supported minimum resolution to sub-QCIF resolution */
        min_width = ATOM_RESOLUTION_SUBQCIF_WIDTH;
        min_height = ATOM_RESOLUTION_SUBQCIF_HEIGHT;

        /* app vs isp */
        min_width = min_width - min_width % ATOM_ISP_STEP_WIDTH;
        min_height = min_height - min_height % ATOM_ISP_STEP_HEIGHT;
        min_width = clamp(min_width, ATOM_ISP_MIN_WIDTH, ATOM_ISP_MAX_WIDTH);
        min_height = clamp(min_height, ATOM_ISP_MIN_HEIGHT,
                           ATOM_ISP_MAX_HEIGHT);

        arg->stepwise.max_width = max_width;
        arg->stepwise.max_height = max_height;
        arg->stepwise.min_width = min_width;
        arg->stepwise.min_height = min_height;
        arg->stepwise.step_width = ATOM_ISP_STEP_WIDTH;
        arg->stepwise.step_height = ATOM_ISP_STEP_HEIGHT;
        arg->type = V4L2_FRMSIZE_TYPE_STEPWISE;

        /*
         * store the frame size in the per-subdev struct so that the next
         * frame size enumeration can be answered quickly from the cache.
         */
        memcpy(&input->frame_size, arg, sizeof(*arg));

        return 0;
}
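/*
 * Userspace sketch (not part of the driver): how an application would call
 * VIDIOC_ENUM_FRAMESIZES and interpret the stepwise answer built by the
 * handler above. The device node path and pixel format are assumptions made
 * only for illustration; this is a minimal example, not a definitive tool.
 */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

static void enum_frame_sizes(int fd, __u32 pixel_format)
{
        struct v4l2_frmsizeenum fsize;

        memset(&fsize, 0, sizeof(fsize));
        fsize.pixel_format = pixel_format;

        /* discrete drivers: keep incrementing index until the ioctl fails */
        for (fsize.index = 0;
             ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) == 0; fsize.index++) {
                if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                        printf("%ux%u\n", fsize.discrete.width,
                               fsize.discrete.height);
                } else if (fsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
                        /* stepwise drivers (like the one above) answer once */
                        printf("%ux%u .. %ux%u, step %ux%u\n",
                               fsize.stepwise.min_width,
                               fsize.stepwise.min_height,
                               fsize.stepwise.max_width,
                               fsize.stepwise.max_height,
                               fsize.stepwise.step_width,
                               fsize.stepwise.step_height);
                        break;
                }
        }
}

int main(void)
{
        int fd = open("/dev/video0", O_RDWR);   /* assumed device node */

        if (fd < 0)
                return 1;
        enum_frame_sizes(fd, V4L2_PIX_FMT_NV12); /* assumed pixel format */
        close(fd);
        return 0;
}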
/*! * V4L2 interface - ioctl function * * @param inode struct inode * * * @param file struct file * * * @param ioctlnr unsigned int * * @param arg void * * * @return 0 success, ENODEV for invalid device instance, * -1 for other errors. */ static int mxc_v4l2out_do_ioctl(struct inode *inode, struct file *file, unsigned int ioctlnr, void *arg) { struct video_device *dev = file->private_data; vout_data *vout = video_get_drvdata(dev); int retval = 0; int i = 0; if (!vout) return -ENODEV; /* make this _really_ smp-safe */ if (down_interruptible(&vout->busy_lock)) return -EINTR; switch (ioctlnr) { case VIDIOC_QUERYCAP: { struct v4l2_capability *cap = arg; strcpy(cap->driver, "mxc_v4l2_output"); cap->version = 0; cap->capabilities = V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING; retval = 0; break; } case VIDIOC_G_FMT: { struct v4l2_format *gf = arg; retval = mxc_v4l2out_g_fmt(vout, gf); break; } case VIDIOC_S_FMT: { struct v4l2_format *sf = arg; if (vout->state != STATE_STREAM_OFF) { retval = -EBUSY; break; } retval = mxc_v4l2out_s_fmt(vout, sf); break; } case VIDIOC_REQBUFS: { struct v4l2_requestbuffers *req = arg; if ((req->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) || (req->memory != V4L2_MEMORY_MMAP)) { DPRINTK ("VIDIOC_REQBUFS: incorrect buffer type\n"); retval = -EINVAL; break; } if (req->count == 0) { mxc_v4l2out_streamoff(vout); if (vout->queue_buf_paddr[0] != 0) { mxc_free_buffers(vout->queue_buf_paddr, vout->buffer_cnt); DPRINTK ("VIDIOC_REQBUFS: freed buffers\n"); } vout->buffer_cnt = 0; break; } if (vout->queue_buf_paddr[0] != 0) { DPRINTK ("VIDIOC_REQBUFS: Cannot allocate buffers\n"); retval = -EBUSY; break; } if (req->count < MIN_FRAME_NUM) { req->count = MIN_FRAME_NUM; } else if (req->count > MAX_FRAME_NUM) { req->count = MAX_FRAME_NUM; } vout->buffer_cnt = req->count; retval = mxc_allocate_buffers(vout->queue_buf_paddr, vout->buffer_cnt, PAGE_ALIGN(vout->v2f.fmt. pix. sizeimage)); if (retval < 0) break; /* Init buffer queues */ vout->done_q.head = 0; vout->done_q.tail = 0; vout->ready_q.head = 0; vout->ready_q.tail = 0; for (i = 0; i < vout->buffer_cnt; i++) { memset(&(vout->v4l2_bufs[i]), 0, sizeof(vout->v4l2_bufs[i])); vout->v4l2_bufs[i].flags = 0; vout->v4l2_bufs[i].memory = V4L2_MEMORY_MMAP; vout->v4l2_bufs[i].index = i; vout->v4l2_bufs[i].type = V4L2_BUF_TYPE_VIDEO_OUTPUT; vout->v4l2_bufs[i].length = PAGE_ALIGN(vout->v2f.fmt.pix.sizeimage); vout->v4l2_bufs[i].m.offset = (unsigned long)vout->queue_buf_paddr[i]; vout->v4l2_bufs[i].timestamp.tv_sec = 0; vout->v4l2_bufs[i].timestamp.tv_usec = 0; } break; } case VIDIOC_QUERYBUF: { struct v4l2_buffer *buf = arg; u32 type = buf->type; int index = buf->index; if ((type != V4L2_BUF_TYPE_VIDEO_OUTPUT) || (index >= vout->buffer_cnt)) { DPRINTK ("VIDIOC_QUERYBUFS: incorrect buffer type\n"); retval = -EINVAL; break; } down(&vout->param_lock); memcpy(buf, &(vout->v4l2_bufs[index]), sizeof(*buf)); up(&vout->param_lock); break; } case VIDIOC_QBUF: { struct v4l2_buffer *buf = arg; int index = buf->index; u32 lock_flags; if ((buf->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) || (index >= vout->buffer_cnt) || (buf->flags != 0)) { retval = -EINVAL; break; } DPRINTK("VIDIOC_QBUF: %d\n", buf->index); spin_lock_irqsave(&g_lock, lock_flags); memcpy(&(vout->v4l2_bufs[index]), buf, sizeof(*buf)); vout->v4l2_bufs[index].flags |= V4L2_BUF_FLAG_QUEUED; g_buf_q_cnt++; queue_buf(&vout->ready_q, index); if (vout->state == STATE_STREAM_PAUSED) { unsigned long timeout = timeval_to_jiffies(&vout->v4l2_bufs[index]. 
timestamp); if (!timeout) { /* if timestamp is 0, then default to 30fps */ timeout = vout->start_jiffies + msecs_to_jiffies(vout->frame_count * 33); } else { /* Adjust time from time of day to jiffies */ timeout -= vout->start_tod_jiffies; } vout->output_timer.expires = timeout; DPRINTK("QBUF: frame #%u timeout @ %u jiffies, " "current = %u\n", vout->frame_count, timeout, jiffies); add_timer(&vout->output_timer); vout->state = STATE_STREAM_ON; vout->frame_count++; } spin_unlock_irqrestore(&g_lock, lock_flags); break; } case VIDIOC_DQBUF: { struct v4l2_buffer *buf = arg; int idx; /* DPRINTK("VIDIOC_DQBUF: q size = %d\n", queue_size(&vout->done_q)); */ if ((queue_size(&vout->done_q) == 0) && (file->f_flags & O_NONBLOCK)) { retval = -EAGAIN; break; } if (!wait_event_interruptible_timeout(vout->v4l_bufq, queue_size(&vout-> done_q) != 0, 2 * HZ)) { printk("VIDIOC_DQBUF: timeout\n"); retval = -ETIME; break; } else if (signal_pending(current)) { printk("VIDIOC_DQBUF: interrupt received\n"); vout->state = STATE_STREAM_STOPPING; retval = -ERESTARTSYS; break; } idx = dequeue_buf(&vout->done_q); if (idx == -1) { /* No frame free */ printk ("VIDIOC_DQBUF: no free buffers, returning\n"); retval = -EAGAIN; break; } if ((vout->v4l2_bufs[idx].flags & V4L2_BUF_FLAG_DONE) == 0) printk ("VIDIOC_DQBUF: buffer in done q, but not " "flagged as done\n"); vout->v4l2_bufs[idx].flags = 0; memcpy(buf, &(vout->v4l2_bufs[idx]), sizeof(*buf)); DPRINTK("VIDIOC_DQBUF: %d\n", buf->index); break; } case VIDIOC_STREAMON: { struct timeval t; do_gettimeofday(&t); vout->start_tod_jiffies = timeval_to_jiffies(&t) - jiffies; vout->frame_count = 2; vout->start_jiffies = jiffies; DPRINTK("VIDIOC_STREAMON: start time = %u jiffies, " "tod adjustment = %u\n", vout->start_jiffies, vout->start_tod_jiffies); retval = mxc_v4l2out_streamon(vout); break; } case VIDIOC_STREAMOFF: { retval = mxc_v4l2out_streamoff(vout); break; } case VIDIOC_G_CTRL: { retval = mxc_get_v42lout_control(vout, arg); break; } case VIDIOC_S_CTRL: { retval = mxc_set_v42lout_control(vout, arg); break; } case VIDIOC_CROPCAP: { struct v4l2_cropcap *cap = arg; if (cap->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) { retval = -EINVAL; break; } cap->bounds = vout->crop_bounds[vout->cur_disp_output]; cap->defrect = vout->crop_bounds[vout->cur_disp_output]; retval = 0; break; } case VIDIOC_G_CROP: { struct v4l2_crop *crop = arg; if (crop->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) { retval = -EINVAL; break; } crop->c = vout->crop_current; break; } case VIDIOC_S_CROP: { struct v4l2_crop *crop = arg; struct v4l2_rect *b = &(vout->crop_bounds[vout->cur_disp_output]); if (crop->type != V4L2_BUF_TYPE_VIDEO_OUTPUT) { retval = -EINVAL; break; } if (crop->c.height < 0) { retval = -EINVAL; break; } if (crop->c.width < 0) { retval = -EINVAL; break; } if (crop->c.top < b->top) crop->c.top = b->top; if (crop->c.top > b->top + b->height) crop->c.top = b->top + b->height; if (crop->c.height > b->top - crop->c.top + b->height) crop->c.height = b->top - crop->c.top + b->height; if (crop->c.left < b->left) crop->c.left = b->left; if (crop->c.left > b->left + b->width) crop->c.left = b->left + b->width; if (crop->c.width > b->left - crop->c.left + b->width) crop->c.width = b->left - crop->c.left + b->width; /* stride line limitation */ crop->c.height -= crop->c.height % 8; crop->c.width -= crop->c.width % 8; vout->crop_current = crop->c; vout->sdc_fg_buf_size = vout->crop_current.width * vout->crop_current.height; vout->sdc_fg_buf_size *= fmt_to_bpp(SDC_FG_FB_FORMAT) / 8; /* Free previously allocated buffer */
if (vout->display_bufs[0] != NULL) { mxc_free_buffers(vout->display_bufs, 2); } if ((retval = mxc_allocate_buffers(vout->display_bufs, 2, vout-> sdc_fg_buf_size)) < 0) { DPRINTK("unable to allocate SDC FG buffers\n"); retval = -ENOMEM; break; } break; } case VIDIOC_ENUMOUTPUT: { struct v4l2_output *output = arg; if ((output->index >= 4) || (vout->output_enabled[output->index] == false)) { retval = -EINVAL; break; } if (output->index < 3) { *output = mxc_outputs[MXC_V4L2_OUT_2_ADC]; output->name[4] = '0' + output->index; } else { *output = mxc_outputs[MXC_V4L2_OUT_2_SDC]; } break; } case VIDIOC_G_OUTPUT: { int *p_output_num = arg; *p_output_num = vout->cur_disp_output; break; } case VIDIOC_S_OUTPUT: { int *p_output_num = arg; if ((*p_output_num >= 4) || (vout->output_enabled[*p_output_num] == false)) { retval = -EINVAL; break; } if (vout->state != STATE_STREAM_OFF) { retval = -EBUSY; break; } vout->cur_disp_output = *p_output_num; break; } case VIDIOC_ENUM_FMT: case VIDIOC_TRY_FMT: case VIDIOC_QUERYCTRL: case VIDIOC_G_PARM: case VIDIOC_ENUMSTD: case VIDIOC_G_STD: case VIDIOC_S_STD: case VIDIOC_G_TUNER: case VIDIOC_S_TUNER: case VIDIOC_G_FREQUENCY: case VIDIOC_S_FREQUENCY: default: retval = -EINVAL; break; } up(&vout->busy_lock); return retval; }
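/*
 * Userspace sketch (not part of the driver): the typical call sequence an
 * application would use against the output ioctls handled above -- S_FMT,
 * REQBUFS (MMAP), QUERYBUF + mmap, QBUF, STREAMON, DQBUF, STREAMOFF.
 * The device node, resolution and pixel format are assumptions for
 * illustration only; error handling and munmap are omitted for brevity.
 */
#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
        struct v4l2_format fmt;
        struct v4l2_requestbuffers req;
        struct v4l2_buffer buf;
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        void *frame;
        int fd, i;

        fd = open("/dev/video16", O_RDWR);      /* assumed output node */
        if (fd < 0)
                return 1;

        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        fmt.fmt.pix.width = 320;                /* assumed geometry */
        fmt.fmt.pix.height = 240;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
        if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
                goto out;

        memset(&req, 0, sizeof(req));
        req.count = 4;                          /* driver may clamp this */
        req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        req.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
                goto out;

        for (i = 0; i < (int)req.count; i++) {
                memset(&buf, 0, sizeof(buf));
                buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
                buf.memory = V4L2_MEMORY_MMAP;
                buf.index = i;
                if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
                        goto out;
                frame = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                             MAP_SHARED, fd, buf.m.offset);
                if (frame == MAP_FAILED)
                        goto out;
                memset(frame, 0x80, buf.length);        /* fill with grey */
                if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
                        goto out;
        }

        if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
                goto out;

        /* dequeue one displayed buffer, requeue it once, then stop */
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_DQBUF, &buf) == 0)
                ioctl(fd, VIDIOC_QBUF, &buf);
        ioctl(fd, VIDIOC_STREAMOFF, &type);
out:
        close(fd);
        return 0;
}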
static long uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg) { struct video_device *vdev = video_devdata(file); struct uvc_device *uvc = video_get_drvdata(vdev); struct uvc_file_handle *handle = to_uvc_file_handle(file->private_data); struct usb_composite_dev *cdev = uvc->func.config->cdev; struct uvc_video *video = &uvc->video; int ret = 0; switch (cmd) { /* Query capabilities */ case VIDIOC_QUERYCAP: { struct v4l2_capability *cap = arg; memset(cap, 0, sizeof *cap); strncpy(cap->driver, "g_uvc", sizeof(cap->driver)); strncpy(cap->card, cdev->gadget->name, sizeof(cap->card)); strncpy(cap->bus_info, dev_name(&cdev->gadget->dev), sizeof cap->bus_info); cap->version = DRIVER_VERSION_NUMBER; cap->capabilities = V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING; break; } /* Get & Set format */ case VIDIOC_G_FMT: { struct v4l2_format *fmt = arg; if (fmt->type != video->queue.type) return -EINVAL; return uvc_v4l2_get_format(video, fmt); } case VIDIOC_S_FMT: { struct v4l2_format *fmt = arg; if (fmt->type != video->queue.type) return -EINVAL; return uvc_v4l2_set_format(video, fmt); } /* Buffers & streaming */ case VIDIOC_REQBUFS: { struct v4l2_requestbuffers *rb = arg; if (rb->type != video->queue.type || rb->memory != V4L2_MEMORY_MMAP) return -EINVAL; ret = uvc_alloc_buffers(&video->queue, rb->count, video->imagesize); if (ret < 0) return ret; rb->count = ret; ret = 0; break; } case VIDIOC_QUERYBUF: { struct v4l2_buffer *buf = arg; if (buf->type != video->queue.type) return -EINVAL; return uvc_query_buffer(&video->queue, buf); } case VIDIOC_QBUF: if ((ret = uvc_queue_buffer(&video->queue, arg)) < 0) return ret; return uvc_video_pump(video); case VIDIOC_DQBUF: return uvc_dequeue_buffer(&video->queue, arg, file->f_flags & O_NONBLOCK); case VIDIOC_STREAMON: { int *type = arg; if (*type != video->queue.type) return -EINVAL; return uvc_video_enable(video, 1); } case VIDIOC_STREAMOFF: { int *type = arg; if (*type != video->queue.type) return -EINVAL; return uvc_video_enable(video, 0); } /* Events */ case VIDIOC_DQEVENT: { struct v4l2_event *event = arg; ret = v4l2_event_dequeue(&handle->vfh, event, file->f_flags & O_NONBLOCK); if (ret == 0 && event->type == UVC_EVENT_SETUP) { struct uvc_event *uvc_event = (void *)&event->u.data; /* Tell the complete callback to generate an event for * the next request that will be enqueued by * uvc_event_write. */ uvc->event_setup_out = !(uvc_event->req.bRequestType & USB_DIR_IN); uvc->event_length = uvc_event->req.wLength; } return ret; } case VIDIOC_SUBSCRIBE_EVENT: { struct v4l2_event_subscription *sub = arg; if (sub->type < UVC_EVENT_FIRST || sub->type > UVC_EVENT_LAST) return -EINVAL; return v4l2_event_subscribe(&handle->vfh, arg, 2, NULL); } case VIDIOC_UNSUBSCRIBE_EVENT: return v4l2_event_unsubscribe(&handle->vfh, arg); case UVCIOC_SEND_RESPONSE: ret = uvc_send_response(uvc, arg); break; default: return -ENOIOCTLCMD; } return ret; }
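/*
 * Userspace sketch (not part of the gadget driver): subscribing to and
 * dequeuing the UVC_EVENT_SETUP events that the VIDIOC_SUBSCRIBE_EVENT /
 * VIDIOC_DQEVENT cases above dispatch. The device node path is an
 * assumption, and <linux/usb/g_uvc.h> is assumed to provide the UVC_EVENT_*
 * constants used by the driver above (older trees keep them in the gadget's
 * own uvc.h). A real application would answer each setup packet through the
 * UVCIOC_SEND_RESPONSE ioctl handled above.
 */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <linux/usb/g_uvc.h>

int main(void)
{
        struct v4l2_event_subscription sub;
        struct v4l2_event event;
        int fd = open("/dev/video0", O_RDWR);   /* assumed gadget node */

        if (fd < 0)
                return 1;

        memset(&sub, 0, sizeof(sub));
        sub.type = UVC_EVENT_SETUP;
        if (ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0)
                goto out;

        memset(&event, 0, sizeof(event));
        /* blocks until the gadget driver queues a setup event */
        if (ioctl(fd, VIDIOC_DQEVENT, &event) == 0 &&
            event.type == UVC_EVENT_SETUP) {
                /* decode event.u.data here, then reply to the host via
                 * UVCIOC_SEND_RESPONSE */
                printf("setup request received\n");
        }
out:
        close(fd);
        return 0;
}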
/* * Initiate Memory Mapping or User Pointer I/O */ int atomisp_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *req) { struct video_device *vdev = video_devdata(file); struct atomisp_device *isp = video_get_drvdata(vdev); struct atomisp_video_pipe *pipe = atomisp_to_video_pipe(vdev); struct sh_css_frame_info out_info, vf_info; struct sh_css_frame *frame; struct videobuf_vmalloc_memory *vm_mem; int ret = 0, i = 0; if (req->count == 0) { atomisp_videobuf_free(&pipe->capq); if ((!isp->isp_subdev.video_out_vf.opened) && (isp->vf_frame)) { sh_css_frame_free(isp->vf_frame); isp->vf_frame = NULL; } return 0; } if ((!pipe->is_main) && (!atomisp_is_viewfinder_support(isp))) return -EINVAL; ret = videobuf_reqbufs(&pipe->capq, req); if (ret) return ret; switch (isp->sw_contex.run_mode) { case CI_MODE_STILL_CAPTURE: if (isp->main_format->out_sh_fmt != SH_CSS_FRAME_FORMAT_RAW) { if (sh_css_capture_get_viewfinder_frame_info(&vf_info)) goto error; if ((!isp->isp_subdev.video_out_vf.opened) && (sh_css_frame_allocate_from_info(&isp->vf_frame, &vf_info))) goto error; } if (sh_css_capture_get_output_frame_info(&out_info)) goto error; break; case CI_MODE_VIDEO: if (sh_css_video_get_viewfinder_frame_info(&vf_info)) goto error; if ((!isp->isp_subdev.video_out_vf.opened) && (sh_css_frame_allocate_from_info(&isp->vf_frame, &vf_info))) goto error; if (sh_css_video_get_output_frame_info(&out_info)) goto error; break; case CI_MODE_PREVIEW: if (sh_css_preview_get_output_frame_info(&out_info)) goto error; break; default: return -EINVAL; } /* * for the user pointer type, buffers are not really allocated here; * buffers are set up in the QBUF operation through the v4l2_buffer structure */ if (req->memory == V4L2_MEMORY_USERPTR) { v4l2_info(&atomisp_dev, "user pointer, not really allocating" " memory here.\n"); return 0; } if (!pipe->is_main) /* * Allocate the real frame here for the preview node using our * memory management function */ for (i = 0; i < req->count; i++) { if (sh_css_frame_allocate_from_info(&frame, &vf_info)) goto error; vm_mem = pipe->capq.bufs[i]->priv; vm_mem->vaddr = frame; } else /* * Allocate the real frame here for the capture node using our * memory management function */ for (i = 0; i < req->count; i++) { if (sh_css_frame_allocate_from_info(&frame, &out_info)) goto error; vm_mem = pipe->capq.bufs[i]->priv; vm_mem->vaddr = frame; } return ret; error: while (i--) { vm_mem = pipe->capq.bufs[i]->priv; sh_css_frame_free(vm_mem->vaddr); } if (isp->vf_frame) sh_css_frame_free(isp->vf_frame); return -ENOMEM; }
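/*
 * Userspace sketch (not part of the driver): with V4L2_MEMORY_USERPTR the
 * REQBUFS call above only sets up bookkeeping; the frame memory itself is
 * handed over later, buffer by buffer, through VIDIOC_QBUF. The node path,
 * buffer count and frame size below are assumptions for illustration.
 */
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

#define FRAME_SIZE (1920 * 1080 * 2)    /* assumed sizeimage */

int main(void)
{
        struct v4l2_requestbuffers req;
        struct v4l2_buffer buf;
        void *frames[4];
        int fd, i;

        fd = open("/dev/video0", O_RDWR);       /* assumed capture node */
        if (fd < 0)
                return 1;

        memset(&req, 0, sizeof(req));
        req.count = 4;
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_USERPTR;       /* no driver allocation yet */
        if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
                goto out;

        for (i = 0; i < 4; i++) {
                frames[i] = malloc(FRAME_SIZE); /* application-owned memory */
                if (!frames[i])
                        goto out;

                memset(&buf, 0, sizeof(buf));
                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_USERPTR;
                buf.index = i;
                buf.m.userptr = (unsigned long)frames[i];
                buf.length = FRAME_SIZE;
                /* this is where the driver learns about the user pages */
                if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
                        goto out;
        }
out:
        close(fd);
        return 0;
}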
static int fimc_open(struct file *filp) { struct fimc_control *ctrl; struct s3c_platform_fimc *pdata; unsigned long flags; int ret; u32 cfg; #ifdef VIEW_FUNCTION_CALL printk("[FIMC_DEV] %s(%d)\n", __func__, __LINE__); #endif /* An ugly hack to make the i2c pins output low */ if (unlikely(make_i2c_pin_low == 1)) { cfg = readl(S5P64XX_GPD1DAT); cfg &= ~(0x1 << 0); writel(cfg, S5P64XX_GPD1DAT); cfg = readl(S5P64XX_GPD1DAT); cfg &= ~(0x1 << 1); writel(cfg, S5P64XX_GPD1DAT); make_i2c_pin_low = 0; } ctrl = video_get_drvdata(video_devdata(filp)); #ifdef S5P6442_POWER_GATING_CAM del_timer(&g_fimc_domain_timer); // fimc0 controller for Camera if(ctrl->id == CAM_ID){ spin_lock_irqsave(&fimc_domain_lock, flags); gFIMC_CNT[CAM_ID]++; s5p6442_idle_pm_gpiocfg(S5P6442_CAM_ID, S5P6442_ACTIVE_MODE); s5p6442_pwrgate_config(S5P6442_CAM_ID, S5P6442_ACTIVE_MODE); spin_unlock_irqrestore(&fimc_domain_lock, flags); } // gFIMC_CNT++; // } #endif #ifdef CONFIG_CPU_FREQ if(ctrl->id == CAM_ID){ set_dvfs_level(0); } #endif /* CONFIG_CPU_FREQ */ pdata = to_fimc_plat(ctrl->dev); mutex_lock(&ctrl->lock); if (atomic_read(&ctrl->in_use)) { ret = -EBUSY; goto resource_busy; } else { atomic_inc(&ctrl->in_use); } if (pdata->clk_on) pdata->clk_on(to_platform_device(ctrl->dev), ctrl->clk); /* Apply things to interface register */ fimc_hwset_reset(ctrl); filp->private_data = ctrl; ctrl->fb.open_fifo = s3cfb_open_fifo; ctrl->fb.close_fifo = s3cfb_close_fifo; ret = s3cfb_direct_ioctl(ctrl->id, S3CFB_GET_LCD_WIDTH, (unsigned long)&ctrl->fb.lcd_hres); if (ret < 0) dev_err(ctrl->dev, "Fail: S3CFB_GET_LCD_WIDTH\n"); ret = s3cfb_direct_ioctl(ctrl->id, S3CFB_GET_LCD_HEIGHT, (unsigned long)&ctrl->fb.lcd_vres); if (ret < 0) dev_err(ctrl->dev, "Fail: S3CFB_GET_LCD_HEIGHT\n"); ctrl->status = FIMC_STREAMOFF; #if 0 /* To do : have to send ctrl to the fimd driver. */ ret = s3cfb_direct_ioctl(ctrl->id, S3CFB_SET_SUSPEND_FIFO, (unsigned long)fimc_sleep); if (ret < 0) dev_err(ctrl->dev, "s3cfb_direct_ioctl(S3CFB_SET_SUSPEND_FIFO) fail\n"); ret = s3cfb_direct_ioctl(ctrl->id, S3CFB_SET_RESUME_FIFO, (unsigned long)fimc_wakeup); if (ret < 0) dev_err(ctrl->dev, "s3cfb_direct_ioctl(S3CFB_SET_SUSPEND_FIFO) fail\n"); #endif mutex_unlock(&ctrl->lock); return 0; resource_busy: mutex_unlock(&ctrl->lock); return ret; }
long atomisp_compat_ioctl32(struct file *file, unsigned int cmd, unsigned long arg) { struct video_device *vdev = video_devdata(file); struct atomisp_device *isp = video_get_drvdata(vdev); long ret = -ENOIOCTLCMD; if (!file->f_op->unlocked_ioctl) return ret; switch (cmd) { case ATOMISP_IOC_G_XNR: case ATOMISP_IOC_S_XNR: case ATOMISP_IOC_G_NR: case ATOMISP_IOC_S_NR: case ATOMISP_IOC_G_TNR: case ATOMISP_IOC_S_TNR: case ATOMISP_IOC_G_BLACK_LEVEL_COMP: case ATOMISP_IOC_S_BLACK_LEVEL_COMP: case ATOMISP_IOC_G_EE: case ATOMISP_IOC_S_EE: case ATOMISP_IOC_S_DIS_VECTOR: case ATOMISP_IOC_G_ISP_PARM: case ATOMISP_IOC_S_ISP_PARM: case ATOMISP_IOC_G_ISP_GAMMA: case ATOMISP_IOC_S_ISP_GAMMA: case ATOMISP_IOC_ISP_MAKERNOTE: case ATOMISP_IOC_G_ISP_MACC: case ATOMISP_IOC_S_ISP_MACC: case ATOMISP_IOC_G_ISP_BAD_PIXEL_DETECTION: case ATOMISP_IOC_S_ISP_BAD_PIXEL_DETECTION: case ATOMISP_IOC_G_ISP_FALSE_COLOR_CORRECTION: case ATOMISP_IOC_S_ISP_FALSE_COLOR_CORRECTION: case ATOMISP_IOC_G_ISP_CTC: case ATOMISP_IOC_S_ISP_CTC: case ATOMISP_IOC_G_ISP_WHITE_BALANCE: case ATOMISP_IOC_S_ISP_WHITE_BALANCE: case ATOMISP_IOC_CAMERA_BRIDGE: case ATOMISP_IOC_G_SENSOR_MODE_DATA: case ATOMISP_IOC_S_BINNING_SUM: case ATOMISP_IOC_S_EXPOSURE: case ATOMISP_IOC_G_3A_CONFIG: case ATOMISP_IOC_S_3A_CONFIG: case ATOMISP_IOC_ACC_UNLOAD: case ATOMISP_IOC_ACC_START: case ATOMISP_IOC_ACC_WAIT: case ATOMISP_IOC_ACC_ABORT: case ATOMISP_IOC_G_ISP_GAMMA_CORRECTION: case ATOMISP_IOC_S_ISP_GAMMA_CORRECTION: case ATOMISP_IOC_S_CONT_CAPTURE_CONFIG: ret = native_ioctl(file, cmd, arg); break; case ATOMISP_IOC_G_HISTOGRAM32: case ATOMISP_IOC_S_HISTOGRAM32: case ATOMISP_IOC_G_DIS_STAT32: case ATOMISP_IOC_S_DIS_COEFS32: case ATOMISP_IOC_S_DIS_VECTOR32: case ATOMISP_IOC_G_3A_STAT32: case ATOMISP_IOC_G_ISP_GDC_TAB32: case ATOMISP_IOC_S_ISP_GDC_TAB32: case ATOMISP_IOC_S_ISP_FPN_TABLE32: case ATOMISP_IOC_G_ISP_OVERLAY32: case ATOMISP_IOC_S_ISP_OVERLAY32: case ATOMISP_IOC_G_SENSOR_CALIBRATION_GROUP32: case ATOMISP_IOC_ACC_LOAD32: case ATOMISP_IOC_ACC_S_ARG32: case ATOMISP_IOC_G_SENSOR_PRIV_INT_DATA32: case ATOMISP_IOC_S_ISP_SHD_TAB32: case ATOMISP_IOC_ACC_DESTAB32: case ATOMISP_IOC_G_MOTOR_PRIV_INT_DATA32: case ATOMISP_IOC_ACC_MAP32: case ATOMISP_IOC_ACC_UNMAP32: case ATOMISP_IOC_ACC_S_MAPPED_ARG32: case ATOMISP_IOC_S_PARAMETERS32: case ATOMISP_IOC_ACC_LOAD_TO_PIPE32: ret = atomisp_do_compat_ioctl(file, cmd, arg); break; default: dev_warn(isp->dev, "%s: unknown ioctl '%c', dir=%d, #%d (0x%08x)\n", __func__, _IOC_TYPE(cmd), _IOC_DIR(cmd), _IOC_NR(cmd), cmd); break; } return ret; }
static int vloopback_mmap(struct file *f, struct vm_area_struct *vma) { struct video_device *loopdev=video_devdata(f); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) priv_ptr ptr=(priv_ptr)video_get_drvdata(loopdev); #else priv_ptr ptr=(priv_ptr)loopdev->priv; #endif int nr=ptr->pipenr; unsigned long start = (unsigned long)vma->vm_start; long size = vma->vm_end - vma->vm_start; unsigned long page, pos; down(&loops[nr]->lock); if (ptr->in) { loops[nr]->zerocopy=1; if (loops[nr]->ropen) { info("Can't change size while opened for read"); up(&loops[nr]->lock); return -EINVAL; } if (!size) { up(&loops[nr]->lock); return -EINVAL; } if (loops[nr]->buffer) rvfree(loops[nr]->buffer, loops[nr]->buflength*N_BUFFS); loops[nr]->buflength=size; loops[nr]->buffer=rvmalloc(loops[nr]->buflength*N_BUFFS); } if (loops[nr]->buffer == NULL) { up(&loops[nr]->lock); return -EINVAL; } if (size > (((N_BUFFS * loops[nr]->buflength) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))) { up(&loops[nr]->lock); return -EINVAL; } pos = (unsigned long)loops[nr]->buffer; while (size > 0) { #if LINUX_VERSION_CODE < KERNEL_VERSION(2,6,9) page = kvirt_to_pa(pos); if (remap_page_range(vma,start, page, PAGE_SIZE, PAGE_SHARED)) { #else page = vmalloc_to_pfn((void *)pos); if (remap_pfn_range(vma, start, page, PAGE_SIZE, PAGE_SHARED)) { #endif up(&loops[nr]->lock); return -EAGAIN; } start += PAGE_SIZE; pos += PAGE_SIZE; size -= PAGE_SIZE; } up(&loops[nr]->lock); return 0; } #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,31) static long vloopback_ioctl(struct file *f, unsigned int cmd, unsigned long arg) #else static int vloopback_ioctl(struct inode *inod, struct file *f, unsigned int cmd, unsigned long arg) #endif { struct video_device *loopdev=video_devdata(f); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) priv_ptr ptr=(priv_ptr)video_get_drvdata(loopdev); #else priv_ptr ptr=(priv_ptr)loopdev->priv; #endif int nr=ptr->pipenr; int i; if (loops[nr]->zerocopy) { if (!ptr->in) { loops[nr]->ioctlnr=cmd; loops[nr]->ioctllength=_IOC_SIZE(cmd); /* info("DEBUG: vl_ioctl: !loop->in"); */ /* info("DEBUG: vl_ioctl: cmd %lu", cmd); */ /* info("DEBUG: vl_ioctl: len %lu", loops[nr]->ioctllength); */ if(copy_from_user(loops[nr]->ioctldata, (void*)arg, _IOC_SIZE(cmd))) return -EFAULT; kill_proc(loops[nr]->pid, SIGIO, 1); #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) wait_event_interruptible(loops[nr]->wait, loops[nr]->ioctlnr==-1); #else interruptible_sleep_on(&loops[nr]->wait); #endif if (loops[nr]->invalid_ioctl) { //info ("DEBUG: There was an invalid ioctl"); loops[nr]->invalid_ioctl = 0; return -ENOTTY; } if (cmd & IOC_IN && !(cmd & IOC_OUT)) { //info("DEBUG: vl_ioctl: cmd & IOC_IN 1"); if (memcmp(loops[nr]->ioctlretdata, loops[nr]->ioctldata, _IOC_SIZE(cmd))) { return -EINVAL; } //info("DEBUG: vl_ioctl: cmd & IOC_IN 2"); return 0; } else { if (copy_to_user((void*)arg, loops[nr]->ioctlretdata, _IOC_SIZE(cmd))) return -EFAULT; //info("DEBUG: vl_ioctl: !(cmd & IOC_IN) 1"); return 0; } } else { if ( (loops[nr]->ioctlnr!=cmd) && (cmd != (VIDIOCSINVALID))) { /* wrong ioctl */ info("DEBUG: vo_ioctl: Wrong IOCTL"); return 0; } if (cmd == VIDIOCSINVALID) { loops[nr]->invalid_ioctl = 1; } else { if (copy_from_user(loops[nr]->ioctlretdata, (void*)arg, loops[nr]->ioctllength)) return -EFAULT; } loops[nr]->ioctlnr=-1; if (waitqueue_active(&loops[nr]->wait)) wake_up(&loops[nr]->wait); return 0; } } switch(cmd) { /* Get capabilities */ case VIDIOCGCAP: { struct video_capability b; if (ptr->in) { sprintf(b.name, "Video loopback %d input", ptr->pipenr); b.type = 0; } 
else { sprintf(b.name, "Video loopback %d output", ptr->pipenr); b.type = VID_TYPE_CAPTURE; } b.channels=1; b.audios=0; b.maxwidth=loops[nr]->width; b.maxheight=loops[nr]->height; b.minwidth=20; b.minheight=20; if(copy_to_user((void*)arg, &b, sizeof(b))) return -EFAULT; return 0; } /* Get channel info (sources) */ case VIDIOCGCHAN: { struct video_channel v; if(copy_from_user(&v, (void*)arg, sizeof(v))) return -EFAULT; if(v.channel!=0) { info("VIDIOCGCHAN: Invalid Channel, was %d", v.channel); v.channel=0; //return -EINVAL; } v.flags=0; v.tuners=0; v.norm=0; v.type = VIDEO_TYPE_CAMERA; /*strcpy(v.name, "Loopback"); -- tibit */ strcpy(v.name, "Composite1"); if(copy_to_user((void*)arg, &v, sizeof(v))) return -EFAULT; return 0; } /* Set channel */ case VIDIOCSCHAN: { int v; if(copy_from_user(&v, (void*)arg, sizeof(v))) return -EFAULT; if(v!=0) { info("VIDIOCSCHAN: Invalid Channel, was %d", v); return -EINVAL; } return 0; } /* Get tuner abilities */ case VIDIOCGTUNER: { struct video_tuner v; if(copy_from_user(&v, (void*)arg, sizeof(v))!=0) return -EFAULT; if(v.tuner) { info("VIDIOCGTUNER: Invalid Tuner, was %d", v.tuner); return -EINVAL; } strcpy(v.name, "Format"); v.rangelow=0; v.rangehigh=0; v.flags=0; v.mode=VIDEO_MODE_AUTO; if(copy_to_user((void*)arg,&v, sizeof(v))!=0) return -EFAULT; return 0; } /* Get picture properties */ case VIDIOCGPICT: { struct video_picture p; p.colour=0x8000; p.hue=0x8000; p.brightness=0x8000; p.contrast=0x8000; p.whiteness=0x8000; p.depth=0x8000; p.palette=loops[nr]->palette; if(copy_to_user((void*)arg, &p, sizeof(p))) return -EFAULT; return 0; } /* Set picture properties */ case VIDIOCSPICT: { struct video_picture p; if(copy_from_user(&p, (void*)arg, sizeof(p))) return -EFAULT; if (!ptr->in) { if (p.palette!=loops[nr]->palette) return -EINVAL; } else loops[nr]->palette=p.palette; return 0; } /* Get the video overlay window */ case VIDIOCGWIN: { struct video_window vw; vw.x=0; vw.y=0; vw.width=loops[nr]->width; vw.height=loops[nr]->height; vw.chromakey=0; vw.flags=0; vw.clipcount=0; if(copy_to_user((void*)arg, &vw, sizeof(vw))) return -EFAULT; return 0; } /* Set the video overlay window - passes clip list for hardware smarts , chromakey etc */ case VIDIOCSWIN: { struct video_window vw; if(copy_from_user(&vw, (void*)arg, sizeof(vw))) return -EFAULT; if(vw.flags) return -EINVAL; if(vw.clipcount) return -EINVAL; if (loops[nr]->height==vw.height && loops[nr]->width==vw.width) return 0; if(!ptr->in) { return -EINVAL; } else { loops[nr]->height=vw.height; loops[nr]->width=vw.width; /* Make sure nobody is using the buffer while we fool around with it. We are also not allowing changes while somebody using mmap has the output open. 
*/ down(&loops[nr]->lock); if (loops[nr]->ropen) { info("Can't change size while opened for read"); up(&loops[nr]->lock); return -EINVAL; } if (loops[nr]->buffer) rvfree(loops[nr]->buffer, loops[nr]->buflength*N_BUFFS); loops[nr]->buflength=vw.width*vw.height*4; loops[nr]->buffer=rvmalloc(loops[nr]->buflength*N_BUFFS); up(&loops[nr]->lock); } return 0; } /* Memory map buffer info */ case VIDIOCGMBUF: { struct video_mbuf vm; vm.size=loops[nr]->buflength*N_BUFFS; vm.frames=N_BUFFS; for (i=0; i<vm.frames; i++) vm.offsets[i]=i*loops[nr]->buflength; if(copy_to_user((void*)arg, &vm, sizeof(vm))) return -EFAULT; return 0; } /* Grab frames */ case VIDIOCMCAPTURE: { struct video_mmap vm; if (ptr->in) return -EINVAL; if (!loops[nr]->buffer) return -EINVAL; if (copy_from_user(&vm, (void*)arg, sizeof(vm))) return -EFAULT; if (vm.format!=loops[nr]->palette) return -EINVAL; if (vm.frame > N_BUFFS) return -EINVAL; return 0; } /* Sync with mmap grabbing */ case VIDIOCSYNC: { int frame; unsigned long fw; if (copy_from_user((void *)&frame, (void*)arg, sizeof(int))) return -EFAULT; if (ptr->in) return -EINVAL; if (!loops[nr]->buffer) return -EINVAL; /* Ok, everything should be alright since the program should have called VIDIOMCAPTURE and we are ready to do the 'capturing' */ if (frame > 1) return -EINVAL; loops[nr]->frame=frame; #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) fw = loops[nr]->frameswrite; wait_event_interruptible(loops[nr]->wait, fw!=loops[nr]->frameswrite); #else interruptible_sleep_on(&loops[nr]->wait); #endif if (!loops[nr]->buffer) /* possibly released during sleep */ return -EINVAL; loops[nr]->framesread++; return 0; } /* Get attached units */ case VIDIOCGUNIT: { struct video_unit vu; if (ptr->in) vu.video=loops[nr]->vloopout->minor; else vu.video=loops[nr]->vloopin->minor; vu.vbi=VIDEO_NO_UNIT; vu.radio=VIDEO_NO_UNIT; vu.audio=VIDEO_NO_UNIT; vu.teletext=VIDEO_NO_UNIT; if (copy_to_user((void*)arg, &vu, sizeof(vu))) return -EFAULT; return 0; } /* Get frame buffer */ case VIDIOCGFBUF: { struct video_buffer vb; memset(&vb, 0, sizeof(vb)); vb.base=NULL; if(copy_to_user((void *)arg, (void *)&vb, sizeof(vb))) return -EFAULT; return 0; } /* Start, end capture */ case VIDIOCCAPTURE: { int start; if (copy_from_user(&start, (void*)arg, sizeof(int))) return -EFAULT; /* if (start) info ("Capture started"); else info ("Capture stopped"); */ return 0; } case VIDIOCGFREQ: case VIDIOCSFREQ: case VIDIOCGAUDIO: case VIDIOCSAUDIO: return -EINVAL; case VIDIOCKEY: return 0; default: return -ENOTTY; //return -ENOIOCTLCMD; } return 0; } static unsigned int vloopback_poll(struct file *f, struct poll_table_struct *wait) { struct video_device *loopdev=video_devdata(f); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) priv_ptr ptr=(priv_ptr)video_get_drvdata(loopdev); #else priv_ptr ptr=(priv_ptr)loopdev->priv; #endif int nr=ptr->pipenr; if (loopdev==NULL) return -EFAULT; if (!ptr->in) return 0; if (loops[nr]->ioctlnr!=-1) { if (loops[nr]->zerocopy) { return (POLLIN | POLLPRI | POLLOUT | POLLRDNORM); } else { return (POLLOUT); } } return 0; } #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,31) static struct v4l2_file_operations fileops_template= #else static struct file_operations fileops_template= #endif { owner: THIS_MODULE, open: vloopback_open, release: vloopback_release, read: vloopback_read, write: vloopback_write, poll: vloopback_poll, ioctl: vloopback_ioctl, #if LINUX_VERSION_CODE > KERNEL_VERSION(2,6,15) && defined(CONFIG_COMPAT) compat_ioctl: v4l_compat_ioctl32, #endif mmap: vloopback_mmap, };
static ssize_t vloopback_read (struct file * f, char * buf, size_t count, loff_t *offset) { struct video_device *loopdev=video_devdata(f); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) priv_ptr ptr=(priv_ptr)video_get_drvdata(loopdev); #else priv_ptr ptr=(priv_ptr)loopdev->priv; #endif int nr=ptr->pipenr; unsigned long realcount=count; if (loops[nr]->zerocopy) { if (ptr->in) { if (realcount > loops[nr]->ioctllength+sizeof(unsigned long int)) realcount=loops[nr]->ioctllength+sizeof(unsigned long int); if (copy_to_user(buf , &loops[nr]->ioctlnr, sizeof(unsigned long int))) return -EFAULT; if (copy_to_user(buf+sizeof(unsigned long int) , loops[nr]->ioctldata, realcount-sizeof(unsigned long int))) return -EFAULT; if (loops[nr]->ioctlnr==0) loops[nr]->ioctlnr=-1; return realcount; } else { struct video_window vidwin; struct video_mmap vidmmap; struct video_picture vidpic; fake_ioctl(nr, VIDIOCGWIN, &vidwin); fake_ioctl(nr, VIDIOCGPICT, &vidpic); vidmmap.height=vidwin.height; vidmmap.width=vidwin.width; vidmmap.format=vidpic.palette; vidmmap.frame=0; if (fake_ioctl(nr, VIDIOCMCAPTURE, &vidmmap)) return 0; if (fake_ioctl(nr, VIDIOCSYNC, &vidmmap)) return 0; realcount=vidwin.height*vidwin.width*vidpic.depth; } } if (ptr->in) return -EINVAL; if (realcount > loops[nr]->buflength) { realcount = loops[nr]->buflength; info("Not so much data in buffer!"); } loops[nr]->pendingread = 1; wake_up(&loops[nr]->wait); if (!loops[nr]->zerocopy) { #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) unsigned long fw=loops[nr]->frameswrite; wait_event_interruptible(loops[nr]->wait, fw!=loops[nr]->frameswrite); #else interruptible_sleep_on(&loops[nr]->wait); #endif } loops[nr]->pendingread = 0; down(&loops[nr]->lock); if (!loops[nr]->buffer) { up(&loops[nr]->lock); return 0; } if (copy_to_user(buf, loops[nr]->buffer, realcount)) return -EFAULT; up(&loops[nr]->lock); loops[nr]->framesread++; return realcount; }
static int create_pipe(int nr) { int minor_in, minor_out , ret; if (dev_offset == -1) { if (inminor == -1) { minor_in = -1; } else { minor_in = inminor+nr; } if (outminor == -1) { minor_out = -1; } else { minor_out = outminor+nr; } } else { minor_in = 2*nr + dev_offset; minor_out = 2*nr+1 + dev_offset; } /* allocate space for this pipe */ loops[nr]= kmalloc(sizeof(struct vloopback_pipe), GFP_KERNEL); if (!loops[nr]) return -ENOMEM; /* set up a new video device plus our private area */ loops[nr]->vloopin= video_device_alloc(); if (loops[nr]->vloopin == NULL) return -ENOMEM; *loops[nr]->vloopin = vloopback_template; #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) video_set_drvdata(loops[nr]->vloopin,kmalloc(sizeof(struct vloopback_private),GFP_KERNEL)); #else loops[nr]->vloopin->priv= kmalloc(sizeof(struct vloopback_private),GFP_KERNEL); #endif #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) if ((priv_ptr)video_get_drvdata(loops[nr]->vloopin) == NULL) { #else if (loops[nr]->vloopin->priv == NULL) { #endif kfree(loops[nr]->vloopin); return -ENOMEM; } /* repeat for the output device */ loops[nr]->vloopout= video_device_alloc(); if (loops[nr]->vloopout == NULL) { #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) kfree((priv_ptr)video_get_drvdata(loops[nr]->vloopin)); #else kfree(loops[nr]->vloopin->priv); #endif kfree(loops[nr]->vloopin); return -ENOMEM; } *loops[nr]->vloopout = vloopback_template; #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) video_set_drvdata(loops[nr]->vloopout,kmalloc(sizeof(struct vloopback_private),GFP_KERNEL)); #else loops[nr]->vloopout->priv= kmalloc(sizeof(struct vloopback_private),GFP_KERNEL); #endif if ((priv_ptr)video_get_drvdata(loops[nr]->vloopout) == NULL) { #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) kfree((priv_ptr)video_get_drvdata(loops[nr]->vloopin)); #else kfree(loops[nr]->vloopin->priv); #endif kfree(loops[nr]->vloopin); kfree(loops[nr]->vloopout); return -ENOMEM; } #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) ((priv_ptr)video_get_drvdata(loops[nr]->vloopin))->pipenr=nr; ((priv_ptr)video_get_drvdata(loops[nr]->vloopout))->pipenr=nr; #else ((priv_ptr)loops[nr]->vloopin->priv)->pipenr=nr; ((priv_ptr)loops[nr]->vloopout->priv)->pipenr=nr; #endif loops[nr]->invalid_ioctl = 0; /* tibit */ loops[nr]->buffer=NULL; loops[nr]->width=0; loops[nr]->height=0; loops[nr]->palette=0; loops[nr]->frameswrite=0; loops[nr]->framesread=0; loops[nr]->framesdumped=0; loops[nr]->wopen=0; loops[nr]->ropen=0; loops[nr]->frame=0; loops[nr]->pendingread=0; #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) ((priv_ptr)video_get_drvdata(loops[nr]->vloopin))->in=1; ((priv_ptr)video_get_drvdata(loops[nr]->vloopout))->in=0; #else ((priv_ptr)loops[nr]->vloopin->priv)->in=1; ((priv_ptr)loops[nr]->vloopout->priv)->in=0; #endif #if LINUX_VERSION_CODE < KERNEL_VERSION(2,6,27) loops[nr]->vloopin->type=0; #endif sprintf(loops[nr]->vloopin->name, "Video loopback %d input", nr); #if LINUX_VERSION_CODE < KERNEL_VERSION(2,6,27) loops[nr]->vloopout->type=VID_TYPE_CAPTURE; #endif sprintf(loops[nr]->vloopout->name, "Video loopback %d output", nr); init_waitqueue_head(&loops[nr]->wait); init_MUTEX(&loops[nr]->lock); ret = video_register_device(loops[nr]->vloopout, VFL_TYPE_GRABBER, minor_out); if ((ret ==-1) || (ret == -23)) { info("error registering device %s", loops[nr]->vloopout->name); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) kfree(((priv_ptr)video_get_drvdata(loops[nr]->vloopin))); video_unregister_device(loops[nr]->vloopin); 
kfree(((priv_ptr)video_get_drvdata(loops[nr]->vloopout))); #else kfree(loops[nr]->vloopin->priv); video_unregister_device(loops[nr]->vloopin); kfree(loops[nr]->vloopout->priv); #endif kfree(loops[nr]->vloopout); kfree(loops[nr]); loops[nr]=NULL; return ret; } ret = video_register_device(loops[nr]->vloopin, VFL_TYPE_GRABBER, minor_in); if ((ret == -1 ) || ( ret == -23 )) { info("error registering device %s",loops[nr]->vloopin->name); #if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,27) kfree(((priv_ptr)video_get_drvdata(loops[nr]->vloopin))); kfree(((priv_ptr)video_get_drvdata(loops[nr]->vloopout))); #else kfree(loops[nr]->vloopin->priv); kfree(loops[nr]->vloopout->priv); #endif kfree(loops[nr]->vloopin); kfree(loops[nr]->vloopout); kfree(loops[nr]); loops[nr]=NULL; return ret; } loops[nr]->ioctldata=kmalloc(1024, GFP_KERNEL); loops[nr]->ioctlretdata=kmalloc(1024, GFP_KERNEL); return 0; } /**************************************************************************** * init stuff ****************************************************************************/ MODULE_AUTHOR("J.B. Vreeken ([email protected])"); MODULE_DESCRIPTION("Video4linux loopback device."); #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) module_param(pipes, int, 000); #else MODULE_PARM(pipes, "i"); #endif MODULE_PARM_DESC(pipes, "Nr of pipes to create (each pipe uses two video devices)"); #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) module_param(spares, int, 000); #else MODULE_PARM(spares, "i"); #endif MODULE_PARM_DESC(spares, "Nr of spare pipes that should be created"); #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) module_param(dev_offset, int, 000); #else MODULE_PARM(dev_offset_param, "i"); #endif #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) module_param(inminor, int, 000); #else MODULE_PARM(inminor, "i"); #endif #if LINUX_VERSION_CODE > KERNEL_VERSION(2,5,0) module_param(outminor, int, 000); #else MODULE_PARM(outminor, "i"); #endif MODULE_PARM_DESC(dev_offset, "Prefered offset for video device numbers"); MODULE_LICENSE("GPL"); MODULE_VERSION( VLOOPBACK_VERSION ); static int __init vloopback_init(void) { int i,ret; info("Video4linux loopback driver v"VLOOPBACK_VERSION); if (pipes==-1) pipes=1; if (pipes > MAX_PIPES) { pipes=MAX_PIPES; info("Nr of pipes is limited to: %d", MAX_PIPES); } for (i=0; i<pipes; i++) { ret = create_pipe(i); if (ret == 0) { info("Loopback %d registered, input: video%d, output: video%d", i, loops[i]->vloopin->minor, loops[i]->vloopout->minor); nr_o_pipes=i+1; }else{ return ret; } } return 0; }
static int ivtv_open(struct file *filp) { struct video_device *vdev = video_devdata(filp); struct ivtv_stream *s = video_get_drvdata(vdev); struct ivtv *itv = s->itv; struct ivtv_open_id *item; int res = 0; IVTV_DEBUG_FILE("open %s\n", s->name); if (ivtv_init_on_first_open(itv)) { IVTV_ERR("Failed to initialize on device %s\n", video_device_node_name(vdev)); return -ENXIO; } #ifdef CONFIG_VIDEO_ADV_DEBUG /* Unless ivtv_fw_debug is set, error out if firmware dead. */ if (ivtv_fw_debug) { IVTV_WARN("Opening %s with dead firmware lockout disabled\n", video_device_node_name(vdev)); IVTV_WARN("Selected firmware errors will be ignored\n"); } else { #else if (1) { #endif res = ivtv_firmware_check(itv, "ivtv_serialized_open"); if (res == -EAGAIN) res = ivtv_firmware_check(itv, "ivtv_serialized_open"); if (res < 0) return -EIO; } if (s->type == IVTV_DEC_STREAM_TYPE_MPG && test_bit(IVTV_F_S_CLAIMED, &itv->streams[IVTV_DEC_STREAM_TYPE_YUV].s_flags)) return -EBUSY; if (s->type == IVTV_DEC_STREAM_TYPE_YUV && test_bit(IVTV_F_S_CLAIMED, &itv->streams[IVTV_DEC_STREAM_TYPE_MPG].s_flags)) return -EBUSY; if (s->type == IVTV_DEC_STREAM_TYPE_YUV) { if (read_reg(0x82c) == 0) { IVTV_ERR("Tried to open YUV output device but need to send data to mpeg decoder before it can be used\n"); /* return -ENODEV; */ } ivtv_udma_alloc(itv); } /* Allocate memory */ item = kzalloc(sizeof(struct ivtv_open_id), GFP_KERNEL); if (NULL == item) { IVTV_DEBUG_WARN("nomem on v4l2 open\n"); return -ENOMEM; } v4l2_fh_init(&item->fh, &s->vdev); item->itv = itv; item->type = s->type; filp->private_data = &item->fh; v4l2_fh_add(&item->fh); if (item->type == IVTV_ENC_STREAM_TYPE_RAD && v4l2_fh_is_singular_file(filp)) { if (!test_bit(IVTV_F_I_RADIO_USER, &itv->i_flags)) { if (atomic_read(&itv->capturing) > 0) { /* switching to radio while capture is in progress is not polite */ v4l2_fh_del(&item->fh); v4l2_fh_exit(&item->fh); kfree(item); return -EBUSY; } } /* Mark that the radio is being used. */ set_bit(IVTV_F_I_RADIO_USER, &itv->i_flags); /* We have the radio */ ivtv_mute(itv); /* Switch tuner to radio */ ivtv_call_all(itv, tuner, s_radio); /* Select the correct audio input (i.e. radio tuner) */ ivtv_audio_set_io(itv); if (itv->hw_flags & IVTV_HW_SAA711X) { ivtv_call_hw(itv, IVTV_HW_SAA711X, video, s_crystal_freq, SAA7115_FREQ_32_11_MHZ, SAA7115_FREQ_FL_APLL); } /* Done! Unmute and continue. */ ivtv_unmute(itv); } /* YUV or MPG Decoding Mode? */ if (s->type == IVTV_DEC_STREAM_TYPE_MPG) { clear_bit(IVTV_F_I_DEC_YUV, &itv->i_flags); } else if (s->type == IVTV_DEC_STREAM_TYPE_YUV) { set_bit(IVTV_F_I_DEC_YUV, &itv->i_flags); /* For yuv, we need to know the dma size before we start */ itv->dma_data_req_size = 1080 * ((itv->yuv_info.v4l2_src_h + 31) & ~31); itv->yuv_info.stream_size = 0; } return 0; } int ivtv_v4l2_open(struct file *filp) { struct video_device *vdev = video_devdata(filp); int res; if (mutex_lock_interruptible(vdev->lock)) return -ERESTARTSYS; res = ivtv_open(filp); mutex_unlock(vdev->lock); return res; } void ivtv_mute(struct ivtv *itv) { if (atomic_read(&itv->capturing)) ivtv_vapi(itv, CX2341X_ENC_MUTE_AUDIO, 1, 1); IVTV_DEBUG_INFO("Mute\n"); } void ivtv_unmute(struct ivtv *itv) { if (atomic_read(&itv->capturing)) { ivtv_msleep_timeout(100, 0); ivtv_vapi(itv, CX2341X_ENC_MISC, 1, 12); ivtv_vapi(itv, CX2341X_ENC_MUTE_AUDIO, 1, 0); } IVTV_DEBUG_INFO("Unmute\n"); }
/* * Applications call the VIDIOC_QBUF ioctl to enqueue an empty (capturing) or * filled (output) buffer in the drivers incoming queue. */ static int atomisp_qbuf(struct file *file, void *fh, struct v4l2_buffer *buf) { struct video_device *vdev = video_devdata(file); struct atomisp_device *isp = video_get_drvdata(vdev); struct atomisp_video_pipe *pipe = atomisp_to_video_pipe(vdev); unsigned long userptr = buf->m.userptr; struct videobuf_buffer *vb; struct videobuf_vmalloc_memory *vm_mem; struct sh_css_frame_info out_info, vf_info; struct sh_css_frame *handle = NULL; u32 length; u32 pgnr; int ret = 0; if ((!pipe->is_main) && (!atomisp_is_viewfinder_support(isp))) return -EINVAL; if (!buf || buf->index >= VIDEO_MAX_FRAME || !pipe->capq.bufs[buf->index]) { v4l2_err(&atomisp_dev, "Invalid index for qbuf.\n"); return -EINVAL; } v4l2_dbg(2, dbg_level, &atomisp_dev, "%s\n", __func__); /* * For userptr type frame, we convert user space address to physic * address and reprograme out page table properly */ if (buf->memory == V4L2_MEMORY_USERPTR) { vb = pipe->capq.bufs[buf->index]; vm_mem = vb->priv; if (!vm_mem) return -EINVAL; length = vb->bsize; pgnr = (length + (PAGE_SIZE - 1)) >> PAGE_SHIFT; /* We must stop to atomisp to remove the * race condition when updating the new userptr. */ if (buf->flags & V4L2_BUF_FLAG_BUFFER_INVALID) { isp->sw_contex.updating_uptr = true; return 0; } /* Check whether need to start the atomisp_work */ if (buf->flags & V4L2_BUF_FLAG_BUFFER_VALID) { isp->sw_contex.updating_uptr = false; wake_up_interruptible_sync(&pipe->capq.wait); return 0; } if ((vb->baddr == userptr) && (vm_mem->vaddr)) goto done; switch (isp->sw_contex.run_mode) { case CI_MODE_STILL_CAPTURE: if ((isp->main_format->out_sh_fmt != SH_CSS_FRAME_FORMAT_RAW) && sh_css_capture_get_viewfinder_frame_info(&vf_info)) goto error; if (sh_css_capture_get_output_frame_info(&out_info)) goto error; break; case CI_MODE_VIDEO: if (sh_css_video_get_viewfinder_frame_info(&vf_info)) goto error; if (sh_css_video_get_output_frame_info(&out_info)) goto error; break; case CI_MODE_PREVIEW: if (sh_css_preview_get_output_frame_info(&out_info)) goto error; break; } hrt_isp_css_mm_set_user_ptr(userptr, pgnr); if (!pipe->is_main) ret = sh_css_frame_allocate_from_info(&handle, &vf_info); else ret = sh_css_frame_allocate_from_info(&handle, &out_info); hrt_isp_css_mm_set_user_ptr(0, 0); if (ret != sh_css_success) { v4l2_err(&atomisp_dev, "Error to allocate frame\n"); return -ENOMEM; } if (vm_mem->vaddr) { mutex_lock(&pipe->capq.vb_lock); sh_css_frame_free(vm_mem->vaddr); vm_mem->vaddr = NULL; vb->state = VIDEOBUF_NEEDS_INIT; mutex_unlock(&pipe->capq.vb_lock); } vm_mem->vaddr = handle; buf->flags &= ~V4L2_BUF_FLAG_MAPPED; buf->flags |= V4L2_BUF_FLAG_QUEUED; buf->flags &= ~V4L2_BUF_FLAG_DONE; } else if (buf->memory == V4L2_MEMORY_MMAP) {
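/*
 * Userspace sketch (not part of the driver): the steady-state queue cycle the
 * comment above describes -- dequeue a filled capture buffer, consume it,
 * then give it back with VIDIOC_QBUF. It assumes streaming has already been
 * started and the buffers were set up with one of the REQBUFS flows shown
 * earlier; the function name is only illustrative.
 */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* fd must refer to a capture node that is already streaming */
int capture_one_frame(int fd)
{
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;

        /* blocks until the driver marks a buffer as done */
        if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
                return -1;

        /* ... buf.index identifies which mapped buffer holds the image ... */

        /* hand the now-empty buffer back to the driver's incoming queue */
        return ioctl(fd, VIDIOC_QBUF, &buf);
}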
/** * vpif_open : vpif open handler * @filep: file ptr * * It creates object of file handle structure and stores it in private_data * member of filepointer */ static int vpif_open(struct file *filep) { struct vpif_capture_config *config = vpif_dev->platform_data; struct video_device *vdev = video_devdata(filep); struct common_obj *common; struct video_obj *vid_ch; struct channel_obj *ch; struct vpif_fh *fh; int i, ret = 0; vpif_dbg(2, debug, "vpif_open\n"); ch = video_get_drvdata(vdev); vid_ch = &ch->video; common = &ch->common[VPIF_VIDEO_INDEX]; if (mutex_lock_interruptible(&common->lock)) return -ERESTARTSYS; if (NULL == ch->curr_subdev_info) { /** * search through the sub device to see a registered * sub device and make it as current sub device */ for (i = 0; i < config->subdev_count; i++) { if (vpif_obj.sd[i]) { /* the sub device is registered */ ch->curr_subdev_info = &config->subdev_info[i]; /* make first input as the current input */ vid_ch->input_idx = 0; break; } } if (i == config->subdev_count) { vpif_err("No sub device registered\n"); ret = -ENOENT; goto exit; } } /* Allocate memory for the file handle object */ fh = kmalloc(sizeof(struct vpif_fh), GFP_KERNEL); if (NULL == fh) { vpif_err("unable to allocate memory for file handle object\n"); ret = -ENOMEM; goto exit; } /* store pointer to fh in private_data member of filep */ filep->private_data = fh; fh->channel = ch; fh->initialized = 0; /* If decoder is not initialized. initialize it */ if (!ch->initialized) { fh->initialized = 1; ch->initialized = 1; memset(&(ch->vpifparams), 0, sizeof(struct vpif_params)); } /* Increment channel usrs counter */ ch->usrs++; /* Set io_allowed member to false */ fh->io_allowed[VPIF_VIDEO_INDEX] = 0; /* Initialize priority of this instance to default priority */ fh->prio = V4L2_PRIORITY_UNSET; v4l2_prio_open(&ch->prio, &fh->prio); exit: mutex_unlock(&common->lock); return ret; }
static int snd_tea575x_ioctl(struct inode *inode, struct file *file, unsigned int cmd, unsigned long data) { struct video_device *dev = video_devdata(file); struct snd_tea575x *tea = video_get_drvdata(dev); void __user *arg = (void __user *)data; switch(cmd) { case VIDIOCGCAP: { struct video_capability v; v.type = VID_TYPE_TUNER; v.channels = 1; v.audios = 1; /* No we don't do pictures */ v.maxwidth = 0; v.maxheight = 0; v.minwidth = 0; v.minheight = 0; strcpy(v.name, tea->tea5759 ? "TEA5759" : "TEA5757"); if (copy_to_user(arg,&v,sizeof(v))) return -EFAULT; return 0; } case VIDIOCGTUNER: { struct video_tuner v; if (copy_from_user(&v, arg,sizeof(v))!=0) return -EFAULT; if (v.tuner) /* Only 1 tuner */ return -EINVAL; v.rangelow = (87*16000); v.rangehigh = (108*16000); v.flags = VIDEO_TUNER_LOW; v.mode = VIDEO_MODE_AUTO; strcpy(v.name, "FM"); v.signal = 0xFFFF; if (copy_to_user(arg, &v, sizeof(v))) return -EFAULT; return 0; } case VIDIOCSTUNER: { struct video_tuner v; if(copy_from_user(&v, arg, sizeof(v))) return -EFAULT; if(v.tuner!=0) return -EINVAL; /* Only 1 tuner so no setting needed ! */ return 0; } case VIDIOCGFREQ: if(copy_to_user(arg, &tea->freq, sizeof(tea->freq))) return -EFAULT; return 0; case VIDIOCSFREQ: if(copy_from_user(&tea->freq, arg, sizeof(tea->freq))) return -EFAULT; snd_tea575x_set_freq(tea); return 0; case VIDIOCGAUDIO: { struct video_audio v; memset(&v, 0, sizeof(v)); strcpy(v.name, "Radio"); if(copy_to_user(arg,&v, sizeof(v))) return -EFAULT; return 0; } case VIDIOCSAUDIO: { struct video_audio v; if(copy_from_user(&v, arg, sizeof(v))) return -EFAULT; if(v.audio) return -EINVAL; return 0; } default: return -ENOIOCTLCMD; } }
static inline struct usb_usbvision *cd_to_usbvision(struct device *cd)
{
        struct video_device *vdev = container_of(cd, struct video_device, dev);

        return video_get_drvdata(vdev);
}