/*
 * Start MPEG-TS streaming on the empress transport-stream queue.
 * The requested buffer type is not inspected; videobuf does the
 * remaining state checks.
 */
static int empress_streamon(struct file *file, void *priv,
			    enum v4l2_buf_type type)
{
	struct saa7134_dev *dev;

	dev = file->private_data;
	return videobuf_streamon(&dev->empress_tsq);
}
static int vidioc_streamon(struct file *file, void *fh, enum v4l2_buf_type i) { struct omap24xxcam_fh *ofh = fh; struct omap24xxcam_device *cam = ofh->cam; int rval; mutex_lock(&cam->mutex); if (cam->streaming) { rval = -EBUSY; goto out; } rval = omap24xxcam_sensor_if_enable(cam); if (rval) { dev_dbg(cam->dev, "vidioc_int_g_ifparm failed\n"); goto out; } rval = videobuf_streamon(&ofh->vbq); if (!rval) { cam->streaming = file; sysfs_notify(&cam->dev->kobj, NULL, "streaming"); } out: mutex_unlock(&cam->mutex); return rval; }
/*
 * saa7146 streamon handler: acquire capture via video_begin(), then
 * start the videobuf queue matching the requested buffer type.
 * Only video capture and VBI capture are supported.
 */
static int vidioc_streamon(struct file *file, void *__fh,
			   enum v4l2_buf_type type)
{
	struct saa7146_fh *fh = __fh;
	int err;

	DEB_D("VIDIOC_STREAMON, type:%d\n", type);

	err = video_begin(fh);
	if (err)
		return err;

	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		return videobuf_streamon(&fh->video_q);
	case V4L2_BUF_TYPE_VBI_CAPTURE:
		return videobuf_streamon(&fh->vbi_q);
	default:
		return -EINVAL;
	}
}
/*
 * Start MPEG-TS streaming on the empress queue, reaching the device
 * through the saa7134 file handle passed in priv. The buffer type
 * argument is ignored; videobuf performs the state validation.
 */
static int empress_streamon(struct file *file, void *priv,
			    enum v4l2_buf_type type)
{
	struct saa7134_fh *fh = priv;

	return videobuf_streamon(&fh->dev->empress_tsq);
}
/*
 * solo6x10 streamon: only video capture buffers are supported; any
 * other type is rejected with -EINVAL before touching the queue.
 */
static int solo_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
	struct solo_filehandle *fh = priv;

	if (i == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return videobuf_streamon(&fh->vidq);

	return -EINVAL;
}
static int vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i) { struct unicorn_fh *fh = priv; struct unicorn_dev *dev = fh->dev; if (unlikely(fh->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)) { return -EINVAL; } if (unlikely(i != fh->type)) { return -EINVAL; } if (unlikely(!res_get(dev, fh, get_resource(fh, 0x01 << fh->channel)))) { return -EBUSY; } return videobuf_streamon(&fh->vidq); }
static int vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i) { int ret = 0; struct marucam_device *dev = priv; if (dev->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) { return -EINVAL; } if (i != dev->type) { return -EINVAL; } mutex_lock(&dev->mlock); if (dev->opstate != S_IDLE) { marucam_err("The device state is not S_IDLE\n"); mutex_unlock(&dev->mlock); return -EBUSY; } iowrite32(1, dev->mmregs + MARUCAM_START_PREVIEW); ret = (int)ioread32(dev->mmregs + MARUCAM_START_PREVIEW); if (ret) { marucam_err("Failed to start preview\n"); mutex_unlock(&dev->mlock); return -ret; } INIT_LIST_HEAD(&dev->active); ret = videobuf_streamon(&dev->vb_vidq); if (ret) { marucam_err("Failed to stream on the video buffer: %d\n", ret); mutex_unlock(&dev->mlock); return ret; } dev->opstate = S_RUNNING; mutex_unlock(&dev->mlock); return ret; }
int saa7146_video_do_ioctl(struct inode *inode, struct file *file, unsigned int cmd, void *arg) { struct saa7146_fh *fh = file->private_data; struct saa7146_dev *dev = fh->dev; struct saa7146_vv *vv = dev->vv_data; int err = 0, result = 0, ee = 0; struct saa7146_use_ops *ops; struct videobuf_queue *q; /* check if extension handles the command */ for(ee = 0; dev->ext_vv_data->ioctls[ee].flags != 0; ee++) { if( cmd == dev->ext_vv_data->ioctls[ee].cmd ) break; } if( 0 != (dev->ext_vv_data->ioctls[ee].flags & SAA7146_EXCLUSIVE) ) { DEB_D(("extension handles ioctl exclusive.\n")); result = dev->ext_vv_data->ioctl(fh, cmd, arg); return result; } if( 0 != (dev->ext_vv_data->ioctls[ee].flags & SAA7146_BEFORE) ) { DEB_D(("extension handles ioctl before.\n")); result = dev->ext_vv_data->ioctl(fh, cmd, arg); if( -EAGAIN != result ) { return result; } } /* fixme: add handle "after" case (is it still needed?) */ switch (fh->type) { case V4L2_BUF_TYPE_VIDEO_CAPTURE: { ops = &saa7146_video_uops; q = &fh->video_q; break; } case V4L2_BUF_TYPE_VBI_CAPTURE: { ops = &saa7146_vbi_uops; q = &fh->vbi_q; break; } default: BUG(); return 0; } switch (cmd) { case VIDIOC_QUERYCAP: { struct v4l2_capability *cap = arg; memset(cap,0,sizeof(*cap)); DEB_EE(("VIDIOC_QUERYCAP\n")); strcpy(cap->driver, "saa7146 v4l2"); strlcpy(cap->card, dev->ext->name, sizeof(cap->card)); sprintf(cap->bus_info,"PCI:%s", pci_name(dev->pci)); cap->version = SAA7146_VERSION_CODE; cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OVERLAY | V4L2_CAP_READWRITE | V4L2_CAP_STREAMING; cap->capabilities |= dev->ext_vv_data->capabilities; return 0; } case VIDIOC_G_FBUF: { struct v4l2_framebuffer *fb = arg; DEB_EE(("VIDIOC_G_FBUF\n")); *fb = vv->ov_fb; fb->capability = V4L2_FBUF_CAP_LIST_CLIPPING; return 0; } case VIDIOC_S_FBUF: { struct v4l2_framebuffer *fb = arg; struct saa7146_format *fmt; DEB_EE(("VIDIOC_S_FBUF\n")); if(!capable(CAP_SYS_ADMIN) && !capable(CAP_SYS_RAWIO)) return -EPERM; /* check args */ fmt = 
format_by_fourcc(dev,fb->fmt.pixelformat); if (NULL == fmt) { return -EINVAL; } /* planar formats are not allowed for overlay video, clipping and video dma would clash */ if (0 != (fmt->flags & FORMAT_IS_PLANAR)) { DEB_S(("planar pixelformat '%4.4s' not allowed for overlay\n",(char *)&fmt->pixelformat)); } /* check if overlay is running */ if (IS_OVERLAY_ACTIVE(fh) != 0) { if (vv->video_fh != fh) { DEB_D(("refusing to change framebuffer informations while overlay is active in another open.\n")); return -EBUSY; } } down(&dev->lock); /* ok, accept it */ vv->ov_fb = *fb; vv->ov_fmt = fmt; if (0 == vv->ov_fb.fmt.bytesperline) vv->ov_fb.fmt.bytesperline = vv->ov_fb.fmt.width*fmt->depth/8; up(&dev->lock); return 0; } case VIDIOC_ENUM_FMT: { struct v4l2_fmtdesc *f = arg; int index; switch (f->type) { case V4L2_BUF_TYPE_VIDEO_CAPTURE: case V4L2_BUF_TYPE_VIDEO_OVERLAY: { index = f->index; if (index < 0 || index >= NUM_FORMATS) { return -EINVAL; } memset(f,0,sizeof(*f)); f->index = index; strlcpy(f->description,formats[index].name,sizeof(f->description)); f->pixelformat = formats[index].pixelformat; break; } default: return -EINVAL; } DEB_EE(("VIDIOC_ENUM_FMT: type:%d, index:%d\n",f->type,f->index)); return 0; } case VIDIOC_QUERYCTRL: { const struct v4l2_queryctrl *ctrl; struct v4l2_queryctrl *c = arg; if ((c->id < V4L2_CID_BASE || c->id >= V4L2_CID_LASTP1) && (c->id < V4L2_CID_PRIVATE_BASE || c->id >= V4L2_CID_PRIVATE_LASTP1)) return -EINVAL; ctrl = ctrl_by_id(c->id); if( NULL == ctrl ) { return -EINVAL; /* c->flags = V4L2_CTRL_FLAG_DISABLED; return 0; */ } DEB_EE(("VIDIOC_QUERYCTRL: id:%d\n",c->id)); *c = *ctrl; return 0; } case VIDIOC_G_CTRL: { DEB_EE(("VIDIOC_G_CTRL\n")); return get_control(fh,arg); } case VIDIOC_S_CTRL: { DEB_EE(("VIDIOC_S_CTRL\n")); err = set_control(fh,arg); return err; } case VIDIOC_G_PARM: { struct v4l2_streamparm *parm = arg; if( parm->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ) { return -EINVAL; } memset(&parm->parm.capture,0,sizeof(struct 
v4l2_captureparm)); parm->parm.capture.readbuffers = 1; // fixme: only for PAL! parm->parm.capture.timeperframe.numerator = 1; parm->parm.capture.timeperframe.denominator = 25; return 0; } case VIDIOC_G_FMT: { struct v4l2_format *f = arg; DEB_EE(("VIDIOC_G_FMT\n")); return g_fmt(fh,f); } case VIDIOC_S_FMT: { struct v4l2_format *f = arg; DEB_EE(("VIDIOC_S_FMT\n")); return s_fmt(fh,f); } case VIDIOC_TRY_FMT: { struct v4l2_format *f = arg; DEB_EE(("VIDIOC_TRY_FMT\n")); return try_fmt(fh,f); } case VIDIOC_G_STD: { v4l2_std_id *id = arg; DEB_EE(("VIDIOC_G_STD\n")); *id = vv->standard->id; return 0; } /* the saa7146 supfhrts (used in conjunction with the saa7111a for example) PAL / NTSC / SECAM. if your hardware does not (or does more) -- override this function in your extension */ case VIDIOC_ENUMSTD: { struct v4l2_standard *e = arg; if (e->index < 0 ) return -EINVAL; if( e->index < dev->ext_vv_data->num_stds ) { DEB_EE(("VIDIOC_ENUMSTD: index:%d\n",e->index)); v4l2_video_std_construct(e, dev->ext_vv_data->stds[e->index].id, dev->ext_vv_data->stds[e->index].name); return 0; } return -EINVAL; } case VIDIOC_S_STD: { v4l2_std_id *id = arg; int found = 0; int i, err; DEB_EE(("VIDIOC_S_STD\n")); if ((vv->video_status & STATUS_CAPTURE) == STATUS_CAPTURE) { DEB_D(("cannot change video standard while streaming capture is active\n")); return -EBUSY; } if ((vv->video_status & STATUS_OVERLAY) != 0) { vv->ov_suspend = vv->video_fh; err = saa7146_stop_preview(vv->video_fh); /* side effect: video_status is now 0, video_fh is NULL */ if (0 != err) { DEB_D(("suspending video failed. 
aborting\n")); return err; } } down(&dev->lock); for(i = 0; i < dev->ext_vv_data->num_stds; i++) if (*id & dev->ext_vv_data->stds[i].id) break; if (i != dev->ext_vv_data->num_stds) { vv->standard = &dev->ext_vv_data->stds[i]; if( NULL != dev->ext_vv_data->std_callback ) dev->ext_vv_data->std_callback(dev, vv->standard); found = 1; } up(&dev->lock); if (vv->ov_suspend != NULL) { saa7146_start_preview(vv->ov_suspend); vv->ov_suspend = NULL; } if( 0 == found ) { DEB_EE(("VIDIOC_S_STD: standard not found.\n")); return -EINVAL; } DEB_EE(("VIDIOC_S_STD: set to standard to '%s'\n",vv->standard->name)); return 0; } case VIDIOC_OVERLAY: { int on = *(int *)arg; int err = 0; DEB_D(("VIDIOC_OVERLAY on:%d\n",on)); if (on != 0) { err = saa7146_start_preview(fh); } else { err = saa7146_stop_preview(fh); } return err; } case VIDIOC_REQBUFS: { struct v4l2_requestbuffers *req = arg; DEB_D(("VIDIOC_REQBUFS, type:%d\n",req->type)); return videobuf_reqbufs(q,req); } case VIDIOC_QUERYBUF: { struct v4l2_buffer *buf = arg; DEB_D(("VIDIOC_QUERYBUF, type:%d, offset:%d\n",buf->type,buf->m.offset)); return videobuf_querybuf(q,buf); } case VIDIOC_QBUF: { struct v4l2_buffer *buf = arg; int ret = 0; ret = videobuf_qbuf(q,buf); DEB_D(("VIDIOC_QBUF: ret:%d, index:%d\n",ret,buf->index)); return ret; } case VIDIOC_DQBUF: { struct v4l2_buffer *buf = arg; int ret = 0; ret = videobuf_dqbuf(q,buf,file->f_flags & O_NONBLOCK); DEB_D(("VIDIOC_DQBUF: ret:%d, index:%d\n",ret,buf->index)); return ret; } case VIDIOC_STREAMON: { int *type = arg; DEB_D(("VIDIOC_STREAMON, type:%d\n",*type)); err = video_begin(fh); if( 0 != err) { return err; } err = videobuf_streamon(q); return err; } case VIDIOC_STREAMOFF: { int *type = arg; DEB_D(("VIDIOC_STREAMOFF, type:%d\n",*type)); /* ugly: we need to copy some checks from video_end(), because videobuf_streamoff() relies on the capture running. 
check and fix this */ if ((vv->video_status & STATUS_CAPTURE) != STATUS_CAPTURE) { DEB_S(("not capturing.\n")); return 0; } if (vv->video_fh != fh) { DEB_S(("capturing, but in another open.\n")); return -EBUSY; } err = videobuf_streamoff(q); if (0 != err) { DEB_D(("warning: videobuf_streamoff() failed.\n")); video_end(fh, file); } else { err = video_end(fh, file); } return err; } case VIDIOCGMBUF: { struct video_mbuf *mbuf = arg; struct videobuf_queue *q; int i; /* fixme: number of capture buffers and sizes for v4l apps */ int gbuffers = 2; int gbufsize = 768*576*4; DEB_D(("VIDIOCGMBUF \n")); q = &fh->video_q; down(&q->lock); err = videobuf_mmap_setup(q,gbuffers,gbufsize, V4L2_MEMORY_MMAP); if (err < 0) { up(&q->lock); return err; } memset(mbuf,0,sizeof(*mbuf)); mbuf->frames = gbuffers; mbuf->size = gbuffers * gbufsize; for (i = 0; i < gbuffers; i++) mbuf->offsets[i] = i * gbufsize; up(&q->lock); return 0; } default: return v4l_compat_translate_ioctl(inode,file,cmd,arg, saa7146_video_do_ioctl); } return 0; }
/** * vpif_streamon() - streamon handler * @file: file ptr * @priv: file handle * @buftype: v4l2 buffer type */ static int vpif_streamon(struct file *file, void *priv, enum v4l2_buf_type buftype) { struct vpif_capture_config *config = vpif_dev->platform_data; struct vpif_fh *fh = priv; struct channel_obj *ch = fh->channel; struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX]; struct channel_obj *oth_ch = vpif_obj.dev[!ch->channel_id]; struct vpif_params *vpif; unsigned long addr = 0; int ret = 0; vpif_dbg(2, debug, "vpif_streamon\n"); vpif = &ch->vpifparams; if (buftype != V4L2_BUF_TYPE_VIDEO_CAPTURE) { vpif_dbg(1, debug, "buffer type not supported\n"); return -EINVAL; } /* If file handle is not allowed IO, return error */ if (!fh->io_allowed[VPIF_VIDEO_INDEX]) { vpif_dbg(1, debug, "io not allowed\n"); return -EACCES; } /* If Streaming is already started, return error */ if (common->started) { vpif_dbg(1, debug, "channel->started\n"); return -EBUSY; } if ((ch->channel_id == VPIF_CHANNEL0_VIDEO && oth_ch->common[VPIF_VIDEO_INDEX].started && vpif->std_info.ycmux_mode == 0) || ((ch->channel_id == VPIF_CHANNEL1_VIDEO) && (2 == oth_ch->common[VPIF_VIDEO_INDEX].started))) { vpif_dbg(1, debug, "other channel is being used\n"); return -EBUSY; } ret = vpif_check_format(ch, &common->fmt.fmt.pix, 0); if (ret) return ret; /* Enable streamon on the sub device */ ret = v4l2_subdev_call(vpif_obj.sd[ch->curr_sd_index], video, s_stream, 1); if (ret && (ret != -ENOIOCTLCMD)) { vpif_dbg(1, debug, "stream on failed in subdev\n"); return ret; } /* Call videobuf_streamon to start streaming in videobuf */ ret = videobuf_streamon(&common->buffer_queue); if (ret) { vpif_dbg(1, debug, "videobuf_streamon\n"); return ret; } if (mutex_lock_interruptible(&common->lock)) { ret = -ERESTARTSYS; goto streamoff_exit; } /* If buffer queue is empty, return error */ if (list_empty(&common->dma_queue)) { vpif_dbg(1, debug, "buffer queue is empty\n"); ret = -EIO; goto exit; } /* Get the next frame 
from the buffer queue */ common->cur_frm = list_entry(common->dma_queue.next, struct videobuf_buffer, queue); common->next_frm = common->cur_frm; /* Remove buffer from the buffer queue */ list_del(&common->cur_frm->queue); /* Mark state of the current frame to active */ common->cur_frm->state = VIDEOBUF_ACTIVE; /* Initialize field_id and started member */ ch->field_id = 0; common->started = 1; if (V4L2_MEMORY_USERPTR == common->memory) addr = common->cur_frm->boff; else addr = videobuf_to_dma_contig(common->cur_frm); /* Calculate the offset for Y and C data in the buffer */ vpif_calculate_offsets(ch); if ((vpif->std_info.frm_fmt && ((common->fmt.fmt.pix.field != V4L2_FIELD_NONE) && (common->fmt.fmt.pix.field != V4L2_FIELD_ANY))) || (!vpif->std_info.frm_fmt && (common->fmt.fmt.pix.field == V4L2_FIELD_NONE))) { vpif_dbg(1, debug, "conflict in field format and std format\n"); ret = -EINVAL; goto exit; } /* configure 1 or 2 channel mode */ ret = config->setup_input_channel_mode(vpif->std_info.ycmux_mode); if (ret < 0) { vpif_dbg(1, debug, "can't set vpif channel mode\n"); goto exit; } /* Call vpif_set_params function to set the parameters and addresses */ ret = vpif_set_video_params(vpif, ch->channel_id); if (ret < 0) { vpif_dbg(1, debug, "can't set video params\n"); goto exit; } common->started = ret; vpif_config_addr(ch, ret); common->set_addr(addr + common->ytop_off, addr + common->ybtm_off, addr + common->ctop_off, addr + common->cbtm_off); /** * Set interrupt for both the fields in VPIF Register enable channel in * VPIF register */ if ((VPIF_CHANNEL0_VIDEO == ch->channel_id)) { channel0_intr_assert(); channel0_intr_enable(1); enable_channel0(1); } if ((VPIF_CHANNEL1_VIDEO == ch->channel_id) || (common->started == 2)) { channel1_intr_assert(); channel1_intr_enable(1); enable_channel1(1); } channel_first_int[VPIF_VIDEO_INDEX][ch->channel_id] = 1; mutex_unlock(&common->lock); return ret; exit: mutex_unlock(&common->lock); streamoff_exit: ret = 
videobuf_streamoff(&common->buffer_queue); return ret; }
/*
 * camera_core_do_ioctl() - V4L2 ioctl dispatcher for the camera core
 * (pre-video_ioctl2 style). Exactly one video input (the camera) is
 * exposed; formats and controls are delegated to the sensor driver
 * via cam->cam_sensor, clocks and DMA to cam->cam_hardware.
 */
static int camera_core_do_ioctl(struct inode *inode, struct file *file,
				unsigned int cmd, void *arg)
{
	struct camera_fh *fh = file->private_data;
	struct camera_device *cam = fh->cam;
	int err;

	switch (cmd) {
	case VIDIOC_ENUMINPUT:
	{
		/* default handler assumes 1 video input (the camera) */
		struct v4l2_input *input = (struct v4l2_input *)arg;
		int index = input->index;

		memset(input, 0, sizeof(*input));
		input->index = index;

		if (index > 0)
			return -EINVAL;

		strlcpy(input->name, "camera", sizeof(input->name));
		input->type = V4L2_INPUT_TYPE_CAMERA;

		return 0;
	}
	case VIDIOC_G_INPUT:
	{
		/* the single camera input is always selected */
		unsigned int *input = arg;
		*input = 0;

		return 0;
	}
	case VIDIOC_S_INPUT:
	{
		/* only input 0 exists; anything else is invalid */
		unsigned int *input = arg;

		if (*input > 0)
			return -EINVAL;

		return 0;
	}
	case VIDIOC_ENUM_FMT:
	{
		/* format enumeration is provided by the sensor driver */
		struct v4l2_fmtdesc *fmt = arg;
		return cam->cam_sensor->enum_pixformat(fmt, cam->sensor_data);
	}
	case VIDIOC_TRY_FMT:
	{
		/* sensor driver adjusts the format to what it supports */
		struct v4l2_format *fmt = arg;
		return cam->cam_sensor->try_format(&fmt->fmt.pix, cam->sensor_data);
	}
	case VIDIOC_G_FMT:
	{
		struct v4l2_format *fmt = arg;
		/* get the current format */
		memset(&fmt->fmt.pix, 0, sizeof (fmt->fmt.pix));
		fmt->fmt.pix = cam->pix;
		return 0;
	}
	case VIDIOC_S_FMT:
	{
		struct v4l2_format *fmt = arg;
		/* NOTE(review): temp_sizeimage is saved but never used
		 * afterwards — looks like leftover code; confirm before
		 * removing. */
		unsigned int temp_sizeimage = 0;

		temp_sizeimage = cam->pix.sizeimage;
		/* let the sensor clamp the requested format, then adopt it */
		cam->cam_sensor->try_format(&fmt->fmt.pix, cam->sensor_data);
		cam->pix = fmt->fmt.pix;

		/* recompute the external clock for the new format and
		 * reprogram both the clock hardware and the sensor */
		cam->xclk = cam->cam_sensor->calc_xclk(&cam->pix,
			&cam->nominal_timeperframe, cam->sensor_data);
		cam->cparm.timeperframe = cam->nominal_timeperframe;
		cam->xclk = cam->cam_hardware->set_xclk(cam->xclk, cam->hardware_data);
		return cam->cam_sensor->configure(&cam->pix, cam->xclk,
			&cam->cparm.timeperframe, cam->sensor_data);
	}
	case VIDIOC_QUERYCTRL:
	{
		/* controls live in the sensor driver */
		struct v4l2_queryctrl *qc = arg;
		return cam->cam_sensor->query_control(qc, cam->sensor_data);
	}
	case VIDIOC_G_CTRL:
	{
		struct v4l2_control *vc = arg;
		return cam->cam_sensor->get_control(vc, cam->sensor_data);
	}
	case VIDIOC_S_CTRL:
	{
		struct v4l2_control *vc = arg;
		return cam->cam_sensor->set_control(vc, cam->sensor_data);
	}
	case VIDIOC_QUERYCAP:
	{
		struct v4l2_capability *cap = (struct v4l2_capability *) arg;

		memset(cap, 0, sizeof(*cap));
		strlcpy(cap->driver, CAM_NAME, sizeof(cap->driver));
		strlcpy(cap->card, cam->vfd->name, sizeof(cap->card));
		cap->bus_info[0] = '\0';
		cap->version = KERNEL_VERSION(0, 0, 0);
		cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OVERLAY | V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
		return 0;
	}
	case VIDIOC_G_FBUF:
	/* Get the frame buffer parameters */
	{
		struct v4l2_framebuffer *fbuf = (struct v4l2_framebuffer *) arg;

		/* img_lock guards the shared framebuffer description */
		spin_lock(&cam->img_lock);
		*fbuf = cam->fbuf;
		spin_unlock(&cam->img_lock);
		return 0;
	}
	case VIDIOC_S_FBUF:
	/* set the frame buffer parameters */
	{
		struct v4l2_framebuffer *fbuf = (struct v4l2_framebuffer *) arg;

		spin_lock(&cam->img_lock);
		/* refuse to change the framebuffer while previewing */
		if (cam->previewing) {
			spin_unlock(&cam->img_lock);
			return -EBUSY;
		}
		cam->fbuf.base = fbuf->base;
		cam->fbuf.fmt = fbuf->fmt;
		spin_unlock(&cam->img_lock);
		return 0;
	}
	case VIDIOC_OVERLAY:
	{
		int enable = *((int *) arg);

		/* the capture format and the display format must match;
		 * return failure if they are different */
		if (cam->pix.pixelformat != cam->fbuf.fmt.pixelformat) {
			return -EINVAL;
		}

		/* if the camera image size is greater than the LCD size,
		 * return failure.
		 * NOTE(review): width is compared against fbuf height and
		 * height against fbuf width — possibly intentional for a
		 * rotated display, but looks swapped; confirm. */
		if ((cam->pix.width > cam->fbuf.fmt.height) || (cam->pix.height > cam->fbuf.fmt.width)) {
			return -EINVAL;
		}

		if (!cam->previewing && enable) {
			cam->previewing = fh;
			cam->overlay_cnt = 0;
			camera_core_start_overlay(cam);
		} else if (!enable) {
			cam->previewing = NULL;
		}

		return 0;
	}
	case VIDIOC_REQBUFS:
		return videobuf_reqbufs(&fh->vbq, arg);
	case VIDIOC_QUERYBUF:
		return videobuf_querybuf(&fh->vbq, arg);
	case VIDIOC_QBUF:
		return videobuf_qbuf(&fh->vbq, arg);
	case VIDIOC_DQBUF:
		return videobuf_dqbuf(&fh->vbq, arg, file->f_flags & O_NONBLOCK);
	case VIDIOC_STREAMON:
	{
		/* claim the streaming role for this file handle */
		spin_lock(&cam->img_lock);

		if (cam->streaming || cam->reading) {
			spin_unlock(&cam->img_lock);
			return -EBUSY;
		} else {
			cam->streaming = fh;
			/* FIXME: start camera interface */
		}

		spin_unlock(&cam->img_lock);

		/* Allocate dummy buffer. We will use this buffer then no
		 * videobufs available to prevent FIFO_FULL. Use the same
		 * field of camera_device structure as in 'read()' function
		 * (capture_base_phys and capture_base) */
		if (!cam->capture_base) {
			cam->capture_base = (unsigned long)dma_alloc_coherent(NULL,
					cam->pix.sizeimage,
					(dma_addr_t *) &cam->capture_base_phys,
					GFP_KERNEL | GFP_DMA);
		}
		if (!cam->capture_base) {
			printk(KERN_ERR CAM_NAME ": cannot allocate dummy buffer\n");
			/* roll back the streaming claim on allocation failure */
			spin_lock(&cam->img_lock);
			cam->streaming = NULL;
			spin_unlock(&cam->img_lock);
			return -ENOMEM;
		}

		return videobuf_streamon(&fh->vbq);
	}
	case VIDIOC_STREAMOFF:
	{
		struct videobuf_queue *q = &fh->vbq;
		int i;

		/* video-buf lib has trouble to turn off streaming while
		 * any buffer is still in QUEUED state. Let's wait until
		 * all queued buffers are filled.
		 * NOTE(review): this is a busy-wait on schedule() with no
		 * timeout — it can spin indefinitely if a buffer never
		 * completes; confirm this is acceptable. */
		for (i = 0; i < VIDEO_MAX_FRAME; i++) {
			if (NULL == q->bufs[i])
				continue;
			while (q->bufs[i]->state == STATE_QUEUED) {
				schedule();
			}
		}

		err = videobuf_streamoff(&fh->vbq);
		if (err < 0)
			return err;

		/* release the streaming role if this handle held it */
		spin_lock(&cam->img_lock);
		if (cam->streaming == fh) {
			cam->streaming = NULL;
		}
		spin_unlock(&cam->img_lock);

		/* Wait for dma finished and remove dummy buffer */
		cam->cam_hardware->finish_dma(cam->hardware_data);

		if (cam->capture_base) {
			dma_free_coherent(NULL, cam->pix.sizeimage,
					(void *)cam->capture_base,
					cam->capture_base_phys);
			cam->capture_base = 0;
			cam->capture_base_phys = 0;
		}
		return 0;
	}
	case VIDIOC_ENUMSTD:
	case VIDIOC_G_STD:
	case VIDIOC_S_STD:
	case VIDIOC_QUERYSTD:
	{
		/* Digital cameras don't have an analog video standard,
		 * so we don't need to implement these ioctls.
		 */
		return -EINVAL;
	}
	case VIDIOC_G_AUDIO:
	case VIDIOC_S_AUDIO:
	case VIDIOC_G_AUDOUT:
	case VIDIOC_S_AUDOUT:
	{
		/* we don't have any audio inputs or outputs */
		return -EINVAL;
	}
	case VIDIOC_G_JPEGCOMP:
	case VIDIOC_S_JPEGCOMP:
	{
		/* JPEG compression is not supported */
		return -EINVAL;
	}
	case VIDIOC_G_TUNER:
	case VIDIOC_S_TUNER:
	case VIDIOC_G_MODULATOR:
	case VIDIOC_S_MODULATOR:
	case VIDIOC_G_FREQUENCY:
	case VIDIOC_S_FREQUENCY:
	{
		/* we don't have a tuner or modulator */
		return -EINVAL;
	}
	case VIDIOC_ENUMOUTPUT:
	case VIDIOC_G_OUTPUT:
	case VIDIOC_S_OUTPUT:
	{
		/* we don't have any video outputs */
		return -EINVAL;
	}
	default:
	{
		/* unrecognized ioctl */
		return -ENOIOCTLCMD;
	}
	}
	return 0;
}
/*
 * Start streaming on the global myvivi videobuf queue. The buffer
 * type argument is not checked here; videobuf validates queue state.
 */
static int myvivi_vidioc_streamon(struct file *file, void *priv,
				  enum v4l2_buf_type i)
{
	struct videobuf_queue *q = &myvivi_vb_vidquene;

	return videobuf_streamon(q);
}
/*
 * vpif_streamon() - streamon handler for the VPIF display (video
 * output) side. Validates ownership and channel availability, starts
 * the videobuf queue, primes the first frame and programs the VPIF
 * hardware (clock, params, DMA addresses, channel interrupts).
 *
 * NOTE(review): error paths taken after videobuf_streamon() succeeds
 * (empty queue, field-format conflict, clock/params failure) return
 * without calling videobuf_streamoff(), leaving the queue streaming —
 * confirm against the capture variant, which tears down via its exit
 * label.
 */
static int vpif_streamon(struct file *file, void *priv,
				enum v4l2_buf_type buftype)
{
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	struct channel_obj *oth_ch = vpif_obj.dev[!ch->channel_id];
	struct vpif_params *vpif = &ch->vpifparams;
	struct vpif_display_config *vpif_config_data =
					vpif_dev->platform_data;
	unsigned long addr = 0;
	int ret = 0;

	/* only video output buffers are handled here */
	if (buftype != V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		vpif_err("buffer type not supported\n");
		return -EINVAL;
	}

	/* the file handle must own IO on this channel */
	if (!fh->io_allowed[VPIF_VIDEO_INDEX]) {
		vpif_err("fh->io_allowed\n");
		return -EACCES;
	}

	/* If Streaming is already started, return error */
	if (common->started) {
		vpif_err("channel->started\n");
		return -EBUSY;
	}

	/* the sibling channel must not be active in non-ycmux mode */
	if ((ch->channel_id == VPIF_CHANNEL2_VIDEO
		&& oth_ch->common[VPIF_VIDEO_INDEX].started
		&& ch->vpifparams.std_info.ycmux_mode == 0)
		|| ((ch->channel_id == VPIF_CHANNEL3_VIDEO)
		&& (2 == oth_ch->common[VPIF_VIDEO_INDEX].started))) {
		vpif_err("other channel is using\n");
		return -EBUSY;
	}

	ret = vpif_check_format(ch, &common->fmt.fmt.pix);
	if (ret < 0)
		return ret;

	/* Call videobuf_streamon to start streaming in videobuf */
	ret = videobuf_streamon(&common->buffer_queue);
	if (ret < 0) {
		vpif_err("videobuf_streamon\n");
		return ret;
	}

	/* If buffer queue is empty, return error */
	if (list_empty(&common->dma_queue)) {
		vpif_err("buffer queue is empty\n");
		return -EIO;
	}

	/* Get the next frame from the buffer queue */
	common->next_frm = common->cur_frm =
		list_entry(common->dma_queue.next,
			   struct videobuf_buffer, queue);
	list_del(&common->cur_frm->queue);
	/* Mark state of the current frame to active */
	common->cur_frm->state = VIDEOBUF_ACTIVE;

	/* Initialize field_id and started member */
	ch->field_id = 0;
	common->started = 1;

	/* always true here given the type check above */
	if (buftype == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		addr = common->cur_frm->boff;

		/* Calculate the offset for Y and C data in the buffer */
		vpif_calculate_offsets(ch);

		/* interlaced standards need an interlaced field setting,
		 * progressive standards a progressive one */
		if ((ch->vpifparams.std_info.frm_fmt
			&& ((common->fmt.fmt.pix.field != V4L2_FIELD_NONE)
			&& (common->fmt.fmt.pix.field != V4L2_FIELD_ANY)))
			|| (!ch->vpifparams.std_info.frm_fmt
			&& (common->fmt.fmt.pix.field == V4L2_FIELD_NONE))) {
			vpif_err("conflict in field format and std format\n");
			return -EINVAL;
		}

		/* clock settings */
		ret = vpif_config_data->set_clock(
			ch->vpifparams.std_info.ycmux_mode,
			ch->vpifparams.std_info.hd_sd);
		if (ret < 0) {
			vpif_err("can't set clock\n");
			return ret;
		}

		/* set the parameters and addresses */
		ret = vpif_set_video_params(vpif, ch->channel_id + 2);
		if (ret < 0)
			return ret;

		/* vpif_set_video_params returns the channel count (1 or 2) */
		common->started = ret;
		vpif_config_addr(ch, ret);
		common->set_addr((addr + common->ytop_off),
				 (addr + common->ybtm_off),
				 (addr + common->ctop_off),
				 (addr + common->cbtm_off));

		/* Set interrupt for both the fields in VPIF
		   Register enable channel in VPIF register */
		if (VPIF_CHANNEL2_VIDEO == ch->channel_id) {
			channel2_intr_assert();
			channel2_intr_enable(1);
			enable_channel2(1);
		}
		/* channel 3 is also enabled in 2-channel mode */
		if ((VPIF_CHANNEL3_VIDEO == ch->channel_id)
			|| (common->started == 2)) {
			channel3_intr_assert();
			channel3_intr_enable(1);
			enable_channel3(1);
		}
		channel_first_int[VPIF_VIDEO_INDEX][ch->channel_id] = 1;
	}
	return ret;
}
/*
 * vpif_streamon() - streamon handler for the VPIF capture side
 * (variant without the common->lock mutex).
 *
 * Validates ownership and channel availability, starts the capture
 * subdevice and the videobuf queue, primes the first frame and
 * programs the VPIF hardware. On any failure after videobuf_streamon()
 * succeeded, the exit path tears the queue back down with
 * videobuf_streamoff() while preserving the original error code.
 */
static int vpif_streamon(struct file *file, void *priv,
				enum v4l2_buf_type buftype)
{
	struct vpif_capture_config *config = vpif_dev->platform_data;
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	struct channel_obj *oth_ch = vpif_obj.dev[!ch->channel_id];
	struct vpif_params *vpif;
	unsigned long addr = 0;
	int ret = 0;

	vpif_dbg(2, debug, "vpif_streamon\n");

	vpif = &ch->vpifparams;

	/* only video capture buffers are handled here */
	if (buftype != V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		vpif_dbg(1, debug, "buffer type not supported\n");
		return -EINVAL;
	}

	/* the file handle must own IO on this channel */
	if (!fh->io_allowed[VPIF_VIDEO_INDEX]) {
		vpif_dbg(1, debug, "io not allowed\n");
		return -EACCES;
	}

	/* refuse a second streamon on an active channel */
	if (common->started) {
		vpif_dbg(1, debug, "channel->started\n");
		return -EBUSY;
	}

	/* the sibling channel must not be active in non-ycmux mode */
	if ((ch->channel_id == VPIF_CHANNEL0_VIDEO &&
	    oth_ch->common[VPIF_VIDEO_INDEX].started &&
	    vpif->std_info.ycmux_mode == 0) ||
	    ((ch->channel_id == VPIF_CHANNEL1_VIDEO) &&
	    (2 == oth_ch->common[VPIF_VIDEO_INDEX].started))) {
		vpif_dbg(1, debug, "other channel is being used\n");
		return -EBUSY;
	}

	ret = vpif_check_format(ch, &common->fmt.fmt.pix, 0);
	if (ret)
		return ret;

	/* start streaming on the capture subdevice first */
	ret = v4l2_subdev_call(vpif_obj.sd[ch->curr_sd_index], video,
			       s_stream, 1);
	if (ret && (ret != -ENOIOCTLCMD)) {
		vpif_dbg(1, debug, "stream on failed in subdev\n");
		return ret;
	}

	ret = videobuf_streamon(&common->buffer_queue);
	if (ret) {
		vpif_dbg(1, debug, "videobuf_streamon\n");
		return ret;
	}

	/* at least one buffer must already be queued */
	if (list_empty(&common->dma_queue)) {
		vpif_dbg(1, debug, "buffer queue is empty\n");
		ret = -EIO;
		goto exit;
	}

	/* take the first queued buffer as the current/next frame */
	common->cur_frm = list_entry(common->dma_queue.next,
				     struct videobuf_buffer, queue);
	common->next_frm = common->cur_frm;
	list_del(&common->cur_frm->queue);
	common->cur_frm->state = VIDEOBUF_ACTIVE;
	ch->field_id = 0;
	common->started = 1;

	/* DMA address depends on the memory type of the queue */
	if (V4L2_MEMORY_USERPTR == common->memory)
		addr = common->cur_frm->boff;
	else
		addr = videobuf_to_dma_contig(common->cur_frm);

	/* Calculate the offset for Y and C data in the buffer */
	vpif_calculate_offsets(ch);

	/* interlaced standards need an interlaced field setting,
	 * progressive standards a progressive one */
	if ((vpif->std_info.frm_fmt &&
	    ((common->fmt.fmt.pix.field != V4L2_FIELD_NONE) &&
	    (common->fmt.fmt.pix.field != V4L2_FIELD_ANY))) ||
	    (!vpif->std_info.frm_fmt &&
	    (common->fmt.fmt.pix.field == V4L2_FIELD_NONE))) {
		vpif_dbg(1, debug, "conflict in field format and std format\n");
		ret = -EINVAL;
		goto exit;
	}

	/* configure 1 or 2 channel mode */
	ret = config->setup_input_channel_mode(vpif->std_info.ycmux_mode);
	if (ret < 0) {
		vpif_dbg(1, debug, "can't set vpif channel mode\n");
		goto exit;
	}

	ret = vpif_set_video_params(vpif, ch->channel_id);
	if (ret < 0) {
		vpif_dbg(1, debug, "can't set video params\n");
		goto exit;
	}

	/* vpif_set_video_params returns the channel count (1 or 2) */
	common->started = ret;
	vpif_config_addr(ch, ret);
	common->set_addr(addr + common->ytop_off,
			 addr + common->ybtm_off,
			 addr + common->ctop_off,
			 addr + common->cbtm_off);

	/* enable interrupts and the channel(s) in the VPIF registers */
	if ((VPIF_CHANNEL0_VIDEO == ch->channel_id)) {
		channel0_intr_assert();
		channel0_intr_enable(1);
		enable_channel0(1);
	}
	/* channel 1 is also enabled in 2-channel mode */
	if ((VPIF_CHANNEL1_VIDEO == ch->channel_id) ||
	    (common->started == 2)) {
		channel1_intr_assert();
		channel1_intr_enable(1);
		enable_channel1(1);
	}
	channel_first_int[VPIF_VIDEO_INDEX][ch->channel_id] = 1;
	return ret;

exit:
	/* undo videobuf_streamon; ret keeps the original error */
	videobuf_streamoff(&common->buffer_queue);
	return ret;
}
/*
 * fimc_cap_streamon() - start capture streaming on a FIMC context.
 *
 * Under fimc->lock: refuses if capture is already active or no sensor
 * subdev is bound, requires a destination format, starts the sensor
 * subdev, then programs the camera input, optional scaler path and
 * output DMA before handing the queue to videobuf. Errors from any
 * step return with only the mutex released (the earlier s_stream on
 * the subdev is not rolled back here).
 */
static int fimc_cap_streamon(struct file *file, void *priv,
			     enum v4l2_buf_type type)
{
	struct s3c_fimc_isp_info *isp_info;
	struct fimc_ctx *ctx = priv;
	struct fimc_dev *fimc = ctx->fimc_dev;
	int ret = -EBUSY;

	if (mutex_lock_interruptible(&fimc->lock))
		return -ERESTARTSYS;

	/* already capturing, or no sensor subdev registered -> -EBUSY */
	if (fimc_capture_active(fimc) || !fimc->vid_cap.sd)
		goto s_unlock;

	/* a destination format must have been set via S_FMT */
	if (!(ctx->state & FIMC_DST_FMT)) {
		v4l2_err(&fimc->vid_cap.v4l2_dev, "Format is not set\n");
		ret = -EINVAL;
		goto s_unlock;
	}

	/* start the sensor; subdevs without s_stream are tolerated */
	ret = v4l2_subdev_call(fimc->vid_cap.sd, video, s_stream, 1);
	if (ret && ret != -ENOIOCTLCMD)
		goto s_unlock;

	ret = fimc_prepare_config(ctx, ctx->state);
	if (ret)
		goto s_unlock;

	/* program the camera input side of the pipeline */
	isp_info = fimc->pdata->isp_info[fimc->vid_cap.input_index];
	fimc_hw_set_camera_type(fimc, isp_info);
	fimc_hw_set_camera_source(fimc, isp_info);
	fimc_hw_set_camera_offset(fimc, &ctx->s_frame);

	/* scaler/effect path is configured only when params were set */
	if (ctx->state & FIMC_PARAMS) {
		ret = fimc_set_scaler_info(ctx);
		if (ret) {
			err("Scaler setup error");
			goto s_unlock;
		}
		fimc_hw_set_input_path(ctx);
		fimc_hw_set_scaler(ctx);
		fimc_hw_set_target_format(ctx);
		fimc_hw_set_rotation(ctx);
		fimc_hw_set_effect(ctx);
	}

	fimc_hw_set_output_path(ctx);
	fimc_hw_set_out_dma(ctx);

	/* reset buffer bookkeeping before streaming begins */
	INIT_LIST_HEAD(&fimc->vid_cap.pending_buf_q);
	INIT_LIST_HEAD(&fimc->vid_cap.active_buf_q);
	fimc->vid_cap.active_buf_cnt = 0;
	fimc->vid_cap.frame_count = 0;

	set_bit(ST_CAPT_PEND, &fimc->state);
	ret = videobuf_streamon(&fimc->vid_cap.vbq);

s_unlock:
	mutex_unlock(&fimc->lock);
	return ret;
}