static void iss_video_buf_queue(struct vb2_buffer *vb) { struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue); struct iss_video *video = vfh->video; struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb); struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity); unsigned long flags; bool empty; spin_lock_irqsave(&video->qlock, flags); /* Mark the buffer is faulty and give it back to the queue immediately * if the video node has registered an error. vb2 will perform the same * check when preparing the buffer, but that is inherently racy, so we * need to handle the race condition with an authoritative check here. */ if (unlikely(video->error)) { vb2_buffer_done(vb, VB2_BUF_STATE_ERROR); spin_unlock_irqrestore(&video->qlock, flags); return; } empty = list_empty(&video->dmaqueue); list_add_tail(&buffer->list, &video->dmaqueue); spin_unlock_irqrestore(&video->qlock, flags); if (empty) { enum iss_pipeline_state state; unsigned int start; if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) state = ISS_PIPELINE_QUEUE_OUTPUT; else state = ISS_PIPELINE_QUEUE_INPUT; spin_lock_irqsave(&pipe->lock, flags); pipe->state |= state; video->ops->queue(video, buffer); video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_QUEUED; start = iss_pipeline_ready(pipe); if (start) pipe->state |= ISS_PIPELINE_STREAM; spin_unlock_irqrestore(&pipe->lock, flags); if (start) omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_SINGLESHOT); } }
/*
 * vb2 buf_queue operation: add a buffer to the video node's DMA queue and
 * kick the hardware when this is the first buffer on a previously empty
 * queue.
 */
static void iss_video_buf_queue(struct vb2_buffer *vb)
{
	struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
	struct iss_video *video = vfh->video;
	struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->qlock, flags);

	/* Return the buffer with an error status if the video node has
	 * registered an error.  vb2 performs the same check when preparing
	 * the buffer, but that check is racy; repeating it here under the
	 * queue lock makes it authoritative.
	 */
	if (unlikely(video->error)) {
		vb2_buffer_done(vb, VB2_BUF_STATE_ERROR);
		spin_unlock_irqrestore(&video->qlock, flags);
		return;
	}

	empty = list_empty(&video->dmaqueue);
	list_add_tail(&buffer->list, &video->dmaqueue);

	spin_unlock_irqrestore(&video->qlock, flags);

	if (empty) {
		enum iss_pipeline_state state;
		unsigned int start;

		/* The queue bit to set depends on the direction of this
		 * video node: a capture node feeds the pipeline output.
		 */
		if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
			state = ISS_PIPELINE_QUEUE_OUTPUT;
		else
			state = ISS_PIPELINE_QUEUE_INPUT;

		spin_lock_irqsave(&pipe->lock, flags);
		pipe->state |= state;
		video->ops->queue(video, buffer);
		video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_QUEUED;

		/* Start the pipeline in single-shot mode if all streaming
		 * preconditions are now satisfied.
		 */
		start = iss_pipeline_ready(pipe);
		if (start)
			pipe->state |= ISS_PIPELINE_STREAM;
		spin_unlock_irqrestore(&pipe->lock, flags);

		if (start)
			omap4iss_pipeline_set_stream(pipe,
					ISS_PIPELINE_STREAM_SINGLESHOT);
	}
}
/*
 * iss_video_streamoff - VIDIOC_STREAMOFF handler
 *
 * Stop streaming on the video node: clear this direction's stream and
 * queue bits in the pipeline state, stop the pipeline, release the vb2
 * queue, drop the platform constraints and end the media pipeline.
 *
 * Returns 0 on success (including when the queue wasn't streaming), or
 * -EINVAL if @type doesn't match the video node's buffer type.
 */
static int iss_video_streamoff(struct file *file, void *fh,
			       enum v4l2_buf_type type)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	enum iss_pipeline_state state;
	unsigned long flags;

	if (type != video->type)
		return -EINVAL;

	mutex_lock(&video->stream_lock);

	/* Nothing to undo if the queue isn't streaming. */
	if (!vb2_is_streaming(&vfh->queue))
		goto done;

	/* Update the pipeline state. */
	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		state = ISS_PIPELINE_STREAM_OUTPUT |
			ISS_PIPELINE_QUEUE_OUTPUT;
	else
		state = ISS_PIPELINE_STREAM_INPUT |
			ISS_PIPELINE_QUEUE_INPUT;

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~state;
	spin_unlock_irqrestore(&pipe->lock, flags);

	/* Stop the stream. */
	omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_STOPPED);
	vb2_streamoff(&vfh->queue, type);

	video->queue = NULL;

	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, false);
	media_entity_pipeline_stop(&video->video.entity);

done:
	mutex_unlock(&video->stream_lock);
	return 0;
}
/*
 * Stream management
 *
 * Every ISS pipeline has a single input and a single output. The input can be
 * either a sensor or a video node. The output is always a video node.
 *
 * As every pipeline has an output video node, the ISS video object at the
 * pipeline output stores the pipeline state. It tracks the streaming state of
 * both the input and output, as well as the availability of buffers.
 *
 * In sensor-to-memory mode, frames are always available at the pipeline input.
 * Starting the sensor usually requires I2C transfers and must be done in
 * interruptible context. The pipeline is started and stopped synchronously
 * with the stream on/off commands. All modules in the pipeline will get their
 * subdev set stream handler called. The module at the end of the pipeline must
 * delay starting the hardware until buffers are available at its output.
 *
 * In memory-to-memory mode, starting/stopping the stream requires
 * synchronization between the input and output. ISS modules can't be stopped
 * in the middle of a frame, and at least some of the modules seem to become
 * busy as soon as they're started, even if they don't receive a frame start
 * event. For that reason frames need to be processed in single-shot mode. The
 * driver needs to wait until a frame is completely processed and written to
 * memory before restarting the pipeline for the next frame. Pipelined
 * processing might be possible but requires more testing.
 *
 * Stream start must be delayed until buffers are available at both the input
 * and output. The pipeline must be started in the videobuf queue callback with
 * the buffer queue spinlock held. The modules' subdev set stream operation must
 * not sleep.
*/ static int iss_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type) { struct iss_video_fh *vfh = to_iss_video_fh(fh); struct iss_video *video = video_drvdata(file); struct media_entity_graph graph; struct media_entity *entity; enum iss_pipeline_state state; struct iss_pipeline *pipe; struct iss_video *far_end; unsigned long flags; int ret; if (type != video->type) return -EINVAL; mutex_lock(&video->stream_lock); /* Start streaming on the pipeline. No link touching an entity in the * pipeline can be activated or deactivated once streaming is started. */ pipe = video->video.entity.pipe ? to_iss_pipeline(&video->video.entity) : &video->pipe; pipe->external = NULL; pipe->external_rate = 0; pipe->external_bpp = 0; pipe->entities = 0; if (video->iss->pdata->set_constraints) video->iss->pdata->set_constraints(video->iss, true); ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe); if (ret < 0) goto err_media_entity_pipeline_start; entity = &video->video.entity; media_entity_graph_walk_start(&graph, entity); while ((entity = media_entity_graph_walk_next(&graph))) pipe->entities |= 1 << entity->id; /* Verify that the currently configured format matches the output of * the connected subdev. */ ret = iss_video_check_format(video, vfh); if (ret < 0) goto err_iss_video_check_format; video->bpl_padding = ret; video->bpl_value = vfh->format.fmt.pix.bytesperline; /* Find the ISS video node connected at the far end of the pipeline and * update the pipeline. 
*/ far_end = iss_video_far_end(video); if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) { state = ISS_PIPELINE_STREAM_OUTPUT | ISS_PIPELINE_IDLE_OUTPUT; pipe->input = far_end; pipe->output = video; } else { if (far_end == NULL) { ret = -EPIPE; goto err_iss_video_check_format; } state = ISS_PIPELINE_STREAM_INPUT | ISS_PIPELINE_IDLE_INPUT; pipe->input = video; pipe->output = far_end; } spin_lock_irqsave(&pipe->lock, flags); pipe->state &= ~ISS_PIPELINE_STREAM; pipe->state |= state; spin_unlock_irqrestore(&pipe->lock, flags); /* Set the maximum time per frame as the value requested by userspace. * This is a soft limit that can be overridden if the hardware doesn't * support the request limit. */ if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) pipe->max_timeperframe = vfh->timeperframe; video->queue = &vfh->queue; INIT_LIST_HEAD(&video->dmaqueue); spin_lock_init(&video->qlock); video->error = false; atomic_set(&pipe->frame_number, -1); ret = vb2_streamon(&vfh->queue, type); if (ret < 0) goto err_iss_video_check_format; /* In sensor-to-memory mode, the stream can be started synchronously * to the stream on command. In memory-to-memory mode, it will be * started when buffers are queued on both the input and output. */ if (pipe->input == NULL) { unsigned long flags; ret = omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_CONTINUOUS); if (ret < 0) goto err_omap4iss_set_stream; spin_lock_irqsave(&video->qlock, flags); if (list_empty(&video->dmaqueue)) video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN; spin_unlock_irqrestore(&video->qlock, flags); } mutex_unlock(&video->stream_lock); return 0; err_omap4iss_set_stream: vb2_streamoff(&vfh->queue, type); err_iss_video_check_format: media_entity_pipeline_stop(&video->video.entity); err_media_entity_pipeline_start: if (video->iss->pdata->set_constraints) video->iss->pdata->set_constraints(video->iss, false); video->queue = NULL; mutex_unlock(&video->stream_lock); return ret; }