/*
 * Queue a video buffer. Attempting to queue a buffer that has already been
 * queued will return -EINVAL.
 *
 * Validates the v4l2_buffer against the queue (type, memory, index, state,
 * payload size) and, on success, appends the buffer to both the mainqueue
 * (userspace-facing order) and the irqqueue (completion-handler order).
 *
 * Returns 0 on success or a negative error code (-EINVAL on validation
 * failure, -ENODEV if the device was disconnected).
 */
int uvc_queue_buffer(struct uvc_video_queue *queue,
		struct v4l2_buffer *v4l2_buf)
{
	struct uvc_buffer *buf;
	unsigned long flags;
	int ret = 0;

	uvc_trace(UVC_TRACE_CAPTURE, "Queuing buffer %u.\n", v4l2_buf->index);

	/* Only MMAP buffers matching the queue's type may be queued. */
	if (v4l2_buf->type != queue->type ||
	    v4l2_buf->memory != V4L2_MEMORY_MMAP) {
		uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer type (%u) "
			"and/or memory (%u).\n", v4l2_buf->type,
			v4l2_buf->memory);
		return -EINVAL;
	}

	/* The mutex serializes queue operations from userspace; the state
	 * checks below rely on it. */
	mutex_lock(&queue->mutex);
	if (v4l2_buf->index >= queue->count) {
		uvc_trace(UVC_TRACE_CAPTURE, "[E] Out of range index.\n");
		ret = -EINVAL;
		goto done;
	}

	/* A buffer that is not idle has either already been queued or is
	 * still owned by the driver. */
	buf = &queue->buffer[v4l2_buf->index];
	if (buf->state != UVC_BUF_STATE_IDLE) {
		uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer state "
			"(%u).\n", buf->state);
		ret = -EINVAL;
		goto done;
	}

	/* For output buffers the application supplies the payload size; it
	 * must fit in the allocated buffer. */
	if (v4l2_buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
	    v4l2_buf->bytesused > buf->buf.length) {
		uvc_trace(UVC_TRACE_CAPTURE, "[E] Bytes used out of bounds.\n");
		ret = -EINVAL;
		goto done;
	}

	/* The irqlock protects the queues against the URB completion
	 * handler; interrupts are disabled while holding it. */
	spin_lock_irqsave(&queue->irqlock, flags);
	if (queue->flags & UVC_QUEUE_DISCONNECTED) {
		spin_unlock_irqrestore(&queue->irqlock, flags);
		ret = -ENODEV;
		goto done;
	}
	buf->state = UVC_BUF_STATE_QUEUED;
	/* Capture buffers start empty; output buffers carry the payload
	 * size provided by the application. */
	if (v4l2_buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		buf->buf.bytesused = 0;
	else
		buf->buf.bytesused = v4l2_buf->bytesused;

	list_add_tail(&buf->stream, &queue->mainqueue);
	list_add_tail(&buf->queue, &queue->irqqueue);
	spin_unlock_irqrestore(&queue->irqlock, flags);

done:
	mutex_unlock(&queue->mutex);
	return ret;
}
/* * Dequeue a video buffer. If nonblocking is false, block until a buffer is * available. */ int uvc_dequeue_buffer(struct uvc_video_queue *queue, struct v4l2_buffer *v4l2_buf, int nonblocking) { struct uvc_buffer *buf; int ret = 0; if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE || v4l2_buf->memory != V4L2_MEMORY_MMAP) { uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer type (%u) " "and/or memory (%u).\n", v4l2_buf->type, v4l2_buf->memory); return -EINVAL; } mutex_lock(&queue->mutex); if (list_empty(&queue->mainqueue)) { uvc_trace(UVC_TRACE_CAPTURE, "[E] Empty buffer queue.\n"); ret = -EINVAL; goto done; } buf = list_first_entry(&queue->mainqueue, struct uvc_buffer, stream); if ((ret = uvc_queue_waiton(buf, nonblocking)) < 0) goto done; uvc_trace(UVC_TRACE_CAPTURE, "Dequeuing buffer %u (%u, %u bytes).\n", buf->buf.index, buf->state, buf->buf.bytesused); switch (buf->state) { case UVC_BUF_STATE_ERROR: uvc_trace(UVC_TRACE_CAPTURE, "[W] Corrupted data " "(transmission error).\n"); ret = -EIO; case UVC_BUF_STATE_DONE: buf->state = UVC_BUF_STATE_IDLE; break; case UVC_BUF_STATE_IDLE: case UVC_BUF_STATE_QUEUED: case UVC_BUF_STATE_ACTIVE: default: uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer state %u " "(driver bug?).\n", buf->state); ret = -EINVAL; goto done; } list_del(&buf->stream); //evan(T_dequeue) __uvc_query_buffer(buf, v4l2_buf); done: mutex_unlock(&queue->mutex); return ret; }
/*
 * Handle a control status event.
 *
 * Validates the event payload and traces the reported attribute change
 * (value, info or failure) for the originating control.
 */
static void uvc_event_control(struct uvc_device *dev, __u8 *data, int len)
{
	static const char *attrs[3] = { "value", "info", "failure" };
	int valid;

	/* The length check must come first: it guards the data[2] and
	 * data[4] accesses below. */
	valid = len >= 6 && data[2] == 0 && data[4] <= 2;
	if (!valid) {
		uvc_trace(UVC_TRACE_STATUS,
			  "Invalid control status event received.\n");
		return;
	}

	uvc_trace(UVC_TRACE_STATUS, "Control %u/%u %s change len %d.\n",
		  data[1], data[3], attrs[data[4]], len);
}
static void uvc_video_decode_end(struct uvc_streaming *stream, struct uvc_buffer *buf, const __u8 *data, int len) { /* Mark the buffer as done if the EOF marker is set. */ if (data[1] & UVC_STREAM_EOF && buf->bytesused != 0) { uvc_trace(UVC_TRACE_FRAME, "Frame complete (EOF found).\n"); if (data[0] == len) uvc_trace(UVC_TRACE_FRAME, "EOF in empty payload.\n"); buf->state = UVC_BUF_STATE_READY; if (stream->dev->quirks & UVC_QUIRK_STREAM_NO_FID) stream->last_fid ^= UVC_STREAM_FID; } }
/*
 * Fold the per-frame statistics into the per-stream totals.
 *
 * Traces a summary of the frame that just completed, accumulates its
 * packet/PTS/SCR counters into stream->stats.stream, and resets the
 * per-frame counters for the next frame.
 */
static void uvc_video_stats_update(struct uvc_streaming *stream)
{
	struct uvc_stats_frame *frame = &stream->stats.frame;

	uvc_trace(UVC_TRACE_STATS, "frame %u stats: %u/%u/%u packets, "
		  "%u/%u/%u pts (%searly %sinitial), %u/%u scr, "
		  "last pts/stc/sof %u/%u/%u\n",
		  stream->sequence, frame->first_data,
		  frame->nb_packets - frame->nb_empty, frame->nb_packets,
		  frame->nb_pts_diffs, frame->last_pts_diff, frame->nb_pts,
		  frame->has_early_pts ? "" : "!",
		  frame->has_initial_pts ? "" : "!",
		  frame->nb_scr_diffs, frame->nb_scr,
		  frame->pts, frame->scr_stc, frame->scr_sof);

	/* Accumulate raw packet counters into the stream totals. */
	stream->stats.stream.nb_frames++;
	stream->stats.stream.nb_packets += stream->stats.frame.nb_packets;
	stream->stats.stream.nb_empty += stream->stats.frame.nb_empty;
	stream->stats.stream.nb_errors += stream->stats.frame.nb_errors;
	stream->stats.stream.nb_invalid += stream->stats.frame.nb_invalid;

	if (frame->has_early_pts)
		stream->stats.stream.nb_pts_early++;
	if (frame->has_initial_pts)
		stream->stats.stream.nb_pts_initial++;
	/* NOTE(review): counts frames whose PTS stopped changing before the
	 * first data packet - presumably "PTS constant" means the clock was
	 * stable for the whole frame; confirm against the stats consumers. */
	if (frame->last_pts_diff <= frame->first_data)
		stream->stats.stream.nb_pts_constant++;
	/* An SCR in every non-empty packet counts as a fully-sampled frame. */
	if (frame->nb_scr >= frame->nb_packets - frame->nb_empty)
		stream->stats.stream.nb_scr_count_ok++;
	if (frame->nb_scr_diffs + 1 == frame->nb_scr)
		stream->stats.stream.nb_scr_diffs_ok++;

	/* Start the next frame's statistics from a clean slate. */
	memset(&stream->stats.frame, 0, sizeof(stream->stats.frame));
}
/*
 * Decode an isochronous URB for the Apple iSight camera.
 *
 * Iterates over the URB's packets, feeding each one to isight_decode().
 * Completed (or errored) buffers are swapped for the next queued buffer;
 * decoding continues with a NULL buffer when none is available.
 */
void uvc_video_decode_isight(struct urb *urb, struct uvc_video_device *video,
	struct uvc_buffer *buf)
{
	int ret, i;

	for (i = 0; i < urb->number_of_packets; ++i) {
		/* A negative status marks a lost frame; the packet is still
		 * passed to the decoder below. */
		if (urb->iso_frame_desc[i].status < 0) {
			uvc_trace(UVC_TRACE_FRAME, "USB isochronous frame "
				"lost (%d).\n",
				urb->iso_frame_desc[i].status);
		}

		/* Decode the payload packet.
		 * uvc_video_decode is entered twice when a frame transition
		 * has been detected because the end of frame can only be
		 * reliably detected when the first packet of the new frame
		 * is processed. The first pass detects the transition and
		 * closes the previous frame's buffer, the second pass
		 * processes the data of the first payload of the new frame.
		 */
		do {
			ret = isight_decode(&video->queue, buf,
					urb->transfer_buffer +
					urb->iso_frame_desc[i].offset,
					urb->iso_frame_desc[i].actual_length);

			/* No buffer available: header state was still
			 * updated, nothing more to do for this packet. */
			if (buf == NULL)
				break;

			if (buf->state == UVC_BUF_STATE_DONE ||
			    buf->state == UVC_BUF_STATE_ERROR)
				buf = uvc_queue_next_buffer(&video->queue,
							buf);
		} while (ret == -EAGAIN);
	}
}
static int uvc_buffer_prepare(struct vb2_buffer *vb) { struct uvc_video_queue *queue = vb2_get_drv_priv(vb->vb2_queue); struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb); struct uvc_buffer *buf = container_of(vbuf, struct uvc_buffer, buf); if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && vb2_get_plane_payload(vb, 0) > vb2_plane_size(vb, 0)) { uvc_trace(UVC_TRACE_CAPTURE, "[E] Bytes used out of bounds.\n"); return -EINVAL; } if (unlikely(queue->flags & UVC_QUEUE_DISCONNECTED)) return -ENODEV; buf->state = UVC_BUF_STATE_QUEUED; buf->mem = vb2_plane_vaddr(vb, 0); buf->length = vb2_plane_size(vb, 0); if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) buf->bytesused = 0; else buf->bytesused = vb2_get_plane_payload(vb, 0); return 0; }
/* --------------------------------------------------------------------------
 * Status interrupt endpoint
 */

/*
 * Handle a streaming status event.
 *
 * A bEvent (data[2]) of zero reports a button press/release carried in
 * data[3]; any other value is a stream error event.
 *
 * Fix: the error branch previously dereferenced data[3] even though only
 * len >= 3 had been verified, an out-of-bounds read for 3-byte events. The
 * extra byte is now printed only when it is actually present (len >= 4).
 */
static void uvc_event_streaming(struct uvc_device *dev, __u8 *data, int len)
{
	if (len < 3) {
		uvc_trace(UVC_TRACE_STATUS, "Invalid streaming status event "
				"received.\n");
		return;
	}

	if (data[2] == 0) {
		/* Button events need the state byte in data[3]. */
		if (len < 4)
			return;
		uvc_trace(UVC_TRACE_STATUS, "Button (intf %u) %s len %d\n",
			  data[1], data[3] ? "pressed" : "released", len);
		uvc_input_report_key(dev, KEY_CAMERA, data[3]);
	} else {
		/* data[3] exists only when the event is at least 4 bytes. */
		if (len < 4)
			uvc_trace(UVC_TRACE_STATUS, "Stream %u error event "
				  "%02x len %d.\n", data[1], data[2], len);
		else
			uvc_trace(UVC_TRACE_STATUS, "Stream %u error event "
				  "%02x %02x len %d.\n", data[1], data[2],
				  data[3], len);
	}
}
/*
 * Handle a streaming status event.
 *
 * A bEvent of zero reports a button press/release carried in bValue[0];
 * any other bEvent value is a stream error event.
 */
static void uvc_event_streaming(struct uvc_device *dev,
				struct uvc_streaming_status *status, int len)
{
	if (len < 3) {
		uvc_trace(UVC_TRACE_STATUS, "Invalid streaming status event "
			  "received.\n");
		return;
	}

	/* Anything but bEvent == 0 is an error event. */
	if (status->bEvent != 0) {
		uvc_trace(UVC_TRACE_STATUS,
			  "Stream %u error event %02x len %d.\n",
			  status->bOriginator, status->bEvent, len);
		return;
	}

	/* Button event: requires the state byte in bValue[0]. */
	if (len < 4)
		return;

	uvc_trace(UVC_TRACE_STATUS, "Button (intf %u) %s len %d\n",
		  status->bOriginator,
		  status->bValue[0] ? "pressed" : "released", len);
	uvc_input_report_key(dev, KEY_CAMERA, status->bValue[0]);
}
/*
 * Completion handler for the status interrupt URB.
 *
 * Dispatches device-initiated status events (control changes, streaming
 * errors/button presses) to the matching handler, then resubmits the URB
 * so the next interrupt can be received.
 */
static void uvc_status_complete(struct urb *urb)
{
	struct uvc_device *dev = urb->context;
	int len, ret;

	switch (urb->status) {
	case 0:
		break;

	case -ENOENT:		/* usb_kill_urb() called. */
	case -ECONNRESET:	/* usb_unlink_urb() called. */
	case -ESHUTDOWN:	/* The endpoint is being disabled. */
	case -EPROTO:		/* Device is disconnected (reported by some
				 * host controller). */
		return;

	default:
		uvc_printk(KERN_WARNING, "Non-zero status (%d) in status "
			"completion handler.\n", urb->status);
		return;
	}

	len = urb->actual_length;
	if (len > 0) {
		/* The low nibble of the first status byte is the event
		 * type. */
		switch (dev->status[0] & 0x0f) {
		case UVC_STATUS_TYPE_CONTROL:
			uvc_event_control(dev, dev->status, len);
			/* NOTE(review): motion flag raised on every control
			 * event; its consumer is not visible in this file -
			 * confirm intent. */
			dev->motion = 1;
			break;

		case UVC_STATUS_TYPE_STREAMING:
			uvc_event_streaming(dev, dev->status, len);
			break;

		default:
			uvc_trace(UVC_TRACE_STATUS, "Unknown status event "
				"type %u.\n", dev->status[0]);
			break;
		}
	}

	/* Resubmit the URB. */
	urb->interval = dev->int_ep->desc.bInterval;
	if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) {
		uvc_printk(KERN_ERR, "Failed to resubmit status URB (%d).\n",
			ret);
	}
}
static void uvc_status_complete(struct urb *urb) { struct uvc_device *dev = urb->context; int len, ret; switch (urb->status) { case 0: break; case -ENOENT: case -ECONNRESET: case -ESHUTDOWN: case -EPROTO: return; default: uvc_printk(KERN_WARNING, "Non-zero status (%d) in status " "completion handler.\n", urb->status); return; } len = urb->actual_length; if (len > 0) { switch (dev->status[0] & 0x0f) { case UVC_STATUS_TYPE_CONTROL: uvc_event_control(dev, dev->status, len); break; case UVC_STATUS_TYPE_STREAMING: uvc_event_streaming(dev, dev->status, len); break; default: uvc_trace(UVC_TRACE_STATUS, "Unknown status event " "type %u.\n", dev->status[0]); break; } } urb->interval = dev->int_ep->desc.bInterval; if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) { uvc_printk(KERN_ERR, "Failed to resubmit status URB (%d).\n", ret); } }
/*
 * Completion handler for video URBs.
 */

/*
 * Decode an isochronous video URB.
 *
 * For each packet: runs uvc_video_decode_start() (repeating with the next
 * queued buffer whenever -EAGAIN signals an end-of-frame detected at the
 * start of the next frame), copies payload data, processes the trailing
 * header, and hands off completed buffers.
 */
static void uvc_video_decode_isoc(struct urb *urb, struct uvc_streaming *stream,
	struct uvc_buffer *buf)
{
	u8 *mem;
	int ret, i;

	for (i = 0; i < urb->number_of_packets; ++i) {
		if (urb->iso_frame_desc[i].status < 0) {
			uvc_trace(UVC_TRACE_FRAME, "USB isochronous frame "
				"lost (%d).\n",
				urb->iso_frame_desc[i].status);
			/* Mark the buffer as faulty. */
			if (buf != NULL)
				buf->error = 1;
			continue;
		}

		/* Decode the payload header. -EAGAIN means the current
		 * buffer is complete: validate it, swap in the next queued
		 * buffer, and feed the same packet again. */
		mem = urb->transfer_buffer + urb->iso_frame_desc[i].offset;
		do {
			ret = uvc_video_decode_start(stream, buf, mem,
				urb->iso_frame_desc[i].actual_length);
			if (ret == -EAGAIN) {
				uvc_video_validate_buffer(stream, buf);
				buf = uvc_queue_next_buffer(&stream->queue,
							buf);
			}
		} while (ret == -EAGAIN);

		/* Any other error (including -ENODATA for a NULL buffer)
		 * drops the packet. */
		if (ret < 0)
			continue;

		/* Decode the payload data; ret is the header size, so the
		 * data starts at mem + ret. */
		uvc_video_decode_data(stream, buf, mem + ret,
			urb->iso_frame_desc[i].actual_length - ret);

		/* Process the header again. */
		uvc_video_decode_end(stream, buf, mem,
			urb->iso_frame_desc[i].actual_length);

		if (buf->state == UVC_BUF_STATE_READY) {
			uvc_video_validate_buffer(stream, buf);
			buf = uvc_queue_next_buffer(&stream->queue, buf);
		}
	}
}
static void uvc_video_decode_data(struct uvc_streaming *stream, struct uvc_buffer *buf, const __u8 *data, int len) { unsigned int maxlen, nbytes; void *mem; if (len <= 0) return; /* Copy the video data to the buffer. */ maxlen = buf->length - buf->bytesused; mem = buf->mem + buf->bytesused; nbytes = min((unsigned int)len, maxlen); memcpy(mem, data, nbytes); buf->bytesused += nbytes; /* Complete the current frame if the buffer size was exceeded. */ if (len > maxlen) { uvc_trace(UVC_TRACE_FRAME, "Frame complete (overflow).\n"); buf->state = UVC_BUF_STATE_READY; } }
/*
 * Decode an isochronous URB for the Apple iSight camera.
 *
 * Iterates over the URB's packets, feeding each one to isight_decode().
 * Completed (or errored) buffers are swapped for the next queued buffer;
 * -EAGAIN from the decoder re-runs the same packet against the new buffer.
 */
void uvc_video_decode_isight(struct urb *urb, struct uvc_streaming *stream,
	struct uvc_buffer *buf)
{
	int ret, i;

	for (i = 0; i < urb->number_of_packets; ++i) {
		/* A negative status marks a lost frame; the packet is still
		 * passed to the decoder below. */
		if (urb->iso_frame_desc[i].status < 0) {
			uvc_trace(UVC_TRACE_FRAME, "USB isochronous frame "
				"lost (%d).\n",
				urb->iso_frame_desc[i].status);
		}

		/* Decode the payload packet. The decoder is entered twice on
		 * a frame transition: the first pass closes the previous
		 * frame's buffer (returning -EAGAIN), the second processes
		 * the first payload of the new frame. */
		do {
			ret = isight_decode(&stream->queue, buf,
					urb->transfer_buffer +
					urb->iso_frame_desc[i].offset,
					urb->iso_frame_desc[i].actual_length);

			/* No buffer available: nothing more to do for this
			 * packet. */
			if (buf == NULL)
				break;

			if (buf->state == UVC_BUF_STATE_DONE ||
			    buf->state == UVC_BUF_STATE_ERROR)
				buf = uvc_queue_next_buffer(&stream->queue,
							buf);
		} while (ret == -EAGAIN);
	}
}
/*
 * Decode a single iSight payload packet.
 *
 * iSight frames are delimited by a magic 12-byte header found at offset 2
 * or 3 of a packet; header packets carry no image data. Returns 0 on
 * success or -EAGAIN when the current buffer is complete and the packet
 * must be re-run against a new buffer.
 */
static int isight_decode(struct uvc_video_queue *queue, struct uvc_buffer *buf,
		const __u8 *data, unsigned int len)
{
	static const __u8 hdr[] = {
		0x11, 0x22, 0x33, 0x44,
		0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xfa, 0xce
	};

	unsigned int maxlen, nbytes;
	__u8 *mem;
	int is_header = 0;

	if (buf == NULL)
		return 0;

	/* The magic header may start at offset 2 or 3 within the packet. */
	if ((len >= 14 && memcmp(&data[2], hdr, 12) == 0) ||
	    (len >= 15 && memcmp(&data[3], hdr, 12) == 0)) {
		uvc_trace(UVC_TRACE_FRAME, "iSight header found\n");
		is_header = 1;
	}

	/* Synchronize to the input stream by waiting for a header packet. */
	if (buf->state != UVC_BUF_STATE_ACTIVE) {
		if (!is_header) {
			uvc_trace(UVC_TRACE_FRAME, "Dropping packet (out of "
				"sync).\n");
			return 0;
		}

		buf->state = UVC_BUF_STATE_ACTIVE;
	}

	/* Mark the buffer as done if we're at the beginning of a new frame.
	 *
	 * Empty buffers (bytesused == 0) don't trigger end of frame detection
	 * as it doesn't make sense to return an empty buffer.
	 */
	if (is_header && buf->buf.bytesused != 0) {
		buf->state = UVC_BUF_STATE_DONE;
		return -EAGAIN;
	}

	/* Copy the video data to the buffer. Skip header packets, as they
	 * contain no data.
	 */
	if (!is_header) {
		maxlen = buf->buf.length - buf->buf.bytesused;
		mem = queue->mem + buf->buf.m.offset + buf->buf.bytesused;
		nbytes = min(len, maxlen);
		memcpy(mem, data, nbytes);
		buf->buf.bytesused += nbytes;

		/* Complete the frame on overflow or when the buffer is
		 * exactly full. */
		if (len > maxlen || buf->buf.bytesused == buf->buf.length) {
			uvc_trace(UVC_TRACE_FRAME, "Frame complete "
				"(overflow).\n");
			buf->state = UVC_BUF_STATE_DONE;
		}
	}

	return 0;
}
/*
 * Decode a single iSight payload packet.
 *
 * iSight frames are delimited by a magic 12-byte header found at offset 2
 * or 3 of a packet; header packets carry no image data. Returns 0 on
 * success or -EAGAIN when the current buffer is complete and the packet
 * must be re-run against a new buffer.
 */
static int isight_decode(struct uvc_video_queue *queue, struct uvc_buffer *buf,
		const __u8 *data, unsigned int len)
{
	static const __u8 hdr[] = {
		0x11, 0x22, 0x33, 0x44,
		0xde, 0xad, 0xbe, 0xef,
		0xde, 0xad, 0xfa, 0xce
	};

	unsigned int maxlen, nbytes;
	__u8 *mem;
	int is_header = 0;

	if (buf == NULL)
		return 0;

	/* The magic header may start at offset 2 or 3 within the packet. */
	if ((len >= 14 && memcmp(&data[2], hdr, 12) == 0) ||
	    (len >= 15 && memcmp(&data[3], hdr, 12) == 0)) {
		uvc_trace(UVC_TRACE_FRAME, "iSight header found\n");
		is_header = 1;
	}

	/* Synchronize to the input stream by waiting for a header packet. */
	if (buf->state != UVC_BUF_STATE_ACTIVE) {
		if (!is_header) {
			uvc_trace(UVC_TRACE_FRAME, "Dropping packet (out of "
				"sync).\n");
			return 0;
		}

		buf->state = UVC_BUF_STATE_ACTIVE;
	}

	/* Mark the buffer as done if we're at the beginning of a new frame.
	 * Empty buffers (bytesused == 0) don't trigger end of frame
	 * detection as it doesn't make sense to return an empty buffer. */
	if (is_header && buf->bytesused != 0) {
		buf->state = UVC_BUF_STATE_DONE;
		return -EAGAIN;
	}

	/* Copy the video data to the buffer. Header packets contain no
	 * data, so they are skipped. */
	if (!is_header) {
		maxlen = buf->length - buf->bytesused;
		mem = buf->mem + buf->bytesused;
		nbytes = min(len, maxlen);
		memcpy(mem, data, nbytes);
		buf->bytesused += nbytes;

		/* Complete the frame on overflow or when the buffer is
		 * exactly full. */
		if (len > maxlen || buf->bytesused == buf->length) {
			uvc_trace(UVC_TRACE_FRAME, "Frame complete "
				"(overflow).\n");
			buf->state = UVC_BUF_STATE_DONE;
		}
	}

	return 0;
}
/* Video payload decoding is handled by uvc_video_decode_start(),
 * uvc_video_decode_data() and uvc_video_decode_end().
 *
 * uvc_video_decode_start is called with URB data at the start of a bulk or
 * isochronous payload. It processes header data and returns the header size
 * in bytes if successful. If an error occurs, it returns a negative error
 * code. The following error codes have special meanings.
 *
 * - EAGAIN informs the caller that the current video buffer should be marked
 *   as done, and that the function should be called again with the same data
 *   and a new video buffer. This is used when end of frame conditions can be
 *   reliably detected at the beginning of the next frame only.
 *
 * If an error other than -EAGAIN is returned, the caller will drop the
 * current payload. No call to uvc_video_decode_data and uvc_video_decode_end
 * will be made until the next payload. -ENODATA can be used to drop the
 * current payload if no other error code is appropriate.
 *
 * uvc_video_decode_data is called for every URB with URB data. It copies the
 * data to the video buffer.
 *
 * uvc_video_decode_end is called with header data at the end of a bulk or
 * isochronous payload. It performs any additional header data processing and
 * returns 0 or a negative error code if an error occurred. As header data
 * have already been processed by uvc_video_decode_start, this function isn't
 * required to perform sanity checks a second time.
 *
 * For isochronous transfers where a payload is always transferred in a
 * single URB, the three functions will be called in a row.
 *
 * To let the decoder process header data and update its internal state even
 * when no video buffer is available, uvc_video_decode_start must be prepared
 * to be called with a NULL buf parameter. uvc_video_decode_data and
 * uvc_video_decode_end will never be called with a NULL buffer.
*/
/*
 * Process the payload header at the start of a payload.
 *
 * Returns the header size in bytes on success, or a negative error code;
 * see the block comment above for the -EAGAIN / -ENODATA semantics.
 */
static int uvc_video_decode_start(struct uvc_streaming *stream,
		struct uvc_buffer *buf, const __u8 *data, int len)
{
	__u8 fid;

	/* Sanity checks:
	 * - packet must be at least 2 bytes long
	 * - bHeaderLength value must be at least 2 bytes (see above)
	 * - bHeaderLength value can't be larger than the packet size.
	 */
	if (len < 2 || data[0] < 2 || data[0] > len) {
		stream->stats.frame.nb_invalid++;
		return -EINVAL;
	}

	fid = data[1] & UVC_STREAM_FID;

	/* Increase the sequence number regardless of any buffer states, so
	 * that discontinuous sequence numbers always indicate lost frames.
	 */
	if (stream->last_fid != fid) {
		stream->sequence++;
		/* Skip the stats update for sequence 0 (first frame). */
		if (stream->sequence)
			uvc_video_stats_update(stream);
	}

	uvc_video_clock_decode(stream, buf, data, len);
	uvc_video_stats_decode(stream, data, len);

	/* Store the payload FID bit and return immediately when the buffer
	 * is NULL.
	 */
	if (buf == NULL) {
		stream->last_fid = fid;
		return -ENODATA;
	}

	/* Mark the buffer as bad if the error bit is set. */
	if (data[1] & UVC_STREAM_ERR) {
		uvc_trace(UVC_TRACE_FRAME, "Marking buffer as bad (error bit "
			  "set).\n");
		buf->error = 1;
	}

	/* Synchronize to the input stream by waiting for the FID bit to be
	 * toggled when the buffer state is not UVC_BUF_STATE_ACTIVE.
	 * stream->last_fid is initialized to -1, so the first isochronous
	 * frame will always be in sync.
	 *
	 * If the device doesn't toggle the FID bit, invert stream->last_fid
	 * when the EOF bit is set to force synchronisation on the next
	 * packet.
	 */
	if (buf->state != UVC_BUF_STATE_ACTIVE) {
		struct timespec ts;

		if (fid == stream->last_fid) {
			uvc_trace(UVC_TRACE_FRAME, "Dropping payload (out of "
				"sync).\n");
			if ((stream->dev->quirks & UVC_QUIRK_STREAM_NO_FID) &&
			    (data[1] & UVC_STREAM_EOF))
				stream->last_fid ^= UVC_STREAM_FID;
			return -ENODATA;
		}

		/* Stamp the buffer with the host-side arrival time of the
		 * first payload of the frame. */
		uvc_video_get_ts(&ts);

		buf->buf.v4l2_buf.sequence = stream->sequence;
		buf->buf.v4l2_buf.timestamp.tv_sec = ts.tv_sec;
		buf->buf.v4l2_buf.timestamp.tv_usec =
			ts.tv_nsec / NSEC_PER_USEC;

		/* TODO: Handle PTS and SCR.
		 */
		buf->state = UVC_BUF_STATE_ACTIVE;
	}

	/* Mark the buffer as done if we're at the beginning of a new frame.
	 * End of frame detection is better implemented by checking the EOF
	 * bit (FID bit toggling is delayed by one frame compared to the EOF
	 * bit), but some devices don't set the bit at end of frame (and the
	 * last payload can be lost anyway). We thus must check if the FID
	 * has been toggled.
	 *
	 * stream->last_fid is initialized to -1, so the first isochronous
	 * frame will never trigger an end of frame detection.
	 *
	 * Empty buffers (bytesused == 0) don't trigger end of frame
	 * detection as it doesn't make sense to return an empty buffer.
	 * This also avoids detecting end of frame conditions at FID
	 * toggling if the previous payload had the EOF bit set.
	 */
	if (fid != stream->last_fid && buf->bytesused != 0) {
		uvc_trace(UVC_TRACE_FRAME, "Frame complete (FID bit "
			"toggled).\n");
		buf->state = UVC_BUF_STATE_READY;
		return -EAGAIN;
	}

	stream->last_fid = fid;

	return data[0];
}