/*
 * Make sure @req holds a buffer we may write into before queueing a read.
 *
 * Order of preference: keep the request's current buffer if it is already
 * writable; otherwise swap it with a writable spare (the old buffer becomes
 * the new spare); as a last resort drop our reference and allocate a fresh
 * buffer through the element's allocation callback.
 *
 * Returns TRUE if req->buf ends up non-NULL (with its flags cleared),
 * FALSE if allocation failed.
 */
static gboolean
gst_ks_read_request_pick_buffer (GstKsVideoDevice * self, ReadRequest * req)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  gboolean have_writable;
  GstBuffer *previous;
  guint idx;

  have_writable = gst_buffer_is_writable (req->buf);

  /* Try to trade our (busy) buffer for a writable spare */
  for (idx = 0; idx < G_N_ELEMENTS (priv->spare_buffers) && !have_writable;
      idx++) {
    if (!gst_buffer_is_writable (priv->spare_buffers[idx]))
      continue;

    previous = req->buf;
    req->buf = priv->spare_buffers[idx];
    priv->spare_buffers[idx] = previous;
    have_writable = TRUE;
  }

  if (!have_writable) {
    /* no spare available either: release ours and allocate a new one */
    gst_buffer_unref (req->buf);
    req->buf = self->allocfunc (gst_ks_video_device_get_frame_size (self),
        KS_BUFFER_ALIGNMENT, self->allocfunc_data);
  }

  if (req->buf == NULL)
    return FALSE;

  GST_BUFFER_FLAGS (req->buf) = 0;
  return TRUE;
}
/*
 * Queue one asynchronous frame read on the capture pin.
 *
 * Picks a writable buffer for @req, re-arms its OVERLAPPED structure and
 * submits an IOCTL_KS_READ_STREAM request. Completion is signalled through
 * req->overlapped.hEvent; the caller waits on that event elsewhere.
 *
 * On failure, fills *error_code / *error_str (when non-NULL) and returns
 * FALSE. Buffer-pick failure reports error code 0 and a NULL string.
 */
static gboolean
gst_ks_video_device_request_frame (GstKsVideoDevice * self, ReadRequest * req,
    gulong * error_code, gchar ** error_str)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  KSSTREAM_READ_PARAMS *read_params;
  HANDLE saved_event;
  DWORD bytes_returned = 0;
  BOOL ioctl_ok;

  if (!gst_ks_read_request_pick_buffer (self, req))
    goto error_pick_buffer;

  /* Re-arm the OVERLAPPED structure, keeping only the event handle */
  saved_event = req->overlapped.hEvent;
  memset (&req->overlapped, 0, sizeof (OVERLAPPED));
  req->overlapped.hEvent = saved_event;

  /* Describe the read: KSSTREAM_HEADER followed by KS_FRAME_INFO */
  read_params = &req->params;
  memset (read_params, 0, sizeof (KSSTREAM_READ_PARAMS));

  read_params->header.Size = sizeof (KSSTREAM_HEADER) + sizeof (KS_FRAME_INFO);
  read_params->header.PresentationTime.Numerator = 1;
  read_params->header.PresentationTime.Denominator = 1;
  read_params->header.FrameExtent = gst_ks_video_device_get_frame_size (self);
  read_params->header.Data = GST_BUFFER_DATA (req->buf);
  read_params->frame_info.ExtendedHeaderSize = sizeof (KS_FRAME_INFO);

  ioctl_ok = DeviceIoControl (priv->pin_handle, IOCTL_KS_READ_STREAM, NULL, 0,
      read_params, read_params->header.Size, &bytes_returned, &req->overlapped);
  /* ERROR_IO_PENDING simply means the read was queued asynchronously */
  if (!ioctl_ok && GetLastError () != ERROR_IO_PENDING)
    goto error_ioctl;

  return TRUE;

  /* ERRORS */
error_pick_buffer:
  {
    if (error_code != NULL)
      *error_code = 0;
    if (error_str != NULL)
      *error_str = NULL;
    return FALSE;
  }
error_ioctl:
  {
    gst_ks_video_device_parse_win32_error ("DeviceIoControl", GetLastError (),
        error_code, error_str);
    return FALSE;
  }
}
/*
 * Queue one asynchronous frame read on the capture pin.
 *
 * Re-arms @req's OVERLAPPED structure (keeping only the completion event),
 * fills out the KSSTREAM_HEADER/KS_FRAME_INFO pair and submits an
 * IOCTL_KS_READ_STREAM request against priv->pin_handle. Completion is
 * reported via req->overlapped.hEvent, which the caller waits on elsewhere.
 *
 * NOTE(review): in this variant req->buf is passed directly as the data
 * pointer — presumably a raw byte buffer here rather than a GstBuffer;
 * confirm against the ReadRequest declaration.
 *
 * On failure fills *error_code / *error_str via
 * gst_ks_video_device_parse_win32_error() and returns FALSE.
 */
static gboolean
gst_ks_video_device_request_frame (GstKsVideoDevice * self, ReadRequest * req,
    gulong * error_code, gchar ** error_str)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  HANDLE event;
  KSSTREAM_READ_PARAMS *params;
  BOOL success;
  DWORD bytes_returned = 0;

  /* Reset the OVERLAPPED structure, preserving only the event handle so
   * the caller can still wait for this request to complete */
  event = req->overlapped.hEvent;
  memset (&req->overlapped, 0, sizeof (OVERLAPPED));
  req->overlapped.hEvent = event;

  /* Fill out KSSTREAM_HEADER and KS_FRAME_INFO */
  params = &req->params;
  memset (params, 0, sizeof (KSSTREAM_READ_PARAMS));

  params->header.Size = sizeof (KSSTREAM_HEADER) + sizeof (KS_FRAME_INFO);
  params->header.PresentationTime.Numerator = 1;
  params->header.PresentationTime.Denominator = 1;
  params->header.FrameExtent = gst_ks_video_device_get_frame_size (self);
  params->header.Data = req->buf;
  params->frame_info.ExtendedHeaderSize = sizeof (KS_FRAME_INFO);

  /*
   * Clear the buffer like DirectShow does
   *
   * REVISIT: Could probably remove this later, for now it's here to help
   * track down the case where we capture old frames. This has been
   * observed with UVC cameras, presumably with some system load.
   */
  memset (params->header.Data, 0, params->header.FrameExtent);

  success = DeviceIoControl (priv->pin_handle, IOCTL_KS_READ_STREAM, NULL, 0,
      params, params->header.Size, &bytes_returned, &req->overlapped);
  /* ERROR_IO_PENDING means the request was queued asynchronously — not
   * an error */
  if (!success && GetLastError () != ERROR_IO_PENDING)
    goto error_ioctl;

  return TRUE;

  /* ERRORS */
error_ioctl:
  {
    gst_ks_video_device_parse_win32_error ("DeviceIoControl", GetLastError (),
        error_code, error_str);
    return FALSE;
  }
}
/*
 * (Re)create the capture buffer pool for the currently selected media type.
 *
 * Tears down any previous buffers, then allocates the spare buffers, one
 * ReadRequest (buffer + manual-reset completion event) per outstanding
 * request, and builds the parallel array of waitable event handles with
 * the cancel event appended last.
 */
static void
gst_ks_video_device_prepare_buffers (GstKsVideoDevice * self)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  guint buf_size;
  guint idx;

  g_assert (priv->cur_media_type != NULL);

  gst_ks_video_device_clear_buffers (self);

  priv->requests = g_array_sized_new (FALSE, TRUE, sizeof (ReadRequest),
      priv->num_requests);
  /* one extra slot: the cancel event goes at the end */
  priv->request_events = g_array_sized_new (FALSE, TRUE, sizeof (HANDLE),
      priv->num_requests + 1);

  buf_size = gst_ks_video_device_get_frame_size (self);

  for (idx = 0; idx < G_N_ELEMENTS (priv->spare_buffers); idx++) {
    priv->spare_buffers[idx] = self->allocfunc (buf_size, KS_BUFFER_ALIGNMENT,
        self->allocfunc_data);
  }

  for (idx = 0; idx < priv->num_requests; idx++) {
    ReadRequest req = { 0, };

    req.buf = self->allocfunc (buf_size, KS_BUFFER_ALIGNMENT,
        self->allocfunc_data);

    /* manual-reset event, initially unsignalled */
    req.overlapped.hEvent = CreateEvent (NULL, TRUE, FALSE, NULL);

    g_array_append_val (priv->requests, req);
    g_array_append_val (priv->request_events, req.overlapped.hEvent);
  }

  g_array_append_val (priv->request_events, priv->cancel_event);

  /*
   * REVISIT: Could probably remove this later, for now it's here to help
   * track down the case where we capture old frames. This has been
   * observed with UVC cameras, presumably with some system load.
   */
  priv->last_timestamp = GST_CLOCK_TIME_NONE;
}
/*
 * GstPushSrc::create vfunc — produce one captured video frame.
 *
 * Allocates a downstream buffer via gst_pad_alloc_buffer(), lazily starts
 * the capture worker on first use (handshake through the KS_WORKER_*
 * macros), then reads frames from the device until one can be timestamped,
 * and finally post-processes the frame in place.
 *
 * Returns GST_FLOW_OK with *buffer set on success, or an error flow return
 * (posting a GST_ELEMENT_ERROR where appropriate).
 */
static GstFlowReturn
gst_ks_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstKsVideoSrc *self = GST_KS_VIDEO_SRC (pushsrc);
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  guint buf_size;
  GstCaps *caps;
  GstBuffer *buf = NULL;
  GstFlowReturn result;
  GstClockTime presentation_time;
  gulong error_code;
  gchar *error_str;

  g_assert (priv->device != NULL);

  if (!gst_ks_video_device_has_caps (priv->device))
    goto error_no_caps;

  buf_size = gst_ks_video_device_get_frame_size (priv->device);
  g_assert (buf_size);

  caps = gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (self));
  if (caps == NULL)
    goto error_no_caps;

  result = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self), priv->offset,
      buf_size, caps, &buf);
  gst_caps_unref (caps);
  if (G_UNLIKELY (result != GST_FLOW_OK))
    goto error_alloc_buffer;

  if (G_UNLIKELY (!priv->running)) {
    /* Ask the worker thread to start capturing and block until it reports
     * back whether the pin state change succeeded */
    KS_WORKER_LOCK (priv);
    priv->worker_pending_run = TRUE;
    KS_WORKER_NOTIFY (priv);
    while (priv->worker_pending_run)
      KS_WORKER_WAIT_FOR_RESULT (priv);
    priv->running = priv->worker_run_result;
    KS_WORKER_UNLOCK (priv);

    if (!priv->running)
      goto error_start_capture;
  }

  /* Keep reading until a frame can be timestamped (frames that cannot be
   * timestamped — e.g. stale ones — are retried) */
  do {
    gulong bytes_read;

    result = gst_ks_video_device_read_frame (priv->device,
        GST_BUFFER_DATA (buf), buf_size, &bytes_read, &presentation_time,
        &error_code, &error_str);
    if (G_UNLIKELY (result != GST_FLOW_OK))
      goto error_read_frame;

    GST_BUFFER_SIZE (buf) = bytes_read;
  }
  while (!gst_ks_video_src_timestamp_buffer (self, buf, presentation_time));

  if (G_UNLIKELY (priv->do_stats))
    gst_ks_video_src_update_statistics (self);

  gst_ks_video_device_postprocess_frame (priv->device,
      GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

  *buffer = buf;
  return GST_FLOW_OK;

  /* ERRORS */
error_no_caps:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("not negotiated"), ("maybe setcaps failed?"));

    return GST_FLOW_ERROR;
  }
error_start_capture:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ,
        ("could not start capture"),
        ("failed to change pin state to KSSTATE_RUN"));

    return GST_FLOW_ERROR;
  }
error_alloc_buffer:
  {
    GST_ELEMENT_ERROR (self, CORE, PAD, ("alloc_buffer failed"), (NULL));

    return result;
  }
error_read_frame:
  {
    /* shutdown/EOS flow returns are expected and not reported as errors */
    if (result != GST_FLOW_WRONG_STATE && result != GST_FLOW_UNEXPECTED) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ,
          ("read failed: %s [0x%08x]", error_str, error_code),
          ("gst_ks_video_device_read_frame failed"));
    }

    g_free (error_str);
    gst_buffer_unref (buf);

    return result;
  }
}