/* Answer src queries; only LATENCY is handled locally, everything else
 * is delegated to the base class. */
static gboolean
gst_decklink_video_src_query (GstBaseSrc * bsrc, GstQuery * query)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      const GstDecklinkMode *dmode;
      GstClockTime min_latency, max_latency;

      /* no latency to report before the input is acquired */
      if (!self->input)
        return FALSE;

      g_mutex_lock (&self->lock);
      dmode = gst_decklink_get_mode (self->caps_mode);
      g_mutex_unlock (&self->lock);

      /* minimum is one frame duration (rounded up), maximum is the
       * whole internal frame queue */
      min_latency =
          gst_util_uint64_scale_ceil (GST_SECOND, dmode->fps_d, dmode->fps_n);
      max_latency = self->buffer_size * min_latency;

      gst_query_set_latency (query, TRUE, min_latency, max_latency);
      return TRUE;
    }
    default:
      return GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
  }
}
/* Configure the DeckLink video output for the negotiated caps.
 * Returns FALSE when the caps cannot be parsed or the hardware refuses
 * to enable the output. */
static gboolean
gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  const GstDecklinkMode *dmode;
  HRESULT hr;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&self->info, caps))
    return FALSE;

  dmode = gst_decklink_get_mode (self->mode);
  g_assert (dmode != NULL);

  /* the output keeps the callback; it outlives this call */
  self->output->output->SetScheduledFrameCompletionCallback (new
      GStreamerVideoOutputCallback (self));

  hr = self->output->output->EnableVideoOutput (dmode->mode,
      bmdVideoOutputFlagDefault);
  if (hr != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video output");
    return FALSE;
  }

  g_mutex_lock (&self->output->lock);
  self->output->mode = dmode;
  self->output->video_enabled = TRUE;
  if (self->output->start_scheduled_playback)
    self->output->start_scheduled_playback (self->output->videosink);
  g_mutex_unlock (&self->output->lock);

  return TRUE;
}
/* Acquire the configured DeckLink input and register the capture
 * callbacks; returns FALSE when the device cannot be acquired. */
static gboolean
gst_decklink_video_src_open (GstDecklinkVideoSrc * self)
{
  const GstDecklinkMode *dmode;

  GST_DEBUG_OBJECT (self, "Opening");

  self->input =
      gst_decklink_acquire_nth_input (self->device_number,
      GST_ELEMENT_CAST (self), FALSE);
  if (self->input == NULL) {
    GST_ERROR_OBJECT (self, "Failed to acquire input");
    return FALSE;
  }

  dmode = gst_decklink_get_mode (self->mode);
  g_assert (dmode != NULL);

  g_mutex_lock (&self->input->lock);
  self->input->mode = dmode;
  self->input->got_video_frame = gst_decklink_video_src_got_frame;
  self->input->start_streams = gst_decklink_video_src_start_streams;
  /* clock bookkeeping: clock_epoch accumulates the previous run's last
   * time, carried across open cycles */
  self->input->clock_start_time = GST_CLOCK_TIME_NONE;
  self->input->clock_epoch += self->input->clock_last_time;
  self->input->clock_last_time = 0;
  self->input->clock_offset = 0;
  g_mutex_unlock (&self->input->lock);

  return TRUE;
}
/* DeckLink SDK input callback: invoked when input format auto-detection
 * reports a new display mode and/or colorimetry.  Restarts the streams
 * with the detected mode/pixel format and records them on m_input.
 * Always reports success to the driver. */
virtual HRESULT STDMETHODCALLTYPE
VideoInputFormatChanged (BMDVideoInputFormatChangedEvents,
    IDeckLinkDisplayMode * mode, BMDDetectedVideoInputFormatFlags formatFlags)
{
  BMDPixelFormat pixelFormat = bmdFormat8BitYUV;

  GST_INFO ("Video input format changed");

  /* RGB 4:4:4 input is captured as 8-bit ARGB, everything else as
   * 8-bit YUV */
  if (formatFlags & bmdDetectedVideoInputRGB444)
    pixelFormat = bmdFormat8BitARGB;

  g_mutex_lock (&m_input->lock);
  /* order matters: pause, re-enable with the new mode (keeping format
   * detection on), flush stale queued data, then restart */
  m_input->input->PauseStreams ();
  m_input->input->EnableVideoInput (mode->GetDisplayMode (),
      pixelFormat, bmdVideoInputEnableFormatDetection);
  m_input->input->FlushStreams ();
  m_input->input->StartStreams ();
  m_input->mode =
      gst_decklink_get_mode (gst_decklink_get_mode_enum_from_bmd
      (mode->GetDisplayMode ()));
  m_input->format = pixelFormat;
  g_mutex_unlock (&m_input->lock);

  return S_OK;
}
/* Acquire the configured DeckLink output and register the scheduled
 * playback callback plus the clock bookkeeping state.
 * Returns FALSE when the device cannot be acquired. */
static gboolean
gst_decklink_video_sink_open (GstBaseSink * bsink)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  const GstDecklinkMode *mode;

  /* FIX: this is the open path — the previous message said "Stopping",
   * which made logs misleading */
  GST_DEBUG_OBJECT (self, "Opening");

  self->output =
      gst_decklink_acquire_nth_output (self->device_number,
      GST_ELEMENT_CAST (self), FALSE);
  if (!self->output) {
    GST_ERROR_OBJECT (self, "Failed to acquire output");
    return FALSE;
  }

  mode = gst_decklink_get_mode (self->mode);
  g_assert (mode != NULL);

  g_mutex_lock (&self->output->lock);
  self->output->mode = mode;
  self->output->start_scheduled_playback =
      gst_decklink_video_sink_start_scheduled_playback;
  /* clock bookkeeping: clock_epoch accumulates the previous run's last
   * time, carried across open cycles */
  self->output->clock_start_time = GST_CLOCK_TIME_NONE;
  self->output->clock_epoch += self->output->clock_last_time;
  self->output->clock_last_time = 0;
  self->output->clock_offset = 0;
  g_mutex_unlock (&self->output->lock);

  return TRUE;
}
/* Open the first DeckLink device via the SDK iterator and enable video
 * and audio output (48 kHz, 16-bit, stereo).  Returns FALSE on any
 * driver/hardware failure. */
static gboolean
gst_decklink_sink_start (GstDecklinkSink * decklinksink)
{
  IDeckLinkIterator *iterator;
  HRESULT ret;
  const GstDecklinkMode *mode;
  BMDAudioSampleType sample_depth;

  iterator = CreateDeckLinkIteratorInstance ();
  if (iterator == NULL) {
    GST_ERROR ("no driver");
    return FALSE;
  }

  ret = iterator->Next (&decklinksink->decklink);
  /* FIX: the iterator was never released, leaking a COM reference on
   * every start.  Only the first device is needed, so drop it here,
   * covering the error path below as well. */
  iterator->Release ();
  if (ret != S_OK) {
    GST_ERROR ("no card");
    return FALSE;
  }

  ret = decklinksink->decklink->QueryInterface (IID_IDeckLinkOutput,
      (void **) &decklinksink->output);
  if (ret != S_OK) {
    GST_ERROR ("no output");
    return FALSE;
  }

  decklinksink->output->SetAudioCallback (decklinksink->callback);

  mode = gst_decklink_get_mode (decklinksink->mode);

  ret = decklinksink->output->EnableVideoOutput (mode->mode,
      bmdVideoOutputFlagDefault);
  if (ret != S_OK) {
    GST_ERROR ("failed to enable video output");
    return FALSE;
  }
  //decklinksink->video_enabled = TRUE;

  decklinksink->output->
      SetScheduledFrameCompletionCallback (decklinksink->callback);

  sample_depth = bmdAudioSampleType16bitInteger;
  ret = decklinksink->output->EnableAudioOutput (bmdAudioSampleRate48kHz,
      sample_depth, 2, bmdAudioOutputStreamContinuous);
  if (ret != S_OK) {
    GST_ERROR ("failed to enable audio output");
    return FALSE;
  }

  decklinksink->audio_buffer = gst_buffer_new ();
  decklinksink->num_frames = 0;

  return TRUE;
}
/* Pad query handler for the video src pad.  Only LATENCY is answered
 * here; all other queries go through the default handler. */
static gboolean
gst_decklink_src_video_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (parent);
  const GstDecklinkMode *dmode;
  GstClockTime min_latency, max_latency;

  GST_DEBUG_OBJECT (pad, "query: %" GST_PTR_FORMAT, query);

  if (GST_QUERY_TYPE (query) != GST_QUERY_LATENCY)
    return gst_pad_query_default (pad, parent, query);

  /* device must be open before a latency can be reported */
  if (decklinksrc->decklink == NULL) {
    GST_WARNING_OBJECT (decklinksrc,
        "Can't give latency since device isn't open !");
    return FALSE;
  }

  dmode = gst_decklink_get_mode (decklinksrc->mode);

  /* min latency is the time to capture one frame; max latency is the
   * total duration of the frame buffer (two frames) */
  min_latency =
      gst_util_uint64_scale_int (GST_SECOND, dmode->fps_d, dmode->fps_n);
  max_latency = 2 * min_latency;

  GST_DEBUG_OBJECT (decklinksrc,
      "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
      GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

  /* we are always live */
  gst_query_set_latency (query, TRUE, min_latency, max_latency);
  return TRUE;
}
/* Return the first entry of the mode table whose 8-bit-YUV caps
 * intersect the given caps, or NULL when nothing matches.
 * The scan starts at index 1, skipping entry 0 (presumably the AUTO
 * placeholder — matches the table's other users). */
const GstDecklinkMode *
gst_decklink_find_mode_for_caps (GstCaps * caps)
{
  guint i;

  for (i = 1; i < G_N_ELEMENTS (modes); i++) {
    GstCaps *mode_caps =
        gst_decklink_mode_get_caps ((GstDecklinkModeEnum) i, bmdFormat8BitYUV);
    gboolean matches = gst_caps_can_intersect (caps, mode_caps);

    gst_caps_unref (mode_caps);
    if (matches)
      return gst_decklink_get_mode ((GstDecklinkModeEnum) i);
  }

  return NULL;
}
/* Open the Nth DeckLink device and enable video and audio output
 * (48 kHz, 16-bit, stereo).  Returns FALSE on any failure. */
static gboolean
gst_decklink_sink_start (GstDecklinkSink * decklinksink)
{
  HRESULT ret;
  const GstDecklinkMode *mode;
  BMDAudioSampleType sample_depth;

  decklinksink->decklink = gst_decklink_get_nth_device (decklinksink->device);
  if (!decklinksink->decklink) {
    GST_WARNING ("failed to get device %d", decklinksink->device);
    return FALSE;
  }

  decklinksink->output = gst_decklink_get_nth_output (decklinksink->device);
  /* FIX: the output interface was used unchecked; a device without an
   * output interface would have crashed on the next line */
  if (!decklinksink->output) {
    GST_WARNING ("failed to get output for device %d", decklinksink->device);
    return FALSE;
  }

  decklinksink->output->SetAudioCallback (decklinksink->callback);

  mode = gst_decklink_get_mode (decklinksink->mode);

  ret = decklinksink->output->EnableVideoOutput (mode->mode,
      bmdVideoOutputFlagDefault);
  if (ret != S_OK) {
    GST_WARNING ("failed to enable video output");
    return FALSE;
  }
  //decklinksink->video_enabled = TRUE;

  decklinksink->output->SetScheduledFrameCompletionCallback (decklinksink->
      callback);

  sample_depth = bmdAudioSampleType16bitInteger;
  ret = decklinksink->output->EnableAudioOutput (bmdAudioSampleRate48kHz,
      sample_depth, 2, bmdAudioOutputStreamContinuous);
  if (ret != S_OK) {
    GST_WARNING ("failed to enable audio output");
    return FALSE;
  }

  decklinksink->audio_adapter = gst_adapter_new ();
  decklinksink->num_frames = 0;

  return TRUE;
}
/* Start capturing: open the DeckLink device (advancing to the requested
 * subdevice), wire up the capture delegate, select the video/audio
 * connectors, enable video+audio input and kick off the streaming task.
 *
 * NOTE(review): `iterator` and `config` are never Release()d, leaking a
 * COM reference per start (and on every error path) — worth fixing.
 */
static gboolean
gst_decklink_src_start (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
  IDeckLinkIterator *iterator;
  DeckLinkCaptureDelegate *delegate;
  //IDeckLinkDisplayModeIterator *mode_iterator;
  //IDeckLinkDisplayMode *mode;
  BMDAudioSampleType sample_depth;
  int channels;
  HRESULT ret;
  const GstDecklinkMode *mode;
  IDeckLinkConfiguration *config;
  BMDVideoConnection conn;
  BMDAudioConnection aconn;
  int i;

  GST_DEBUG_OBJECT (decklinksrc, "start");

  iterator = CreateDeckLinkIteratorInstance ();
  if (iterator == NULL) {
    GST_ERROR ("no driver");
    return FALSE;
  }

  ret = iterator->Next (&decklinksrc->decklink);
  if (ret != S_OK) {
    GST_ERROR ("no card");
    return FALSE;
  }
  /* Next() was already called once above, so this skips `subdevice`
   * additional cards to land on the requested one */
  for (i = 0; i < decklinksrc->subdevice; i++) {
    ret = iterator->Next (&decklinksrc->decklink);
    if (ret != S_OK) {
      GST_ERROR ("no card");
      return FALSE;
    }
  }

  ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkInput,
      (void **) &decklinksrc->input);
  if (ret != S_OK) {
    GST_ERROR ("query interface failed");
    return FALSE;
  }

  delegate = new DeckLinkCaptureDelegate ();
  delegate->priv = decklinksrc;
  decklinksrc->input->SetCallback (delegate);

  ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkConfiguration,
      (void **) &config);
  if (ret != S_OK) {
    GST_ERROR ("query interface failed");
    return FALSE;
  }

  /* map the connection property to BMD connector values; analog video
   * connectors default to analog audio, digital ones to embedded audio */
  switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
      conn = bmdVideoConnectionSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_HDMI:
      conn = bmdVideoConnectionHDMI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
      conn = bmdVideoConnectionOpticalSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
      conn = bmdVideoConnectionComponent;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
      conn = bmdVideoConnectionComposite;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
      conn = bmdVideoConnectionSVideo;
      aconn = bmdAudioConnectionAnalog;
      break;
  }

  ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (input source)");
    return FALSE;
  }

  if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
    /* 7.5 IRE pedestal for composite input */
    ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
        bmdAnalogVideoFlagCompositeSetup75);
    if (ret != S_OK) {
      GST_ERROR ("set configuration (composite setup)");
      return FALSE;
    }
  }

  /* an explicit audio-connection property overrides the default chosen
   * from the video connector above */
  switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
      aconn = bmdAudioConnectionAESEBU;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
      aconn = bmdAudioConnectionAnalog;
      break;
  }
  ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (audio input connection)");
    return FALSE;
  }

#if 0
  ret = decklinksrc->input->GetDisplayModeIterator (&mode_iterator);
  if (ret != S_OK) {
    GST_ERROR ("failed to get display mode iterator");
    return FALSE;
  }

  i = 0;
  while (mode_iterator->Next (&mode) == S_OK) {
    const char *mode_name;

    mode->GetName (&mode_name);

    GST_DEBUG ("%d: mode name: %s", i, mode_name);

    mode->Release ();
    i++;
  }
#endif

  mode = gst_decklink_get_mode (decklinksrc->mode);

  ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  /* fixed capture format: 48 kHz, 16-bit, stereo */
  sample_depth = bmdAudioSampleType16bitInteger;
  channels = 2;
  ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
      sample_depth, channels);
  if (ret != S_OK) {
    /* NOTE(review): message should say "audio", not "video" */
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  ret = decklinksrc->input->StartStreams ();
  if (ret != S_OK) {
    GST_ERROR ("start streams failed");
    return FALSE;
  }

  g_static_rec_mutex_lock (&decklinksrc->task_mutex);
  gst_task_start (decklinksrc->task);
  g_static_rec_mutex_unlock (&decklinksrc->task_mutex);

  return TRUE;
}
/* Configure the DeckLink video output for the negotiated caps.
 * In AUTO mode the output mode is derived from the caps themselves;
 * otherwise the element's configured mode is used.  Also selects the
 * timecode flag (VITC vs RP188) passed to EnableVideoOutput. */
static gboolean
gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  const GstDecklinkMode *mode;
  HRESULT ret;
  BMDVideoOutputFlags flags;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&self->info, caps))
    return FALSE;

  /* the output keeps the callback reference */
  self->output->output->SetScheduledFrameCompletionCallback (new
      GStreamerVideoOutputCallback (self));

  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    BMDPixelFormat f;

    mode = gst_decklink_find_mode_and_format_for_caps (caps, &f);
    if (mode == NULL) {
      GST_WARNING_OBJECT (self,
          "Failed to find compatible mode for caps %" GST_PTR_FORMAT, caps);
      return FALSE;
    }
    /* a fixed video-format property must agree with the format implied
     * by the caps */
    if (self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO &&
        gst_decklink_pixel_format_from_type (self->video_format) != f) {
      GST_WARNING_OBJECT (self, "Failed to set pixel format to %d",
          self->video_format);
      return FALSE;
    }
  } else {
    /* We don't have to give the format in EnableVideoOutput. Therefore,
     * even if it's AUTO, we have it stored in self->info and set it in
     * gst_decklink_video_sink_prepare */
    mode = gst_decklink_get_mode (self->mode);
    g_assert (mode != NULL);
  };

  /* The timecode_format itself is used when we embed the actual timecode data
   * into the frame. Now we only need to know which of the two standards the
   * timecode format will adhere to: VITC or RP188, and send the appropriate
   * flag to EnableVideoOutput. The exact format is specified later.
   *
   * Note that this flag will have no effect in practice if the video stream
   * does not contain timecode metadata.
   */
  if (self->timecode_format == GST_DECKLINK_TIMECODE_FORMAT_VITC ||
      self->timecode_format == GST_DECKLINK_TIMECODE_FORMAT_VITCFIELD2)
    flags = bmdVideoOutputVITC;
  else
    flags = bmdVideoOutputRP188;

  ret = self->output->output->EnableVideoOutput (mode->mode, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video output: 0x%08x", ret);
    return FALSE;
  }

  g_mutex_lock (&self->output->lock);
  self->output->mode = mode;
  self->output->video_enabled = TRUE;
  if (self->output->start_scheduled_playback)
    self->output->start_scheduled_playback (self->output->videosink);
  g_mutex_unlock (&self->output->lock);

  return TRUE;
}
/* Video chain function (0.10): copy the incoming buffer into a DeckLink
 * frame and schedule it for playback, throttling to at most 3 queued
 * frames.  Starts scheduled playback on the first frame. */
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstDecklinkSink *decklinksink;
  IDeckLinkMutableVideoFrame *frame;
  void *data;
  GstFlowReturn ret;
  HRESULT hr;
  const GstDecklinkMode *mode;

  decklinksink = GST_DECKLINK_SINK (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (decklinksink, "chain");

#if 0
  if (!decklinksink->video_enabled) {
    HRESULT ret;
    ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
        bmdVideoOutputFlagDefault);
    if (ret != S_OK) {
      GST_ERROR ("failed to enable video output");
      //return FALSE;
    }
    decklinksink->video_enabled = TRUE;
  }
#endif

  mode = gst_decklink_get_mode (decklinksink->mode);

  /* FIX: CreateVideoFrame () can fail; the old code dereferenced the
   * frame pointer unconditionally */
  hr = decklinksink->output->CreateVideoFrame (mode->width,
      mode->height, mode->width * 2, bmdFormat8BitYUV, bmdFrameFlagDefault,
      &frame);
  if (hr != S_OK) {
    GST_ERROR ("failed to create video frame");
    gst_buffer_unref (buffer);
    gst_object_unref (decklinksink);
    return GST_FLOW_ERROR;
  }

  frame->GetBytes (&data);
  memcpy (data, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
  gst_buffer_unref (buffer);

  /* throttle: wait until fewer than 3 frames are queued (or we stop) */
  g_mutex_lock (decklinksink->mutex);
  while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
    g_cond_wait (decklinksink->cond, decklinksink->mutex);
  }
  if (!decklinksink->stop) {
    decklinksink->queued_frames++;
  }
  g_mutex_unlock (decklinksink->mutex);

  if (!decklinksink->stop) {
    decklinksink->output->ScheduleVideoFrame (frame,
        decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
    decklinksink->num_frames++;

    if (!decklinksink->sched_started) {
      decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
      decklinksink->sched_started = TRUE;
    }

    ret = GST_FLOW_OK;
  } else {
    ret = GST_FLOW_WRONG_STATE;
  }

  /* ScheduleVideoFrame keeps its own reference */
  frame->Release ();

  gst_object_unref (decklinksink);

  return ret;
}
/* FIXME: post error messages for the misc. failures */
/* Start capturing (1.0 variant): look up the Nth device and its input/
 * config interfaces via the shared device table, wire up the capture
 * delegate, select connectors, enable video+audio input and start the
 * streaming task. */
static gboolean
gst_decklink_src_start (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
  DeckLinkCaptureDelegate *delegate;
  BMDAudioSampleType sample_depth;
  int channels;
  HRESULT ret;
  const GstDecklinkMode *mode;
  IDeckLinkConfiguration *config;
  BMDVideoConnection conn;
  BMDAudioConnection aconn;

  GST_DEBUG_OBJECT (decklinksrc, "start");

  decklinksrc->decklink = gst_decklink_get_nth_device (decklinksrc->device);
  if (decklinksrc->decklink == NULL) {
    return FALSE;
  }

  decklinksrc->input = gst_decklink_get_nth_input (decklinksrc->device);

  delegate = new DeckLinkCaptureDelegate ();
  delegate->priv = decklinksrc;
  ret = decklinksrc->input->SetCallback (delegate);
  if (ret != S_OK) {
    GST_ERROR ("set callback failed (input source)");
    return FALSE;
  }

  decklinksrc->config = gst_decklink_get_nth_config (decklinksrc->device);
  config = decklinksrc->config;

  /* map the connection property to BMD connector values; analog video
   * connectors default to analog audio, digital ones to embedded audio */
  switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
      conn = bmdVideoConnectionSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_HDMI:
      conn = bmdVideoConnectionHDMI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
      conn = bmdVideoConnectionOpticalSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
      conn = bmdVideoConnectionComponent;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
      conn = bmdVideoConnectionComposite;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
      conn = bmdVideoConnectionSVideo;
      aconn = bmdAudioConnectionAnalog;
      break;
  }

  ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (input source)");
    return FALSE;
  }

  if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
    /* 7.5 IRE pedestal for composite input */
    ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
        bmdAnalogVideoFlagCompositeSetup75);
    if (ret != S_OK) {
      GST_ERROR ("set configuration (composite setup)");
      return FALSE;
    }
  }

  /* an explicit audio-connection property overrides the default chosen
   * from the video connector above */
  switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
      /* set above */
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
      aconn = bmdAudioConnectionAESEBU;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
      aconn = bmdAudioConnectionAnalog;
      break;
  }
  ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (audio input connection)");
    return FALSE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  /* fixed capture format: 48 kHz, 16-bit, stereo */
  sample_depth = bmdAudioSampleType16bitInteger;
  channels = 2;
  ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
      sample_depth, channels);
  if (ret != S_OK) {
    /* NOTE(review): message should say "audio", not "video" */
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  ret = decklinksrc->input->StartStreams ();
  if (ret != S_OK) {
    GST_ERROR ("start streams failed");
    return FALSE;
  }

  g_rec_mutex_lock (&decklinksrc->task_mutex);
  gst_task_start (decklinksrc->task);
  g_rec_mutex_unlock (&decklinksrc->task_mutex);

  return TRUE;
}
/* Capture callback: convert the capture time to the external clock,
 * build a CaptureFrame (with timecode) and queue it for create(),
 * dropping the oldest frames when the queue exceeds buffer_size.
 * Runs on the driver's capture thread; queue access is under self->lock. */
static void
gst_decklink_video_src_got_frame (GstElement * element,
    IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
    GstClockTime capture_time, GstClockTime capture_duration, guint hours,
    guint minutes, guint seconds, guint frames, BMDTimecodeFlags bflags)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);

  GST_LOG_OBJECT (self, "Got video frame at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  gst_decklink_video_src_convert_to_external_clock (self, &capture_time,
      &capture_duration);

  GST_LOG_OBJECT (self, "Actual timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  g_mutex_lock (&self->lock);
  if (!self->flushing) {
    CaptureFrame *f;
    const GstDecklinkMode *bmode;
    GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE;
    guint field_count = 0;

    /* bounded queue: evict the oldest frames first */
    while (g_queue_get_length (&self->current_frames) >= self->buffer_size) {
      f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
      GST_WARNING_OBJECT (self, "Dropping old frame at %" GST_TIME_FORMAT,
          GST_TIME_ARGS (f->capture_time));
      capture_frame_free (f);
    }

    f = (CaptureFrame *) g_malloc0 (sizeof (CaptureFrame));
    f->frame = frame;
    f->capture_time = capture_time;
    f->capture_duration = capture_duration;
    f->mode = mode;
    f->format = frame->GetPixelFormat ();

    /* derive timecode flags from the mode and the BMD timecode flags */
    bmode = gst_decklink_get_mode (mode);
    if (bmode->interlaced) {
      flags =
          (GstVideoTimeCodeFlags) (flags |
          GST_VIDEO_TIME_CODE_FLAGS_INTERLACED);
      if (bflags & bmdTimecodeFieldMark)
        field_count = 2;
      else
        field_count = 1;
    }
    if (bflags & bmdTimecodeIsDropFrame)
      flags =
          (GstVideoTimeCodeFlags) (flags |
          GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
    f->tc =
        gst_video_time_code_new (bmode->fps_n, bmode->fps_d, NULL, flags,
        hours, minutes, seconds, frames, field_count);

    /* the queue holds its own reference to the driver frame */
    frame->AddRef ();
    g_queue_push_tail (&self->current_frames, f);
    g_cond_signal (&self->cond);
  }
  g_mutex_unlock (&self->lock);
}
/* Negotiate caps on the src pad: reject renegotiation unless running in
 * AUTO mode, apply the configured input connection, verify/enable the
 * hardware's format auto-detection when in AUTO mode, and finally enable
 * video input with the chosen mode, format and flags. */
static gboolean
gst_decklink_video_src_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstCaps *current_caps;
  const GstDecklinkMode *mode;
  BMDVideoInputFlags flags;
  HRESULT ret;
  BMDPixelFormat format;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  /* already negotiated: only AUTO mode may accept new (different) caps;
   * identical caps are always fine */
  if ((current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc)))) {
    GST_DEBUG_OBJECT (self, "Pad already has caps %" GST_PTR_FORMAT, caps);

    if (!gst_caps_is_equal (caps, current_caps)) {
      GST_DEBUG_OBJECT (self, "New caps, reconfiguring");
      gst_caps_unref (current_caps);
      if (self->mode == GST_DECKLINK_MODE_AUTO) {
        return TRUE;
      } else {
        return FALSE;
      }
    } else {
      gst_caps_unref (current_caps);
      return TRUE;
    }
  }

  if (!gst_video_info_from_caps (&self->info, caps))
    return FALSE;

  /* push the configured physical connection down to the hardware */
  if (self->input->config && self->connection != GST_DECKLINK_CONNECTION_AUTO) {
    ret = self->input->config->SetInt (bmdDeckLinkConfigVideoInputConnection,
        gst_decklink_get_connection (self->connection));
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set configuration (input source): 0x%08x", ret);
      return FALSE;
    }

    if (self->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
      /* 7.5 IRE pedestal for composite input */
      ret = self->input->config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
          bmdAnalogVideoFlagCompositeSetup75);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self,
            "Failed to set configuration (composite setup): 0x%08x", ret);
        return FALSE;
      }
    }
  }

  flags = bmdVideoInputFlagDefault;
  /* AUTO mode requires the hardware to support input format detection */
  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    bool autoDetection = false;

    if (self->input->attributes) {
      ret =
          self->input->
          attributes->GetFlag (BMDDeckLinkSupportsInputFormatDetection,
          &autoDetection);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self,
            "Failed to get attribute (autodetection): 0x%08x", ret);
        return FALSE;
      }
      if (autoDetection)
        flags |= bmdVideoInputEnableFormatDetection;
    }
    if (!autoDetection) {
      GST_ERROR_OBJECT (self, "Failed to activate auto-detection");
      return FALSE;
    }
  }

  mode = gst_decklink_get_mode (self->mode);
  g_assert (mode != NULL);

  format = self->caps_format;
  ret = self->input->input->EnableVideoInput (mode->mode, format, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video input: 0x%08x", ret);
    return FALSE;
  }

  g_mutex_lock (&self->input->lock);
  self->input->mode = mode;
  self->input->video_enabled = TRUE;
  if (self->input->start_streams)
    self->input->start_streams (self->input->videosrc);
  g_mutex_unlock (&self->input->lock);

  return TRUE;
}
/* Video chain function (1.0): copy the incoming buffer into a DeckLink
 * frame and schedule it for playback, throttling to at most 3 queued
 * frames.  Starts scheduled playback on the first frame. */
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDecklinkSink *decklinksink;
  IDeckLinkMutableVideoFrame *frame;
  void *data;
  GstFlowReturn ret;
  HRESULT hr;
  const GstDecklinkMode *mode;

  decklinksink = GST_DECKLINK_SINK (parent);

#if 0
  if (!decklinksink->video_enabled) {
    HRESULT ret;
    ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
        bmdVideoOutputFlagDefault);
    if (ret != S_OK) {
      GST_WARNING ("failed to enable video output");
      //return FALSE;
    }
    decklinksink->video_enabled = TRUE;
  }
#endif

  mode = gst_decklink_get_mode (decklinksink->mode);

  /* FIX: CreateVideoFrame () can fail; the old code dereferenced the
   * frame pointer unconditionally */
  hr = decklinksink->output->CreateVideoFrame (mode->width,
      mode->height, mode->width * 2, decklinksink->pixel_format,
      bmdFrameFlagDefault, &frame);
  if (hr != S_OK) {
    GST_WARNING ("failed to create video frame");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  frame->GetBytes (&data);
  gst_buffer_extract (buffer, 0, data, gst_buffer_get_size (buffer));
  gst_buffer_unref (buffer);

  /* throttle: wait until fewer than 3 frames are queued (or we stop) */
  g_mutex_lock (&decklinksink->mutex);
  while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
    g_cond_wait (&decklinksink->cond, &decklinksink->mutex);
  }
  if (!decklinksink->stop) {
    decklinksink->queued_frames++;
  }
  g_mutex_unlock (&decklinksink->mutex);

  if (!decklinksink->stop) {
    decklinksink->output->ScheduleVideoFrame (frame,
        decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
    decklinksink->num_frames++;

    if (!decklinksink->sched_started) {
      decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
      decklinksink->sched_started = TRUE;
    }

    ret = GST_FLOW_OK;
  } else {
    ret = GST_FLOW_FLUSHING;
  }

  /* ScheduleVideoFrame keeps its own reference */
  frame->Release ();

  return ret;
}
/* Streaming task (1.0): wait for the capture callback to hand over a
 * video frame (and optionally an audio packet), wrap or copy the video
 * data into a GstBuffer, timestamp both streams from the frame/sample
 * counters and push them downstream.  Pauses the task (with EOS/error
 * handling) when a fatal flow return is seen.
 *
 * NOTE(review): the condvar only waits for a video frame, so
 * `audio_frame` can be NULL; the audio branch below dereferences it
 * without a NULL check when the audio pad is linked — verify. */
static void
gst_decklink_src_task (void *priv)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
  GstBuffer *buffer;
  GstBuffer *audio_buffer;
  IDeckLinkVideoInputFrame *video_frame;
  IDeckLinkAudioInputPacket *audio_frame;
  void *data;
  gsize data_size;
  int n_samples;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;
  gboolean discont = FALSE;

  GST_DEBUG_OBJECT (decklinksrc, "task");

  /* take ownership of the pending frames from the capture callback */
  g_mutex_lock (&decklinksrc->mutex);
  while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
    g_cond_wait (&decklinksrc->cond, &decklinksrc->mutex);
  }
  video_frame = decklinksrc->video_frame;
  audio_frame = decklinksrc->audio_frame;
  decklinksrc->video_frame = NULL;
  decklinksrc->audio_frame = NULL;
  g_mutex_unlock (&decklinksrc->mutex);

  if (decklinksrc->stop) {
    if (video_frame)
      video_frame->Release ();
    if (audio_frame)
      audio_frame->Release ();
    GST_DEBUG ("stopping task");
    return;
  }

  /* warning on dropped frames */
  /* FIXME: post QoS message */
  if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
    GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
        ("Dropped %d frame(s), for a total of %d frame(s)",
            decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
            decklinksrc->dropped_frames), (NULL));
    decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    /* FIXME: discont = TRUE; ? */
  }

  if (!decklinksrc->started) {
    gst_decklink_src_send_initial_events (decklinksrc);
    decklinksrc->started = TRUE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  video_frame->GetBytes (&data);

  data_size = mode->width * mode->height * 2;

  if (decklinksrc->copy_data) {
    /* copy into a fresh buffer and release the driver frame now */
    buffer = gst_buffer_new_and_alloc (data_size);

    gst_buffer_fill (buffer, 0, data, data_size);

    video_frame->Release ();
  } else {
    /* zero-copy: wrap the driver memory, releasing the frame (and the
     * input reference pinned below) when the buffer is freed */
    VideoFrame *vf;

    vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

    buffer = gst_buffer_new_wrapped_full ((GstMemoryFlags) 0, data, data_size,
        0, data_size, vf, (GDestroyNotify) video_frame_free);

    vf->frame = video_frame;
    vf->input = decklinksrc->input;
    vf->input->AddRef ();
  }

  /* timestamps derived from the running frame counter; duration is the
   * difference of consecutive frame timestamps to avoid rounding drift */
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
      mode->fps_d, mode->fps_n);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
      mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
  GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;       /* FIXME: +1? */
  /* FIXME: set video meta */

  if (decklinksrc->frame_num == 0)
    discont = TRUE;

  if (discont)
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);

  /* FIXME: proper flow aggregation with audio flow */
  ret = gst_pad_push (decklinksrc->videosrcpad, buffer);
  if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
          ret == GST_FLOW_FLUSHING)) {
    GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
        ("Internal data stream error."),
        ("stream stopped, reason %s", gst_flow_get_name (ret)));
    goto pause;
  }

  if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
    n_samples = audio_frame->GetSampleFrameCount ();
    audio_frame->GetBytes (&data);
    /* 2 channels x 2 bytes per 16-bit sample */
    audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
    gst_buffer_fill (audio_buffer, 0, data, n_samples * 2 * 2);

    GST_BUFFER_TIMESTAMP (audio_buffer) =
        gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
        1, 48000);
    /* FIXME: should be next_timestamp - timestamp for perfect stream */
    GST_BUFFER_DURATION (audio_buffer) =
        gst_util_uint64_scale_int (n_samples * GST_SECOND, 1, 48000);
    GST_BUFFER_OFFSET (audio_buffer) = decklinksrc->num_audio_samples;
    GST_BUFFER_OFFSET_END (audio_buffer) =
        GST_BUFFER_OFFSET (audio_buffer) + n_samples;
    decklinksrc->num_audio_samples += n_samples;

    /* FIXME: proper flow aggregation with video flow */
    ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
    if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
            ret == GST_FLOW_FLUSHING)) {
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      goto pause;
    }
  }

done:

  if (audio_frame)
    audio_frame->Release ();

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    GstEvent *event = NULL;

    GST_DEBUG_OBJECT (decklinksrc, "pausing task, reason %s", reason);

    gst_task_pause (decklinksrc->task);
    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic (very crude, we don't even keep a GstSegment) */
      event = gst_event_new_eos ();
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first.
       * Also don't do this for FLUSHING because it happens
       * due to flushing and posting an error message because of
       * that is the wrong thing to do, e.g. when we're doing
       * a flushing seek. */
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, ret));
    }
    if (event != NULL) {
      GST_INFO_OBJECT (decklinksrc->videosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->videosrcpad, gst_event_ref (event));
      GST_INFO_OBJECT (decklinksrc->audiosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->audiosrcpad, event);
    }
    goto done;
  }
}
static void gst_decklink_src_task (void *priv) { GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv); GstBuffer *buffer; GstBuffer *audio_buffer; IDeckLinkVideoInputFrame *video_frame; IDeckLinkAudioInputPacket *audio_frame; void *data; int n_samples; GstFlowReturn ret; const GstDecklinkMode *mode; GST_DEBUG_OBJECT (decklinksrc, "task"); g_mutex_lock (decklinksrc->mutex); while (decklinksrc->video_frame == NULL && !decklinksrc->stop) { g_cond_wait (decklinksrc->cond, decklinksrc->mutex); } video_frame = decklinksrc->video_frame; audio_frame = decklinksrc->audio_frame; decklinksrc->video_frame = NULL; decklinksrc->audio_frame = NULL; g_mutex_unlock (decklinksrc->mutex); if (decklinksrc->stop) { GST_DEBUG ("stopping task"); return; } /* warning on dropped frames */ if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) { GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ, ("Dropped %d frame(s), for a total of %d frame(s)", decklinksrc->dropped_frames - decklinksrc->dropped_frames_old, decklinksrc->dropped_frames), (NULL)); decklinksrc->dropped_frames_old = decklinksrc->dropped_frames; } mode = gst_decklink_get_mode (decklinksrc->mode); video_frame->GetBytes (&data); if (decklinksrc->copy_data) { buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2); memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2); video_frame->Release (); } else { buffer = gst_buffer_new (); GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2; GST_BUFFER_DATA (buffer) = (guint8 *) data; GST_BUFFER_FREE_FUNC (buffer) = video_frame_free; GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame; } GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND, mode->fps_d, mode->fps_n); GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND, mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer); GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num; 
GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num; if (decklinksrc->frame_num == 0) { GstEvent *event; gboolean ret; GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); ret = gst_pad_push_event (decklinksrc->videosrcpad, event); if (!ret) { GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret); return; } } if (decklinksrc->video_caps == NULL) { decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode); } gst_buffer_set_caps (buffer, decklinksrc->video_caps); ret = gst_pad_push (decklinksrc->videosrcpad, buffer); if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); } if (gst_pad_is_linked (decklinksrc->audiosrcpad)) { n_samples = audio_frame->GetSampleFrameCount (); audio_frame->GetBytes (&data); audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2); memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2); GST_BUFFER_TIMESTAMP (audio_buffer) = gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND, 1, 48000); GST_BUFFER_DURATION (audio_buffer) = gst_util_uint64_scale_int ((decklinksrc->num_audio_samples + n_samples) * GST_SECOND, 1, 48000) - GST_BUFFER_TIMESTAMP (audio_buffer); decklinksrc->num_audio_samples += n_samples; if (decklinksrc->audio_caps == NULL) { decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int", "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, "signed", G_TYPE_BOOLEAN, TRUE, "depth", G_TYPE_INT, 16, "width", G_TYPE_INT, 16, "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL); } gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps); ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer); if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); } } audio_frame->Release (); }