Example #1
static gboolean
gst_decklink_src_stop (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);

  gst_task_stop (decklinksrc->task);

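  /* wake the streaming task in case it is waiting for a frame,
   * so it can see the stop flag and exit before we join it */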
  g_mutex_lock (&decklinksrc->mutex);
  decklinksrc->stop = TRUE;
  g_cond_signal (&decklinksrc->cond);
  g_mutex_unlock (&decklinksrc->mutex);

  gst_task_join (decklinksrc->task);

  decklinksrc->input->StopStreams ();
  decklinksrc->input->DisableVideoInput ();
  decklinksrc->input->DisableAudioInput ();

  g_list_free_full (decklinksrc->pending_events,
      (GDestroyNotify) gst_mini_object_unref);
  decklinksrc->pending_events = NULL;
  decklinksrc->have_events = FALSE;
  decklinksrc->pending_eos = FALSE;

  return TRUE;
}
void
gst_decklink_src_finalize (GObject * object)
{
  GstDecklinkSrc *decklinksrc;

  g_return_if_fail (GST_IS_DECKLINK_SRC (object));
  decklinksrc = GST_DECKLINK_SRC (object);

  /* clean up object here */

  g_cond_clear (&decklinksrc->cond);
  g_mutex_clear (&decklinksrc->mutex);
  gst_task_set_lock (decklinksrc->task, NULL);
  g_object_unref (decklinksrc->task);

#ifdef _MSC_VER
  /* signal the COM thread that it should uninitialize COM */
  if (decklinksrc->comInitialized) {
    g_mutex_lock (&decklinksrc->com_deinit_lock);
    g_cond_signal (&decklinksrc->com_uninitialize);
    g_cond_wait (&decklinksrc->com_uninitialized, &decklinksrc->com_deinit_lock);
    g_mutex_unlock (&decklinksrc->com_deinit_lock);
  }

  g_mutex_clear (&decklinksrc->com_init_lock);
  g_mutex_clear (&decklinksrc->com_deinit_lock);
  g_cond_clear (&decklinksrc->com_initialized);
  g_cond_clear (&decklinksrc->com_uninitialize);
  g_cond_clear (&decklinksrc->com_uninitialized);
#endif /* _MSC_VER */

  g_rec_mutex_clear (&decklinksrc->task_mutex);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_decklink_src_stop (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);

  gst_task_stop (decklinksrc->task);

  g_mutex_lock (decklinksrc->mutex);
  decklinksrc->stop = TRUE;
  g_cond_signal (decklinksrc->cond);
  g_mutex_unlock (decklinksrc->mutex);

  gst_task_join (decklinksrc->task);

  decklinksrc->input->StopStreams ();
  decklinksrc->input->DisableVideoInput ();
  decklinksrc->input->DisableAudioInput ();

  decklinksrc->input->Release ();
  decklinksrc->input = NULL;

  decklinksrc->decklink->Release ();
  decklinksrc->decklink = NULL;

  return TRUE;
}
void
gst_decklink_src_get_property (GObject * object, guint property_id,
                               GValue * value, GParamSpec * pspec)
{
    GstDecklinkSrc *decklinksrc;

    g_return_if_fail (GST_IS_DECKLINK_SRC (object));
    decklinksrc = GST_DECKLINK_SRC (object);

    switch (property_id) {
    case PROP_MODE:
        g_value_set_enum (value, decklinksrc->mode);
        break;
    case PROP_CONNECTION:
        g_value_set_enum (value, decklinksrc->connection);
        break;
    case PROP_AUDIO_INPUT:
        g_value_set_enum (value, decklinksrc->audio_connection);
        break;
    case PROP_SUBDEVICE:
        g_value_set_int (value, decklinksrc->subdevice);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
        break;
    }
}
void
gst_decklink_src_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDecklinkSrc *decklinksrc;

  decklinksrc = GST_DECKLINK_SRC (object);

  switch (property_id) {
    case PROP_MODE:
      decklinksrc->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
      break;
    case PROP_CONNECTION:
      decklinksrc->connection =
          (GstDecklinkConnectionEnum) g_value_get_enum (value);
      break;
    case PROP_AUDIO_INPUT:
      decklinksrc->audio_connection =
          (GstDecklinkAudioConnectionEnum) g_value_get_enum (value);
      break;
    case PROP_DEVICE:
      decklinksrc->device = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
static void
gst_decklink_src_audio_src_fixatecaps (GstPad * pad, GstCaps * caps)
{
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "fixatecaps");


    gst_object_unref (decklinksrc);
}
static gboolean
gst_decklink_src_audio_src_activatepull (GstPad * pad, gboolean active)
{
  GstDecklinkSrc *decklinksrc;

  decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (decklinksrc, "activatepull");

  gst_object_unref (decklinksrc);
  return FALSE;
}
Example #8
HRESULT
DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame *
    videoFrame, IDeckLinkAudioInputPacket * audioFrame)
{
  GstDecklinkSrc *decklinksrc;

  g_return_val_if_fail (priv != NULL, S_OK);
  g_return_val_if_fail (GST_IS_DECKLINK_SRC (priv), S_OK);

  decklinksrc = GST_DECKLINK_SRC (priv);

  // Handle Video Frame
  if (videoFrame) {
    if (videoFrame->GetFlags () & bmdFrameHasNoInputSource) {
      GST_DEBUG ("Frame received - No input signal detected");
    } else {
      const char *timecodeString = NULL;
      if (g_timecodeFormat != 0) {
        IDeckLinkTimecode *timecode;
        if (videoFrame->GetTimecode (g_timecodeFormat, &timecode) == S_OK) {
          timecode->GetString (&timecodeString);
          CONVERT_COM_STRING (timecodeString);
        }
      }

      GST_DEBUG ("Frame received [%s] - %s - Size: %li bytes",
          timecodeString != NULL ? timecodeString : "No timecode",
          "Valid Frame", videoFrame->GetRowBytes () * videoFrame->GetHeight ());

      if (timecodeString)
        FREE_COM_STRING (timecodeString);

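      /* hand the frame to the streaming task; if the previous frame has not
       * been picked up yet, count this one as dropped instead of queueing it */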
      g_mutex_lock (decklinksrc->mutex);
      if (decklinksrc->video_frame != NULL) {
        decklinksrc->dropped_frames++;
      } else {
        videoFrame->AddRef ();
        decklinksrc->video_frame = videoFrame;
        if (audioFrame) {
          audioFrame->AddRef ();
          decklinksrc->audio_frame = audioFrame;
        }
      }

      /* increment regardless of whether the frame was dropped or not */
      decklinksrc->frame_num++;

      g_cond_signal (decklinksrc->cond);
      g_mutex_unlock (decklinksrc->mutex);
    }
  }
  return S_OK;
}
Example #9
/* events sent to this element directly, mainly from the application */
static gboolean
gst_decklink_src_send_event (GstElement * element, GstEvent * event)
{
  GstDecklinkSrc *src;
  gboolean result = FALSE;

  src = GST_DECKLINK_SRC (element);

  GST_DEBUG_OBJECT (src, "handling event %p %" GST_PTR_FORMAT, event, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      g_atomic_int_set (&src->pending_eos, TRUE);
      GST_INFO_OBJECT (src, "EOS pending");
      result = TRUE;
      break;
    case GST_EVENT_TAG:
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    case GST_EVENT_CUSTOM_BOTH:
      /* Insert TAG, CUSTOM_DOWNSTREAM, CUSTOM_BOTH in the dataflow */
      GST_OBJECT_LOCK (src);
      src->pending_events = g_list_append (src->pending_events, event);
      g_atomic_int_set (&src->have_events, TRUE);
      GST_OBJECT_UNLOCK (src);
      event = NULL;
      result = TRUE;
      break;
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
    case GST_EVENT_CUSTOM_BOTH_OOB:
      /* insert a random custom event into the pipeline */
      GST_DEBUG_OBJECT (src, "pushing custom OOB event downstream");
      result = gst_pad_push_event (src->videosrcpad, gst_event_ref (event));
      result |= gst_pad_push_event (src->audiosrcpad, event);
      /* we gave away the ref to the event in the push */
      event = NULL;
      break;
    case GST_EVENT_CUSTOM_UPSTREAM:
      /* drop */
    case GST_EVENT_SEGMENT:
      /* sending random SEGMENT downstream can break sync - drop */
    default:
      GST_LOG_OBJECT (src, "dropping %s event", GST_EVENT_TYPE_NAME (event));
      break;
  }

  /* if we still have a ref to the event, unref it now */
  if (event)
    gst_event_unref (event);

  return result;
}
static gboolean
gst_decklink_src_video_src_acceptcaps (GstPad * pad, GstCaps * caps)
{
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "acceptcaps");


    gst_object_unref (decklinksrc);
    return TRUE;
}
static GstPadLinkReturn
gst_decklink_src_audio_src_link (GstPad * pad, GstPad * peer)
{
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "link");


    gst_object_unref (decklinksrc);
    return GST_PAD_LINK_OK;
}
static gboolean
gst_decklink_src_video_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstDecklinkSrc *decklinksrc;
  gboolean ret = FALSE;

  decklinksrc = GST_DECKLINK_SRC (parent);

  GST_DEBUG_OBJECT (pad, "query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      GstClockTime min_latency, max_latency;
      const GstDecklinkMode *mode;

      /* device must be open */
      if (decklinksrc->decklink == NULL) {
        GST_WARNING_OBJECT (decklinksrc,
            "Can't give latency since device isn't open !");
        goto done;
      }

      mode = gst_decklink_get_mode (decklinksrc->mode);

      /* min latency is the time to capture one frame */
      min_latency =
          gst_util_uint64_scale_int (GST_SECOND, mode->fps_d, mode->fps_n);

      /* max latency is total duration of the frame buffer */
      max_latency = 2 * min_latency;

      GST_DEBUG_OBJECT (decklinksrc,
          "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

      /* we are always live, the min latency is 1 frame and the max latency is
       * the complete buffer of frames. */
      gst_query_set_latency (query, TRUE, min_latency, max_latency);

      ret = TRUE;
      break;
    }
    default:
      ret = gst_pad_query_default (pad, parent, query);
      break;
  }

done:
  return ret;
}
static GstFlowReturn
gst_decklink_src_audio_src_getrange (GstPad * pad, guint64 offset, guint length,
                                     GstBuffer ** buffer)
{
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "getrange");


    gst_object_unref (decklinksrc);
    return GST_FLOW_OK;
}
static GstCaps *
gst_decklink_src_audio_src_getcaps (GstPad * pad)
{
    GstDecklinkSrc *decklinksrc;
    GstCaps *caps;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "getcaps");

    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

    gst_object_unref (decklinksrc);
    return caps;
}
static GstIterator *
gst_decklink_src_audio_src_iterintlink (GstPad * pad)
{
    GstDecklinkSrc *decklinksrc;
    GstIterator *iter;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "iterintlink");

    iter = gst_pad_iterate_internal_links_default (pad);

    gst_object_unref (decklinksrc);
    return iter;
}
static GstCaps *
gst_decklink_src_video_src_getcaps (GstPad * pad)
{
    GstDecklinkSrc *decklinksrc;
    GstCaps *caps;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "getcaps");

    caps = gst_decklink_mode_get_caps (decklinksrc->mode);

    gst_object_unref (decklinksrc);
    return caps;
}
Example #17
HRESULT
DeckLinkCaptureDelegate::VideoInputFormatChanged (
    BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode * mode,
    BMDDetectedVideoInputFormatFlags)
{
  GstDecklinkSrc *decklinksrc;

  g_return_val_if_fail (priv != NULL, S_OK);
  g_return_val_if_fail (GST_IS_DECKLINK_SRC (priv), S_OK);

  decklinksrc = GST_DECKLINK_SRC (priv);

  GST_ERROR_OBJECT (decklinksrc, "unimplemented: video input format changed");

  return S_OK;
}
Example #18
static gboolean
gst_decklink_src_query (GstElement * element, GstQuery * query)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
  gboolean ret;

  GST_DEBUG_OBJECT (decklinksrc, "query");

  switch (GST_QUERY_TYPE (query)) {
    default:
      ret = GST_ELEMENT_CLASS (parent_class)->query (element, query);
      break;
  }

  return ret;
}
Example #19
static gboolean
gst_decklink_src_audio_src_acceptcaps (GstPad * pad, GstCaps * caps)
{
  GstDecklinkSrc *decklinksrc;
  GstCaps *pad_caps;
  gboolean can_intersect;

  decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

  pad_caps = gst_pad_get_caps_reffed (pad);
  can_intersect = gst_caps_can_intersect (pad_caps, caps);
  gst_caps_unref (pad_caps);

  GST_DEBUG_OBJECT (decklinksrc, "acceptcaps");

  gst_object_unref (decklinksrc);
  return can_intersect;
}
Example #20
static gboolean
gst_decklink_src_video_src_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDecklinkSrc *decklinksrc;
  GstCaps *mode_caps;
  gboolean can_intersect;

  decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (decklinksrc, "setcaps");

  mode_caps = gst_decklink_mode_get_caps (decklinksrc->mode);
  can_intersect = gst_caps_can_intersect (mode_caps, caps);
  gst_caps_unref (mode_caps);

  gst_object_unref (decklinksrc);
  return can_intersect;
}
Example #21
HRESULT
DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame *
    videoFrame, IDeckLinkAudioInputPacket * audioFrame)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);

  // Handle Video Frame
  if (videoFrame) {
    if (videoFrame->GetFlags () & bmdFrameHasNoInputSource) {
      GST_DEBUG("Frame received - No input signal detected");
    } else {
      const char *timecodeString = NULL;
      if (g_timecodeFormat != 0) {
        IDeckLinkTimecode *timecode;
        if (videoFrame->GetTimecode (g_timecodeFormat, &timecode) == S_OK) {
          timecode->GetString (&timecodeString);
        }
      }

      GST_DEBUG("Frame received [%s] - %s - Size: %li bytes",
          timecodeString != NULL ? timecodeString : "No timecode",
          "Valid Frame",
          videoFrame->GetRowBytes () * videoFrame->GetHeight ());

      if (timecodeString)
        free ((void *) timecodeString);

      g_mutex_lock (decklinksrc->mutex);
      if (decklinksrc->video_frame != NULL) {
        decklinksrc->dropped_frames++;
      } else {
        videoFrame->AddRef();
        decklinksrc->video_frame = videoFrame;
        if (audioFrame) {
          audioFrame->AddRef();
          decklinksrc->audio_frame = audioFrame;
        }
      }
      g_cond_signal (decklinksrc->cond);
      g_mutex_unlock (decklinksrc->mutex);
    }
  }
  return S_OK;
}
static gboolean
gst_decklink_src_audio_src_query (GstPad * pad, GstQuery * query)
{
    gboolean res;
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "query");

    switch (GST_QUERY_TYPE (query)) {
    default:
        res = gst_pad_query_default (pad, query);
        break;
    }

    gst_object_unref (decklinksrc);
    return res;
}
static gboolean
gst_decklink_src_audio_src_event (GstPad * pad, GstEvent * event)
{
    gboolean res;
    GstDecklinkSrc *decklinksrc;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "event");

    switch (GST_EVENT_TYPE (event)) {
    default:
        res = gst_pad_event_default (pad, event);
        break;
    }

    gst_object_unref (decklinksrc);
    return res;
}
static gboolean
gst_decklink_src_audio_src_activate (GstPad * pad)
{
    GstDecklinkSrc *decklinksrc;
    gboolean ret;

    decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

    GST_DEBUG_OBJECT (decklinksrc, "activate");

    if (gst_pad_check_pull_range (pad)) {
        GST_DEBUG_OBJECT (pad, "activating pull");
        ret = gst_pad_activate_pull (pad, TRUE);
    } else {
        GST_DEBUG_OBJECT (pad, "activating push");
        ret = gst_pad_activate_push (pad, TRUE);
    }

    gst_object_unref (decklinksrc);
    return ret;
}
static void
gst_decklink_src_task (void *priv)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
  GstBuffer *buffer;
  GstBuffer *audio_buffer;
  IDeckLinkVideoInputFrame *video_frame;
  IDeckLinkAudioInputPacket *audio_frame;
  void *data;
  gsize data_size;
  int n_samples;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;
  gboolean discont = FALSE;

  GST_DEBUG_OBJECT (decklinksrc, "task");

  g_mutex_lock (&decklinksrc->mutex);
  while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
    g_cond_wait (&decklinksrc->cond, &decklinksrc->mutex);
  }
  video_frame = decklinksrc->video_frame;
  audio_frame = decklinksrc->audio_frame;
  decklinksrc->video_frame = NULL;
  decklinksrc->audio_frame = NULL;
  g_mutex_unlock (&decklinksrc->mutex);

  if (decklinksrc->stop) {
    if (video_frame)
      video_frame->Release ();
    if (audio_frame)
      audio_frame->Release ();
    GST_DEBUG ("stopping task");
    return;
  }

  /* warning on dropped frames */
  /* FIXME: post QoS message */
  if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
    GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
        ("Dropped %d frame(s), for a total of %d frame(s)",
            decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
            decklinksrc->dropped_frames), (NULL));
    decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    /* FIXME: discont = TRUE; ? */
  }

  if (!decklinksrc->started) {
    gst_decklink_src_send_initial_events (decklinksrc);
    decklinksrc->started = TRUE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  video_frame->GetBytes (&data);

  data_size = mode->width * mode->height * 2;

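  /* either copy the frame data into a new buffer, or wrap the DeckLink
   * frame's memory and release the frame (via video_frame_free) when the
   * buffer is freed */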
  if (decklinksrc->copy_data) {
    buffer = gst_buffer_new_and_alloc (data_size);

    gst_buffer_fill (buffer, 0, data, data_size);

    video_frame->Release ();
  } else {
    VideoFrame *vf;

    vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

    buffer = gst_buffer_new_wrapped_full ((GstMemoryFlags) 0, data, data_size,
        0, data_size, vf, (GDestroyNotify) video_frame_free);

    vf->frame = video_frame;
    vf->input = decklinksrc->input;
    vf->input->AddRef ();
  }

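  /* derive timestamp, duration and offsets from the frame counter and the
   * mode's frame rate */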
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
      mode->fps_d, mode->fps_n);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
      mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
  GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num; /* FIXME: +1? */

  /* FIXME: set video meta */

  if (decklinksrc->frame_num == 0)
    discont = TRUE;

  if (discont)
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);

  /* FIXME: proper flow aggregation with audio flow */
  ret = gst_pad_push (decklinksrc->videosrcpad, buffer);
  if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
          ret == GST_FLOW_FLUSHING)) {
    GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
        ("Internal data stream error."),
        ("stream stopped, reason %s", gst_flow_get_name (ret)));
    goto pause;
  }

  if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
    n_samples = audio_frame->GetSampleFrameCount ();
    audio_frame->GetBytes (&data);
    audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
    gst_buffer_fill (audio_buffer, 0, data, n_samples * 2 * 2);

    GST_BUFFER_TIMESTAMP (audio_buffer) =
        gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
        1, 48000);
    /* FIXME: should be next_timestamp - timestamp for perfect stream */
    GST_BUFFER_DURATION (audio_buffer) =
        gst_util_uint64_scale_int (n_samples * GST_SECOND, 1, 48000);
    GST_BUFFER_OFFSET (audio_buffer) = decklinksrc->num_audio_samples;
    GST_BUFFER_OFFSET_END (audio_buffer) =
        GST_BUFFER_OFFSET (audio_buffer) + n_samples;

    decklinksrc->num_audio_samples += n_samples;

    /* FIXME: proper flow aggregation with video flow */
    ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
    if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
            ret == GST_FLOW_FLUSHING)) {
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      goto pause;
    }
  }

done:

  if (audio_frame)
    audio_frame->Release ();

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    GstEvent *event = NULL;

    GST_DEBUG_OBJECT (decklinksrc, "pausing task, reason %s", reason);
    gst_task_pause (decklinksrc->task);
    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic (very crude, we don't even keep a GstSegment) */
      event = gst_event_new_eos ();
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first.
       * Also don't do this for FLUSHING because it happens
       * due to flushing and posting an error message because of
       * that is the wrong thing to do, e.g. when we're doing
       * a flushing seek. */
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, ret));
    }
    if (event != NULL) {
      GST_INFO_OBJECT (decklinksrc->videosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->videosrcpad, gst_event_ref (event));
      GST_INFO_OBJECT (decklinksrc->audiosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->audiosrcpad, event);
    }
    goto done;
  }
}
/* FIXME: post error messages for the misc. failures */
static gboolean
gst_decklink_src_start (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
  DeckLinkCaptureDelegate *delegate;
  BMDAudioSampleType sample_depth;
  int channels;
  HRESULT ret;
  const GstDecklinkMode *mode;
  IDeckLinkConfiguration *config;
  BMDVideoConnection conn;
  BMDAudioConnection aconn;

  GST_DEBUG_OBJECT (decklinksrc, "start");

  decklinksrc->decklink = gst_decklink_get_nth_device (decklinksrc->device);
  if (decklinksrc->decklink == NULL) {
    return FALSE;
  }

  decklinksrc->input = gst_decklink_get_nth_input (decklinksrc->device);

  delegate = new DeckLinkCaptureDelegate ();
  delegate->priv = decklinksrc;
  ret = decklinksrc->input->SetCallback (delegate);
  if (ret != S_OK) {
    GST_ERROR ("set callback failed (input source)");
    return FALSE;
  }

  decklinksrc->config = gst_decklink_get_nth_config (decklinksrc->device);
  config = decklinksrc->config;

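  /* map the connection property to the matching DeckLink video connection,
   * and pick a default audio connection to go with it */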
  switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
      conn = bmdVideoConnectionSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_HDMI:
      conn = bmdVideoConnectionHDMI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
      conn = bmdVideoConnectionOpticalSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
      conn = bmdVideoConnectionComponent;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
      conn = bmdVideoConnectionComposite;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
      conn = bmdVideoConnectionSVideo;
      aconn = bmdAudioConnectionAnalog;
      break;
  }

  ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (input source)");
    return FALSE;
  }

  if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
    ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
        bmdAnalogVideoFlagCompositeSetup75);
    if (ret != S_OK) {
      GST_ERROR ("set configuration (composite setup)");
      return FALSE;
    }
  }

  switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
      /* set above */
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
      aconn = bmdAudioConnectionAESEBU;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
      aconn = bmdAudioConnectionAnalog;
      break;
  }
  ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (audio input connection)");
    return FALSE;
  }

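  /* enable video capture in the configured mode, 8-bit YUV */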
  mode = gst_decklink_get_mode (decklinksrc->mode);

  ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  sample_depth = bmdAudioSampleType16bitInteger;
  channels = 2;
  ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
      sample_depth, channels);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  ret = decklinksrc->input->StartStreams ();
  if (ret != S_OK) {
    GST_ERROR ("start streams failed");
    return FALSE;
  }

  g_rec_mutex_lock (&decklinksrc->task_mutex);
  gst_task_start (decklinksrc->task);
  g_rec_mutex_unlock (&decklinksrc->task_mutex);

  return TRUE;
}
static void
gst_decklink_src_task (void *priv)
{
    GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
    GstBuffer *buffer;
    GstBuffer *audio_buffer;
    IDeckLinkVideoInputFrame *video_frame;
    IDeckLinkAudioInputPacket *audio_frame;
    void *data;
    int n_samples;
    GstFlowReturn ret;
    const GstDecklinkMode *mode;

    GST_DEBUG_OBJECT (decklinksrc, "task");

    g_mutex_lock (decklinksrc->mutex);
    while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
        g_cond_wait (decklinksrc->cond, decklinksrc->mutex);
    }
    video_frame = decklinksrc->video_frame;
    audio_frame = decklinksrc->audio_frame;
    decklinksrc->video_frame = NULL;
    decklinksrc->audio_frame = NULL;
    g_mutex_unlock (decklinksrc->mutex);

    if (decklinksrc->stop) {
        GST_DEBUG ("stopping task");
        return;
    }

    /* warning on dropped frames */
    if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
        GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
                             ("Dropped %d frame(s), for a total of %d frame(s)",
                              decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
                              decklinksrc->dropped_frames),
                             (NULL));
        decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    }

    mode = gst_decklink_get_mode (decklinksrc->mode);

    video_frame->GetBytes (&data);
    if (decklinksrc->copy_data) {
        buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2);

        memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2);

        video_frame->Release ();
    } else {
        buffer = gst_buffer_new ();
        GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2;

        GST_BUFFER_DATA (buffer) = (guint8 *) data;

        GST_BUFFER_FREE_FUNC (buffer) = video_frame_free;
        GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame;
    }

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
                                   mode->fps_d, mode->fps_n);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
                                   mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
    GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
    GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;
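    /* first buffer: mark it as a discont and push a newsegment event before
     * any data */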
    if (decklinksrc->frame_num == 0) {
        GstEvent *event;
        gboolean ret;

        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

        event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
                                           GST_CLOCK_TIME_NONE, 0);

        ret = gst_pad_push_event (decklinksrc->videosrcpad, event);
        if (!ret) {
            GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret);
            return;
        }
    }

    if (decklinksrc->video_caps == NULL) {
        decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode);
    }
    gst_buffer_set_caps (buffer, decklinksrc->video_caps);

    ret = gst_pad_push (decklinksrc->videosrcpad, buffer);
    if (ret != GST_FLOW_OK) {
        GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL));
    }

    if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
        n_samples = audio_frame->GetSampleFrameCount ();
        audio_frame->GetBytes (&data);
        audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
        memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2);

        GST_BUFFER_TIMESTAMP (audio_buffer) =
            gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
                                       1, 48000);
        GST_BUFFER_DURATION (audio_buffer) =
            gst_util_uint64_scale_int ((decklinksrc->num_audio_samples +
                                        n_samples) * GST_SECOND, 1,
                                       48000) - GST_BUFFER_TIMESTAMP (audio_buffer);
        decklinksrc->num_audio_samples += n_samples;

        if (decklinksrc->audio_caps == NULL) {
            decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int",
                                      "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
                                      "signed", G_TYPE_BOOLEAN, TRUE,
                                      "depth", G_TYPE_INT, 16,
                                      "width", G_TYPE_INT, 16,
                                      "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL);
        }
        gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps);

        ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
        if (ret != GST_FLOW_OK) {
            GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL));
        }
    }
    if (audio_frame)
        audio_frame->Release ();
}
static gboolean
gst_decklink_src_start (GstElement * element)
{
    GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
    IDeckLinkIterator *iterator;
    DeckLinkCaptureDelegate *delegate;
    //IDeckLinkDisplayModeIterator *mode_iterator;
    //IDeckLinkDisplayMode *mode;
    BMDAudioSampleType sample_depth;
    int channels;
    HRESULT ret;
    const GstDecklinkMode *mode;
    IDeckLinkConfiguration *config;
    BMDVideoConnection conn;
    BMDAudioConnection aconn;
    int i;

    GST_DEBUG_OBJECT (decklinksrc, "start");

    iterator = CreateDeckLinkIteratorInstance ();
    if (iterator == NULL) {
        GST_ERROR ("no driver");
        return FALSE;
    }

    ret = iterator->Next (&decklinksrc->decklink);
    if (ret != S_OK) {
        GST_ERROR ("no card");
        return FALSE;
    }
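    /* advance past the first card(s) to reach the requested subdevice */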
    for (i = 0; i < decklinksrc->subdevice; i++) {
        ret = iterator->Next (&decklinksrc->decklink);
        if (ret != S_OK) {
            GST_ERROR ("no card");
            return FALSE;
        }
    }

    ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkInput,
            (void **) &decklinksrc->input);
    if (ret != S_OK) {
        GST_ERROR ("query interface failed");
        return FALSE;
    }

    delegate = new DeckLinkCaptureDelegate ();
    delegate->priv = decklinksrc;
    decklinksrc->input->SetCallback (delegate);

    ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkConfiguration,
            (void **) &config);
    if (ret != S_OK) {
        GST_ERROR ("query interface failed");
        return FALSE;
    }

    switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
        conn = bmdVideoConnectionSDI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_HDMI:
        conn = bmdVideoConnectionHDMI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
        conn = bmdVideoConnectionOpticalSDI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
        conn = bmdVideoConnectionComponent;
        aconn = bmdAudioConnectionAnalog;
        break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
        conn = bmdVideoConnectionComposite;
        aconn = bmdAudioConnectionAnalog;
        break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
        conn = bmdVideoConnectionSVideo;
        aconn = bmdAudioConnectionAnalog;
        break;
    }

    ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
    if (ret != S_OK) {
        GST_ERROR ("set configuration (input source)");
        return FALSE;
    }

    if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
        ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
                              bmdAnalogVideoFlagCompositeSetup75);
        if (ret != S_OK) {
            GST_ERROR ("set configuration (composite setup)");
            return FALSE;
        }
    }

    switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
        aconn = bmdAudioConnectionAESEBU;
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
        aconn = bmdAudioConnectionAnalog;
        break;
    }
    ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
    if (ret != S_OK) {
        GST_ERROR ("set configuration (audio input connection)");
        return FALSE;
    }
#if 0
    ret = decklinksrc->input->GetDisplayModeIterator (&mode_iterator);
    if (ret != S_OK) {
        GST_ERROR ("failed to get display mode iterator");
        return FALSE;
    }

    i = 0;
    while (mode_iterator->Next (&mode) == S_OK) {
        const char *mode_name;

        mode->GetName (&mode_name);

        GST_DEBUG ("%d: mode name: %s", i, mode_name);

        mode->Release ();
        i++;
    }
#endif

    mode = gst_decklink_get_mode (decklinksrc->mode);

    ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
    if (ret != S_OK) {
        GST_ERROR ("enable video input failed");
        return FALSE;
    }

    sample_depth = bmdAudioSampleType16bitInteger;
    channels = 2;
    ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
            sample_depth, channels);
    if (ret != S_OK) {
        GST_ERROR ("enable video input failed");
        return FALSE;
    }

    ret = decklinksrc->input->StartStreams ();
    if (ret != S_OK) {
        GST_ERROR ("start streams failed");
        return FALSE;
    }

    g_static_rec_mutex_lock (&decklinksrc->task_mutex);
    gst_task_start (decklinksrc->task);
    g_static_rec_mutex_unlock (&decklinksrc->task_mutex);

    return TRUE;
}