/* Pad query handler for the video sink pad: answers CAPS queries with the
 * configured mode's caps (intersected with the optional filter), delegates
 * everything else to the default handler. */
static gboolean
gst_decklink_sink_videosink_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  gboolean res;
  GstDecklinkSink *decklinksink;

  decklinksink = GST_DECKLINK_SINK (parent);

  GST_DEBUG_OBJECT (decklinksink, "query");

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *mode_caps, *filter, *caps;

      /* FIXME: do we change mode if incoming caps change? If yes, we
       * should probably return the template caps instead */
      mode_caps = gst_decklink_mode_get_caps (decklinksink->mode);
      gst_query_parse_caps (query, &filter);
      /* Fix: filter is NULL when the query carries no filter caps;
       * passing NULL to gst_caps_intersect() is invalid */
      if (filter)
        caps = gst_caps_intersect (mode_caps, filter);
      else
        caps = gst_caps_ref (mode_caps);
      gst_caps_unref (mode_caps);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      /* Fix: res was left uninitialized on this path, so the return
       * value was indeterminate for CAPS queries */
      res = TRUE;
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }

  return res;
}
/* GstBaseSink::get_caps: build caps from the configured mode/format pair.
 * Either (or both) may be AUTO, widening the result accordingly. The
 * framerate is reset to ANY since output only cares about timestamps. */
static GstCaps *
gst_decklink_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  gboolean mode_is_auto = (self->mode == GST_DECKLINK_MODE_AUTO);
  gboolean format_is_auto =
      (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO);
  GstCaps *mode_caps, *result;

  if (mode_is_auto && format_is_auto) {
    /* Nothing fixed: advertise the full template */
    mode_caps = gst_decklink_mode_get_template_caps (FALSE);
  } else if (format_is_auto) {
    /* Mode fixed, any pixel format */
    mode_caps = gst_decklink_mode_get_caps_all_formats (self->mode, FALSE);
  } else if (mode_is_auto) {
    /* Pixel format fixed, any mode */
    mode_caps = gst_decklink_pixel_format_get_caps
        (gst_decklink_pixel_format_from_type (self->video_format), FALSE);
  } else {
    /* Both fixed */
    mode_caps = gst_decklink_mode_get_caps (self->mode,
        gst_decklink_pixel_format_from_type (self->video_format), FALSE);
  }

  mode_caps = gst_caps_make_writable (mode_caps);
  /* For output we support any framerate and only really care about timestamps */
  gst_caps_map_in_place (mode_caps, reset_framerate, NULL);

  if (filter == NULL)
    return mode_caps;

  result = gst_caps_intersect_full (filter, mode_caps,
      GST_CAPS_INTERSECT_FIRST);
  gst_caps_unref (mode_caps);

  return result;
}
/* GstBaseSrc::get_caps: report the currently detected mode and pixel
 * format, optionally intersected with the downstream filter. */
static GstCaps *
gst_decklink_video_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstDecklinkModeEnum current_mode;
  BMDPixelFormat current_format;
  GstCaps *result;

  /* Snapshot mode/format under the lock; they are updated from the
   * capture thread */
  g_mutex_lock (&self->lock);
  current_mode = self->caps_mode;
  current_format = self->caps_format;
  g_mutex_unlock (&self->lock);

  result = gst_decklink_mode_get_caps (current_mode, current_format);

  if (filter != NULL) {
    GstCaps *intersection =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersection;
  }

  return result;
}
/* GstBaseSrc::get_caps: caps follow the detected mode if one has been
 * negotiated, otherwise the user-configured mode. */
static GstCaps *
gst_decklink_video_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstCaps *mode_caps, *result;

  g_mutex_lock (&self->lock);
  if (self->caps_mode == GST_DECKLINK_MODE_AUTO) {
    /* No detected mode yet: fall back to the configured one */
    mode_caps = gst_decklink_mode_get_caps (self->mode);
  } else {
    mode_caps = gst_decklink_mode_get_caps (self->caps_mode);
  }
  g_mutex_unlock (&self->lock);

  if (filter == NULL)
    return mode_caps;

  result = gst_caps_intersect_full (filter, mode_caps,
      GST_CAPS_INTERSECT_FIRST);
  gst_caps_unref (mode_caps);

  return result;
}
/* getcaps for the video src pad: fixed caps derived from the configured
 * display mode. */
static GstCaps *
gst_decklink_src_video_src_getcaps (GstPad * pad)
{
    GstDecklinkSrc *src = GST_DECKLINK_SRC (gst_pad_get_parent (pad));
    GstCaps *caps;

    GST_DEBUG_OBJECT (src, "getcaps");

    caps = gst_decklink_mode_get_caps (src->mode);
    gst_object_unref (src);

    return caps;
}
/* getcaps for the video sink pad: fixed caps derived from the configured
 * display mode. */
static GstCaps *
gst_decklink_sink_videosink_getcaps (GstPad * pad)
{
  GstDecklinkSink *sink = GST_DECKLINK_SINK (gst_pad_get_parent (pad));
  GstCaps *caps;

  GST_DEBUG_OBJECT (sink, "getcaps");

  caps = gst_decklink_mode_get_caps (sink->mode);
  gst_object_unref (sink);

  return caps;
}
/* GstBaseSink::get_caps: caps of the configured mode, intersected with
 * the optional downstream filter. */
static GstCaps *
gst_decklink_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstCaps *result = gst_decklink_mode_get_caps (self->mode);

  if (filter != NULL) {
    GstCaps *filtered =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = filtered;
  }

  return result;
}
/* Scan the mode table for the first mode whose caps (in 8-bit YUV) can
 * intersect with the given caps; NULL if none matches. */
const GstDecklinkMode *
gst_decklink_find_mode_for_caps (GstCaps * caps)
{
  int i;

  /* Index 0 is the AUTO entry, so the scan starts at 1 */
  for (i = 1; i < (int) G_N_ELEMENTS (modes); i++) {
    GstCaps *candidate =
        gst_decklink_mode_get_caps ((GstDecklinkModeEnum) i, bmdFormat8BitYUV);
    gboolean match = gst_caps_can_intersect (caps, candidate);

    gst_caps_unref (candidate);
    if (match)
      return gst_decklink_get_mode ((GstDecklinkModeEnum) i);
  }

  return NULL;
}
/* setcaps for the video src pad: accept only caps that are compatible
 * with the configured display mode. */
static gboolean
gst_decklink_src_video_src_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDecklinkSrc *src = GST_DECKLINK_SRC (gst_pad_get_parent (pad));
  GstCaps *mode_caps;
  gboolean compatible;

  GST_DEBUG_OBJECT (src, "setcaps");

  mode_caps = gst_decklink_mode_get_caps (src->mode);
  compatible = gst_caps_can_intersect (mode_caps, caps);
  gst_caps_unref (mode_caps);

  gst_object_unref (src);
  return compatible;
}
/* Example #10 */
/* Push the mandatory initial events (stream-start, segment, caps) on both
 * source pads before the first buffers flow. */
static void
gst_decklink_src_send_initial_events (GstDecklinkSrc * src)
{
  GstSegment segment;
  GstEvent *event;
  guint group_id;
  guint32 audio_id, video_id;
  gchar stream_id[9];

  /* stream-start: two distinct random stream ids sharing one group id */
  audio_id = g_random_int ();
  do {
    video_id = g_random_int ();
  } while (video_id == audio_id);

  group_id = gst_util_group_id_next ();

  g_snprintf (stream_id, sizeof (stream_id), "%08x", audio_id);
  event = gst_event_new_stream_start (stream_id);
  gst_event_set_group_id (event, group_id);
  gst_pad_push_event (src->audiosrcpad, event);

  g_snprintf (stream_id, sizeof (stream_id), "%08x", video_id);
  event = gst_event_new_stream_start (stream_id);
  gst_event_set_group_id (event, group_id);
  gst_pad_push_event (src->videosrcpad, event);

  /* segment: one open-ended TIME segment, shared by both pads */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  event = gst_event_new_segment (&segment);
  gst_pad_push_event (src->videosrcpad, gst_event_ref (event));
  gst_pad_push_event (src->audiosrcpad, event);

  /* caps: audio is fixed S16LE stereo @ 48 kHz; video follows the mode */
  gst_pad_push_event (src->audiosrcpad,
      gst_event_new_caps (gst_caps_new_simple ("audio/x-raw",
          "format", G_TYPE_STRING, "S16LE", "channels", G_TYPE_INT, 2,
          "rate", G_TYPE_INT, 48000, "layout", G_TYPE_STRING, "interleaved",
          NULL)));

  gst_pad_push_event (src->videosrcpad,
      gst_event_new_caps (gst_decklink_mode_get_caps (src->mode)));
}
/* GstPushSrc::create: block until the capture callback queues a frame,
 * then wrap it as a zero-copy GstBuffer. In AUTO mode/format, a frame with
 * a different mode/format triggers renegotiation; otherwise it is an error. */
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  /* Wait for a queued frame or a flush request */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  /* NOTE(review): self->flushing is re-read here without the lock —
   * presumably a benign race (set-once flag until unset on restart); confirm */
  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  /* Mode change handling: only allowed when the element is in AUTO mode */
  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  /* Same policy for the pixel format, gated on AUTO video-format */
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  g_mutex_unlock (&self->lock);
  /* Renegotiate downstream caps and update cached video info */
  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);

  }

  /* Zero-copy wrap: the buffer borrows the DeckLink frame's memory and
   * video_frame_free drops the frame/input refs when the buffer dies */
  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  /* NOTE(review): f->tc is passed unchecked — confirm the capture path
   * always provides a timecode; the meta helper warns on NULL */
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
static void
gst_decklink_src_task (void *priv)
{
    GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
    GstBuffer *buffer;
    GstBuffer *audio_buffer;
    IDeckLinkVideoInputFrame *video_frame;
    IDeckLinkAudioInputPacket *audio_frame;
    void *data;
    int n_samples;
    GstFlowReturn ret;
    const GstDecklinkMode *mode;

    GST_DEBUG_OBJECT (decklinksrc, "task");

    g_mutex_lock (decklinksrc->mutex);
    while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
        g_cond_wait (decklinksrc->cond, decklinksrc->mutex);
    }
    video_frame = decklinksrc->video_frame;
    audio_frame = decklinksrc->audio_frame;
    decklinksrc->video_frame = NULL;
    decklinksrc->audio_frame = NULL;
    g_mutex_unlock (decklinksrc->mutex);

    if (decklinksrc->stop) {
        GST_DEBUG ("stopping task");
        return;
    }

    /* warning on dropped frames */
    if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
        GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
                             ("Dropped %d frame(s), for a total of %d frame(s)",
                              decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
                              decklinksrc->dropped_frames),
                             (NULL));
        decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    }

    mode = gst_decklink_get_mode (decklinksrc->mode);

    video_frame->GetBytes (&data);
    if (decklinksrc->copy_data) {
        buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2);

        memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2);

        video_frame->Release ();
    } else {
        buffer = gst_buffer_new ();
        GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2;

        GST_BUFFER_DATA (buffer) = (guint8 *) data;

        GST_BUFFER_FREE_FUNC (buffer) = video_frame_free;
        GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame;
    }

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
                                   mode->fps_d, mode->fps_n);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
                                   mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
    GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
    GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;
    if (decklinksrc->frame_num == 0) {
        GstEvent *event;
        gboolean ret;

        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

        event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
                                           GST_CLOCK_TIME_NONE, 0);

        ret = gst_pad_push_event (decklinksrc->videosrcpad, event);
        if (!ret) {
            GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret);
            return;
        }
    }

    if (decklinksrc->video_caps == NULL) {
        decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode);
    }
    gst_buffer_set_caps (buffer, decklinksrc->video_caps);

    ret = gst_pad_push (decklinksrc->videosrcpad, buffer);
    if (ret != GST_FLOW_OK) {
        GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL));
    }

    if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
        n_samples = audio_frame->GetSampleFrameCount ();
        audio_frame->GetBytes (&data);
        audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
        memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2);

        GST_BUFFER_TIMESTAMP (audio_buffer) =
            gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
                                       1, 48000);
        GST_BUFFER_DURATION (audio_buffer) =
            gst_util_uint64_scale_int ((decklinksrc->num_audio_samples +
                                        n_samples) * GST_SECOND, 1,
                                       48000) - GST_BUFFER_TIMESTAMP (audio_buffer);
        decklinksrc->num_audio_samples += n_samples;

        if (decklinksrc->audio_caps == NULL) {
            decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int",
                                      "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
                                      "signed", G_TYPE_BOOLEAN, TRUE,
                                      "depth", G_TYPE_INT, 16,
                                      "width", G_TYPE_INT, 16,
                                      "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL);
        }
        gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps);

        ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
        if (ret != GST_FLOW_OK) {
            GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL));
        }
    }
    audio_frame->Release ();
}
/* GstPushSrc::create: block until the capture callback queues a frame,
 * then wrap it as a zero-copy GstBuffer. In AUTO mode a frame with a new
 * display mode triggers caps renegotiation. */
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;

  /* Wait for a queued frame or a flush request */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  /* NOTE(review): self->flushing is re-read here without the lock —
   * presumably a benign race; confirm against the flush/unlock paths */
  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  /* In AUTO mode, adopt the frame's mode and renegotiate downstream caps */
  g_mutex_lock (&self->lock);
  if (self->mode == GST_DECKLINK_MODE_AUTO && self->caps_mode != f->mode) {
    GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
        f->mode);
    self->caps_mode = f->mode;
    g_mutex_unlock (&self->lock);
    caps = gst_decklink_mode_get_caps (f->mode);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  } else {
    g_mutex_unlock (&self->lock);
  }

  /* Zero-copy wrap: the buffer borrows the DeckLink frame's memory and
   * video_frame_free drops the frame/input refs when the buffer dies */
  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}