Code Example #1
static void
gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
{
  self->mode = GST_DECKLINK_MODE_NTSC;
  self->device_number = 0;

  gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
  gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
}
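
This init function configures the GstBaseSink behaviour once per instance: buffers arriving more than 20ms late become droppable, and QoS events are enabled. Note that GST_BASE_SINK_CAST is the unchecked variant of GST_BASE_SINK, a plain C cast with no runtime type check, which is why it appears in init and render paths throughout these examples. A minimal sketch of the same pattern, assuming a hypothetical MySink subclass:

static void
my_sink_init (MySink * self)
{
  /* MySink is a hypothetical GstBaseSink subclass; the unchecked cast is
   * fine here because "self" is known to be the right type */
  GstBaseSink *bsink = GST_BASE_SINK_CAST (self);

  gst_base_sink_set_max_lateness (bsink, 20 * GST_MSECOND);
  gst_base_sink_set_qos_enabled (bsink, TRUE);
}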
Code Example #2
static void
gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
{
  self->mode = GST_DECKLINK_MODE_NTSC;
  self->device_number = 0;
  self->video_format = GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV;
  /* VITC is legacy, we should expect RP188 in modern use cases */
  self->timecode_format = bmdTimecodeRP188Any;

  gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
  gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
}
Code Example #3
static gboolean
gst_fake_sink_event (GstBaseSink * bsink, GstEvent * event)
{
  GstFakeSink *sink = GST_FAKE_SINK (bsink);

  if (!sink->silent) {
    const GstStructure *s;
    const gchar *tstr;
    gchar *sstr;

    GST_OBJECT_LOCK (sink);
    g_free (sink->last_message);

    if (GST_EVENT_TYPE (event) == GST_EVENT_SINK_MESSAGE) {
      GstMessage *msg;
      const GstStructure *structure;

      gst_event_parse_sink_message (event, &msg);
      structure = gst_message_get_structure (msg);
      sstr = gst_structure_to_string (structure);
      sink->last_message =
          g_strdup_printf ("message ******* (%s:%s) M (type: %d, %s) %p",
          GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
          GST_MESSAGE_TYPE (msg), sstr, msg);
      gst_message_unref (msg);
    } else {
      tstr = gst_event_type_get_name (GST_EVENT_TYPE (event));

      if ((s = gst_event_get_structure (event))) {
        sstr = gst_structure_to_string (s);
      } else {
        sstr = g_strdup ("");
      }

      sink->last_message =
          g_strdup_printf ("event   ******* (%s:%s) E (type: %s (%d), %s) %p",
          GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
          tstr, GST_EVENT_TYPE (event), sstr, event);
    }
    g_free (sstr);
    GST_OBJECT_UNLOCK (sink);

    gst_fake_sink_notify_last_message (sink);
  }

  return GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
}
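
The handler above special-cases GST_EVENT_SINK_MESSAGE, an event that wraps a GstMessage for the sink to post on the bus, falls back to generic event formatting otherwise, and chains up to the parent class either way. A hedged usage sketch for exercising the sink-message branch (the pipeline variable, structure name, and event name are illustrative):

/* Wrap an application message in a sink-message event; when it reaches
 * fakesink, the handler above records it in last-message. */
GstStructure *s = gst_structure_new_empty ("my-app-info");
GstMessage *msg = gst_message_new_application (GST_OBJECT (pipeline), s);
GstEvent *event = gst_event_new_sink_message ("my-app-event", msg);
gst_message_unref (msg);        /* the event keeps its own reference */
gst_element_send_event (pipeline, event);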
Code Example #4
static gboolean
gst_inter_audio_sink_query (GstBaseSink * sink, GstQuery * query)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);
  gboolean ret;

  GST_DEBUG_OBJECT (sink, "query");

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      gboolean live, us_live;
      GstClockTime min_l, max_l;

      GST_DEBUG_OBJECT (sink, "latency query");

      if ((ret =
              gst_base_sink_query_latency (GST_BASE_SINK_CAST (sink), &live,
                  &us_live, &min_l, &max_l))) {
        GstClockTime base_latency, min_latency, max_latency;

        /* we and upstream are both live, adjust the min_latency */
        if (live && us_live) {
          /* FIXME: The other side can change this value when it starts */
          base_latency = interaudiosink->surface->audio_latency_time;

          /* we cannot go lower than the buffer size and the min peer latency */
          min_latency = base_latency + min_l;
          /* the max latency is the peer's max latency; if that is
           * unbounded, we can also delay an arbitrary amount of time */
          max_latency = (max_l == GST_CLOCK_TIME_NONE) ?
              GST_CLOCK_TIME_NONE : (base_latency + max_l);

          GST_DEBUG_OBJECT (sink,
              "peer min %" GST_TIME_FORMAT ", our min latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (min_l),
              GST_TIME_ARGS (min_latency));
          GST_DEBUG_OBJECT (sink,
              "peer max %" GST_TIME_FORMAT ", our max latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (max_l),
              GST_TIME_ARGS (max_latency));
        } else {
          GST_DEBUG_OBJECT (sink,
              "peer or we are not live, don't care about latency");
          min_latency = min_l;
          max_latency = max_l;
        }
        gst_query_set_latency (query, live, min_latency, max_latency);
      }
      break;
    }
    default:
      ret =
          GST_BASE_SINK_CLASS (gst_inter_audio_sink_parent_class)->query (sink,
          query);
      break;
  }

  return ret;
}
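
This is the standard latency pattern for a live sink: gst_base_sink_query_latency() performs the upstream query, the sink adds its own base latency to the upstream minimum, and an unbounded upstream maximum stays unbounded. A hedged application-side sketch that would hit this handler (sink_element is illustrative):

GstQuery *query = gst_query_new_latency ();
if (gst_element_query (sink_element, query)) {
  gboolean live;
  GstClockTime min_latency, max_latency;

  gst_query_parse_latency (query, &live, &min_latency, &max_latency);
  g_print ("live: %d, min: %" GST_TIME_FORMAT ", max: %" GST_TIME_FORMAT "\n",
      live, GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
}
gst_query_unref (query);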
Code Example #5
static void
gst_decklink_audio_sink_init (GstDecklinkAudioSink * self)
{
  self->device_number = DEFAULT_DEVICE_NUMBER;
  self->stream_align =
      gst_audio_stream_align_new (48000, DEFAULT_ALIGNMENT_THRESHOLD,
      DEFAULT_DISCONT_WAIT);
  self->buffer_time = DEFAULT_BUFFER_TIME * 1000;

  gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
}
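
The GstAudioStreamAlign helper created here is fixed at 48kHz (the rate DeckLink audio output runs at) and is fed each buffer's timestamp to detect discontinuities, as Example #11 shows. A hedged sketch of the matching cleanup, assuming a conventional GObject finalize with the usual parent_class chain-up:

static void
gst_decklink_audio_sink_finalize (GObject * object)
{
  GstDecklinkAudioSink *self = GST_DECKLINK_AUDIO_SINK_CAST (object);

  /* release the alignment helper allocated in _init */
  if (self->stream_align) {
    gst_audio_stream_align_free (self->stream_align);
    self->stream_align = NULL;
  }

  G_OBJECT_CLASS (parent_class)->finalize (object);
}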
Code Example #6
File: VideoSinkGStreamer.c Project: ceyusa/gst-wk
static void print_buffer_metadata(WebKitVideoSink* sink, GstBuffer* buffer)
{
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar flag_str[100];

    if (GST_BUFFER_DTS (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_DTS (buffer)));
    } else {
        g_strlcpy (dts_str, "none", sizeof (dts_str));
    }

    if (GST_BUFFER_PTS (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
    } else {
        g_strlcpy (pts_str, "none", sizeof (pts_str));
    }

    if (GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
        g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
                    GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
    } else {
        g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    {
        const char *flag_list[15] = {
            "", "", "", "", "live", "decode-only", "discont", "resync", "corrupted",
            "marker", "header", "gap", "droppable", "delta-unit", "in-caps"
        };
        guint i;
        char *end = flag_str;
        end[0] = '\0';
        for (i = 0; i < G_N_ELEMENTS (flag_list); i++) {
            if (GST_MINI_OBJECT_CAST (buffer)->flags & (1 << i)) {
                strcpy (end, flag_list[i]);
                end += strlen (end);
                end[0] = ' ';
                end[1] = '\0';
                end++;
            }
        }
    }

    g_printerr ("chain   ******* (%s:%s) (%u bytes, dts: %s, pts: %s"
                ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
                G_GINT64_FORMAT ", flags: %08x %s) %p\n",
                GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
                (guint) gst_buffer_get_size (buffer), dts_str, pts_str,
                dur_str, GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer),
                GST_MINI_OBJECT_CAST (buffer)->flags, flag_str, buffer);
}
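
This WebKit debug helper formats the flag string by hand from a hard-coded table; indices 0-3 are empty because the first buffer flag, GST_BUFFER_FLAG_LIVE, starts at bit 4 (the low bits belong to the mini-object flags). On GStreamer 1.10 and newer the table can be replaced with the stock helper that Example #7 uses; a brief sketch:

/* Equivalent flag formatting using the stock helper */
gchar *flags = gst_buffer_get_flags_string (buffer);
g_printerr ("flags: %s\n", flags ? flags : "none");
g_free (flags);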
Code Example #7
static GstFlowReturn
gst_fake_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstFakeSink *sink = GST_FAKE_SINK_CAST (bsink);

  if (sink->num_buffers_left == 0)
    goto eos;

  if (sink->num_buffers_left != -1)
    sink->num_buffers_left--;

  if (!sink->silent) {
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar *flag_str, *meta_str;

    GST_OBJECT_LOCK (sink);
    g_free (sink->last_message);

    if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
    } else {
      g_strlcpy (dts_str, "none", sizeof (dts_str));
    }

    if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
    } else {
      g_strlcpy (pts_str, "none", sizeof (pts_str));
    }

    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
    } else {
      g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    flag_str = gst_buffer_get_flags_string (buf);
    meta_str = gst_buffer_get_meta_string (buf);

    sink->last_message =
        g_strdup_printf ("chain   ******* (%s:%s) (%u bytes, dts: %s, pts: %s"
        ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
        G_GINT64_FORMAT ", flags: %08x %s, meta: %s) %p",
        GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
        (guint) gst_buffer_get_size (buf), dts_str, pts_str,
        dur_str, GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_MINI_OBJECT_CAST (buf)->flags, flag_str,
        meta_str ? meta_str : "none", buf);
    g_free (flag_str);
    g_free (meta_str);
    GST_OBJECT_UNLOCK (sink);

    gst_fake_sink_notify_last_message (sink);
  }
  if (sink->signal_handoffs)
    g_signal_emit (sink, gst_fake_sink_signals[SIGNAL_HANDOFF], 0, buf,
        bsink->sinkpad);

  if (sink->dump) {
    GstMapInfo info;

    if (gst_buffer_map (buf, &info, GST_MAP_READ)) {
      gst_util_dump_mem (info.data, info.size);
      gst_buffer_unmap (buf, &info);
    }
  }
  if (sink->num_buffers_left == 0)
    goto eos;

  return GST_FLOW_OK;

  /* ERRORS */
eos:
  {
    GST_DEBUG_OBJECT (sink, "we are EOS");
    return GST_FLOW_EOS;
  }
}
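
Near the end of the render path the sink optionally emits the "handoff" signal with the buffer and pad, which is how tests usually inspect data flowing into fakesink. A hedged usage sketch (the callback name is illustrative):

static void
on_handoff (GstElement * fakesink, GstBuffer * buffer, GstPad * pad,
    gpointer user_data)
{
  g_print ("got buffer of %" G_GSIZE_FORMAT " bytes\n",
      gst_buffer_get_size (buffer));
}

/* ... while building the pipeline ... */
GstElement *sink = gst_element_factory_make ("fakesink", NULL);
g_object_set (sink, "signal-handoffs", TRUE, NULL);
g_signal_connect (sink, "handoff", G_CALLBACK (on_handoff), NULL);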
Code Example #8
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) tc_meta->tc.config.flags) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
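
The block commented "See gst_base_sink_adjust_time()" reappears verbatim in the audio sink (Example #11). A hedged refactoring sketch that extracts it into one helper; the helper name is hypothetical, the arithmetic is copied from the code above:

static GstClockTime
adjust_running_time (GstBaseSink * bsink, GstClockTime running_time)
{
  GstClockTime latency = gst_base_sink_get_latency (bsink);
  GstClockTime render_delay = gst_base_sink_get_render_delay (bsink);
  GstClockTimeDiff ts_offset = gst_base_sink_get_ts_offset (bsink);

  /* the configured latency always delays the running time */
  running_time += latency;

  /* ts-offset may be negative; clamp at zero instead of underflowing */
  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    running_time = ((GstClockTime) ts_offset < running_time) ?
        running_time - ts_offset : 0;
  } else {
    running_time += ts_offset;
  }

  /* the render delay makes the sink render earlier, again clamped */
  return (running_time > render_delay) ? running_time - render_delay : 0;
}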
Code Example #9
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Code Example #10
File: gstfakesink.c Project: Grobik1/gstreamer
static GstFlowReturn
gst_fake_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstFakeSink *sink = GST_FAKE_SINK_CAST (bsink);

  if (sink->num_buffers_left == 0)
    goto eos;

  if (sink->num_buffers_left != -1)
    sink->num_buffers_left--;

  if (!sink->silent) {
    gchar dts_str[64], pts_str[64], dur_str[64];
    gchar flag_str[100];

    GST_OBJECT_LOCK (sink);
    g_free (sink->last_message);

    if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
    } else {
      g_strlcpy (dts_str, "none", sizeof (dts_str));
    }

    if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
    } else {
      g_strlcpy (pts_str, "none", sizeof (pts_str));
    }

    if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
      g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
    } else {
      g_strlcpy (dur_str, "none", sizeof (dur_str));
    }

    {
      const char *flag_list[15] = {
        "", "", "", "", "live", "decode-only", "discont", "resync", "corrupted",
        "marker", "header", "gap", "droppable", "delta-unit", "in-caps"
      };
      int i;
      char *end = flag_str;
      end[0] = '\0';
      for (i = 0; i < G_N_ELEMENTS (flag_list); i++) {
        if (GST_MINI_OBJECT_CAST (buf)->flags & (1 << i)) {
          strcpy (end, flag_list[i]);
          end += strlen (end);
          end[0] = ' ';
          end[1] = '\0';
          end++;
        }
      }
    }

    sink->last_message =
        g_strdup_printf ("chain   ******* (%s:%s) (%u bytes, dts: %s, pts: %s"
        ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %"
        G_GINT64_FORMAT ", flags: %08x %s) %p",
        GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad),
        (guint) gst_buffer_get_size (buf), dts_str, pts_str,
        dur_str, GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_MINI_OBJECT_CAST (buf)->flags, flag_str, buf);
    GST_OBJECT_UNLOCK (sink);

    gst_fake_sink_notify_last_message (sink);
  }
  if (sink->signal_handoffs)
    g_signal_emit (sink, gst_fake_sink_signals[SIGNAL_HANDOFF], 0, buf,
        bsink->sinkpad);

  if (sink->dump) {
    GstMapInfo info;

    /* check the map result, as the newer version in Example #7 does */
    if (gst_buffer_map (buf, &info, GST_MAP_READ)) {
      gst_util_dump_mem (info.data, info.size);
      gst_buffer_unmap (buf, &info);
    }
  }
  if (sink->num_buffers_left == 0)
    goto eos;

  return GST_FLOW_OK;

  /* ERRORS */
eos:
  {
    GST_DEBUG_OBJECT (sink, "we are EOS");
    return GST_FLOW_EOS;
  }
}
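
The num_buffers_left countdown that triggers the EOS exit above is seeded from fakesink's "num-buffers" property (-1, the default, means unlimited). A brief usage sketch:

/* make the sink return GST_FLOW_EOS after 100 rendered buffers */
g_object_set (sink, "num-buffers", 100, NULL);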
Code Example #11
static GstFlowReturn
gst_decklink_audio_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkAudioSink *self = GST_DECKLINK_AUDIO_SINK_CAST (bsink);
  GstDecklinkVideoSink *video_sink;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime schedule_time, schedule_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  GstMapInfo map_info;
  const guint8 *data;
  gsize len, written_all;
  gboolean discont;

  GST_DEBUG_OBJECT (self, "Rendering buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  if (GST_BASE_SINK_CAST (self)->flushing) {
    return GST_FLOW_FLUSHING;
  }
  // If we're called before output is actually started, start pre-rolling
  if (!self->output->started) {
    self->output->output->BeginAudioPreroll ();
  }

  video_sink =
      GST_DECKLINK_VIDEO_SINK (gst_object_ref (self->output->videosink));

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  discont = gst_audio_stream_align_process (self->stream_align,
      GST_BUFFER_IS_DISCONT (buffer), timestamp,
      gst_buffer_get_size (buffer) / self->info.bpf, &timestamp, &duration,
      NULL);

  if (discont && self->resampler)
    gst_audio_resampler_reset (self->resampler);

  if (GST_BASE_SINK_CAST (self)->segment.rate < 0.0) {
    GstMapInfo out_map;
    gint out_frames = gst_buffer_get_size (buffer) / self->info.bpf;

    buffer = gst_buffer_make_writable (gst_buffer_ref (buffer));

    gst_buffer_map (buffer, &out_map, GST_MAP_READWRITE);
    if (self->info.finfo->format == GST_AUDIO_FORMAT_S16) {
      gint16 *swap_data = (gint16 *) out_map.data;
      gint16 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint16 swap_tmp[16];

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    } else {
      gint32 *swap_data = (gint32 *) out_map.data;
      gint32 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint32 swap_tmp[16];

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    }
    gst_buffer_unmap (buffer, &out_map);
  } else {
    gst_buffer_ref (buffer);
  }

  if (self->resampler) {
    gint in_frames = gst_buffer_get_size (buffer) / self->info.bpf;
    gint out_frames =
        gst_audio_resampler_get_out_frames (self->resampler, in_frames);
    GstBuffer *out_buf = gst_buffer_new_and_alloc (out_frames * self->info.bpf);
    GstMapInfo out_map;

    gst_buffer_map (buffer, &map_info, GST_MAP_READ);
    gst_buffer_map (out_buf, &out_map, GST_MAP_READWRITE);

    gst_audio_resampler_resample (self->resampler, (gpointer *) & map_info.data,
        in_frames, (gpointer *) & out_map.data, out_frames);

    gst_buffer_unmap (out_buf, &out_map);
    gst_buffer_unmap (buffer, &map_info);
    buffer = out_buf;
  }

  gst_buffer_map (buffer, &map_info, GST_MAP_READ);
  data = map_info.data;
  len = map_info.size / self->info.bpf;
  written_all = 0;

  do {
    GstClockTime timestamp_now =
        timestamp + gst_util_uint64_scale (written_all, GST_SECOND,
        self->info.rate);
    guint32 buffered_samples;
    GstClockTime buffered_time;
    guint32 written = 0;
    GstClock *clock;
    GstClockTime clock_ahead;

    if (GST_BASE_SINK_CAST (self)->flushing) {
      flow_ret = GST_FLOW_FLUSHING;
      break;
    }

    running_time =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now);
    running_time_duration =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now + duration) - running_time;

    /* See gst_base_sink_adjust_time() */
    latency = gst_base_sink_get_latency (bsink);
    render_delay = gst_base_sink_get_render_delay (bsink);
    ts_offset = gst_base_sink_get_ts_offset (bsink);
    running_time += latency;

    if (ts_offset < 0) {
      ts_offset = -ts_offset;
      if ((GstClockTime) ts_offset < running_time)
        running_time -= ts_offset;
      else
        running_time = 0;
    } else {
      running_time += ts_offset;
    }

    if (running_time > render_delay)
      running_time -= render_delay;
    else
      running_time = 0;

    clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
    clock_ahead = 0;
    if (clock) {
      GstClockTime clock_now = gst_clock_get_time (clock);
      GstClockTime base_time =
          gst_element_get_base_time (GST_ELEMENT_CAST (self));
      gst_object_unref (clock);
      clock = NULL;

      if (clock_now != GST_CLOCK_TIME_NONE && base_time != GST_CLOCK_TIME_NONE) {
        GST_DEBUG_OBJECT (self,
            "Clock time %" GST_TIME_FORMAT ", base time %" GST_TIME_FORMAT
            ", target running time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_now), GST_TIME_ARGS (base_time),
            GST_TIME_ARGS (running_time));
        if (clock_now > base_time)
          clock_now -= base_time;
        else
          clock_now = 0;

        if (clock_now < running_time)
          clock_ahead = running_time - clock_now;
      }
    }

    GST_DEBUG_OBJECT (self,
        "Ahead %" GST_TIME_FORMAT " of the clock running time",
        GST_TIME_ARGS (clock_ahead));

    if (self->output->output->GetBufferedAudioSampleFrameCount
        (&buffered_samples) != S_OK)
      buffered_samples = 0;

    buffered_time =
        gst_util_uint64_scale (buffered_samples, GST_SECOND, self->info.rate);
    buffered_time /= ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    GST_DEBUG_OBJECT (self,
        "Buffered %" GST_TIME_FORMAT " in the driver (%u samples)",
        GST_TIME_ARGS (buffered_time), buffered_samples);
    // We start waiting once we have more than buffer-time buffered
    if (buffered_time > self->buffer_time || clock_ahead > self->buffer_time) {
      GstClockReturn clock_ret;
      GstClockTime wait_time = running_time;

      GST_DEBUG_OBJECT (self,
          "Buffered enough, wait for preroll or the clock or flushing");

      if (wait_time < self->buffer_time)
        wait_time = 0;
      else
        wait_time -= self->buffer_time;

      flow_ret =
          gst_base_sink_do_preroll (GST_BASE_SINK_CAST (self),
          GST_MINI_OBJECT_CAST (buffer));
      if (flow_ret != GST_FLOW_OK)
        break;

      clock_ret =
          gst_base_sink_wait_clock (GST_BASE_SINK_CAST (self), wait_time, NULL);
      if (GST_BASE_SINK_CAST (self)->flushing) {
        flow_ret = GST_FLOW_FLUSHING;
        break;
      }
      // Rerun the whole loop again
      if (clock_ret == GST_CLOCK_UNSCHEDULED)
        continue;
    }

    schedule_time = running_time;
    schedule_time_duration = running_time_duration;

    gst_decklink_video_sink_convert_to_internal_clock (video_sink,
        &schedule_time, &schedule_time_duration);

    GST_LOG_OBJECT (self, "Scheduling audio samples at %" GST_TIME_FORMAT
        " with duration %" GST_TIME_FORMAT, GST_TIME_ARGS (schedule_time),
        GST_TIME_ARGS (schedule_time_duration));

    ret = self->output->output->ScheduleAudioSamples ((void *) data, len,
        schedule_time, GST_SECOND, &written);
    if (ret != S_OK) {
      bool is_running = true;
      self->output->output->IsScheduledPlaybackRunning (&is_running);

      if (is_running && !GST_BASE_SINK_CAST (self)->flushing
          && self->output->started) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
            ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
        flow_ret = GST_FLOW_ERROR;
        break;
      } else {
        // Ignore the error and go out of the loop here, we're shutting down
        // or are not started yet and there's nothing we can do at this point
        GST_INFO_OBJECT (self,
            "Ignoring scheduling error 0x%08x because we're not started yet"
            " or not anymore", (guint) ret);
        flow_ret = GST_FLOW_OK;
        break;
      }
    }

    len -= written;
    data += written * self->info.bpf;
    if (self->resampler)
      written_all += written * ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    else
      written_all += written;

    flow_ret = GST_FLOW_OK;
  } while (len > 0);

  gst_buffer_unmap (buffer, &map_info);
  gst_buffer_unref (buffer);

  GST_DEBUG_OBJECT (self, "Returning %s", gst_flow_get_name (flow_ret));

  return flow_ret;
}
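
The loop consumes an optional self->resampler, used when the segment rate is not ±1.0 so that the device, which runs at a fixed sample rate, still receives the right number of samples. A hedged sketch of how such a resampler could be created; the method, flags, and rate computation are illustrative assumptions, not copied from the original element:

/* hypothetical setup: resample so playback at |rate| != 1.0 still fills
 * the fixed device rate */
gint out_rate =
    (gint) (self->info.rate / ABS (GST_BASE_SINK_CAST (self)->segment.rate));
GstAudioResampler *resampler =
    gst_audio_resampler_new (GST_AUDIO_RESAMPLER_METHOD_LINEAR,
    GST_AUDIO_RESAMPLER_FLAG_NONE, GST_AUDIO_FORMAT_S32,
    self->info.channels, self->info.rate, out_rate, NULL);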
Code Example #12
static gboolean
gst_decklink_audio_sink_query (GstBaseSink * bsink, GstQuery * query)
{
  GstDecklinkAudioSink *self = GST_DECKLINK_AUDIO_SINK (bsink);
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      gboolean live, us_live;
      GstClockTime min_l, max_l;

      GST_DEBUG_OBJECT (self, "latency query");

      /* ask parent first, it will do an upstream query for us. */
      if ((res =
              gst_base_sink_query_latency (GST_BASE_SINK_CAST (self), &live,
                  &us_live, &min_l, &max_l))) {
        GstClockTime base_latency, min_latency, max_latency;

        /* we and upstream are both live, adjust the min_latency */
        if (live && us_live) {
          GST_OBJECT_LOCK (self);
          if (!self->info.rate) {
            GST_OBJECT_UNLOCK (self);

            GST_DEBUG_OBJECT (self,
                "we are not negotiated, can't report latency yet");
            res = FALSE;
            goto done;
          }

          base_latency = self->buffer_time * 1000;
          GST_OBJECT_UNLOCK (self);

          /* we cannot go lower than the buffer size and the min peer latency */
          min_latency = base_latency + min_l;
          /* the max latency is the peer's max latency; if that is
           * unbounded, we can also delay an arbitrary amount of time */
          max_latency = (max_l == GST_CLOCK_TIME_NONE) ?
              GST_CLOCK_TIME_NONE : (base_latency + max_l);

          GST_DEBUG_OBJECT (self,
              "peer min %" GST_TIME_FORMAT ", our min latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (min_l),
              GST_TIME_ARGS (min_latency));
          GST_DEBUG_OBJECT (self,
              "peer max %" GST_TIME_FORMAT ", our max latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (max_l),
              GST_TIME_ARGS (max_latency));
        } else {
          GST_DEBUG_OBJECT (self,
              "peer or we are not live, don't care about latency");
          min_latency = min_l;
          max_latency = max_l;
        }
        gst_query_set_latency (query, live, min_latency, max_latency);
      }
      break;
    }
    default:
      res = GST_BASE_SINK_CLASS (parent_class)->query (bsink, query);
      break;
  }

done:
  return res;
}