Code example #1
/* Prepare a video buffer for output: copy its pixels into a DeckLink
 * hardware frame, attach any timecode meta from the buffer, and schedule
 * the frame on the driver at the buffer's running time (adjusted for
 * latency, ts-offset and render delay, as in gst_base_sink_adjust_time()).
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR if the buffer has no
 * timestamp or any driver call fails.
 */
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame mapped;
  IDeckLinkMutableVideoFrame *dl_frame;
  guint8 *dst, *src;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint row;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    return GST_FLOW_ERROR;

  /* Hardware pixel format / bytes-per-pixel for the negotiated caps */
  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    /* No duration on the buffer: assume one frame at the negotiated rate */
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }

  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset >= 0) {
    running_time += ts_offset;
  } else {
    GstClockTime abs_offset = (GstClockTime) (-ts_offset);

    /* Clamp at zero rather than underflowing the unsigned clock time */
    running_time = (abs_offset < running_time) ? running_time - abs_offset : 0;
  }

  running_time =
      (running_time > render_delay) ? running_time - render_delay : 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &dl_frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&mapped, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  /* Row-by-row copy: the driver frame's row stride may differ from ours */
  dl_frame->GetBytes ((void **) &dst);
  src = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&mapped, 0);
  for (row = 0; row < self->info.height; row++) {
    memcpy (dst, src, GST_VIDEO_FRAME_WIDTH (&mapped) * bpp);
    src += GST_VIDEO_FRAME_PLANE_STRIDE (&mapped, 0);
    dst += dl_frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&mapped);

  /* Forward any GStreamer timecode meta onto the hardware frame */
  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.
                config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = bmdTimecodeIsDropFrame;
    else
      bflags = bmdTimecodeFlagDefault;
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = dl_frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  /* Translate pipeline running time into the DeckLink clock domain */
  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, dl_frame,
      GST_TIME_ARGS (running_time), GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (dl_frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  /* Drop our reference; the driver holds its own once scheduled */
  dl_frame->Release ();

  return flow_ret;
}
Code example #2
/* Render an audio buffer by scheduling its samples on the DeckLink
 * hardware.  Handles reverse playback (per-buffer sample reversal for
 * negative segment rates), trick-mode resampling, and throttles against
 * the pipeline clock so that no more than buffer-time worth of audio is
 * queued in the driver at once.
 *
 * Fixes over the previous revision:
 *  - the resampler branch replaced our owned `buffer` reference with the
 *    freshly allocated `out_buf` without unreffing it, leaking one
 *    GstBuffer per rendered buffer whenever resampling was active;
 *  - the `video_sink` reference taken via gst_object_ref() was never
 *    released, leaking a ref on the video sink every call.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_FLUSHING while flushing, or
 * GST_FLOW_ERROR when scheduling fails while playback is running.
 */
static GstFlowReturn
gst_decklink_audio_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkAudioSink *self = GST_DECKLINK_AUDIO_SINK_CAST (bsink);
  GstDecklinkVideoSink *video_sink;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime schedule_time, schedule_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  GstMapInfo map_info;
  const guint8 *data;
  gsize len, written_all;
  gboolean discont;

  GST_DEBUG_OBJECT (self, "Rendering buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  if (GST_BASE_SINK_CAST (self)->flushing) {
    return GST_FLOW_FLUSHING;
  }
  // If we're called before output is actually started, start pre-rolling
  if (!self->output->started) {
    self->output->output->BeginAudioPreroll ();
  }

  /* The video sink owns the conversion to the DeckLink clock domain;
   * we hold a ref for the duration of this call (released at the end) */
  video_sink =
      GST_DECKLINK_VIDEO_SINK (gst_object_ref (self->output->videosink));

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  /* Align timestamps to a contiguous sample stream; detects discontinuities */
  discont = gst_audio_stream_align_process (self->stream_align,
      GST_BUFFER_IS_DISCONT (buffer), timestamp,
      gst_buffer_get_size (buffer) / self->info.bpf, &timestamp, &duration,
      NULL);

  if (discont && self->resampler)
    gst_audio_resampler_reset (self->resampler);

  if (GST_BASE_SINK_CAST (self)->segment.rate < 0.0) {
    /* Reverse playback: swap sample frames front-to-back in a writable copy */
    GstMapInfo out_map;
    gint out_frames = gst_buffer_get_size (buffer) / self->info.bpf;

    buffer = gst_buffer_make_writable (gst_buffer_ref (buffer));

    gst_buffer_map (buffer, &out_map, GST_MAP_READWRITE);
    if (self->info.finfo->format == GST_AUDIO_FORMAT_S16) {
      gint16 *swap_data = (gint16 *) out_map.data;
      gint16 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint16 swap_tmp[16];      /* DeckLink supports at most 16 channels */

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    } else {
      gint32 *swap_data = (gint32 *) out_map.data;
      gint32 *swap_data_end =
          swap_data + (out_frames - 1) * self->info.channels;
      gint32 swap_tmp[16];      /* DeckLink supports at most 16 channels */

      while (out_frames > 0) {
        memcpy (&swap_tmp, swap_data, self->info.bpf);
        memcpy (swap_data, swap_data_end, self->info.bpf);
        memcpy (swap_data_end, &swap_tmp, self->info.bpf);

        swap_data += self->info.channels;
        swap_data_end -= self->info.channels;

        out_frames -= 2;
      }
    }
    gst_buffer_unmap (buffer, &out_map);
  } else {
    /* Take our own ref so unref at the end is balanced on both paths */
    gst_buffer_ref (buffer);
  }

  if (self->resampler) {
    /* Trick-mode rates: resample into a new buffer */
    gint in_frames = gst_buffer_get_size (buffer) / self->info.bpf;
    gint out_frames =
        gst_audio_resampler_get_out_frames (self->resampler, in_frames);
    GstBuffer *out_buf = gst_buffer_new_and_alloc (out_frames * self->info.bpf);
    GstMapInfo out_map;

    gst_buffer_map (buffer, &map_info, GST_MAP_READ);
    gst_buffer_map (out_buf, &out_map, GST_MAP_READWRITE);

    gst_audio_resampler_resample (self->resampler, (gpointer *) & map_info.data,
        in_frames, (gpointer *) & out_map.data, out_frames);

    gst_buffer_unmap (out_buf, &out_map);
    gst_buffer_unmap (buffer, &map_info);
    /* Release the input ref we took above before replacing it, otherwise
     * the input buffer is leaked on every render when resampling */
    gst_buffer_unref (buffer);
    buffer = out_buf;
  }

  gst_buffer_map (buffer, &map_info, GST_MAP_READ);
  data = map_info.data;
  len = map_info.size / self->info.bpf;
  written_all = 0;

  do {
    GstClockTime timestamp_now =
        timestamp + gst_util_uint64_scale (written_all, GST_SECOND,
        self->info.rate);
    guint32 buffered_samples;
    GstClockTime buffered_time;
    guint32 written = 0;
    GstClock *clock;
    GstClockTime clock_ahead;

    if (GST_BASE_SINK_CAST (self)->flushing) {
      flow_ret = GST_FLOW_FLUSHING;
      break;
    }

    running_time =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now);
    running_time_duration =
        gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
        GST_FORMAT_TIME, timestamp_now + duration) - running_time;

    /* See gst_base_sink_adjust_time() */
    latency = gst_base_sink_get_latency (bsink);
    render_delay = gst_base_sink_get_render_delay (bsink);
    ts_offset = gst_base_sink_get_ts_offset (bsink);
    running_time += latency;

    if (ts_offset < 0) {
      ts_offset = -ts_offset;
      if ((GstClockTime) ts_offset < running_time)
        running_time -= ts_offset;
      else
        running_time = 0;
    } else {
      running_time += ts_offset;
    }

    if (running_time > render_delay)
      running_time -= render_delay;
    else
      running_time = 0;

    /* How far ahead of the pipeline clock is this buffer's running time? */
    clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
    clock_ahead = 0;
    if (clock) {
      GstClockTime clock_now = gst_clock_get_time (clock);
      GstClockTime base_time =
          gst_element_get_base_time (GST_ELEMENT_CAST (self));
      gst_object_unref (clock);
      clock = NULL;

      if (clock_now != GST_CLOCK_TIME_NONE && base_time != GST_CLOCK_TIME_NONE) {
        GST_DEBUG_OBJECT (self,
            "Clock time %" GST_TIME_FORMAT ", base time %" GST_TIME_FORMAT
            ", target running time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_now), GST_TIME_ARGS (base_time),
            GST_TIME_ARGS (running_time));
        if (clock_now > base_time)
          clock_now -= base_time;
        else
          clock_now = 0;

        if (clock_now < running_time)
          clock_ahead = running_time - clock_now;
      }
    }

    GST_DEBUG_OBJECT (self,
        "Ahead %" GST_TIME_FORMAT " of the clock running time",
        GST_TIME_ARGS (clock_ahead));

    if (self->output->
        output->GetBufferedAudioSampleFrameCount (&buffered_samples) != S_OK)
      buffered_samples = 0;

    buffered_time =
        gst_util_uint64_scale (buffered_samples, GST_SECOND, self->info.rate);
    buffered_time /= ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    GST_DEBUG_OBJECT (self,
        "Buffered %" GST_TIME_FORMAT " in the driver (%u samples)",
        GST_TIME_ARGS (buffered_time), buffered_samples);
    // We start waiting once we have more than buffer-time buffered
    if (buffered_time > self->buffer_time || clock_ahead > self->buffer_time) {
      GstClockReturn clock_ret;
      GstClockTime wait_time = running_time;

      GST_DEBUG_OBJECT (self,
          "Buffered enough, wait for preroll or the clock or flushing");

      if (wait_time < self->buffer_time)
        wait_time = 0;
      else
        wait_time -= self->buffer_time;

      flow_ret =
          gst_base_sink_do_preroll (GST_BASE_SINK_CAST (self),
          GST_MINI_OBJECT_CAST (buffer));
      if (flow_ret != GST_FLOW_OK)
        break;

      clock_ret =
          gst_base_sink_wait_clock (GST_BASE_SINK_CAST (self), wait_time, NULL);
      if (GST_BASE_SINK_CAST (self)->flushing) {
        flow_ret = GST_FLOW_FLUSHING;
        break;
      }
      // Rerun the whole loop again
      if (clock_ret == GST_CLOCK_UNSCHEDULED)
        continue;
    }

    schedule_time = running_time;
    schedule_time_duration = running_time_duration;

    gst_decklink_video_sink_convert_to_internal_clock (video_sink,
        &schedule_time, &schedule_time_duration);

    GST_LOG_OBJECT (self, "Scheduling audio samples at %" GST_TIME_FORMAT
        " with duration %" GST_TIME_FORMAT, GST_TIME_ARGS (schedule_time),
        GST_TIME_ARGS (schedule_time_duration));

    ret = self->output->output->ScheduleAudioSamples ((void *) data, len,
        schedule_time, GST_SECOND, &written);
    if (ret != S_OK) {
      bool is_running = true;
      self->output->output->IsScheduledPlaybackRunning (&is_running);

      if (is_running && !GST_BASE_SINK_CAST (self)->flushing
          && self->output->started) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
            ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
        flow_ret = GST_FLOW_ERROR;
        break;
      } else {
        // Ignore the error and go out of the loop here, we're shutting down
        // or are not started yet and there's nothing we can do at this point
        GST_INFO_OBJECT (self,
            "Ignoring scheduling error 0x%08x because we're not started yet"
            " or not anymore", (guint) ret);
        flow_ret = GST_FLOW_OK;
        break;
      }
    }

    /* The driver may accept fewer samples than offered; loop on the rest */
    len -= written;
    data += written * self->info.bpf;
    if (self->resampler)
      written_all += written * ABS (GST_BASE_SINK_CAST (self)->segment.rate);
    else
      written_all += written;

    flow_ret = GST_FLOW_OK;
  } while (len > 0);

  gst_buffer_unmap (buffer, &map_info);
  gst_buffer_unref (buffer);

  GST_DEBUG_OBJECT (self, "Returning %s", gst_flow_get_name (flow_ret));

  /* Balance the gst_object_ref() taken on the video sink above */
  gst_object_unref (video_sink);

  return flow_ret;
}