/**
 * gst_video_buffer_get_overlay_composition:
 * @buf: a #GstBuffer
 *
 * Get the overlay composition that has previously been attached to a buffer
 * with gst_video_buffer_set_overlay_composition(), usually by another element
 * upstream.
 *
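 * For example, a sink that wants to blend an attached composition into the
 * video frame before rendering might do (a minimal sketch; error handling
 * omitted):
 *
 * |[
 *   GstVideoOverlayComposition *comp;
 *
 *   comp = gst_video_buffer_get_overlay_composition (buf);
 *   if (comp != NULL)
 *     gst_video_overlay_composition_blend (comp, buf);
 * ]|
 *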
 * Returns: (transfer none): the #GstVideoOverlayComposition attached to
 *    this buffer, or NULL. Does not return a reference to the composition;
 *    the caller must obtain their own ref via
 *    gst_video_overlay_composition_ref() if needed.
 *
 * Since: 0.10.36
 */
GstVideoOverlayComposition *
gst_video_buffer_get_overlay_composition (GstBuffer * buf)
{
  const GstStructure *s;
  const GValue *val;

  s = gst_buffer_get_qdata (buf, GST_OVERLAY_COMPOSITION_QUARK);
  if (s == NULL)
    return NULL;

  val = gst_structure_id_get_value (s, COMPOSITION_QUARK);
  if (val == NULL)
    return NULL;

  return GST_VIDEO_OVERLAY_COMPOSITION (gst_value_get_mini_object (val));
}

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer* buffer = nullptr;
  int64_t timestamp, nextTimestamp;
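  /* Pull decoded frames until we find one we can use: when aKeyFrameSkip is
   * set, drop non-keyframes, and drop any frame whose stream time falls
   * before aTimeThreshold. */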
  while (true)
  {
    if (!WaitForDecodedData(&mVideoSinkBufferCount)) {
      mVideoQueue.Finish();
      break;
    }
    mDecoder->NotifyDecodedFrames(0, 1);

    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
    bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer,
        GST_BUFFER_FLAG_DELTA_UNIT);
    if (aKeyFrameSkip && !isKeyframe) {
      gst_buffer_unref(buffer);
      buffer = nullptr;
      continue;
    }

    timestamp = GST_BUFFER_TIMESTAMP(buffer);
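    /* Map the buffer timestamp into stream time so it can be compared
     * against aTimeThreshold; mVideoSegment is shared with the GStreamer
     * threads, hence the monitor. */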
    {
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      timestamp = gst_segment_to_stream_time(&mVideoSegment,
          GST_FORMAT_TIME, timestamp);
    }
    NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
        "frame has invalid timestamp");
    timestamp = nextTimestamp = GST_TIME_AS_USECONDS(timestamp);
    if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
      nextTimestamp += GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
    else if (fpsNum && fpsDen)
      /* add 1-frame duration: fpsDen/fpsNum seconds, converted to usecs */
      nextTimestamp +=
          GST_TIME_AS_USECONDS(gst_util_uint64_scale(GST_SECOND, fpsDen, fpsNum));

    if (timestamp < aTimeThreshold) {
      LOG(PR_LOG_DEBUG, ("skipping frame %" GST_TIME_FORMAT
            " threshold %" GST_TIME_FORMAT,
            GST_TIME_ARGS(timestamp * GST_USECOND),
            GST_TIME_ARGS(aTimeThreshold * GST_USECOND)));
      gst_buffer_unref(buffer);
      buffer = nullptr;
      continue;
    }

    break;
  }

  if (!buffer)
    /* no more frames */
    return false;

  nsRefPtr<PlanarYCbCrImage> image;
#if GST_VERSION_MICRO >= 36
  /* If the decoder used a buffer we allocated (via AllocateVideoBufferFull),
   * our "moz-reader-data" qdata carries the PlanarYCbCrImage backing it. */
  const GstStructure* structure = gst_buffer_get_qdata(buffer,
      g_quark_from_string("moz-reader-data"));
  if (structure) {
    const GValue* value = gst_structure_get_value(structure, "image");
    if (value) {
      BufferData* data = reinterpret_cast<BufferData*>(g_value_get_boxed(value));
      image = data->mImage;
    }
  }
#endif

  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(buffer),
        GST_BUFFER_SIZE(buffer), nullptr, &tmp, image);

    /* copy */
    gst_buffer_copy_metadata(tmp, buffer, (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
    memcpy(GST_BUFFER_DATA(tmp), GST_BUFFER_DATA(buffer),
        GST_BUFFER_SIZE(tmp));
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  guint8* data = GST_BUFFER_DATA(buffer);

  int width = mPicture.width;
  int height = mPicture.height;
  GstVideoFormat format = mFormat;

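  /* Describe the three Y/Cb/Cr planes inside the packed GstBuffer; per-plane
   * offsets, strides and dimensions come from the negotiated video format. */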
  VideoData::YCbCrBuffer b;
  for (int i = 0; i < 3; i++) {
    b.mPlanes[i].mData = data + gst_video_format_get_component_offset(format, i,
        width, height);
    b.mPlanes[i].mStride = gst_video_format_get_row_stride(format, i, width);
    b.mPlanes[i].mHeight = gst_video_format_get_component_height(format,
        i, height);
    b.mPlanes[i].mWidth = gst_video_format_get_component_width(format,
        i, width);
    b.mPlanes[i].mOffset = 0;
    b.mPlanes[i].mSkip = 0;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer,
      GST_BUFFER_FLAG_DELTA_UNIT);
  /* XXX ? */
  int64_t offset = 0;
  VideoData* video = VideoData::Create(mInfo, image, offset,
                                       timestamp, nextTimestamp, b,
                                       isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);
  gst_buffer_unref(buffer);

  return true;
}
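
/* For context, a minimal sketch of the producer side that the qdata lookup in
 * DecodeVideoFrame assumes: the buffer-allocation path attaches a
 * "moz-reader-data" structure whose "image" field boxes the BufferData that
 * holds the PlanarYCbCrImage. This is an illustrative reconstruction, not the
 * actual AllocateVideoBufferFull implementation; BUFFER_DATA_TYPE is a
 * hypothetical stand-in for the boxed GType registered for BufferData. */
#if GST_VERSION_MICRO >= 36
static void
AttachReaderData(GstBuffer* aBuffer, BufferData* aData)
{
  GstStructure* structure = gst_structure_new("moz-reader-data",
      "image", BUFFER_DATA_TYPE, aData, nullptr);
  /* gst_buffer_set_qdata() takes ownership of the structure */
  gst_buffer_set_qdata(aBuffer, g_quark_from_string("moz-reader-data"),
      structure);
}
#endif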