Example #1
static gboolean
schedule_next_key_unit (GstHlsSink * sink)
{
  gboolean res = TRUE;
  GstClockTime running_time;
  GstPad *sinkpad = gst_element_get_static_pad (GST_ELEMENT (sink), "sink");

  if (sink->target_duration == 0)
    /* target-duration == 0 means that the app schedules key units itself */
    goto out;

  running_time = sink->last_running_time + sink->target_duration * GST_SECOND;
  GST_INFO_OBJECT (sink, "sending upstream force-key-unit, index %d "
      "now %" GST_TIME_FORMAT " target %" GST_TIME_FORMAT,
      sink->index + 1, GST_TIME_ARGS (sink->last_running_time),
      GST_TIME_ARGS (running_time));

  if (!(res = gst_pad_push_event (sinkpad,
              gst_video_event_new_upstream_force_key_unit (running_time,
                  TRUE, sink->index + 1)))) {
    GST_ERROR_OBJECT (sink, "Failed to push upstream force key unit event");
  }

out:
  /* mark as waiting for a fku event if the app schedules them or if we just
   * successfully scheduled one
   */
  sink->waiting_fku = res;
  gst_object_unref (sinkpad);
  return res;
}
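
Upstream, the event produced here is consumed by the encoder. As a minimal sketch of the receiving side (not part of the example above), a src-pad event handler can detect and parse the request with the real libgstvideo helpers gst_video_event_is_force_key_unit () and gst_video_event_parse_upstream_force_key_unit (); the actual keyframe request is codec specific and only hinted at in a comment:

#include <gst/video/video.h>

/* Sketch of the encoder side: detect an upstream force-key-unit
 * event in a src-pad event handler. */
static gboolean
handle_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstClockTime running_time;
  gboolean all_headers;
  guint count;

  if (gst_video_event_is_force_key_unit (event) &&
      gst_video_event_parse_upstream_force_key_unit (event, &running_time,
          &all_headers, &count)) {
    GST_INFO_OBJECT (parent, "key unit requested at %" GST_TIME_FORMAT
        " (all_headers=%d, count=%u)", GST_TIME_ARGS (running_time),
        all_headers, count);
    /* ...ask the codec for an IDR frame here (codec specific),
     * resending stream headers if all_headers is TRUE... */
    gst_event_unref (event);
    return TRUE;
  }
  return gst_pad_event_default (pad, parent, event);
}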
Example #2
static GstFlowReturn gst_videorepair_sink_chain(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
    GstVideoRepair *videorepair = GST_VIDEOREPAIR(parent);
    (void)pad;

    if (!GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        GST_DEBUG_OBJECT(videorepair, "got keyframe");
        videorepair->needs_intra = FALSE;
        videorepair->drop_count = 0;
    }

    if (videorepair->needs_intra && videorepair->drop_until_intra) {
        GST_DEBUG_OBJECT(videorepair, "dropping buffer waiting for intra");
        videorepair->drop_count++;
        if (videorepair->retry_interval
            && (videorepair->drop_count >= videorepair->retry_interval)) {
            GST_INFO_OBJECT(videorepair, "still no intra picture, requesting a key unit again");
            gst_pad_push_event(videorepair->sinkpad,
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            videorepair->drop_count = 0;
        }
        gst_buffer_unref(buffer);
        return GST_FLOW_OK;
    }

    return gst_pad_push(videorepair->srcpad, buffer);
}
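
The same event can also be triggered from application code rather than from inside an element. A minimal sketch, assuming encoder is any element whose src pad leads back to the video encoder (events sent to a src pad travel upstream):

/* Application-side sketch: force an immediate keyframe. */
static void
request_keyframe_now (GstElement * encoder)
{
  GstPad *srcpad = gst_element_get_static_pad (encoder, "src");

  if (srcpad != NULL) {
    gst_pad_send_event (srcpad,
        gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
            TRUE, 0));
    gst_object_unref (srcpad);
  }
}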
Example #3
static void
send_force_key_unit_event (GstPad * pad, gboolean all_headers)
{
  GstEvent *event;
  GstCaps *caps = gst_pad_get_current_caps (pad);

  if (caps == NULL) {
    caps = gst_pad_get_allowed_caps (pad);
  }

  if (caps == NULL) {
    return;
  }

  if (is_raw_caps (caps)) {
    goto end;
  }

  event =
      gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
      all_headers, 0);

  if (GST_PAD_DIRECTION (pad) == GST_PAD_SRC) {
    gst_pad_send_event (pad, event);
  } else {
    gst_pad_push_event (pad, event);
  }

end:
  gst_caps_unref (caps);
}
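
The is_raw_caps () helper is referenced but not shown in this example. A plausible stand-in, under the assumption that "raw" simply means uncompressed audio or video (a keyframe request is pointless for raw media):

/* Hypothetical helper: TRUE if the caps describe uncompressed media. */
static gboolean
is_raw_caps (GstCaps * caps)
{
  const gchar *name;

  if (gst_caps_is_any (caps) || gst_caps_get_size (caps) == 0)
    return FALSE;

  name = gst_structure_get_name (gst_caps_get_structure (caps, 0));
  return g_str_has_prefix (name, "video/x-raw")
      || g_str_has_prefix (name, "audio/x-raw");
}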
Example #4
static void clear_queue(guint stream_id, GstScreamQueue *self)
{
    GstScreamStream *stream;
    g_rw_lock_reader_lock(&self->lock);
    stream = g_hash_table_lookup(self->streams, GUINT_TO_POINTER(stream_id));
    g_rw_lock_reader_unlock(&self->lock);
    if (!stream) {
        GST_WARNING_OBJECT(self, "no stream with id %u, nothing to clear", stream_id);
        return;
    }
    clear_packet_queue(stream->packet_queue);
    stream->enqueued_payload_size = 0;
    stream->enqueued_packets = 0;
    gst_pad_push_event(self->sink_pad,
        gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
}
Example #5
static GstFlowReturn
gst_vaapidecode_parse_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;
  guint got_unit_size;
  gboolean got_frame;

  status = gst_vaapi_decoder_parse (decode->decoder, frame,
      adapter, at_eos, &got_unit_size, &got_frame);

  switch (status) {
    case GST_VAAPI_DECODER_STATUS_SUCCESS:
      if (got_unit_size > 0) {
        gst_video_decoder_add_to_frame (vdec, got_unit_size);
        decode->current_frame_size += got_unit_size;
      }
      if (got_frame) {
        ret = gst_video_decoder_have_frame (vdec);
        decode->current_frame_size = 0;
      } else
        ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
      ret = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
      GST_WARNING ("parse error %d", status);
      ret = GST_FLOW_NOT_SUPPORTED;
      decode->current_frame_size = 0;
      break;
    default:
      GST_WARNING ("parse error %d", status);
      /* just keep parsing, the decoder should have flushed the broken unit */
      ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      decode->current_frame_size = 0;

      GST_INFO ("requesting upstream a key unit");
      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decode),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              FALSE, 0));
      break;
  }
  return ret;
}
Example #6
static gboolean
gst_base_adaptive_sink_send_force_key_unit_event (GstBaseAdaptiveSink * sink,
    GstPad * pad, GstClockTime ts, guint count)
{
  GstEvent *event;

  event = gst_video_event_new_upstream_force_key_unit (ts,
      sink->chunked && sink->prepend_headers, count);

  if (!gst_pad_push_event (pad, event)) {
    GST_WARNING_OBJECT (sink, "Failed to push upstream force key unit event");
    return FALSE;
  }
  return TRUE;
}
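
A caller might use this helper to schedule a key unit at the next fragment boundary, in the same spirit as Example #1. A sketch only; fragment_duration (in seconds), last_running_time and index are assumed fields on the sink:

/* Hypothetical caller: request a key unit one fragment ahead. */
static void
schedule_next_fragment_key_unit (GstBaseAdaptiveSink * sink, GstPad * pad)
{
  GstClockTime ts = sink->last_running_time +
      sink->fragment_duration * GST_SECOND;

  if (!gst_base_adaptive_sink_send_force_key_unit_event (sink, pad, ts,
          sink->index + 1))
    GST_WARNING_OBJECT (sink, "could not schedule next key unit");
}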
Example #7
static gboolean gst_videorepair_sink_event(GstPad *pad, GstObject *parent, GstEvent *event)
{
    GstVideoRepair *videorepair = GST_VIDEOREPAIR(parent);
    gboolean ret;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_GAP:
        GST_INFO_OBJECT(videorepair, "got GAP event");
        gst_pad_push_event(videorepair->sinkpad,
            gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        videorepair->needs_intra = TRUE;
        /* the event is consumed here, so release our reference */
        gst_event_unref(event);
        ret = TRUE;
        break;

    default:
        ret = gst_pad_event_default(pad, parent, event);
    }

    return ret;
}
Example #8
static GstBuffer *
gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize = 1;
  guint size;
  gint spatial_layer = 0;
  gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* Mandatory with at least one header and one vp9 byte */
  if (G_UNLIKELY (size < hdrsize + 1))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);
  i_bit = (data[0] & 0x80) != 0;
  p_bit = (data[0] & 0x40) != 0;
  l_bit = (data[0] & 0x20) != 0;
  f_bit = (data[0] & 0x10) != 0;
  b_bit = (data[0] & 0x08) != 0;
  e_bit = (data[0] & 0x04) != 0;
  v_bit = (data[0] & 0x02) != 0;

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP9 layer frame, otherwise bail */
    if (!b_bit)
      goto done;

    self->started = TRUE;
  }

  GST_TRACE_OBJECT (self, "IPLFBEV : %d%d%d%d%d%d%d", i_bit, p_bit, l_bit,
      f_bit, b_bit, e_bit, v_bit);

  /* Check I optional header Picture ID */
  if (i_bit) {
    hdrsize++;
    if (G_UNLIKELY (size < hdrsize + 1))
      goto too_small;
    /* Check M for 15 bits PictureID */
    if ((data[1] & 0x80) != 0) {
      hdrsize++;
      if (G_UNLIKELY (size < hdrsize + 1))
        goto too_small;
    }
  }

  /* flexible-mode not implemented */
  g_assert (!f_bit);

  /* Check L optional header layer indices */
  if (l_bit) {
    hdrsize++;
    /* Check TL0PICIDX temporal layer zero index (non-flexible mode) */
    if (!f_bit)
      hdrsize++;
  }

  /* Check V optional Scalability Structure */
  if (v_bit) {
    guint n_s, y_bit, g_bit;
    guint8 *ss = &data[hdrsize];
    guint sssize = 1;

    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    n_s = (ss[0] & 0xe0) >> 5;
    y_bit = (ss[0] & 0x10) != 0;
    g_bit = (ss[0] & 0x08) != 0;

    GST_TRACE_OBJECT (self, "SS header: N_S=%u, Y=%u, G=%u", n_s, y_bit, g_bit);

    sssize += y_bit ? (n_s + 1) * 4 : 0;
    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    if (y_bit) {
      guint i;
      for (i = 0; i <= n_s; i++) {
        /* For now, simply use the last layer specified for width and height */
        self->ss_width = ss[1 + i * 4] * 256 + ss[2 + i * 4];
        self->ss_height = ss[3 + i * 4] * 256 + ss[4 + i * 4];
        GST_TRACE_OBJECT (self, "N_S[%d]: WIDTH=%u, HEIGHT=%u", i,
            self->ss_width, self->ss_height);
      }
    }

    if (g_bit) {
      guint i, j;
      guint n_g = ss[sssize];
      sssize++;
      if (G_UNLIKELY (size < hdrsize + sssize + 1))
        goto too_small;
      for (i = 0; i < n_g; i++) {
        guint t = (ss[sssize] & 0xe0) >> 5;
        guint u = (ss[sssize] & 0x10) >> 4;
        guint r = (ss[sssize] & 0x0c) >> 2;
        GST_TRACE_OBJECT (self, "N_G[%u]: 0x%02x -> T=%u, U=%u, R=%u", i,
            ss[sssize], t, u, r);
        for (j = 0; j < r; j++)
          GST_TRACE_OBJECT (self, "  R[%u]: P_DIFF=%u", j, ss[sssize + 1 + j]);
        sssize += 1 + r;
        if (G_UNLIKELY (size < hdrsize + sssize + 1))
          goto too_small;
      }
    }
    hdrsize += sssize;
  }

  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  {
    GstMapInfo map;
    gst_buffer_map (payload, &map, GST_MAP_READ);
    GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16);
    gst_buffer_unmap (payload, &map);
  }
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    gboolean key_frame_first_layer = !p_bit && spatial_layer == 0;


    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* Filter away all metas that are not sensible to copy */
    gst_rtp_drop_meta (GST_ELEMENT_CAST (self), out,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    if (!key_frame_first_layer) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (!self->caps_sent) {
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (self->last_width != self->ss_width ||
          self->last_height != self->ss_height) {
        GstCaps *srccaps;

        /* Width and height are optional in the RTP header. Consider to parse
         * the frame header in addition if missing from RTP header */
        if (self->ss_width != 0 && self->ss_height != 0) {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1,
              "width", G_TYPE_INT, self->ss_width,
              "height", G_TYPE_INT, self->ss_height, NULL);
        } else {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
        }

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->caps_sent = TRUE;
        self->last_width = self->ss_width;
        self->last_height = self->ss_height;
        self->ss_width = 0;
        self->ss_height = 0;
      }
    }

    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
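
For reference, the bit tests at the top of this function follow the VP9 payload descriptor from (early drafts of) draft-ietf-payload-vp9; shown here as an orientation comment, not code from the element:

/* VP9 payload descriptor, first octet:
 *
 *        0 1 2 3 4 5 6 7
 *       +-+-+-+-+-+-+-+-+
 *       |I|P|L|F|B|E|V|-|
 *       +-+-+-+-+-+-+-+-+
 *
 * I: PictureID present (0x80)      P: inter-picture predicted (0x40)
 * L: layer indices present (0x20)  F: flexible mode (0x10)
 * B: start of a frame (0x08)       E: end of a frame (0x04)
 * V: scalability structure (SS) present (0x02)
 */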
Example #9
static GstBuffer *
gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize;
  guint size;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP8 frame, otherwise bail */
    /* S=1 and PartID= 0 */
    if ((data[0] & 0x17) != 0x10)
      goto done;

    self->started = TRUE;
  }

  hdrsize = 1;
  /* Check X optional header */
  if ((data[0] & 0x80) != 0) {
    hdrsize++;
    /* Check I optional header */
    if ((data[1] & 0x80) != 0) {
      if (G_UNLIKELY (size < 3))
        goto too_small;
      hdrsize++;
      /* Check for 16 bits PictureID */
      if ((data[2] & 0x80) != 0)
        hdrsize++;
    }
    /* Check L optional header */
    if ((data[1] & 0x40) != 0)
      hdrsize++;
    /* Check T or K optional headers */
    if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0)
      hdrsize++;
  }
  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    guint8 header[10];

    /* the frame header is read from the first 10 bytes, so make sure
     * they are actually available */
    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;

    gst_adapter_copy (self->adapter, header, 0, 10);

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    if ((header[0] & 0x01)) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (!self->caps_sent) {
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      guint profile, width, height;

      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      profile = (header[0] & 0x0e) >> 1;
      width = GST_READ_UINT16_LE (header + 6) & 0x3fff;
      height = GST_READ_UINT16_LE (header + 8) & 0x3fff;

      if (G_UNLIKELY (self->last_width != width ||
              self->last_height != height || self->last_profile != profile)) {
        gchar profile_str[3];
        GstCaps *srccaps;

        snprintf (profile_str, 3, "%u", profile);
        srccaps = gst_caps_new_simple ("video/x-vp8",
            "framerate", GST_TYPE_FRACTION, 0, 1,
            "height", G_TYPE_INT, height,
            "width", G_TYPE_INT, width,
            "profile", G_TYPE_STRING, profile_str, NULL);

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->caps_sent = TRUE;
        self->last_width = width;
        self->last_height = height;
        self->last_profile = profile;
      }
    }

    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
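
The corresponding VP8 layout, from RFC 7741, again as an orientation comment:

/* VP8 payload descriptor:
 *
 *        0 1 2 3 4 5 6 7              0 1 2 3 4 5 6 7
 *       +-+-+-+-+-+-+-+-+            +-+-+-+-+-+-+-+-+
 *       |X|R|N|S|R| PID |  X octet:  |I|L|T|K|  RSV  |
 *       +-+-+-+-+-+-+-+-+            +-+-+-+-+-+-+-+-+
 *
 * The start-of-frame test (data[0] & 0x17) == 0x10 selects S=1 with
 * PID=0.  X (0x80) gates the extension octet; there, I (0x80) adds a
 * PictureID octet (extended to two octets when its M bit is set),
 * L (0x40) adds the TL0PICIDX octet, and T (0x20) and/or K (0x10) add
 * one shared TID/Y/KEYIDX octet, which is exactly what the hdrsize
 * accounting above counts.
 */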
Example #10
static GstFlowReturn
gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;

  if (!decode->input_state)
    goto not_negotiated;

  /* Decode current frame */
  for (;;) {
    status = gst_vaapi_decoder_decode (decode->decoder, frame);
    if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {
      /* Make sure that there are no decoded frames waiting in the
         output queue. */
      ret = gst_vaapidecode_push_all_decoded_frames (decode);
      if (ret != GST_FLOW_OK)
        goto error_push_all_decoded_frames;

      g_mutex_lock (&decode->surface_ready_mutex);
      if (gst_vaapi_decoder_check_status (decode->decoder) ==
          GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE)
        g_cond_wait (&decode->surface_ready, &decode->surface_ready_mutex);
      g_mutex_unlock (&decode->surface_ready_mutex);
      continue;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
      goto error_decode;
    break;
  }

  /* Note that gst_vaapi_decoder_decode cannot return success without
     completing the decode and pushing all decoded frames into the output
     queue */
  return gst_vaapidecode_push_all_decoded_frames (decode);

  /* ERRORS */
error_push_all_decoded_frames:
  {
    GST_ERROR ("push loop error while decoding %d", ret);
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
error_decode:
  {
    GST_ERROR ("decode error %d", status);
    switch (status) {
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
        ret = GST_FLOW_NOT_SUPPORTED;
        break;
      default:
        GST_VIDEO_DECODER_ERROR (vdec, 1, STREAM, DECODE, ("Decoding error"),
            ("Decode error %d", status), ret);
        GST_INFO ("requesting upstream a key unit");
        gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decode),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                FALSE, 0));
        ret = GST_FLOW_OK;
        break;
    }
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
not_negotiated:
  {
    GST_ERROR_OBJECT (decode, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
}
Example #11
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
        /* frame is guaranteed non-NULL here; drop the reference taken
         * by gst_video_decoder_get_oldest_frame() */
        gst_video_codec_frame_unref(frame);
        return GST_FLOW_OK;
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec)) || actual_width != openh264dec->priv->width || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            gst_video_codec_state_unref(state);
            gst_video_codec_frame_unref(frame);
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        gst_video_codec_frame_unref (frame);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_ERROR;
    }

    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}
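
Finally, the upstream request seen throughout these examples has a downstream counterpart: once the encoder has produced the forced keyframe, it can announce it with gst_video_event_new_downstream_force_key_unit (), which elements such as the HLS sink in Example #1 can wait for. A minimal sketch of that half of the handshake (the wrapper itself is hypothetical; the event constructor is the real libgstvideo API):

/* Hypothetical encoder-side sketch: announce the forced key unit
 * downstream after producing it. */
static void
announce_key_unit (GstPad * srcpad, GstClockTime timestamp,
    GstClockTime stream_time, GstClockTime running_time,
    gboolean all_headers, guint count)
{
  gst_pad_push_event (srcpad,
      gst_video_event_new_downstream_force_key_unit (timestamp,
          stream_time, running_time, all_headers, count));
}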