Example #1
/* Writes an 11-byte FLV tag header: packet type, 24-bit payload size,
 * timestamp (24 bits plus a zero extension byte, or a full 32 bits when it
 * exceeds EXT_TIMESTAMP_LIMIT) and a zeroed 24-bit stream id. */
static void
flv_write_tag (guint8 * data,
    guint8 packet_type, guint payload_size, guint32 timestamp)
{
  data[0] = packet_type;
  GST_WRITE_UINT24_BE (&data[1], payload_size);

  if (timestamp > EXT_TIMESTAMP_LIMIT) {
    GST_WRITE_UINT32_BE (&data[4], timestamp);
  } else {
    GST_WRITE_UINT24_BE (&data[4], timestamp);
    data[7] = 0;
  }
  GST_WRITE_UINT24_BE (&data[8], 0);
}
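
Every example on this page relies on GST_WRITE_UINT24_BE, which stores the low 24 bits of a value at a (possibly unaligned) address, most significant byte first. A minimal standalone demo of just that byte layout, not taken from any of the sources above:

#include <gst/gst.h>
#include <stdio.h>

int
main (void)
{
  guint8 buf[3] = { 0, 0, 0 };

  /* Stores the low 24 bits of the value, most significant byte first;
   * the destination does not need to be aligned. */
  GST_WRITE_UINT24_BE (buf, 0x123456);

  printf ("%02x %02x %02x\n", buf[0], buf[1], buf[2]);  /* prints: 12 34 56 */
  return 0;
}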
Example #2
static void
put_packet (GstPluginLoader * l, guint type, guint32 tag,
    const guint8 * payload, guint32 payload_len)
{
  guint8 *out;
  guint len = payload_len + HEADER_SIZE;

  if (l->tx_buf_write + len >= l->tx_buf_size) {
    GST_LOG ("Expanding tx buf from %d to %d for packet of size %d",
        l->tx_buf_size, l->tx_buf_write + len + BUF_GROW_EXTRA, len);
    l->tx_buf_size = l->tx_buf_write + len + BUF_GROW_EXTRA;
    l->tx_buf = g_realloc (l->tx_buf, l->tx_buf_size);
  }

  out = l->tx_buf + l->tx_buf_write;

  /* one byte packet type */
  out[0] = type;
  /* 3 byte packet tag number */
  GST_WRITE_UINT24_BE (out + 1, tag);
  /* 4 bytes packet length */
  GST_WRITE_UINT32_BE (out + 4, payload_len);
  /* payload */
  memcpy (out + HEADER_SIZE, payload, payload_len);
  /* Write magic into the header */
  GST_WRITE_UINT32_BE (out + 8, HEADER_MAGIC);

  l->tx_buf_write += len;
  gst_poll_fd_ctl_write (l->fdset, &l->fd_w, TRUE);
}
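
The reader side of this protocol is not shown on this page; presumably it decodes the same fixed layout with the matching GST_READ_* macros. A rough sketch under that assumption (the function name, signature and the HEADER_SIZE value of 12 are inferred from the writer above, not taken from the plugin loader itself):

#include <gst/gst.h>

/* Illustrative counterpart to put_packet() above.  HEADER_SIZE is assumed
 * to be 12, matching the writer (magic occupies bytes 8-11, payload follows).
 * The magic is returned so the caller can compare it against HEADER_MAGIC. */
static gboolean
parse_packet_header (const guint8 * in, guint avail,
    guint8 * type, guint32 * tag, guint32 * payload_len, guint32 * magic)
{
  if (avail < 12)
    return FALSE;

  *type = in[0];                                /* one byte packet type */
  *tag = GST_READ_UINT24_BE (in + 1);           /* 3 byte packet tag number */
  *payload_len = GST_READ_UINT32_BE (in + 4);   /* 4 bytes packet length */
  *magic = GST_READ_UINT32_BE (in + 8);         /* header magic */

  return TRUE;
}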
Example #3
/* Appends `value` to the serializer as a 24-bit big-endian integer,
 * advancing the write offset only if the bounds check succeeds. */
static void
_serialize_u24 (AmfSerializer * serializer, int value)
{
  if (_serialize_check (serializer, 3)) {
    GST_WRITE_UINT24_BE (serializer->data + serializer->offset, value);
    serializer->offset += 3;
  }
}
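
_serialize_check is not shown here; from its use above it presumably verifies (or arranges) that the serializer has room for the requested number of bytes. A minimal sketch of such a guard, assuming AmfSerializer also carries a total allocation size (the `size` member is an assumption for illustration):

/* Illustrative only: the real _serialize_check may instead grow the
 * buffer on demand.  Assumes AmfSerializer has a `size` field holding
 * the allocated length of `data`. */
static gboolean
_serialize_check (AmfSerializer * serializer, guint bytes)
{
  return serializer->offset + bytes <= serializer->size;
}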
Example #4
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);
    buffer = gst_buffer_make_writable (buffer);
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't completely fill an output buffer, we push it
     * as soon as we add a fragment, because it does not seem possible
     * to determine where an asf packet fragment ends inside an rtp
     * packet payload.
     * This flag tells us to push the packet.
     */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
          0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first asf packet, its send time is the 
       * rtp packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* there is not enough room for any more buffers */
    if (force_push || size_left <= 8) {

      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);
  return ret;
}
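
Every chunk emitted by the loop above starts with the same 8-byte header: a flags byte (0x80 = key frame, 0x40 = the next field is a length rather than an offset, 0x20 = relative timestamp present), a 24-bit length-or-offset field, and a 32-bit send time relative to the first asf packet in the RTP buffer. A small sketch of just that header write, distilled from the loop (the helper name is made up):

#include <gst/gst.h>

/* Illustrative only: mirrors the 8-byte header layout used in the
 * payloading loop above.  When `complete` is TRUE the 24-bit field
 * carries the payload length (flag 0x40); otherwise it carries the
 * offset of this fragment within the asf packet. */
static void
write_asf_fragment_header (guint8 * data, gboolean keyframe,
    gboolean complete, guint32 len_or_off, gint32 rel_send_time)
{
  guint8 flags = 0x20;          /* relative timestamp is present */

  if (keyframe)
    flags |= 0x80;
  if (complete)
    flags |= 0x40;

  data[0] = flags;
  GST_WRITE_UINT24_BE (data + 1, len_or_off);
  GST_WRITE_UINT32_BE (data + 4, rel_send_time);
}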
Example #5
static GstCaps *
gst_vp8_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
  GstCaps *caps;
  const GstVideoState *state;
  GstTagList *tags = NULL;
  const GstTagList *iface_tags;
  GstBuffer *stream_hdr, *vorbiscomment;
  guint8 *data;
  GstStructure *s;
  GValue array = { 0 };
  GValue value = { 0 };

  state = gst_base_video_encoder_get_state (base_video_encoder);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);

  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  data = GST_BUFFER_DATA (stream_hdr);

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, state->width);
  GST_WRITE_UINT16_BE (data + 10, state->height);
  GST_WRITE_UINT24_BE (data + 12, state->par_n);
  GST_WRITE_UINT24_BE (data + 15, state->par_d);
  GST_WRITE_UINT32_BE (data + 18, state->fps_n);
  GST_WRITE_UINT32_BE (data + 22, state->fps_d);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags =
      gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer ((iface_tags) ? iface_tags : tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

  return caps;
}
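
The 26-byte stream header built above follows a fixed layout, so it can be read back with the matching GST_READ_* macros; a rough sketch (the function name and out-parameters are illustrative, not part of vp8enc):

#include <gst/gst.h>

/* Illustrative reader for the 26-byte "OVP80" stream-info header
 * produced above; the function name and out-params are not from
 * the vp8enc code. */
static gboolean
read_vp8_stream_header (const guint8 * data, gsize size,
    guint * width, guint * height, gint * par_n, gint * par_d,
    gint * fps_n, gint * fps_d)
{
  if (size < 26)
    return FALSE;
  if (data[0] != 0x4F || GST_READ_UINT32_BE (data + 1) != 0x56503830)
    return FALSE;               /* not an "OVP80" header */
  if (data[5] != 0x01)
    return FALSE;               /* not the stream-info sub-header */

  *width = GST_READ_UINT16_BE (data + 8);
  *height = GST_READ_UINT16_BE (data + 10);
  *par_n = GST_READ_UINT24_BE (data + 12);
  *par_d = GST_READ_UINT24_BE (data + 15);
  *fps_n = GST_READ_UINT32_BE (data + 18);
  *fps_d = GST_READ_UINT32_BE (data + 22);
  return TRUE;
}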
Example #6
static void
gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
    GstVideoInfo * info)
{
  GstStructure *s;
  GstVideoEncoder *video_encoder;
  GstBuffer *stream_hdr, *vorbiscomment;
  const GstTagList *iface_tags;
  GValue array = { 0, };
  GValue value = { 0, };
  guint8 *data = NULL;
  GstMapInfo map;

  video_encoder = GST_VIDEO_ENCODER (enc);
  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
  data = map.data;

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
  GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
  GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
  GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
  GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
  GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));

  gst_buffer_unmap (stream_hdr, &map);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer (iface_tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

}
static gboolean
gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  GstVP8Enc *encoder;
  vpx_codec_enc_cfg_t cfg;
  vpx_codec_err_t status;
  vpx_image_t *image;
  guint8 *data = NULL;
  GstCaps *caps;
  gboolean ret;

  encoder = GST_VP8_ENC (base_video_encoder);
  GST_DEBUG_OBJECT (base_video_encoder, "set_format");

  if (encoder->inited) {
    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
    return FALSE;
  }

  status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to get default encoder configuration"), ("%s",
            gst_vpx_error_name (status)));
    return FALSE;
  }

  /* Scale default bitrate to our size */
  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
      state->width * state->height,
      cfg.g_w * cfg.g_h);

  cfg.g_w = state->width;
  cfg.g_h = state->height;
  cfg.g_timebase.num = state->fps_d;
  cfg.g_timebase.den = state->fps_n;

  cfg.g_error_resilient = encoder->error_resilient;
  cfg.g_lag_in_frames = encoder->max_latency;
  cfg.g_threads = encoder->threads;
  cfg.rc_end_usage = encoder->mode;
  cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
  cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
  /* A standalone qp-min does not make any sense: with bitrate=0 and
   * qp-min=1 the encoder will only use the default qp-max=63, which
   * also gives the worst possible quality.
   */
  if (encoder->bitrate != DEFAULT_BITRATE ||
      encoder->max_quantizer != DEFAULT_MAX_QUANTIZER) {
    cfg.rc_target_bitrate = encoder->bitrate / 1000;
    cfg.rc_min_quantizer = encoder->min_quantizer;
    cfg.rc_max_quantizer = encoder->max_quantizer;
  } else {
    cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
    cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
  }
  cfg.rc_dropframe_thresh = encoder->drop_frame;
  cfg.rc_resize_allowed = encoder->resize_allowed;

  cfg.kf_mode = VPX_KF_AUTO;
  cfg.kf_min_dist = 0;
  cfg.kf_max_dist = encoder->max_keyframe_distance;

  cfg.g_pass = encoder->multipass_mode;
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
    encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
  } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
    GError *err = NULL;

    if (!encoder->multipass_cache_file) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("No multipass cache file provided"), (NULL));
      return FALSE;
    }

    if (!g_file_get_contents (encoder->multipass_cache_file,
            (gchar **) & encoder->last_pass_cache_content.buf,
            &encoder->last_pass_cache_content.sz, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("Failed to read multipass cache file provided"), ("%s",
              err->message));
      g_error_free (err);
      return FALSE;
    }
    cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
  }

  status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
      &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* FIXME move this to a set_speed() function */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
      (encoder->speed == 0) ? 0 : (encoder->speed - 1));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
        gst_vpx_error_name (status));
  }

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
      encoder->noise_sensitivity);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
      encoder->sharpness);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
      encoder->static_threshold);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
      encoder->partitions);
#if 0
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
      encoder->arnr_maxframes);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
      encoder->arnr_strength);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
      encoder->arnr_type);
#endif
#ifdef HAVE_VP8ENC_TUNING
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
      encoder->tuning);
#endif

  status =
      vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
      (encoder->auto_alt_ref_frames ? 1 : 0));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_ENABLEAUTOALTREF to %d: %s",
        (encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
  }

  cfg.g_lag_in_frames = encoder->lag_in_frames;

  gst_base_video_encoder_set_latency (base_video_encoder, 0,
      gst_util_uint64_scale (encoder->max_latency,
          state->fps_d * GST_SECOND, state->fps_n));
  encoder->inited = TRUE;

  /* prepare cached image buffer setup */
  image = &encoder->image;
  memset (image, 0, sizeof (*image));

  image->fmt = VPX_IMG_FMT_I420;
  image->bps = 12;
  image->x_chroma_shift = image->y_chroma_shift = 1;
  image->w = image->d_w = state->width;
  image->h = image->d_h = state->height;

  image->stride[VPX_PLANE_Y] =
      gst_video_format_get_row_stride (state->format, 0, state->width);
  image->stride[VPX_PLANE_U] =
      gst_video_format_get_row_stride (state->format, 1, state->width);
  image->stride[VPX_PLANE_V] =
      gst_video_format_get_row_stride (state->format, 2, state->width);
  image->planes[VPX_PLANE_Y] =
      data + gst_video_format_get_component_offset (state->format, 0,
      state->width, state->height);
  image->planes[VPX_PLANE_U] =
      data + gst_video_format_get_component_offset (state->format, 1,
      state->width, state->height);
  image->planes[VPX_PLANE_V] =
      data + gst_video_format_get_component_offset (state->format, 2,
      state->width, state->height);


  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  {
    GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
    GValue array = { 0, };
    GValue value = { 0, };
    s = gst_caps_get_structure (caps, 0);

    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
    g_value_init (&value, GST_TYPE_BUFFER);

    /* Create Ogg stream-info */
    stream_hdr = gst_buffer_new_and_alloc (26);
    data = GST_BUFFER_DATA (stream_hdr);

    GST_WRITE_UINT8 (data, 0x4F);
    GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
    GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
    GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
    GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
    GST_WRITE_UINT16_BE (data + 8, state->width);
    GST_WRITE_UINT16_BE (data + 10, state->height);
    GST_WRITE_UINT24_BE (data + 12, state->par_n);
    GST_WRITE_UINT24_BE (data + 15, state->par_d);
    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
    GST_WRITE_UINT32_BE (data + 22, state->fps_d);

    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
    gst_value_set_buffer (&value, stream_hdr);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (stream_hdr);

    iface_tags =
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
    if (iface_tags) {
      vorbiscomment =
          gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_set_buffer (&value, vorbiscomment);
      gst_value_array_append_value (&array, &value);
      g_value_unset (&value);
      gst_buffer_unref (vorbiscomment);
    }

    gst_structure_set_value (s, "streamheader", &array);
    g_value_unset (&array);
  }

  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
  gst_caps_unref (caps);

  return ret;
}