Example #1
#ifdef __SYMBIAN32__
EXPORT_C
#endif

gboolean
gst_rtp_buffer_set_extension_data (GstBuffer * buffer, guint16 bits,
    guint16 length)
{
  guint32 min_size = 0;
  guint8 *data;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);
  g_return_val_if_fail (GST_BUFFER_DATA (buffer) != NULL, FALSE);

  gst_rtp_buffer_set_extension (buffer, TRUE);
  min_size =
      GST_RTP_HEADER_LEN + GST_RTP_HEADER_CSRC_SIZE (buffer) + 4 +
      length * sizeof (guint32);

  if (min_size > GST_BUFFER_SIZE (buffer)) {
    GST_WARNING_OBJECT (buffer,
        "rtp buffer too small: need more than %d bytes but only have %d bytes",
        min_size, GST_BUFFER_SIZE (buffer));
    return FALSE;
  }

  data =
      GST_BUFFER_DATA (buffer) + GST_RTP_HEADER_LEN +
      GST_RTP_HEADER_CSRC_SIZE (buffer);
  GST_WRITE_UINT16_BE (data, bits);
  GST_WRITE_UINT16_BE (data + 2, length);
  return TRUE;
}
Example #2
/**
 * gst_rtp_buffer_set_extension_data:
 * @buffer: the buffer
 * @bits: the bits specific for the extension
 * @length: the length that counts the number of 32-bit words in
 * the extension, excluding the extension header (therefore zero is a valid length)
 *
 * Set the extension bit of the rtp buffer and fill in the @bits and @length of the
 * extension header. It will refuse to set the extension data if the buffer is not
 * large enough.
 *
 * Returns: TRUE if done.
 *
 * Since: 0.10.18
 */
gboolean
gst_rtp_buffer_set_extension_data (GstBuffer * buffer, guint16 bits,
    guint16 length)
{
  guint32 min_size = 0;
  guint8 *data;

  data = GST_BUFFER_DATA (buffer);

  /* check if the buffer is big enough to hold the extension */
  min_size =
      GST_RTP_HEADER_LEN + GST_RTP_HEADER_CSRC_SIZE (data) + 4 +
      length * sizeof (guint32);
  if (G_UNLIKELY (min_size > GST_BUFFER_SIZE (buffer)))
    goto too_small;

  /* now we can set the extension bit */
  gst_rtp_buffer_set_extension (buffer, TRUE);

  data += GST_RTP_HEADER_LEN + GST_RTP_HEADER_CSRC_SIZE (data);
  GST_WRITE_UINT16_BE (data, bits);
  GST_WRITE_UINT16_BE (data + 2, length);

  return TRUE;

  /* ERRORS */
too_small:
  {
    g_warning
        ("rtp buffer too small: need more than %d bytes but only have %d bytes",
        min_size, GST_BUFFER_SIZE (buffer));
    return FALSE;
  }
}
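A minimal usage sketch (not taken from the projects above; it assumes the GStreamer 0.10 RTP buffer API documented here): the extension words are written directly after the RTP header, so the buffer has to be allocated with enough room for the 4-byte extension header plus the extension words in addition to the payload.

/* Illustrative only: room for the extension header (4 bytes), one 32-bit
 * extension word and a 16-byte payload; 0xBEDE is just an example value
 * for the @bits field. */
GstBuffer *buf;

buf = gst_rtp_buffer_new_allocate (4 + 4 + 16, 0, 0);
if (!gst_rtp_buffer_set_extension_data (buf, 0xBEDE, 1))
  GST_WARNING ("buffer too small for RTP extension data");
gst_buffer_unref (buf);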
Example #3

static gboolean
atoms_recov_write_version (FILE * f)
{
  guint8 data[2];

  /* store the file-format version as a big-endian 16-bit value and write it */
  GST_WRITE_UINT16_BE (data, ATOMS_RECOV_FILE_VERSION);
  return fwrite (data, 2, 1, f) == 1;
}
Example #4
File: amf.c Project: dschleef/gst-rtmp
static void
_serialize_u16 (AmfSerializer * serializer, int value)
{
  /* append the value as a big-endian 16-bit integer and advance the offset */
  if (_serialize_check (serializer, 2)) {
    GST_WRITE_UINT16_BE (serializer->data + serializer->offset, value);
    serializer->offset += 2;
  }
}
Example #5
static void
write_fragment (SnraServerClient * client, gchar * body, gsize len)
{
    gchar header[14];
    gsize header_len, i;
    const guint8 masking = 0x00;  // 0x80 to enable masking
    GIOStatus status;

    union
    {
        gchar bytes[4];
        guint32 val;
    } mask;

    header[0] = 0x81;
    header_len = 2;
    if (len < 126) {
        header[1] = masking | len;
    } else if (len < 65536) {
        header[1] = masking | 126;
        GST_WRITE_UINT16_BE (header + 2, (guint16) (len));
        header_len += 2;
    } else {
        header[1] = masking | 127;
        GST_WRITE_UINT64_BE (header + 2, (guint64) (len));
        header_len += 8;
    }
    /* Add a random mask word. */
    if (masking) {
        mask.val = g_random_int ();
        memcpy (header + header_len, mask.bytes, 4);
        header_len += 4;
    }

    /* Write WebSocket frame header */
    status = write_to_io_channel (client->io, header, header_len);
    if (status != G_IO_STATUS_NORMAL)
        goto done;

    /* Mask the body, and send it */
    if (masking) {
        if (len > client->out_bufsize) {
            client->out_bufsize = len;
            client->out_buf = g_realloc (client->out_buf, client->out_bufsize);
        }
        for (i = 0; i < len; i++) {
            client->out_buf[i] = body[i] ^ mask.bytes[i % 4];
        }
        status = write_to_io_channel (client->io, client->out_buf, len);
    } else {
        status = write_to_io_channel (client->io, body, len);
    }

done:
    if (status != G_IO_STATUS_NORMAL)
        snra_server_connection_lost (client);
}
Example #6
File: amf.c Project: dschleef/gst-rtmp
static void
_serialize_utf8_string (AmfSerializer * serializer, const char *s)
{
  int size;

  size = strlen (s);
  /* write a 16-bit big-endian length prefix followed by the string bytes */
  if (_serialize_check (serializer, 2 + size)) {
    GST_WRITE_UINT16_BE (serializer->data + serializer->offset, size);
    memcpy (serializer->data + serializer->offset + 2, s, size);
    serializer->offset += 2 + size;
  }
}
Example #7
static void
gst_exif_writer_write_short_tag (GstExifWriter * writer, guint16 tag,
    guint16 value)
{
  guint32 offset = 0;

  if (writer->byte_order == G_LITTLE_ENDIAN) {
    GST_WRITE_UINT16_LE ((guint8 *) & offset, value);
  } else {
    GST_WRITE_UINT16_BE ((guint8 *) & offset, value);
  }

  gst_exif_writer_write_tag_header (writer, tag, EXIF_TYPE_SHORT,
      1, offset, TRUE);
}
Example #8
GstBuffer *
gst_rm_utils_descramble_dnet_buffer (GstBuffer * buf)
{
  guint8 *data, *end;

  buf = gst_buffer_make_writable (buf);

  /* dnet = byte-order swapped AC3 */
  data = GST_BUFFER_DATA (buf);
  end = GST_BUFFER_DATA (buf) + GST_BUFFER_SIZE (buf);
  while ((data + 1) < end) {
    /* byte-swap in an alignment-safe way */
    GST_WRITE_UINT16_BE (data, GST_READ_UINT16_LE (data));
    data += sizeof (guint16);
  }
  return buf;
}
Example #9
static void
send_rtp_packet (GstPad * src, guint timestamp, gboolean marker, gboolean end,
    guint number, guint volume, guint duration)
{
  GstBuffer *buf;
  GstRTPBuffer rtpbuf = GST_RTP_BUFFER_INIT;
  gchar *payload;
  static guint seqnum = 1;

  buf = gst_rtp_buffer_new_allocate (4, 0, 0);
  fail_unless (gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtpbuf));
  gst_rtp_buffer_set_seq (&rtpbuf, seqnum++);
  gst_rtp_buffer_set_timestamp (&rtpbuf, timestamp);
  gst_rtp_buffer_set_marker (&rtpbuf, marker);
  payload = gst_rtp_buffer_get_payload (&rtpbuf);
  payload[0] = number;
  payload[1] = volume | (end ? END_BIT : 0);
  GST_WRITE_UINT16_BE (payload + 2, duration);
  gst_rtp_buffer_unmap (&rtpbuf);
  fail_unless (gst_pad_push (src, buf) == GST_FLOW_OK);
}
Example #10
/* byte together avc codec data based on collected pps and sps so far */
static GstBuffer *
gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
{
  GstBuffer *buf, *nal;
  gint i, sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
  guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
  gboolean found = FALSE;
  guint8 *data;

  /* only nal payload in stored nals */

  for (i = 0; i < MAX_SPS_COUNT; i++) {
    if ((nal = h264parse->params->sps_nals[i])) {
      num_sps++;
      /* size bytes also count */
      sps_size += GST_BUFFER_SIZE (nal) + 2;
      if (GST_BUFFER_SIZE (nal) >= 4) {
        found = TRUE;
        profile_idc = (GST_BUFFER_DATA (nal))[1];
        profile_comp = (GST_BUFFER_DATA (nal))[2];
        level_idc = (GST_BUFFER_DATA (nal))[3];
      }
    }
  }
  for (i = 0; i < MAX_PPS_COUNT; i++) {
    if ((nal = h264parse->params->pps_nals[i])) {
      num_pps++;
      /* size bytes also count */
      pps_size += GST_BUFFER_SIZE (nal) + 2;
    }
  }

  GST_DEBUG_OBJECT (h264parse,
      "constructing codec_data: num_sps=%d, num_pps=%d", num_sps, num_pps);

  if (!found || !num_pps)
    return NULL;

  buf = gst_buffer_new_and_alloc (5 + 1 + sps_size + 1 + pps_size);
  data = GST_BUFFER_DATA (buf);

  data[0] = 1;                  /* AVC Decoder Configuration Record ver. 1 */
  data[1] = profile_idc;        /* profile_idc                             */
  data[2] = profile_comp;       /* profile_compatibility                   */
  data[3] = level_idc;          /* level_idc                               */
  data[4] = 0xfc | (4 - 1);     /* nal_length_size_minus1                  */
  data[5] = 0xe0 | num_sps;     /* number of SPSs */

  data += 6;
  for (i = 0; i < MAX_SPS_COUNT; i++) {
    if ((nal = h264parse->params->sps_nals[i])) {
      GST_WRITE_UINT16_BE (data, GST_BUFFER_SIZE (nal));
      memcpy (data + 2, GST_BUFFER_DATA (nal), GST_BUFFER_SIZE (nal));
      data += 2 + GST_BUFFER_SIZE (nal);
    }
  }

  data[0] = num_pps;
  data++;
  for (i = 0; i < MAX_PPS_COUNT; i++) {
    if ((nal = h264parse->params->pps_nals[i])) {
      GST_WRITE_UINT16_BE (data, GST_BUFFER_SIZE (nal));
      memcpy (data + 2, GST_BUFFER_DATA (nal), GST_BUFFER_SIZE (nal));
      data += 2 + GST_BUFFER_SIZE (nal);
    }
  }

  return buf;
}
Example #11
static GstFlowReturn
gst_rdt_depay_handle_data (GstRDTDepay * rdtdepay, GstClockTime outtime,
    GstRDTPacket * packet)
{
  GstFlowReturn ret;
  GstBuffer *outbuf;
  GstMapInfo outmap;
  guint8 *data, *outdata;
  guint size;
  guint16 stream_id;
  guint32 timestamp;
  gint gap;
  guint16 seqnum;
  guint8 flags;
  guint16 outflags;

  /* get pointers to the packet data */
  data = gst_rdt_packet_data_map (packet, &size);

  outbuf = gst_buffer_new_and_alloc (12 + size);
  GST_BUFFER_TIMESTAMP (outbuf) = outtime;

  GST_DEBUG_OBJECT (rdtdepay, "have size %u", size);

  /* copy over some things */
  stream_id = gst_rdt_packet_data_get_stream_id (packet);
  timestamp = gst_rdt_packet_data_get_timestamp (packet);
  flags = gst_rdt_packet_data_get_flags (packet);

  seqnum = gst_rdt_packet_data_get_seq (packet);

  GST_DEBUG_OBJECT (rdtdepay, "stream_id %u, timestamp %u, seqnum %d, flags %d",
      stream_id, timestamp, seqnum, flags);

  if (rdtdepay->next_seqnum != -1) {
    gap = gst_rdt_buffer_compare_seqnum (seqnum, rdtdepay->next_seqnum);

    /* if we have no gap, all is fine */
    if (G_UNLIKELY (gap != 0)) {
      GST_LOG_OBJECT (rdtdepay, "got packet %u, expected %u, gap %d", seqnum,
          rdtdepay->next_seqnum, gap);
      if (gap < 0) {
        /* seqnum > next_seqnum, we are missing some packets, this is always a
         * DISCONT. */
        GST_LOG_OBJECT (rdtdepay, "%d missing packets", gap);
        rdtdepay->discont = TRUE;
      } else {
        /* seqnum < next_seqnum, we have seen this packet before or the sender
         * could be restarted. If the packet is not too old, we throw it away as
         * a duplicate, otherwise we mark discont and continue. 100 misordered
         * packets is a good threshold. See also RFC 4737. */
        if (gap < 100)
          goto dropping;

        GST_LOG_OBJECT (rdtdepay,
            "%d > 100, packet too old, sender likely restarted", gap);
        rdtdepay->discont = TRUE;
      }
    }
  }
  rdtdepay->next_seqnum = (seqnum + 1);
  if (rdtdepay->next_seqnum == 0xff00)
    rdtdepay->next_seqnum = 0;

  if ((flags & 1) == 0)
    outflags = 2;
  else
    outflags = 0;

  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
  outdata = outmap.data;
  GST_WRITE_UINT16_BE (outdata + 0, 0); /* version   */
  GST_WRITE_UINT16_BE (outdata + 2, size + 12); /* length    */
  GST_WRITE_UINT16_BE (outdata + 4, stream_id); /* stream    */
  GST_WRITE_UINT32_BE (outdata + 6, timestamp); /* timestamp */
  GST_WRITE_UINT16_BE (outdata + 10, outflags); /* flags     */
  memcpy (outdata + 12, data, size);
  gst_buffer_unmap (outbuf, &outmap);
  gst_buffer_resize (outbuf, 0, 12 + size);

  gst_rdt_packet_data_unmap (packet);

  GST_DEBUG_OBJECT (rdtdepay, "Pushing packet, outtime %" GST_TIME_FORMAT,
      GST_TIME_ARGS (outtime));

  ret = gst_rdt_depay_push (rdtdepay, outbuf);

  return ret;

  /* ERRORS */
dropping:
  {
    GST_WARNING_OBJECT (rdtdepay, "%d <= 100, dropping old packet", gap);
    return GST_FLOW_OK;
  }
}
Example #12
static void
flush_data (GstRtpQDM2Depay * depay)
{
  guint i;
  guint avail;

  if ((avail = gst_adapter_available (depay->adapter)))
    gst_adapter_flush (depay->adapter, avail);

  GST_DEBUG ("Flushing %d packets", depay->nbpackets);

  for (i = 0; depay->packets[i]; i++) {
    QDM2Packet *pack = depay->packets[i];
    guint32 crc = 0;
    int i = 0;
    GstBuffer *buf;
    guint8 *data;

    /* CRC is the sum of everything (including first bytes) */

    data = pack->data;

    if (G_UNLIKELY (data == NULL))
      continue;

    /* If the packet size is bigger than 0xff, we need 2 bytes to store the size */
    if (depay->packetsize > 0xff) {
      /* Expanded size 0x02 | 0x80 */
      data[0] = 0x82;
      GST_WRITE_UINT16_BE (data + 1, depay->packetsize - 3);
    } else {
      data[0] = 0x2;
      data[1] = depay->packetsize - 2;
    }

    /* Calculate CRC */
    for (; i < depay->packetsize; i++)
      crc += data[i];

    GST_DEBUG ("CRC is 0x%x", crc);

    /* Write CRC */
    if (depay->packetsize > 0xff)
      GST_WRITE_UINT16_BE (data + 3, crc);
    else
      GST_WRITE_UINT16_BE (data + 2, crc);

    GST_MEMDUMP ("Extracted packet", data, depay->packetsize);

    buf = gst_buffer_new ();
    GST_BUFFER_DATA (buf) = data;
    GST_BUFFER_MALLOCDATA (buf) = data;
    GST_BUFFER_SIZE (buf) = depay->packetsize;

    gst_adapter_push (depay->adapter, buf);

    if (pack->data) {
      pack->data = NULL;
    }
  }
}
Example #13
static GstCaps *
gst_vp8_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
  GstCaps *caps;
  const GstVideoState *state;
  GstTagList *tags = NULL;
  const GstTagList *iface_tags;
  GstBuffer *stream_hdr, *vorbiscomment;
  guint8 *data;
  GstStructure *s;
  GValue array = { 0 };
  GValue value = { 0 };

  state = gst_base_video_encoder_get_state (base_video_encoder);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);

  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  data = GST_BUFFER_DATA (stream_hdr);

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, state->width);
  GST_WRITE_UINT16_BE (data + 10, state->height);
  GST_WRITE_UINT24_BE (data + 12, state->par_n);
  GST_WRITE_UINT24_BE (data + 15, state->par_d);
  GST_WRITE_UINT32_BE (data + 18, state->fps_n);
  GST_WRITE_UINT32_BE (data + 22, state->fps_d);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags =
      gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer ((iface_tags) ? iface_tags : tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

  return caps;
}
Example #14
static void
gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
    GstVideoInfo * info)
{
  GstStructure *s;
  GstVideoEncoder *video_encoder;
  GstBuffer *stream_hdr, *vorbiscomment;
  const GstTagList *iface_tags;
  GValue array = { 0, };
  GValue value = { 0, };
  guint8 *data = NULL;
  GstMapInfo map;

  video_encoder = GST_VIDEO_ENCODER (enc);
  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
  data = map.data;

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
  GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
  GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
  GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
  GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
  GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));

  gst_buffer_unmap (stream_hdr, &map);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer (iface_tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

}
static gboolean
gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  GstVP8Enc *encoder;
  vpx_codec_enc_cfg_t cfg;
  vpx_codec_err_t status;
  vpx_image_t *image;
  guint8 *data = NULL;
  GstCaps *caps;
  gboolean ret;

  encoder = GST_VP8_ENC (base_video_encoder);
  GST_DEBUG_OBJECT (base_video_encoder, "set_format");

  if (encoder->inited) {
    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
    return FALSE;
  }

  status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to get default encoder configuration"), ("%s",
            gst_vpx_error_name (status)));
    return FALSE;
  }

  /* Scale default bitrate to our size */
  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
      state->width * state->height,
      cfg.g_w * cfg.g_h);

  cfg.g_w = state->width;
  cfg.g_h = state->height;
  cfg.g_timebase.num = state->fps_d;
  cfg.g_timebase.den = state->fps_n;

  cfg.g_error_resilient = encoder->error_resilient;
  cfg.g_lag_in_frames = encoder->max_latency;
  cfg.g_threads = encoder->threads;
  cfg.rc_end_usage = encoder->mode;
  cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
  cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
  /* A standalone qp-min does not make any sense: with bitrate=0 and qp-min=1
   * the encoder will only use the default qp-max=63, which also gives the
   * worst possible quality.
   */
  if (encoder->bitrate != DEFAULT_BITRATE ||
      encoder->max_quantizer != DEFAULT_MAX_QUANTIZER) {
    cfg.rc_target_bitrate = encoder->bitrate / 1000;
    cfg.rc_min_quantizer = encoder->min_quantizer;
    cfg.rc_max_quantizer = encoder->max_quantizer;
  } else {
    cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
    cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
  }
  cfg.rc_dropframe_thresh = encoder->drop_frame;
  cfg.rc_resize_allowed = encoder->resize_allowed;

  cfg.kf_mode = VPX_KF_AUTO;
  cfg.kf_min_dist = 0;
  cfg.kf_max_dist = encoder->max_keyframe_distance;

  cfg.g_pass = encoder->multipass_mode;
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
    encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
  } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
    GError *err = NULL;

    if (!encoder->multipass_cache_file) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("No multipass cache file provided"), (NULL));
      return FALSE;
    }

    if (!g_file_get_contents (encoder->multipass_cache_file,
            (gchar **) & encoder->last_pass_cache_content.buf,
            &encoder->last_pass_cache_content.sz, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("Failed to read multipass cache file provided"), ("%s",
              err->message));
      g_error_free (err);
      return FALSE;
    }
    cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
  }

  status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
      &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* FIXME move this to a set_speed() function */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
      (encoder->speed == 0) ? 0 : (encoder->speed - 1));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
        gst_vpx_error_name (status));
  }

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
      encoder->noise_sensitivity);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
      encoder->sharpness);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
      encoder->static_threshold);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
      encoder->partitions);
#if 0
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
      encoder->arnr_maxframes);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
      encoder->arnr_strength);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
      encoder->arnr_type);
#endif
#ifdef HAVE_VP8ENC_TUNING
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
      encoder->tuning);
#endif

  status =
      vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
      (encoder->auto_alt_ref_frames ? 1 : 0));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_ENABLEAUTOALTREF to %d: %s",
        (encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
  }

  cfg.g_lag_in_frames = encoder->lag_in_frames;

  gst_base_video_encoder_set_latency (base_video_encoder, 0,
      gst_util_uint64_scale (encoder->max_latency,
          state->fps_d * GST_SECOND, state->fps_n));
  encoder->inited = TRUE;

  /* prepare cached image buffer setup */
  image = &encoder->image;
  memset (image, 0, sizeof (*image));

  image->fmt = VPX_IMG_FMT_I420;
  image->bps = 12;
  image->x_chroma_shift = image->y_chroma_shift = 1;
  image->w = image->d_w = state->width;
  image->h = image->d_h = state->height;

  image->stride[VPX_PLANE_Y] =
      gst_video_format_get_row_stride (state->format, 0, state->width);
  image->stride[VPX_PLANE_U] =
      gst_video_format_get_row_stride (state->format, 1, state->width);
  image->stride[VPX_PLANE_V] =
      gst_video_format_get_row_stride (state->format, 2, state->width);
  image->planes[VPX_PLANE_Y] =
      data + gst_video_format_get_component_offset (state->format, 0,
      state->width, state->height);
  image->planes[VPX_PLANE_U] =
      data + gst_video_format_get_component_offset (state->format, 1,
      state->width, state->height);
  image->planes[VPX_PLANE_V] =
      data + gst_video_format_get_component_offset (state->format, 2,
      state->width, state->height);


  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  {
    GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
    GValue array = { 0, };
    GValue value = { 0, };
    s = gst_caps_get_structure (caps, 0);

    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
    g_value_init (&value, GST_TYPE_BUFFER);

    /* Create Ogg stream-info */
    stream_hdr = gst_buffer_new_and_alloc (26);
    data = GST_BUFFER_DATA (stream_hdr);

    GST_WRITE_UINT8 (data, 0x4F);
    GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
    GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
    GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
    GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
    GST_WRITE_UINT16_BE (data + 8, state->width);
    GST_WRITE_UINT16_BE (data + 10, state->height);
    GST_WRITE_UINT24_BE (data + 12, state->par_n);
    GST_WRITE_UINT24_BE (data + 15, state->par_d);
    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
    GST_WRITE_UINT32_BE (data + 22, state->fps_d);

    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
    gst_value_set_buffer (&value, stream_hdr);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (stream_hdr);

    iface_tags =
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
    if (iface_tags) {
      vorbiscomment =
          gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_set_buffer (&value, vorbiscomment);
      gst_value_array_append_value (&array, &value);
      g_value_unset (&value);
      gst_buffer_unref (vorbiscomment);
    }

    gst_structure_set_value (s, "streamheader", &array);
    g_value_unset (&array);
  }

  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
  gst_caps_unref (caps);

  return ret;
}