Example #1
static guint
tag_setter_list_length (GstTagSetter * setter)
{
  guint len = 0;

  if (gst_tag_setter_get_tag_list (setter) == NULL)
    return 0;

  gst_tag_list_foreach (gst_tag_setter_get_tag_list (setter),
      (GstTagForeachFunc) tag_list_foreach, &len);
  return len;
}
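
Example #1 relies on a tag_list_foreach() helper that is not shown on this page. A minimal sketch of such a GstTagForeachFunc counting callback could look like the following; the function name and body are assumptions, not the original helper:

static void
tag_list_foreach (const GstTagList * list, const gchar * tag, gpointer user_data)
{
  guint *len = (guint *) user_data;

  /* called once per tag name present in the list; just bump the counter */
  (void) list;
  (void) tag;
  (*len)++;
}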
Example #2
static GstBuffer *
gst_speex_enc_create_metadata_buffer (GstSpeexEnc * enc)
{
  const GstTagList *user_tags;
  GstTagList *merged_tags;
  GstBuffer *comments = NULL;

  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc));

  GST_DEBUG_OBJECT (enc, "upstream tags = %" GST_PTR_FORMAT, enc->tags);
  GST_DEBUG_OBJECT (enc, "user-set tags = %" GST_PTR_FORMAT, user_tags);

  /* gst_tag_list_merge() will handle NULL for either or both lists fine */
  merged_tags = gst_tag_list_merge (user_tags, enc->tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (enc)));

  if (merged_tags == NULL)
    merged_tags = gst_tag_list_new ();

  GST_DEBUG_OBJECT (enc, "merged   tags = %" GST_PTR_FORMAT, merged_tags);
  comments = gst_tag_list_to_vorbiscomment_buffer (merged_tags, NULL,
      0, "Encoded with GStreamer Speexenc");
  gst_tag_list_free (merged_tags);

  GST_BUFFER_OFFSET (comments) = enc->bytes_out;
  GST_BUFFER_OFFSET_END (comments) = 0;

  return comments;
}
Example #3
static GstBuffer *
gst_celt_enc_create_metadata_buffer (GstCeltEnc * enc)
{
  const GstTagList *tags;
  GstTagList *empty_tags = NULL;
  GstBuffer *comments = NULL;

  tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc));

  GST_DEBUG_OBJECT (enc, "tags = %" GST_PTR_FORMAT, tags);

  if (tags == NULL) {
    /* FIXME: better fix chain of callers to not write metadata at all,
     * if there is none */
    empty_tags = gst_tag_list_new ();
    tags = empty_tags;
  }
  comments = gst_tag_list_to_vorbiscomment_buffer (tags, NULL,
      0, "Encoded with GStreamer Celtenc");

  GST_BUFFER_OFFSET (comments) = 0;
  GST_BUFFER_OFFSET_END (comments) = 0;

  if (empty_tags)
    gst_tag_list_free (empty_tags);

  return comments;
}
Example #4
static void
gst_shout2send_set_metadata (GstShout2send * shout2send)
{
  const GstTagList *user_tags;
  GstTagList *copy;
  char *tempmetadata;
  shout_metadata_t *pmetadata;

  g_return_if_fail (shout2send != NULL);
  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (shout2send));
  if ((shout2send->tags == NULL) && (user_tags == NULL)) {
    return;
  }
  copy = gst_tag_list_merge (user_tags, shout2send->tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (shout2send)));
  /* lets get the artist and song tags */
  tempmetadata = NULL;
  gst_tag_list_foreach ((GstTagList *) copy, set_shout_metadata,
      (gpointer) & tempmetadata);
  if (tempmetadata) {
    pmetadata = shout_metadata_new ();
    shout_metadata_add (pmetadata, "song", tempmetadata);
    shout_set_metadata (shout2send->conn, pmetadata);
    shout_metadata_free (pmetadata);
  }

  gst_tag_list_unref (copy);
}
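
Most of these examples merge the element's upstream tags with tags that an application previously set through the GstTagSetter interface. For context, a hypothetical application-side helper (element, tag values and merge modes are placeholders, not taken from the examples above) could look like this:

static void
set_user_tags_on_encoder (GstElement * enc)
{
  GstTagSetter *setter = GST_TAG_SETTER (enc);

  /* control how the user tags are later merged with stream tags */
  gst_tag_setter_set_tag_merge_mode (setter, GST_TAG_MERGE_REPLACE);

  /* these tags are what gst_tag_setter_get_tag_list() will return */
  gst_tag_setter_add_tags (setter, GST_TAG_MERGE_APPEND,
      GST_TAG_ARTIST, "Some Artist", GST_TAG_TITLE, "Some Title", NULL);
}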
Example #5
GST_END_TEST
GST_START_TEST (test_merge_modes)
{
  GstTagMergeMode mode;

  for (mode = GST_TAG_MERGE_REPLACE_ALL; mode < GST_TAG_MERGE_COUNT; mode++) {
    gint i;

    for (i = 0; i < 4; i++) {
      GstElement *enc;
      GstTagSetter *setter;
      GstTagList *list1, *list2, *merged;

      enc = g_object_new (GST_TYPE_DUMMY_ENC, NULL);
      fail_unless (enc != NULL);

      setter = GST_TAG_SETTER (enc);
      list1 = gst_tag_list_new_empty ();
      list2 = gst_tag_list_new_empty ();

      /* i = 0: -     -
       * i = 1: list1 -
       * i = 2: -     list2
       * i = 3: list1 list2 */

      if (i % 2 == 1) {
        gst_tag_list_add (list1, GST_TAG_MERGE_APPEND, GST_TAG_ARTIST,
            "artist1", NULL);
      }
      if (i > 1) {
        gst_tag_list_add (list2, GST_TAG_MERGE_APPEND, GST_TAG_ARTIST,
            "artist2", NULL);
      }

      gst_tag_setter_merge_tags (setter, list1, GST_TAG_MERGE_APPEND);
      gst_tag_setter_merge_tags (setter, list2, mode);

      merged = gst_tag_list_merge (list1, list2, mode);

      fail_unless_equals_int (tag_list_length (gst_tag_setter_get_tag_list
              (setter)), tag_list_length (merged));

      gst_tag_list_unref (list1);
      gst_tag_list_unref (list2);
      gst_tag_list_unref (merged);
      gst_object_unref (enc);
    }
  }
}
Example #6
static GstFlowReturn
gst_vorbis_tag_parse_packet (GstVorbisParse * parse, GstBuffer * buffer)
{
  GstTagList *old_tags, *new_tags;
  const GstTagList *user_tags;
  GstVorbisTag *tagger;
  gchar *encoder = NULL;
  GstBuffer *new_buf;
  GstMapInfo map;
  gboolean do_parse = FALSE;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  /* just pass everything except the comments packet */
  if (map.size >= 1 && map.data[0] != 0x03)
    do_parse = TRUE;
  gst_buffer_unmap (buffer, &map);

  if (do_parse) {
    return GST_VORBIS_PARSE_CLASS (parent_class)->parse_packet (parse, buffer);
  }

  tagger = GST_VORBIS_TAG (parse);

  old_tags =
      gst_tag_list_from_vorbiscomment_buffer (buffer, (guint8 *) "\003vorbis",
      7, &encoder);
  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tagger));

  /* build new tag list */
  new_tags = gst_tag_list_merge (user_tags, old_tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tagger)));
  gst_tag_list_unref (old_tags);

  new_buf =
      gst_tag_list_to_vorbiscomment_buffer (new_tags, (guint8 *) "\003vorbis",
      7, encoder);
  gst_buffer_copy_into (new_buf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);

  gst_tag_list_unref (new_tags);
  g_free (encoder);
  gst_buffer_unref (buffer);

  return GST_VORBIS_PARSE_CLASS (parent_class)->parse_packet (parse, new_buf);
}
Example #7
static GstFlowReturn
gst_wavenc_write_tags (GstWavEnc * wavenc)
{
  const GstTagList *user_tags;
  GstTagList *tags;
  guint size;
  GstBuffer *buf;
  GstByteWriter bw;

  g_return_val_if_fail (wavenc != NULL, GST_FLOW_OK);

  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (wavenc));
  if ((!wavenc->tags) && (!user_tags)) {
    GST_DEBUG_OBJECT (wavenc, "have no tags");
    return GST_FLOW_OK;
  }
  tags =
      gst_tag_list_merge (user_tags, wavenc->tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (wavenc)));

  GST_DEBUG_OBJECT (wavenc, "writing tags");

  gst_byte_writer_init_with_size (&bw, 1024, FALSE);

  /* add LIST INFO chunk */
  gst_byte_writer_put_data (&bw, (const guint8 *) "LIST", 4);
  gst_byte_writer_put_uint32_le (&bw, 0);
  gst_byte_writer_put_data (&bw, (const guint8 *) "INFO", 4);

  /* add tags */
  gst_tag_list_foreach (tags, gst_wavparse_tags_foreach, &bw);

  /* sets real size of LIST INFO chunk */
  size = gst_byte_writer_get_pos (&bw);
  gst_byte_writer_set_pos (&bw, 4);
  gst_byte_writer_put_uint32_le (&bw, size - 8);

  gst_tag_list_unref (tags);

  buf = gst_byte_writer_reset_and_get_buffer (&bw);
  wavenc->meta_length += gst_buffer_get_size (buf);
  return gst_pad_push (wavenc->srcpad, buf);
}
Example #8
static void
gst_kate_enc_set_metadata (GstKateEnc * ke)
{
  GstTagList *merged_tags;
  const GstTagList *user_tags;

  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ke));

  GST_DEBUG_OBJECT (ke, "upstream tags = %" GST_PTR_FORMAT, ke->tags);
  GST_DEBUG_OBJECT (ke, "user-set tags = %" GST_PTR_FORMAT, user_tags);

  /* gst_tag_list_merge() will handle NULL for either or both lists fine */
  merged_tags = gst_tag_list_merge (user_tags, ke->tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (ke)));

  if (merged_tags) {
    GST_DEBUG_OBJECT (ke, "merged   tags = %" GST_PTR_FORMAT, merged_tags);
    gst_tag_list_foreach (merged_tags, gst_kate_enc_metadata_set1, ke);
    gst_tag_list_unref (merged_tags);
  }
}
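
Examples #8 and #10 iterate over the merged list with per-tag callbacks (gst_kate_enc_metadata_set1, gst_vorbis_enc_metadata_set1) that are not shown on this page. A rough, assumed sketch of such a callback, printing values instead of calling the encoder-specific comment API, might look like:

static void
metadata_set1 (const GstTagList * list, const gchar * tag, gpointer user_data)
{
  guint i, count;

  (void) user_data;             /* the encoder instance would normally be passed here */

  count = gst_tag_list_get_tag_size (list, tag);
  for (i = 0; i < count; i++) {
    gchar *value = NULL;

    /* only string tags are handled in this sketch */
    if (gst_tag_list_get_string_index (list, tag, i, &value) && value != NULL) {
      g_print ("%s=%s\n", tag, value);
      g_free (value);
    }
  }
}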
Example #9
static GstFlowReturn
gst_opus_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
{
  GstOpusEnc *enc;
  GstFlowReturn ret = GST_FLOW_OK;

  enc = GST_OPUS_ENC (benc);
  GST_DEBUG_OBJECT (enc, "handle_frame");

  if (!enc->header_sent) {
    GstCaps *caps;

    g_slist_foreach (enc->headers, (GFunc) gst_buffer_unref, NULL);
    g_slist_free (enc->headers);
    enc->headers = NULL;

    gst_opus_header_create_caps (&caps, &enc->headers, enc->n_channels,
        enc->n_stereo_streams, enc->sample_rate, enc->channel_mapping_family,
        enc->decoding_channel_mapping,
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc)));


    /* negotiate with these caps */
    GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps);

    gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc), caps);
    gst_caps_unref (caps);

    enc->header_sent = TRUE;
  }

  GST_DEBUG_OBJECT (enc, "received buffer %p of %u bytes", buf,
      buf ? GST_BUFFER_SIZE (buf) : 0);

  ret = gst_opus_enc_encode (enc, buf);

  return ret;
}
Example #10
static void
gst_vorbis_enc_set_metadata (GstVorbisEnc * enc)
{
  GstTagList *merged_tags;
  const GstTagList *user_tags;

  vorbis_comment_init (&enc->vc);

  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc));

  GST_DEBUG_OBJECT (enc, "upstream tags = %" GST_PTR_FORMAT, enc->tags);
  GST_DEBUG_OBJECT (enc, "user-set tags = %" GST_PTR_FORMAT, user_tags);

  /* gst_tag_list_merge() will handle NULL for either or both lists fine */
  merged_tags = gst_tag_list_merge (user_tags, enc->tags,
      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (enc)));

  if (merged_tags) {
    GST_DEBUG_OBJECT (enc, "merged   tags = %" GST_PTR_FORMAT, merged_tags);
    gst_tag_list_foreach (merged_tags, gst_vorbis_enc_metadata_set1, enc);
    gst_tag_list_unref (merged_tags);
  }
}
Example #11
static GstTagList *
gst_tag_mux_get_tags (GstTagMux * mux)
{
  GstTagSetter *tagsetter = GST_TAG_SETTER (mux);
  const GstTagList *tagsetter_tags;
  GstTagMergeMode merge_mode;

  if (mux->priv->final_tags)
    return mux->priv->final_tags;

  tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter);
  merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter);

  GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", merge_mode);
  GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->priv->event_tags);
  GST_LOG_OBJECT (mux, "set   tags: %" GST_PTR_FORMAT, tagsetter_tags);

  mux->priv->final_tags =
      gst_tag_list_merge (tagsetter_tags, mux->priv->event_tags, merge_mode);

  GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, mux->priv->final_tags);

  return mux->priv->final_tags;
}
Example #12
static GstCaps *
gst_vp8_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
  GstCaps *caps;
  const GstVideoState *state;
  GstTagList *tags = NULL;
  const GstTagList *iface_tags;
  GstBuffer *stream_hdr, *vorbiscomment;
  guint8 *data;
  GstStructure *s;
  GValue array = { 0 };
  GValue value = { 0 };

  state = gst_base_video_encoder_get_state (base_video_encoder);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);

  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  data = GST_BUFFER_DATA (stream_hdr);

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, state->width);
  GST_WRITE_UINT16_BE (data + 10, state->height);
  GST_WRITE_UINT24_BE (data + 12, state->par_n);
  GST_WRITE_UINT24_BE (data + 15, state->par_d);
  GST_WRITE_UINT32_BE (data + 18, state->fps_n);
  GST_WRITE_UINT32_BE (data + 22, state->fps_d);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags =
      gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer ((iface_tags) ? iface_tags : tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

  return caps;
}
Example #13
static GstFlowReturn
gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
    GstFlacTag *tag;
    GstFlowReturn ret;
    GstMapInfo map;
    gsize size;

    ret = GST_FLOW_OK;
    tag = GST_FLAC_TAG (parent);

    gst_adapter_push (tag->adapter, buffer);

    /* Initial state, we don't even know if we are dealing with a flac file */
    if (tag->state == GST_FLAC_TAG_STATE_INIT) {
        GstBuffer *id_buffer;

        if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC))
            goto cleanup;

        id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
        GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
        if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {

            GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
            ret = gst_pad_push (tag->srcpad, id_buffer);
            if (ret != GST_FLOW_OK)
                goto cleanup;

            tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
        } else {
            /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */
            gst_buffer_unref (id_buffer);
            GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL));
            ret = GST_FLOW_ERROR;
        }
    }


    /* The fLaC magic string has been skipped, try to detect the beginning
     * of a metadata block
     */
    if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
        guint type;
        gboolean is_last;
        const guint8 *block_header;

        g_assert (tag->metadata_block_size == 0);
        g_assert (tag->metadata_last_block == FALSE);

        /* The header of a flac metadata block is 4 bytes long:
         * 1st bit: indicates whether this is the last metadata info block
         * 7 next bits: 4 if vorbis comment block
         * 24 next bits: size of the metadata to follow (big endian)
         */
        if (gst_adapter_available (tag->adapter) < 4)
            goto cleanup;

        block_header = gst_adapter_map (tag->adapter, 4);

        is_last = ((block_header[0] & 0x80) == 0x80);
        type = block_header[0] & 0x7F;
        size = (block_header[1] << 16)
               | (block_header[2] << 8)
               | block_header[3];
        gst_adapter_unmap (tag->adapter);

        /* The 4 bytes long header isn't included in the metadata size */
        tag->metadata_block_size = size + 4;
        tag->metadata_last_block = is_last;

        GST_DEBUG_OBJECT (tag,
                          "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, "
                          "is vorbiscomment: %d, is last: %d",
                          size, type, (type == 0x04), is_last);

        /* Metadata blocks of type 4 are vorbis comment blocks */
        if (type == 0x04) {
            tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK;
        } else {
            tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK;
        }
    }


    /* Reads a metadata block */
    if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) ||
            (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) {
        GstBuffer *metadata_buffer;

        if (gst_adapter_available (tag->adapter) < tag->metadata_block_size)
            goto cleanup;

        metadata_buffer = gst_adapter_take_buffer (tag->adapter,
                          tag->metadata_block_size);
        /* clear the is-last flag, as the last metadata block will
         * be the vorbis comment block which we will build ourselves.
         */
        gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE);
        map.data[0] &= (~0x80);
        gst_buffer_unmap (metadata_buffer, &map);

        if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
            GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
            ret = gst_pad_push (tag->srcpad, metadata_buffer);
            if (ret != GST_FLOW_OK)
                goto cleanup;
        } else {
            tag->vorbiscomment = metadata_buffer;
        }
        tag->metadata_block_size = 0;
        tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK;
    }

    /* This state is mainly used to be able to stop as soon as we read
     * a vorbiscomment block from the flac file if we are in an only output
     * tags mode
     */
    if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) {
        /* Check if in the previous iteration we read a vorbis comment metadata
         * block, and stop now if the user only wants to read tags
         */
        if (tag->vorbiscomment != NULL) {
            guint8 id_data[4];
            /* We found some tags, try to parse them and notify the other elements
             * that we encountered some tags
             */
            GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
            gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
            tag->tags = gst_tag_list_from_vorbiscomment_buffer (tag->vorbiscomment,
                        id_data, 4, NULL);
            if (tag->tags != NULL) {
                gst_pad_push_event (tag->srcpad,
                                    gst_event_new_tag (gst_tag_list_copy (tag->tags)));
            }

            gst_buffer_unref (tag->vorbiscomment);
            tag->vorbiscomment = NULL;
        }

        /* Skip to next state */
        if (tag->metadata_last_block == FALSE) {
            tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
        } else {
            tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT;
        }
    }


    /* Creates a vorbis comment block from the metadata which was set
     * on the gstreamer element, and add it to the flac stream
     */
    if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
        GstBuffer *buffer;
        const GstTagList *user_tags;
        GstTagList *merged_tags;

        /* merge the tag lists */
        user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag));
        if (user_tags != NULL) {
            merged_tags = gst_tag_list_merge (user_tags, tag->tags,
                                              gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag)));
        } else {
            merged_tags = gst_tag_list_copy (tag->tags);
        }

        if (merged_tags == NULL) {
            /* If we get a NULL list of tags, we must generate a padding block
             * which is marked as the last metadata block, otherwise we'll
             * end up with a corrupted flac file.
             */
            GST_WARNING_OBJECT (tag, "No tags found");
            buffer = gst_buffer_new_and_alloc (12);
            if (buffer == NULL)
                goto no_buffer;

            gst_buffer_map (buffer, &map, GST_MAP_WRITE);
            memset (map.data, 0, map.size);
            map.data[0] = 0x81;       /* 0x80 = Last metadata block,
                                 * 0x01 = padding block */
            gst_buffer_unmap (buffer, &map);
        } else {
            guchar header[4];
            guint8 fbit[1];

            memset (header, 0, sizeof (header));
            header[0] = 0x84;         /* 0x80 = Last metadata block,
                                 * 0x04 = vorbiscomment block */
            buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
                     sizeof (header), NULL);
            GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
            gst_tag_list_free (merged_tags);
            if (buffer == NULL)
                goto no_comment;

            size = gst_buffer_get_size (buffer);
            if ((size < 4) || ((size - 4) > 0xFFFFFF))
                goto comment_too_long;

            fbit[0] = 1;
            /* Get rid of the framing bit at the end of the vorbiscomment buffer
             * if it exists since libFLAC seems to lose sync because of this
             * bit in gstflacdec
             */
            if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
                buffer = gst_buffer_make_writable (buffer);
                gst_buffer_resize (buffer, 0, size - 1);
            }
        }

        /* The 4 byte metadata block header isn't accounted for in the total
         * size of the metadata block
         */
        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
        map.data[1] = (((map.size - 4) & 0xFF0000) >> 16);
        map.data[2] = (((map.size - 4) & 0x00FF00) >> 8);
        map.data[3] = ((map.size - 4) & 0x0000FF);
        gst_buffer_unmap (buffer, &map);

        GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment "
                          "buffer", map.size);

        ret = gst_pad_push (tag->srcpad, buffer);
        if (ret != GST_FLOW_OK) {
            goto cleanup;
        }
        tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA;
    }
Example #14
static GstFlowReturn
gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data)
{
  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data;
  GSList *collected;
  GstFFMpegMuxPad *best_pad;
  GstClockTime best_time;
  const GstTagList *tags;

  /* open "file" (gstreamer protocol to next element) */
  if (!ffmpegmux->opened) {
    int open_flags = URL_WRONLY;

    /* we do need all streams to have started capsnego,
     * or things will go horribly wrong */
    for (collected = ffmpegmux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
      AVStream *st = ffmpegmux->context->streams[collect_pad->padnum];

      /* check whether the pad has successfully completed capsnego */
      if (st->codec->codec_id == CODEC_ID_NONE) {
        GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL),
            ("no caps set on stream %d (%s)", collect_pad->padnum,
                (st->codec->codec_type == CODEC_TYPE_VIDEO) ?
                "video" : "audio"));
        return GST_FLOW_ERROR;
      }
      /* set framerate for audio */
      if (st->codec->codec_type == CODEC_TYPE_AUDIO) {
        switch (st->codec->codec_id) {
          case CODEC_ID_PCM_S16LE:
          case CODEC_ID_PCM_S16BE:
          case CODEC_ID_PCM_U16LE:
          case CODEC_ID_PCM_U16BE:
          case CODEC_ID_PCM_S8:
          case CODEC_ID_PCM_U8:
            st->codec->frame_size = 1;
            break;
          default:
          {
            GstBuffer *buffer;

            /* FIXME : This doesn't work for RAW AUDIO...
             * in fact I'm wondering if it even works for any kind of audio... */
            buffer = gst_collect_pads_peek (ffmpegmux->collect,
                (GstCollectData *) collect_pad);
            if (buffer) {
              st->codec->frame_size =
                  st->codec->sample_rate *
                  GST_BUFFER_DURATION (buffer) / GST_SECOND;
              gst_buffer_unref (buffer);
            }
          }
        }
      }
    }

    /* tags */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux));
    if (tags) {
      gint i;
      gchar *s;

      /* get the interesting ones */
      if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) {
        strncpy (ffmpegmux->context->title, s,
            sizeof (ffmpegmux->context->title));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) {
        strncpy (ffmpegmux->context->author, s,
            sizeof (ffmpegmux->context->author));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) {
        strncpy (ffmpegmux->context->copyright, s,
            sizeof (ffmpegmux->context->copyright));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) {
        strncpy (ffmpegmux->context->comment, s,
            sizeof (ffmpegmux->context->comment));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) {
        strncpy (ffmpegmux->context->album, s,
            sizeof (ffmpegmux->context->album));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) {
        strncpy (ffmpegmux->context->genre, s,
            sizeof (ffmpegmux->context->genre));
      }
      if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) {
        ffmpegmux->context->track = i;
      }
    }

    /* set the streamheader flag for gstffmpegprotocol if codec supports it */
    if (!strcmp (ffmpegmux->context->oformat->name, "flv")) {
      open_flags |= GST_FFMPEG_URL_STREAMHEADER;
    }

    if (url_fopen (&ffmpegmux->context->pb,
            ffmpegmux->context->filename, open_flags) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL),
          ("Failed to open stream context in ffmux"));
      return GST_FLOW_ERROR;
    }

    if (av_set_parameters (ffmpegmux->context, NULL) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, INIT, (NULL),
          ("Failed to initialize muxer"));
      return GST_FLOW_ERROR;
    }

    /* now open the mux format */
    if (av_write_header (ffmpegmux->context) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL),
          ("Failed to write file header - check codec settings"));
      return GST_FLOW_ERROR;
    }

    /* we're now opened */
    ffmpegmux->opened = TRUE;

    /* flush the header so it will be used as streamheader */
    put_flush_packet (ffmpegmux->context->pb);
  }

  /* take the one with earliest timestamp,
   * and push it forward */
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (collected = ffmpegmux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
    GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect,
        (GstCollectData *) collect_pad);

    /* if there's no buffer, just continue */
    if (buffer == NULL) {
      continue;
    }

    /* if we have no buffer yet, just use the first one */
    if (best_pad == NULL) {
      best_pad = collect_pad;
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      goto next_pad;
    }

    /* if we do have one, only use this one if it's older */
    if (GST_BUFFER_TIMESTAMP (buffer) < best_time) {
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      best_pad = collect_pad;
    }

  next_pad:
    gst_buffer_unref (buffer);

    /* Mux buffers with invalid timestamp first */
    if (!GST_CLOCK_TIME_IS_VALID (best_time))
      break;
  }

  /* now handle the buffer, or signal EOS if we have
   * no buffers left */
  if (best_pad != NULL) {
    GstBuffer *buf;
    AVPacket pkt;
    gboolean need_free = FALSE;

    /* push out current buffer */
    buf = gst_collect_pads_pop (ffmpegmux->collect,
        (GstCollectData *) best_pad);

    ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++;

    /* set time */
    pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf),
        ffmpegmux->context->streams[best_pad->padnum]->time_base);
    pkt.dts = pkt.pts;

    if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) {
      AVStream *st = ffmpegmux->context->streams[best_pad->padnum];
      AVPicture src, dst;

      need_free = TRUE;
      pkt.size = st->codec->width * st->codec->height * 3;
      pkt.data = g_malloc (pkt.size);

      dst.data[0] = pkt.data;
      dst.data[1] = NULL;
      dst.data[2] = NULL;
      dst.linesize[0] = st->codec->width * 3;

      gst_ffmpeg_avpicture_fill (&src, GST_BUFFER_DATA (buf),
          PIX_FMT_RGB24, st->codec->width, st->codec->height);

      av_picture_copy (&dst, &src, PIX_FMT_RGB24,
          st->codec->width, st->codec->height);
    } else {
      pkt.data = GST_BUFFER_DATA (buf);
      pkt.size = GST_BUFFER_SIZE (buf);
    }

    pkt.stream_index = best_pad->padnum;
    pkt.flags = 0;

    if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
      pkt.flags |= PKT_FLAG_KEY;

    if (GST_BUFFER_DURATION_IS_VALID (buf))
      pkt.duration =
          gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf),
          ffmpegmux->context->streams[best_pad->padnum]->time_base);
    else
      pkt.duration = 0;
    av_write_frame (ffmpegmux->context, &pkt);
    gst_buffer_unref (buf);
    if (need_free)
      g_free (pkt.data);
  } else {
    /* close down */
    av_write_trailer (ffmpegmux->context);
    ffmpegmux->opened = FALSE;
    put_flush_packet (ffmpegmux->context->pb);
    url_fclose (ffmpegmux->context->pb);
    gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ());
    return GST_FLOW_UNEXPECTED;
  }

  return GST_FLOW_OK;
}
Example #15
static void
gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
    GstVideoInfo * info)
{
  GstStructure *s;
  GstVideoEncoder *video_encoder;
  GstBuffer *stream_hdr, *vorbiscomment;
  const GstTagList *iface_tags;
  GValue array = { 0, };
  GValue value = { 0, };
  guint8 *data = NULL;
  GstMapInfo map;

  video_encoder = GST_VIDEO_ENCODER (enc);
  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
  data = map.data;

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
  GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
  GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
  GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
  GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
  GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));

  gst_buffer_unmap (stream_hdr, &map);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer (iface_tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

}
Example #16
static gboolean
gst_jif_mux_mangle_markers (GstJifMux * self)
{
    gboolean modified = FALSE;
    GstTagList *tags = NULL;
    gboolean cleanup_tags;
    GstJifMuxMarker *m;
    GList *node, *file_hdr = NULL, *frame_hdr = NULL, *scan_hdr = NULL;
    GList *app0_jfif = NULL, *app1_exif = NULL, *app1_xmp = NULL, *com = NULL;
    GstBuffer *xmp_data;
    gchar *str = NULL;
    gint colorspace = COLORSPACE_UNKNOWN;

    /* update the APP markers
     * - put any JFIF APP0 first
     * - the Exif APP1 next,
     * - the XMP APP1 next,
     * - the PSIR APP13 next,
     * - followed by all other marker segments
     */

    /* find some reference points where we insert before/after */
    file_hdr = self->priv->markers;
    for (node = self->priv->markers; node; node = g_list_next (node)) {
        m = (GstJifMuxMarker *) node->data;

        switch (m->marker) {
        case APP0:
            if (m->size > 5 && !memcmp (m->data, "JFIF\0", 5)) {
                GST_DEBUG_OBJECT (self, "found APP0 JFIF");
                colorspace |= COLORSPACE_GRAYSCALE | COLORSPACE_YUV;
                if (!app0_jfif)
                    app0_jfif = node;
            }
            break;
        case APP1:
            if (m->size > 6 && (!memcmp (m->data, "EXIF\0\0", 6) ||
                                !memcmp (m->data, "Exif\0\0", 6))) {
                GST_DEBUG_OBJECT (self, "found APP1 EXIF");
                if (!app1_exif)
                    app1_exif = node;
            } else if (m->size > 29
                       && !memcmp (m->data, "http://ns.adobe.com/xap/1.0/\0", 29)) {
                GST_INFO_OBJECT (self, "found APP1 XMP, will be replaced");
                if (!app1_xmp)
                    app1_xmp = node;
            }
            break;
        case APP14:
            /* check if this contains RGB */
            /*
             * This marker should have:
             * - 'Adobe\0'
             * - 2 bytes DCTEncodeVersion
             * - 2 bytes flags0
             * - 2 bytes flags1
             * - 1 byte  ColorTransform
             *             - 0 means unknown (RGB or CMYK)
             *             - 1 YCbCr
             *             - 2 YCCK
             */

            if ((m->size >= 14)
                    && (strncmp ((gchar *) m->data, "Adobe\0", 6) == 0)) {
                switch (m->data[11]) {
                case 0:
                    colorspace |= COLORSPACE_RGB | COLORSPACE_CMYK;
                    break;
                case 1:
                    colorspace |= COLORSPACE_YUV;
                    break;
                case 2:
                    colorspace |= COLORSPACE_YCCK;
                    break;
                default:
                    break;
                }
            }

            break;
        case COM:
            GST_INFO_OBJECT (self, "found COM, will be replaced");
            if (!com)
                com = node;
            break;
        case DQT:
        case SOF0:
        case SOF1:
        case SOF2:
        case SOF3:
        case SOF5:
        case SOF6:
        case SOF7:
        case SOF9:
        case SOF10:
        case SOF11:
        case SOF13:
        case SOF14:
        case SOF15:
            if (!frame_hdr)
                frame_hdr = node;
            break;
        case DAC:
        case DHT:
        case DRI:
        case SOS:
            if (!scan_hdr)
                scan_hdr = node;
            break;
        }
    }

    /* if we want combined or JFIF */
    /* check if we don't have JFIF APP0 */
    if (!app0_jfif && (colorspace & (COLORSPACE_GRAYSCALE | COLORSPACE_YUV))) {
        /* build jfif header */
        static const struct
        {
            gchar id[5];
            guint8 ver[2];
            guint8 du;
            guint8 xd[2], yd[2];
            guint8 tw, th;
        } jfif_data = {
            "JFIF", {
                1, 2
            }, 0, {
                0, 1
            },                    /* FIXME: check pixel-aspect from caps */
            {
                0, 1
            }, 0, 0
        };
        m = gst_jif_mux_new_marker (APP0, sizeof (jfif_data),
                                    (const guint8 *) &jfif_data, FALSE);
        /* insert into self->markers list */
        self->priv->markers = g_list_insert (self->priv->markers, m, 1);
        app0_jfif = g_list_nth (self->priv->markers, 1);
    }
    /* else */
    /* remove JFIF if exists */

    /* Existing exif tags will be removed and our own will be added */
    if (!tags) {
        tags = (GstTagList *) gst_tag_setter_get_tag_list (GST_TAG_SETTER (self));
        cleanup_tags = FALSE;
    }
    if (!tags) {
        tags = gst_tag_list_new ();
        cleanup_tags = TRUE;
    }

    GST_DEBUG_OBJECT (self, "Tags to be serialized %" GST_PTR_FORMAT, tags);

    /* FIXME: not happy with those
     * - else where we would use VIDEO_CODEC = "Jpeg"
     gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
     GST_TAG_VIDEO_CODEC, "image/jpeg", NULL);
     */

    /* Add EXIF */
    {
        GstBuffer *exif_data;
        guint8 *data;
        GstJifMuxMarker *m;
        GList *pos;

        /* insert into self->markers list */
        exif_data = gst_tag_list_to_exif_buffer_with_tiff_header (tags);
        if (exif_data &&
                GST_BUFFER_SIZE (exif_data) + 8 >= G_GUINT64_CONSTANT (65536)) {
            GST_WARNING_OBJECT (self, "Exif tags data size exceed maximum size");
            gst_buffer_unref (exif_data);
            exif_data = NULL;
        }
        if (exif_data) {
            data = g_malloc0 (GST_BUFFER_SIZE (exif_data) + 6);
            memcpy (data, "Exif", 4);
            memcpy (data + 6, GST_BUFFER_DATA (exif_data),
                    GST_BUFFER_SIZE (exif_data));
            m = gst_jif_mux_new_marker (APP1, GST_BUFFER_SIZE (exif_data) + 6, data,
                                        TRUE);
            gst_buffer_unref (exif_data);

            if (app1_exif) {
                gst_jif_mux_marker_free ((GstJifMuxMarker *) app1_exif->data);
                app1_exif->data = m;
            } else {
                pos = file_hdr;
                if (app0_jfif)
                    pos = app0_jfif;
                pos = g_list_next (pos);

                self->priv->markers =
                    g_list_insert_before (self->priv->markers, pos, m);
                if (pos) {
                    app1_exif = g_list_previous (pos);
                } else {
                    app1_exif = g_list_last (self->priv->markers);
                }
            }
            modified = TRUE;
        }
    }

    /* add xmp */
    xmp_data =
        gst_tag_xmp_writer_tag_list_to_xmp_buffer (GST_TAG_XMP_WRITER (self),
                tags, FALSE);
    if (xmp_data) {
        guint8 *data, *xmp = GST_BUFFER_DATA (xmp_data);
        guint size = GST_BUFFER_SIZE (xmp_data);
        GList *pos;

        data = g_malloc (size + 29);
        memcpy (data, "http://ns.adobe.com/xap/1.0/\0", 29);
        memcpy (&data[29], xmp, size);
        m = gst_jif_mux_new_marker (APP1, size + 29, data, TRUE);

        /*
         * Replace the old xmp marker and not add a new one.
         * There shouldn't be a xmp packet in the input, but it is better
         * to be safe than add another one and end up with 2 packets.
         */
        if (app1_xmp) {
            gst_jif_mux_marker_free ((GstJifMuxMarker *) app1_xmp->data);
            app1_xmp->data = m;
        } else {

            pos = file_hdr;
            if (app1_exif)
                pos = app1_exif;
            else if (app0_jfif)
                pos = app0_jfif;
            pos = g_list_next (pos);

            self->priv->markers = g_list_insert_before (self->priv->markers, pos, m);

        }
        gst_buffer_unref (xmp_data);
        modified = TRUE;
    }

    /* add jpeg comment from any of those */
    (void) (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &str) ||
            gst_tag_list_get_string (tags, GST_TAG_DESCRIPTION, &str) ||
            gst_tag_list_get_string (tags, GST_TAG_TITLE, &str));

    if (str) {
        GST_DEBUG_OBJECT (self, "set COM marker to '%s'", str);
        /* insert new marker into self->markers list */
        m = gst_jif_mux_new_marker (COM, strlen (str) + 1, (const guint8 *) str,
                                    TRUE);
        /* FIXME: if we have one already, replace */
        /* this should go before SOS, maybe at the end of file-header */
        self->priv->markers = g_list_insert_before (self->priv->markers,
                              frame_hdr, m);

        modified = TRUE;
    }

    if (tags && cleanup_tags)
        gst_tag_list_free (tags);
    return modified;
}
Example #17
static GstBuffer *
gst_tag_lib_mux_render_tag (GstTagLibMux * mux)
{
  GstTagLibMuxClass *klass;
  GstTagMergeMode merge_mode;
  GstTagSetter *tagsetter;
  GstBuffer *buffer;
  const GstTagList *tagsetter_tags;
  GstTagList *taglist;
  GstEvent *event;

  tagsetter = GST_TAG_SETTER (mux);

  tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter);
  merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter);

  GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", merge_mode);
  GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->event_tags);
  GST_LOG_OBJECT (mux, "set   tags: %" GST_PTR_FORMAT, tagsetter_tags);

  taglist = gst_tag_list_merge (tagsetter_tags, mux->event_tags, merge_mode);

  GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, taglist);

  klass = GST_TAG_LIB_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_tag (mux, taglist);

  if (buffer == NULL)
    goto render_error;

  mux->tag_size = GST_BUFFER_SIZE (buffer);
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes", mux->tag_size);

  /* Send newsegment event from byte position 0, so the tag really gets
   * written to the start of the file, independent of the upstream segment */
  gst_pad_push_event (mux->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));

  /* Send an event about the new tags to downstream elements */
  /* gst_event_new_tag takes ownership of the list, so no need to unref it */
  event = gst_event_new_tag (taglist);
  gst_pad_push_event (mux->srcpad, event);

  GST_BUFFER_OFFSET (buffer) = 0;

  return buffer;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement render_tag vfunc!");
    gst_tag_list_free (taglist);
    return NULL;
  }

render_error:
  {
    GST_ERROR_OBJECT (mux, "Failed to render tag");
    gst_tag_list_free (taglist);
    return NULL;
  }
}
Example #18
static gboolean
gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  GstVP8Enc *encoder;
  vpx_codec_enc_cfg_t cfg;
  vpx_codec_err_t status;
  vpx_image_t *image;
  guint8 *data = NULL;
  GstCaps *caps;
  gboolean ret;

  encoder = GST_VP8_ENC (base_video_encoder);
  GST_DEBUG_OBJECT (base_video_encoder, "set_format");

  if (encoder->inited) {
    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
    return FALSE;
  }

  status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to get default encoder configuration"), ("%s",
            gst_vpx_error_name (status)));
    return FALSE;
  }

  /* Scale default bitrate to our size */
  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
      state->width * state->height,
      cfg.g_w * cfg.g_h);

  cfg.g_w = state->width;
  cfg.g_h = state->height;
  cfg.g_timebase.num = state->fps_d;
  cfg.g_timebase.den = state->fps_n;

  cfg.g_error_resilient = encoder->error_resilient;
  cfg.g_lag_in_frames = encoder->max_latency;
  cfg.g_threads = encoder->threads;
  cfg.rc_end_usage = encoder->mode;
  cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
  cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
  /* A standalone qp-min does not make any sense: with bitrate=0 and qp-min=1
   * the encoder will only use the default qp-max=63, which also gives the
   * worst possible quality.
   */
  if (encoder->bitrate != DEFAULT_BITRATE ||
      encoder->max_quantizer != DEFAULT_MAX_QUANTIZER) {
    cfg.rc_target_bitrate = encoder->bitrate / 1000;
    cfg.rc_min_quantizer = encoder->min_quantizer;
    cfg.rc_max_quantizer = encoder->max_quantizer;
  } else {
    cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
    cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
  }
  cfg.rc_dropframe_thresh = encoder->drop_frame;
  cfg.rc_resize_allowed = encoder->resize_allowed;

  cfg.kf_mode = VPX_KF_AUTO;
  cfg.kf_min_dist = 0;
  cfg.kf_max_dist = encoder->max_keyframe_distance;

  cfg.g_pass = encoder->multipass_mode;
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
    encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
  } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
    GError *err = NULL;

    if (!encoder->multipass_cache_file) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("No multipass cache file provided"), (NULL));
      return FALSE;
    }

    if (!g_file_get_contents (encoder->multipass_cache_file,
            (gchar **) & encoder->last_pass_cache_content.buf,
            &encoder->last_pass_cache_content.sz, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("Failed to read multipass cache file provided"), ("%s",
              err->message));
      g_error_free (err);
      return FALSE;
    }
    cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
  }

  status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
      &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* FIXME move this to a set_speed() function */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
      (encoder->speed == 0) ? 0 : (encoder->speed - 1));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
        gst_vpx_error_name (status));
  }

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
      encoder->noise_sensitivity);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
      encoder->sharpness);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
      encoder->static_threshold);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
      encoder->partitions);
#if 0
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
      encoder->arnr_maxframes);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
      encoder->arnr_strength);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
      encoder->arnr_type);
#endif
#ifdef HAVE_VP8ENC_TUNING
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
      encoder->tuning);
#endif

  status =
      vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
      (encoder->auto_alt_ref_frames ? 1 : 0));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_ENABLEAUTOALTREF to %d: %s",
        (encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
  }

  cfg.g_lag_in_frames = encoder->lag_in_frames;

  gst_base_video_encoder_set_latency (base_video_encoder, 0,
      gst_util_uint64_scale (encoder->max_latency,
          state->fps_d * GST_SECOND, state->fps_n));
  encoder->inited = TRUE;

  /* prepare cached image buffer setup */
  image = &encoder->image;
  memset (image, 0, sizeof (*image));

  image->fmt = VPX_IMG_FMT_I420;
  image->bps = 12;
  image->x_chroma_shift = image->y_chroma_shift = 1;
  image->w = image->d_w = state->width;
  image->h = image->d_h = state->height;

  image->stride[VPX_PLANE_Y] =
      gst_video_format_get_row_stride (state->format, 0, state->width);
  image->stride[VPX_PLANE_U] =
      gst_video_format_get_row_stride (state->format, 1, state->width);
  image->stride[VPX_PLANE_V] =
      gst_video_format_get_row_stride (state->format, 2, state->width);
  image->planes[VPX_PLANE_Y] =
      data + gst_video_format_get_component_offset (state->format, 0,
      state->width, state->height);
  image->planes[VPX_PLANE_U] =
      data + gst_video_format_get_component_offset (state->format, 1,
      state->width, state->height);
  image->planes[VPX_PLANE_V] =
      data + gst_video_format_get_component_offset (state->format, 2,
      state->width, state->height);


  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  {
    GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
    GValue array = { 0, };
    GValue value = { 0, };
    s = gst_caps_get_structure (caps, 0);

    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
    g_value_init (&value, GST_TYPE_BUFFER);

    /* Create Ogg stream-info */
    stream_hdr = gst_buffer_new_and_alloc (26);
    data = GST_BUFFER_DATA (stream_hdr);

    GST_WRITE_UINT8 (data, 0x4F);
    GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
    GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
    GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
    GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
    GST_WRITE_UINT16_BE (data + 8, state->width);
    GST_WRITE_UINT16_BE (data + 10, state->height);
    GST_WRITE_UINT24_BE (data + 12, state->par_n);
    GST_WRITE_UINT24_BE (data + 15, state->par_d);
    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
    GST_WRITE_UINT32_BE (data + 22, state->fps_d);

    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
    gst_value_set_buffer (&value, stream_hdr);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (stream_hdr);

    iface_tags =
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
    if (iface_tags) {
      vorbiscomment =
          gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_set_buffer (&value, vorbiscomment);
      gst_value_array_append_value (&array, &value);
      g_value_unset (&value);
      gst_buffer_unref (vorbiscomment);
    }

    gst_structure_set_value (s, "streamheader", &array);
    g_value_unset (&array);
  }

  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
  gst_caps_unref (caps);

  return ret;
}