Example #1
static void
gst_ivf_parse_update_src_caps (GstIvfParse * ivf)
{
  GstCaps *caps;
  const gchar *media_type;
  if (!ivf->update_caps &&
      G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (ivf))))
    return;
  ivf->update_caps = FALSE;

  media_type = fourcc_to_media_type (ivf->fourcc);

  /* Create src pad caps */
  caps = gst_caps_new_simple (media_type, "width", G_TYPE_INT, ivf->width,
      "height", G_TYPE_INT, ivf->height, NULL);

  if (ivf->fps_n > 0 && ivf->fps_d > 0) {
    gst_base_parse_set_frame_rate (GST_BASE_PARSE_CAST (ivf),
        ivf->fps_n, ivf->fps_d, 0, 0);
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, ivf->fps_n,
        ivf->fps_d, NULL);
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (ivf), caps);
  gst_caps_unref (caps);
}
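The guard at the top is the idiomatic use of gst_pad_has_current_caps() in parsers: skip renegotiation when the source pad already has caps and no update was requested. A minimal sketch of the same pattern, using a hypothetical MyParse type with a dirty flag (illustrative names, not taken from the project above):

static void
my_parse_update_src_caps (MyParse * parse, GstPad * srcpad)
{
  GstCaps *caps;

  /* caps already negotiated and no change requested: nothing to do */
  if (!parse->dirty && gst_pad_has_current_caps (srcpad))
    return;
  parse->dirty = FALSE;

  caps = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, parse->width,
      "height", G_TYPE_INT, parse->height, NULL);
  gst_pad_set_caps (srcpad, caps);
  gst_caps_unref (caps);
}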
Example #2
static gboolean
gst_srtp_dec_sink_event_rtcp (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean ret;
  GstCaps *caps;
  GstSrtpDec *filter = GST_SRTP_DEC (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
      gst_event_parse_caps (event, &caps);
      ret = gst_srtp_dec_sink_setcaps (pad, parent, caps, TRUE);
      gst_event_unref (event);
      return ret;
    case GST_EVENT_SEGMENT:
      /* Make sure to send a caps event downstream before the segment event,
       * even if upstream didn't */
      if (!gst_pad_has_current_caps (filter->rtcp_srcpad)) {
        GstCaps *caps = gst_caps_new_empty_simple ("application/x-rtcp");

        gst_pad_set_caps (filter->rtcp_srcpad, caps);
        gst_caps_unref (caps);
      }
      filter->rtcp_has_segment = TRUE;
      break;
    case GST_EVENT_FLUSH_STOP:
      filter->rtcp_has_segment = FALSE;
      break;
    default:
      break;
  }

  return gst_pad_event_default (pad, parent, event);
}
Example #3
File: gstdecode.c  Project: CityFire/vlc
/* Emitted by decodebin and links decodebin to vlcvideosink.
 * Since only one elementary codec stream is fed to decodebin,
 * this signal cannot be emitted more than once. */
static void pad_added_cb( GstElement *p_ele, GstPad *p_pad, gpointer p_data )
{
    VLC_UNUSED( p_ele );
    decoder_t *p_dec = p_data;
    decoder_sys_t *p_sys = p_dec->p_sys;

    if( likely( gst_pad_has_current_caps( p_pad ) ) )
    {
        GstPadLinkReturn ret;
        GstPad *p_sinkpad;

        msg_Dbg( p_dec, "linking the decoder with the vsink");

        p_sinkpad = gst_element_get_static_pad(
                p_sys->p_decode_out, "sink" );
        ret = gst_pad_link( p_pad, p_sinkpad );
        if( ret != GST_PAD_LINK_OK )
            msg_Err( p_dec, "failed to link decoder with vsink");

        gst_object_unref( p_sinkpad );
    }
    else
    {
        msg_Err( p_dec, "decodebin src pad has no caps" );
        GST_ELEMENT_ERROR( p_sys->p_decoder, STREAM, FAILED,
                ( "vlc stream error" ), NULL );
    }
}
Example #4
static void
gst_ffmpegdeinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstFFMpegDeinterlace *self;

  g_return_if_fail (GST_IS_FFMPEGDEINTERLACE (object));
  self = GST_FFMPEGDEINTERLACE (object);

  switch (prop_id) {
    case PROP_MODE:{
      gint new_mode;

      GST_OBJECT_LOCK (self);
      new_mode = g_value_get_enum (value);
      if (self->mode != new_mode && gst_pad_has_current_caps (self->srcpad)) {
        self->reconfigure = TRUE;
        self->new_mode = new_mode;
      } else {
        self->mode = new_mode;
        gst_ffmpegdeinterlace_update_passthrough (self);
      }
      GST_OBJECT_UNLOCK (self);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
  }
}
Example #5
static void
gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAspectRatioCrop *aspect_ratio_crop;
  gboolean recheck = FALSE;

  aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);

  GST_OBJECT_LOCK (aspect_ratio_crop);
  switch (prop_id) {
    case ARG_ASPECT_RATIO_CROP:
      if (GST_VALUE_HOLDS_FRACTION (value)) {
        aspect_ratio_crop->ar_num = gst_value_get_fraction_numerator (value);
        aspect_ratio_crop->ar_denom =
            gst_value_get_fraction_denominator (value);
        recheck = gst_pad_has_current_caps (aspect_ratio_crop->sink);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (aspect_ratio_crop);

  if (recheck) {
    GstCaps *caps = gst_pad_get_current_caps (aspect_ratio_crop->sink);
    gst_aspect_ratio_crop_set_caps (aspect_ratio_crop, caps);
    gst_caps_unref (caps);
  }
}
Example #6
static void
gst_interleave_release_pad (GstElement * element, GstPad * pad)
{
  GstInterleave *self = GST_INTERLEAVE (element);
  GList *l;
  GstAudioChannelPosition position;

  g_return_if_fail (GST_IS_INTERLEAVE_PAD (pad));

  /* Take lock to make sure we're not changing this when processing buffers */
  GST_OBJECT_LOCK (self->collect);

  g_atomic_int_add (&self->channels, -1);

  if (gst_pad_has_current_caps (pad))
    g_atomic_int_add (&self->configured_sinkpads_counter, -1);

  position = GST_INTERLEAVE_PAD_CAST (pad)->channel;
  g_value_array_remove (self->input_channel_positions, position);

  /* Update channel numbers */
  GST_OBJECT_LOCK (self);
  for (l = GST_ELEMENT_CAST (self)->sinkpads; l != NULL; l = l->next) {
    GstInterleavePad *ipad = GST_INTERLEAVE_PAD (l->data);

    if (GST_INTERLEAVE_PAD_CAST (pad)->channel < ipad->channel)
      ipad->channel--;
  }
  GST_OBJECT_UNLOCK (self);

  /* Update the src caps if we already have them */
  if (self->sinkcaps) {
    if (self->channels > 0) {
      GstCaps *srccaps;
      GstStructure *s;

      srccaps = gst_caps_copy (self->sinkcaps);
      s = gst_caps_get_structure (srccaps, 0);

      gst_structure_set (s, "channels", G_TYPE_INT, self->channels, NULL);
      gst_interleave_set_channel_positions (self, s);

      gst_pad_set_active (self->src, TRUE);
      gst_pad_set_caps (self->src, srccaps);
      gst_caps_unref (srccaps);
    } else {
      gst_caps_replace (&self->sinkcaps, NULL);
    }
  }

  GST_OBJECT_UNLOCK (self->collect);

  gst_collect_pads_remove_pad (self->collect, pad);
  gst_element_remove_pad (element, pad);
}
Example #7
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstObject * parent,
    GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace = GST_FFMPEGDEINTERLACE (parent);
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;
  GstMapInfo from_map, to_map;

  GST_OBJECT_LOCK (deinterlace);
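  /* a mode change requested while caps were live was deferred (see
   * set_property in Example #4 above); re-run setcaps with the current
   * sink caps so passthrough and buffer sizes get recomputed */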
  if (deinterlace->reconfigure) {
    if (deinterlace->new_mode != -1)
      deinterlace->mode = deinterlace->new_mode;
    deinterlace->new_mode = -1;

    deinterlace->reconfigure = FALSE;
    GST_OBJECT_UNLOCK (deinterlace);
    if (gst_pad_has_current_caps (deinterlace->srcpad)) {
      GstCaps *caps;

      caps = gst_pad_get_current_caps (deinterlace->sinkpad);
      gst_ffmpegdeinterlace_sink_setcaps (deinterlace->sinkpad, caps);
      gst_caps_unref (caps);
    }
  } else {
    GST_OBJECT_UNLOCK (deinterlace);
  }

  if (deinterlace->passthrough)
    return gst_pad_push (deinterlace->srcpad, inbuf);

  outbuf = gst_buffer_new_and_alloc (deinterlace->to_size);

  gst_buffer_map (inbuf, &from_map, GST_MAP_READ);
  gst_ffmpeg_avpicture_fill (&deinterlace->from_frame, from_map.data,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);

  gst_buffer_map (outbuf, &to_map, GST_MAP_WRITE);
  gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, to_map.data,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);

  avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);
  gst_buffer_unmap (outbuf, &to_map);
  gst_buffer_unmap (inbuf, &from_map);

  gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);

  result = gst_pad_push (deinterlace->srcpad, outbuf);

  gst_buffer_unref (inbuf);

  return result;
}
Example #8
static gboolean
gst_dvdec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDVDec *dvdec;
  gboolean res = TRUE;

  dvdec = GST_DVDEC (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      gst_segment_init (&dvdec->segment, GST_FORMAT_UNDEFINED);
      dvdec->need_segment = FALSE;
      break;
    case GST_EVENT_SEGMENT:{
      const GstSegment *segment;

      gst_event_parse_segment (event, &segment);

      GST_DEBUG_OBJECT (dvdec, "Got SEGMENT %" GST_SEGMENT_FORMAT, segment);

      gst_segment_copy_into (segment, &dvdec->segment);
      if (!gst_pad_has_current_caps (dvdec->srcpad)) {
        dvdec->need_segment = TRUE;
        gst_event_unref (event);
        event = NULL;
        res = TRUE;
      } else {
        dvdec->need_segment = FALSE;
      }
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      res = gst_dvdec_sink_setcaps (dvdec, caps);
      gst_event_unref (event);
      event = NULL;
      break;
    }

    default:
      break;
  }

  if (event)
    res = gst_pad_push_event (dvdec->srcpad, event);

  return res;
}
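When the segment is held back like this, the element re-emits it later, once caps are finally set, before the first buffer goes out. A hedged sketch of that counterpart step, reusing the field names from the example above:

  if (dvdec->need_segment) {
    gst_pad_push_event (dvdec->srcpad,
        gst_event_new_segment (&dvdec->segment));
    dvdec->need_segment = FALSE;
  }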
Example #9
/* make sure we are negotiated */
static GstFlowReturn
ensure_negotiated (GstMonoscope * monoscope)
{
  gboolean reconfigure;

  reconfigure = gst_pad_check_reconfigure (monoscope->srcpad);

  /* we don't know an output format yet, pick one */
  if (reconfigure || !gst_pad_has_current_caps (monoscope->srcpad)) {
    if (!gst_monoscope_src_negotiate (monoscope))
      return GST_FLOW_NOT_NEGOTIATED;
  }
  return GST_FLOW_OK;
}
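A helper like ensure_negotiated() is called from the streaming path before any data is pushed. A minimal, hypothetical caller (the real gst_monoscope_chain does considerably more):

static GstFlowReturn
my_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMonoscope *monoscope = GST_MONOSCOPE (parent);
  GstFlowReturn ret;

  /* pick output caps on the first buffer or after a reconfigure request */
  ret = ensure_negotiated (monoscope);
  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (buf);
    return ret;
  }

  /* ... render and push output downstream here ... */
  return ret;
}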
Example #10
static int
gst_dv1394src_iec61883_receive (unsigned char *data, int len,
    int complete, void *cbdata)
{
  GstDV1394Src *dv1394src = GST_DV1394SRC (cbdata);

  if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_SRC_PAD (dv1394src)))) {
    GstCaps *caps;
    unsigned char *p = data;

    // figure format (NTSC/PAL)
    if (p[3] & 0x80) {
      // PAL
      dv1394src->frame_size = PAL_FRAMESIZE;
      dv1394src->frame_rate = PAL_FRAMERATE;
      GST_DEBUG ("PAL data");
      caps = gst_caps_new_simple ("video/x-dv",
          "format", G_TYPE_STRING, "PAL",
          "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
    } else {
      // NTSC (untested)
      dv1394src->frame_size = NTSC_FRAMESIZE;
      dv1394src->frame_rate = NTSC_FRAMERATE;
      GST_DEBUG
          ("NTSC data [untested] - please report success/failure to <*****@*****.**>");
      caps = gst_caps_new_simple ("video/x-dv",
          "format", G_TYPE_STRING, "NTSC",
          "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
    }
    gst_pad_set_caps (GST_BASE_SRC_PAD (dv1394src), caps);
    gst_caps_unref (caps);
  }

  dv1394src->frame = NULL;
  if (G_LIKELY ((dv1394src->frame_sequence + 1) % (dv1394src->skip +
              dv1394src->consecutive) < dv1394src->consecutive)) {
    if (complete && len == dv1394src->frame_size) {
      GstBuffer *buf;

      buf = gst_buffer_new_and_alloc (dv1394src->frame_size);

      GST_BUFFER_OFFSET (buf) = dv1394src->frame_sequence;
      gst_buffer_fill (buf, 0, data, len);
      dv1394src->buf = buf;
    }
  }
  dv1394src->frame_sequence++;
  return 0;
}
Example #11
static GstCaps *
gst_data_uri_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  GstDataURISrc *src = GST_DATA_URI_SRC (basesrc);
  GstCaps *caps;

  GST_OBJECT_LOCK (src);
  if (gst_pad_has_current_caps (GST_BASE_SRC_PAD (basesrc)))
    caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (basesrc));
  else
    caps = gst_caps_new_any ();
  GST_OBJECT_UNLOCK (src);

  return caps;
}
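Note that gst_pad_get_current_caps() already returns NULL when no caps are configured, so the has/get pair above can be collapsed into a single call plus a NULL check:

  caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (basesrc));
  if (caps == NULL)
    caps = gst_caps_new_any ();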
Example #12
static GstFlowReturn
gst_kate_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstKateParseClass *klass;
  GstKateParse *parse;

  parse = GST_KATE_PARSE (parent);
  klass = GST_KATE_PARSE_CLASS (G_OBJECT_GET_CLASS (parse));

  g_assert (klass->parse_packet != NULL);

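  /* refuse data until a CAPS event has configured the sink pad */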
  if (G_UNLIKELY (!gst_pad_has_current_caps (pad)))
    return GST_FLOW_NOT_NEGOTIATED;

  return klass->parse_packet (parse, buffer);
}
Example #13
/* Queue the segment event if there was no caps event */
static gboolean
gst_capsfilter_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstCapsFilter *filter = GST_CAPSFILTER (trans);

  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    GList *l;

    for (l = filter->pending_events; l;) {
      if (GST_EVENT_TYPE (l->data) == GST_EVENT_SEGMENT) {
        gst_event_unref (l->data);
        l = g_list_delete_link (l, l);
      } else {
        l = l->next;
      }
    }
  }

  if (!GST_EVENT_IS_STICKY (event) || GST_EVENT_TYPE (event) <= GST_EVENT_CAPS)
    goto done;

  /* If we get EOS before any buffers, just push all pending events */
  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
    GList *l;

    for (l = g_list_last (filter->pending_events); l; l = l->prev) {
      GST_LOG_OBJECT (trans, "Forwarding %s event",
          GST_EVENT_TYPE_NAME (l->data));
      GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, l->data);
    }
    g_list_free (filter->pending_events);
    filter->pending_events = NULL;
  } else if (!gst_pad_has_current_caps (trans->sinkpad)) {
    GST_LOG_OBJECT (trans, "Got %s event before caps, queueing",
        GST_EVENT_TYPE_NAME (event));

    filter->pending_events = g_list_prepend (filter->pending_events, event);

    return TRUE;
  }

done:

  GST_LOG_OBJECT (trans, "Forwarding %s event", GST_EVENT_TYPE_NAME (event));
  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
Example #14
static gboolean
gst_flxdec_sink_event_handler (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstFlxDec *flxdec;
  gboolean ret;

  flxdec = GST_FLXDEC (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      gst_event_copy_segment (event, &flxdec->segment);
      if (flxdec->segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (flxdec, "generating TIME segment");
        gst_segment_init (&flxdec->segment, GST_FORMAT_TIME);
        gst_event_unref (event);
        event = gst_event_new_segment (&flxdec->segment);
      }

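      /* forward the segment only if caps are already set; otherwise hold
       * it back and remember to generate one later */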
      if (gst_pad_has_current_caps (flxdec->srcpad)) {
        ret = gst_pad_event_default (pad, parent, event);
      } else {
        flxdec->need_segment = TRUE;
        gst_event_unref (event);
        ret = TRUE;
      }
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      gst_segment_init (&flxdec->segment, GST_FORMAT_UNDEFINED);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
Example #15
static GstCaps *
gst_image_freeze_sink_getcaps (GstImageFreeze * self, GstCaps * filter)
{
    GstCaps *ret, *tmp, *templ;
    GstPad *pad;

    pad = self->sinkpad;
    if (gst_pad_has_current_caps (pad)) {
        ret = gst_pad_get_current_caps (pad);
        goto done;
    }

    if (filter) {
        filter = gst_caps_copy (filter);
        gst_image_freeze_remove_fps (self, filter);
    }
    templ = gst_pad_get_pad_template_caps (pad);
    tmp = gst_pad_peer_query_caps (self->srcpad, filter);
    if (tmp) {
        GST_LOG_OBJECT (self, "peer caps %" GST_PTR_FORMAT, tmp);
        ret = gst_caps_intersect (tmp, templ);
        gst_caps_unref (tmp);
    } else {
        GST_LOG_OBJECT (self, "going to copy");
        ret = gst_caps_copy (templ);
    }
    gst_caps_unref (templ);
    if (filter)
        gst_caps_unref (filter);

    ret = gst_caps_make_writable (ret);
    gst_image_freeze_remove_fps (self, ret);

done:
    GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

    return ret;
}
Example #16
static GstFlowReturn
gst_wavpack_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf)
{
  GstWavpackDec *dec;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  WavpackHeader wph;
  int32_t decoded, unpacked_size;
  gboolean format_changed;
  gint width, depth, i, j, max;
  gint32 *dec_data = NULL;
  guint8 *out_data;
  GstMapInfo map, omap;

  dec = GST_WAVPACK_DEC (bdec);

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* check input, we only accept framed input with complete chunks */
  if (map.size < sizeof (WavpackHeader))
    goto input_not_framed;

  if (!gst_wavpack_read_header (&wph, map.data))
    goto invalid_header;

  if (map.size < wph.ckSize + 4 * 1 + 4)
    goto input_not_framed;

  if (!(wph.flags & INITIAL_BLOCK))
    goto input_not_framed;

  dec->wv_id.buffer = map.data;
  dec->wv_id.length = map.size;
  dec->wv_id.position = 0;

  /* create a new wavpack context if there is none yet but if there
   * was already one (i.e. caps were set on the srcpad) check whether
   * the new one has the same caps */
  if (!dec->context) {
    gchar error_msg[80];

    dec->context = WavpackOpenFileInputEx (dec->stream_reader,
        &dec->wv_id, NULL, error_msg, OPEN_STREAMING, 0);

    /* expect this to work */
    if (!dec->context) {
      GST_WARNING_OBJECT (dec, "Couldn't decode buffer: %s", error_msg);
      goto context_failed;
    }
  }

  g_assert (dec->context != NULL);

  format_changed =
      (dec->sample_rate != WavpackGetSampleRate (dec->context)) ||
      (dec->channels != WavpackGetNumChannels (dec->context)) ||
      (dec->depth != WavpackGetBytesPerSample (dec->context) * 8) ||
      (dec->channel_mask != WavpackGetChannelMask (dec->context));

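  /* negotiate if no src caps have been set yet, or renegotiate if the
   * stream parameters changed mid-stream */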
  if (!gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (dec)) ||
      format_changed) {
    gint channel_mask;

    dec->sample_rate = WavpackGetSampleRate (dec->context);
    dec->channels = WavpackGetNumChannels (dec->context);
    dec->depth = WavpackGetBytesPerSample (dec->context) * 8;

    channel_mask = WavpackGetChannelMask (dec->context);
    if (channel_mask == 0)
      channel_mask = gst_wavpack_get_default_channel_mask (dec->channels);

    dec->channel_mask = channel_mask;

    gst_wavpack_dec_negotiate (dec);

    /* send GST_TAG_AUDIO_CODEC and GST_TAG_BITRATE tags before something
     * is decoded or after the format has changed */
    gst_wavpack_dec_post_tags (dec);
  }

  /* alloc output buffer */
  dec_data = g_malloc (4 * wph.block_samples * dec->channels);

  /* decode */
  decoded = WavpackUnpackSamples (dec->context, dec_data, wph.block_samples);
  if (decoded != wph.block_samples)
    goto decode_error;

  unpacked_size = (dec->width / 8) * wph.block_samples * dec->channels;
  outbuf = gst_buffer_new_and_alloc (unpacked_size);

  /* legacy; pass along offset, whatever that might entail */
  GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = omap.data;

  width = dec->width;
  depth = dec->depth;
  max = dec->channels * wph.block_samples;
  if (width == 8) {
    gint8 *outbuffer = (gint8 *) out_data;
    gint *reorder_map = dec->channel_reorder_map;

    for (i = 0; i < max; i += dec->channels) {
      for (j = 0; j < dec->channels; j++)
        *outbuffer++ = (gint8) (dec_data[i + reorder_map[j]]);
    }
  } else if (width == 16) {
    gint16 *outbuffer = (gint16 *) out_data;
    gint *reorder_map = dec->channel_reorder_map;

    for (i = 0; i < max; i += dec->channels) {
      for (j = 0; j < dec->channels; j++)
        *outbuffer++ = (gint16) (dec_data[i + reorder_map[j]]);
    }
  } else if (dec->width == 32) {
    gint32 *outbuffer = (gint32 *) out_data;
    gint *reorder_map = dec->channel_reorder_map;

    if (width != depth) {
      for (i = 0; i < max; i += dec->channels) {
        for (j = 0; j < dec->channels; j++)
          *outbuffer++ =
              (gint32) (dec_data[i + reorder_map[j]] << (width - depth));
      }
    } else {
      for (i = 0; i < max; i += dec->channels) {
        for (j = 0; j < dec->channels; j++)
          *outbuffer++ = (gint32) (dec_data[i + reorder_map[j]]);
      }
    }
  } else {
    g_assert_not_reached ();
  }

  gst_buffer_unmap (outbuf, &omap);
  gst_buffer_unmap (buf, &map);
  buf = NULL;

  g_free (dec_data);

  ret = gst_audio_decoder_finish_frame (bdec, outbuf, 1);

out:
  if (buf)
    gst_buffer_unmap (buf, &map);

  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (ret));
  }

  return ret;

/* ERRORS */
input_not_framed:
  {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Expected framed input"));
    ret = GST_FLOW_ERROR;
    goto out;
  }
invalid_header:
  {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Invalid wavpack header"));
    ret = GST_FLOW_ERROR;
    goto out;
  }
context_failed:
  {
    GST_AUDIO_DECODER_ERROR (bdec, 1, LIBRARY, INIT, (NULL),
        ("error creating Wavpack context"), ret);
    goto out;
  }
decode_error:
  {
    const gchar *reason = "unknown";

    if (dec->context) {
      reason = WavpackGetErrorMessage (dec->context);
    } else {
      reason = "couldn't create decoder context";
    }
    GST_AUDIO_DECODER_ERROR (bdec, 1, STREAM, DECODE, (NULL),
        ("decoding error: %s", reason), ret);
    g_free (dec_data);
    if (ret == GST_FLOW_OK)
      gst_audio_decoder_finish_frame (bdec, NULL, 1);
    goto out;
  }
}
Example #17
static void
gst_omx_audio_dec_loop (GstOMXAudioDec * self)
{
  GstOMXPort *port = self->dec_out_port;
  GstOMXBuffer *buf = NULL;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstOMXAcquireBufferReturn acq_return;
  OMX_ERRORTYPE err;

  acq_return = gst_omx_port_acquire_buffer (port, &buf);
  if (acq_return == GST_OMX_ACQUIRE_BUFFER_ERROR) {
    goto component_error;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_FLUSHING) {
    goto flushing;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_EOS) {
    goto eos;
  }

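  /* no src caps yet, or the component requested an output port
   * reconfiguration: (re)compute the output format before handling data */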
  if (!gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (self)) ||
      acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
    OMX_PARAM_PORTDEFINITIONTYPE port_def;
    OMX_AUDIO_PARAM_PCMMODETYPE pcm_param;
    GstAudioChannelPosition omx_position[OMX_AUDIO_MAXCHANNELS];
    GstOMXAudioDecClass *klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);
    gint i;

    GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");

    /* Reallocate all buffers */
    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE
        && gst_omx_port_is_enabled (port)) {
      err = gst_omx_port_set_enabled (port, FALSE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_deallocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 1 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;
    }

    /* Just update caps */
    GST_AUDIO_DECODER_STREAM_LOCK (self);

    gst_omx_port_get_port_definition (port, &port_def);
    g_assert (port_def.format.audio.eEncoding == OMX_AUDIO_CodingPCM);

    GST_OMX_INIT_STRUCT (&pcm_param);
    pcm_param.nPortIndex = self->dec_out_port->index;
    err =
        gst_omx_component_get_parameter (self->dec, OMX_IndexParamAudioPcm,
        &pcm_param);
    if (err != OMX_ErrorNone) {
      GST_ERROR_OBJECT (self, "Failed to get PCM parameters: %s (0x%08x)",
          gst_omx_error_to_string (err), err);
      goto caps_failed;
    }

    g_assert (pcm_param.ePCMMode == OMX_AUDIO_PCMModeLinear);
    g_assert (pcm_param.bInterleaved == OMX_TRUE);

    gst_audio_info_init (&self->info);

    for (i = 0; i < pcm_param.nChannels; i++) {
      switch (pcm_param.eChannelMapping[i]) {
        case OMX_AUDIO_ChannelLF:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
          break;
        case OMX_AUDIO_ChannelRF:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
          break;
        case OMX_AUDIO_ChannelCF:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
          break;
        case OMX_AUDIO_ChannelLS:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT;
          break;
        case OMX_AUDIO_ChannelRS:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT;
          break;
        case OMX_AUDIO_ChannelLFE:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_LFE1;
          break;
        case OMX_AUDIO_ChannelCS:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_CENTER;
          break;
        case OMX_AUDIO_ChannelLR:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
          break;
        case OMX_AUDIO_ChannelRR:
          omx_position[i] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
          break;
        case OMX_AUDIO_ChannelNone:
        default:
          /* This will break the outer loop too as the
           * i == pcm_param.nChannels afterwards */
          for (i = 0; i < pcm_param.nChannels; i++)
            omx_position[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
          break;
      }
    }
    if (pcm_param.nChannels == 1
        && omx_position[0] == GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER)
      omx_position[0] = GST_AUDIO_CHANNEL_POSITION_MONO;

    if (omx_position[0] == GST_AUDIO_CHANNEL_POSITION_NONE
        && klass->get_channel_positions) {
      GST_WARNING_OBJECT (self,
          "Failed to get a valid channel layout, trying fallback");
      klass->get_channel_positions (self, self->dec_out_port, omx_position);
    }

    memcpy (self->position, omx_position, sizeof (omx_position));
    gst_audio_channel_positions_to_valid_order (self->position,
        pcm_param.nChannels);
    self->needs_reorder =
        (memcmp (self->position, omx_position,
            sizeof (GstAudioChannelPosition) * pcm_param.nChannels) != 0);
    if (self->needs_reorder)
      gst_audio_get_channel_reorder_map (pcm_param.nChannels, self->position,
          omx_position, self->reorder_map);

    gst_audio_info_set_format (&self->info,
        gst_audio_format_build_integer (pcm_param.eNumData ==
            OMX_NumericalDataSigned,
            pcm_param.eEndian ==
            OMX_EndianLittle ? G_LITTLE_ENDIAN : G_BIG_ENDIAN,
            pcm_param.nBitPerSample, pcm_param.nBitPerSample),
        pcm_param.nSamplingRate, pcm_param.nChannels, self->position);

    GST_DEBUG_OBJECT (self,
        "Setting output state: format %s, rate %u, channels %u",
        gst_audio_format_to_string (self->info.finfo->format),
        (guint) pcm_param.nSamplingRate, (guint) pcm_param.nChannels);

    if (!gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (self),
            &self->info)
        || !gst_audio_decoder_negotiate (GST_AUDIO_DECODER (self))) {
      if (buf)
        gst_omx_port_release_buffer (port, buf);
      goto caps_failed;
    }

    GST_AUDIO_DECODER_STREAM_UNLOCK (self);

    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
      err = gst_omx_port_set_enabled (port, TRUE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_allocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_populate (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_mark_reconfigured (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;
    }

    /* Now get a buffer */
    if (acq_return != GST_OMX_ACQUIRE_BUFFER_OK) {
      return;
    }
  }

  g_assert (acq_return == GST_OMX_ACQUIRE_BUFFER_OK);
  if (!buf) {
    GstOMXAudioDecClass *klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);

    g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER));
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    goto eos;
  }

  /* This prevents a deadlock between the srcpad stream
   * lock and the audiocodec stream lock, if ::reset()
   * is called at the wrong time
   */
  if (gst_omx_port_is_flushing (port)) {
    GST_DEBUG_OBJECT (self, "Flushing");
    gst_omx_port_release_buffer (port, buf);
    goto flushing;
  }

  GST_DEBUG_OBJECT (self, "Handling buffer: 0x%08x %" G_GUINT64_FORMAT,
      (guint) buf->omx_buf->nFlags, (guint64) buf->omx_buf->nTimeStamp);

  GST_AUDIO_DECODER_STREAM_LOCK (self);

  if (buf->omx_buf->nFilledLen > 0) {
    GstBuffer *outbuf;
    gint nframes, spf;
    GstMapInfo minfo;
    GstOMXAudioDecClass *klass = GST_OMX_AUDIO_DEC_GET_CLASS (self);

    GST_DEBUG_OBJECT (self, "Handling output data");

    if (buf->omx_buf->nFilledLen % self->info.bpf != 0) {
      gst_omx_port_release_buffer (port, buf);
      goto invalid_buffer;
    }

    outbuf =
        gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self),
        buf->omx_buf->nFilledLen);

    gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE);
    if (self->needs_reorder) {
      gint i, n_samples, c, n_channels;
      gint *reorder_map = self->reorder_map;
      gint16 *dest, *source;

      dest = (gint16 *) minfo.data;
      source = (gint16 *) (buf->omx_buf->pBuffer + buf->omx_buf->nOffset);
      n_samples = buf->omx_buf->nFilledLen / self->info.bpf;
      n_channels = self->info.channels;

      for (i = 0; i < n_samples; i++) {
        for (c = 0; c < n_channels; c++) {
          dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c];
        }
      }
    } else {
      memcpy (minfo.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
          buf->omx_buf->nFilledLen);
    }
    gst_buffer_unmap (outbuf, &minfo);

    nframes = 1;
    spf = klass->get_samples_per_frame (self, self->dec_out_port);
    if (spf != -1) {
      nframes = buf->omx_buf->nFilledLen / self->info.bpf;
      if (nframes % spf != 0)
        GST_WARNING_OBJECT (self, "Output buffer does not contain an integer "
            "number of input frames (frames: %d, spf: %d)", nframes, spf);
      nframes = (nframes + spf - 1) / spf;
    }

    GST_BUFFER_TIMESTAMP (outbuf) =
        gst_util_uint64_scale (buf->omx_buf->nTimeStamp, GST_SECOND,
        OMX_TICKS_PER_SECOND);
    if (buf->omx_buf->nTickCount != 0)
      GST_BUFFER_DURATION (outbuf) =
          gst_util_uint64_scale (buf->omx_buf->nTickCount, GST_SECOND,
          OMX_TICKS_PER_SECOND);

    flow_ret =
        gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf,
        nframes);
  }

  GST_DEBUG_OBJECT (self, "Read frame from component");

  GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));

  if (buf) {
    err = gst_omx_port_release_buffer (port, buf);
    if (err != OMX_ErrorNone)
      goto release_error;
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_DECODER_STREAM_UNLOCK (self);

  return;

component_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("OpenMAX component in error state %s (0x%08x)",
            gst_omx_component_get_last_error_string (self->dec),
            gst_omx_component_get_last_error (self->dec)));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    return;
  }

flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    self->started = FALSE;
    return;
  }

eos:
  {
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
      flow_ret = GST_FLOW_OK;
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);

    GST_AUDIO_DECODER_STREAM_LOCK (self);
    self->downstream_flow_ret = flow_ret;

    /* Here we fallback and pause the task for the EOS case */
    if (flow_ret != GST_FLOW_OK)
      goto flow_error;

    GST_AUDIO_DECODER_STREAM_UNLOCK (self);

    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");

      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
      self->started = FALSE;
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));

      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
      self->started = FALSE;
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
      self->started = FALSE;
    }
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }

reconfigure_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Unable to reconfigure output port"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    return;
  }

invalid_buffer:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Invalid sized input buffer"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }

caps_failed:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to set caps"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    return;
  }
release_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to relase output buffer to component: %s (0x%08x)",
            gst_omx_error_to_string (err), err));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }
}
Example #18
static GstFlowReturn
gst_app_sink_render (GstBaseSink * psink, GstBuffer * buffer)
{
  GstFlowReturn ret;
  GstAppSink *appsink = GST_APP_SINK_CAST (psink);
  GstAppSinkPrivate *priv = appsink->priv;
  gboolean emit;

restart:
  g_mutex_lock (&priv->mutex);
  if (priv->flushing)
    goto flushing;

  /* queue holding caps event might have been FLUSHed,
   * but caps state still present in pad caps */
  if (G_UNLIKELY (!priv->last_caps &&
          gst_pad_has_current_caps (GST_BASE_SINK_PAD (psink)))) {
    priv->last_caps = gst_pad_get_current_caps (GST_BASE_SINK_PAD (psink));
    GST_DEBUG_OBJECT (appsink, "activating pad caps %" GST_PTR_FORMAT,
        priv->last_caps);
  }

  GST_DEBUG_OBJECT (appsink, "pushing render buffer %p on queue (%d)",
      buffer, priv->num_buffers);

  while (priv->max_buffers > 0 && priv->num_buffers >= priv->max_buffers) {
    if (priv->drop) {
      GstBuffer *old;

      /* we need to drop the oldest buffer and try again */
      if ((old = dequeue_buffer (appsink))) {
        GST_DEBUG_OBJECT (appsink, "dropping old buffer %p", old);
        gst_buffer_unref (old);
      }
    } else {
      GST_DEBUG_OBJECT (appsink, "waiting for free space, length %d >= %d",
          priv->num_buffers, priv->max_buffers);

      if (priv->unlock) {
        /* we are asked to unlock, call the wait_preroll method */
        g_mutex_unlock (&priv->mutex);
        if ((ret = gst_base_sink_wait_preroll (psink)) != GST_FLOW_OK)
          goto stopping;

        /* we are allowed to continue now */
        goto restart;
      }

      /* wait for a buffer to be removed or flush */
      g_cond_wait (&priv->cond, &priv->mutex);
      if (priv->flushing)
        goto flushing;
    }
  }
  /* we need to ref the buffer when pushing it in the queue */
  g_queue_push_tail (priv->queue, gst_buffer_ref (buffer));
  priv->num_buffers++;
  g_cond_signal (&priv->cond);
  emit = priv->emit_signals;
  g_mutex_unlock (&priv->mutex);

  if (priv->callbacks.new_sample) {
    ret = priv->callbacks.new_sample (appsink, priv->user_data);
  } else {
    ret = GST_FLOW_OK;
    if (emit)
      g_signal_emit (appsink, gst_app_sink_signals[SIGNAL_NEW_SAMPLE], 0, &ret);
  }
  return ret;

flushing:
  {
    GST_DEBUG_OBJECT (appsink, "we are flushing");
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_FLUSHING;
  }
stopping:
  {
    GST_DEBUG_OBJECT (appsink, "we are stopping");
    return ret;
  }
}
Example #19
static void
gst_mpegv_parse_update_src_caps (GstMpegvParse * mpvparse)
{
  GstCaps *caps = NULL;

  /* only update if no src caps yet or explicitly triggered */
  if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mpvparse)) &&
          !mpvparse->update_caps))
    return;

  /* carry over input caps as much as possible; override with our own stuff */
  caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mpvparse));
  if (caps) {
    caps = gst_caps_make_writable (caps);
  } else {
    caps = gst_caps_new_empty_simple ("video/mpeg");
  }

  /* typically we don't output buffers until we have properly parsed some
   * config data, so we should at least know about version.
   * If not, it means it has been requested not to drop data, and
   * upstream and/or app must know what they are doing ... */
  gst_caps_set_simple (caps,
      "mpegversion", G_TYPE_INT, (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1,
      NULL);

  gst_caps_set_simple (caps, "systemstream", G_TYPE_BOOLEAN, FALSE,
      "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  if (mpvparse->sequencehdr.width > 0 && mpvparse->sequencehdr.height > 0) {
    gst_caps_set_simple (caps, "width", G_TYPE_INT, mpvparse->sequencehdr.width,
        "height", G_TYPE_INT, mpvparse->sequencehdr.height, NULL);
  }

  /* perhaps we have a framerate */
  if (mpvparse->fps_num > 0 && mpvparse->fps_den > 0) {
    gint fps_num = mpvparse->fps_num;
    gint fps_den = mpvparse->fps_den;
    GstClockTime latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);

    gst_caps_set_simple (caps, "framerate",
        GST_TYPE_FRACTION, fps_num, fps_den, NULL);
    gst_base_parse_set_frame_rate (GST_BASE_PARSE (mpvparse),
        fps_num, fps_den, 0, 0);
    gst_base_parse_set_latency (GST_BASE_PARSE (mpvparse), latency, latency);
  }

  /* or pixel-aspect-ratio */
  if (mpvparse->sequencehdr.par_w > 0 && mpvparse->sequencehdr.par_h > 0) {
    gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
        mpvparse->sequencehdr.par_w, mpvparse->sequencehdr.par_h, NULL);
  }

  if (mpvparse->config != NULL) {
    gst_caps_set_simple (caps, "codec_data",
        GST_TYPE_BUFFER, mpvparse->config, NULL);
  }

  if (mpvparse->config_flags & FLAG_SEQUENCE_EXT) {
    const guint profile_c = mpvparse->sequenceext.profile;
    const guint level_c = mpvparse->sequenceext.level;
    const gchar *profile = NULL, *level = NULL;
    /*
     * Profile indication - 1 => High, 2 => Spatially Scalable,
     *                      3 => SNR Scalable, 4 => Main, 5 => Simple
     * 4:2:2 and Multi-view have profile = 0, with the escape bit set to 1
     */
    const gchar *const profiles[] =
        { "high", "spatial", "snr", "main", "simple" };
    /*
     * Level indication - 4 => High, 6 => High-1440, 8 => Main, 10 => Low,
     *                    except in the case of profile = 0
     */
    const gchar *const levels[] = { "high", "high-1440", "main", "low" };

    if (profile_c > 0 && profile_c < 6)
      profile = profiles[profile_c - 1];

    if ((level_c > 3) && (level_c < 11) && (level_c % 2 == 0))
      level = levels[(level_c >> 1) - 2];

    if (profile_c == 8) {
      /* Non-hierarchical profile */
      switch (level_c) {
        case 2:
          level = levels[0];
          /* fall through */
        case 5:
          if (!level)
            level = levels[2];
          profile = "4:2:2";
          break;
        case 10:
          level = levels[0];
          /* fall through */
        case 11:
          if (!level)
            level = levels[1];
          /* fall through */
        case 13:
          if (!level)
            level = levels[2];
          /* fall through */
        case 14:
          if (!level)
            level = levels[3];
          profile = "multiview";
          break;
        default:
          break;
      }
    }

    /* FIXME does it make sense to expose profile/level in the caps ? */

    GST_DEBUG_OBJECT (mpvparse, "profile:'%s' level:'%s'", profile, level);

    if (profile)
      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid profile - %u", profile_c);

    if (level)
      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid level - %u", level_c);

    gst_caps_set_simple (caps, "interlace-mode",
        G_TYPE_STRING,
        (mpvparse->sequenceext.progressive ? "progressive" : "mixed"), NULL);
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (mpvparse), caps);
  gst_caps_unref (caps);

  mpvparse->update_caps = FALSE;
}
Example #20
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
        return (frame ? GST_FLOW_OK : GST_FLOW_EOS);
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

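    /* (re)negotiate when there are no src caps yet or the decoded
     * dimensions changed */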
    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec)) || actual_width != openh264dec->priv->width || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        return GST_FLOW_ERROR;
    }

    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}
Example #21
static gboolean
gst_interleave_sink_event (GstCollectPads * pads, GstCollectData * data,
    GstEvent * event, gpointer user_data)
{
  GstInterleave *self = GST_INTERLEAVE (user_data);
  gboolean ret = TRUE;

  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (data->pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, NULL);
      GST_OBJECT_UNLOCK (self);
      break;
    case GST_EVENT_SEGMENT:
    {
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, event);
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;
      GstAudioInfo info;
      GValue *val;
      guint channel;

      gst_event_parse_caps (event, &caps);

      if (!gst_audio_info_from_caps (&info, caps)) {
        GST_WARNING_OBJECT (self, "invalid sink caps");
        gst_event_unref (event);
        event = NULL;
        ret = FALSE;
        break;
      }

      if (self->channel_positions_from_input
          && GST_AUDIO_INFO_CHANNELS (&info) == 1) {
        channel = GST_INTERLEAVE_PAD_CAST (data->pad)->channel;
        val = g_value_array_get_nth (self->input_channel_positions, channel);
        g_value_set_enum (val, GST_AUDIO_INFO_POSITION (&info, 0));
      }

      if (!gst_pad_has_current_caps (data->pad))
        g_atomic_int_add (&self->configured_sinkpads_counter, 1);

      /* Last caps that are set on a sink pad are used as output caps */
      if (g_atomic_int_get (&self->configured_sinkpads_counter) ==
          self->channels) {
        ret = gst_interleave_sink_setcaps (self, data->pad, caps, &info);
        gst_event_unref (event);
        event = NULL;
      }
      break;
    }
    case GST_EVENT_TAG:
      GST_FIXME_OBJECT (self, "FIXME: merge tags and send after stream-start");
      break;
    default:
      break;
  }

  /* now GstCollectPads can take care of the rest, e.g. EOS */
  if (event != NULL)
    return gst_collect_pads_event_default (pads, data, event, FALSE);

  return ret;
}
Example #22
static void
gst_image_freeze_src_loop (GstPad * pad)
{
    GstImageFreeze *self = GST_IMAGE_FREEZE (GST_PAD_PARENT (pad));
    GstBuffer *buffer;
    guint64 offset;
    GstClockTime timestamp, timestamp_end;
    guint64 cstart, cstop;
    gboolean in_seg, eos;
    GstFlowReturn flow_ret = GST_FLOW_OK;

    g_mutex_lock (&self->lock);
    if (!gst_pad_has_current_caps (self->srcpad)) {
        GST_ERROR_OBJECT (pad, "Not negotiated yet");
        flow_ret = GST_FLOW_NOT_NEGOTIATED;
        g_mutex_unlock (&self->lock);
        goto pause_task;
    }

    if (!self->buffer) {
        GST_ERROR_OBJECT (pad, "Have no buffer yet");
        flow_ret = GST_FLOW_ERROR;
        g_mutex_unlock (&self->lock);
        goto pause_task;
    }
    buffer = gst_buffer_ref (self->buffer);
    buffer = gst_buffer_make_writable (buffer);
    g_mutex_unlock (&self->lock);

    if (self->need_segment) {
        GstEvent *e;

        GST_DEBUG_OBJECT (pad, "Pushing SEGMENT event: %" GST_SEGMENT_FORMAT,
                          &self->segment);
        e = gst_event_new_segment (&self->segment);

        g_mutex_lock (&self->lock);
        if (self->segment.rate >= 0) {
            self->offset =
                gst_util_uint64_scale (self->segment.start, self->fps_n,
                                       self->fps_d * GST_SECOND);
        } else {
            self->offset =
                gst_util_uint64_scale (self->segment.stop, self->fps_n,
                                       self->fps_d * GST_SECOND);
        }
        g_mutex_unlock (&self->lock);

        self->need_segment = FALSE;

        gst_pad_push_event (self->srcpad, e);
    }

    g_mutex_lock (&self->lock);
    offset = self->offset;

    if (self->fps_n != 0) {
        timestamp =
            gst_util_uint64_scale (offset, self->fps_d * GST_SECOND, self->fps_n);
        timestamp_end =
            gst_util_uint64_scale (offset + 1, self->fps_d * GST_SECOND,
                                   self->fps_n);
    } else {
        timestamp = self->segment.start;
        timestamp_end = GST_CLOCK_TIME_NONE;
    }

    eos = (self->fps_n == 0 && offset > 0) ||
          (self->segment.rate >= 0 && self->segment.stop != -1
           && timestamp > self->segment.stop) ||
          (self->segment.rate < 0 && offset == 0) ||
          (self->segment.rate < 0 && self->segment.start != -1
           && timestamp_end < self->segment.start);

    if (self->fps_n == 0 && offset > 0)
        in_seg = FALSE;
    else
        in_seg =
            gst_segment_clip (&self->segment, GST_FORMAT_TIME, timestamp,
                              timestamp_end, &cstart, &cstop);

    if (in_seg) {
        self->segment.position = cstart;
        if (self->segment.rate >= 0)
            self->segment.position = cstop;
    }

    if (self->segment.rate >= 0)
        self->offset++;
    else
        self->offset--;
    g_mutex_unlock (&self->lock);

    GST_DEBUG_OBJECT (pad, "Handling buffer with timestamp %" GST_TIME_FORMAT,
                      GST_TIME_ARGS (timestamp));

    if (in_seg) {
        GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
        GST_BUFFER_PTS (buffer) = cstart;
        GST_BUFFER_DURATION (buffer) = cstop - cstart;
        GST_BUFFER_OFFSET (buffer) = offset;
        GST_BUFFER_OFFSET_END (buffer) = offset + 1;
        flow_ret = gst_pad_push (self->srcpad, buffer);
        GST_DEBUG_OBJECT (pad, "Pushing buffer resulted in %s",
                          gst_flow_get_name (flow_ret));
        if (flow_ret != GST_FLOW_OK)
            goto pause_task;
    } else {
        gst_buffer_unref (buffer);
    }

    if (eos) {
        flow_ret = GST_FLOW_EOS;
        goto pause_task;
    }

    return;

pause_task:
    {
        const gchar *reason = gst_flow_get_name (flow_ret);

        GST_LOG_OBJECT (self, "pausing task, reason %s", reason);
        gst_pad_pause_task (pad);

        if (flow_ret == GST_FLOW_EOS) {
            if ((self->segment.flags & GST_SEEK_FLAG_SEGMENT)) {
                GstMessage *m;
                GstEvent *e;

                GST_DEBUG_OBJECT (pad, "Sending segment done at end of segment");
                if (self->segment.rate >= 0) {
                    m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
                                                      GST_FORMAT_TIME, self->segment.stop);
                    e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.stop);
                } else {
                    m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
                                                      GST_FORMAT_TIME, self->segment.start);
                    e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.start);
                }
                gst_element_post_message (GST_ELEMENT_CAST (self), m);
                gst_pad_push_event (self->srcpad, e);
            } else {
                GST_DEBUG_OBJECT (pad, "Sending EOS at end of segment");
                gst_pad_push_event (self->srcpad, gst_event_new_eos ());
            }
        } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
            GST_ELEMENT_ERROR (self, STREAM, FAILED,
                               ("Internal data stream error."),
                               ("stream stopped, reason %s", reason));
            gst_pad_push_event (self->srcpad, gst_event_new_eos ());
        }
        return;
    }
}
Example #23
static FLAC__StreamDecoderWriteStatus
gst_flac_dec_write (GstFlacDec * flacdec, const FLAC__Frame * frame,
    const FLAC__int32 * const buffer[])
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;
  guint depth = frame->header.bits_per_sample;
  guint width, gdepth;
  guint sample_rate = frame->header.sample_rate;
  guint channels = frame->header.channels;
  guint samples = frame->header.blocksize;
  guint j, i;
  GstMapInfo map;
  gboolean caps_changed;

  GST_LOG_OBJECT (flacdec, "samples in frame header: %d", samples);

  if (depth == 0) {
    if (flacdec->depth < 4 || flacdec->depth > 32) {
      GST_ERROR_OBJECT (flacdec, "unsupported depth %d from STREAMINFO",
          flacdec->depth);
      ret = GST_FLOW_ERROR;
      goto done;
    }

    depth = flacdec->depth;
  }

  switch (depth) {
    case 8:
      gdepth = width = 8;
      break;
    case 12:
    case 16:
      gdepth = width = 16;
      break;
    case 20:
    case 24:
      gdepth = 24;
      width = 32;
      break;
    case 32:
      gdepth = width = 32;
      break;
    default:
      GST_ERROR_OBJECT (flacdec, "unsupported depth %d", depth);
      ret = GST_FLOW_ERROR;
      goto done;
  }

  if (sample_rate == 0) {
    if (flacdec->info.rate != 0) {
      sample_rate = flacdec->info.rate;
    } else {
      GST_ERROR_OBJECT (flacdec, "unknown sample rate");
      ret = GST_FLOW_ERROR;
      goto done;
    }
  }

  caps_changed = (sample_rate != GST_AUDIO_INFO_RATE (&flacdec->info))
      || (width != GST_AUDIO_INFO_WIDTH (&flacdec->info))
      || (gdepth != GST_AUDIO_INFO_DEPTH (&flacdec->info))
      || (channels != GST_AUDIO_INFO_CHANNELS (&flacdec->info));

  if (caps_changed
      || !gst_pad_has_current_caps (GST_AUDIO_DECODER_SRC_PAD (flacdec))) {
    GST_DEBUG_OBJECT (flacdec, "Negotiating %d Hz @ %d channels", sample_rate,
        channels);

    gst_audio_info_set_format (&flacdec->info,
        gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, width, gdepth),
        sample_rate, channels, NULL);

    memcpy (flacdec->info.position,
        channel_positions[flacdec->info.channels - 1],
        sizeof (GstAudioChannelPosition) * flacdec->info.channels);
    gst_audio_channel_positions_to_valid_order (flacdec->info.position,
        flacdec->info.channels);
    /* Note: we create the inverse reordering map here */
    gst_audio_get_channel_reorder_map (flacdec->info.channels,
        flacdec->info.position, channel_positions[flacdec->info.channels - 1],
        flacdec->channel_reorder_map);

    flacdec->depth = depth;

    gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (flacdec),
        &flacdec->info);
  }

  outbuf =
      gst_buffer_new_allocate (NULL, samples * channels * (width / 8), NULL);

  gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
  if (width == 8) {
    gint8 *outbuffer = (gint8 *) map.data;
    gint *reorder_map = flacdec->channel_reorder_map;

    if (gdepth != depth) {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ =
              (gint8) (buffer[reorder_map[j]][i] << (gdepth - depth));
        }
      }
    } else {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ = (gint8) buffer[reorder_map[j]][i];
        }
      }
    }
  } else if (width == 16) {
    gint16 *outbuffer = (gint16 *) map.data;
    gint *reorder_map = flacdec->channel_reorder_map;

    if (gdepth != depth) {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ =
              (gint16) (buffer[reorder_map[j]][i] << (gdepth - depth));
        }
      }
    } else {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ = (gint16) buffer[reorder_map[j]][i];
        }
      }
    }
  } else if (width == 32) {
    gint32 *outbuffer = (gint32 *) map.data;
    gint *reorder_map = flacdec->channel_reorder_map;

    if (gdepth != depth) {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ =
              (gint32) (buffer[reorder_map[j]][i] << (gdepth - depth));
        }
      }
    } else {
      for (i = 0; i < samples; i++) {
        for (j = 0; j < channels; j++) {
          *outbuffer++ = (gint32) buffer[reorder_map[j]][i];
        }
      }
    }
  } else {
    g_assert_not_reached ();
  }
  gst_buffer_unmap (outbuf, &map);

  GST_DEBUG_OBJECT (flacdec, "pushing %d samples", samples);
  if (flacdec->error_count)
    flacdec->error_count--;

  ret = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (flacdec), outbuf, 1);

  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (flacdec, "finish_frame flow %s", gst_flow_get_name (ret));
  }

done:

  /* we act on the flow return value later in the handle_frame function, as we
   * don't want to mess up the internal decoder state by returning ABORT when
   * the error is in fact non-fatal (like a pad in flushing mode) and we want
   * to continue later. So just pretend everything's dandy and act later. */
  flacdec->last_flow = ret;

  return FLAC__STREAM_DECODER_WRITE_STATUS_CONTINUE;
}
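The three width-specific copy loops in gst_flac_dec_write differ only in the output integer type. A condensed sketch of the 16-bit case (a hypothetical helper, not part of gstflacdec.c): FLAC hands over planar per-channel arrays, the reorder map selects the FLAC source channel for each interleaved output slot, and samples decoded at a narrower depth are left-shifted up to the negotiated depth (a shift of zero when the depths match, which unifies the two branches above).

#include <glib.h>
#include <FLAC/format.h>

/* Sketch: interleave planar FLAC samples with channel reordering and
 * depth scaling, for the 16-bit container case. */
static void
interleave_reorder_s16 (gint16 * out, const FLAC__int32 * const in[],
    const gint * reorder_map, guint samples, guint channels,
    guint depth, guint gdepth)
{
  guint i, j;

  for (i = 0; i < samples; i++)
    for (j = 0; j < channels; j++)
      *out++ = (gint16) (in[reorder_map[j]][i] << (gdepth - depth));
}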
Example #24
0
static GstFlowReturn
gst_irtsp_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstIRTSPParse *IRTSPParse = GST_IRTSP_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader reader;
  gint off;
  GstMapInfo map;
  gboolean ret = FALSE;
  guint framesize;

  gst_buffer_map (buf, &map, GST_MAP_READ);
  if (G_UNLIKELY (map.size < 4))
    goto exit;

  gst_byte_reader_init (&reader, map.data, map.size);

  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000,
      0x24000000 + (IRTSPParse->channel_id << 16), 0, map.size);

  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

  /* didn't find anything that looks like a sync word, skip */
  if (off < 0) {
    *skipsize = map.size - 3;
    goto exit;
  }

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (off > 0) {
    *skipsize = off;
    goto exit;
  }

  framesize = GST_READ_UINT16_BE (map.data + 2) + 4;
  GST_LOG_OBJECT (parse, "got frame size %d", framesize);
  ret = TRUE;

  if (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (parse))) {
    GstCaps *caps;

    caps = gst_caps_new_empty_simple ("application/x-rtp");
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);
  }

exit:
  gst_buffer_unmap (buf, &map);

  if (ret && framesize <= map.size) {
    /* HACK HACK skip header.
     * could also ask baseparse to skip this,
     * but that would give us a discontinuity for free
     * which is a bit too much to have on all our packets */
    frame->out_buffer = gst_buffer_copy (frame->buffer);
    gst_buffer_resize (frame->out_buffer, 4, -1);
    GST_BUFFER_FLAG_UNSET (frame->out_buffer, GST_BUFFER_FLAG_DISCONT);
    return gst_base_parse_finish_frame (parse, frame, framesize);
  }

  return GST_FLOW_OK;
}
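The masked scan in this parser keys on the interleaved binary framing of RFC 2326 section 10.12: a '$' magic byte (0x24), the channel identifier, then a 16-bit big-endian payload length. A hedged standalone sketch of the header decode (the helper name is made up):

#include <gst/gst.h>

/* Sketch: decode one interleaved RTSP header at data[0..3]. Returns the
 * total frame size (payload length plus the 4-byte header), or 0 if the
 * magic byte or channel does not match. */
static guint
irtsp_frame_size (const guint8 * data, guint8 expected_channel)
{
  if (data[0] != 0x24 || data[1] != expected_channel)
    return 0;
  return GST_READ_UINT16_BE (data + 2) + 4;
}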
Example #25
0
static GstFlowReturn
gst_asteriskh263_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstAsteriskh263 *asteriskh263;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  asteriskh263 = GST_ASTERISK_H263 (parent);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  {
    gint payload_len;
    guint8 *payload;
    gboolean M;
    guint32 timestamp;
    guint32 samples;
    guint16 asterisk_len;
    GstRTPBuffer rtp = { NULL };
    GstMapInfo map;

    gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);

    M = gst_rtp_buffer_get_marker (&rtp);
    timestamp = gst_rtp_buffer_get_timestamp (&rtp);

    gst_rtp_buffer_unmap (&rtp);

    outbuf = gst_buffer_new_and_alloc (payload_len +
        GST_ASTERISKH263_HEADER_LEN);

    /* build the asterisk header */
    asterisk_len = payload_len;
    if (M)
      asterisk_len |= 0x8000;
    if (!asteriskh263->lastts)
      asteriskh263->lastts = timestamp;
    samples = timestamp - asteriskh263->lastts;
    asteriskh263->lastts = timestamp;

    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
    GST_ASTERISKH263_HEADER_TIMESTAMP (map.data) = g_htonl (samples);
    GST_ASTERISKH263_HEADER_LENGTH (map.data) = g_htons (asterisk_len);

    /* copy the data into place */
    memcpy (map.data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);

    gst_buffer_unmap (outbuf, &map);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
      GstCaps *caps;

      caps = gst_pad_get_pad_template_caps (asteriskh263->srcpad);
      gst_pad_set_caps (asteriskh263->srcpad, caps);
      gst_caps_unref (caps);
    }

    ret = gst_pad_push (asteriskh263->srcpad, outbuf);

    gst_buffer_unref (buf);
  }

  return ret;

bad_packet:
  {
    GST_DEBUG ("Packet does not validate");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
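The GST_ASTERISKH263_HEADER_* macros and GST_ASTERISKH263_HEADER_LEN are defined elsewhere in the element. A plausible reconstruction, inferred from how the chain function fills the header (an assumption, not the verbatim definitions): a 6-byte header carrying a 32-bit timestamp delta followed by a 16-bit length whose top bit holds the RTP marker.

#include <glib.h>

/* Assumed layout, mirroring the writes in the chain function above. */
#define GST_ASTERISKH263_HEADER_LEN 6
#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) (((guint32 *) (data))[0])
#define GST_ASTERISKH263_HEADER_LENGTH(data)    (((guint16 *) (data))[2])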
Example #26
0
static void
gst_omx_audio_enc_loop (GstOMXAudioEnc * self)
{
  GstOMXAudioEncClass *klass;
  GstOMXPort *port = self->enc_out_port;
  GstOMXBuffer *buf = NULL;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstOMXAcquireBufferReturn acq_return;
  OMX_ERRORTYPE err;

  klass = GST_OMX_AUDIO_ENC_GET_CLASS (self);

  acq_return = gst_omx_port_acquire_buffer (port, &buf);
  if (acq_return == GST_OMX_ACQUIRE_BUFFER_ERROR) {
    goto component_error;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_FLUSHING) {
    goto flushing;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_EOS) {
    goto eos;
  }

  if (!gst_pad_has_current_caps (GST_AUDIO_ENCODER_SRC_PAD (self))
      || acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
    GstAudioInfo *info =
        gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self));
    GstCaps *caps;

    GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");

    /* Reallocate all buffers */
    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
      err = gst_omx_port_set_enabled (port, FALSE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_deallocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 1 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

    }

    GST_AUDIO_ENCODER_STREAM_LOCK (self);

    caps = klass->get_caps (self, self->enc_out_port, info);
    if (!caps) {
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }

    GST_DEBUG_OBJECT (self, "Setting output caps: %" GST_PTR_FORMAT, caps);

    if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (self), caps)) {
      gst_caps_unref (caps);
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }
    gst_caps_unref (caps);

    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
      err = gst_omx_port_set_enabled (port, TRUE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_allocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_populate (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_mark_reconfigured (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;
    }

    /* Now get a buffer */
    if (acq_return != GST_OMX_ACQUIRE_BUFFER_OK) {
      return;
    }
  }

  g_assert (acq_return == GST_OMX_ACQUIRE_BUFFER_OK);
  if (!buf) {
    g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER));
    GST_AUDIO_ENCODER_STREAM_LOCK (self);
    goto eos;
  }

  GST_DEBUG_OBJECT (self, "Handling buffer: 0x%08x %" G_GUINT64_FORMAT,
      (guint) buf->omx_buf->nFlags, (guint64) buf->omx_buf->nTimeStamp);

  /* This prevents a deadlock between the srcpad stream
   * lock and the audio codec stream lock, if ::reset()
   * is called at the wrong time
   */
  if (gst_omx_port_is_flushing (self->enc_out_port)) {
    GST_DEBUG_OBJECT (self, "Flushing");
    gst_omx_port_release_buffer (self->enc_out_port, buf);
    goto flushing;
  }

  GST_AUDIO_ENCODER_STREAM_LOCK (self);

  if ((buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG)
      && buf->omx_buf->nFilledLen > 0) {
    GstCaps *caps;
    GstBuffer *codec_data;
    GstMapInfo map = GST_MAP_INFO_INIT;

    GST_DEBUG_OBJECT (self, "Handling codec data");
    caps =
        gst_caps_copy (gst_pad_get_current_caps (GST_AUDIO_ENCODER_SRC_PAD
            (self)));
    codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);

    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    memcpy (map.data,
        buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
        buf->omx_buf->nFilledLen);
    gst_buffer_unmap (codec_data, &map);

    gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
    if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (self), caps)) {
      gst_caps_unref (caps);
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }
    gst_caps_unref (caps);
    flow_ret = GST_FLOW_OK;
  } else if (buf->omx_buf->nFilledLen > 0) {
    GstBuffer *outbuf;
    guint n_samples;

    GST_DEBUG_OBJECT (self, "Handling output data");

    n_samples =
        klass->get_num_samples (self, self->enc_out_port,
        gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self)), buf);

    if (buf->omx_buf->nFilledLen > 0) {
      GstMapInfo map = GST_MAP_INFO_INIT;
      outbuf = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);

      gst_buffer_map (outbuf, &map, GST_MAP_WRITE);

      memcpy (map.data,
          buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
          buf->omx_buf->nFilledLen);
      gst_buffer_unmap (outbuf, &map);

    } else {
      outbuf = gst_buffer_new ();
    }

    GST_BUFFER_TIMESTAMP (outbuf) =
        gst_util_uint64_scale (buf->omx_buf->nTimeStamp, GST_SECOND,
        OMX_TICKS_PER_SECOND);
    if (buf->omx_buf->nTickCount != 0)
      GST_BUFFER_DURATION (outbuf) =
          gst_util_uint64_scale (buf->omx_buf->nTickCount, GST_SECOND,
          OMX_TICKS_PER_SECOND);

    flow_ret =
        gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (self),
        outbuf, n_samples);
  }

  GST_DEBUG_OBJECT (self, "Handled output data");

  GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));

  err = gst_omx_port_release_buffer (port, buf);
  if (err != OMX_ErrorNone)
    goto release_error;

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

  return;

component_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("OpenMAX component in error state %s (0x%08x)",
            gst_omx_component_get_last_error_string (self->enc),
            gst_omx_component_get_last_error (self->enc)));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    return;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    self->started = FALSE;
    return;
  }
eos:
  {
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
      flow_ret = GST_FLOW_OK;
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    } else {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);

    GST_AUDIO_ENCODER_STREAM_LOCK (self);
    self->downstream_flow_ret = flow_ret;

    /* Jump to the flow_error handler, which pauses the task for the
     * EOS case */
    if (flow_ret != GST_FLOW_OK)
      goto flow_error;

    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

    return;
  }
flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");

      gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."),
          ("stream stopped, reason %s", gst_flow_get_name (flow_ret)));

      gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    }
    self->started = FALSE;
    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
    return;
  }
reconfigure_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Unable to reconfigure output port"));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    return;
  }
caps_failed:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to set caps"));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    return;
  }
release_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to relase output buffer to component: %s (0x%08x)",
            gst_omx_error_to_string (err), err));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
    return;
  }
}
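The RECONFIGURE branch above is the standard OpenMAX output-port renegotiation handshake, spread across two blocks with the caps update in between. Condensed into one hedged sketch (error checks trimmed, helper name made up; the gst_omx_* calls are gst-omx internals, exactly the ones the loop uses):

#include "gstomx.h"             /* gst-omx internal header, as in the element */

/* Sketch: drain and tear the port down, let caps be renegotiated, then
 * bring the port back up and refill it with buffers. */
static OMX_ERRORTYPE
omx_out_port_reconfigure (GstOMXPort * port)
{
  gst_omx_port_set_enabled (port, FALSE);
  gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);
  gst_omx_port_deallocate_buffers (port);
  gst_omx_port_wait_enabled (port, 1 * GST_SECOND);
  /* ... negotiate and set src caps here ... */
  gst_omx_port_set_enabled (port, TRUE);
  gst_omx_port_allocate_buffers (port);
  gst_omx_port_wait_enabled (port, 5 * GST_SECOND);
  gst_omx_port_populate (port);
  return gst_omx_port_mark_reconfigured (port);
}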
Example #27
0
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  if (gst_pad_has_current_caps (pad))
    return gst_pad_get_current_caps (pad);

  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    GstCaps *templ = gst_pad_get_pad_template_caps (self->video_sinkpad);

    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = gst_pad_get_pad_template_caps (self->video_sinkpad);
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
  GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw");
    gst_structure_remove_fields (s, "format", "framerate", NULL);

    if (self->vinfo.width && self->vinfo.height)
      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16), NULL);
    gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
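Both getcaps functions in the shape-wipe element repeat one idiom: query the peer's caps, constrain them by the local pad template, and fall back to the template when there is no peer. Factored into a hedged sketch (helper name made up) that also keeps the reference counting balanced, since gst_pad_get_pad_template_caps() returns a new reference:

#include <gst/gst.h>

/* Sketch: peer caps intersected with the pad template, releasing every
 * intermediate reference. */
static GstCaps *
query_peer_caps_constrained (GstPad * pad)
{
  GstCaps *templ = gst_pad_get_pad_template_caps (pad);
  GstCaps *peer = gst_pad_peer_query_caps (pad, NULL);
  GstCaps *ret;

  if (peer) {
    ret = gst_caps_intersect (peer, templ);
    gst_caps_unref (peer);
    gst_caps_unref (templ);
  } else {
    ret = templ;
  }
  return ret;
}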
Example #28
0
/* Output buffer preparation ... if the buffer has no caps, and our allowed
 * output caps is fixed, then send the caps downstream, making sure caps are
 * sent before segment event.
 *
 * This ensures that caps event is sent if we can, so that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw,format=S16LE,rate=48000,channels=2 ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    GstBuffer ** buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* always return the input as output buffer */
  *buf = input;

  if (!gst_pad_has_current_caps (trans->sinkpad)) {
    /* No caps. See if the output pad only supports fixed caps */
    GstCapsFilter *filter = GST_CAPSFILTER (trans);
    GstCaps *out_caps;
    GList *pending_events = filter->pending_events;

    GST_LOG_OBJECT (trans, "Input pad does not have caps");

    filter->pending_events = NULL;

    out_caps = gst_pad_get_current_caps (trans->srcpad);
    if (out_caps == NULL) {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to srcpad", out_caps);

      if (!gst_pad_has_current_caps (trans->srcpad)) {
        if (gst_pad_set_caps (trans->srcpad, out_caps)) {
          if (pending_events) {
            GList *l;

            for (l = g_list_last (pending_events); l; l = l->prev) {
              GST_LOG_OBJECT (trans, "Forwarding %s event",
                  GST_EVENT_TYPE_NAME (l->data));
              GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans,
                  l->data);
            }
            g_list_free (pending_events);
            pending_events = NULL;
          }
        } else {
          ret = GST_FLOW_NOT_NEGOTIATED;
        }
      }

      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);
      gst_caps_unref (out_caps);
    } else {
      gchar *caps_str = gst_caps_to_string (out_caps);

      GST_DEBUG_OBJECT (trans, "Cannot choose caps. Have unfixed output caps %"
          GST_PTR_FORMAT, out_caps);
      gst_caps_unref (out_caps);

      GST_ELEMENT_ERROR (trans, STREAM, FORMAT,
          ("Filter caps do not completely specify the output format"),
          ("Output caps are unfixed: %s", caps_str));

      g_free (caps_str);
      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);

      ret = GST_FLOW_ERROR;
    }
  }

  return ret;
}
Example #29
0
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *templ, *ret, *tmp;

  if (gst_pad_has_current_caps (pad))
    return gst_pad_get_current_caps (pad);
  else if (gst_pad_has_current_caps (self->video_sinkpad))
    return gst_pad_get_current_caps (self->video_sinkpad);

  templ = gst_pad_get_pad_template_caps (self->video_sinkpad);
  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = templ;
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  if (self->vinfo.height && self->vinfo.width) {
    guint i, n;

    ret = gst_caps_make_writable (ret);
    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);
    }
  }

  tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
  GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);
    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);

    tmp = gst_caps_make_writable (intersection);
    n = gst_caps_get_size (tmp);

    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);

      gst_structure_remove_fields (s, "format", "framerate", NULL);
      gst_structure_set_name (s, "video/x-raw");
    }

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:

  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
Example #30
0
static gboolean
gst_gdk_pixbuf_dec_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstFlowReturn res = GST_FLOW_OK;
  gboolean ret = TRUE, forward = TRUE;
  GstGdkPixbufDec *pixbuf;

  pixbuf = GST_GDK_PIXBUF_DEC (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_gdk_pixbuf_dec_sink_setcaps (pixbuf, caps);
      forward = FALSE;
      break;
    }
    case GST_EVENT_EOS:
      if (pixbuf->pixbuf_loader != NULL) {
        gdk_pixbuf_loader_close (pixbuf->pixbuf_loader, NULL);
        res = gst_gdk_pixbuf_dec_flush (pixbuf);
        g_object_unref (G_OBJECT (pixbuf->pixbuf_loader));
        pixbuf->pixbuf_loader = NULL;
        /* as long as we don't have flow returns for event functions we need
         * to post an error here, or the application might never know that
         * things failed */
        if (res != GST_FLOW_OK && res != GST_FLOW_FLUSHING
            && res != GST_FLOW_EOS && res != GST_FLOW_NOT_LINKED) {
          GST_ELEMENT_ERROR (pixbuf, STREAM, FAILED, (NULL), ("Flow: %s",
                  gst_flow_get_name (res)));
          forward = FALSE;
          ret = FALSE;
        }
      }
      break;
    case GST_EVENT_FLUSH_STOP:
      g_list_free_full (pixbuf->pending_events,
          (GDestroyNotify) gst_event_unref);
      pixbuf->pending_events = NULL;
      /* Fall through */
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *segment;
      gst_event_parse_segment (event, &segment);
      if (segment->format == GST_FORMAT_BYTES)
        pixbuf->packetized = FALSE;
      else
        pixbuf->packetized = TRUE;
      if (pixbuf->pixbuf_loader != NULL) {
        gdk_pixbuf_loader_close (pixbuf->pixbuf_loader, NULL);
        g_object_unref (G_OBJECT (pixbuf->pixbuf_loader));
        pixbuf->pixbuf_loader = NULL;
      }
      break;
    }
    default:
      break;
  }
  if (forward) {
    if (!gst_pad_has_current_caps (pixbuf->srcpad) &&
        GST_EVENT_IS_SERIALIZED (event)
        && GST_EVENT_TYPE (event) > GST_EVENT_CAPS
        && GST_EVENT_TYPE (event) != GST_EVENT_FLUSH_STOP
        && GST_EVENT_TYPE (event) != GST_EVENT_EOS) {
      ret = TRUE;
      pixbuf->pending_events = g_list_prepend (pixbuf->pending_events, event);
    } else {
      ret = gst_pad_event_default (pad, parent, event);
    }
  } else {
    gst_event_unref (event);
  }
  return ret;
}
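Serialized events queued with g_list_prepend() above end up newest-first, so whatever later drains pixbuf->pending_events must walk the list backwards to replay them in arrival order, just as the capsfilter example did with g_list_last(). A hypothetical drain helper, not part of the element:

#include <gst/gst.h>
#include "gstgdkpixbufdec.h"    /* assumed element header for GstGdkPixbufDec */

/* Sketch: replay queued serialized events oldest-first once the src pad
 * has caps. gst_pad_push_event() consumes each event reference, so a
 * plain g_list_free() suffices afterwards. */
static void
gst_gdk_pixbuf_dec_drain_pending_events (GstGdkPixbufDec * pixbuf)
{
  GList *l;

  for (l = g_list_last (pixbuf->pending_events); l != NULL; l = l->prev)
    gst_pad_push_event (pixbuf->srcpad, GST_EVENT_CAST (l->data));

  g_list_free (pixbuf->pending_events);
  pixbuf->pending_events = NULL;
}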