Example #1
static gboolean
gst_droidcamsrc_mode_negotiate_pad (GstDroidCamSrcMode * mode, GstPad * pad,
    gboolean force)
{
  GstDroidCamSrcPad *data = gst_pad_get_element_private (pad);
  /* take pad lock */
  g_mutex_lock (&data->lock);

  if (!force) {
    if (!gst_pad_check_reconfigure (data->pad)) {
      g_mutex_unlock (&data->lock);
      return TRUE;
    }
  }

  /*
   * stream start
   * we must send it before we send our CAPS event
   */
  if (G_UNLIKELY (data->open_stream)) {
    gchar *stream_id;
    GstEvent *event;

    stream_id =
        gst_pad_create_stream_id (data->pad, GST_ELEMENT_CAST (mode->src),
        GST_PAD_NAME (data->pad));

    GST_DEBUG_OBJECT (pad, "Pushing STREAM_START for pad");
    event = gst_event_new_stream_start (stream_id);
    gst_event_set_group_id (event, gst_util_group_id_next ());
    if (!gst_pad_push_event (data->pad, event)) {
      GST_ERROR_OBJECT (mode->src,
          "failed to push STREAM_START event for pad %s",
          GST_PAD_NAME (data->pad));
    }

    g_free (stream_id);
    data->open_stream = FALSE;
  }

  /* negotiate */
  if (!data->negotiate (data)) {
    GST_ELEMENT_ERROR (mode->src, STREAM, FORMAT, (NULL),
        ("failed to negotiate %s.", GST_PAD_NAME (data->pad)));

    g_mutex_unlock (&data->lock);
    return FALSE;
  }

  /* toss pad queue */
  g_queue_foreach (data->queue, (GFunc) gst_buffer_unref, NULL);
  g_queue_clear (data->queue);

  /* unlock */
  g_mutex_unlock (&data->lock);

  return TRUE;
}
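The open_stream branch in Example #1 follows the sticky-event ordering rule: STREAM_START must precede CAPS, which must precede SEGMENT, all before the first buffer. A minimal sketch of that ordering, assuming a hypothetical my_pad_push_initial_events() helper:

static void
my_pad_push_initial_events (GstPad * srcpad, GstElement * element,
    GstCaps * caps, const GstSegment * segment)
{
  gchar *stream_id = gst_pad_create_stream_id (srcpad, element, NULL);
  GstEvent *event = gst_event_new_stream_start (stream_id);

  /* the group id ties this stream to the element's other streams */
  gst_event_set_group_id (event, gst_util_group_id_next ());
  gst_pad_push_event (srcpad, event);

  gst_pad_push_event (srcpad, gst_event_new_caps (caps));
  gst_pad_push_event (srcpad, gst_event_new_segment (segment));

  g_free (stream_id);
}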
Example #2
/* make sure we are negotiated */
static GstFlowReturn
ensure_negotiated (GstMonoscope * monoscope)
{
  gboolean reconfigure;

  reconfigure = gst_pad_check_reconfigure (monoscope->srcpad);

  /* we don't know an output format yet, pick one */
  if (reconfigure || !gst_pad_has_current_caps (monoscope->srcpad)) {
    if (!gst_monoscope_src_negotiate (monoscope))
      return GST_FLOW_NOT_NEGOTIATED;
  }
  return GST_FLOW_OK;
}
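gst_pad_check_reconfigure() returns TRUE, and clears the pad's reconfigure flag, only if something previously marked the pad. A sketch of the usual counterpart in a hypothetical downstream element: pushing a RECONFIGURE event upstream makes the core set the flag on the source pad that receives it.

static void
my_sink_request_renegotiation (GstPad * sinkpad)
{
  /* RECONFIGURE travels upstream; the core sets the reconfigure flag on
   * each source pad that receives it, so the next
   * gst_pad_check_reconfigure () there returns TRUE */
  gst_pad_push_event (sinkpad, gst_event_new_reconfigure ());
}

An element can also mark its own pad directly with gst_pad_mark_reconfigure(), as later examples do on negotiation failure.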
Example #3
static gboolean
gst_v4l2src_event (GstBaseSrc * src, GstEvent * event)
{
  GST_DEBUG_OBJECT (src, "handle event %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_RECONFIGURE:
      gst_pad_check_reconfigure (GST_BASE_SRC_PAD (src));
      break;
    default:
      break;
  }
  return GST_BASE_SRC_CLASS (parent_class)->event (src, event);
}
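Example #3 merely consumes the flag in the event handler. A source that acts on it typically does so from its streaming function instead; a hedged sketch, where my_src_negotiate() and my_src_fill() stand in for element-specific helpers:

static GstFlowReturn
my_src_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstPad *srcpad = GST_BASE_SRC_PAD (psrc);

  /* renegotiate before producing the next buffer if downstream asked */
  if (gst_pad_check_reconfigure (srcpad)) {
    if (!my_src_negotiate (psrc)) {
      /* keep the flag set so the next cycle retries */
      gst_pad_mark_reconfigure (srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  return my_src_fill (psrc, buf);
}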
Example #4
static GstFlowReturn
gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
    gboolean flush)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (vtdec);

  /* negotiate now so that we know whether we need to use the GL upload meta or
   * not */
  if (gst_pad_check_reconfigure (decoder->srcpad)) {
    if (!gst_video_decoder_negotiate (decoder)) {
      gst_pad_mark_reconfigure (decoder->srcpad);
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      return ret;
    }
  }

  if (drain)
    VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);

  /* push a buffer if there are enough frames to guarantee that we push in PTS
   * order
   */
  while ((g_async_queue_length (vtdec->reorder_queue) >=
          vtdec->reorder_queue_length) || drain || flush) {
    frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);

    /* we need to check this in case reorder_queue_length=0 (jpeg for
     * example) or we're draining/flushing
     */
    if (frame) {
      if (flush || frame->flags & VTDEC_FRAME_FLAG_SKIP)
        gst_video_decoder_release_frame (decoder, frame);
      else if (frame->flags & VTDEC_FRAME_FLAG_DROP)
        gst_video_decoder_drop_frame (decoder, frame);
      else
        ret = gst_video_decoder_finish_frame (decoder, frame);
    }

    if (!frame || ret != GST_FLOW_OK)
      break;
  }

  return ret;
}
Example #5
static GstFlowReturn
gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
    gboolean flush)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (vtdec);

  /* FIXME: Instead of this, implement GstVideoDecoder::negotiate() and
   * just call gst_video_decoder_negotiate()
   */
  /* negotiate now so that we know whether we need to use the GL upload meta or
   * not */
  if (gst_pad_check_reconfigure (decoder->srcpad))
    gst_video_decoder_negotiate (decoder);

  if (drain)
    VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);

  /* push a buffer if there are enough frames to guarantee that we push in PTS
   * order
   */
  while ((g_async_queue_length (vtdec->reorder_queue) >=
          vtdec->reorder_queue_length) || drain || flush) {
    frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);
    if (frame && vtdec->texture_cache != NULL) {
      frame->output_buffer =
          gst_core_video_texture_cache_get_gl_buffer (vtdec->texture_cache,
          frame->output_buffer);
      if (!frame->output_buffer)
        GST_ERROR_OBJECT (vtdec, "couldn't get textures from buffer");
    }

    /* we need to check this in case reorder_queue_length=0 (jpeg for
     * example) or we're draining/flushing
     */
    if (frame) {
      if (flush)
        gst_video_decoder_drop_frame (decoder, frame);
      else
        ret = gst_video_decoder_finish_frame (decoder, frame);
    }

    if (!frame || ret != GST_FLOW_OK)
      break;
  }

  return ret;
}
Example #6
static gboolean
gst_vtdec_negotiate_downstream (GstVTDec * self)
{
  gboolean result;
  GstCaps *caps;

  if (!gst_pad_check_reconfigure (self->srcpad))
    return TRUE;

  caps = gst_video_info_to_caps (&self->vinfo);
  result = gst_pad_set_caps (self->srcpad, caps);
  gst_caps_unref (caps);

  return result;
}
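Example #6 returns early when no reconfiguration is pending, so caps are only pushed again after something marks the pad. A small sketch of how a format change can trigger that, assuming the element owns a srcpad and a GstVideoInfo (helper name hypothetical):

static void
my_dec_set_output_format (GstPad * srcpad, GstVideoInfo * vinfo,
    GstVideoFormat format, gint width, gint height)
{
  gst_video_info_set_format (vinfo, format, width, height);
  /* the next gst_pad_check_reconfigure () on srcpad now returns TRUE */
  gst_pad_mark_reconfigure (srcpad);
}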
Example #7
static GstFlowReturn
gst_dvdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDVDec *dvdec;
  guint8 *inframe;
  guint8 *outframe_ptrs[3];
  gint outframe_pitches[3];
  GstMapInfo map;
  GstVideoFrame frame;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  guint length;
  guint64 cstart = GST_CLOCK_TIME_NONE, cstop = GST_CLOCK_TIME_NONE;
  gboolean PAL, wide;

  dvdec = GST_DVDEC (parent);

  gst_buffer_map (buf, &map, GST_MAP_READ);
  inframe = map.data;

  /* the buffer should be at least the size of one NTSC frame; that is
   * enough to decode the header */
  if (G_UNLIKELY (map.size < NTSC_BUFFER))
    goto wrong_size;

  /* preliminary dropping. unref and return if outside of configured segment */
  if ((dvdec->segment.format == GST_FORMAT_TIME) &&
      (!(gst_segment_clip (&dvdec->segment, GST_FORMAT_TIME,
                  GST_BUFFER_TIMESTAMP (buf),
                  GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf),
                  &cstart, &cstop))))
    goto dropping;

  if (G_UNLIKELY (dv_parse_header (dvdec->decoder, inframe) < 0))
    goto parse_header_error;

  /* get size */
  PAL = dv_system_50_fields (dvdec->decoder);
  wide = dv_format_wide (dvdec->decoder);

  /* check the buffer is of right size after we know if we are
   * dealing with PAL or NTSC */
  length = (PAL ? PAL_BUFFER : NTSC_BUFFER);
  if (G_UNLIKELY (map.size < length))
    goto wrong_size;

  dv_parse_packs (dvdec->decoder, inframe);

  if (dvdec->video_offset % dvdec->drop_factor != 0)
    goto skip;

  /* renegotiate on change */
  if (PAL != dvdec->PAL || wide != dvdec->wide) {
    dvdec->src_negotiated = FALSE;
    dvdec->PAL = PAL;
    dvdec->wide = wide;
  }

  dvdec->height = (dvdec->PAL ? PAL_HEIGHT : NTSC_HEIGHT);

  dvdec->interlaced = !dv_is_progressive (dvdec->decoder);

  /* negotiate if not done yet */
  if (!dvdec->src_negotiated) {
    if (!gst_dvdec_src_negotiate (dvdec))
      goto not_negotiated;
  }

  if (gst_pad_check_reconfigure (dvdec->srcpad)) {
    GstCaps *caps;

    caps = gst_pad_get_current_caps (dvdec->srcpad);
    if (!caps)
      goto not_negotiated;

    gst_dvdec_negotiate_pool (dvdec, caps, &dvdec->vinfo);
    gst_caps_unref (caps);
  }

  if (dvdec->need_segment) {
    gst_pad_push_event (dvdec->srcpad, gst_event_new_segment (&dvdec->segment));
    dvdec->need_segment = FALSE;
  }

  ret = gst_buffer_pool_acquire_buffer (dvdec->pool, &outbuf, NULL);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto no_buffer;

  gst_video_frame_map (&frame, &dvdec->vinfo, outbuf, GST_MAP_WRITE);

  outframe_ptrs[0] = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  outframe_pitches[0] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);

  /* the rest only matters for YUY2 */
  if (dvdec->bpp < 3) {
    outframe_ptrs[1] = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
    outframe_ptrs[2] = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

    outframe_pitches[1] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
    outframe_pitches[2] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 2);
  }

  GST_DEBUG_OBJECT (dvdec, "decoding and pushing buffer");
  dv_decode_full_frame (dvdec->decoder, inframe,
      e_dv_color_yuv, outframe_ptrs, outframe_pitches);

  gst_video_frame_unmap (&frame);

  GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);

  GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);
  GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buf);

  /* FIXME : Compute values when using non-TIME segments,
   * but for the moment make sure we at least don't set bogus values
   */
  if (GST_CLOCK_TIME_IS_VALID (cstart)) {
    GST_BUFFER_TIMESTAMP (outbuf) = cstart;
    if (GST_CLOCK_TIME_IS_VALID (cstop))
      GST_BUFFER_DURATION (outbuf) = cstop - cstart;
  }

  ret = gst_pad_push (dvdec->srcpad, outbuf);

skip:
  dvdec->video_offset++;

done:
  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Input buffer too small"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Error parsing DV header"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
not_negotiated:
  {
    GST_DEBUG_OBJECT (dvdec, "could not negotiate output");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
no_buffer:
  {
    GST_DEBUG_OBJECT (dvdec, "could not allocate buffer");
    goto done;
  }

dropping:
  {
    GST_DEBUG_OBJECT (dvdec,
        "dropping buffer since it's out of the configured segment");
    goto done;
  }
}
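Example #7 uses the flag to renegotiate only the buffer pool while the current caps stay valid. gst_dvdec_negotiate_pool() itself is not shown; a hedged sketch of what such a helper usually does (the helper name and the buffer_size parameter are assumptions, not the dvdec implementation):

static gboolean
my_dec_negotiate_pool (GstPad * srcpad, GstCaps * caps, guint buffer_size,
    GstBufferPool ** pool)
{
  GstQuery *query = gst_query_new_allocation (caps, TRUE);
  GstBufferPool *newpool = NULL;
  GstStructure *config;
  guint size = buffer_size, min = 0, max = 0;

  /* a failed peer query is not fatal, just use the defaults */
  if (!gst_pad_peer_query (srcpad, query))
    GST_DEBUG ("ALLOCATION query failed, using defaults");

  if (gst_query_get_n_allocation_pools (query) > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &newpool, &size, &min,
        &max);
  if (newpool == NULL)
    newpool = gst_buffer_pool_new ();

  config = gst_buffer_pool_get_config (newpool);
  gst_buffer_pool_config_set_params (config, caps, MAX (size, buffer_size),
      min, max);
  gst_buffer_pool_set_config (newpool, config);
  gst_buffer_pool_set_active (newpool, TRUE);

  /* swap in the new pool and deactivate the old one */
  if (*pool != NULL) {
    gst_buffer_pool_set_active (*pool, FALSE);
    gst_object_unref (*pool);
  }
  *pool = newpool;

  gst_query_unref (query);
  return TRUE;
}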
Example #8
static GstBuffer *
gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpVRawDepay *rtpvrawdepay;
  guint8 *payload, *p0, *yp, *up, *vp, *headers;
  guint32 timestamp;
  guint cont, ystride, uvstride, pgroup, payload_len;
  gint width, height, xinc, yinc;
  GstRTPBuffer rtp = { NULL };
  GstVideoFrame *frame;
  gboolean marker;
  GstBuffer *outbuf = NULL;

  rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  timestamp = gst_rtp_buffer_get_timestamp (&rtp);

  if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
    GstBuffer *new_buffer;
    GstFlowReturn ret;

    GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
    /* new timestamp, flush old buffer and create new output buffer */
    if (rtpvrawdepay->outbuf) {
      gst_video_frame_unmap (&rtpvrawdepay->frame);
      gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
      rtpvrawdepay->outbuf = NULL;
    }

    if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
      GstCaps *caps;

      caps =
          gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
      gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
          &rtpvrawdepay->vinfo);
      gst_caps_unref (caps);
    }

    ret =
        gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;

    /* clear timestamp from alloc... */
    GST_BUFFER_TIMESTAMP (new_buffer) = -1;

    if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
            new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
      gst_buffer_unref (new_buffer);
      goto invalid_frame;
    }

    rtpvrawdepay->outbuf = new_buffer;
    rtpvrawdepay->timestamp = timestamp;
  }

  frame = &rtpvrawdepay->frame;

  g_assert (frame->buffer != NULL);

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);

  pgroup = rtpvrawdepay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
  xinc = rtpvrawdepay->xinc;
  yinc = rtpvrawdepay->yinc;

  payload = gst_rtp_buffer_get_payload (&rtp);
  payload_len = gst_rtp_buffer_get_payload_len (&rtp);

  if (payload_len < 3)
    goto short_packet;

  /* skip extended seqnum */
  payload += 2;
  payload_len -= 2;

  /* remember header position */
  headers = payload;

  /* find data start */
  do {
    if (payload_len < 6)
      goto short_packet;

    cont = payload[4] & 0x80;

    payload += 6;
    payload_len -= 6;
  } while (cont);

  while (TRUE) {
    guint length, line, offs, plen;
    guint8 *datap;

    /* stop when we run out of data */
    if (payload_len == 0)
      break;

    /* read length and cont. This should work because we iterated the headers
     * above. */
    length = (headers[0] << 8) | headers[1];
    line = ((headers[2] & 0x7f) << 8) | headers[3];
    offs = ((headers[4] & 0x7f) << 8) | headers[5];
    cont = headers[4] & 0x80;
    headers += 6;

    /* length must be a multiple of pgroup */
    if (length % pgroup != 0)
      goto wrong_length;

    if (length > payload_len)
      length = payload_len;

    /* sanity check */
    if (line > (height - yinc)) {
      GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
      goto next;
    }
    if (offs > (width - xinc)) {
      GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
      goto next;
    }

    /* calculate the maximum amount of bytes we can use per line */
    if (offs + ((length / pgroup) * xinc) > width) {
      plen = ((width - offs) * pgroup) / xinc;
      GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
          length, offs, plen);
    } else
      plen = length;

    GST_LOG_OBJECT (depayload,
        "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
        line, offs, payload_len);

    switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
      case GST_VIDEO_FORMAT_RGB:
      case GST_VIDEO_FORMAT_RGBA:
      case GST_VIDEO_FORMAT_BGR:
      case GST_VIDEO_FORMAT_BGRA:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_UYVP:
        /* samples are packed just like GStreamer packs them */
        offs /= xinc;
        datap = p0 + (line * ystride) + (offs * pgroup);

        memcpy (datap, payload, plen);
        break;
      case GST_VIDEO_FORMAT_AYUV:
      {
        gint i;
        guint8 *p;

        datap = p0 + (line * ystride) + (offs * 4);
        p = payload;

        /* samples are packed in order Cb-Y-Cr for both interlaced and
         * progressive frames */
        for (i = 0; i < plen; i += pgroup) {
          *datap++ = 0;
          *datap++ = p[1];
          *datap++ = p[0];
          *datap++ = p[2];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_I420:
      {
        gint i;
        guint uvoff;
        guint8 *yd1p, *yd2p, *udp, *vdp, *p;

        yd1p = yp + (line * ystride) + (offs);
        yd2p = yd1p + ystride;
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ...  */
        for (i = 0; i < plen; i += pgroup) {
          *yd1p++ = p[0];
          *yd1p++ = p[1];
          *yd2p++ = p[2];
          *yd2p++ = p[3];
          *udp++ = p[4];
          *vdp++ = p[5];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_Y41B:
      {
        gint i;
        guint uvoff;
        guint8 *ydp, *udp, *vdp, *p;

        ydp = yp + (line * ystride) + (offs);
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
         * and progressive scan lines */
        for (i = 0; i < plen; i += pgroup) {
          *udp++ = p[0];
          *ydp++ = p[1];
          *ydp++ = p[2];
          *vdp++ = p[3];
          *ydp++ = p[4];
          *ydp++ = p[5];
          p += pgroup;
        }
        break;
      }
      default:
        goto unknown_sampling;
    }

  next:
    if (!cont)
      break;

    payload += length;
    payload_len -= length;
  }

  marker = gst_rtp_buffer_get_marker (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  if (marker) {
    GST_LOG_OBJECT (depayload, "marker, flushing frame");
    gst_video_frame_unmap (&rtpvrawdepay->frame);
    outbuf = rtpvrawdepay->outbuf;
    rtpvrawdepay->outbuf = NULL;
    rtpvrawdepay->timestamp = -1;
  }
  return outbuf;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
invalid_frame:
  {
    GST_ERROR_OBJECT (depayload, "could not map video frame");
    return NULL;
  }
wrong_length:
  {
    GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
short_packet:
  {
    GST_WARNING_OBJECT (depayload, "short packet");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
}
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      /* don't leak the caps on the failure path */
      gst_caps_unref (current_caps);
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);

  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
static GstFlowReturn
gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
    GstBufferList * bufferlist)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean changed = FALSE;
  struct BufferListData bd;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      /* don't leak the caps on the failure path */
      gst_caps_unref (current_caps);
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_list_unref (bufferlist);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);

  padpriv = gst_pad_get_element_private (pad);
  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    ret = GST_FLOW_NOT_LINKED;
    gst_buffer_list_unref (bufferlist);
    goto out;
  }

  bd.rtp_mux = rtp_mux;
  bd.padpriv = padpriv;
  bd.drop = FALSE;

  bufferlist = gst_buffer_list_make_writable (bufferlist);
  gst_buffer_list_foreach (bufferlist, process_list_item, &bd);

  if (!bd.drop && pad != rtp_mux->last_pad) {
    changed = TRUE;
    g_clear_object (&rtp_mux->last_pad);
    rtp_mux->last_pad = g_object_ref (pad);
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (bd.drop) {
    gst_buffer_list_unref (bufferlist);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push_list (rtp_mux->srcpad, bufferlist);
  }

out:

  return ret;
}
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below as we have one reference in the sample and one in
   * this function.
   *
   * If the sample is not writable itself then the application kept a
   * reference to it.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);

    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
/* Based on gstbasetextoverlay.c */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accepts overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediately; GstBaseTransform needs them to get a reply
     * from the allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation meta has composition */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width, height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatibility, we prefer blitting if the downstream
   * allocation does not support the meta. Otherwise we prefer attaching,
   * and fail the negotiation in the unlikely case we are forced to blit
   * but the format isn't supported. */

  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps were already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
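Taken together, the examples converge on one idiom: consume the flag with gst_pad_check_reconfigure(), renegotiate, and on failure re-mark the pad so the next buffer retries, reporting GST_FLOW_FLUSHING instead of GST_FLOW_NOT_NEGOTIATED when the pad is flushing (as Example #4 and gst_overlay_composition_sink_chain() do). A condensed sketch, with my_element_negotiate() standing in for the element-specific negotiation:

static GstFlowReturn
my_element_ensure_negotiated (GstElement * element, GstPad * srcpad)
{
  /* returns TRUE and clears the flag only if the pad was marked */
  if (!gst_pad_check_reconfigure (srcpad))
    return GST_FLOW_OK;

  if (!my_element_negotiate (element)) {
    /* keep the flag set so the next buffer retries negotiation */
    gst_pad_mark_reconfigure (srcpad);
    if (GST_PAD_IS_FLUSHING (srcpad))
      return GST_FLOW_FLUSHING;
    return GST_FLOW_NOT_NEGOTIATED;
  }

  return GST_FLOW_OK;
}

Note also the trick at the top of gst_overlay_composition_negotiate(): calling gst_pad_check_reconfigure() from inside the negotiate function itself clears a pending flag, so a renegotiation that is already in progress is not immediately re-triggered.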