Example No. 1
static GstFlowReturn
gst_png_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);

  if (!pngparse->sent_codec_tag) {
    GstTagList *taglist;
    GstCaps *caps;

    /* codec tag */
    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
    if (G_UNLIKELY (caps == NULL)) {
      if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
        GST_INFO_OBJECT (parse, "Src pad is flushing");
        return GST_FLOW_FLUSHING;
      } else {
        GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
        return GST_FLOW_NOT_NEGOTIATED;
      }
    }

    taglist = gst_tag_list_new_empty ();
    gst_pb_utils_add_codec_description_to_tag_list (taglist,
        GST_TAG_VIDEO_CODEC, caps);
    gst_caps_unref (caps);

    gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
    gst_tag_list_unref (taglist);

    /* also signals the end of first-frame processing */
    pngparse->sent_codec_tag = TRUE;
  }

  return GST_FLOW_OK;
}
Example No. 2
static GstFlowReturn
allocate_output_buffer (GstMsdkDec * thiz, GstBuffer ** buffer)
{
  GstFlowReturn flow;
  GstVideoCodecFrame *frame;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (thiz);

  frame = gst_msdkdec_get_oldest_frame (decoder);
  if (!frame) {
    if (GST_PAD_IS_FLUSHING (decoder->srcpad))
      return GST_FLOW_FLUSHING;
    else
      return GST_FLOW_ERROR;
  }

  if (!frame->output_buffer) {
    flow = gst_video_decoder_allocate_output_frame (decoder, frame);
    if (flow != GST_FLOW_OK) {
      gst_video_codec_frame_unref (frame);
      return flow;
    }
  }

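  /* Give the caller its own reference to the decoded output buffer and
   * clear the frame's reference to it */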
  *buffer = gst_buffer_ref (frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);
  gst_video_codec_frame_unref (frame);
  return GST_FLOW_OK;
}
Example No. 3
static GstFlowReturn
gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
    gboolean flush)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (vtdec);

  /* negotiate now so that we know whether we need to use the GL upload meta or
   * not */
  if (gst_pad_check_reconfigure (decoder->srcpad)) {
    if (!gst_video_decoder_negotiate (decoder)) {
      gst_pad_mark_reconfigure (decoder->srcpad);
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      return ret;
    }
  }

  if (drain)
    VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);

  /* push a buffer if there are enough frames to guarantee that we push in PTS
   * order
   */
  while ((g_async_queue_length (vtdec->reorder_queue) >=
          vtdec->reorder_queue_length) || drain || flush) {
    frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);

    /* we need to check this in case reorder_queue_length=0 (jpeg for
     * example) or we're draining/flushing
     */
    if (frame) {
      if (flush || frame->flags & VTDEC_FRAME_FLAG_SKIP)
        gst_video_decoder_release_frame (decoder, frame);
      else if (frame->flags & VTDEC_FRAME_FLAG_DROP)
        gst_video_decoder_drop_frame (decoder, frame);
      else
        ret = gst_video_decoder_finish_frame (decoder, frame);
    }

    if (!frame || ret != GST_FLOW_OK)
      break;
  }

  return ret;
}
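Example No. 4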
static GstFlowReturn gst_scream_queue_sink_chain(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
    GstScreamQueue *self = GST_SCREAM_QUEUE(parent);
    GstRTPBuffer rtp_buffer = GST_RTP_BUFFER_INIT;
    GstFlowReturn flow_ret = GST_FLOW_OK;
    GstScreamDataQueueRtpItem *rtp_item;

    if (GST_PAD_IS_FLUSHING(pad)) {
        /* Refuse data while the sink pad is flushing; a chain function owns
         * the buffer, so drop our reference before bailing out */
        gst_buffer_unref(buffer);
        flow_ret = GST_FLOW_FLUSHING;
        goto end;
    }

    if (!gst_rtp_buffer_map(buffer, GST_MAP_READ, &rtp_buffer)) {
        /* Not a valid RTP buffer; drop it and report an error */
        gst_buffer_unref(buffer);
        flow_ret = GST_FLOW_ERROR;
        goto end;
    }

    rtp_item = g_slice_new(GstScreamDataQueueRtpItem);
    ((GstDataQueueItem *)rtp_item)->object = GST_MINI_OBJECT(buffer);
    ((GstDataQueueItem *)rtp_item)->size = gst_buffer_get_size(buffer);
    ((GstDataQueueItem *)rtp_item)->visible = TRUE;
    ((GstDataQueueItem *)rtp_item)->duration = GST_BUFFER_DURATION(buffer);
    ((GstDataQueueItem *)rtp_item)->destroy = (GDestroyNotify) gst_scream_data_queue_rtp_item_free;

    ((GstScreamDataQueueItem *)rtp_item)->type = GST_SCREAM_DATA_QUEUE_ITEM_TYPE_RTP;
    ((GstScreamDataQueueItem *)rtp_item)->rtp_ssrc = gst_rtp_buffer_get_ssrc(&rtp_buffer);
    rtp_item->rtp_pt = gst_rtp_buffer_get_payload_type(&rtp_buffer);
    rtp_item->gst_ts = GST_BUFFER_PTS(buffer);
    rtp_item->rtp_seq = gst_rtp_buffer_get_seq(&rtp_buffer);
    rtp_item->rtp_ts = gst_rtp_buffer_get_timestamp(&rtp_buffer);
    rtp_item->rtp_marker = gst_rtp_buffer_get_marker(&rtp_buffer);
    rtp_item->rtp_payload_size = gst_rtp_buffer_get_payload_len(&rtp_buffer);
    rtp_item->enqueued_time = get_gst_time_us(self);
    gst_rtp_buffer_unmap(&rtp_buffer);

    if (self->pass_through) {
        rtp_item->adapted = FALSE;
        GST_LOG_OBJECT(self, "passing through: pt = %u, seq: %u, pass: %u", rtp_item->rtp_pt, rtp_item->rtp_seq, self->pass_through);
        gst_data_queue_push(self->approved_packets, (GstDataQueueItem *)rtp_item);
        goto end;
    }

    GST_LOG_OBJECT(self, "queuing: pt = %u, seq: %u, pass: %u", rtp_item->rtp_pt, rtp_item->rtp_seq, self->pass_through);
    g_async_queue_push(self->incoming_packets, (gpointer)rtp_item);

end:
    return flow_ret;
}
Example No. 5
static void
kms_element_set_target_on_linked (GstPad * pad, GstPad * peer,
    GstElement * element)
{
  GstPad *target;

  target = gst_element_get_request_pad (element, "src_%u");

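  /* If the peer pad is stuck in flushing state, send flush-start followed
   * by flush-stop to reset it before linking the target */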
  if (GST_PAD_IS_FLUSHING (peer)) {
    gst_pad_send_event (peer, gst_event_new_flush_start ());
    gst_pad_send_event (peer, gst_event_new_flush_stop (FALSE));
  }

  GST_DEBUG_OBJECT (pad, "Setting target %" GST_PTR_FORMAT, target);

  if (!gst_ghost_pad_set_target (GST_GHOST_PAD (pad), target)) {
    GST_ERROR_OBJECT (pad, "Can not set target pad");
  }

  g_object_unref (target);
}
Example No. 6
static gboolean
gst_dvd_spu_subpic_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDVDSpu *dvdspu = (GstDVDSpu *) parent;
  gboolean res = TRUE;

  /* Some events on the subpicture sink pad just get ignored, like 
   * FLUSH_START */
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      res = gst_dvd_spu_subpic_set_caps (pad, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
    {
      const GstStructure *structure = gst_event_get_structure (event);
      gboolean need_push;

      if (!gst_structure_has_name (structure, "application/x-gst-dvd")) {
        res = gst_pad_event_default (pad, parent, event);
        break;
      }

      DVD_SPU_LOCK (dvdspu);
      if (GST_EVENT_IS_SERIALIZED (event)) {
        SpuPacket *spu_packet = g_new0 (SpuPacket, 1);
        GST_DEBUG_OBJECT (dvdspu,
            "Enqueueing DVD event on subpicture pad for later");
        spu_packet->event = event;
        g_queue_push_tail (dvdspu->pending_spus, spu_packet);
      } else {
        gst_dvd_spu_handle_dvd_event (dvdspu, event);
      }

      /* If the handle_dvd_event generated a pending frame, we
       * need to synchronise with the video pad's stream lock and push it.
       * This requires some dancing to preserve locking order and handle
       * flushes correctly */
      need_push = (dvdspu->pending_frame != NULL);
      DVD_SPU_UNLOCK (dvdspu);
      if (need_push) {
        GstBuffer *to_push = NULL;
        gboolean flushing;

        GST_LOG_OBJECT (dvdspu, "Going for stream lock");
        GST_PAD_STREAM_LOCK (dvdspu->videosinkpad);
        GST_LOG_OBJECT (dvdspu, "Got stream lock");
        GST_OBJECT_LOCK (dvdspu->videosinkpad);
        flushing = GST_PAD_IS_FLUSHING (dvdspu->videosinkpad);
        GST_OBJECT_UNLOCK (dvdspu->videosinkpad);

        DVD_SPU_LOCK (dvdspu);
        if (dvdspu->pending_frame == NULL || flushing) {
          /* Got flushed while waiting for the stream lock */
          DVD_SPU_UNLOCK (dvdspu);
        } else {
          to_push = dvdspu->pending_frame;
          dvdspu->pending_frame = NULL;

          DVD_SPU_UNLOCK (dvdspu);
          gst_pad_push (dvdspu->srcpad, to_push);
        }
        GST_LOG_OBJECT (dvdspu, "Dropping stream lock");
        GST_PAD_STREAM_UNLOCK (dvdspu->videosinkpad);
      }

      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment seg;

      gst_event_copy_segment (event, &seg);

      /* Only print updates if they have an end time (don't print start_time
       * updates) */
      GST_DEBUG_OBJECT (dvdspu, "subpic pad Segment: %" GST_SEGMENT_FORMAT,
          &seg);

      DVD_SPU_LOCK (dvdspu);

      dvdspu->subp_seg = seg;
      GST_LOG_OBJECT (dvdspu, "Subpicture segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_GAP:
    {
      GstClockTime timestamp, duration;
      gst_event_parse_gap (event, &timestamp, &duration);
      if (GST_CLOCK_TIME_IS_VALID (duration))
        timestamp += duration;

      DVD_SPU_LOCK (dvdspu);
      dvdspu->subp_seg.position = timestamp;
      GST_LOG_OBJECT (dvdspu, "Received GAP. Segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_event_unref (event);
      goto done;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (dvdspu, "Have flush-stop event on SPU pad");
      DVD_SPU_LOCK (dvdspu);
      gst_segment_init (&dvdspu->subp_seg, GST_FORMAT_UNDEFINED);
      gst_dvd_spu_flush_spu_info (dvdspu, TRUE);
      DVD_SPU_UNLOCK (dvdspu);

      /* We don't forward flushes on the spu pad */
      gst_event_unref (event);
      goto done;
    case GST_EVENT_EOS:
      /* drop EOS on the subtitle pad, it means there are no more subtitles,
       * video might still continue, though */
      gst_event_unref (event);
      goto done;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

done:

  return res;
}
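Example No. 7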
static GstFlowReturn
gst_dvdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDVDec *dvdec;
  guint8 *inframe;
  guint8 *outframe_ptrs[3];
  gint outframe_pitches[3];
  GstMapInfo map;
  GstVideoFrame frame;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  guint length;
  guint64 cstart = GST_CLOCK_TIME_NONE, cstop = GST_CLOCK_TIME_NONE;
  gboolean PAL, wide;

  dvdec = GST_DVDEC (parent);

  gst_buffer_map (buf, &map, GST_MAP_READ);
  inframe = map.data;

  /* the buffer should be at least the size of one NTSC frame; that is
   * enough to decode the header. */
  if (G_UNLIKELY (map.size < NTSC_BUFFER))
    goto wrong_size;

  /* preliminary dropping: unref and return if outside of the configured segment */
  if ((dvdec->segment.format == GST_FORMAT_TIME) &&
      (!(gst_segment_clip (&dvdec->segment, GST_FORMAT_TIME,
                  GST_BUFFER_TIMESTAMP (buf),
                  GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf),
                  &cstart, &cstop))))
    goto dropping;

  if (G_UNLIKELY (dv_parse_header (dvdec->decoder, inframe) < 0))
    goto parse_header_error;

  /* get size */
  PAL = dv_system_50_fields (dvdec->decoder);
  wide = dv_format_wide (dvdec->decoder);

  /* check that the buffer is the right size once we know whether we are
   * dealing with PAL or NTSC */
  length = (PAL ? PAL_BUFFER : NTSC_BUFFER);
  if (G_UNLIKELY (map.size < length))
    goto wrong_size;

  dv_parse_packs (dvdec->decoder, inframe);

  if (dvdec->video_offset % dvdec->drop_factor != 0)
    goto skip;

  /* renegotiate on change */
  if (PAL != dvdec->PAL || wide != dvdec->wide) {
    dvdec->src_negotiated = FALSE;
    dvdec->PAL = PAL;
    dvdec->wide = wide;
  }

  dvdec->height = (dvdec->PAL ? PAL_HEIGHT : NTSC_HEIGHT);

  dvdec->interlaced = !dv_is_progressive (dvdec->decoder);

  /* negotiate if not done yet */
  if (!dvdec->src_negotiated) {
    if (!gst_dvdec_src_negotiate (dvdec))
      goto not_negotiated;
  }

  if (gst_pad_check_reconfigure (dvdec->srcpad)) {
    GstCaps *caps;

    caps = gst_pad_get_current_caps (dvdec->srcpad);
    if (!caps)
      goto flushing;

    gst_dvdec_negotiate_pool (dvdec, caps, &dvdec->vinfo);
    gst_caps_unref (caps);
  }

  if (dvdec->need_segment) {
    gst_pad_push_event (dvdec->srcpad, gst_event_new_segment (&dvdec->segment));
    dvdec->need_segment = FALSE;
  }

  ret = gst_buffer_pool_acquire_buffer (dvdec->pool, &outbuf, NULL);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto no_buffer;

  gst_video_frame_map (&frame, &dvdec->vinfo, outbuf, GST_MAP_WRITE);

  outframe_ptrs[0] = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  outframe_pitches[0] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);

  /* the rest only matters for YUY2 */
  if (dvdec->bpp < 3) {
    outframe_ptrs[1] = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
    outframe_ptrs[2] = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

    outframe_pitches[1] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
    outframe_pitches[2] = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 2);
  }

  GST_DEBUG_OBJECT (dvdec, "decoding and pushing buffer");
  dv_decode_full_frame (dvdec->decoder, inframe,
      e_dv_color_yuv, outframe_ptrs, outframe_pitches);

  gst_video_frame_unmap (&frame);

  GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);

  GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);
  GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buf);

  /* FIXME : Compute values when using non-TIME segments,
   * but for the moment make sure we at least don't set bogus values
   */
  if (GST_CLOCK_TIME_IS_VALID (cstart)) {
    GST_BUFFER_TIMESTAMP (outbuf) = cstart;
    if (GST_CLOCK_TIME_IS_VALID (cstop))
      GST_BUFFER_DURATION (outbuf) = cstop - cstart;
  }

  ret = gst_pad_push (dvdec->srcpad, outbuf);

skip:
  dvdec->video_offset++;

done:
  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Input buffer too small"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Error parsing DV header"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
not_negotiated:
  {
    GST_DEBUG_OBJECT (dvdec, "could not negotiate output");
    if (GST_PAD_IS_FLUSHING (dvdec->srcpad))
      ret = GST_FLOW_FLUSHING;
    else
      ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
flushing:
  {
    GST_DEBUG_OBJECT (dvdec, "have no current caps");
    ret = GST_FLOW_FLUSHING;
    goto done;
  }
no_buffer:
  {
    GST_DEBUG_OBJECT (dvdec, "could not allocate buffer");
    goto done;
  }

dropping:
  {
    GST_DEBUG_OBJECT (dvdec,
        "dropping buffer since it's out of the configured segment");
    goto done;
  }
}
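Example No. 8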
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps))
      goto not_negotiated;
  }

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * initialize.
     */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = frame->input_buffer;
      frame->input_buffer = NULL;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  if (g_atomic_int_get (&self->processing) == FALSE) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task; when it quits, it will disable input
     * processing to unlock the input side when draining, or to prevent a
     * potential block */
    g_atomic_int_set (&self->processing, TRUE);
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
      goto start_task_failed;
  }

  if (frame->input_buffer) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      if (g_atomic_int_get (&self->processing) == FALSE)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }

    /* No need to keep the input around */
    gst_buffer_replace (&frame->input_buffer, NULL);
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }

start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}
Example No. 9
static void
kms_composite_mixer_port_data_destroy (gpointer data)
{
  KmsCompositeMixerData *port_data = (KmsCompositeMixerData *) data;
  KmsCompositeMixer *self = port_data->mixer;
  GstPad *audiosink;
  gchar *padname;

  KMS_COMPOSITE_MIXER_LOCK (self);

  port_data->removing = TRUE;

  kms_base_hub_unlink_video_sink (KMS_BASE_HUB (self), port_data->id);
  kms_base_hub_unlink_audio_sink (KMS_BASE_HUB (self), port_data->id);

  if (port_data->input) {
    GstEvent *event;
    gboolean result;
    GstPad *pad;

    if (port_data->capsfilter == NULL) {
      KMS_COMPOSITE_MIXER_UNLOCK (self);
      return;
    }

    pad = gst_element_get_static_pad (port_data->capsfilter, "sink");

    if (pad == NULL) {
      KMS_COMPOSITE_MIXER_UNLOCK (self);
      return;
    }

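    /* The pad has not received EOS yet: send one so this branch can drain
     * before its elements are removed */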
    if (!GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_EOS)) {

      if (GST_PAD_IS_FLUSHING (pad)) {
        gst_pad_send_event (pad, gst_event_new_flush_stop (FALSE));
      }

      event = gst_event_new_eos ();
      result = gst_pad_send_event (pad, event);

      if (port_data->input && self->priv->n_elems > 0) {
        port_data->input = FALSE;
        self->priv->n_elems--;
        kms_composite_mixer_recalculate_sizes (self);
      }
      KMS_COMPOSITE_MIXER_UNLOCK (self);

      if (!result) {
        GST_WARNING ("EOS event did not send");
      }
    } else {
      gboolean remove = FALSE;

      /* The EOS callback was triggered before we could remove the port data,
       * so we have to remove the elements ourselves to avoid memory leaks. */
      remove = port_data->eos_managed;
      KMS_COMPOSITE_MIXER_UNLOCK (self);

      if (remove) {
        /* Remove the pipeline without holding the mutex */
        kms_loop_idle_add_full (self->priv->loop, G_PRIORITY_DEFAULT,
            (GSourceFunc) remove_elements_from_pipeline,
            KMS_COMPOSITE_MIXER_REF (port_data),
            (GDestroyNotify) kms_ref_struct_unref);
      }
    }
    gst_element_unlink (port_data->capsfilter, port_data->tee);
    g_object_unref (pad);
  } else {
    if (port_data->probe_id > 0) {
      gst_pad_remove_probe (port_data->video_mixer_pad, port_data->probe_id);
    }

    if (port_data->latency_probe_id > 0) {
      gst_pad_remove_probe (port_data->video_mixer_pad,
          port_data->latency_probe_id);
    }

    if (port_data->link_probe_id > 0) {
      gst_pad_remove_probe (port_data->tee_sink_pad, port_data->link_probe_id);
    }
    KMS_COMPOSITE_MIXER_UNLOCK (self);

    gst_element_unlink (port_data->capsfilter, port_data->tee);
    gst_element_unlink (port_data->tee, port_data->fakesink);

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->capsfilter));
    gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
    g_object_unref (port_data->capsfilter);
    port_data->capsfilter = NULL;

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->tee));
    gst_element_set_state (port_data->tee, GST_STATE_NULL);
    g_object_unref (port_data->tee);
    port_data->tee = NULL;

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->fakesink));
    gst_element_set_state (port_data->fakesink, GST_STATE_NULL);
    g_object_unref (port_data->fakesink);
    port_data->fakesink = NULL;
  }

  padname = g_strdup_printf (AUDIO_SINK_PAD, port_data->id);
  audiosink = gst_element_get_static_pad (self->priv->audiomixer, padname);
  gst_element_release_request_pad (self->priv->audiomixer, audiosink);
  gst_object_unref (audiosink);
  g_free (padname);
}
Example No. 10
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean processed = FALSE;
  GstBuffer *tmp;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps,
          &error))
      goto not_negotiated;
  }

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;
    GstCaps *acquired_caps, *available_caps, *caps, *filter;
    GstStructure *st;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * initialize.
     */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = gst_buffer_ref (frame->input_buffer);
      processed = TRUE;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    /* For decoders G_FMT returns the coded size, G_SELECTION returns the
     * visible size in the compose rectangle. gst_v4l2_object_acquire_format()
     * checks both and returns the visible size as width/height and the coded
     * size as padding. */
    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    /* Create caps from the acquired format, remove the format field */
    acquired_caps = gst_video_info_to_caps (&info);
    st = gst_caps_get_structure (acquired_caps, 0);
    gst_structure_remove_field (st, "format");

    /* Probe currently available pixel formats */
    available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
    available_caps = gst_caps_make_writable (available_caps);

    /* Replace the coded size with the visible size; we want to negotiate the
     * visible size with downstream, not the coded size. */
    gst_caps_map_in_place (available_caps, gst_v4l2_video_remove_padding, self);

    filter = gst_caps_intersect_full (available_caps, acquired_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (acquired_caps);
    gst_caps_unref (available_caps);
    caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
    gst_caps_unref (filter);

    GST_DEBUG_OBJECT (self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
    if (gst_caps_is_empty (caps)) {
      gst_caps_unref (caps);
      goto not_negotiated;
    }

    /* Fixate pixel format */
    caps = gst_caps_fixate (caps);

    GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);

    /* Try to set negotiated format, on success replace acquired format */
    if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
      gst_video_info_from_caps (&info, caps);
    else
      gst_v4l2_clear_error (&error);
    gst_caps_unref (caps);

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  if (g_atomic_int_get (&self->processing) == FALSE) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task; when it quits, it will disable input
     * processing to unlock the input side when draining, or to prevent a
     * potential block */
    g_atomic_int_set (&self->processing, TRUE);
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
      goto start_task_failed;
  }

  if (!processed) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      if (g_atomic_int_get (&self->processing) == FALSE)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  /* No need to keep the input around */
  tmp = frame->input_buffer;
  frame->input_buffer = gst_buffer_new ();
  gst_buffer_copy_into (frame->input_buffer, tmp,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
      GST_BUFFER_COPY_META, 0, 0);
  gst_buffer_unref (tmp);

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_v4l2_error (self, &error);
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }

start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}
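Example No. 11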
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

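  /* Wrap the buffer, current caps and segment in a GstSample (reusing the
   * previous sample when possible) for the "draw" signal handler */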
  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below, as we would have one reference in the sample and one
   * in this function.
   *
   * If the sample itself is not writable, then the application kept a
   * reference to it.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);

    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
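Example No. 12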
/* Based on gstbasetextoverlay.c */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accept overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediately; GstBaseTransform needs them to get a reply
     * from the allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation meta has composition */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * which will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width, height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatibility, we will prefer blitting if downstream
   * allocation does not support the meta. Otherwise we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * forced to blit but the format isn't supported. */

  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps were already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
Example No. 13
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

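  /* Renegotiate the source caps first if downstream requested a reconfigure */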
  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      gst_pad_mark_reconfigure (rtp_mux->srcpad);
      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);

  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
Example No. 14
static GstFlowReturn
gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
    GstBufferList * bufferlist)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean changed = FALSE;
  struct BufferListData bd;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      gst_pad_mark_reconfigure (rtp_mux->srcpad);
      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_list_unref (bufferlist);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);

  padpriv = gst_pad_get_element_private (pad);
  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    ret = GST_FLOW_NOT_LINKED;
    gst_buffer_list_unref (bufferlist);
    goto out;
  }

  bd.rtp_mux = rtp_mux;
  bd.padpriv = padpriv;
  bd.drop = FALSE;

  bufferlist = gst_buffer_list_make_writable (bufferlist);
  gst_buffer_list_foreach (bufferlist, process_list_item, &bd);

  if (!bd.drop && pad != rtp_mux->last_pad) {
    changed = TRUE;
    g_clear_object (&rtp_mux->last_pad);
    rtp_mux->last_pad = g_object_ref (pad);
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (bd.drop) {
    gst_buffer_list_unref (bufferlist);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push_list (rtp_mux->srcpad, bufferlist);
  }

out:

  return ret;
}