Code example #1
static GstFlowReturn
pad_chain (GstPad *pad,
           GstBuffer *buf)
{
    GOmxCore *gomx;
    GOmxPort *in_port;
    GstOmxBaseFilter21 *self;
    GstFlowReturn ret = GST_FLOW_OK;
	int i;
	static gint sink_init = 0;
	int sink_number = 0;	/* index of the sink pad this buffer arrived on */
	static gboolean init_done = FALSE;
	
    self = GST_OMX_BASE_FILTER21 (GST_OBJECT_PARENT (pad));
	if(strcmp(GST_PAD_NAME(pad), "sink_00") == 0){
		sink_number=0;
        self->sink_camera_timestamp = GST_BUFFER_TIMESTAMP(buf);
	}
	else if(strcmp(GST_PAD_NAME(pad), "sink_01") == 0){
		sink_number=1;
	}
    PRINT_BUFFER (self, buf);

    gomx = self->gomx;

    GST_LOG_OBJECT (self, "begin: size=%u, state=%d, sink_number=%d", GST_BUFFER_SIZE (buf), gomx->omx_state, sink_number);
	
	/*if (G_LIKELY (gomx->omx_state != OMX_StateExecuting))
    {
		GST_INFO_OBJECT (self, "Begin - Port %d", sink_number);
		//setup_input_buffer (self, buf, sink_number);
		//sink_init++;
		//g_mutex_lock (self->ready_lock);
		if(init_done == TRUE){
			GST_INFO_OBJECT (self, "Init_done");
			//g_mutex_unlock(self->ready_lock);
		}
		if(init_done == TRUE){
			sink_init = 0;
			init_done = FALSE;

		}
		else{
			while(sink_init != 2){
				usleep(1000);
			}
		}
	}*/
	
    if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded))
    {

        GST_INFO_OBJECT (self, "omx: prepare");
        
        /** @todo this should probably go after doing preparations. */
        if (self->omx_setup)
        {
            self->omx_setup (self);
        }
        
        /* enable input port */
        for(i=0;i<NUM_INPUTS;i++){
			GST_INFO_OBJECT (self,"Enable Port %d",self->in_port[i]->port_index);
			OMX_SendCommand (gomx->omx_handle, OMX_CommandPortEnable, self->in_port[i]->port_index, NULL);
			g_sem_down (self->in_port[i]->core->port_sem);
		}
		GST_INFO_OBJECT (self,"Enable Port %d",self->out_port->port_index);
		/* enable output port */
		OMX_SendCommand (gomx->omx_handle,OMX_CommandPortEnable, self->out_port->port_index, NULL);
		g_sem_down (self->out_port->core->port_sem);

		/* indicate that port is now configured */

        setup_ports (self);

        g_omx_core_prepare (self->gomx);

        if (gomx->omx_state == OMX_StateIdle)
        {
            self->ready = TRUE;
           	//gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
        }
        
        if (gomx->omx_state != OMX_StateIdle)
            goto out_flushing;
        
        GST_INFO_OBJECT (self, "omx: end state Loaded");    
    }
    
    if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle))
	{
		g_omx_core_start (gomx);
		GST_INFO_OBJECT (self, "Release Port - %d", sink_number);
		init_done = TRUE;
		//g_mutex_unlock (self->ready_lock);
		if (gomx->omx_state != OMX_StateExecuting){
			GST_INFO_OBJECT (self, "omx: executing FAILED !");
			goto out_flushing;
		
		}
	}
	
	if (G_LIKELY (self->in_port[sink_number]->enabled))
	{
		if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting))
		{
			GST_ERROR_OBJECT (self, "Whoa! very wrong");
		}

		while (TRUE)
		{
			gint sent;
			if (self->last_pad_push_return != GST_FLOW_OK ||
				!(gomx->omx_state == OMX_StateExecuting ||
				gomx->omx_state == OMX_StatePause))
			{
				GST_INFO_OBJECT (self, "last_pad_push_return=%d", self->last_pad_push_return);
				goto out_flushing;
			}
			sent = g_omx_port_send (self->in_port[sink_number], buf);
			if (G_UNLIKELY (sent < 0))
			{
				ret = GST_FLOW_WRONG_STATE;
				goto out_flushing;
			}
			else if (sent < GST_BUFFER_SIZE (buf))
			{
				GstBuffer *subbuf = gst_buffer_create_sub (buf, sent,
						GST_BUFFER_SIZE (buf) - sent);
				gst_buffer_unref (buf);
				buf = subbuf;
			}
			else
			{
				gst_buffer_unref (buf);

				break;
			}
			
		}
	}
	else
	{
		GST_WARNING_OBJECT (self, "done");
		ret = GST_FLOW_UNEXPECTED;
	}
	return ret;
leave:

    GST_LOG_OBJECT (self, "end");

    return ret;

    /* special conditions */
out_flushing:
    {
        const gchar *error_msg = NULL;

        if (gomx->omx_error)
        {
            error_msg = "Error from OpenMAX component";
        }
        else if (gomx->omx_state != OMX_StateExecuting &&
                 gomx->omx_state != OMX_StatePause)
        {
            error_msg = "OpenMAX component in wrong state";
        }

        if (error_msg)
        {
            GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), (error_msg));
            ret = GST_FLOW_ERROR;
        }

        gst_buffer_unref (buf);

        goto leave;
    }
}
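For context, a chain function with this (GStreamer 0.10) signature is attached to a sink pad with gst_pad_set_chain_function. A minimal sketch of that wiring; the helper name and pad template below are illustrative assumptions, not part of the element above:

static void
base_filter21_init_pads (GstOmxBaseFilter21 * self, GstPadTemplate * templ)
{
  /* "sink_00" matches one of the pad names pad_chain checks for */
  GstPad *sinkpad = gst_pad_new_from_template (templ, "sink_00");

  gst_pad_set_chain_function (sinkpad, pad_chain);
  gst_element_add_pad (GST_ELEMENT (self), sinkpad);
}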
Code example #2
File: gstxvidenc.c (project: wang-zhao/gstreamer-win)
/* encodes frame according to info in xframe;
   - buf is input buffer, can be NULL if dummy
   - buf is disposed of prior to exit
   - resulting buffer is returned, NULL if no encoder output or error
*/
static inline GstBuffer *
gst_xvidenc_encode (GstXvidEnc * xvidenc, GstBuffer * buf,
    xvid_enc_frame_t xframe)
{
  GstBuffer *outbuf;
  gint ret;

  /* compressed frame should fit in the rough size of an uncompressed one */
  outbuf = gst_buffer_new_and_alloc (gst_xvid_image_get_size (xvidenc->csp,
          xvidenc->width, xvidenc->height));

  xframe.bitstream = (void *) GST_BUFFER_DATA (outbuf);
  xframe.length = GST_BUFFER_SIZE (outbuf);

  /* now provide input image data where-abouts, if needed */
  if (buf)
    gst_xvid_image_fill (&xframe.input, GST_BUFFER_DATA (buf), xvidenc->csp,
        xvidenc->width, xvidenc->height);

  GST_DEBUG_OBJECT (xvidenc, "encoding frame into buffer of size %d",
      GST_BUFFER_SIZE (outbuf));
  ret = xvid_encore (xvidenc->handle, XVID_ENC_ENCODE, &xframe, NULL);

  if (ret < 0) {
    /* things can be nasty if we are trying to flush, so don't signal error then */
    if (buf) {
      GST_ELEMENT_WARNING (xvidenc, LIBRARY, ENCODE, (NULL),
          ("Error encoding xvid frame: %s (%d)", gst_xvid_error (ret), ret));
      gst_buffer_unref (buf);
    }
    gst_buffer_unref (outbuf);
    return NULL;
  } else if (ret > 0) {         /* make sub-buffer */
    GstBuffer *sub;

    GST_DEBUG_OBJECT (xvidenc, "xvid produced output of size %d", ret);
    sub = gst_buffer_create_sub (outbuf, 0, ret);

    /* parent no longer needed, will go away with child buffer */
    gst_buffer_unref (outbuf);
    outbuf = sub;
  } else {                      /* encoder did not yet produce something */
    GST_DEBUG_OBJECT (xvidenc, "xvid produced no output");
    gst_buffer_unref (outbuf);
    g_queue_push_tail (xvidenc->delay, buf);
    return NULL;
  }

  /* finish decoration and return */
  if (!(xframe.out_flags & XVID_KEYFRAME))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (xvidenc->srcpad));

  /* now we need the right buf to take timestamps from;
     note that timestamps from a display order input buffer can end up with
     another encode order output buffer, but other than this permutation,
     the overall time progress is tracked,
     and keyframes should have the correct stamp */
  if (!g_queue_is_empty (xvidenc->delay)) {
    if (buf)
      g_queue_push_tail (xvidenc->delay, buf);
    buf = g_queue_pop_head (xvidenc->delay);
  }
  if (buf) {
    GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);
    gst_buffer_unref (buf);
  }

  return outbuf;
}
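A sketch of how a caller might drive gst_xvidenc_encode. The encode_one helper and the xframe initialisation are illustrative assumptions, reduced to the minimum xvid requires (xvid_enc_frame_t and XVID_VERSION come from <xvid.h>, memset from <string.h>):

static GstFlowReturn
encode_one (GstXvidEnc * xvidenc, GstBuffer * inbuf)
{
  xvid_enc_frame_t xframe;
  GstBuffer *outbuf;

  /* zero the frame struct and stamp the API version, as xvid expects */
  memset (&xframe, 0, sizeof (xframe));
  xframe.version = XVID_VERSION;

  outbuf = gst_xvidenc_encode (xvidenc, inbuf, xframe);
  if (outbuf == NULL)
    return GST_FLOW_OK;         /* no output yet, or the input was queued */

  return gst_pad_push (xvidenc->srcpad, outbuf);
}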
Code example #3
static int
run_test (const char *format, ...)
{
    GstStateChangeReturn ret;

    GstElement *pipe, *src, *sink;

    GstBuffer *buf = NULL;

    GstMessage *msg;

    gchar *url;

    va_list args;

    int rc = -1;

    pipe = gst_pipeline_new (NULL);

    src = gst_element_factory_make ("souphttpsrc", NULL);
    fail_unless (src != NULL);

    sink = gst_element_factory_make ("fakesink", NULL);
    fail_unless (sink != NULL);

    gst_bin_add (GST_BIN (pipe), src);
    gst_bin_add (GST_BIN (pipe), sink);
    fail_unless (gst_element_link (src, sink));

    if (http_port == 0) {
        GST_DEBUG ("failed to start soup http server");
    }
    fail_unless (http_port != 0);
    va_start (args, format);
    g_vasprintf (&url, format, args);
    va_end (args);
    fail_unless (url != NULL);
    g_object_set (src, "location", url, NULL);
    g_free (url);

    g_object_set (src, "automatic-redirect", redirect, NULL);
    if (cookies != NULL)
        g_object_set (src, "cookies", cookies, NULL);
    g_object_set (sink, "signal-handoffs", TRUE, NULL);
    g_signal_connect (sink, "preroll-handoff", G_CALLBACK (handoff_cb), &buf);

    if (user_id != NULL)
        g_object_set (src, "user-id", user_id, NULL);
    if (user_pw != NULL)
        g_object_set (src, "user-pw", user_pw, NULL);

    ret = gst_element_set_state (pipe, GST_STATE_PAUSED);
    if (ret != GST_STATE_CHANGE_ASYNC) {
        GST_DEBUG ("failed to start up soup http src, ret = %d", ret);
        goto done;
    }

    gst_element_set_state (pipe, GST_STATE_PLAYING);
    msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
                        GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
    if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
        gchar *debug = NULL;

        GError *err = NULL;

        gst_message_parse_error (msg, &err, &debug);
        GST_INFO ("error: %s", err->message);
        if (g_str_has_suffix (err->message, "Not Found"))
            rc = 404;
        else if (g_str_has_suffix (err->message, "Forbidden"))
            rc = 403;
        else if (g_str_has_suffix (err->message, "Unauthorized"))
            rc = 401;
        else if (g_str_has_suffix (err->message, "Found"))
            rc = 302;
        GST_INFO ("debug: %s", debug);
        g_error_free (err);
        g_free (debug);
        gst_message_unref (msg);
        goto done;
    }
    gst_message_unref (msg);

    /* don't wait for more than 10 seconds */
    ret = gst_element_get_state (pipe, NULL, NULL, 10 * GST_SECOND);
    GST_LOG ("ret = %u", ret);

    if (buf == NULL) {
        /* we want to test the buffer offset, nothing else; if there's a failure
         * it might be for lots of reasons (no network connection, whatever), we're
         * not interested in those */
        GST_DEBUG ("didn't manage to get data within 10 seconds, skipping test");
        goto done;
    }

    GST_DEBUG ("buffer offset = %" G_GUINT64_FORMAT, GST_BUFFER_OFFSET (buf));

    /* first buffer should have a 0 offset */
    fail_unless (GST_BUFFER_OFFSET (buf) == 0);
    gst_buffer_unref (buf);
    rc = 0;

done:

    gst_element_set_state (pipe, GST_STATE_NULL);
    gst_object_unref (pipe);
    return rc;
}
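handoff_cb is connected above but not shown in this excerpt. A plausible sketch, matching fakesink's "preroll-handoff" signal signature (element, buffer, pad, user data); it keeps a reference to the first buffer so the test can check its offset after preroll:

static void
handoff_cb (GstElement * fakesink, GstBuffer * buffer, GstPad * pad,
    GstBuffer ** p_outbuf)
{
  GST_LOG ("handoff, buffer %p", buffer);

  /* keep a ref to the first buffer only; the test checks its offset */
  if (*p_outbuf == NULL)
    *p_outbuf = gst_buffer_ref (buffer);
}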
Code example #4
static gboolean
gst_mpeg2dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMpeg2dec *dec = GST_MPEG2DEC (decoder);
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config, *down_config = NULL;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean update_allocator;
  gboolean has_videometa = FALSE;
  GstCaps *caps;

  /* Get rid of ancient pool */
  if (dec->downstream_pool) {
    gst_buffer_pool_set_active (dec->downstream_pool, FALSE);
    gst_object_unref (dec->downstream_pool);
    dec->downstream_pool = NULL;
  }

  /* Get negotiated allocation caps */
  gst_query_parse_allocation (query, &caps, NULL);

  /* Set allocation parameters to guarantee 16-byte aligned output buffers */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  params.align = MAX (params.align, 15);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);

  /* Now chain up to the parent class to guarantee that we can
   * get a buffer pool from the query */
  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query)) {
    if (allocator)
      gst_object_unref (allocator);
    return FALSE;
  }

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    has_videometa = TRUE;
  }

  if (dec->need_alignment) {
    /* If downstream does not support video meta we will have to copy; keep
     * the downstream pool around to avoid copying twice */
    if (!has_videometa) {
      dec->downstream_pool = pool;
      pool = NULL;
      down_config = config;
      config = NULL;
      min = 2;
      max = 0;
    }

    /* If downstream supports video meta but the downstream pool has no
     * alignment support, discard the downstream pool and use a video pool */
    else if (!gst_buffer_pool_has_option (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
      gst_object_unref (pool);
      pool = NULL;
      gst_structure_free (config);
      config = NULL;
    }

    if (!pool)
      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
  }

  if (allocator)
    gst_object_unref (allocator);

  /* If we are copying out, we'll need to setup and activate the other pool */
  if (dec->downstream_pool) {
    if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config)) {
      down_config = gst_buffer_pool_get_config (dec->downstream_pool);
      if (!gst_buffer_pool_config_validate_params (down_config, caps, size, min,
              max)) {
        gst_structure_free (down_config);
        goto config_failed;
      }

      if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config))
        goto config_failed;
    }

    if (!gst_buffer_pool_set_active (dec->downstream_pool, TRUE))
      goto activate_failed;
  }

  /* Now configure the pool, if the pool had made some changes, it will
   * return FALSE. Validate the changes ...*/
  if (!gst_buffer_pool_set_config (pool, config)) {
    config = gst_buffer_pool_get_config (pool);

    /* Check basic params */
    if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
      gst_structure_free (config);
      goto config_failed;
    }

    /* If needed, check that resulting alignment is still valid */
    if (dec->need_alignment) {
      GstVideoAlignment valign;

      if (!gst_buffer_pool_config_get_video_alignment (config, &valign)) {
        gst_structure_free (config);
        goto config_failed;
      }

      if (valign.padding_left != 0 || valign.padding_top != 0
          || valign.padding_right < dec->valign.padding_right
          || valign.padding_bottom < dec->valign.padding_bottom) {
        gst_structure_free (config);
        goto config_failed;
      }
    }

    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* For external pools, we need to check strides */
  if (!GST_IS_VIDEO_BUFFER_POOL (pool) && has_videometa) {
    GstBuffer *buffer;
    const GstVideoFormatInfo *finfo;
    GstVideoMeta *vmeta;
    gint uv_stride;

    if (!gst_buffer_pool_set_active (pool, TRUE))
      goto activate_failed;

    if (gst_buffer_pool_acquire_buffer (pool, &buffer, NULL) != GST_FLOW_OK) {
      gst_buffer_pool_set_active (pool, FALSE);
      goto acquire_failed;
    }

    vmeta = gst_buffer_get_video_meta (buffer);
    finfo = gst_video_format_get_info (vmeta->format);

    /* Check that the strides are compatible. In this case we can scale the
     * stride directly, since the pixel stride is 1 for all the formats we
     * support */
    uv_stride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, 1, vmeta->stride[0]);
    if (uv_stride != vmeta->stride[1] || uv_stride != vmeta->stride[2]) {
      gst_buffer_pool_set_active (pool, FALSE);
      gst_object_unref (pool);

      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

      if (dec->need_alignment) {
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
        gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
      }

      /* The generic pool doesn't fail on _set_config() */
      gst_buffer_pool_set_config (pool, config);
    }

    gst_buffer_unref (buffer);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  gst_object_unref (pool);

  return TRUE;

config_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to configure buffer pool"),
      ("Configuration is most likely invalid, please report this issue."));
  return FALSE;

activate_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to activate buffer pool"), (NULL));
  return FALSE;

acquire_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to acquire a buffer"), (NULL));
  return FALSE;
}
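gst_mpeg2dec_create_generic_pool is referenced above but not part of this excerpt. A sketch of what such a helper could look like, built on the stock GstVideoBufferPool and returning the prepared config for the caller to finish and apply; the real implementation may differ:

static GstBufferPool *
gst_mpeg2dec_create_generic_pool (GstAllocator * allocator,
    GstAllocationParams * params, GstCaps * caps, guint size, guint min,
    guint max, GstStructure ** out_config)
{
  GstBufferPool *pool = gst_video_buffer_pool_new ();
  GstStructure *config = gst_buffer_pool_get_config (pool);

  /* pre-fill the config; the caller adds options and calls set_config */
  gst_buffer_pool_config_set_allocator (config, allocator, params);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  *out_config = config;

  return pool;
}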
Code example #5
File: gstfdsrc.c (project: JERUKA9/gstreamer)
static GstFlowReturn
gst_fd_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
{
  GstFdSrc *src;
  GstBuffer *buf;
  gssize readbytes;
  guint blocksize;
  GstClockTime timeout;

#ifndef HAVE_WIN32
  gboolean try_again;
  gint retval;
#endif

  src = GST_FD_SRC (psrc);

  if (src->timeout > 0) {
    timeout = src->timeout * GST_USECOND;
  } else {
    timeout = GST_CLOCK_TIME_NONE;
  }

#ifndef HAVE_WIN32
  do {
    try_again = FALSE;

    GST_LOG_OBJECT (src, "doing poll, timeout %" GST_TIME_FORMAT,
        GST_TIME_ARGS (src->timeout));

    retval = gst_poll_wait (src->fdset, timeout);
    GST_LOG_OBJECT (src, "poll returned %d", retval);

    if (G_UNLIKELY (retval == -1)) {
      if (errno == EINTR || errno == EAGAIN) {
        /* retry if interrupted */
        try_again = TRUE;
      } else if (errno == EBUSY) {
        goto stopped;
      } else {
        goto poll_error;
      }
    } else if (G_UNLIKELY (retval == 0)) {
      try_again = TRUE;
      /* timeout, post element message */
      gst_element_post_message (GST_ELEMENT_CAST (src),
          gst_message_new_element (GST_OBJECT_CAST (src),
              gst_structure_new ("GstFdSrcTimeout",
                  "timeout", G_TYPE_UINT64, src->timeout, NULL)));
    }
  } while (G_UNLIKELY (try_again));     /* retry if interrupted or timeout */
#endif

  blocksize = GST_BASE_SRC (src)->blocksize;

  /* create the buffer */
  buf = gst_buffer_try_new_and_alloc (blocksize);
  if (G_UNLIKELY (buf == NULL)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", blocksize);
    return GST_FLOW_ERROR;
  }

  do {
    readbytes = read (src->fd, GST_BUFFER_DATA (buf), blocksize);
    GST_LOG_OBJECT (src, "read %" G_GSSIZE_FORMAT, readbytes);
  } while (readbytes == -1 && errno == EINTR);  /* retry if interrupted */

  if (readbytes < 0)
    goto read_error;

  if (readbytes == 0)
    goto eos;

  GST_BUFFER_OFFSET (buf) = src->curoffset;
  GST_BUFFER_SIZE (buf) = readbytes;
  GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
  src->curoffset += readbytes;

  GST_LOG_OBJECT (psrc, "Read buffer of size %" G_GSSIZE_FORMAT, readbytes);

  /* we're done, return the buffer */
  *outbuf = buf;

  return GST_FLOW_OK;

  /* ERRORS */
#ifndef HAVE_WIN32
poll_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("poll on file descriptor: %s.", g_strerror (errno)));
    GST_DEBUG_OBJECT (psrc, "Error during poll");
    return GST_FLOW_ERROR;
  }
stopped:
  {
    GST_DEBUG_OBJECT (psrc, "Poll stopped");
    return GST_FLOW_WRONG_STATE;
  }
#endif
eos:
  {
    GST_DEBUG_OBJECT (psrc, "Read 0 bytes. EOS.");
    gst_buffer_unref (buf);
    return GST_FLOW_UNEXPECTED;
  }
read_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("read on file descriptor: %s.", g_strerror (errno)));
    GST_DEBUG_OBJECT (psrc, "Error reading from fd");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
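The src->fdset polled above has to be prepared before _create runs. A minimal sketch of that setup, assuming it happens in a start/prepare hook; the helper name is illustrative:

static gboolean
gst_fd_src_prepare_poll (GstFdSrc * src)
{
  GstPollFD fd = GST_POLL_FD_INIT;

  /* controllable, so the poll can be interrupted on flush/stop */
  src->fdset = gst_poll_new (TRUE);
  if (src->fdset == NULL)
    return FALSE;

  fd.fd = src->fd;
  gst_poll_add_fd (src->fdset, &fd);
  gst_poll_fd_ctl_read (src->fdset, &fd, TRUE);

  return TRUE;
}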
Code example #6
File: gstfreeze.c (project: bilboed/gst-plugins-bad)
static GstFlowReturn
gst_freeze_play (GstPad * pad, GstBuffer * buff)
{
  GstFreeze *freeze;
  guint64 cur_offset;
  GstFlowReturn ret = GST_FLOW_OK;

  freeze = GST_FREEZE (gst_pad_get_parent (pad));

  if (freeze->on_flush) {
    g_object_unref (freeze);
    return GST_FLOW_WRONG_STATE;
  }

  cur_offset = freeze->offset;
  /* If it is working in push mode this function will be called by "_chain"
     and buff will never be NULL. In pull mode this function will be called
     by _loop and buff will be NULL */
  if (!buff) {
    ret =
        gst_pad_pull_range (GST_PAD (freeze->sinkpad), freeze->offset, 4096,
        &buff);
    if (ret != GST_FLOW_OK) {
      gst_object_unref (freeze);
      return ret;
    }

    freeze->offset += GST_BUFFER_SIZE (buff);

  }

  if (g_queue_get_length (freeze->buffers) < freeze->max_buffers ||
      freeze->max_buffers == 0) {
    g_queue_push_tail (freeze->buffers, buff);
    GST_DEBUG_OBJECT (freeze, "accepted buffer %u",
        g_queue_get_length (freeze->buffers) - 1);
  } else {
    gst_buffer_unref (buff);
  }


  if (freeze->current != NULL) {
    GST_DEBUG_OBJECT (freeze, "switching to next buffer");
    freeze->current = g_queue_peek_nth (freeze->buffers,
        g_queue_index (freeze->buffers, (gpointer) freeze->current) + 1);
  }

  if (freeze->current == NULL) {
    if (freeze->max_buffers > 1)
      GST_DEBUG_OBJECT (freeze, "restarting the loop");
    freeze->current = g_queue_peek_head (freeze->buffers);
  }

  GST_BUFFER_TIMESTAMP (freeze->current) = freeze->timestamp_offset +
      freeze->running_time;
  freeze->running_time += GST_BUFFER_DURATION (freeze->current);

  gst_buffer_ref (freeze->current);
  ret = gst_pad_push (freeze->srcpad, freeze->current);

  gst_object_unref (freeze);

  return ret;
}
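As the comment inside the function notes, in pull mode this same code runs from a pad task with buff == NULL. A sketch of such a loop function (name and error handling are illustrative):

static void
gst_freeze_loop (GstPad * pad)
{
  GstFreeze *freeze = GST_FREEZE (gst_pad_get_parent (pad));

  /* buff == NULL makes gst_freeze_play pull its own data */
  if (gst_freeze_play (pad, NULL) != GST_FLOW_OK)
    gst_pad_pause_task (freeze->sinkpad);

  gst_object_unref (freeze);
}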
Code example #7
static GstFlowReturn
gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstRtpPtDemux *rtpdemux;
  guint8 pt;
  GstPad *srcpad;
  GstCaps *caps;
  GstRTPBuffer rtp = { NULL };

  rtpdemux = GST_RTP_PT_DEMUX (parent);

  if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp))
    goto invalid_buffer;

  pt = gst_rtp_buffer_get_payload_type (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  GST_DEBUG_OBJECT (rtpdemux, "received buffer for pt %d", pt);

  srcpad = find_pad_for_pt (rtpdemux, pt);
  if (srcpad == NULL) {
    /* new PT, create a src pad */
    GstRtpPtDemuxPad *rtpdemuxpad;
    GstElementClass *klass;
    GstPadTemplate *templ;
    gchar *padname;

    caps = gst_rtp_pt_demux_get_caps (rtpdemux, pt);
    if (!caps)
      goto no_caps;

    klass = GST_ELEMENT_GET_CLASS (rtpdemux);
    templ = gst_element_class_get_pad_template (klass, "src_%u");
    padname = g_strdup_printf ("src_%u", pt);
    srcpad = gst_pad_new_from_template (templ, padname);
    gst_pad_use_fixed_caps (srcpad);
    g_free (padname);
    gst_pad_set_event_function (srcpad, gst_rtp_pt_demux_src_event);

    GST_DEBUG ("Adding pt=%d to the list.", pt);
    rtpdemuxpad = g_slice_new0 (GstRtpPtDemuxPad);
    rtpdemuxpad->pt = pt;
    rtpdemuxpad->newcaps = FALSE;
    rtpdemuxpad->pad = srcpad;
    gst_object_ref (srcpad);
    GST_OBJECT_LOCK (rtpdemux);
    rtpdemux->srcpads = g_slist_append (rtpdemux->srcpads, rtpdemuxpad);
    GST_OBJECT_UNLOCK (rtpdemux);

    gst_pad_set_active (srcpad, TRUE);


    /* First push the stream-start event, it must always come first */
    gst_pad_push_event (srcpad,
        gst_pad_get_sticky_event (rtpdemux->sink, GST_EVENT_STREAM_START, 0));

    /* Then caps event is sent */
    caps = gst_caps_make_writable (caps);
    gst_caps_set_simple (caps, "payload", G_TYPE_INT, pt, NULL);
    gst_pad_set_caps (srcpad, caps);
    gst_caps_unref (caps);

    /* First sticky events on sink pad are forwarded to the new src pad */
    gst_pad_sticky_events_foreach (rtpdemux->sink, forward_sticky_events,
        srcpad);

    gst_element_add_pad (GST_ELEMENT_CAST (rtpdemux), srcpad);

    GST_DEBUG ("emitting new-payload-type for pt %d", pt);
    g_signal_emit (G_OBJECT (rtpdemux),
        gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE], 0, pt, srcpad);
  }

  if (pt != rtpdemux->last_pt) {
    gint emit_pt = pt;

    /* our own signal with an extra flag that this is the only pad */
    rtpdemux->last_pt = pt;
    GST_DEBUG ("emitting payload-type-changed for pt %d", emit_pt);
    g_signal_emit (G_OBJECT (rtpdemux),
        gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE], 0, emit_pt);
  }

  while (need_caps_for_pt (rtpdemux, pt)) {
    GST_DEBUG ("need new caps for %d", pt);
    caps = gst_rtp_pt_demux_get_caps (rtpdemux, pt);
    if (!caps)
      goto no_caps;

    clear_newcaps_for_pt (rtpdemux, pt);

    caps = gst_caps_make_writable (caps);
    gst_caps_set_simple (caps, "payload", G_TYPE_INT, pt, NULL);
    gst_pad_set_caps (srcpad, caps);
    gst_caps_unref (caps);
  }

  /* push to srcpad */
  ret = gst_pad_push (srcpad, buf);

  gst_object_unref (srcpad);

  return ret;

  /* ERRORS */
invalid_buffer:
  {
    /* this is fatal and should be filtered earlier */
    GST_ELEMENT_ERROR (rtpdemux, STREAM, DECODE, (NULL),
        ("Dropping invalid RTP payload"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
no_caps:
  {
    GST_ELEMENT_ERROR (rtpdemux, STREAM, DECODE, (NULL),
        ("Could not get caps for payload"));
    gst_buffer_unref (buf);
    if (srcpad)
      gst_object_unref (srcpad);
    return GST_FLOW_ERROR;
  }
}
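forward_sticky_events is used above but not shown. A plausible sketch with the GstPadStickyEventsForeachFunction signature; it skips stream-start and caps, which the code above already pushed explicitly:

static gboolean
forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
{
  GstPad *srcpad = GST_PAD_CAST (user_data);

  /* stream-start and caps were already sent on the new src pad */
  if (GST_EVENT_TYPE (*event) != GST_EVENT_STREAM_START
      && GST_EVENT_TYPE (*event) != GST_EVENT_CAPS)
    gst_pad_push_event (srcpad, gst_event_ref (*event));

  return TRUE;
}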
Code example #8
/* For each buffer we receive we check if our collected condition is reached
 * and if so we call the collected function. When this is done we check if
 * data has been unqueued. If data is still queued we wait holding the stream
 * lock to make sure no EOS event can happen while we are ready to be
 * collected 
 */
static GstFlowReturn
gst_collect_pads_chain (GstPad * pad, GstBuffer * buffer)
{
  GstCollectData *data;
  GstCollectPads *pads;
  GstCollectPadsPrivate *priv;
  GstFlowReturn ret;

  GST_DEBUG ("Got buffer for pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  /* some magic to get the managing collect_pads */
  GST_OBJECT_LOCK (pad);
  data = (GstCollectData *) gst_pad_get_element_private (pad);
  if (G_UNLIKELY (data == NULL))
    goto no_data;
  ref_data (data);
  GST_OBJECT_UNLOCK (pad);

  pads = data->collect;
  priv = pads->abidata.ABI.priv;

  GST_OBJECT_LOCK (pads);
  /* if not started, bail out */
  if (G_UNLIKELY (!pads->started))
    goto not_started;
  /* check if this pad is flushing */
  if (G_UNLIKELY (data->abidata.ABI.flushing))
    goto flushing;
  /* pad was EOS, we can refuse this data */
  if (G_UNLIKELY (data->abidata.ABI.eos))
    goto unexpected;

  /* see if we need to clip */
  if (priv->clipfunc) {
    buffer = priv->clipfunc (pads, data, buffer, priv->clipfunc_user_data);

    if (G_UNLIKELY (buffer == NULL))
      goto clipped;
  }

  GST_DEBUG ("Queuing buffer %p for pad %s:%s", buffer,
      GST_DEBUG_PAD_NAME (pad));

  /* One more pad has data queued */
  pads->queuedpads++;
  /* take ownership of the buffer */
  if (data->buffer)
    gst_buffer_unref (data->buffer);
  data->buffer = buffer;
  buffer = NULL;

  /* update segment last position if in TIME */
  if (G_LIKELY (data->segment.format == GST_FORMAT_TIME)) {
    GstClockTime timestamp = GST_BUFFER_TIMESTAMP (data->buffer);

    if (GST_CLOCK_TIME_IS_VALID (timestamp))
      gst_segment_set_last_stop (&data->segment, GST_FORMAT_TIME, timestamp);
  }

  /* While we have data queued on this pad try to collect stuff */
  do {
    GST_DEBUG ("Pad %s:%s checking", GST_DEBUG_PAD_NAME (pad));
    /* Check if our collected condition is matched and call the collected function
     * if it is */
    ret = gst_collect_pads_check_collected (pads);
    /* when an error occurs, we want to report this back to the caller ASAP
     * without having to block if the buffer was not popped */
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto error;

    /* data was consumed, we can exit and accept new data */
    if (data->buffer == NULL)
      break;

    /* Check if we got removed in the mean time, FIXME, this is racy.
     * Between this check and the _WAIT, the pad could be removed which will
     * makes us hang in the _WAIT. */
    GST_OBJECT_LOCK (pad);
    if (G_UNLIKELY (gst_pad_get_element_private (pad) == NULL))
      goto pad_removed;
    GST_OBJECT_UNLOCK (pad);

    GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
        GST_DEBUG_PAD_NAME (pad));

    /* wait to be collected, this must happen from another thread triggered
     * by the _chain function of another pad. We release the lock so we
     * can get stopped or flushed as well. We can however not get EOS
     * because we still hold the STREAM_LOCK. 
     */
    GST_COLLECT_PADS_WAIT (pads);

    GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));

    /* after a signal, we could be stopped */
    if (G_UNLIKELY (!pads->started))
      goto not_started;
    /* check if this pad is flushing */
    if (G_UNLIKELY (data->abidata.ABI.flushing))
      goto flushing;
  }
  while (data->buffer != NULL);

unlock_done:
  GST_DEBUG ("Pad %s:%s done", GST_DEBUG_PAD_NAME (pad));
  GST_OBJECT_UNLOCK (pads);
  unref_data (data);
  if (buffer)
    gst_buffer_unref (buffer);
  return ret;

pad_removed:
  {
    GST_WARNING ("%s got removed from collectpads", GST_OBJECT_NAME (pad));
    GST_OBJECT_UNLOCK (pad);
    ret = GST_FLOW_NOT_LINKED;
    goto unlock_done;
  }
  /* ERRORS */
no_data:
  {
    GST_DEBUG ("%s got removed from collectpads", GST_OBJECT_NAME (pad));
    GST_OBJECT_UNLOCK (pad);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }
not_started:
  {
    GST_DEBUG ("not started");
    gst_collect_pads_clear (pads, data);
    ret = GST_FLOW_WRONG_STATE;
    goto unlock_done;
  }
flushing:
  {
    GST_DEBUG ("pad %s:%s is flushing", GST_DEBUG_PAD_NAME (pad));
    gst_collect_pads_clear (pads, data);
    ret = GST_FLOW_WRONG_STATE;
    goto unlock_done;
  }
unexpected:
  {
    /* we should not post an error for this, just inform upstream that
     * we don't expect anything anymore */
    GST_DEBUG ("pad %s:%s is eos", GST_DEBUG_PAD_NAME (pad));
    ret = GST_FLOW_UNEXPECTED;
    goto unlock_done;
  }
clipped:
  {
    GST_DEBUG ("clipped buffer on pad %s:%s", GST_DEBUG_PAD_NAME (pad));
    ret = GST_FLOW_OK;
    goto unlock_done;
  }
error:
  {
    /* we print the error, the element should post a reasonable error
     * message for fatal errors */
    GST_DEBUG ("collect failed, reason %d (%s)", ret, gst_flow_get_name (ret));
    gst_collect_pads_clear (pads, data);
    goto unlock_done;
  }
}
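For context, the chain function above is installed on sink pads internally by GstCollectPads; an element only wires the collect pads up and supplies a collected callback (GStreamer 0.10 API). A sketch, where MyMuxer and my_muxer_collected are illustrative names:

static void
my_muxer_init (MyMuxer * mux)
{
  mux->collect = gst_collect_pads_new ();
  gst_collect_pads_set_function (mux->collect,
      (GstCollectPadsFunction) my_muxer_collected, mux);

  /* each sink pad is registered with the collect pads; GstCollectPads
   * then takes over the pad's chain and event handling */
  gst_collect_pads_add_pad (mux->collect, mux->sinkpad,
      sizeof (GstCollectData));
}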
Code example #9
static void
gst_amc_video_dec_loop (GstAmcVideoDec * self)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstClockTimeDiff deadline;
  gboolean is_eos;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  gint idx;
  GError *err = NULL;

  GST_VIDEO_DECODER_STREAM_LOCK (self);

retry:
  /*if (self->input_state_changed) {
     idx = INFO_OUTPUT_FORMAT_CHANGED;
     } else { */
  GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
  GST_VIDEO_DECODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx =
      gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
      &err);
  GST_VIDEO_DECODER_STREAM_LOCK (self);
  /*} */

  if (idx < 0) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_OUTPUT_BUFFERS_CHANGED:
        /* Handled internally */
        g_assert_not_reached ();
        break;
      case INFO_OUTPUT_FORMAT_CHANGED:{
        GstAmcFormat *format;
        gchar *format_string;

        GST_DEBUG_OBJECT (self, "Output format has changed");

        format = gst_amc_codec_get_output_format (self->codec, &err);
        if (!format)
          goto format_error;

        format_string = gst_amc_format_to_string (format, &err);
        if (!format_string) {
          gst_amc_format_free (format);
          goto format_error;
        }
        GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
        g_free (format_string);

        if (!gst_amc_video_dec_set_src_caps (self, format)) {
          gst_amc_format_free (format);
          goto format_error;
        }
        gst_amc_format_free (format);

        goto retry;
      }
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
        goto retry;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto retry;
  }

  GST_DEBUG_OBJECT (self,
      "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
      " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
      buffer_info.presentation_time_us, buffer_info.flags);

  frame =
      _find_nearest_frame (self,
      gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));

  is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);

  buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_output_buffer;

  if (frame
      && (deadline =
          gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
              frame)) < 0) {
    GST_WARNING_OBJECT (self,
        "Frame is too late, dropping (deadline %" GST_TIME_FORMAT ")",
        GST_TIME_ARGS (-deadline));
    flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
  } else if (!frame && buffer_info.size > 0) {
    GstBuffer *outbuf;

    /* This sometimes happens at EOS or if the input is not properly framed,
     * let's handle it gracefully by allocating a new buffer for the current
     * caps and filling it
     */
    GST_ERROR_OBJECT (self, "No corresponding frame found");

    outbuf =
        gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));

    if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info, outbuf)) {
      gst_buffer_unref (outbuf);
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto invalid_buffer;
    }

    GST_BUFFER_PTS (outbuf) =
        gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
        1);
    flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
  } else if (buffer_info.size > 0) {
    if ((flow_ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER
                (self), frame)) != GST_FLOW_OK) {
      GST_ERROR_OBJECT (self, "Failed to allocate buffer");
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto flow_error;
    }

    if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info,
            frame->output_buffer)) {
      gst_buffer_replace (&frame->output_buffer, NULL);
      gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
      if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err))
        GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
            idx);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      gst_amc_buffer_free (buf);
      buf = NULL;
      goto invalid_buffer;
    }

    flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
  } else if (frame != NULL) {
    flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto failed_release;
  }

  if (is_eos || flow_ret == GST_FLOW_EOS) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
    } else if (flow_ret == GST_FLOW_OK) {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);
    GST_VIDEO_DECODER_STREAM_LOCK (self);
  } else {
    GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_VIDEO_DECODER_STREAM_UNLOCK (self);

  return;

dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

format_error:
  {
    if (err)
      GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    else
      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
          ("Failed to handle format"));
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
failed_release:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");
      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));
      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    }
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_to_get_output_buffer:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

invalid_buffer:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Invalid sized input buffer"));
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
}
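The loop above is typically run as the src pad's streaming task, started when the decoder begins streaming. A sketch of that wiring (GStreamer 1.x gst_pad_start_task; the helper name is illustrative):

static gboolean
gst_amc_video_dec_start_task (GstAmcVideoDec * self)
{
  /* run gst_amc_video_dec_loop repeatedly in the src pad's task thread */
  return gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
      (GstTaskFunction) gst_amc_video_dec_loop, self, NULL);
}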
Code example #10
/* Called with the object lock for both the element and pad held,
 * as well as the aagg lock
 */
static gboolean
gst_audio_aggregator_fill_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf)
{
  GstClockTime start_time, end_time;
  gboolean discont = FALSE;
  guint64 start_offset, end_offset;
  gint rate, bpf;

  GstAggregator *agg = GST_AGGREGATOR (aagg);
  GstAggregatorPad *aggpad = GST_AGGREGATOR_PAD (pad);

  g_assert (pad->priv->buffer == NULL);

  rate = GST_AUDIO_INFO_RATE (&pad->info);
  bpf = GST_AUDIO_INFO_BPF (&pad->info);

  pad->priv->position = 0;
  pad->priv->size = gst_buffer_get_size (inbuf) / bpf;

  if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
    if (pad->priv->output_offset == -1)
      pad->priv->output_offset = aagg->priv->offset;
    if (pad->priv->next_offset == -1)
      pad->priv->next_offset = pad->priv->size;
    else
      pad->priv->next_offset += pad->priv->size;
    goto done;
  }

  start_time = GST_BUFFER_PTS (inbuf);
  end_time =
      start_time + gst_util_uint64_scale_ceil (pad->priv->size, GST_SECOND,
      rate);

  /* Clipping should've ensured this */
  g_assert (start_time >= aggpad->segment.start);

  start_offset =
      gst_util_uint64_scale (start_time - aggpad->segment.start, rate,
      GST_SECOND);
  end_offset = start_offset + pad->priv->size;

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || pad->priv->new_segment || pad->priv->next_offset == -1) {
    discont = TRUE;
    pad->priv->new_segment = FALSE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= pad->priv->next_offset)
      diff = pad->priv->next_offset - start_offset;
    else
      diff = start_offset - pad->priv->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (aagg->priv->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (aagg->priv->discont_wait > 0) {
        if (pad->priv->discont_time == GST_CLOCK_TIME_NONE) {
          pad->priv->discont_time = start_time;
        } else if (start_time - pad->priv->discont_time >=
            aagg->priv->discont_wait) {
          discont = TRUE;
          pad->priv->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (pad->priv->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      pad->priv->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (pad->priv->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          pad->priv->next_offset, start_offset);
    pad->priv->output_offset = -1;
    pad->priv->next_offset = end_offset;
  } else {
    pad->priv->next_offset += pad->priv->size;
  }

  if (pad->priv->output_offset == -1) {
    GstClockTime start_running_time;
    GstClockTime end_running_time;
    guint64 start_output_offset;
    guint64 end_output_offset;

    start_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, start_time);
    end_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, end_time);

    /* Convert to position in the output segment */
    start_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        start_running_time);
    if (start_output_offset != -1)
      start_output_offset =
          gst_util_uint64_scale (start_output_offset - agg->segment.start, rate,
          GST_SECOND);

    end_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        end_running_time);
    if (end_output_offset != -1)
      end_output_offset =
          gst_util_uint64_scale (end_output_offset - agg->segment.start, rate,
          GST_SECOND);

    if (start_output_offset == -1 && end_output_offset == -1) {
      /* Outside output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad, "Buffer outside output segment");
      return FALSE;
    }

    /* Calculate end_output_offset if it was outside the output segment */
    if (end_output_offset == -1)
      end_output_offset = start_output_offset + pad->priv->size;

    if (end_output_offset < aagg->priv->offset) {
      /* Before output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad,
          "Buffer before segment or current position: %" G_GUINT64_FORMAT " < %"
          G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
      return FALSE;
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset) {
      guint diff;

      if (start_output_offset == -1 && end_output_offset < pad->priv->size) {
        diff = pad->priv->size - end_output_offset + aagg->priv->offset;
      } else if (start_output_offset == -1) {
        start_output_offset = end_output_offset - pad->priv->size;

        if (start_output_offset < aagg->priv->offset)
          diff = aagg->priv->offset - start_output_offset;
        else
          diff = 0;
      } else {
        diff = aagg->priv->offset - start_output_offset;
      }

      pad->priv->position += diff;
      if (pad->priv->position >= pad->priv->size) {
        /* Empty buffer, drop */
        gst_buffer_unref (inbuf);
        pad->priv->buffer = NULL;
        pad->priv->position = 0;
        pad->priv->size = 0;
        pad->priv->output_offset = -1;
        GST_DEBUG_OBJECT (pad,
            "Buffer before segment or current position: %" G_GUINT64_FORMAT
            " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
        return FALSE;
      }
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset)
      pad->priv->output_offset = aagg->priv->offset;
    else
      pad->priv->output_offset = start_output_offset;

    GST_DEBUG_OBJECT (pad,
        "Buffer resynced: Pad offset %" G_GUINT64_FORMAT
        ", current audio aggregator offset %" G_GINT64_FORMAT,
        pad->priv->output_offset, aagg->priv->offset);
  }

done:

  GST_LOG_OBJECT (pad,
      "Queued new buffer at offset %" G_GUINT64_FORMAT,
      pad->priv->output_offset);
  pad->priv->buffer = inbuf;

  return TRUE;
}
Code example #11
File: gstbufferpool.c (project: mjparme/openjdk-jfx)
static void
default_free_buffer (GstBufferPool * pool, GstBuffer * buffer)
{
  gst_buffer_unref (buffer);
}
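This default is installed as the free_buffer vfunc of the GstBufferPool class; a pool subclass can override it to recycle buffers instead of releasing them. A sketch of the wiring (subclass name illustrative):

static void
my_buffer_pool_class_init (GstBufferPoolClass * klass)
{
  /* default behaviour: freeing a pooled buffer simply drops the ref */
  klass->free_buffer = default_free_buffer;
}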
Code example #12
static GstFlowReturn
gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  /* Get all pads that have data for us and store them in a
   * new list.
   *
   * Calculate the current output offset/timestamp and
   * offset_end/timestamp_end. Allocate a silence buffer
   * for this and store it.
   *
   * For all pads:
   * 1) Once per input buffer (cached)
   *   1) Check discont (flag and timestamp with tolerance)
   *   2) If discont or new, resync. That means:
   *     1) Drop all start data of the buffer that comes before
   *        the current position/offset.
   *     2) Calculate the offset (output segment!) that the first
   *        frame of the input buffer corresponds to. Base this on
   *        the running time.
   *
   * 2) If the current pad's offset/offset_end overlaps with the output
   *    offset/offset_end, mix it at the appropriate position in the output
   *    buffer and advance the pad's position. Remember if this pad needs
   *    a new buffer to advance behind the output offset_end.
   *
   * 3) If we had no pad with a buffer, go EOS.
   *
   * 4) If we had at least one pad that did not advance behind output
   *    offset_end, let collected be called again for the current
   *    output offset/offset_end.
   */
  GstElement *element;
  GstAudioAggregator *aagg;
  GList *iter;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;
  gint64 next_offset;
  gint64 next_timestamp;
  gint rate, bpf;
  gboolean dropped = FALSE;
  gboolean is_eos = TRUE;
  gboolean is_done = TRUE;
  guint blocksize;

  element = GST_ELEMENT (agg);
  aagg = GST_AUDIO_AGGREGATOR (agg);

  /* Sync pad properties to the stream time */
  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) GST_DEBUG_FUNCPTR (sync_pad_values), NULL);

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);

  /* Update position from the segment start/stop if needed */
  if (agg->segment.position == -1) {
    if (agg->segment.rate > 0.0)
      agg->segment.position = agg->segment.start;
    else
      agg->segment.position = agg->segment.stop;
  }

  if (G_UNLIKELY (aagg->info.finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) {
    if (timeout) {
      GST_DEBUG_OBJECT (aagg,
          "Got timeout before receiving any caps, don't output anything");

      /* Advance position */
      if (agg->segment.rate > 0.0)
        agg->segment.position += aagg->priv->output_buffer_duration;
      else if (agg->segment.position > aagg->priv->output_buffer_duration)
        agg->segment.position -= aagg->priv->output_buffer_duration;
      else
        agg->segment.position = 0;

      GST_OBJECT_UNLOCK (agg);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_OK;
    } else {
      GST_OBJECT_UNLOCK (agg);
      goto not_negotiated;
    }
  }

  if (aagg->priv->send_caps) {
    GST_OBJECT_UNLOCK (agg);
    gst_aggregator_set_src_caps (agg, aagg->current_caps);
    GST_OBJECT_LOCK (agg);

    aagg->priv->send_caps = FALSE;
  }

  rate = GST_AUDIO_INFO_RATE (&aagg->info);
  bpf = GST_AUDIO_INFO_BPF (&aagg->info);

  if (aagg->priv->offset == -1) {
    aagg->priv->offset =
        gst_util_uint64_scale (agg->segment.position - agg->segment.start, rate,
        GST_SECOND);
    GST_DEBUG_OBJECT (aagg, "Starting at offset %" G_GINT64_FORMAT,
        aagg->priv->offset);
  }

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      rate, GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* for the next timestamp, use the sample counter, which will
   * never accumulate rounding errors */

  /* FIXME: Reverse mixing does not work at all yet */
  if (agg->segment.rate > 0.0) {
    next_offset = aagg->priv->offset + blocksize;
  } else {
    next_offset = aagg->priv->offset - blocksize;
  }

  next_timestamp =
      agg->segment.start + gst_util_uint64_scale (next_offset, GST_SECOND,
      rate);

  if (aagg->priv->current_buffer == NULL) {
    GST_OBJECT_UNLOCK (agg);
    aagg->priv->current_buffer =
        GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->create_output_buffer (aagg,
        blocksize);
    /* Be careful: state may have changed while the object lock was released */
    GST_OBJECT_LOCK (agg);
    GST_BUFFER_FLAG_SET (aagg->priv->current_buffer, GST_BUFFER_FLAG_GAP);
  }
  outbuf = aagg->priv->current_buffer;

  GST_LOG_OBJECT (agg,
      "Starting to mix %u samples for offset %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, blocksize,
      aagg->priv->offset, GST_TIME_ARGS (agg->segment.position));

  for (iter = element->sinkpads; iter; iter = iter->next) {
    GstBuffer *inbuf;
    GstAudioAggregatorPad *pad = (GstAudioAggregatorPad *) iter->data;
    GstAggregatorPad *aggpad = (GstAggregatorPad *) iter->data;
    gboolean drop_buf = FALSE;
    gboolean pad_eos = gst_aggregator_pad_is_eos (aggpad);

    if (!pad_eos)
      is_eos = FALSE;

    inbuf = gst_aggregator_pad_get_buffer (aggpad);

    GST_OBJECT_LOCK (pad);
    if (!inbuf) {
      if (timeout) {
        if (pad->priv->output_offset < next_offset) {
          gint64 diff = next_offset - pad->priv->output_offset;
          GST_LOG_OBJECT (pad, "Timeout, missing %" G_GINT64_FORMAT " frames (%"
              GST_TIME_FORMAT ")", diff,
              GST_TIME_ARGS (gst_util_uint64_scale (diff, GST_SECOND,
                      GST_AUDIO_INFO_RATE (&aagg->info))));
        }
      } else if (!pad_eos) {
        is_done = FALSE;
      }
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (!pad->priv->buffer || pad->priv->buffer == inbuf);

    /* New buffer? */
    if (!pad->priv->buffer) {
      /* Takes ownership of buffer */
      if (!gst_audio_aggregator_fill_buffer (aagg, pad, inbuf)) {
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    } else {
      gst_buffer_unref (inbuf);
    }

    if (!pad->priv->buffer && !dropped && pad_eos) {
      GST_DEBUG_OBJECT (aggpad, "Pad is in EOS state");
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (pad->priv->buffer);

    /* This pad is lacking behind, we need to update the offset
     * and maybe drop the current buffer */
    if (pad->priv->output_offset < aagg->priv->offset) {
      gint64 diff = aagg->priv->offset - pad->priv->output_offset;
      gint64 odiff = diff;

      if (pad->priv->position + diff > pad->priv->size)
        diff = pad->priv->size - pad->priv->position;
      pad->priv->position += diff;
      pad->priv->output_offset += diff;

      if (pad->priv->position == pad->priv->size) {
        GST_LOG_OBJECT (pad, "Buffer was late by %" GST_TIME_FORMAT
            ", dropping %" GST_PTR_FORMAT,
            GST_TIME_ARGS (gst_util_uint64_scale (odiff, GST_SECOND,
                    GST_AUDIO_INFO_RATE (&aagg->info))), pad->priv->buffer);
        /* Buffer done, drop it */
        gst_buffer_replace (&pad->priv->buffer, NULL);
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    }


    if (pad->priv->output_offset >= aagg->priv->offset
        && pad->priv->output_offset <
        aagg->priv->offset + blocksize && pad->priv->buffer) {
      GST_LOG_OBJECT (aggpad, "Mixing buffer for current offset");
      drop_buf = !gst_audio_aggregator_mix_buffer (aagg, pad, pad->priv->buffer,
          outbuf);
      if (pad->priv->output_offset >= next_offset) {
        GST_DEBUG_OBJECT (pad,
            "Pad is after current offset: %" G_GUINT64_FORMAT " >= %"
            G_GINT64_FORMAT, pad->priv->output_offset, next_offset);
      } else {
        is_done = FALSE;
      }
    }

    GST_OBJECT_UNLOCK (pad);
    if (drop_buf)
      gst_aggregator_pad_drop_buffer (aggpad);

  }
  GST_OBJECT_UNLOCK (agg);

  if (dropped) {
    /* We dropped a buffer, retry */
    GST_INFO_OBJECT (aagg, "A pad dropped a buffer, wait for the next one");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  if (!is_done && !is_eos) {
    /* Get more buffers */
    GST_INFO_OBJECT (aagg,
        "We're not done yet for the current offset," " waiting for more data");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  if (is_eos) {
    gint64 max_offset = 0;

    GST_DEBUG_OBJECT (aagg, "We're EOS");

    GST_OBJECT_LOCK (agg);
    for (iter = GST_ELEMENT (agg)->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      max_offset = MAX ((gint64) max_offset, (gint64) pad->priv->output_offset);
    }
    GST_OBJECT_UNLOCK (agg);

    /* This means EOS or nothing mixed in at all */
    if (aagg->priv->offset == max_offset) {
      gst_buffer_replace (&aagg->priv->current_buffer, NULL);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_EOS;
    }

    if (max_offset <= next_offset) {
      GST_DEBUG_OBJECT (aagg,
          "Last buffer is incomplete: %" G_GUINT64_FORMAT " <= %"
          G_GINT64_FORMAT, max_offset, next_offset);
      next_offset = max_offset;
      next_timestamp =
          agg->segment.start + gst_util_uint64_scale (next_offset, GST_SECOND,
          rate);

      if (next_offset > aagg->priv->offset)
        gst_buffer_resize (outbuf, 0, (next_offset - aagg->priv->offset) * bpf);
    }
  }

  /* set timestamps on the output buffer */
  GST_OBJECT_LOCK (agg);
  if (agg->segment.rate > 0.0) {
    GST_BUFFER_PTS (outbuf) = agg->segment.position;
    GST_BUFFER_OFFSET (outbuf) = aagg->priv->offset;
    GST_BUFFER_OFFSET_END (outbuf) = next_offset;
    GST_BUFFER_DURATION (outbuf) = next_timestamp - agg->segment.position;
  } else {
    GST_BUFFER_PTS (outbuf) = next_timestamp;
    GST_BUFFER_OFFSET (outbuf) = next_offset;
    GST_BUFFER_OFFSET_END (outbuf) = aagg->priv->offset;
    GST_BUFFER_DURATION (outbuf) = agg->segment.position - next_timestamp;
  }

  GST_OBJECT_UNLOCK (agg);

  /* send it out */
  GST_LOG_OBJECT (aagg,
      "pushing outbuf %p, timestamp %" GST_TIME_FORMAT " offset %"
      G_GINT64_FORMAT, outbuf, GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
      GST_BUFFER_OFFSET (outbuf));

  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  ret = gst_aggregator_finish_buffer (agg, aagg->priv->current_buffer);
  aagg->priv->current_buffer = NULL;

  GST_LOG_OBJECT (aagg, "pushed outbuf, result = %s", gst_flow_get_name (ret));

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);
  aagg->priv->offset = next_offset;
  agg->segment.position = next_timestamp;

  /* If there was a timeout and there was a gap in the data of one of the
   * streams, then it's a very good time to resync the timestamps.
   */
  if (timeout) {
    for (iter = element->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      GST_OBJECT_LOCK (pad);
      if (pad->priv->output_offset < aagg->priv->offset)
        pad->priv->output_offset = -1;
      GST_OBJECT_UNLOCK (pad);
    }
  }
  GST_OBJECT_UNLOCK (agg);
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  return ret;
  /* ERRORS */
not_negotiated:
  {
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    GST_ELEMENT_ERROR (aagg, STREAM, FORMAT, (NULL),
        ("Unknown data received, not negotiated"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
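The aggregator above keeps converting between sample offsets and clock time, for example when it logs how late a buffer was and when it derives next_timestamp from next_offset. A minimal sketch of those two conversions, assuming a fixed sample rate; the helper names are ours, not part of the example:

#include <gst/gst.h>

/* time = offset * GST_SECOND / rate, computed without 64-bit overflow */
static GstClockTime
offset_to_time (guint64 offset, gint rate)
{
  return gst_util_uint64_scale (offset, GST_SECOND, rate);
}

/* offset = time * rate / GST_SECOND */
static guint64
time_to_offset (GstClockTime ts, gint rate)
{
  return gst_util_uint64_scale (ts, rate, GST_SECOND);
}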
Code example #13
template<> void derefGPtr<GstBuffer>(GstBuffer* ptr)
{
    if (ptr)
        gst_buffer_unref(ptr);
}
Code example #14
File: audiotrim.c  Project: fluffware/subrec
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
audio_trim_chain (GstPad * pad, GstBuffer * buf)
{
  AudioTrim *filter;

  g_assert(GST_BUFFER_OFFSET(buf) != GST_BUFFER_OFFSET_NONE);
  g_assert(GST_BUFFER_OFFSET_END(buf) != GST_BUFFER_OFFSET_NONE);
  
  filter = AUDIO_TRIM (GST_OBJECT_PARENT (pad));
  while(buf) {
    g_assert(GST_IS_BUFFER(buf));
    switch(filter->trim_state) {
    case AUDIO_TRIM_NOT_STARTED:
      filter->ref_time = (GST_BUFFER_OFFSET(buf)
			  + time_to_sample(filter, filter->start_skip));
      if (filter->empty_start_packet) {
	GstFlowReturn ret;
	GstBuffer *first;
	first = gst_buffer_new_and_alloc (sizeof(gfloat));
	*(gfloat*)GST_BUFFER_DATA(first) = 0.0;
	GST_BUFFER_SIZE(first) = 4;
	GST_BUFFER_OFFSET(first) = GST_BUFFER_OFFSET(buf); 
	GST_BUFFER_OFFSET_END(first) = GST_BUFFER_OFFSET(buf);
	GST_BUFFER_TIMESTAMP(first) = GST_BUFFER_TIMESTAMP(buf);
	GST_BUFFER_DURATION(first) = 0;
	GST_BUFFER_CAPS(first) = gst_caps_ref(GST_BUFFER_CAPS(buf));
	
	ret = gst_pad_push(filter->srcpad, first);
	if (ret != GST_FLOW_OK) {
	  gst_buffer_unref(buf);
	  return ret;
	}
      }
      filter->trim_state = AUDIO_TRIM_START_SKIP;
      break;
    case AUDIO_TRIM_START_SKIP:
      if (GST_BUFFER_OFFSET_END(buf) <= filter->ref_time) {
	gst_buffer_unref(buf); /* Ignore buffer completely */
      } else {
	GstBuffer *tail = buffer_tail(filter, buf, filter->ref_time);
	if (buf) gst_buffer_unref(buf);
	buf = tail;
	filter->trim_state = AUDIO_TRIM_START_SILENCE;
      }
      break;
    case AUDIO_TRIM_START_SILENCE:
      {
	guint64 offset = find_not_silence(filter, buf);
	if (offset == GST_BUFFER_OFFSET_NONE) {
	  while(filter->buffered > filter->pre_silence) {
	    GstBuffer *old = filter->buffers->data;
	    filter->buffered -= GST_BUFFER_DURATION(old);
	    gst_buffer_unref(old);
	    filter->buffers =
	      g_list_delete_link(filter->buffers, filter->buffers);
	  }
	  save_buffer(filter, buf);
	  buf = NULL;
	} else {
	  GstBuffer *head;
	  GstBuffer *tail;
	  GstFlowReturn ret;
	  gint64 clip_start;
	  clip_start = offset - time_to_sample(filter, filter->pre_silence);
	  ret = send_buffers_after(filter, clip_start);
	  if (ret != GST_FLOW_OK) {
	    gst_buffer_unref(buf);
	    return ret;
	  }
	  head = buffer_slice(filter, buf, clip_start, offset);
	  if (head) {
	    ret = gst_pad_push(filter->srcpad, head);
	    if (ret != GST_FLOW_OK) {
	      gst_buffer_unref(buf);
	      return ret;
	    }
	  }
	  tail = buffer_tail(filter, buf, offset);
	  filter->sound_duration =
	    sample_to_time(filter, GST_BUFFER_OFFSET_END(buf) - clip_start);
	  filter->ref_time = clip_start;
	  gst_buffer_unref(buf);
	  buf = tail;
	  filter->trim_state = AUDIO_TRIM_NOT_SILENCE;
	  g_debug("Got sound");
	}
      }
      break;
    case AUDIO_TRIM_NOT_SILENCE:
      {
	GstFlowReturn ret;
	filter->sound_duration += GST_BUFFER_DURATION(buf);
	while(filter->buffered > filter->max_silence_duration) {
	  GstBuffer *old = filter->buffers->data;
	  filter->buffered -= GST_BUFFER_DURATION(old);
	  filter->buffers = g_list_delete_link(filter->buffers,filter->buffers);
	  ret = gst_pad_push(filter->srcpad, old);
	   if (ret != GST_FLOW_OK) {
	     gst_buffer_unref(buf);
	     return ret;
	   }
	}
	save_buffer(filter, buf);
	buf = NULL;
      }
      break;
    default:
      gst_buffer_unref(buf);
      buf = NULL;
    }
  }

  return GST_FLOW_OK;
}
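The trim state machine above leans on helpers such as buffer_tail() and buffer_slice() that are not part of this excerpt. A hypothetical sketch of what buffer_tail() might look like on the 0.10 API, assuming mono float samples (one sample is sizeof(gfloat) bytes):

#include <gst/gst.h>

/* Hypothetical: return the part of 'buf' starting at sample 'from_sample',
 * or NULL if nothing is left. The real helper is not shown above. */
static GstBuffer *
buffer_tail_sketch (GstBuffer *buf, guint64 from_sample)
{
  guint64 start = GST_BUFFER_OFFSET (buf);
  guint skip_bytes;
  GstBuffer *tail;

  if (from_sample <= start)
    return gst_buffer_ref (buf);        /* keep the whole buffer */
  if (from_sample >= GST_BUFFER_OFFSET_END (buf))
    return NULL;                        /* nothing left */

  skip_bytes = (guint) (from_sample - start) * sizeof (gfloat);
  tail = gst_buffer_create_sub (buf, skip_bytes,
      GST_BUFFER_SIZE (buf) - skip_bytes);
  GST_BUFFER_OFFSET (tail) = from_sample;
  GST_BUFFER_OFFSET_END (tail) = GST_BUFFER_OFFSET_END (buf);
  return tail;
}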
Code example #15
File: visual-gl.c  Project: PeterXu/gst-mobile
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas opengl plan is bottom up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture,
        0, visual->width, 0, visual->height, GST_GL_DISPLAY_PROJECTION_ORTHO2D,
        (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
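The chain function above is built around the usual GstAdapter pattern: push every input buffer, loop while enough bytes are queued, and flush exactly one frame's worth per iteration. The bare pattern as a 0.10-style sketch (names ours):

#include <gst/base/gstadapter.h>

static void
consume_frames (GstAdapter *adapter, GstBuffer *inbuf, guint bytes_per_frame)
{
  gst_adapter_push (adapter, inbuf);    /* adapter takes ownership */

  while (gst_adapter_available (adapter) >= bytes_per_frame) {
    const guint8 *frame = gst_adapter_peek (adapter, bytes_per_frame);
    /* ... render one frame of samples from 'frame' ... */
    (void) frame;
    gst_adapter_flush (adapter, bytes_per_frame);
  }
}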
Code example #16
static GstFlowReturn
gst_mfxpostproc_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstMfxPostproc *const vpp = GST_MFXPOSTPROC (trans);
  GstMfxVideoMeta *inbuf_meta, *outbuf_meta;
  GstMfxSurface *surface, *out_surface;
  GstMfxFilterStatus status = GST_MFX_FILTER_STATUS_SUCCESS;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buf = NULL;
  GstMfxRectangle *crop_rect = NULL;
  GstClockTime timestamp;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  ret = gst_mfx_plugin_base_get_input_buffer (GST_MFX_PLUGIN_BASE (vpp),
          inbuf, &buf);
  if (GST_FLOW_OK != ret)
    return ret;

  inbuf_meta = gst_buffer_get_mfx_video_meta (buf);
  surface = gst_mfx_video_meta_get_surface (inbuf_meta);
  if (!surface)
    goto error_create_surface;

  do {
    if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
      if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE != status)
        gst_buffer_replace (&buf, NULL);
      buf = create_output_buffer (vpp);
      if (!buf)
        goto error_create_buffer;
    }

    status = gst_mfx_filter_process (vpp->filter, surface, &out_surface);
    if (GST_MFX_FILTER_STATUS_SUCCESS != status
        && GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE != status
        && GST_MFX_FILTER_STATUS_ERROR_MORE_DATA != status)
      goto error_process_vpp;

    if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status)
      outbuf_meta = gst_buffer_get_mfx_video_meta (buf);
    else
      outbuf_meta = gst_buffer_get_mfx_video_meta (outbuf);

    if (!outbuf_meta)
      goto error_create_meta;

    gst_mfx_video_meta_set_surface (outbuf_meta, out_surface);
    crop_rect = gst_mfx_surface_get_crop_rect (out_surface);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (outbuf);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }

    if (GST_MFX_FILTER_STATUS_ERROR_MORE_DATA == status) {
      gst_buffer_unref (buf);
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }

    if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status) {
      GST_BUFFER_TIMESTAMP (buf) = timestamp;
      GST_BUFFER_DURATION (buf) = vpp->field_duration;
      timestamp += vpp->field_duration;
      ret = gst_pad_push (trans->srcpad, buf);
    }
    else {
      if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
        GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
      }
      else {
        gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      }
    }
  } while (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status
           && GST_FLOW_OK == ret);

  gst_mfx_surface_dequeue (surface);

#if GST_CHECK_VERSION(1,8,0)
  gst_mfx_plugin_base_export_dma_buffer (GST_MFX_PLUGIN_BASE (vpp), outbuf);
#endif // GST_CHECK_VERSION

  gst_buffer_unref (buf);
  return ret;
  /* ERRORS */
error_create_buffer:
  {
    GST_ERROR ("failed to output buffer");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_create_meta:
  {
    GST_ERROR ("failed to create new output buffer meta");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_create_surface:
  {
    GST_ERROR ("failed to create surface surface from buffer");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_process_vpp:
  {
    GST_ERROR ("failed to apply VPP (error %d)", status);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
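When frame rate conversion is active, the loop above emits an extra buffer per input frame and advances the timestamp by vpp->field_duration each time. That timestamp bookkeeping in isolation (a sketch, helper name ours):

#include <gst/gst.h>

/* Stamp two output fields derived from one input frame: the second field
 * starts one field_duration after the first. */
static void
stamp_fields (GstBuffer *first, GstBuffer *second,
    GstClockTime input_pts, GstClockTime field_duration)
{
  GST_BUFFER_TIMESTAMP (first) = input_pts;
  GST_BUFFER_DURATION (first) = field_duration;
  GST_BUFFER_TIMESTAMP (second) = input_pts + field_duration;
  GST_BUFFER_DURATION (second) = field_duration;
}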
Code example #17
File: gstpngdec.c  Project: ChinnaSuhas/ossbuild
static GstFlowReturn
gst_pngdec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstPngDec *pngdec;
  GstFlowReturn ret = GST_FLOW_OK;

  pngdec = GST_PNGDEC (gst_pad_get_parent (pad));

  GST_LOG_OBJECT (pngdec, "Got buffer, size=%u", GST_BUFFER_SIZE (buffer));

  if (G_UNLIKELY (!pngdec->setup))
    goto not_configured;

  /* Something is going wrong in our callbacks */
  ret = pngdec->ret;
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (pngdec, "we have a pending return code of %d", ret);
    goto beach;
  }

  /* Let libpng come back here on error */
  if (setjmp (png_jmpbuf (pngdec->png))) {
    GST_WARNING_OBJECT (pngdec, "error during decoding");
    ret = GST_FLOW_ERROR;
    goto beach;
  }

  pngdec->in_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  pngdec->in_duration = GST_BUFFER_DURATION (buffer);

  /* Progressive loading of the PNG image */
  png_process_data (pngdec->png, pngdec->info, GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer));

  if (pngdec->image_ready) {
    if (pngdec->framed) {
      /* Reset ourselves for the next frame */
      gst_pngdec_libpng_clear (pngdec);
      gst_pngdec_libpng_init (pngdec);
      GST_LOG_OBJECT (pngdec, "setting up callbacks for next frame");
      png_set_progressive_read_fn (pngdec->png, pngdec,
          user_info_callback, user_endrow_callback, user_end_callback);
    } else {
      GST_LOG_OBJECT (pngdec, "sending EOS");
      pngdec->ret = gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    }
    pngdec->image_ready = FALSE;
  }

  /* grab new return code */
  ret = pngdec->ret;

  /* And release the buffer */
  gst_buffer_unref (buffer);

beach:
  gst_object_unref (pngdec);

  return ret;

  /* ERRORS */
not_configured:
  {
    GST_LOG_OBJECT (pngdec, "we are not configured yet");
    ret = GST_FLOW_WRONG_STATE;
    goto beach;
  }
}
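gst_pngdec_chain() depends on libpng's longjmp()-based error reporting: every png_process_data() call must be guarded by setjmp(png_jmpbuf(...)), exactly as above. A minimal standalone sketch of that guard (function name ours):

#include <png.h>
#include <setjmp.h>

/* Feed one chunk to the progressive reader; returns -1 if libpng
 * reported an error via longjmp(). */
static int
feed_png (png_structp png, png_infop info, unsigned char *data, size_t size)
{
  if (setjmp (png_jmpbuf (png)))
    return -1;                  /* libpng longjmp'd here on error */

  png_process_data (png, info, data, size);
  return 0;
}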
Code example #18
static GstFlowReturn
gst_live_adder_chain (GstPad * pad, GstBuffer * buffer)
{
    GstLiveAdder *adder = GST_LIVE_ADDER (gst_pad_get_parent_element (pad));
    GstLiveAdderPadPrivate *padprivate = NULL;
    GstFlowReturn ret = GST_FLOW_OK;
    GList *item = NULL;
    GstClockTime skip = 0;
    gint64 drift = 0;             /* Positive if new buffer after old buffer */

    GST_OBJECT_LOCK (adder);

    ret = adder->srcresult;

    GST_DEBUG ("Incoming buffer time:%" GST_TIME_FORMAT " duration:%"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
               GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

    if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (adder, "Passing non-ok result from src: %s",
                          gst_flow_get_name (ret));
        gst_buffer_unref (buffer);
        goto out;
    }

    padprivate = gst_pad_get_element_private (pad);

    if (!padprivate) {
        ret = GST_FLOW_NOT_LINKED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (padprivate->eos) {
        GST_DEBUG_OBJECT (adder, "Received buffer after EOS");
        ret = GST_FLOW_UNEXPECTED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
        goto invalid_timestamp;

    if (padprivate->segment.format == GST_FORMAT_UNDEFINED) {
        GST_WARNING_OBJECT (adder, "No new-segment received,"
                            " initializing segment with time 0..-1");
        gst_segment_init (&padprivate->segment, GST_FORMAT_TIME);
        gst_segment_set_newsegment (&padprivate->segment,
                                    FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
    }

    if (padprivate->segment.format != GST_FORMAT_TIME)
        goto invalid_segment;

    buffer = gst_buffer_make_metadata_writable (buffer);

    drift = GST_BUFFER_TIMESTAMP (buffer) - padprivate->expected_timestamp;

    /* Just see if we receive invalid timestamp/durations */
    if (GST_CLOCK_TIME_IS_VALID (padprivate->expected_timestamp) &&
            !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT) &&
            (drift != 0)) {
        GST_LOG_OBJECT (adder,
                        "Timestamp discontinuity without the DISCONT flag set"
                        " (expected %" GST_TIME_FORMAT ", got %" GST_TIME_FORMAT
                        " drift:%" G_GINT64_FORMAT "ms)",
                        GST_TIME_ARGS (padprivate->expected_timestamp),
                        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), drift / GST_MSECOND);

        /* We accept drifts of 10ms */
        if (ABS (drift) < (10 * GST_MSECOND)) {
            GST_DEBUG ("Correcting minor drift");
            GST_BUFFER_TIMESTAMP (buffer) = padprivate->expected_timestamp;
        }
    }


    /* If there is no duration, lets set one */
    if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
        GST_BUFFER_DURATION (buffer) =
            gst_audio_duration_from_pad_buffer (pad, buffer);
        padprivate->expected_timestamp = GST_CLOCK_TIME_NONE;
    } else {
        padprivate->expected_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
                                         GST_BUFFER_DURATION (buffer);
    }


    /*
     * Let's clip the buffer to the segment (so we don't have to worry about
     * clipping afterwards).
     * This should also guarantee us that we'll have valid timestamps and
     * durations afterwards
     */

    buffer = gst_audio_buffer_clip (buffer, &padprivate->segment, adder->rate,
                                    adder->bps);

    /* buffer can be NULL if it's completely outside of the segment */
    if (!buffer) {
        GST_DEBUG ("Buffer completely outside of configured segment, dropping it");
        goto out;
    }

    /*
     * Make sure all incoming buffers share the same timestamping
     */
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_segment_to_running_time (&padprivate->segment,
                                     padprivate->segment.format, GST_BUFFER_TIMESTAMP (buffer));


    if (GST_CLOCK_TIME_IS_VALID (adder->next_timestamp) &&
            GST_BUFFER_TIMESTAMP (buffer) < adder->next_timestamp) {
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                adder->next_timestamp) {
            GST_DEBUG_OBJECT (adder, "Buffer is late, dropping (ts: %" GST_TIME_FORMAT
                              " duration: %" GST_TIME_FORMAT ")",
                              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
                              GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
            gst_buffer_unref (buffer);
            goto out;
        } else {
            skip = adder->next_timestamp - GST_BUFFER_TIMESTAMP (buffer);
            GST_DEBUG_OBJECT (adder, "Buffer is partially late, skipping %"
                              GST_TIME_FORMAT, GST_TIME_ARGS (skip));
        }
    }

    /* If our new buffer's head is higher than the queue's head, let's wake up,
     * we may not have to wait for as long
     */
    if (adder->clock_id &&
            g_queue_peek_head (adder->buffers) != NULL &&
            GST_BUFFER_TIMESTAMP (buffer) + skip <
            GST_BUFFER_TIMESTAMP (g_queue_peek_head (adder->buffers)))
        gst_clock_id_unschedule (adder->clock_id);

    for (item = g_queue_peek_head_link (adder->buffers);
            item; item = g_list_next (item)) {
        GstBuffer *oldbuffer = item->data;
        GstClockTime old_skip = 0;
        GstClockTime mix_duration = 0;
        GstClockTime mix_start = 0;
        GstClockTime mix_end = 0;

        /* We haven't reached our place yet */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip >=
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            continue;

        /* We're past our place, let's insert ourselves here */
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <=
                GST_BUFFER_TIMESTAMP (oldbuffer))
            break;

        /* if we reach this spot, we have overlap, so we must mix */

        /* First make a subbuffer with the non-overlapping part */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip < GST_BUFFER_TIMESTAMP (oldbuffer)) {
            GstBuffer *subbuffer = NULL;
            GstClockTime subbuffer_duration = GST_BUFFER_TIMESTAMP (oldbuffer) -
                                              (GST_BUFFER_TIMESTAMP (buffer) + skip);

            subbuffer = gst_buffer_create_sub (buffer,
                                               gst_live_adder_length_from_duration (adder, skip),
                                               gst_live_adder_length_from_duration (adder, subbuffer_duration));

            GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GST_BUFFER_DURATION (subbuffer) = subbuffer_duration;

            skip += subbuffer_duration;

            g_queue_insert_before (adder->buffers, item, subbuffer);
        }

        /* Now we are on the overlapping part */
        oldbuffer = gst_buffer_make_writable (oldbuffer);
        item->data = oldbuffer;

        old_skip = GST_BUFFER_TIMESTAMP (buffer) + skip -
                   GST_BUFFER_TIMESTAMP (oldbuffer);

        mix_start = GST_BUFFER_TIMESTAMP (oldbuffer) + old_skip;

        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            mix_end = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
        else
            mix_end = GST_BUFFER_TIMESTAMP (oldbuffer) +
                      GST_BUFFER_DURATION (oldbuffer);

        mix_duration = mix_end - mix_start;

        adder->func (GST_BUFFER_DATA (oldbuffer) +
                     gst_live_adder_length_from_duration (adder, old_skip),
                     GST_BUFFER_DATA (buffer) +
                     gst_live_adder_length_from_duration (adder, skip),
                     gst_live_adder_length_from_duration (adder, mix_duration));

        skip += mix_duration;
    }

    g_cond_broadcast (adder->not_empty_cond);

    if (skip == GST_BUFFER_DURATION (buffer)) {
        gst_buffer_unref (buffer);
    } else {
        if (skip) {
            GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
            GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GstBuffer *new_buffer = gst_buffer_create_sub (buffer,
                                    gst_live_adder_length_from_duration (adder, skip),
                                    gst_live_adder_length_from_duration (adder, subbuffer_duration));
            gst_buffer_unref (buffer);
            buffer = new_buffer;
            GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
            GST_BUFFER_DURATION (buffer) = subbuffer_duration;
        }

        if (item)
            g_queue_insert_before (adder->buffers, item, buffer);
        else
            g_queue_push_tail (adder->buffers, buffer);
    }

out:

    GST_OBJECT_UNLOCK (adder);
    gst_object_unref (adder);

    return ret;

invalid_timestamp:

    GST_OBJECT_UNLOCK (adder);
    gst_buffer_unref (buffer);
    GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                       ("Buffer without a valid timestamp received"),
                       ("Invalid timestamp received on buffer"));

    return GST_FLOW_ERROR;

invalid_segment:
    {
        const gchar *format = gst_format_get_name (padprivate->segment.format);
        GST_OBJECT_UNLOCK (adder);
        gst_buffer_unref (buffer);
        GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                           ("This element only supports TIME segments, received other type"),
                           ("Received a segment of type %s, only support time segment", format));

        return GST_FLOW_ERROR;
    }

}
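The drift handling above snaps timestamps that are within 10 ms of the expected value back onto the expected timeline and leaves larger jumps for DISCONT handling. The same logic as a standalone helper (a sketch, name ours):

#include <gst/gst.h>

static GstClockTime
correct_drift (GstClockTime ts, GstClockTime expected)
{
  gint64 drift;

  if (!GST_CLOCK_TIME_IS_VALID (expected))
    return ts;

  drift = (gint64) ts - (gint64) expected;
  if (ABS (drift) < 10 * GST_MSECOND)
    return expected;            /* minor jitter: snap back */
  return ts;                    /* real discontinuity: keep as-is */
}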
Code example #19
File: gstfreeze.c  Project: bilboed/gst-plugins-bad
static void
gst_freeze_buffer_free (gpointer data, gpointer user_data)
{
  gst_buffer_unref (GST_BUFFER (data));
}
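The callback matches GLib's GFunc signature, so it is presumably meant for g_list_foreach() over a queue of cached buffers. A hypothetical call site, not shown in this excerpt:

/* Hypothetical helper: unref every cached buffer and free the list. */
static GList *
drop_cached_buffers (GList *buffers)
{
  g_list_foreach (buffers, gst_freeze_buffer_free, NULL);
  g_list_free (buffers);
  return NULL;
}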
Code example #20
File: gsttarkindec.c  Project: JJCG/gst-plugins-bad
static void
gst_tarkindec_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  TarkinDec *tarkindec;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  tarkindec = GST_TARKINDEC (gst_pad_get_parent (pad));

  if (!tarkindec->setup) {
    GST_ELEMENT_ERROR (tarkindec, CORE, NEGOTIATION, (NULL),
        ("decoder not initialized (input is not tarkin?)"));
    if (GST_IS_BUFFER (buf))
      gst_buffer_unref (buf);
    else
      gst_pad_event_default (pad, GST_EVENT (buf));
    return;
  }

  if (GST_IS_EVENT (buf)) {
    switch (GST_EVENT_TYPE (buf)) {
      case GST_EVENT_EOS:
      default:
        gst_pad_event_default (pad, GST_EVENT (buf));
        break;
    }
  } else {
    gchar *data;
    gulong size;
    gchar *buffer;
    guchar *rgb;
    TarkinTime date;
    TarkinVideoLayerDesc *layer;

    /* data to decode */
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    buffer = ogg_sync_buffer (&tarkindec->oy, size);
    memcpy (buffer, data, size);
    ogg_sync_wrote (&tarkindec->oy, size);

    if (ogg_sync_pageout (&tarkindec->oy, &tarkindec->og)) {
      ogg_stream_pagein (&tarkindec->os, &tarkindec->og);

      while (ogg_stream_packetout (&tarkindec->os, &tarkindec->op)) {
        if (tarkindec->op.e_o_s)
          break;
        if (tarkindec->nheader < 3) {   /* 3 first packets to headerin */
          tarkin_synthesis_headerin (&tarkindec->ti, &tarkindec->tc,
              &tarkindec->op);

          if (tarkindec->nheader == 2) {
            tarkin_synthesis_init (tarkindec->tarkin_stream, &tarkindec->ti);
          }
          tarkindec->nheader++;
        } else {
          tarkin_synthesis_packetin (tarkindec->tarkin_stream, &tarkindec->op);

          while (tarkin_synthesis_frameout (tarkindec->tarkin_stream, &rgb, 0,
                  &date) == 0) {
            GstBuffer *outbuf;

            layer = &tarkindec->tarkin_stream->layer->desc;

            if (!GST_PAD_CAPS (tarkindec->srcpad)) {
              if (gst_pad_try_set_caps (tarkindec->srcpad,
                      GST_CAPS_NEW ("tarkin_raw", "video/x-raw-rgb",
                          "bpp", GST_PROPS_INT (24),
                          "depth", GST_PROPS_INT (24),
                          "endianness", GST_PROPS_INT (G_BYTE_ORDER),
                          "red_mask", GST_PROPS_INT (0xff0000),
                          "green_mask", GST_PROPS_INT (0xff00),
                          "blue_mask", GST_PROPS_INT (0xff),
                          "width", GST_PROPS_INT (layer->width),
                          "height", GST_PROPS_INT (layer->height),
                          "framerate", GST_PROPS_FLOAT (0.)  /* FIXME!!! */
                      )) <= 0) {
                GST_ELEMENT_ERROR (tarkindec, CORE, NEGOTIATION, (NULL),
                    ("could not output format"));
                gst_buffer_unref (buf);
                return;
              }
            }
            outbuf = gst_buffer_new ();
            GST_BUFFER_DATA (outbuf) = rgb;
            GST_BUFFER_SIZE (outbuf) = layer->width * layer->height * 3;
            GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_DONTFREE);
            gst_pad_push (tarkindec->srcpad, GST_DATA (outbuf));

            tarkin_synthesis_freeframe (tarkindec->tarkin_stream, rgb);
          }
        }
      }
    }
    gst_buffer_unref (buf);
  }
}
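The decoder above drives the standard libogg demux loop: copy raw bytes into the sync layer, pull out pages, and split pages into packets. Just that loop, as a minimal sketch with error handling omitted:

#include <ogg/ogg.h>
#include <string.h>

static void
feed_ogg (ogg_sync_state *oy, ogg_stream_state *os, const char *data,
    long size)
{
  ogg_page og;
  ogg_packet op;
  char *buffer = ogg_sync_buffer (oy, size);

  memcpy (buffer, data, size);
  ogg_sync_wrote (oy, size);

  while (ogg_sync_pageout (oy, &og) == 1) {
    ogg_stream_pagein (os, &og);
    while (ogg_stream_packetout (os, &op) == 1) {
      /* ... hand 'op' to the codec ... */
    }
  }
}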
Code example #21
static GstFlowReturn
gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);
  GstBuffer *buf = frame->input_buffer;
  GstMapInfo minfo;
  const mpeg2_info_t *info;
  mpeg2_state_t state;
  gboolean done = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_LOG_OBJECT (mpeg2dec, "received frame %d, timestamp %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
      frame->system_frame_number,
      GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration));

  gst_buffer_ref (buf);
  if (!gst_buffer_map (buf, &minfo, GST_MAP_READ)) {
    GST_ERROR_OBJECT (mpeg2dec, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  info = mpeg2dec->info;

  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer");
  mpeg2_buffer (mpeg2dec->decoder, minfo.data, minfo.data + minfo.size);
  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer done");

  while (!done) {
    GST_LOG_OBJECT (mpeg2dec, "calling parse");
    state = mpeg2_parse (mpeg2dec->decoder);
    GST_DEBUG_OBJECT (mpeg2dec, "parse state %d", state);

    switch (state) {
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0)
      case STATE_SEQUENCE_MODIFIED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence modified");
        mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE;
        gst_mpeg2dec_clear_buffers (mpeg2dec);
        /* fall through */
#endif
      case STATE_SEQUENCE:
        ret = handle_sequence (mpeg2dec, info);
        /* if there is an error handling the sequence
         * reset the decoder, maybe something more elegant
         * could be done.
         */
        if (ret == GST_FLOW_ERROR) {
          GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
              ("decoding error"), ("Bad sequence header"), ret);
          gst_video_decoder_drop_frame (decoder, frame);
          gst_mpeg2dec_flush (decoder);
          goto done;
        }
        break;
      case STATE_SEQUENCE_REPEATED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence repeated");
        break;
      case STATE_GOP:
        GST_DEBUG_OBJECT (mpeg2dec, "gop");
        break;
      case STATE_PICTURE:
        ret = handle_picture (mpeg2dec, info, frame);
        break;
      case STATE_SLICE_1ST:
        GST_LOG_OBJECT (mpeg2dec, "1st slice of frame encountered");
        break;
      case STATE_PICTURE_2ND:
        GST_LOG_OBJECT (mpeg2dec,
            "Second picture header encountered. Decoding 2nd field");
        break;
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 4, 0)
      case STATE_INVALID_END:
        GST_DEBUG_OBJECT (mpeg2dec, "invalid end");
#endif
      case STATE_END:
        GST_DEBUG_OBJECT (mpeg2dec, "end");
      case STATE_SLICE:
        GST_DEBUG_OBJECT (mpeg2dec, "display_fbuf:%p, discard_fbuf:%p",
            info->display_fbuf, info->discard_fbuf);
        if (info->display_fbuf && info->display_fbuf->id) {
          ret = handle_slice (mpeg2dec, info);
        } else {
          GST_DEBUG_OBJECT (mpeg2dec, "no picture to display");
        }
        if (info->discard_fbuf && info->discard_fbuf->id)
          gst_mpeg2dec_discard_buffer (mpeg2dec,
              GPOINTER_TO_INT (info->discard_fbuf->id) - 1);
        if (state != STATE_SLICE) {
          gst_mpeg2dec_clear_buffers (mpeg2dec);
        }
        break;
      case STATE_BUFFER:
        done = TRUE;
        break;
        /* error */
      case STATE_INVALID:
        GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
            ("decoding error"), ("Reached libmpeg2 invalid state"), ret);
        continue;
      default:
        GST_ERROR_OBJECT (mpeg2dec, "Unknown libmpeg2 state %d, FIXME", state);
        ret = GST_FLOW_OK;
        gst_video_codec_frame_unref (frame);
        goto done;
    }

    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (mpeg2dec, "exit loop, reason %s",
          gst_flow_get_name (ret));
      break;
    }
  }

  gst_video_codec_frame_unref (frame);

done:
  gst_buffer_unmap (buf, &minfo);
  gst_buffer_unref (buf);
  return ret;
}
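handle_frame() above is shaped by libmpeg2's pull model: feed one chunk with mpeg2_buffer(), then call mpeg2_parse() until it returns STATE_BUFFER (more data needed). A stripped-down sketch of that loop; the header path may differ per installation:

#include <stdint.h>
#include <mpeg2.h>

static void
decode_chunk (mpeg2dec_t *dec, uint8_t *start, uint8_t *end)
{
  const mpeg2_info_t *info = mpeg2_info (dec);
  mpeg2_state_t state;

  mpeg2_buffer (dec, start, end);

  do {
    state = mpeg2_parse (dec);
    switch (state) {
      case STATE_SEQUENCE:      /* new sequence header: (re)negotiate */
      case STATE_PICTURE:       /* new picture: attach an output frame */
        break;
      case STATE_SLICE:
      case STATE_END:
        /* a decoded frame may be ready in info->display_fbuf */
        (void) info;
        break;
      default:
        break;
    }
  } while (state != STATE_BUFFER);
}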
Code example #22
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  GstVideoFrame inframe, outframe, maskframe;

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (self), timestamp);

  GST_LOG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
      GST_TIME_ARGS (timestamp), self->mask_position);

  g_mutex_lock (&self->mask_mutex);
  if (self->shutdown)
    goto shutdown;

  if (!self->mask)
    g_cond_wait (&self->mask_cond, &self->mask_mutex);

  if (self->mask == NULL || self->shutdown) {
    goto shutdown;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (&self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
    goto qos;

  /* Will blend inplace if buffer is writable */
  outbuf = gst_buffer_make_writable (buffer);
  gst_video_frame_map (&outframe, &self->vinfo, outbuf, GST_MAP_READWRITE);
  gst_video_frame_map (&inframe, &self->vinfo, outbuf, GST_MAP_READ);

  gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);

  switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_RGBA:
      if (self->mask_bpp == 16)
        gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
      else
        gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  gst_video_frame_unmap (&outframe);
  gst_video_frame_unmap (&inframe);

  gst_video_frame_unmap (&maskframe);

  gst_buffer_unref (mask);

  ret = gst_pad_push (self->srcpad, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto push_failed;

  return ret;

  /* Errors */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "No valid caps yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
shutdown:
  {
    GST_DEBUG_OBJECT (self, "Shutting down");
    gst_buffer_unref (buffer);
    return GST_FLOW_FLUSHING;
  }
qos:
  {
    GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }
push_failed:
  {
    if (ret != GST_FLOW_FLUSHING)
      GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
          gst_flow_get_name (ret));
    return ret;
  }
}
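The blend path above maps the same writable buffer for both input and output, plus the mask, and unmaps everything afterwards. The core GstVideoFrame map/unmap pattern, reduced to a minimal 1.0-style sketch (names ours):

#include <gst/video/video.h>

static gboolean
process_inplace (GstVideoInfo *vinfo, GstBuffer *buf)
{
  GstVideoFrame frame;

  if (!gst_video_frame_map (&frame, vinfo, buf, GST_MAP_READWRITE))
    return FALSE;

  /* ... read/write pixels via GST_VIDEO_FRAME_PLANE_DATA (&frame, 0) ... */

  gst_video_frame_unmap (&frame);
  return TRUE;
}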
Code example #23
static GstFlowReturn
gst_ks_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstKsVideoSrc *self = GST_KS_VIDEO_SRC (pushsrc);
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  guint buf_size;
  GstCaps *caps;
  GstBuffer *buf = NULL;
  GstFlowReturn result;
  GstClockTime presentation_time;
  gulong error_code;
  gchar *error_str;

  g_assert (priv->device != NULL);

  if (!gst_ks_video_device_has_caps (priv->device))
    goto error_no_caps;

  buf_size = gst_ks_video_device_get_frame_size (priv->device);
  g_assert (buf_size);

  caps = gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (self));
  if (caps == NULL)
    goto error_no_caps;
  result = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self), priv->offset,
      buf_size, caps, &buf);
  gst_caps_unref (caps);
  if (G_UNLIKELY (result != GST_FLOW_OK))
    goto error_alloc_buffer;

  if (G_UNLIKELY (!priv->running)) {
    KS_WORKER_LOCK (priv);
    priv->worker_pending_run = TRUE;
    KS_WORKER_NOTIFY (priv);
    while (priv->worker_pending_run)
      KS_WORKER_WAIT_FOR_RESULT (priv);
    priv->running = priv->worker_run_result;
    KS_WORKER_UNLOCK (priv);

    if (!priv->running)
      goto error_start_capture;
  }

  do {
    gulong bytes_read;

    result = gst_ks_video_device_read_frame (priv->device,
        GST_BUFFER_DATA (buf), buf_size, &bytes_read, &presentation_time,
        &error_code, &error_str);
    if (G_UNLIKELY (result != GST_FLOW_OK))
      goto error_read_frame;

    GST_BUFFER_SIZE (buf) = bytes_read;
  }
  while (!gst_ks_video_src_timestamp_buffer (self, buf, presentation_time));

  if (G_UNLIKELY (priv->do_stats))
    gst_ks_video_src_update_statistics (self);

  gst_ks_video_device_postprocess_frame (priv->device,
      GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));

  *buffer = buf;
  return GST_FLOW_OK;

  /* ERRORS */
error_no_caps:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("not negotiated"), ("maybe setcaps failed?"));

    return GST_FLOW_ERROR;
  }
error_start_capture:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ,
        ("could not start capture"),
        ("failed to change pin state to KSSTATE_RUN"));

    return GST_FLOW_ERROR;
  }
error_alloc_buffer:
  {
    GST_ELEMENT_ERROR (self, CORE, PAD, ("alloc_buffer failed"), (NULL));

    return result;
  }
error_read_frame:
  {
    if (result != GST_FLOW_WRONG_STATE && result != GST_FLOW_UNEXPECTED) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ,
          ("read failed: %s [0x%08x]", error_str, error_code),
          ("gst_ks_video_device_read_frame failed"));
    }

    g_free (error_str);
    gst_buffer_unref (buf);

    return result;
  }
}
Code example #24
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
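ReadMetadata() prerolls each candidate pipeline by setting it to PAUSED and blocking on the bus until ASYNC_DONE, ERROR, or EOS arrives. The same wait as a minimal C sketch (helper name ours):

#include <gst/gst.h>

static gboolean
wait_for_preroll (GstElement *pipeline, GstBus *bus)
{
  GstMessage *msg;
  gboolean ok;

  if (gst_element_set_state (pipeline, GST_STATE_PAUSED) ==
      GST_STATE_CHANGE_FAILURE)
    return FALSE;

  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR |
          GST_MESSAGE_EOS));
  ok = (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ASYNC_DONE);
  gst_message_unref (msg);
  return ok;
}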

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mAudioSinkBufferCount) {
      if (!mVideoSinkBufferCount) {
        /* We have nothing decoded, so it makes no sense to return to the state
         * machine: it would call us back immediately, we would return again,
         * and so on, wasting CPU cycles with no work done. Block here until
         * there is either video or audio data available.
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
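DecodeAudioData() shows the 1.0 appsink pull sequence: pull a sample, ref its buffer, drop the sample, then map the buffer for reading. A compact sketch of that sequence (helper name ours):

#include <gst/app/gstappsink.h>

/* Pull one buffer from an appsink and map it read-only.
 * Caller must gst_buffer_unmap() and gst_buffer_unref(). */
static GstBuffer *
pull_mapped (GstAppSink *sink, GstMapInfo *info)
{
  GstSample *sample = gst_app_sink_pull_sample (sink);
  GstBuffer *buffer;

  if (!sample)
    return NULL;                /* EOS or flushing */

  buffer = gst_buffer_ref (gst_sample_get_buffer (sample));
  gst_sample_unref (sample);

  if (!gst_buffer_map (buffer, info, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    return NULL;
  }
  return buffer;
}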
Code example #25
static int
gst_dv1394src_iso_receive (raw1394handle_t handle, int channel, size_t len,
    quadlet_t * data)
{
  GstDV1394Src *dv1394src = gst_dv1394src_from_raw1394handle (handle);

  if (len > 16) {
    /*
       the following code taken from kino-0.51 (Dan Dennedy/Charles Yates)
       Kindly relicensed under the LGPL. See the commit log for version 1.6 of
       this file in CVS.
     */
    unsigned char *p = (unsigned char *) &data[3];

    int section_type = p[0] >> 5;       /* section type is in bits 5 - 7 */
    int dif_sequence = p[1] >> 4;       /* dif sequence number is in bits 4 - 7 */
    int dif_block = p[2];

    /* if we are at the beginning of a frame, 
       we set buf=frame, and alloc a new buffer for frame
     */
    if (section_type == 0 && dif_sequence == 0) {       // dif header
      if (!GST_PAD_CAPS (GST_BASE_SRC_PAD (dv1394src))) {
        GstCaps *caps;

        // figure format (NTSC/PAL)
        if (p[3] & 0x80) {
          // PAL
          dv1394src->frame_size = PAL_FRAMESIZE;
          dv1394src->frame_rate = PAL_FRAMERATE;
          GST_DEBUG ("PAL data");
          caps = gst_caps_new_simple ("video/x-dv",
              "format", G_TYPE_STRING, "PAL",
              "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
        } else {
          // NTSC (untested)
          dv1394src->frame_size = NTSC_FRAMESIZE;
          dv1394src->frame_rate = NTSC_FRAMERATE;
          GST_DEBUG
              ("NTSC data [untested] - please report success/failure to <*****@*****.**>");
          caps = gst_caps_new_simple ("video/x-dv",
              "format", G_TYPE_STRING, "NTSC",
              "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
        }
        gst_pad_set_caps (GST_BASE_SRC_PAD (dv1394src), caps);
        gst_caps_unref (caps);
      }
      // drop last frame when not complete
      if (!dv1394src->drop_incomplete
          || dv1394src->bytes_in_frame == dv1394src->frame_size) {
        dv1394src->buf = dv1394src->frame;
      } else {
        GST_INFO_OBJECT (GST_ELEMENT (dv1394src), "incomplete frame dropped");
        g_signal_emit (G_OBJECT (dv1394src),
            gst_dv1394src_signals[SIGNAL_FRAME_DROPPED], 0);
        if (dv1394src->frame) {
          gst_buffer_unref (dv1394src->frame);
        }
      }
      if ((dv1394src->frame_sequence + 1) % (dv1394src->skip +
              dv1394src->consecutive) < dv1394src->consecutive) {
        GstBuffer *buf;
        gint64 i64;

        buf = gst_buffer_new_and_alloc (dv1394src->frame_size);

        /* fill in offset, duration, timestamp */
        GST_BUFFER_OFFSET (buf) = dv1394src->frame_sequence;
        dv1394src->frame = buf;
      }
      dv1394src->frame_sequence++;
      dv1394src->bytes_in_frame = 0;
    }

    if (dv1394src->frame != NULL) {
      guint8 *data = GST_BUFFER_DATA (dv1394src->frame);

      switch (section_type) {
        case 0:                /* 1 Header block */
          /* p[3] |= 0x80; // hack to force PAL data */
          memcpy (data + dif_sequence * 150 * 80, p, 480);
          break;

        case 1:                /* 2 Subcode blocks */
          memcpy (data + dif_sequence * 150 * 80 + (1 + dif_block) * 80, p,
              480);
          break;

        case 2:                /* 3 VAUX blocks */
          memcpy (data + dif_sequence * 150 * 80 + (3 + dif_block) * 80, p,
              480);
          break;

        case 3:                /* 9 Audio blocks interleaved with video */
          memcpy (data + dif_sequence * 150 * 80 + (6 + dif_block * 16) * 80, p,
              480);
          break;

        case 4:                /* 135 Video blocks interleaved with audio */
          memcpy (data + dif_sequence * 150 * 80 + (7 + (dif_block / 15) +
                  dif_block) * 80, p, 480);
          break;

        default:               /* we can't handle any other data */
          break;
      }
      dv1394src->bytes_in_frame += 480;
    }
  }

  return 0;
}
Code example #26
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded, so it makes no sense to return to the state
         * machine: it would call us back immediately, we would return again,
         * and so on, wasting CPU cycles with no work done. Block here until
         * there is either video or audio data available.
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
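
The pull pattern above (gst_app_sink_pull_sample() on GStreamer 1.x, gst_app_sink_pull_buffer() on 0.10) is the standard way to drain decoded frames from an appsink. For reference, a minimal standalone sketch of the same idiom against the 1.x API; the pipeline string and element names are illustrative, not taken from the reader code above.

/* Minimal GStreamer 1.x appsink pull sketch (illustrative).
 * Build: gcc sketch.c $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0)
 */
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

int main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstElement *pipeline = gst_parse_launch (
      "videotestsrc num-buffers=5 ! appsink name=sink", NULL);
  GstAppSink *sink =
      GST_APP_SINK (gst_bin_get_by_name (GST_BIN (pipeline), "sink"));

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  GstSample *sample;
  /* pull_sample() blocks until a frame is ready and returns NULL at EOS */
  while ((sample = gst_app_sink_pull_sample (sink))) {
    GstBuffer *buffer = gst_sample_get_buffer (sample);
    g_print ("frame pts=%" GST_TIME_FORMAT "\n",
        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
    gst_sample_unref (sample);  /* the buffer is owned by the sample */
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (sink);
  gst_object_unref (pipeline);
  return 0;
}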
Code example #27
File: gstxvidenc.c  Project: wang-zhao/gstreamer-win
static GstFlowReturn
gst_xvidenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstXvidEnc *xvidenc = GST_XVIDENC (GST_PAD_PARENT (pad));
  GstBuffer *outbuf;
  xvid_enc_frame_t xframe;

  const gint motion_presets[] = {
    0, 0, 0, 0,
    XVID_ME_HALFPELREFINE16,
    XVID_ME_HALFPELREFINE16 | XVID_ME_ADVANCEDDIAMOND16,
    XVID_ME_HALFPELREFINE16 | XVID_ME_EXTSEARCH16
        | XVID_ME_HALFPELREFINE8 | XVID_ME_USESQUARES16
  };

  if (!xvidenc->handle) {
    GST_ELEMENT_ERROR (xvidenc, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_DEBUG_OBJECT (xvidenc,
      "Received buffer of time %" GST_TIME_FORMAT ", size %d",
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_SIZE (buf));

  if (xvidenc->xframe_cache)
    memcpy (&xframe, xvidenc->xframe_cache, sizeof (xframe));
  else {                        /* need to build an initial xframe to be cached */
    /* initialise the frame struct, then fill in the per-frame encode options */
    gst_xvid_init_struct (xframe);

    if (xvidenc->par_width == xvidenc->par_height)
      xframe.par = XVID_PAR_11_VGA;
    else {
      xframe.par = XVID_PAR_EXT;
      xframe.par_width = xvidenc->par_width;
      xframe.par_height = xvidenc->par_height;
    }

    /* handle options */
    xframe.vol_flags |= xvidenc->quant_type;
    xframe.vop_flags = XVID_VOP_HALFPEL;
    xframe.motion = motion_presets[xvidenc->motion];

    if (xvidenc->me_chroma) {
      xframe.motion |= XVID_ME_CHROMA_PVOP;
      xframe.motion |= XVID_ME_CHROMA_BVOP;
    }

    if (xvidenc->me_vhq >= 1) {
      xframe.vop_flags |= XVID_VOP_MODEDECISION_RD;
    }
    if (xvidenc->me_vhq >= 2) {
      xframe.motion |= XVID_ME_HALFPELREFINE16_RD;
      xframe.motion |= XVID_ME_QUARTERPELREFINE16_RD;
    }
    if (xvidenc->me_vhq >= 3) {
      xframe.motion |= XVID_ME_HALFPELREFINE8_RD;
      xframe.motion |= XVID_ME_QUARTERPELREFINE8_RD;
      xframe.motion |= XVID_ME_CHECKPREDICTION_RD;
    }
    if (xvidenc->me_vhq >= 4) {
      xframe.motion |= XVID_ME_EXTSEARCH_RD;
    }

    /* with no motion estimation, emit intra frames only */
    if (xvidenc->motion == 0) {
      xframe.type = XVID_TYPE_IVOP;
    } else {
      xframe.type = XVID_TYPE_AUTO;
    }

    if (xvidenc->motion > 4) {
      xframe.vop_flags |= XVID_VOP_INTER4V;
    }

    if (xvidenc->me_quarterpel) {
      xframe.vol_flags |= XVID_VOL_QUARTERPEL;
      xframe.motion |= XVID_ME_QUARTERPELREFINE16;
      xframe.motion |= XVID_ME_QUARTERPELREFINE8;
    }

    if (xvidenc->gmc) {
      xframe.vol_flags |= XVID_VOL_GMC;
      xframe.motion |= XVID_ME_GME_REFINE;
    }

    if (xvidenc->interlaced) {
      xframe.vol_flags |= XVID_VOL_INTERLACING;
    }

    if (xvidenc->trellis) {
      xframe.vop_flags |= XVID_VOP_TRELLISQUANT;
    }

    if (xvidenc->hqacpred) {
      xframe.vop_flags |= XVID_VOP_HQACPRED;
    }

    if (xvidenc->greyscale) {
      xframe.vop_flags |= XVID_VOP_GREYSCALE;
    }

    if (xvidenc->cartoon) {
      xframe.vop_flags |= XVID_VOP_CARTOON;
      xframe.motion |= XVID_ME_DETECT_STATIC_MOTION;
    }

    xframe.bframe_threshold = xvidenc->bframe_threshold;
    xframe.input.csp = xvidenc->csp;

    /* save in cache */
    xvidenc->xframe_cache = g_memdup (&xframe, sizeof (xframe));
  }

  outbuf = gst_xvidenc_encode (xvidenc, buf, xframe);

  if (!outbuf)                  /* error or no data yet */
    return GST_FLOW_OK;

  /* go out, multiply! */
  return gst_pad_push (xvidenc->srcpad, outbuf);
}
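
A detail worth noting in gst_xvidenc_chain() is the xframe_cache trick: the fully initialised xvid_enc_frame_t is snapshotted once with g_memdup() and memcpy()'d back for every later buffer, so the option flags are computed only on the first frame. A generic sketch of that caching pattern follows, with a hypothetical FrameConfig struct standing in for xvid_enc_frame_t:

#include <glib.h>
#include <string.h>

/* Hypothetical per-frame config; stands in for xvid_enc_frame_t. */
typedef struct { int vol_flags; int vop_flags; int motion; } FrameConfig;

static FrameConfig *config_cache = NULL;

static void
get_frame_config (FrameConfig *out)
{
  if (config_cache) {
    /* fast path: restore the cached snapshot */
    memcpy (out, config_cache, sizeof (*out));
    return;
  }

  /* slow path, first frame only: compute the flags ... */
  memset (out, 0, sizeof (*out));
  out->vop_flags = 0x1;           /* placeholder values */
  out->motion = 0x2;

  /* ... then snapshot them; note g_memdup() is deprecated since
   * GLib 2.68 in favour of g_memdup2() */
  config_cache = g_memdup (out, sizeof (*out));
}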
Code example #28
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  int64_t offset1 = resource->Tell();
  unused << offset1;
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = resource->Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(PR_LOG_ERROR, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(PR_LOG_WARNING, "ReadAndPushData read underflow, "
        "bytesRead=%u, aLength=%u, offset(%lld,%lld)",
        bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Sanity-check that the offset advanced consistently within this function.
   * If another thread performed stream operations concurrently, the offsets
   * would disagree and disturb the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
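
The gst_buffer_ref() in the push above is not accidental: gst_app_src_push_buffer() takes ownership of the reference it is given, even on failure, so pushing an extra ref lets ReadAndPushData() keep using, and finally unref, its own buffer. A hedged sketch of the same ownership idiom, assuming the GStreamer 1.x API; push_chunk() is an illustrative helper:

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

/* Illustrative helper: push a chunk of bytes into an appsrc. */
static void
push_chunk (GstAppSrc *src, const guint8 *bytes, gsize len)
{
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, len, NULL);
  gst_buffer_fill (buffer, 0, bytes, len);

  /* push a second ref; appsrc consumes it even if the push fails */
  GstFlowReturn ret = gst_app_src_push_buffer (src, gst_buffer_ref (buffer));
  if (ret != GST_FLOW_OK)
    GST_WARNING ("push failed: %s", gst_flow_get_name (ret));

  gst_buffer_unref (buffer);  /* drop our own ref */
}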
Code example #29
static void
gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstV4l2BufferPool *v4l2_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
  GstBufferPool *pool;
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (decoder, "Allocate output buffer");

  self->output_flow = GST_FLOW_OK;
  do {
    /* We cannot use the base class allocate helper since it takes the internal
     * stream lock. We know that the acquire may need to poll until more frames
     * come in, and holding this lock would prevent that.
     */
    pool = gst_video_decoder_get_buffer_pool (decoder);

    /* Pool may be NULL if we started going to READY state */
    if (pool == NULL) {
      ret = GST_FLOW_FLUSHING;
      goto beach;
    }

    ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
    g_object_unref (pool);

    if (ret != GST_FLOW_OK)
      goto beach;

    GST_LOG_OBJECT (decoder, "Process output buffer");
    ret = gst_v4l2_buffer_pool_process (v4l2_pool, &buffer);

  } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER);

  if (ret != GST_FLOW_OK)
    goto beach;

  frame = gst_v4l2_video_dec_get_oldest_frame (decoder);

  if (frame) {
    frame->output_buffer = buffer;
    buffer = NULL;
    ret = gst_video_decoder_finish_frame (decoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (decoder, "Decoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s",
      gst_flow_get_name (ret));

  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (decoder->srcpad);
}
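
This loop runs as a GstTask on the decoder's source pad: on any non-OK flow return it unlocks the V4L2 object and pauses its own task at beach:, and another thread restarts it later. A sketch of the start/stop side using the 1.x pad-task API; the helper names here are illustrative:

/* Illustrative start/stop helpers for a srcpad decode loop. */
static void my_dec_loop (gpointer user_data);   /* a loop body like the one above */

static gboolean
start_output_loop (GstVideoDecoder *decoder)
{
  /* spawns (or resumes) the streaming thread that calls my_dec_loop()
   * repeatedly with `decoder` as user data */
  return gst_pad_start_task (decoder->srcpad,
      (GstTaskFunction) my_dec_loop, decoder, NULL);
}

static void
stop_output_loop (GstVideoDecoder *decoder)
{
  /* unblock any pending poll/acquire first (as gst_v4l2_object_unlock()
   * does above), then wait for the task to stop */
  gst_pad_stop_task (decoder->srcpad);
}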
Code example #30
static GstFlowReturn collected_pads(GstCollectPads *pads, GstOmxBaseFilter21 *self)
{
    GSList *item;
    GstCollectData *collectdata;
    GstFlowReturn ret = GST_FLOW_OK;
    GOmxCore *gomx = self->gomx;
    gint sink_number;
    GstBuffer *buffers[2];
    gboolean eos = FALSE;

    GST_DEBUG_OBJECT(self, "Collected pads !");

    // Collect buffers
    for( item = pads->data ; item != NULL ; item = item->next ) {
        collectdata = (GstCollectData *) item->data;
        
        //FIXME Use collect data
        if( strcmp(GST_PAD_NAME(collectdata->pad), "sink_00") == 0 ) {
            sink_number = 0;
        }
        else {
            sink_number = 1;
        }

        buffers[sink_number] = gst_collect_pads_pop(pads, collectdata);

        if( buffers[sink_number] == NULL ) {
            eos = TRUE;
        }
    }

    // Detect EOS
    if( eos == TRUE ) {
        GST_INFO_OBJECT(self, "EOS");
        for( sink_number=0 ; sink_number<2 ; sink_number++ ) {
            if( buffers[sink_number] ) {
                gst_buffer_unref(buffers[sink_number]);
            }
        }
        gst_pad_push_event(self->srcpad, gst_event_new_eos());
        return GST_FLOW_UNEXPECTED;
    }

    // Setup input ports if not done yet
    if (G_LIKELY (gomx->omx_state != OMX_StateExecuting)) {
      for( sink_number=0 ; sink_number<2 ; sink_number++ ) {
        GST_INFO_OBJECT(self, "Setup port %d", sink_number);
        setup_input_buffer (self, buffers[sink_number], sink_number);
      }
    }
    
    // Call chain foreach buffer
    for( sink_number=0 ; sink_number<2 ; sink_number++ ) {
        ret = pad_chain(self->sinkpad[sink_number], buffers[sink_number]);
    }
    
    // Call output_loop after pad_chain
    output_loop(self->srcpad);
    
    return ret;
}
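
For context, a callback like collected_pads() is invoked by GstCollectPads only once every registered sink pad has a buffer (or EOS) queued, which is what lets the code above pop one buffer per sink unconditionally. A minimal wiring sketch, assuming the 0.10-era API this element targets (GST_FLOW_UNEXPECTED is the 0.10 spelling of GST_FLOW_EOS); my_element_init() is illustrative:

#include <gst/base/gstcollectpads.h>

/* Illustrative wiring of collected_pads() (GStreamer 0.10 API). */
static void
my_element_init (GstOmxBaseFilter21 *self)
{
  GstCollectPads *collect = gst_collect_pads_new ();

  /* invoked once every sink pad has data queued */
  gst_collect_pads_set_function (collect,
      (GstCollectPadsFunction) collected_pads, self);

  /* register both sink pads with the collector */
  gst_collect_pads_add_pad (collect, self->sinkpad[0], sizeof (GstCollectData));
  gst_collect_pads_add_pad (collect, self->sinkpad[1], sizeof (GstCollectData));
}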