static void new_buffer_added (GstAppSink *appsink, gpointer user_data)
{
    GstBuffer* buffer = gst_app_sink_pull_buffer (appsink);
    g_debug("Buffer recebido tamanho[%d] duracao[%lli] timestamp[%lli]", 
            GST_BUFFER_SIZE(buffer), GST_BUFFER_DURATION(buffer) / GST_SECOND, GST_BUFFER_TIMESTAMP(buffer) / GST_SECOND);
    gst_buffer_unref(buffer);
}
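A callback like the one above only fires if the appsink has signal emission enabled. A minimal wiring sketch, assuming a GStreamer 0.10 pipeline that contains an appsink named "sink" (the lookup name and the sync setting are illustrative):

static void attach_new_buffer_callback (GstElement *pipeline)
{
    GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

    /* appsink only emits "new-buffer" when emit-signals is TRUE */
    g_object_set (G_OBJECT (sink), "emit-signals", TRUE, "sync", FALSE, NULL);
    g_signal_connect (sink, "new-buffer", G_CALLBACK (new_buffer_added), NULL);

    gst_object_unref (sink);
}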
Example #2
static GstFlowReturn on_new_buffer_from_source (GstAppSink * elt, void * data)
{
  guint size;
  GstBuffer *buffer;

  ofGstVideoData * gstData = (ofGstVideoData *) data;

  //get the buffer from appsink
  buffer = gst_app_sink_pull_buffer (GST_APP_SINK (elt));

  size = GST_BUFFER_SIZE (buffer);

  ofGstDataLock(gstData);
	  if(gstData->pixels){
		  memcpy (gstData->pixels, GST_BUFFER_DATA (buffer), size);
		  gstData->bHavePixelsChanged=true;
	  }
  ofGstDataUnlock(gstData);


  /// we don't need the appsink buffer anymore
  gst_buffer_unref (buffer);

  return GST_FLOW_OK;
}
Example #3
static GstBuffer *
create_video_buffer (GstCaps * caps)
{
  GstElement *pipeline;
  GstElement *cf;
  GstElement *sink;
  GstBuffer *buffer;

  pipeline =
      gst_parse_launch
      ("videotestsrc num-buffers=1 ! capsfilter name=cf ! appsink name=sink",
      NULL);
  g_assert (pipeline != NULL);

  cf = gst_bin_get_by_name (GST_BIN (pipeline), "cf");
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  g_object_set (G_OBJECT (cf), "caps", caps, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  buffer = gst_app_sink_pull_buffer (GST_APP_SINK (sink));

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (sink);
  gst_object_unref (cf);
  return buffer;
}
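A possible call site for the helper above; the caps string is just one example of raw video caps in 0.10 syntax:

GstCaps *caps = gst_caps_from_string
    ("video/x-raw-yuv, format=(fourcc)I420, width=320, height=240, framerate=30/1");
GstBuffer *buffer = create_video_buffer (caps);
gst_caps_unref (caps);

/* ... inspect or push the buffer somewhere ... */
gst_buffer_unref (buffer);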
Example #4
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
#if GST_VERSION_MAJOR==0
					if(prevBuffer) gst_buffer_unref (prevBuffer);
#else
					if(prevBuffer) gst_sample_unref (prevBuffer);
#endif
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer *buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_sample_unref (prevBuffer);
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = sample;
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
Example #5
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			ofScopedLock lock(mutex);
			bHavePixelsChanged = bBackPixelsChanged;
			if (bHavePixelsChanged){
				bBackPixelsChanged=false;
				swap(pixels,backPixels);
				#ifdef OF_USE_GST_GL
				if(backTexture.isAllocated()){
					frontTexture.getTextureData() = backTexture.getTextureData();
					frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
					frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
				}
				#endif
				if(!copyPixels){
					frontBuffer = backBuffer;
				}
			}
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = shared_ptr<GstBuffer>(buffer,gst_buffer_unref);
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer * buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
Example #6
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();

				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels = backPixels;
				}

			mutex.unlock();
		}else{
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				guint size = GST_BUFFER_SIZE (buffer);
				if(pixels.isAllocated()){
					memcpy (pixels.getPixels(), GST_BUFFER_DATA (buffer), size);
					bHavePixelsChanged=true;
				}
				/// we don't need the appsink buffer anymore
				gst_buffer_unref (buffer);
			}
		}
	}else{
		ofLog(OF_LOG_WARNING,"not loaded");
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
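Example #7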
/*!
 * \brief CvCapture_GStreamer::grabFrame
 * \return true if a new sample was grabbed
 * Grabs a sample from the pipeline, awaiting consumption by retrieveFrame.
 * The pipeline is started if it was not running yet.
 */
bool CvCapture_GStreamer::grabFrame()
{
    if(!pipeline)
        return false;

    // start the pipeline if it was not in playing state yet
    if(!this->isPipelinePlaying())
        this->startPipeline();

    // bail out if EOS
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

#if GST_VERSION_MAJOR == 0
    if(buffer)
        gst_buffer_unref(buffer);

    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
    if(sample)
        gst_sample_unref(sample);

    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));

    if(!sample)
        return false;

    buffer = gst_sample_get_buffer(sample);
#endif

    if(!buffer)
        return false;

    return true;
}
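The pulled sample is consumed later by retrieveFrame; on the GStreamer 1.0 path that consumption boils down to mapping the sample's buffer for reading. A rough sketch (process_frame is a hypothetical consumer, not part of the OpenCV code):

/* GStreamer >= 1.0: read the data out of the sample pulled by grabFrame() */
GstBuffer *buf = gst_sample_get_buffer (sample);  /* buffer stays owned by the sample */
GstMapInfo info;
if (gst_buffer_map (buf, &info, GST_MAP_READ)) {
    process_frame (info.data, info.size);         /* hypothetical consumer */
    gst_buffer_unmap (buf, &info);
}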
Example #8
static size_t gstreamer_read_frames(struct input_handle* ih) {
    size_t buf_pos = 0;
    GSList *next;

    while (ih->current_bytes < BUFFER_SIZE) {
        GstBuffer *buf = gst_app_sink_pull_buffer(GST_APP_SINK(ih->appsink));
        if (!buf) {
            break;
        }
        ih->buffer_list = g_slist_append(ih->buffer_list, buf);
        ih->current_bytes += buf->size;
    }

    while (ih->buffer_list &&
           GST_BUFFER(ih->buffer_list->data)->size + buf_pos <= BUFFER_SIZE) {
        memcpy((guint8 *) ih->buffer + buf_pos,
               GST_BUFFER(ih->buffer_list->data)->data,
               GST_BUFFER(ih->buffer_list->data)->size);
        buf_pos           += GST_BUFFER(ih->buffer_list->data)->size;
        ih->current_bytes -= GST_BUFFER(ih->buffer_list->data)->size;

        gst_buffer_unref(GST_BUFFER(ih->buffer_list->data));
        next = ih->buffer_list->next;
        g_slist_free_1(ih->buffer_list);
        ih->buffer_list = next;
    }

    return buf_pos / sizeof(float) / gstreamer_get_channels(ih);
}
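Example #9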
bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  if (!WaitForDecodedData(&mAudioSinkBufferCount)) {
    mAudioQueue.Finish();
    return false;
  }

  GstBuffer* buffer = gst_app_sink_pull_buffer(mAudioAppSink);
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);
  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  unsigned int size = GST_BUFFER_SIZE(buffer);
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudioChannels;
  ssize_t outSize = static_cast<size_t>(size / sizeof(AudioDataValue));
  nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]);
  memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
  AudioData* audio = new AudioData(offset, timestamp, duration,
      frames, data.forget(), mInfo.mAudioChannels);

  mAudioQueue.Push(audio);
  gst_buffer_unref(buffer);

  return true;
}
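Example #10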
GstFlowReturn GStreamerWrapper::onNewBufferFromAudioSource( GstAppSink* appsink, void* listener )
{
	GstBuffer* gstAudioSinkBuffer = gst_app_sink_pull_buffer( GST_APP_SINK( appsink ) );
	( ( GStreamerWrapper * )listener )->newAudioSinkBufferCallback( gstAudioSinkBuffer );
	gst_buffer_unref( gstAudioSinkBuffer );

	return GST_FLOW_OK;
}
Example #11
static GstFlowReturn
gst_nle_source_on_audio_buffer (GstAppSink * appsink, gpointer data)
{
  GstNleSource *nlesrc = GST_NLE_SOURCE (data);

  return gst_nle_source_push_buffer (nlesrc,
      gst_app_sink_pull_buffer (appsink), TRUE);
}
Example #12
BufferPtr ApplicationSink::pullBuffer()
{
    BufferPtr buf;
    if (d->appSink()) {
        buf = BufferPtr::wrap(gst_app_sink_pull_buffer(d->appSink()), false);
    }
    return buf;
}
Example #13
GstAlBuf* gst_al_pull_rtp_buffer(GstAppSink *sink)
{
	GstAlBuf *buf = g_new(GstAlBuf,1);
	buf->m_buffer = gst_app_sink_pull_buffer(sink);
	buf->m_dptr = gst_rtp_buffer_get_payload(buf->m_buffer);
	buf->m_dlen = gst_rtp_buffer_get_payload_len(buf->m_buffer);
	return buf;
}
Example #14
static GstFlowReturn on_new_buffer_from_source (GstAppSink * elt, void * data){
#if GST_VERSION_MAJOR==0
	shared_ptr<GstBuffer> buffer(gst_app_sink_pull_buffer (GST_APP_SINK (elt)),&gst_buffer_unref);
#else
	shared_ptr<GstSample> buffer(gst_app_sink_pull_sample (GST_APP_SINK (elt)),&gst_sample_unref);
#endif
	return ((ofGstUtils*)data)->buffer_cb(buffer);
}
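Handing gst_buffer_unref / gst_sample_unref to the shared_ptr as a custom deleter ties the GStreamer reference count to C++ scope: buffer_cb can keep copies of the pointer around, and the underlying buffer or sample is released automatically when the last copy goes away.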
Example #15
static GstFlowReturn on_new_buffer_from_source (GstAppSink * elt, void * data){
#if GST_VERSION_MAJOR==0
	GstBuffer *buffer = gst_app_sink_pull_buffer (GST_APP_SINK (elt));
#else
	GstSample *buffer = gst_app_sink_pull_sample (GST_APP_SINK (elt));
#endif
	return ((ofGstUtils*)data)->buffer_cb(buffer);
}
Example #16
GstAlBuf* gst_al_pull_buffer(GstAppSink *sink)
{
	GstAlBuf *buf = g_new(GstAlBuf,1);
	buf->m_buffer = gst_app_sink_pull_buffer(sink);
	buf->m_dptr = GST_BUFFER_DATA(buf->m_buffer);
	buf->m_dlen = GST_BUFFER_SIZE(buf->m_buffer);
	return buf;
}
Example #17
void StreamPipeline::NewBufferCallback(GstElement* app_sink, gpointer self) {
  StreamPipeline* me = reinterpret_cast<StreamPipeline*>(self);

  GstBuffer* buffer = gst_app_sink_pull_buffer((GstAppSink*)app_sink);
  me->buffer_.append((const char*)buffer->data, buffer->size);
  gst_buffer_unref(buffer);

  emit me->readyRead();
}
Example #18
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    void *data;

    data = malloc (100);
    memset (data, i, 100);

    buf = gst_app_buffer_new (data, 100, dont_eat_my_chicken_wings, data);
    printf ("%d: creating buffer for pointer %p, %p\n", i, data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstBuffer *buf;

    buf = gst_app_sink_pull_buffer (GST_APP_SINK (app->sink));
    printf ("retrieved buffer %p\n", buf);
    gst_buffer_unref (buf);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
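A side note on the example above: gst_app_buffer_new() belongs to the GStreamer 0.10 app library's GstAppBuffer API, and its third argument (dont_eat_my_chicken_wings here) is a finalize callback that frees the wrapped data once the buffer is no longer referenced.

Example #19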
GstFlowReturn AudioFileReader::handleBuffer(GstAppSink* sink)
{
    GstBuffer* buffer = gst_app_sink_pull_buffer(sink);
    if (!buffer)
        return GST_FLOW_ERROR;

    GstCaps* caps = gst_buffer_get_caps(buffer);
    GstStructure* structure = gst_caps_get_structure(caps, 0);

    gint channels = 0;
    if (!gst_structure_get_int(structure, "channels", &channels) || !channels) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        return GST_FLOW_ERROR;
    }

    gint sampleRate = 0;
    if (!gst_structure_get_int(structure, "rate", &sampleRate) || !sampleRate) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        return GST_FLOW_ERROR;
    }

    gint width = 0;
    if (!gst_structure_get_int(structure, "width", &width) || !width) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        return GST_FLOW_ERROR;
    }

    GstClockTime duration = (static_cast<guint64>(GST_BUFFER_SIZE(buffer)) * 8 * GST_SECOND) / (sampleRate * channels * width);
    int frames = GST_CLOCK_TIME_TO_FRAMES(duration, sampleRate);

    // Check the first audio channel. The buffer is supposed to store
    // data of a single channel anyway.
    GstAudioChannelPosition* positions = gst_audio_get_channel_positions(structure);
    switch (positions[0]) {
    case GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT:
        gst_buffer_list_iterator_add(m_frontLeftBuffersIterator, buffer);
        m_channelSize += frames;
        break;
    case GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT:
        gst_buffer_list_iterator_add(m_frontRightBuffersIterator, buffer);
        break;
    default:
        gst_buffer_unref(buffer);
        break;
    }

    g_free(positions);
    gst_caps_unref(caps);
    return GST_FLOW_OK;
}
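Ownership note: in GStreamer 0.10, gst_buffer_list_iterator_add() takes ownership of the buffer it appends, which is why the buffer is unreffed explicitly only in the default branch above.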
Example #20
static GstFlowReturn on_buffer(GstAppSink *sink, gpointer data) {
	Eyrie *e = (Eyrie *) data;
	if(e->recbin == NULL || gst_app_sink_is_eos(GST_APP_SINK(e->sink))) {
		return GST_FLOW_OK;
	}
	if(e->buf == NULL) {
		e->buf = gst_buffer_new();
	}
	GstBuffer *tmpbuf;
	tmpbuf = gst_app_sink_pull_buffer(GST_APP_SINK(e->sink));
	e->mutex->lock();
	e->buf = gst_buffer_join(e->buf, tmpbuf);
	e->mutex->unlock();
	return GST_FLOW_OK;
}
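Worth noting: gst_buffer_join() in GStreamer 0.10 takes ownership of both of its input buffers and returns the merged buffer, so neither e->buf nor tmpbuf needs a separate unref here.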
Example #21
void CGstDecoder::OnDecodedBuffer(GstElement *appsink, void *data)
{
  CGstDecoder *decoder = (CGstDecoder *)data;

  GstBuffer *buffer = gst_app_sink_pull_buffer(GST_APP_SINK(appsink));
  if (buffer)
  {
    if (decoder->m_callback)
      decoder->m_callback->OnDecodedBuffer(buffer);
    else
      gst_buffer_unref(buffer);
  }
  else
    printf("GStreamer: OnDecodedBuffer - Null Buffer\n");
}
Example #22
void AVMuxEncode::processAudioSinkData()
{
    GstBuffer *buffer;

    m_audioSinkLock.lock();
    if ((buffer = gst_app_sink_pull_buffer((GstAppSink *)(m_appAudioSink))) != NULL) {
        AVMUX_QUEUEDATA *qd = new AVMUX_QUEUEDATA;
        qd->data = QByteArray((const char *)GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
        qd->timestamp = m_lastQueuedAudioTimestamp;
        qd->param = m_lastQueuedAudioParam;
        m_audioSinkQ.enqueue(qd);
        if (m_audioCaps == NULL)
            m_audioCaps = gst_caps_to_string(GST_BUFFER_CAPS(buffer));
        gst_buffer_unref(buffer);
    }
    m_audioSinkLock.unlock();
}
Example #23
static void
shmdata_any_reader_on_new_buffer_from_source (GstElement * elt,
					      gpointer user_data)
{
  shmdata_any_reader_t *me = (shmdata_any_reader_t *) user_data;

  GstBuffer *buf;

  /* pull the next item, this can return NULL when there is no more data and
   * EOS has been received */
  buf = gst_app_sink_pull_buffer (GST_APP_SINK (me->sink_));

  if (me->on_data_ != NULL)
    {

      if (me->type_ == NULL
	  || gst_caps_is_always_compatible (me->data_caps_,
					    GST_BUFFER_CAPS (buf)))
	{
	  gchar *caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf)); 
	  me->on_data_ (me,
			(void *) buf,
			(void *) GST_BUFFER_DATA (buf),
			(int) GST_BUFFER_SIZE (buf),
			(unsigned long long)
			GST_TIME_AS_NSECONDS (GST_BUFFER_TIMESTAMP (buf)),
			(const char *)
			caps_string,
			(void *) me->on_data_user_data_);
	  g_free (caps_string);
	}
      else
	{
	  g_debug
	    ("incompatible data frame retrieved, data %p, data size %d, timestamp %llu, caps %"GST_PTR_FORMAT,
	     GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
	     GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)),
	     GST_BUFFER_CAPS (buf));
	}
    }
  /* if (buf) */
  /*  gst_buffer_unref (buf); */
}
Example #24
GstFlowReturn Chromaprinter::NewBufferCallback(GstAppSink* app_sink, gpointer self) {
  Chromaprinter* me = reinterpret_cast<Chromaprinter*>(self);
  if (me->finishing_) {
    return GST_FLOW_OK;
  }

  GstBuffer* buffer = gst_app_sink_pull_buffer(app_sink);
  me->buffer_.write(reinterpret_cast<const char*>(buffer->data), buffer->size);
  gst_buffer_unref(buffer);

  gint64 pos = 0;
  GstFormat format = GST_FORMAT_TIME;
  gboolean ret = gst_element_query_position(me->pipeline_, &format, &pos);
  if (ret && pos > 30 * kNsecPerSec) {
    me->finishing_ = true;
    g_main_loop_quit(me->event_loop_);
  }
  return GST_FLOW_OK;
}
Example #25
//
// grab the next buffer from the appsink (assumes the pipeline is already playing)
//
bool CvCapture_GStreamer::grabFrame()
{

    if(!pipeline)
        return false;

    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

    if(buffer)
        gst_buffer_unref(buffer);
    handleMessage();

    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
    if(!buffer)
        return false;

    return true;
}
Example #26
void AVMuxEncode::processVideoSinkData()
{
    GstBuffer *buffer;
    AVMUX_QUEUEDATA *qd;

    m_videoSinkLock.lock();
    if ((buffer = gst_app_sink_pull_buffer((GstAppSink *)(m_appVideoSink))) != NULL) {
        qd = new AVMUX_QUEUEDATA;
        qd->data = QByteArray((const char *)GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
        qd->timestamp = m_lastQueuedVideoTimestamp;
        qd->param = m_lastQueuedVideoParam;
        m_videoSinkQ.enqueue(qd);

        if (m_videoCaps == NULL)
            m_videoCaps = gst_caps_to_string(GST_BUFFER_CAPS(buffer));
//        qDebug() << "Video ts " << GST_BUFFER_TIMESTAMP(buffer);
        gst_buffer_unref(buffer);
     }
    m_videoSinkLock.unlock();
}
Example #27
static GstFlowReturn
gst_nle_source_on_video_buffer (GstAppSink * appsink, gpointer data)
{
  GstNleSrcItem *item;
  GstNleSource *nlesrc;
  GstBuffer *buf;
  GstFlowReturn ret;

  nlesrc = GST_NLE_SOURCE (data);
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  buf = gst_app_sink_pull_buffer (appsink);

  if (item->still_picture) {
    ret = gst_nle_source_push_still_picture (nlesrc, item, buf);
  } else {
    ret = gst_nle_source_push_buffer (nlesrc, buf, FALSE);
  }

  return ret;
}
Example #28
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					//memcpy (pixels.getPixels(), GST_BUFFER_DATA (buffer), size);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
	}else{
		ofLog(OF_LOG_WARNING,"ofGstVideoUtils not loaded");
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example #29
void ofGstUtils::update(){
	gstHandleMessage();
	if (bLoaded == true){
		if(!bFrameByFrame){
			ofGstDataLock(&gstData);

				bHavePixelsChanged = gstData.bHavePixelsChanged;
				if (bHavePixelsChanged){
					gstData.bHavePixelsChanged=false;
					bIsMovieDone = false;
					memcpy(pixels,gstData.pixels,width*height*bpp);
				}

			ofGstDataUnlock(&gstData);
		}else{
			GstBuffer *buffer;


			//get the buffer from appsink
			if(bPaused) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (gstSink));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (gstSink));

			if(buffer){
				guint size = GST_BUFFER_SIZE (buffer);
				if(pixels){
					memcpy (pixels, GST_BUFFER_DATA (buffer), size);
					bHavePixelsChanged=true;
				}
				/// we don't need the appsink buffer anymore
				gst_buffer_unref (buffer);
			}
		}
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example #30
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

bool
GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before we return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before we return or continue */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}