void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), rsample->presentationTime().toFloat(), rsample->presentationSize().width(), rsample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
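For context, a minimal sketch of the push pattern used above (hypothetical helper; push_sample here is WebKit's local wrapper, which per the comment forwards to gst_app_src_push_sample()). The extra ref is taken because the sample is shared with the MediaSample wrapper, and gst_app_src_push_sample() is transfer-none, so the local ref is dropped right after the call:

#include <gst/app/gstappsrc.h>

/* hypothetical helper mirroring enqueueSample() above */
static void
enqueue_displayable_sample (GstElement *appsrc, GstSample *sample)
{
    GstSample *local = gst_sample_ref (sample);   /* sample is shared */
    GstBuffer *buffer = gst_sample_get_buffer (local);

    if (buffer != NULL)
        GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DECODE_ONLY);

    gst_app_src_push_sample (GST_APP_SRC (appsrc), local);
    gst_sample_unref (local);   /* transfer-none: the ref is still ours */
}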
Example #2
/* The appsink has received a sample */
static GstFlowReturn appsink_new_sample_cb(GstAppSink *sink,
					   gpointer user_data)
{
	struct videnc_state *st = user_data;
	GstSample *sample;
	GstBuffer *buffer;
	GstMapInfo info;
	guint8 *data;
	gsize size;

	/* Retrieve the sample */
	sample = gst_app_sink_pull_sample(sink);

	if (sample) {
		buffer = gst_sample_get_buffer(sample);

		/* map for reading; skip packetizing if mapping fails */
		if (gst_buffer_map(buffer, &info, GST_MAP_READ)) {
			data = info.data;
			size = info.size;

			gst_video_h264_packetize(data, size,
						 st->encoder.pktsize,
						 st->pkth, st->arg);

			gst_buffer_unmap(buffer, &info);
		}

		gst_sample_unref(sample);
	}

	return GST_FLOW_OK;
}
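A sketch of how a handler like this is usually wired up (assumptions: "sink" is the pipeline's appsink and "st" the encoder state passed as user_data; with the signal API the "emit-signals" property must be enabled first):

#include <gst/app/gstappsink.h>

/* hypothetical wiring helper */
static void
connect_encoder_appsink (GstElement *sink, struct videnc_state *st)
{
	/* the "new-sample" signal only fires when emit-signals is TRUE */
	g_object_set (sink, "emit-signals", TRUE, NULL);
	g_signal_connect (sink, "new-sample",
			  G_CALLBACK (appsink_new_sample_cb), st);
}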
/*!
 * \brief OpenIMAJCapGStreamer::nextFrame
 * \return
 * Grabs a sample from the pipeline, awaiting consumption by getImage.
 * The pipeline is started if it was not running yet.
 */
bool OpenIMAJCapGStreamer::nextFrame()
{
    if(!pipeline)
        return false;
    
    // start the pipeline if it was not in playing state yet
    if(!this->isPipelinePlaying())
        this->startPipeline();
    
    // bail out if EOS
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;
    
    if(sample)
        gst_sample_unref(sample);
    
    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    
    if(!sample)
        return false;
    
    buffer = gst_sample_get_buffer(sample);
    
    if(!buffer)
        return false;
    
    return true;
}
Example #4
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
#if GST_VERSION_MAJOR==0
					if(prevBuffer) gst_buffer_unref (prevBuffer);
#else
					if(prevBuffer) gst_sample_unref (prevBuffer);
#endif
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer *buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_sample_unref (prevBuffer);
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = sample;
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
		ofLogWarning("ofGstVideoUtils") << "update(): ofGstVideoUtils not loaded";
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example #5
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			ofScopedLock lock(mutex);
			bHavePixelsChanged = bBackPixelsChanged;
			if (bHavePixelsChanged){
				bBackPixelsChanged=false;
				swap(pixels,backPixels);
				#ifdef OF_USE_GST_GL
				if(backTexture.isAllocated()){
					frontTexture.getTextureData() = backTexture.getTextureData();
					frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
					frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
				}
				#endif
				if(!copyPixels){
					frontBuffer = backBuffer;
				}
			}
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = shared_ptr<GstBuffer>(buffer,gst_buffer_unref);
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer * buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
		ofLogWarning("ofGstVideoUtils") << "update(): ofGstVideoUtils not loaded";
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example #6
/*!
 * \brief CvCapture_GStreamer::grabFrame
 * \return
 * Grabs a sample from the pipeline, awaiting consumption by retrieveFrame.
 * The pipeline is started if it was not running yet
 */
bool CvCapture_GStreamer::grabFrame()
{
    if(!pipeline)
        return false;

    // start the pipeline if it was not in playing state yet
    if(!this->isPipelinePlaying())
        this->startPipeline();

    // bail out if EOS
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

#if GST_VERSION_MAJOR == 0
    if(buffer)
        gst_buffer_unref(buffer);

    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
    if(sample)
        gst_sample_unref(sample);

    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));

    if(!sample)
        return false;

    buffer = gst_sample_get_buffer(sample);
#endif

    if(!buffer)
        return false;

    return true;
}
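Worth noting as a behavioral detail: gst_app_sink_pull_sample() itself returns NULL once the stream reaches EOS or the pipeline flushes, so the explicit gst_app_sink_is_eos() test above is an early-out rather than a requirement. A minimal pull loop can rely on the NULL return alone (hypothetical helper):

#include <gst/app/gstappsink.h>

/* hypothetical drain loop */
static void
drain_appsink (GstElement *sink)
{
    GstSample *s;

    /* pull blocks until a sample is ready; NULL means EOS or flushing */
    while ((s = gst_app_sink_pull_sample (GST_APP_SINK (sink))) != NULL) {
        /* ... consume the sample ... */
        gst_sample_unref (s);
    }
}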
Example #7
static GstFlowReturn
post_recv_sample (GstElement * appsink, gpointer user_data)
{
  GstSample *sample = NULL;
  GstFlowReturn ret;
  GstBuffer *buffer;
  /* httpep, method and the ck_assert_int_eq() below come from the
   * enclosing test file; they are not declared in this snippet */

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_ERROR;

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  g_signal_emit_by_name (httpep, "push-buffer", buffer, &ret);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to httpep %s. Ret code %d",
        GST_ELEMENT_NAME (httpep), ret);
  }

  g_object_get (G_OBJECT (httpep), "http-method", &method, NULL);
  ck_assert_int_eq (method, KMS_HTTP_ENDPOINT_METHOD_POST);

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
bool GstAppSinkPipeline::GetLatestFrameBuffer(void** frameBuffer)
{
	bool retrieving = false;
	
	boost::lock_guard<boost::mutex> guard(bufferMutex);
	if (retrievedBuffer == 0)
	{
		if (currentBuffer != 0)
		{
			retrievedBuffer = currentBuffer;
			currentBuffer = 0;
			retrieving = true;
		}
	}
	
	if (retrieving)
	{

		GstBuffer*		buffer;
		GstMapInfo 		map;

		buffer = gst_sample_get_buffer (retrievedBuffer);
		
		if (buffer)
		{
			gst_buffer_map (buffer, &map, GST_MAP_READ);
			(*frameBuffer) = map.data;
			// NOTE: unmapping immediately means map.data is only safe
			// for as long as the retrieved sample keeps the underlying
			// memory alive; see the safer variant sketched below.
			gst_buffer_unmap (buffer, &map);
		}
		else return false;
	}
	
	return true;
}
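Data returned from gst_buffer_map() is only guaranteed valid while the buffer stays mapped, so a safer shape is to keep the GstMapInfo open until the consumer is done. A sketch, under the assumption that the caller pairs every successful hold_frame() with release_frame():

#include <gst/gst.h>

typedef struct {
	GstSample *sample;   /* keeps the buffer's memory alive */
	GstBuffer *buffer;
	GstMapInfo map;
} HeldFrame;

static gboolean
hold_frame (GstSample *sample, HeldFrame *out)
{
	out->buffer = gst_sample_get_buffer (sample);
	if (out->buffer == NULL ||
	    !gst_buffer_map (out->buffer, &out->map, GST_MAP_READ))
		return FALSE;

	out->sample = gst_sample_ref (sample);
	return TRUE;   /* out->map.data stays valid until release_frame() */
}

static void
release_frame (HeldFrame *f)
{
	gst_buffer_unmap (f->buffer, &f->map);
	gst_sample_unref (f->sample);
}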
GdkPixbuf *
totem_gst_tag_list_get_cover (GstTagList *tag_list)
{
  GstSample *cover_sample;

  g_return_val_if_fail (tag_list != NULL, NULL);

  cover_sample = totem_gst_tag_list_get_cover_real (tag_list);
  /* Fall back to the preview image */
  if (!cover_sample) {
    gst_tag_list_get_sample_index (tag_list, GST_TAG_PREVIEW_IMAGE, 0,
                                   &cover_sample);
  }

  if (cover_sample) {
    GstBuffer *buffer;
    GdkPixbuf *pixbuf;

    buffer = gst_sample_get_buffer (cover_sample);
    pixbuf = totem_gst_buffer_to_pixbuf (buffer);
    gst_sample_unref (cover_sample);
    return pixbuf;
  }

  return NULL;
}
GstFlowReturn on_new_preroll(GstAppSink *appsink, gpointer user_data) {
	GstSample* sample = NULL;
	GstBuffer* buffer;
	GstMemory* memory;
	GstMapInfo info;
	GstClockTime clocktime;

	g_debug("on_new_preroll ");
	sample = gst_app_sink_pull_sample (appsink);
	if (sample) {
		g_debug("pulled sample\n");
		buffer = gst_sample_get_buffer(sample);
		clocktime = GST_BUFFER_PTS(buffer);
		memory = gst_buffer_get_memory(buffer, 0);
		gst_memory_map(memory, &info, GST_MAP_READ);
		/*
			You can access raw memory at info.data
		*/
		if(app.output_callback)
			app.output_callback(info.data, info.size);
		//fwrite(info.data, 1, info.size, app.outfile);

		gst_memory_unmap(memory, &info);
		gst_memory_unref(memory);
		gst_sample_unref(sample);
	}
	return GST_FLOW_OK;
}
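The example above maps a single GstMemory with gst_buffer_get_memory(buffer, 0), which only covers the first memory block of the buffer. A hypothetical equivalent using the buffer-level API, which merges all blocks into one contiguous view:

#include <gst/gst.h>

static void
read_whole_buffer (GstBuffer *buffer)
{
	GstMapInfo info;

	if (gst_buffer_map (buffer, &info, GST_MAP_READ)) {
		/* info.data / info.size now span the complete buffer */
		gst_buffer_unmap (buffer, &info);
	}
}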
static void
convert_frame_need_data_callback (GstElement * src, guint size,
    GstVideoConvertSampleContext * context)
{
  GstFlowReturn ret = GST_FLOW_ERROR;
  GError *error;
  GstBuffer *buffer;

  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  buffer = gst_sample_get_buffer (context->sample);
  g_signal_emit_by_name (src, "push-buffer", buffer, &ret);
  gst_sample_unref (context->sample);
  context->sample = NULL;

  if (ret != GST_FLOW_OK) {
    GST_ERROR ("Could not push video frame: %s", gst_flow_get_name (ret));

    error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
        "Could not push video frame: %s", gst_flow_get_name (ret));

    convert_frame_finish (context, NULL, error);
  }

  g_signal_handlers_disconnect_by_func (src, convert_frame_need_data_callback,
      context);

done:
  g_mutex_unlock (&context->mutex);
}
GstFlowReturn frame_handler(GstSample * sample, GStreamerFramesReceiver * pClass)
{
	GstBuffer * buffer = gst_sample_get_buffer(sample);

	GstMapInfo info;
	gst_buffer_map(buffer, &info, GST_MAP_READ);

	if (pClass)
	{
		if (pClass -> InputFrameWidth() == 0)
		{
			int width, height;
			PixelFormat pixelFormat;
			GstCaps *caps = gst_sample_get_caps(sample);
			ExtractImageParams(caps, width, height, pixelFormat);
			pClass -> InputFrameWidth()  = width;
			pClass -> InputFrameHeight() = height;
			pClass -> InputPixelFormat() = pixelFormat;
		}
		
		pClass -> CopyFrameData(info.data, info.size);
	}

	gst_buffer_unmap (buffer, &info);

	return GST_FLOW_OK;
}
Example #13
GstFlowReturn Capture::newSample(GstAppSink* sink, gpointer gSelf)
{
    //g_print("New sample...");
    GstSample* sample = NULL;
    GstBuffer* sampleBuffer = NULL;
    GstMapInfo bufferInfo;

    Capture* self = static_cast<Capture* >(gSelf);
    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if(sample != NULL)
    {
        sampleBuffer = gst_sample_get_buffer(sample);
        if(sampleBuffer != NULL)
        {
            gst_buffer_map(sampleBuffer, &bufferInfo, GST_MAP_READ);
            //gsize sz = gst_buffer_get_size(sampleBuffer);
            //g_print("%lu\n", sz);
            self->m_mutex.lock();
            self->m_image = QImage(bufferInfo.data, 600, 400, QImage::Format_Mono);
            self->m_mutex.unlock();
            gst_buffer_unmap(sampleBuffer, &bufferInfo);
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}
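The QImage above hardcodes 600x400; the frame dimensions can instead be read from the sample's negotiated caps. A hypothetical replacement:

#include <gst/gst.h>

static gboolean
sample_dimensions (GstSample *sample, gint *width, gint *height)
{
    GstCaps *caps = gst_sample_get_caps (sample);  /* transfer none */
    GstStructure *s;

    if (caps == NULL)
        return FALSE;

    s = gst_caps_get_structure (caps, 0);
    return gst_structure_get_int (s, "width", width) &&
           gst_structure_get_int (s, "height", height);
}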
Example #14
static GstFlowReturn new_sample_callback (GstAppSink * sink, gpointer user_data)
{
        GstBuffer *buffer;
        GstSample *sample;
        Encoder *encoder = (Encoder *)user_data;

        *(encoder->output->heartbeat) = gst_clock_get_time (encoder->system_clock);
        sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
        buffer = gst_sample_get_buffer (sample);

        sem_wait (encoder->output->semaphore);

        (*(encoder->output->total_count)) += gst_buffer_get_size (buffer);

        /* update head_addr, free enough memory for current buffer. */
        while (cache_free (encoder) < gst_buffer_get_size (buffer) + 12) { /* timestamp + gop size = 12 */
                move_head (encoder);
        }

        if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
                /* 
                 * random access point found.
                 * write previous gop size to 4 bytes reservation,
                 * write current gop timestamp,
                 * reserve 4 bytes for size of current gop,
                 */
                if (encoder->mqdes == -1) {
                        /* no m3u8 output */
                        move_last_rap (encoder, buffer);

                } else if (GST_BUFFER_PTS (buffer) == encoder->last_running_time) {
                        gchar *msg;

                        move_last_rap (encoder, buffer);
                        msg = g_strdup_printf ("%lu", encoder->last_segment_duration);
                        if (mq_send (encoder->mqdes, msg, strlen (msg), 1) == -1) {
                                GST_ERROR ("mq_send error: %s", g_strerror (errno));
                        }
                        g_free (msg);
                        encoder->last_running_time = GST_CLOCK_TIME_NONE;
                }
        }

        /* udpstreaming? */
        if (encoder->udpstreaming) {
                udp_streaming (encoder, buffer);
        }

        /*
         * copy buffer to cache.
         * update tail_addr
         */
        copy_buffer (encoder, buffer);

        sem_post (encoder->output->semaphore);

        gst_sample_unref (sample);

        return GST_FLOW_OK;
}
Example #15
GstFlowReturn VideoSender::newBufferCB(GstAppSink *sink, gpointer user_data)
{
  qDebug() << "In" << __FUNCTION__;

  VideoSender *vs = static_cast<VideoSender *>(user_data);

  // Get new video sample
  GstSample *sample = gst_app_sink_pull_sample(sink);
  if (sample == NULL) {
    qWarning("%s: Failed to get new sample", __FUNCTION__);
    return GST_FLOW_OK;
  }
  
  // FIXME: zero copy?
  GstBuffer *buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  QByteArray *data = NULL;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
    // Copy the data to QByteArray
    data = new QByteArray((char *)map.data, map.size);
    vs->emitVideo(data);
    gst_buffer_unmap(buffer, &map);
  } else {
    qWarning("Error with gst_buffer_map");
  }
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
void InbandTextTrackPrivateGStreamer::notifyTrackOfSample()
{
    Vector<GRefPtr<GstSample> > samples;
    {
        MutexLocker lock(m_sampleMutex);
        m_pendingSamples.swap(samples);
    }

    for (size_t i = 0; i < samples.size(); ++i) {
        GRefPtr<GstSample> sample = samples[i];
        GstBuffer* buffer = gst_sample_get_buffer(sample.get());
        if (!buffer) {
            WARN_MEDIA_MESSAGE("Track %d got sample with no buffer.", m_index);
            continue;
        }
        GstMapInfo info;
        gboolean ret = gst_buffer_map(buffer, &info, GST_MAP_READ);
        ASSERT(ret);
        if (!ret) {
            WARN_MEDIA_MESSAGE("Track %d unable to map buffer.", m_index);
            continue;
        }

        INFO_MEDIA_MESSAGE("Track %d parsing sample: %.*s", m_index, static_cast<int>(info.size),
            reinterpret_cast<char*>(info.data));
        client()->parseWebVTTCueData(this, reinterpret_cast<char*>(info.data), info.size);
        gst_buffer_unmap(buffer, &info);
    }
}
Example #17
bool MediaImpl::_videoPull()
{
//  qDebug() << "video pull" << endl;

  GstSample *sample = NULL;
  GstStructure *structure = NULL;
  GstCaps* caps = NULL;
  GstBuffer *buffer = NULL;

  // Retrieve the sample
  sample = queue_input_buf.get();

  if (sample == NULL)
  {
    // Either means we are not playing or we have reached EOS.
    return false;
  }
  else
  {
    caps = gst_sample_get_caps(sample);
    structure = gst_caps_get_structure(caps, 0);
    buffer = gst_sample_get_buffer(sample);

    int width  = 640;
    int height = 480;
    int bpp    = 32;
    int depth  = 32;

    gst_structure_get_int(structure, "width",  &width);
    gst_structure_get_int(structure, "height", &height);
    // TODO: use gst_video_info_from_caps if we want to support many different formats
    // otherwise, since we set the caps ourselves, we can assume bpp is 32 and depth too.

    _width = width;
    _height = height;
    int size = _width * _height;

//    video->resize(width, height);

//        qDebug() << gst_structure_to_string(capsStruct) << endl;
//        qDebug() << width << "x" << height << "=" << width*height << "(" << width*height*4 << "," << width*height*3 << ")" << endl;
//        qDebug() << "bpp: " << bpp << " depth: " << depth << endl;
//        qDebug() << "Buffer size: " << GST_BUFFER_SIZE(buffer) << endl;

    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_READ))
    {
      // For debugging:
      //gst_util_dump_mem(map.data, map.size)

      // NOTE: _data outlives the unmap below; this only stays valid
      // because the sample is kept in _frame until it is recycled
      // through queue_output_buf.
      _data = map.data;
      gst_buffer_unmap(buffer, &map);
      if (this->_frame != NULL)
        queue_output_buf.put(this->_frame);
      _frame = sample;
    }

    return true;
  }
}
Example #18
/*
  This function will be called in a separate thread when our appsink
  says there is data for us. user_data has to be defined
  when calling g_signal_connect. It can be used to pass objects etc.
  from your other function to the callback.
*/
GstFlowReturn callback (GstElement* sink, void* user_data)
{
    GstSample* sample = NULL;
    /* Retrieve the sample (the action signal returns a new reference) */
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);

    if (sample)
    {
        // we have a valid sample
        // do things with the image here
        static guint framecount = 0;
        int pixel_data = -1;

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        GstMapInfo info; // contains the actual image
        if (gst_buffer_map(buffer, &info, GST_MAP_READ))
        {
            GstVideoInfo* video_info = gst_video_info_new();
            if (!gst_video_info_from_caps(video_info, gst_sample_get_caps(sample)))
            {
                // Could not parse video info (should not happen)
                g_warning("Failed to parse video info");
                // release everything we hold before bailing out
                gst_video_info_free(video_info);
                gst_buffer_unmap(buffer, &info);
                gst_sample_unref(sample);
                return GST_FLOW_ERROR;
            }

            /* Get a pointer to the image data */
            unsigned char* data = info.data;

            /* Get the pixel value of the center pixel. Note that "stride"
               here is really bytes per pixel, not the row stride; see the
               stride-aware sketch after this example. */
            int stride = video_info->finfo->bits / 8;
            unsigned int pixel_offset = video_info->width / 2 * stride +
                video_info->width * video_info->height / 2 * stride;

            // this is only one pixel
            // when dealing with formats like BGRx
            // pixel_data will have to consist out of
            // pixel_offset   => B
            // pixel_offset+1 => G
            // pixel_offset+2 => R
            // pixel_offset+3 => x
            pixel_data = info.data[pixel_offset];

            gst_buffer_unmap(buffer, &info);
            gst_video_info_free(video_info);
        }

        GstClockTime timestamp = GST_BUFFER_PTS(buffer);
        g_print("Captured frame %d, Pixel Value=%03d Timestamp=%" GST_TIME_FORMAT "            \r",
                framecount, pixel_data,
                GST_TIME_ARGS(timestamp));
        framecount++;


        // delete our reference so that gstreamer can handle the sample
        gst_sample_unref (sample);
    }
    return GST_FLOW_OK;
}
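Since rows can be padded, the offset math above can be made stride-aware with the values GstVideoInfo already carries. A hypothetical helper for packed formats such as BGRx:

#include <gst/video/video.h>

/* byte offset of the center pixel, using the real row stride */
static gsize
center_pixel_offset (const GstVideoInfo *video_info)
{
    gint row_stride = GST_VIDEO_INFO_PLANE_STRIDE (video_info, 0);
    gint pixel_stride = GST_VIDEO_INFO_COMP_PSTRIDE (video_info, 0);

    return (gsize) (video_info->height / 2) * row_stride
         + (gsize) (video_info->width / 2) * pixel_stride;
}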
Example #19
static GdkPixbuf *
gst_thumbnailer_cover_from_tags (GstTagList   *tags,
                                 GCancellable *cancellable)
{
  GstSample          *cover = NULL;
  guint               i;
  GstSample          *sample;
  GstCaps            *caps;
  const GstStructure *caps_struct;
  gint                type;
  GstBuffer          *buffer;
  GdkPixbuf          *pixbuf = NULL;

  for (i = 0; ; i++)
    {
      if (g_cancellable_is_cancelled (cancellable))
        break;

      /* look for image in the tags */
      if (!gst_tag_list_get_sample_index (tags, GST_TAG_IMAGE, i, &sample))
        break;

      caps = gst_sample_get_caps (sample);
      caps_struct = gst_caps_get_structure (caps, 0);
      gst_structure_get_enum (caps_struct,
                              "image-type",
                              GST_TYPE_TAG_IMAGE_TYPE,
                              &type);

      if (type == GST_TAG_IMAGE_TYPE_FRONT_COVER)
        {
          /* found the cover */
          cover = sample;
          break;
        }

      gst_sample_unref (sample);
    }

  if (cover == NULL
      && !g_cancellable_is_cancelled (cancellable))
    {
      /* look for preview image */
      gst_tag_list_get_sample_index (tags, GST_TAG_PREVIEW_IMAGE, 0, &cover);
    }

  if (cover != NULL)
    {
      /* create image */
      buffer = gst_sample_get_buffer (cover);
      pixbuf = gst_thumbnailer_buffer_to_pixbuf (buffer);
      gst_sample_unref (cover);
    }

  return pixbuf;
}
Example #20
/* GStreamer thread
 *
 * We cannot use GStreamer's signals because they are not always run in
 * the main context. So use a callback (lower overhead) and have it pull
 * the sample to avoid a race with free_pipeline(). This means queuing the
 * decoded frames outside GStreamer. So while we're at it, also schedule
 * the frame display ourselves in schedule_frame().
 */
static GstFlowReturn new_sample(GstAppSink *gstappsink, gpointer video_decoder)
{
    SpiceGstDecoder *decoder = video_decoder;

    GstSample *sample = gst_app_sink_pull_sample(decoder->appsink);
    GstBuffer *buffer = sample ? gst_sample_get_buffer(sample) : NULL;
    if (sample) {
        g_mutex_lock(&decoder->queues_mutex);

        /* gst_app_sink_pull_sample() sometimes returns the same buffer twice
         * or buffers that have a modified, and thus unrecognizable, PTS.
         * Blindly removing frames from the decoding_queue until we find a
         * match would only empty the queue, resulting in later buffers not
         * finding a match either, etc. So check the buffer has a matching
         * frame first.
         */
        SpiceGstFrame *gstframe;
        GList *l = g_queue_peek_head_link(decoder->decoding_queue);
        while (l) {
            gstframe = l->data;
            if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                /* The frame is now ready for display */
                gstframe->sample = sample;
                g_queue_push_tail(decoder->display_queue, gstframe);

                /* Now that we know there is a match, remove it and the older
                 * frames from the decoding queue.
                 */
                while ((gstframe = g_queue_pop_head(decoder->decoding_queue))) {
                    if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                        break;
                    }
                    /* The GStreamer pipeline dropped the corresponding
                     * buffer.
                     */
                    SPICE_DEBUG("the GStreamer pipeline dropped a frame");
                    free_gst_frame(gstframe);
                }
                break;
            }
            l = l->next;
        }
        if (!l) {
            spice_warning("got an unexpected decoded buffer!");
            gst_sample_unref(sample);
        }

        g_mutex_unlock(&decoder->queues_mutex);
        schedule_frame(decoder);
    } else {
        spice_warning("GStreamer error: could not pull sample");
    }
    return GST_FLOW_OK;
}
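A sketch of the registration matching the rationale in the comment above (callbacks instead of signals, for lower overhead and no main-context dependency); it assumes decoder->appsink was already created:

#include <gst/app/gstappsink.h>

static void
attach_decoder_callbacks (SpiceGstDecoder *decoder)
{
    GstAppSinkCallbacks cbs = { 0 };

    cbs.new_sample = new_sample;
    /* last argument is a GDestroyNotify for the user_data, unused here */
    gst_app_sink_set_callbacks (decoder->appsink, &cbs, decoder, NULL);
}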
static void
symmetry_test_assert_passthrough (SymmetryTest * st, GstBuffer * in)
{
  gpointer copy;
  gsize data_size;
  GstSample *out;

  gst_buffer_extract_dup (in, 0, -1, &copy, &data_size);

  fail_unless (gst_app_src_push_buffer (st->sink_src, in) == GST_FLOW_OK);
  in = NULL;
  out = gst_app_sink_pull_sample (st->src_sink);
  fail_unless (out != NULL);

  fail_unless (gst_buffer_get_size (gst_sample_get_buffer (out)) == data_size);
  fail_unless (gst_buffer_memcmp (gst_sample_get_buffer (out), 0, copy,
          data_size) == 0);
  g_free (copy);
  gst_sample_unref (out);
}
Example #22
void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
{
    ASSERT(WTF::isMainThread());

    AtomicString trackId = mediaSample->trackID();

    GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));

    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_WARNING("No stream!");
        return;
    }

    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    MediaTime lastEnqueuedTime = stream->lastEnqueuedTime;

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GRefPtr<GstSample> gstSample = sample->sample();
        GstBuffer* buffer = gst_sample_get_buffer(gstSample.get());
        lastEnqueuedTime = sample->presentationTime();

        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
        pushSample(GST_APP_SRC(appsrc), gstSample.get());
        // gst_app_src_push_sample() uses transfer-none for gstSample.

        stream->lastEnqueuedTime = lastEnqueuedTime;
    }
}
Example #23
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			std::unique_lock<std::mutex> lock(mutex);
			bHavePixelsChanged = bBackPixelsChanged;
			if (bHavePixelsChanged){
				bBackPixelsChanged=false;
				swap(pixels,backPixels);
				#ifdef OF_USE_GST_GL
				if(backTexture.isAllocated()){
					frontTexture.getTextureData() = backTexture.getTextureData();
					frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
					frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
				}
				#endif
				if(!copyPixels){
					frontBuffer = backBuffer;
				}
			}
		}else{
#if GST_VERSION_MAJOR==0
			ofLogError() << "frame by frame doesn't work any more in 0.10";
#else
			GstBuffer * buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
#endif
		}
	}else{
		ofLogWarning("ofGstVideoUtils") << "update(): ofGstVideoUtils not loaded";
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example #24
CAMLprim value ocaml_gstreamer_appsink_pull_buffer(value _as, value string_mode)
{
  CAMLparam1(_as);
  CAMLlocal1(ans);
  appsink *as = Appsink_val(_as);
  GstSample *gstsample;
  GstBuffer *gstbuf;
  GstMapInfo map;
  intnat len;
  gboolean ret;

  caml_release_runtime_system();
  gstsample = gst_app_sink_pull_sample(as->appsink);
  caml_acquire_runtime_system();

  if (!gstsample)
    {
      if (gst_app_sink_is_eos(as->appsink))
        caml_raise_constant(*caml_named_value("gstreamer_exn_eos"));
      else
        caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
    }

  caml_release_runtime_system();
  gstbuf = gst_sample_get_buffer(gstsample);
  caml_acquire_runtime_system();

  if (!gstbuf) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));

  caml_release_runtime_system();
  ret = gst_buffer_map(gstbuf, &map, GST_MAP_READ);
  caml_acquire_runtime_system();

  if (!ret) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));

  len = map.size;
  if (string_mode == Val_false) {
    ans = caml_ba_alloc(CAML_BA_C_LAYOUT | CAML_BA_UINT8, 1, NULL, &len);
    memcpy(Caml_ba_data_val(ans), map.data, len);
  } else {
    ans = caml_alloc_string(len);
    memcpy(String_val(ans), map.data, len);
  }

  caml_release_runtime_system();
  gst_buffer_unmap(gstbuf, &map);
  gst_sample_unref(gstsample);
  caml_acquire_runtime_system();

  CAMLreturn(ans);
}
static void
gss_program_jpeg_resource (GssTransaction * t)
{
  GssProgram *program = (GssProgram *) t->resource->priv;
#if GST_CHECK_VERSION(1,0,0)
  GstSample *sample = NULL;
#endif
  GstBuffer *buffer = NULL;

  if (!program->enable_streaming || program->state != GSS_PROGRAM_STATE_RUNNING) {
    soup_message_set_status (t->msg, SOUP_STATUS_NO_CONTENT);
    return;
  }
#if GST_CHECK_VERSION(1,0,0)
  if (program->jpegsink) {
    g_object_get (program->jpegsink, "last-sample", &sample, NULL);
  }
  if (sample) {
    GstMapInfo mapinfo;

    buffer = gst_sample_get_buffer (sample);
    if (buffer) {
      soup_message_set_status (t->msg, SOUP_STATUS_OK);

      gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
      soup_message_set_response (t->msg, "image/jpeg", SOUP_MEMORY_COPY,
          (char *) mapinfo.data, mapinfo.size);

      gst_buffer_unmap (buffer, &mapinfo);
    }
    gst_sample_unref (sample);
    return;
  }
#else
  if (program->jpegsink) {
    g_object_get (program->jpegsink, "last-buffer", &buffer, NULL);
  }

  if (buffer) {
    soup_message_set_status (t->msg, SOUP_STATUS_OK);

    soup_message_set_response (t->msg, "image/jpeg", SOUP_MEMORY_COPY,
        (char *) GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));

    gst_buffer_unref (buffer);
    return;
  }
#endif

  gss_html_error_404 (t->server, t->msg);
}
void PlaybackPipeline::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample> > samples)
{
    ASSERT(WTF::isMainThread());

    if (samples.size() == 0) {
        LOG_MEDIA_MESSAGE("No samples, trackId unknown");
        return;
    }

    AtomicString trackId = samples[0]->trackID();
    LOG_MEDIA_MESSAGE("flushAndEnqueueNonDisplayingSamples: trackId=%s PTS[0]=%f ... PTS[n]=%f", trackId.string().utf8().data(), samples[0]->presentationTime().toFloat(), samples[samples.size()-1]->presentationTime().toFloat());

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Flushing and re-enqueing %d samples for stream %s", samples.size(), trackId.string().utf8().data());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    // Actually no need to flush. The seek preparations have done it for us.

    for (Vector<RefPtr<MediaSample> >::iterator it = samples.begin(); it != samples.end(); ++it) {
        GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(it->get());
        if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
            GstSample* gstsample = gst_sample_ref(sample->sample());
            GST_BUFFER_FLAG_SET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
            push_sample(GST_APP_SRC(appsrc), gstsample);
            // gst_app_src_push_sample() uses transfer-none for gstsample
            gst_sample_unref(gstsample);
        }
    }
}
Example #27
/* main context */
static gboolean display_frame(gpointer video_decoder)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
    SpiceGstFrame *gstframe;
    GstCaps *caps;
    gint width, height;
    GstStructure *s;
    GstBuffer *buffer;
    GstMapInfo mapinfo;

    g_mutex_lock(&decoder->queues_mutex);
    decoder->timer_id = 0;
    gstframe = g_queue_pop_head(decoder->display_queue);
    g_mutex_unlock(&decoder->queues_mutex);
    /* If the queue is empty we don't even need to reschedule */
    g_return_val_if_fail(gstframe, G_SOURCE_REMOVE);

    if (!gstframe->sample) {
        spice_warning("got a frame without a sample!");
        goto error;
    }

    caps = gst_sample_get_caps(gstframe->sample);
    if (!caps) {
        spice_warning("GStreamer error: could not get the caps of the sample");
        goto error;
    }

    s = gst_caps_get_structure(caps, 0);
    if (!gst_structure_get_int(s, "width", &width) ||
        !gst_structure_get_int(s, "height", &height)) {
        spice_warning("GStreamer error: could not get the size of the frame");
        goto error;
    }

    buffer = gst_sample_get_buffer(gstframe->sample);
    if (!gst_buffer_map(buffer, &mapinfo, GST_MAP_READ)) {
        spice_warning("GStreamer error: could not map the buffer");
        goto error;
    }

    stream_display_frame(decoder->base.stream, gstframe->frame,
                         width, height, mapinfo.data);
    gst_buffer_unmap(buffer, &mapinfo);

 error:
    free_gst_frame(gstframe);
    schedule_frame(decoder);
    return G_SOURCE_REMOVE;
}
Example #28
void MediaPlayer::drawVideoFrame(QPainter &p, const QRect &rect)
{
    QMutexLocker m( &m_lastVideoSampleMutex );

    if ( !m_lastVideoSample )
        return;

    // get the snapshot buffer format now. We set the caps on the appsink so
    // that it can only be an rgb buffer.
    GstCaps *caps = gst_sample_get_caps( m_lastVideoSample );

    if ( !caps )
    {
        reportError( "could not get caps for the new video sample" );
        return;
    }

    GstStructure * structure = gst_caps_get_structure( caps, 0 );

    // We need to get the final caps on the buffer to get the size
    int width = 0;
    int height = 0;

    gst_structure_get_int( structure, "width", &width );
    gst_structure_get_int( structure, "height", &height );

    if ( !width || !height )
    {
        reportError( "could not get video height and width" );
        return;
    }

    // Create pixmap from buffer and save, gstreamer video buffers have a stride that
    // is rounded up to the nearest multiple of 4
    GstBuffer *buffer = gst_sample_get_buffer( m_lastVideoSample );
    GstMapInfo map;

    if ( !gst_buffer_map( buffer, &map, GST_MAP_READ ) )
    {
        reportError( "could not map video buffer" );
        return;
    }

    p.drawImage( rect, QImage( map.data, width, height, GST_ROUND_UP_4 (width * 4), QImage::Format_RGB32 ), QRect( 0, 0, width, height ) );

    // And clean up
    gst_buffer_unmap( buffer, &map );
}
Example #29
static GstFlowReturn
on_new_sample_from_source (GstAppSink * elt, gpointer user_data)
{
  ProgramData *data = (ProgramData *) user_data;
  GstSample *sample;
  GstBuffer *buffer;
  GstElement *source;

  sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
  buffer = gst_sample_get_buffer (sample);
  source = gst_bin_get_by_name (GST_BIN (data->sink), "testsource");
  gst_app_src_push_buffer (GST_APP_SRC (source), gst_buffer_ref (buffer));
  gst_sample_unref (sample);
  g_object_unref (source);
  return GST_FLOW_OK;
}
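The gst_buffer_ref() above is what keeps the buffer alive across the hand-off: unlike gst_app_src_push_sample(), gst_app_src_push_buffer() is transfer-full, so the appsrc must receive its own reference while the sample retains the original one until it is unreffed. A hedged restatement as a helper:

#include <gst/app/gstappsrc.h>

static GstFlowReturn
forward_sample_buffer (GstSample *sample, GstAppSrc *src)
{
    GstBuffer *buffer = gst_sample_get_buffer (sample);

    if (buffer == NULL)
        return GST_FLOW_OK;
    /* transfer-full: hand the appsrc its own reference */
    return gst_app_src_push_buffer (src, gst_buffer_ref (buffer));
}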
Example #30
GstFlowReturn MediaImpl::gstNewSampleCallback(GstElement*, MediaImpl *p)
{
    // Make it thread-safe.
    p->lockMutex();

    // Get next frame.
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(p->_appsink0));

    // The pull can return NULL on EOS or flushing; bail out early.
    if (sample == NULL)
    {
        p->unlockMutex();
        return GST_FLOW_OK;
    }

    // Unref last frame.
    p->_freeCurrentSample();

    // Set current frame.
    p->_currentFrameSample = sample;

    // For live sources, video dimensions have not been set, because
    // gstPadAddedCallback is never called. Fix dimensions from first sample /
    // caps we receive.
    if (p->_isSharedMemorySource && ( p->_padHandlerData.width == -1 ||
                                      p->_padHandlerData.height == -1)) {
        GstCaps *caps = gst_sample_get_caps(sample);
        GstStructure *structure;
        structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width",  &p->_padHandlerData.width);
        gst_structure_get_int(structure, "height", &p->_padHandlerData.height);
        // g_print("Size is %u x %u\n", _padHandlerData.width, _padHandlerData.height);
    }

    // Try to retrieve data bits of frame.
    GstMapInfo& map = p->_mapInfo;
    GstBuffer *buffer = gst_sample_get_buffer( sample );
    if (gst_buffer_map(buffer, &map, GST_MAP_READ))
    {
        p->_currentFrameBuffer = buffer;
        // For debugging:
        //gst_util_dump_mem(map.data, map.size)

        // Retrieve data from map info.
        p->_data = map.data;

        // Bits have changed.
        p->_bitsChanged = true;
    }

    p->unlockMutex();

    return GST_FLOW_OK;
}