Example #1
static gboolean
thumbnail_cb (gpointer user)
{
  GstSample *b = NULL;
  GstCaps *caps;
  GESPipeline *p;

  p = GES_PIPELINE (user);

  caps = gst_caps_from_string ("image/jpeg");
  GST_INFO ("getting thumbnails");

  /* check raw rgb use-case with scaling */
  b = ges_pipeline_get_thumbnail_rgb24 (p, 320, 240);
  g_assert (b);
  gst_sample_unref (b);

  /* check encoding use-case from caps */
  b = NULL;
  b = ges_pipeline_get_thumbnail (p, caps);
  g_assert (b);
  gst_sample_unref (b);

  g_assert (ges_pipeline_save_thumbnail (p, -1, -1, (gchar *)
          "image/jpeg", (gchar *) TEST_PATH, NULL));
  g_assert (g_file_test (TEST_PATH, G_FILE_TEST_EXISTS));
  g_unlink (TEST_PATH);

  gst_caps_unref (caps);
  return FALSE;
}
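thumbnail_cb has the GSourceFunc signature and returns FALSE so it runs exactly once; it is presumably dispatched from a GLib main loop after the pipeline has prerolled. A minimal scheduling sketch (the pipeline and loop setup are assumed, not part of the original test):

/* Hedged sketch: fire the thumbnail check once, one second in. */
g_timeout_add_seconds (1, thumbnail_cb, pipeline);
g_main_loop_run (loop);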
Example #2
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
#if GST_VERSION_MAJOR==0
					if(prevBuffer) gst_buffer_unref (prevBuffer);
#else
					if(prevBuffer) gst_sample_unref (prevBuffer);
#endif
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer *buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_sample_unref (prevBuffer);
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = sample;
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
Example #3
static GdkPixbuf *
gst_thumbnailer_cover_from_tags (GstTagList   *tags,
                                 GCancellable *cancellable)
{
  GstSample          *cover = NULL;
  guint               i;
  GstSample          *sample;
  GstCaps            *caps;
  const GstStructure *caps_struct;
  gint                type;
  GstBuffer          *buffer;
  GdkPixbuf          *pixbuf = NULL;

  for (i = 0; ; i++)
    {
      if (g_cancellable_is_cancelled (cancellable))
        break;

      /* look for image in the tags */
      if (!gst_tag_list_get_sample_index (tags, GST_TAG_IMAGE, i, &sample))
        break;

      caps = gst_sample_get_caps (sample);
      caps_struct = gst_caps_get_structure (caps, 0);
      gst_structure_get_enum (caps_struct,
                              "image-type",
                              GST_TYPE_TAG_IMAGE_TYPE,
                              &type);

      if (type == GST_TAG_IMAGE_TYPE_FRONT_COVER)
        {
          /* found the cover */
          cover = sample;
          break;
        }

      gst_sample_unref (sample);
    }

  if (cover == NULL
      && !g_cancellable_is_cancelled (cancellable))
    {
      /* look for preview image */
      gst_tag_list_get_sample_index (tags, GST_TAG_PREVIEW_IMAGE, 0, &cover);
    }

  if (cover != NULL)
    {
      /* create image */
      buffer = gst_sample_get_buffer (cover);
      pixbuf = gst_thumbnailer_buffer_to_pixbuf (buffer);
      gst_sample_unref (cover);
    }

  return pixbuf;
}
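A helper like gst_thumbnailer_buffer_to_pixbuf is project code not shown here; a common way to implement such a helper for encoded image tags is to feed the buffer bytes through a GdkPixbufLoader. A minimal sketch under that assumption (not the thumbnailer's actual implementation):

/* Hedged sketch: decode an encoded-image GstBuffer into a GdkPixbuf. */
static GdkPixbuf *
buffer_to_pixbuf_sketch (GstBuffer *buffer)
{
  GstMapInfo map;
  GdkPixbufLoader *loader;
  GdkPixbuf *pixbuf = NULL;
  gboolean ok;

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ))
    return NULL;

  loader = gdk_pixbuf_loader_new ();
  ok = gdk_pixbuf_loader_write (loader, map.data, map.size, NULL);
  ok = gdk_pixbuf_loader_close (loader, NULL) && ok;  /* always close */
  if (ok) {
    pixbuf = gdk_pixbuf_loader_get_pixbuf (loader);
    if (pixbuf)
      g_object_ref (pixbuf);  /* the loader owns its own reference */
  }

  g_object_unref (loader);
  gst_buffer_unmap (buffer, &map);
  return pixbuf;
}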
Example #4
GstFlowReturn VideoSender::newBufferCB(GstAppSink *sink, gpointer user_data)
{
  qDebug() << "In" << __FUNCTION__;

  VideoSender *vs = static_cast<VideoSender *>(user_data);

  // Get new video sample
  GstSample *sample = gst_app_sink_pull_sample(sink);
  if (sample == NULL) {
    qWarning("%s: Failed to get new sample", __FUNCTION__);
    return GST_FLOW_OK;
  }
  
  // FIXME: zero copy?
  GstBuffer *buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  QByteArray *data = NULL;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
    // Copy the data to QByteArray
    data = new QByteArray((char *)map.data, map.size);
    vs->emitVideo(data);
    gst_buffer_unmap(buffer, &map);
  } else {
    qWarning("Error with gst_buffer_map");
  }
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
Example #5
/*!
 * \brief OpenIMAJCapGStreamer::nextFrame
 * \return
 * Grabs a sample from the pipeline, awaiting consumption by getImage.
 * The pipeline is started if it was not running yet
 */
bool OpenIMAJCapGStreamer::nextFrame()
{
    if(!pipeline)
        return false;
    
    // start the pipeline if it was not in playing state yet
    if(!this->isPipelinePlaying())
        this->startPipeline();
    
    // bail out if EOS
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;
    
    if(sample)
        gst_sample_unref(sample);
    
    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    
    if(!sample)
        return false;
    
    buffer = gst_sample_get_buffer(sample);
    
    if(!buffer)
        return false;
    
    return true;
}
Example #6
/*!
 * \brief CvCapture_GStreamer::grabFrame
 * \return
 * Grabs a sample from the pipeline, awaiting consumption by retrieveFrame.
 * The pipeline is started if it was not running yet
 */
bool CvCapture_GStreamer::grabFrame()
{
    if(!pipeline)
        return false;

    // start the pipeline if it was not in playing state yet
    if(!this->isPipelinePlaying())
        this->startPipeline();

    // bail out if EOS
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

#if GST_VERSION_MAJOR == 0
    if(buffer)
        gst_buffer_unref(buffer);

    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
    if(sample)
        gst_sample_unref(sample);

    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));

    if(!sample)
        return false;

    buffer = gst_sample_get_buffer(sample);
#endif

    if(!buffer)
        return false;

    return true;
}
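Both wrappers only pull and hold the sample here; a matching retrieve call maps the stored buffer afterwards. A hedged usage sketch of that grab/retrieve loop (the open()/retrieveFrame() signatures are assumptions, not quoted from the source):

// Hedged sketch: the loop these capture wrappers are designed for.
CvCapture_GStreamer cap;
if (cap.open(CV_CAP_GSTREAMER_FILE, "video.mp4")) {   // assumed open() API
    while (cap.grabFrame()) {
        IplImage* frame = cap.retrieveFrame(0);       // consumes the sample
        if (!frame)
            break;
        // frame data stays valid only until the next grabFrame()
    }
}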
Example #7
GdkPixbuf *
totem_gst_tag_list_get_cover (GstTagList *tag_list)
{
  GstSample *cover_sample;

  g_return_val_if_fail (tag_list != NULL, FALSE);

  cover_sample = totem_gst_tag_list_get_cover_real (tag_list);
  /* Fallback to preview */
    if (!cover_sample) {
      gst_tag_list_get_sample_index (tag_list, GST_TAG_PREVIEW_IMAGE, 0,
				     &cover_sample);
    }

  if (cover_sample) {
    GstBuffer *buffer;
    GdkPixbuf *pixbuf;

    buffer = gst_sample_get_buffer (cover_sample);
    pixbuf = totem_gst_buffer_to_pixbuf (buffer);
    gst_sample_unref (cover_sample);
    return pixbuf;
  }

  return NULL;
}
Example #8
static GstFlowReturn
post_recv_sample (GstElement * appsink, gpointer user_data)
{
  GstSample *sample = NULL;
  GstFlowReturn ret;
  GstBuffer *buffer;
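  /* note: `httpep` and `method` are assumed to be defined at test-file
   * scope in the original test; they are not declared in this snippet */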

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_ERROR;

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  g_signal_emit_by_name (httpep, "push-buffer", buffer, &ret);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to httpep %s. Ret code %d",
        GST_ELEMENT_NAME (httpep), ret);
  }

  g_object_get (G_OBJECT (httpep), "http-method", &method, NULL);
  ck_assert_int_eq (method, KMS_HTTP_ENDPOINT_METHOD_POST);

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
Example #9
GstFlowReturn GstAppSinkPipeline::NewPrerollCallback(GstAppSink* appsink, gpointer user_data)
{
	GstSample* sample = gst_app_sink_pull_preroll(appsink);
	// pull_preroll() returns NULL on flush or shutdown; guard before unref
	if (sample)
		gst_sample_unref(sample);
	return GST_FLOW_OK;
}
Example #10
void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), rsample->presentationTime().toFloat(), rsample->presentationSize().width(), rsample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
Example #11
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    if (m_repaintHandler) {
        g_signal_handler_disconnect(m_webkitVideoSink.get(), m_repaintHandler);
        m_repaintHandler = 0;
    }

    g_mutex_clear(&m_sampleMutex);

    if (m_sample)
        gst_sample_unref(m_sample);
    m_sample = 0;

    m_player = 0;

    if (m_volumeSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
        m_volumeSignalHandler = 0;
    }

    if (m_muteSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
        m_muteSignalHandler = 0;
    }

#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
Example #12
static void
convert_frame_need_data_callback (GstElement * src, guint size,
    GstVideoConvertSampleContext * context)
{
  GstFlowReturn ret = GST_FLOW_ERROR;
  GError *error;
  GstBuffer *buffer;

  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  buffer = gst_sample_get_buffer (context->sample);
  g_signal_emit_by_name (src, "push-buffer", buffer, &ret);
  gst_sample_unref (context->sample);
  context->sample = NULL;

  if (ret != GST_FLOW_OK) {
    GST_ERROR ("Could not push video frame: %s", gst_flow_get_name (ret));

    error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
        "Could not push video frame: %s", gst_flow_get_name (ret));

    convert_frame_finish (context, NULL, error);
  }

  g_signal_handlers_disconnect_by_func (src, convert_frame_need_data_callback,
      context);

done:
  g_mutex_unlock (&context->mutex);
}
Example #13
GstFlowReturn Capture::newSample(GstAppSink* sink, gpointer gSelf)
{
    //g_print("New sample...");
    GstSample* sample = NULL;
    GstBuffer* sampleBuffer = NULL;
    GstMapInfo bufferInfo;

    Capture* self = static_cast<Capture* >(gSelf);
    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if(sample != NULL)
    {
        sampleBuffer = gst_sample_get_buffer(sample);
        if(sampleBuffer != NULL)
        {
            gst_buffer_map(sampleBuffer, &bufferInfo, GST_MAP_READ);
            //gsize sz = gst_buffer_get_size(sampleBuffer);
            //g_print("%lu\n",sz);
            self->m_mutex.lock();
            // QImage wraps the mapped data without copying it, so take a
            // deep copy before the buffer is unmapped below (frame size
            // and format are hard-coded in this example)
            self->m_image = QImage(bufferInfo.data, 600, 400, QImage::Format_Mono).copy();
            self->m_mutex.unlock();
            gst_buffer_unmap(sampleBuffer, &bufferInfo);
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}
Example #14
GstFlowReturn on_new_preroll(GstAppSink *appsink, gpointer user_data) {
	GstSample* sample = NULL;
	GstBuffer* buffer;
	GstMemory* memory;
	GstMapInfo info;
	GstClockTime clocktime;

	g_debug("on_new_preroll ");
	sample = gst_app_sink_pull_sample (appsink);
	if (sample) {
		g_debug("pulled sample\n");
		buffer = gst_sample_get_buffer(sample);
		clocktime = GST_BUFFER_PTS(buffer);
		memory = gst_buffer_get_memory(buffer, 0);
		gst_memory_map(memory, &info, GST_MAP_READ);
		/*
			You can access raw memory at info.data
		*/
		if(app.output_callback)
			app.output_callback(info.data, info.size);
		//fwrite(info.data, 1, info.size, app.outfile);

		gst_memory_unmap(memory, &info);
		gst_memory_unref(memory);
		gst_sample_unref(sample);
	}
	return GST_FLOW_OK;
}
Example #15
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    GstMapInfo map;

    buf = gst_buffer_new_and_alloc (100);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    memset (map.data, i, 100);
    gst_buffer_unmap (buf, &map);

    printf ("%d: pushing buffer for pointer %p, %p\n", i, map.data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  /* push EOS */
  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  /* _is_eos() does not block; it returns TRUE once the EOS event has been
   * received and all queued samples have been pulled */
  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstSample *sample;

    /* pull the next item, this can return NULL when there is no more data and
     * EOS has been received */
    sample = gst_app_sink_pull_sample (GST_APP_SINK (app->sink));
    printf ("retrieved sample %p\n", sample);
    if (sample)
      gst_sample_unref (sample);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
Example #16
static void
gst_player_media_info_finalize (GObject * object)
{
  GstPlayerMediaInfo *info = GST_PLAYER_MEDIA_INFO (object);

  g_free (info->uri);

  if (info->tags)
    gst_tag_list_unref (info->tags);

  g_free (info->title);

  g_free (info->container);

  if (info->image_sample)
    gst_sample_unref (info->image_sample);

  if (info->audio_stream_list)
    g_list_free (info->audio_stream_list);

  if (info->video_stream_list)
    g_list_free (info->video_stream_list);

  if (info->subtitle_stream_list)
    g_list_free (info->subtitle_stream_list);

  if (info->stream_list)
    g_list_free_full (info->stream_list, g_object_unref);

  G_OBJECT_CLASS (gst_player_media_info_parent_class)->finalize (object);
}
Example #17
static GstFlowReturn new_sample_callback (GstAppSink * sink, gpointer user_data)
{
        GstBuffer *buffer;
        GstSample *sample;
        Encoder *encoder = (Encoder *)user_data;

        *(encoder->output->heartbeat) = gst_clock_get_time (encoder->system_clock);
        sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
        buffer = gst_sample_get_buffer (sample);

        sem_wait (encoder->output->semaphore);

        (*(encoder->output->total_count)) += gst_buffer_get_size (buffer);

        /* update head_addr, free enough memory for current buffer. */
        while (cache_free (encoder) < gst_buffer_get_size (buffer) + 12) { /* timestamp + gop size = 12 */
                move_head (encoder);
        }

        if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
                /* 
                 * random access point found.
                 * write previous gop size to 4 bytes reservation,
                 * write current gop timestamp,
                 * reserve 4 bytes for size of current gop,
                 */
                if (encoder->mqdes == -1) {
                        /* no m3u8 output */
                        move_last_rap (encoder, buffer);

                } else if (GST_BUFFER_PTS (buffer) == encoder->last_running_time) {
                        gchar *msg;

                        move_last_rap (encoder, buffer);
                        msg = g_strdup_printf ("%lu", encoder->last_segment_duration);
                        if (mq_send (encoder->mqdes, msg, strlen (msg), 1) == -1) {
                                GST_ERROR ("mq_send error: %s", g_strerror (errno));
                        }
                        g_free (msg);
                        encoder->last_running_time = GST_CLOCK_TIME_NONE;
                }
        }

        /* udpstreaming? */
        if (encoder->udpstreaming) {
                udp_streaming (encoder, buffer);
        }

        /*
         * copy buffer to cache.
         * update tail_addr
         */
        copy_buffer (encoder, buffer);

        sem_post (encoder->output->semaphore);

        gst_sample_unref (sample);

        return GST_FLOW_OK;
}
Example #18
/* The appsink has received a sample */
static GstFlowReturn appsink_new_sample_cb(GstAppSink *sink,
					   gpointer user_data)
{
	struct videnc_state *st = user_data;
	GstSample *sample;
	GstBuffer *buffer;
	GstMapInfo info;
	guint8 *data;
	gsize size;

	/* Retrieve the sample */
	sample = gst_app_sink_pull_sample(sink);

	if (sample) {
		buffer = gst_sample_get_buffer(sample);
		gst_buffer_map( buffer, &info, (GstMapFlags)(GST_MAP_READ) );

		data = info.data;
		size = info.size;

		gst_video_h264_packetize(data, size, st->encoder.pktsize,
					 st->pkth, st->arg);

		gst_buffer_unmap(buffer, &info);
		gst_sample_unref(sample);
	}

	return GST_FLOW_OK;
}
Example #19
static GstSample *
totem_gst_tag_list_get_cover_real (GstTagList *tag_list)
{
  GstSample *cover_sample = NULL;
  guint i;

  for (i = 0; ; i++) {
    GstSample *sample;
    GstCaps *caps;
    const GstStructure *caps_struct;
    int type;

    if (!gst_tag_list_get_sample_index (tag_list, GST_TAG_IMAGE, i, &sample))
      break;

    caps = gst_sample_get_caps (sample);
    caps_struct = gst_caps_get_structure (caps, 0);
    gst_structure_get_enum (caps_struct,
			    "image-type",
			    GST_TYPE_TAG_IMAGE_TYPE,
			    &type);
    if (type == GST_TAG_IMAGE_TYPE_UNDEFINED) {
      if (cover_sample == NULL) {
	/* take a ref here since we will continue and unref below */
	cover_sample = gst_sample_ref (sample);
      }
    } else if (type == GST_TAG_IMAGE_TYPE_FRONT_COVER) {
      cover_sample = sample;
      break;
    }
    gst_sample_unref (sample);
  }

  return cover_sample;
}
Example #20
/*
 * Write camera frame to Objection Detection process
 */
GstFlowReturn VideoSender::newBufferOBCB(GstAppSink *sink, gpointer user_data)
{
  qDebug() << "In" << __FUNCTION__;

  VideoSender *vs = static_cast<VideoSender *>(user_data);

  // Get new video sample
  GstSample *sample = gst_app_sink_pull_sample(sink);
  if (sample == NULL) {
    qWarning("%s: Failed to get new sample", __FUNCTION__);
    return GST_FLOW_OK;
  }

  if (!vs->ODprocessReady) {
    qDebug() << "ODprocess not ready yet, not sending frame";
    gst_sample_unref(sample);
    return GST_FLOW_OK;
  }

  GstCaps *caps = gst_sample_get_caps(sample);
  if (caps == NULL) {
    qWarning("%s: Failed to get caps of the sample", __FUNCTION__);
    gst_sample_unref(sample);
    return GST_FLOW_OK;
  }

  gint width, height;
  GstStructure *gststruct = gst_caps_get_structure(caps, 0);
  gst_structure_get_int(gststruct,"width", &width);
  gst_structure_get_int(gststruct,"height", &height);

  GstBuffer *buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {

    vs->ODdata[OB_VIDEO_PARAM_WD3] = width >> 3;
    vs->ODdata[OB_VIDEO_PARAM_HD3] = height >> 3;
    vs->ODdata[OB_VIDEO_PARAM_BPP] = map.size * 8 / (width * height);

    if (vs->ODprocess) {
      vs->ODprocessReady = false;
      vs->ODprocess->write((const char *)vs->ODdata, sizeof(vs->ODdata));
      vs->ODprocess->write((const char *)map.data, map.size);
    }
    gst_buffer_unmap(buffer, &map);
  } else {
Example #21
static void free_gst_frame(SpiceGstFrame *gstframe)
{
    gstframe->frame->free(gstframe->frame);
    if (gstframe->sample) {
        gst_sample_unref(gstframe->sample);
    }
    free(gstframe);
}
Example #22
/*
  This function will be called in a separate thread when our appsink
  says there is data for us. user_data has to be defined
  when calling g_signal_connect. It can be used to pass objects etc.
  from your other function to the callback.
*/
GstFlowReturn callback (GstElement* sink, void* user_data)
{
    GstSample* sample = NULL;
    /* Retrieve the buffer */
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);

    if (sample)
    {
        // we have a valid sample
        // do things with the image here
        static guint framecount = 0;
        int pixel_data = -1;

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        GstMapInfo info; // contains the actual image
        if (gst_buffer_map(buffer, &info, GST_MAP_READ))
        {
            GstVideoInfo* video_info = gst_video_info_new();
            if (!gst_video_info_from_caps(video_info, gst_sample_get_caps(sample)))
            {
                // Could not parse video info (should not happen)
                g_warning("Failed to parse video info");
                // release everything acquired so far before bailing out
                gst_video_info_free(video_info);
                gst_buffer_unmap(buffer, &info);
                gst_sample_unref(sample);
                return GST_FLOW_ERROR;
            }

            /* Get a pointer to the image data */
            unsigned char* data = info.data;

            /* Get the pixel value of the center pixel */
            int stride = video_info->finfo->bits / 8;
            unsigned int pixel_offset = video_info->width / 2 * stride +
                video_info->width * video_info->height / 2 * stride;

            // this is only one pixel
            // when dealing with formats like BGRx
            // pixel_data will have to consist out of
            // pixel_offset   => B
            // pixel_offset+1 => G
            // pixel_offset+2 => R
            // pixel_offset+3 => x
            pixel_data = info.data[pixel_offset];

            gst_buffer_unmap(buffer, &info);
            gst_video_info_free(video_info);
        }

        GstClockTime timestamp = GST_BUFFER_PTS(buffer);
        g_print("Captured frame %d, Pixel Value=%03d Timestamp=%" GST_TIME_FORMAT "            \r",
                framecount, pixel_data,
                GST_TIME_ARGS(timestamp));
        framecount++;


        // delete our reference so that gstreamer can handle the sample
        gst_sample_unref (sample);
    }
    return GST_FLOW_OK;
}
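The header comment says the callback is attached with g_signal_connect; for appsink's "new-sample" signal to fire at all, signal emission must also be enabled on the element. A minimal hookup sketch (the sink element's creation is assumed):

/* Hedged sketch: wiring the callback above to an existing appsink. */
g_object_set (G_OBJECT (sink), "emit-signals", TRUE, NULL);
g_signal_connect (sink, "new-sample", G_CALLBACK (callback), user_data);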
Example #23
void ofGstVideoUtils::close(){
	ofGstUtils::close();
	Poco::ScopedLock<ofMutex> lock(mutex);
	pixels.clear();
	backPixels.clear();
	bIsFrameNew					= false;
	bHavePixelsChanged			= false;
	bBackPixelsChanged			= false;
#if GST_VERSION_MAJOR==0
	if(prevBuffer) gst_buffer_unref (prevBuffer);
	if(buffer) gst_buffer_unref (buffer);
#else
	if(prevBuffer) gst_sample_unref (prevBuffer);
	if(buffer) gst_sample_unref (buffer);
#endif
	prevBuffer = 0;
	buffer = 0;
}
Example #24
/* GStreamer thread
 *
 * We cannot use GStreamer's signals because they are not always run in
 * the main context. So use a callback (lower overhead) and have it pull
 * the sample to avoid a race with free_pipeline(). This means queuing the
 * decoded frames outside GStreamer. So while we're at it, also schedule
 * the frame display ourselves in schedule_frame().
 */
static GstFlowReturn new_sample(GstAppSink *gstappsink, gpointer video_decoder)
{
    SpiceGstDecoder *decoder = video_decoder;

    GstSample *sample = gst_app_sink_pull_sample(decoder->appsink);
    GstBuffer *buffer = sample ? gst_sample_get_buffer(sample) : NULL;
    if (sample) {
        g_mutex_lock(&decoder->queues_mutex);

        /* gst_app_sink_pull_sample() sometimes returns the same buffer twice
         * or buffers that have a modified, and thus unrecognizable, PTS.
         * Blindly removing frames from the decoding_queue until we find a
         * match would only empty the queue, resulting in later buffers not
         * finding a match either, etc. So check the buffer has a matching
         * frame first.
         */
        SpiceGstFrame *gstframe;
        GList *l = g_queue_peek_head_link(decoder->decoding_queue);
        while (l) {
            gstframe = l->data;
            if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                /* The frame is now ready for display */
                gstframe->sample = sample;
                g_queue_push_tail(decoder->display_queue, gstframe);

                /* Now that we know there is a match, remove it and the older
                 * frames from the decoding queue.
                 */
                while ((gstframe = g_queue_pop_head(decoder->decoding_queue))) {
                    if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                        break;
                    }
                    /* The GStreamer pipeline dropped the corresponding
                     * buffer.
                     */
                    SPICE_DEBUG("the GStreamer pipeline dropped a frame");
                    free_gst_frame(gstframe);
                }
                break;
            }
            l = l->next;
        }
        if (!l) {
            spice_warning("got an unexpected decoded buffer!");
            gst_sample_unref(sample);
        }

        g_mutex_unlock(&decoder->queues_mutex);
        schedule_frame(decoder);
    } else {
        spice_warning("GStreamer error: could not pull sample");
    }
    return GST_FLOW_OK;
}
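The header comment explains why this decoder registers a pull callback instead of using GStreamer signals; with appsink that is done through GstAppSinkCallbacks. A minimal sketch of the registration (presumably run from the pipeline-construction code, which is not shown):

/* Hedged sketch: register new_sample() as a pull callback. */
static GstAppSinkCallbacks appsink_cbs = {
    .eos = NULL,
    .new_preroll = NULL,
    .new_sample = new_sample,
};

gst_app_sink_set_callbacks(decoder->appsink, &appsink_cbs,
                           decoder /* passed as video_decoder */, NULL);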
Example #25
static GstFlowReturn new_buffer(GstAppSink * sink, gpointer data)
{
	GStreamerFramesReceiver * pClass = (GStreamerFramesReceiver*) data;

	GstSample * sample = gst_app_sink_pull_sample(sink);
	if (!sample) // NULL on EOS or flush; unreffing NULL would assert
		return GST_FLOW_EOS;

	GstFlowReturn res = frame_handler(sample, pClass);

	gst_sample_unref(sample);

	return res;
}
Example #26
GstFlowReturn MediaImpl::gstNewSampleCallback(GstElement*, MediaImpl *p)
{
  GstSample *sample;
  sample = gst_app_sink_pull_sample(GST_APP_SINK(p->_videoSink));
  //g_signal_emit_by_name (p->_videoSink, "pull-sample", &sample);
  p->get_queue_input_buf()->put(sample);
  if (p->get_queue_output_buf()->size() > 1) {
    sample = p->get_queue_output_buf()->get();
    gst_sample_unref(sample);
  }
  return GST_FLOW_OK;
}
Example #27
GstFlowReturn AudioFileReader::handleSample(GstAppSink* sink)
{
    GstSample* sample = gst_app_sink_pull_sample(sink);
    if (!sample)
        return GST_FLOW_ERROR;

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (!buffer) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstCaps* caps = gst_sample_get_caps(sample);
    if (!caps) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstAudioInfo info;
    gst_audio_info_from_caps(&info, caps);
    int frames = gst_buffer_get_size(buffer) / info.bpf;

    // Check the first audio channel. The buffer is supposed to store
    // data of a single channel anyway.
    switch (GST_AUDIO_INFO_POSITION(&info, 0)) {
    case GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT:
    case GST_AUDIO_CHANNEL_POSITION_MONO:
        gst_buffer_list_add(m_frontLeftBuffers, gst_buffer_ref(buffer));
        m_channelSize += frames;
        break;
    case GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT:
        gst_buffer_list_add(m_frontRightBuffers, gst_buffer_ref(buffer));
        break;
    default:
        break;
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}
Example #28
static void
gss_program_jpeg_resource (GssTransaction * t)
{
  GssProgram *program = (GssProgram *) t->resource->priv;
#if GST_CHECK_VERSION(1,0,0)
  GstSample *sample = NULL;
#endif
  GstBuffer *buffer = NULL;

  if (!program->enable_streaming || program->state != GSS_PROGRAM_STATE_RUNNING) {
    soup_message_set_status (t->msg, SOUP_STATUS_NO_CONTENT);
    return;
  }
#if GST_CHECK_VERSION(1,0,0)
  if (program->jpegsink) {
    g_object_get (program->jpegsink, "last-sample", &sample, NULL);
  }
  if (sample) {
    GstMapInfo mapinfo;

    buffer = gst_sample_get_buffer (sample);
    if (buffer) {
      soup_message_set_status (t->msg, SOUP_STATUS_OK);

      gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
      soup_message_set_response (t->msg, "image/jpeg", SOUP_MEMORY_COPY,
          (char *) mapinfo.data, mapinfo.size);

      gst_buffer_unmap (buffer, &mapinfo);
    }
    gst_sample_unref (sample);
    return;
  }
#else
  if (program->jpegsink) {
    g_object_get (program->jpegsink, "last-buffer", &buffer, NULL);
  }

  if (buffer) {
    soup_message_set_status (t->msg, SOUP_STATUS_OK);

    soup_message_set_response (t->msg, "image/jpeg", SOUP_MEMORY_COPY,
        (char *) GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));

    gst_buffer_unref (buffer);
    return;
  }
#endif

  gss_html_error_404 (t->server, t->msg);
}
Example #29
CAMLprim value ocaml_gstreamer_appsink_pull_buffer(value _as, value string_mode)
{
  CAMLparam2(_as, string_mode);
  CAMLlocal1(ans);
  appsink *as = Appsink_val(_as);
  GstSample *gstsample;
  GstBuffer *gstbuf;
  GstMapInfo map;
  intnat len;
  gboolean ret;

  caml_release_runtime_system();
  gstsample = gst_app_sink_pull_sample(as->appsink);
  caml_acquire_runtime_system();

  if (!gstsample)
    {
      if (gst_app_sink_is_eos(as->appsink))
        caml_raise_constant(*caml_named_value("gstreamer_exn_eos"));
      else
        caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
    }

  caml_release_runtime_system();
  gstbuf = gst_sample_get_buffer(gstsample);
  caml_acquire_runtime_system();

  if (!gstbuf) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));

  caml_release_runtime_system();
  ret = gst_buffer_map(gstbuf, &map, GST_MAP_READ);
  caml_acquire_runtime_system();

  if (!ret) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));

  len = map.size;
  if (string_mode == Val_false) {
    ans = caml_ba_alloc(CAML_BA_C_LAYOUT | CAML_BA_UINT8, 1, NULL, &len);
    memcpy(Caml_ba_data_val(ans), map.data, len);
  } else {
    ans = caml_alloc_string(len);
    memcpy(String_val(ans), map.data, len);
  }

  caml_release_runtime_system();
  gst_buffer_unmap(gstbuf, &map);
  gst_sample_unref(gstsample);
  caml_acquire_runtime_system();

  CAMLreturn(ans);
}
Example #30
static void
got_buf_cb (GstElement * sink, GstBuffer * new_buf, GstPad * pad,
    GstSample ** p_old_sample)
{
  GstCaps *caps;

  caps = gst_pad_get_current_caps (pad);

  if (*p_old_sample)
    gst_sample_unref (*p_old_sample);
  *p_old_sample = gst_sample_new (new_buf, caps, NULL, NULL);

  gst_caps_unref (caps);
}
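got_buf_cb matches the signature of fakesink's "handoff" signal, which only fires when signal-handoffs is enabled. A plausible hookup sketch (the sink element and its lifetime are assumed):

/* Hedged sketch: keep *p_old_sample pointing at the newest buffer+caps. */
GstSample *last_sample = NULL;

g_object_set (sink, "signal-handoffs", TRUE, NULL);
g_signal_connect (sink, "handoff", G_CALLBACK (got_buf_cb), &last_sample);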