Example #1
static gboolean read_data(gst_app_t *app)
{
	GstBuffer *buffer;
	guint8 *ptr;
	GstFlowReturn ret;
	int iret;
	char *vbuf;
	char *abuf;
	int res_len = 0;

	iret = hu_aap_recv_process ();

	if (iret != 0) {
		printf("hu_aap_recv_process() iret: %d\n", iret);
		g_main_loop_quit(app->loop);		
		return FALSE;
	}

	/* Is there a video buffer queued? */
	vbuf = vid_read_head_buf_get (&res_len);

	if (vbuf != NULL) {

		buffer = gst_buffer_new_and_alloc(res_len);
		memcpy(GST_BUFFER_DATA(buffer), vbuf, res_len);

		ret = gst_app_src_push_buffer(app->src, buffer);

		if(ret !=  GST_FLOW_OK){
			printf("push buffer returned %d for %d bytes \n", ret, res_len);
			return FALSE;
		}
	}
	
	/* Is there an audio buffer queued? */
	abuf = aud_read_head_buf_get (&res_len);
	if (abuf != NULL) {

		buffer = gst_buffer_new_and_alloc(res_len);
		memcpy(GST_BUFFER_DATA(buffer), abuf, res_len);

		/* Short buffers go to the secondary audio appsrc, longer ones to the main audio appsrc. */
		if (res_len <= 2048 + 96)
			ret = gst_app_src_push_buffer((GstAppSrc *)au1_src, buffer);
		else
			ret = gst_app_src_push_buffer((GstAppSrc *)aud_src, buffer);

		if(ret !=  GST_FLOW_OK){
			printf("push buffer returned %d for %d bytes \n", ret, res_len);
			return FALSE;
		}
	}	

	return TRUE;
}
Example #2
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{

    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");

    __BEGIN__;
    if (input_pix_fmt == 1) {
        if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
        }
    }
    else if (input_pix_fmt == 0) {
        if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
        }
    }
    else {
        assert(false);
    }
    int size = image->imageSize;
    buffer = gst_buffer_new_and_alloc (size);
    memcpy (GST_BUFFER_DATA(buffer), image->imageData, size);
    gst_app_src_push_buffer(GST_APP_SRC(source),buffer);
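    // gst_app_src_push_buffer() takes ownership of the buffer, so it must not be unreffed here.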
    __END__;
    return true;
}
Example #3
void MediaPlayer::cb_source_need_data(GstAppSrc *src, guint length, gpointer user_data)
{
    MediaPlayer * self = reinterpret_cast<MediaPlayer*>( user_data );
    qint64 totalread = 0;

    if ( !self->m_mediaIODevice->atEnd() )
    {
        GstBuffer * buffer = gst_buffer_new_and_alloc( length );

        GstMapInfo map;
        gst_buffer_map( buffer, &map, GST_MAP_WRITE );

        totalread = self->m_mediaIODevice->read( (char*) map.data, length );
        gst_buffer_unmap( buffer, &map );

        if ( totalread > 0)
        {
            GstFlowReturn ret = gst_app_src_push_buffer( src, buffer );

            if (ret == GST_FLOW_ERROR) {
                qWarning()<<"appsrc: push buffer error";
            } else if (ret == GST_FLOW_FLUSHING) {
                qWarning()<<"appsrc: push buffer wrong state";
            }
        }
        else
        {
            // Nothing was read: the buffer was never pushed, so release it here.
            gst_buffer_unref( buffer );
        }
    }

    // Tell GStreamer this is the end of the stream
    if ( totalread <= 0 )
        gst_app_src_end_of_stream( src );
}
Example #4
//--------------------------------------------------------------
void testApp::update(){
	grabber.update();
	
	if(grabber.isFrameNew()){
		GstBuffer * buffer;
		unsigned char * lpBits24 = grabber.getPixels();
		memcpy(pixels.getPixels(), lpBits24, 640*480*3);
		buffer = gst_app_buffer_new (pixels.getPixels(), 640*480*3, NULL, pixels.getPixels());

		GstFlowReturn flow_return = gst_app_src_push_buffer(gstSrc, buffer);
		if (flow_return != GST_FLOW_OK) {
			ofLog(OF_LOG_WARNING,"error pushing buffer");
		}
		gst.update();
	}

	grabber.draw(0,0);
}
Example #5
gboolean airplayRendererFeedData(GstElement *appsrc, guint size, gpointer *object) {
	AirplayRenderer *self = AIRPLAY_RENDERER(object);
	AirplayRendererPrivate *priv = AIRPLAY_RENDERER_GET_PRIVATE(self);

g_print("AirplayMediaPlayer: feed data %d\n", priv->seq);

	AudioPkg *pkg = g_async_queue_try_pop(priv->bufferList);
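	/* Nothing queued in order: poll the resend table for the expected
	 * sequence number until a packet arrives or playback stops. */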
	while(pkg == NULL) {
		if(!priv->isRunning) return FALSE;
		
		pkg = g_hash_table_lookup(priv->resendTable, &priv->seq);
		g_print("Look up: %d %d\n", priv->seq, pkg == NULL);
		if(pkg) {
			priv->seq = (priv->seq + 1) % 65536;
			break;
		}
		
		usleep(50000);
		sched_yield();
		pkg = g_async_queue_try_pop(priv->bufferList);
		g_print("Sleep: %d\n", priv->seq);
	}
	
	GstBuffer *buffer = gst_buffer_new();
	GST_BUFFER_MALLOCDATA(buffer) = pkg->data; /* the buffer takes ownership of the data */
	GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
	GST_BUFFER_SIZE(buffer) = pkg->length;

	gst_app_src_push_buffer((GstAppSrc *)appsrc, buffer);
	g_free(pkg); /* only the wrapper struct; pkg->data now belongs to the buffer */
		
	return TRUE;
}
Example #6
CAMLprim value ocaml_gstreamer_appsrc_push_buffer_data(value _as, value _buf)
{
  CAMLparam2(_as, _buf);
  int buflen = Caml_ba_array_val(_buf)->dim[0];
  appsrc *as = Appsrc_val(_as);
  GstBuffer *gstbuf;
  GstMapInfo map;
  GstFlowReturn ret;
  gboolean bret;

  caml_release_runtime_system();
  gstbuf = gst_buffer_new_and_alloc(buflen);
  bret = gst_buffer_map(gstbuf, &map, GST_MAP_WRITE);
  caml_acquire_runtime_system();

  if(!bret) {
    gst_buffer_unref(gstbuf); /* mapping failed: release the buffer before raising */
    caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
  }
  memcpy(map.data, (unsigned char*)Caml_ba_data_val(_buf), buflen);

  caml_release_runtime_system();
  gst_buffer_unmap(gstbuf, &map);
  ret = gst_app_src_push_buffer(as->appsrc, gstbuf);
  caml_acquire_runtime_system();

  if (ret != GST_FLOW_OK) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
  CAMLreturn(Val_unit);
}
Example #7
void QGstAppSrc::pushDataToAppSrc()
{
    if (!isStreamValid() || !m_setup)
        return;

    if (m_dataRequested && !m_enoughData) {
        qint64 size;
        if (m_dataRequestSize == (unsigned int)-1)
            size = qMin(m_stream->bytesAvailable(), queueSize());
        else
            size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize);
        void *data = g_malloc(size);
        GstBuffer* buffer = gst_app_buffer_new(data, size, g_free, data);
        buffer->offset = m_stream->pos();
        qint64 bytesRead = m_stream->read((char*)GST_BUFFER_DATA(buffer), size);
        buffer->offset_end =  buffer->offset + bytesRead - 1;

        if (bytesRead > 0) {
            m_dataRequested = false;
            m_enoughData = false;
            GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
            if (ret == GST_FLOW_ERROR) {
                qWarning()<<"appsrc: push buffer error";
            } else if (ret == GST_FLOW_WRONG_STATE) {
                qWarning()<<"appsrc: push buffer wrong state";
            } else if (ret == GST_FLOW_RESEND) {
                qWarning()<<"appsrc: push buffer resend";
            }
        } else {
            // Nothing was read: the buffer was never pushed, so release it here.
            gst_buffer_unref(buffer);
        }
    } else if (m_stream->atEnd()) {
        sendEOS();
    }
}
Example #8
void AVMuxEncode::needVideoData()
{
    GstFlowReturn ret;
    GstBuffer *buffer;

    m_videoSrcLock.lock();
    if (m_videoSrcQ.empty()) {
        m_videoSrcLock.unlock();
        return;
    }

    AVMUX_QUEUEDATA *qd = m_videoSrcQ.dequeue();
    QByteArray frame = qd->data;
    m_lastQueuedVideoTimestamp = qd->timestamp;
    m_lastQueuedVideoParam = qd->param;
    delete qd;

    buffer = gst_buffer_new_and_alloc(frame.length());
    memcpy(GST_BUFFER_DATA(buffer), (unsigned char *)frame.data(), frame.length());
    m_videoSrcLock.unlock();

    ret = gst_app_src_push_buffer((GstAppSrc *)(m_appVideoSrc), buffer);

    if (ret != GST_FLOW_OK) {
        qDebug() << "video push error ";
    }
}
Example #9
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
  guint8* data = GST_BUFFER_DATA(buffer);
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  GST_BUFFER_SIZE(buffer) = bytesRead;
  mByteOffset += bytesRead;

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
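  /* push_buffer() takes ownership of the ref we pass in; we keep our own
   * reference so we can still read GST_BUFFER_SIZE below. */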
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret)));
  }

  if (GST_BUFFER_SIZE (buffer) < aLength) {
    /* If we read less than what we wanted, we reached the end */
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);
}
Example #10
void ofxGstRTPServer::sendAudioOut(PooledAudioFrame * pooledFrame){
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock); // gst_pipeline_get_clock() already returned a reference, so a single unref releases it
	if(firstAudioFrame && !audioAutoTimestamp){
		prevTimestampAudio = now;
		firstAudioFrame = false;
		return;
	}

	int size = pooledFrame->audioFrame._payloadDataLengthInSamples*2*pooledFrame->audioFrame._audioChannel;

	GstBuffer * echoCancelledBuffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledFrame->audioFrame._payloadData,size,0,size,pooledFrame,(GDestroyNotify)&ofxWebRTCAudioPool::relaseFrame);

	if(!audioAutoTimestamp){
		GstClockTime duration = (pooledFrame->audioFrame._payloadDataLengthInSamples * GST_SECOND / pooledFrame->audioFrame._frequencyInHz);
		GstClockTime now = prevTimestampAudio + duration;

		GST_BUFFER_OFFSET(echoCancelledBuffer) = numFrameAudio++;
		GST_BUFFER_OFFSET_END(echoCancelledBuffer) = numFrameAudio;
		GST_BUFFER_DTS (echoCancelledBuffer) = now;
		GST_BUFFER_PTS (echoCancelledBuffer) = now;
		GST_BUFFER_DURATION(echoCancelledBuffer) = duration;
		prevTimestampAudio = now;
	}


	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcAudio, echoCancelledBuffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError(LOG_NAME) << "error pushing audio buffer: flow_return was " << flow_return;
	}
}
Example #11
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    GstMapInfo map;

    buf = gst_buffer_new_and_alloc (100);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    memset (map.data, i, 100);
    gst_buffer_unmap (buf, &map);

    printf ("%d: pushing buffer for pointer %p, %p\n", i, map.data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  /* push EOS */
  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  /* _is_eos() does not block and returns TRUE if there is not currently an EOS
   * to be retrieved */
  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstSample *sample;

    /* pull the next item, this can return NULL when there is no more data and
     * EOS has been received */
    sample = gst_app_sink_pull_sample (GST_APP_SINK (app->sink));
    printf ("retrieved sample %p\n", sample);
    if (sample)
      gst_sample_unref (sample);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
Example #12
void VideoHttpBuffer::needData(int size)
{
    Q_ASSERT(media);

    /* Refactor to use gst_pad_alloc_buffer? Probably wouldn't provide any benefit. */
    GstBuffer *buffer = gst_buffer_new_and_alloc(size);

    int re = media->read(media->readPosition(), (char*)GST_BUFFER_DATA(buffer), size);
    if (re < 0)
    {
        /* Error reporting is handled by MediaDownload for this case */
        qDebug() << "VideoHttpBuffer: read error";
        gst_buffer_unref(buffer);
        return;
    }
    else if (re == 0)
    {
        gst_buffer_unref(buffer);
        if (media->readPosition() >= media->fileSize() && media->isFinished())
        {
            qDebug() << "VideoHttpBuffer: end of stream";
            gst_app_src_end_of_stream(m_element);
        }
        else
            qDebug() << "VideoHttpBuffer: read aborted";
        return;
    }

    GST_BUFFER_SIZE(buffer) = re;

    GstFlowReturn flow = gst_app_src_push_buffer(m_element, buffer);
    if (flow != GST_FLOW_OK)
        qDebug() << "VideoHttpBuffer: Push result is" << flow;
}
Example #13
void GStreamerReader::ReadAndPushData(guint aLength)
{
  int64_t offset1 = mResource.Tell();
  unused << offset1;
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = mResource.Read(reinterpret_cast<char*>(data + bytesRead),
                        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = mResource.Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(LogLevel::Error, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(LogLevel::Error, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(LogLevel::Warning, "ReadAndPushData read underflow, "
        "bytesRead=%u, aLength=%u, offset(%lld,%lld)",
        bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Ensure offset change is consistent in this function.
   * If there are other stream operations on another thread at the same time,
   * it will disturb the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
Example #14
FlowReturn ApplicationSource::pushBuffer(const BufferPtr & buffer)
{
    if (d->appSrc()) {
        return static_cast<FlowReturn>(gst_app_src_push_buffer(d->appSrc(), gst_buffer_ref(buffer)));
    } else {
        return FlowWrongState;
    }
}
Example #15
void ofxGstRTPServer::newFrameDepth(ofPixels & pixels, GstClockTime timestamp){
	// here we push new depth frames in the pipeline, it's important
	// to timestamp them properly so gstreamer can sync them with the
	// audio.

	if(!bufferPoolDepth || !appSrcDepth) return;

	GstClockTime now = timestamp;
	if(!depthAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstDepthFrame){
			prevTimestampDepth = now;
			firstDepthFrame = false;
			return;
		}
	}

	// get a pixels buffer from the pool and copy the passed frame into it
	PooledPixels<unsigned char> * pooledPixels = bufferPoolDepth->newBuffer();
	//pooledPixels->swap(pixels);
	*(ofPixels*)pooledPixels=pixels;

	// wrap the pooled pixels into a gstreamer buffer and pass the release
	// callback so when it's not needed anymore by gst we can return it to the pool
	GstBuffer * buffer;
	buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,pooledPixels->getPixels(), pooledPixels->size(), 0, pooledPixels->size(), pooledPixels, (GDestroyNotify)&ofxGstBufferPool<unsigned char>::relaseBuffer);

	// timestamp the buffer, right now we are using:
	// timestamp = current pipeline time - base time
	// duration = timestamp - previousTimeStamp
	// the duration is actually the duration of the previous frame
	// but should be accurate enough

	if(!depthAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameDepth++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameDepth;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampDepth;
		prevTimestampDepth = now;
	}

	if(sendDepthKeyFrame){
		emitDepthKeyFrame();
	}

	// finally push the buffer into the pipeline through the appsrc element
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcDepth, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing depth buffer: flow_return was " << flow_return;
	}
}
Example #16
void ofxGstRTPServer::newFrameDepth(ofShortPixels & pixels, GstClockTime timestamp, float pixel_size, float distance){
	//unsigned long long time = ofGetElapsedTimeMicros();

	// here we push new depth frames in the pipeline, it's important
	// to timestamp them properly so gstreamer can sync them with the
	// audio.

	if(!appSrcDepth) return;

	GstClockTime now = timestamp;
	if(!depthAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstDepthFrame){
			prevTimestampDepth = now;
			firstDepthFrame = false;
			return;
		}
	}

	ofxDepthCompressedFrame frame = depthCompressor.newFrame(pixels,pixel_size,distance);
	GstBuffer * buffer = gst_buffer_new_allocate(NULL,frame.compressedData().size()*sizeof(short),NULL);
	GstMapInfo mapinfo;
	gst_buffer_map(buffer,&mapinfo,GST_MAP_WRITE);
	memcpy(mapinfo.data,&frame.compressedData()[0],frame.compressedData().size()*sizeof(short));
	gst_buffer_unmap(buffer,&mapinfo);
	// timestamp the buffer, right now we are using:
	// timestamp = current pipeline time - base time
	// duration = timestamp - previousTimeStamp
	// the duration is actually the duration of the previous frame
	// but should be accurate enough

	if(!depthAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameDepth++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameDepth;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampDepth;
		prevTimestampDepth = now;
	}

	if(sendDepthKeyFrame){
		//emitDepthKeyFrame();
	}

	// finally push the buffer into the pipeline through the appsrc element
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcDepth, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing depth buffer: flow_return was " << flow_return;
	}
	//cout << "sending depth buffer with " << pixels.getWidth() << "," << pixels.getHeight() << " csize: " << frame.compressedData().size() << endl;
	//cout << ofGetElapsedTimeMicros() - time << endl;
}
Example #17
/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
 * displaying. The steps it goes through are as follows:
 *
 * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
 *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
 * 2) frame->data, which contains the compressed frame data, is reffed and
 *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
 *    decoding.
 * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
 *    will call frame->unref_data() to free it.
 * 4) Once the decompressed frame is available the GStreamer pipeline calls
 *    new_sample() in the GStreamer thread.
 * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
 *    the decoding queue using the GStreamer timestamp information to deal with
 *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
 * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
 *    pushes it to the display_queue and calls schedule_frame().
 * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
 *    display_frame() to be called, in the main thread, at the right time for
 *    the next frame.
 * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
 *    calls stream_display_frame().
 * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
 *    and decompressed frame with it.
 */
static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                              SpiceFrame *frame, int latency)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;

    if (frame->size == 0) {
        SPICE_DEBUG("got an empty frame buffer!");
        frame->free(frame);
        return TRUE;
    }

    if (frame->mm_time < decoder->last_mm_time) {
        SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
                    " resetting stream",
                    frame->mm_time, decoder->last_mm_time);
        /* Let GStreamer deal with the frame anyway */
    }
    decoder->last_mm_time = frame->mm_time;

    if (latency < 0 &&
        decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
        /* Dropping MJPEG frames has no impact on those that follow and
         * saves CPU so do it.
         */
        SPICE_DEBUG("dropping a late MJPEG frame");
        frame->free(frame);
        return TRUE;
    }

    if (decoder->pipeline == NULL) {
        /* An error occurred, causing the GStreamer pipeline to be freed */
        spice_warning("An error occurred, stopping the video stream");
        return FALSE;
    }

    /* ref() the frame data for the buffer */
    frame->ref_data(frame->data_opaque);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
                                                    frame->data, frame->size, 0, frame->size,
                                                    frame->data_opaque, frame->unref_data);

    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;

    g_mutex_lock(&decoder->queues_mutex);
    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
    g_mutex_unlock(&decoder->queues_mutex);

    if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", frame->size);
        stream_dropped_frame_on_playback(decoder->base.stream);
    }
    return TRUE;
}
Example #18
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    void *data;

    data = malloc (100);
    memset (data, i, 100);

    buf = gst_app_buffer_new (data, 100, dont_eat_my_chicken_wings, data);
    printf ("%d: creating buffer for pointer %p, %p\n", i, data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstBuffer *buf;

    buf = gst_app_sink_pull_buffer (GST_APP_SINK (app->sink));
    printf ("retrieved buffer %p\n", buf);
    gst_buffer_unref (buf);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
Example #19
static int gst_video_push(struct videnc_state *st, const uint8_t *src,
			  size_t size)
{
	GstBuffer *buffer;
	int ret = 0;

	if (!st) {
		return EINVAL;
	}

	if (!size) {
		warning("gst_video: push: eos returned %d at %d\n",
			ret, __LINE__);
		gst_app_src_end_of_stream((GstAppSrc *)st->source);
		return ret;
	}

	/* Wait "start feed". */
	pthread_mutex_lock(&st->mutex);
	if (st->bwait) {
#define WAIT_TIME_SECONDS 5
		struct timespec ts;
		struct timeval tp;
		gettimeofday(&tp, NULL);
		ts.tv_sec  = tp.tv_sec;
		ts.tv_nsec = tp.tv_usec * 1000;
		ts.tv_sec += WAIT_TIME_SECONDS;
		/* Wait. */
		ret = pthread_cond_timedwait(&st->wait, &st->mutex, &ts);
		if (ETIMEDOUT == ret) {
			/* Unlock before returning, or the next call would deadlock. */
			pthread_mutex_unlock(&st->mutex);
			warning("gst_video: Raw frame is lost"
				" because of timeout\n");
			return ret;
		}
	}
	pthread_mutex_unlock(&st->mutex);

	/* Create a new empty buffer */
	buffer = gst_buffer_new();
	GST_BUFFER_MALLOCDATA(buffer) = (guint8 *)src;
	GST_BUFFER_SIZE(buffer) = (guint)size;
	GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);

	ret = gst_app_src_push_buffer((GstAppSrc *)st->source, buffer);

	if (ret != GST_FLOW_OK) {
		warning("gst_video: push buffer returned"
			" %d for %d bytes \n", ret, size);
		return ret;
	}

	return ret;
}
Example #20
void VideoReceiver::consumeVideo(QByteArray *media)
{
  qDebug() << "In" << __FUNCTION__;

  GstBuffer *buffer = gst_buffer_new_and_alloc(media->length());

  // FIXME: zero copy?
  memcpy(GST_BUFFER_DATA(buffer), media->data(), media->length());

  if (gst_app_src_push_buffer(GST_APP_SRC(source), buffer) != GST_FLOW_OK) {
	qWarning("Error with gst_app_src_push_buffer");
  }
}
Example #21
void
shmdata_any_writer_push_data (shmdata_any_writer_t * context,
			      void *data,
			      int size,
			      unsigned long long timestamp,
			      void (*done_with_data) (void *),
			      void *user_data)
{
  GstBuffer *buf;
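  /* Wrap the caller's memory without copying; done_with_data() is invoked
   * once the pipeline no longer needs it. */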
  buf = gst_app_buffer_new (data, size, done_with_data, user_data);
  GST_BUFFER_TIMESTAMP (buf) = (GstClockTime) (timestamp);
  gst_app_src_push_buffer (GST_APP_SRC (context->src_), buf);
}
Example #22
extern int
feed_buffer_to_gst (const char *audio, size_t b_len, GNUNET_gstData * d)
{
  GstBuffer *b;
  gchar *bufspace;
  GstFlowReturn flow;

  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
	      "Feeding %u bytes to GStreamer\n",
	      (unsigned int) b_len);

  bufspace = g_memdup (audio, b_len);
  b = gst_buffer_new_wrapped (bufspace, b_len);
  if (NULL == b)
  {
    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
		"Failed to wrap a buffer\n");
    g_free (bufspace);
    return GNUNET_SYSERR;
  }
  if (GST_APP_SRC(d->appsrc) == NULL)
    exit(10);
  flow = gst_app_src_push_buffer (GST_APP_SRC(d->appsrc), b);
  /* They all return GNUNET_OK, because currently player stops when
   * data stops coming. This might need to be changed for the player
   * to also stop when pipeline breaks.
   */
  switch (flow)
  {
  case GST_FLOW_OK:
    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
		"Fed %u bytes to the pipeline\n",
		(unsigned int) b_len);
    break;
  case GST_FLOW_FLUSHING:
    /* buffer was dropped, because pipeline state is not PAUSED or PLAYING */
    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
		"Dropped a buffer\n");
    break;
  case GST_FLOW_EOS:
    /* end of stream */
    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
		"EOS\n");
    break;
  default:
    GNUNET_log (GNUNET_ERROR_TYPE_WARNING,
		"Unexpected push result\n");
    break;
  }
  return GNUNET_OK;
}
Example #23
CAMLprim value ocaml_gstreamer_appsrc_push_buffer(value _as, value _buf)
{
  CAMLparam2(_as, _buf);
  appsrc *as = Appsrc_val(_as);
  GstBuffer *gstbuf = Buffer_val(_buf);
  GstFlowReturn ret;

  caml_release_runtime_system();
  /* The reference will be eaten by push_buffer */
  gst_buffer_ref(gstbuf);
  ret = gst_app_src_push_buffer(as->appsrc, gstbuf);
  caml_acquire_runtime_system();

  if (ret != GST_FLOW_OK) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
  CAMLreturn(Val_unit);
}
Example #24
static GstFlowReturn
on_new_sample_from_source (GstAppSink * elt, gpointer user_data)
{
  ProgramData *data = (ProgramData *) user_data;
  GstSample *sample;
  GstBuffer *buffer;
  GstElement *source;

  sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
  buffer = gst_sample_get_buffer (sample);
  source = gst_bin_get_by_name (GST_BIN (data->sink), "testsource");
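  /* push_buffer() takes ownership, so push an extra ref: the sample still owns the original buffer. */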
  gst_app_src_push_buffer (GST_APP_SRC (source), gst_buffer_ref (buffer));
  gst_sample_unref (sample);
  g_object_unref (source);
  return GST_FLOW_OK;
}
Example #25
/* C920 Live Src - Handle a H.264 Buffer
 * -------------------------------------
 * Callback that's called from the device. Use the data and length to create a GstBuffer
 */
static void c920_live_src_handle_buffer(gconstpointer data, guint length, gpointer userdata)
{
	g_return_if_fail(C920_IS_LIVE_SRC(userdata));

	C920LiveSrc *self = C920_LIVE_SRC(userdata);
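	/* Skip incoming frames while the appsrc queue reports full. */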
	if (!self->priv->queue_full)
	{
		GstBuffer *buffer = gst_buffer_new();
		if (buffer)
		{
			buffer->size = length;
			buffer->malloc_data = buffer->data = g_malloc(length);
			memcpy(buffer->malloc_data, data, length);
			gst_app_src_push_buffer(GST_APP_SRC(self), buffer);
		}
	}
}
Example #26
static void udp_streaming (Encoder *encoder, GstBuffer *buffer)
{
        gsize buffer_size;
        gssize offset;
        GstFlowReturn ret;

        offset = 0;
        buffer_size = gst_buffer_get_size (buffer);
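        /* Repackage the stream into 1316-byte chunks (7 x 188-byte MPEG-TS
         * packets, one UDP payload) before pushing them to the appsrc. */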
        while (buffer_size != 0) {
                if ((encoder->cache_size == 0) && (buffer_size < 1316)) {
                        encoder->cache_7x188 = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY, offset, buffer_size);
                        encoder->cache_size = buffer_size;
                        break;

                } else if (encoder->cache_size == 0) {
                        /* buffer_size >= 1316 */
                        encoder->cache_7x188 = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY, offset, 1316);
                        offset += 1316;
                        buffer_size -= 1316;

                } else if (encoder->cache_size + buffer_size >= 1316) {
                        gsize size;
                        gst_buffer_ref (buffer);
                        size = 1316 - encoder->cache_size;
                        encoder->cache_7x188 = gst_buffer_append_region (encoder->cache_7x188, buffer, offset, size);
                        offset += 1316 - encoder->cache_size;
                        buffer_size -= 1316 - encoder->cache_size;
                        encoder->cache_size = 0;

                } else {
                        /* encoder->cache_size + buffer_size < 1316 */
                        gst_buffer_ref (buffer);
                        encoder->cache_7x188 = gst_buffer_append_region (encoder->cache_7x188, buffer, offset, buffer_size);
                        encoder->cache_size += buffer_size;
                        break;
                }
                ret = gst_app_src_push_buffer ((GstAppSrc *)encoder->appsrc, encoder->cache_7x188);
                if (ret != GST_FLOW_OK) {
                        /* push_buffer() took ownership of cache_7x188 even on failure, so it must not be unreffed here */
                        GST_ERROR ("appsrc push buffer failure, return %s.", gst_flow_get_name (ret));
                }
                encoder->cache_size = 0;
        }
}
Example #27
// FIXME: Use gst_app_src_push_sample() instead when we switch to the appropriate GStreamer version.
static GstFlowReturn pushSample(GstAppSrc* appsrc, GstSample* sample)
{
    g_return_val_if_fail(GST_IS_SAMPLE(sample), GST_FLOW_ERROR);

    GstCaps* caps = gst_sample_get_caps(sample);
    if (caps)
        gst_app_src_set_caps(appsrc, caps);
    else
        GST_WARNING_OBJECT(appsrc, "received sample without caps");

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (UNLIKELY(!buffer)) {
        GST_WARNING_OBJECT(appsrc, "received sample without buffer");
        return GST_FLOW_OK;
    }

    // gst_app_src_push_buffer() steals the reference, we need an additional one.
    return gst_app_src_push_buffer(appsrc, gst_buffer_ref(buffer));
}
Example #28
void
shmdata_any_writer_push_data_with_duration (shmdata_any_writer_t * context,
					    void *data,
					    int size,
					    unsigned long long timestamp,
					    unsigned long long duration,
					    unsigned long long offset,
					    unsigned long long offset_end,
					    void (*done_with_data) (void *),
					    void *user_data)
{
  GstBuffer *buf;
  buf = gst_app_buffer_new (data, size, done_with_data, user_data);
  GST_BUFFER_TIMESTAMP (buf) = (GstClockTime) (timestamp);
  GST_BUFFER_DURATION (buf) = (GstClockTime) (duration);
  GST_BUFFER_OFFSET (buf) = (GstClockTime) (offset);
  GST_BUFFER_OFFSET_END (buf) = (GstClockTime) (offset_end);
  gst_app_src_push_buffer (GST_APP_SRC (context->src_), buf);
}
Example #29
bool Player::Backend::push_data(Packet& data, Offset current_offset) {
    GstBuffer * buffer = gst_buffer_new_allocate(
        nullptr, data.size()*Config::word_size, nullptr);
    Offset packet_size = Offset(data.size());
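    // Timestamp the buffer from the running stream offset so downstream elements can sync.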

    GST_BUFFER_PTS(buffer) = current_offset.to_time(Config::sample_rate).value();
    GST_BUFFER_DURATION(buffer) = packet_size.to_time(Config::sample_rate).value();
    GST_BUFFER_OFFSET(buffer) = current_offset.value();
    GST_BUFFER_OFFSET_END(buffer) = (current_offset+packet_size).value();

    auto size = data.size() * Config::word_size;
    auto rsize = gst_buffer_fill(buffer, 0, static_cast<void *>(data.data()), size);
    assert(size==rsize);

    auto ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc_), buffer); 
    if ( ret != GST_FLOW_OK) {
        throw BadFlowException("bad flow while pushing buffer");
    }
    return true;
}
Example #30
static void
symmetry_test_assert_passthrough (SymmetryTest * st, GstBuffer * in)
{
  gpointer copy;
  gsize data_size;
  GstSample *out;

  gst_buffer_extract_dup (in, 0, -1, &copy, &data_size);
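  /* Keep a flat copy for comparison: push_buffer() takes ownership of `in'. */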

  fail_unless (gst_app_src_push_buffer (st->sink_src, in) == GST_FLOW_OK);
  in = NULL;
  out = gst_app_sink_pull_sample (st->src_sink);
  fail_unless (out != NULL);

  fail_unless (gst_buffer_get_size (gst_sample_get_buffer (out)) == data_size);
  fail_unless (gst_buffer_memcmp (gst_sample_get_buffer (out), 0, copy,
          data_size) == 0);
  g_free (copy);
  gst_sample_unref (out);
}