Example No. 1
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
#if GST_VERSION_MAJOR==0
					if(prevBuffer) gst_buffer_unref (prevBuffer);
#else
					if(prevBuffer) gst_sample_unref (prevBuffer);
#endif
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer *buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_sample_unref (prevBuffer);
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = sample;
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
		ofLog(OF_LOG_WARNING,"ofGstVideoUtils not loaded");
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 2
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			ofScopedLock lock(mutex);
			bHavePixelsChanged = bBackPixelsChanged;
			if (bHavePixelsChanged){
				bBackPixelsChanged=false;
				swap(pixels,backPixels);
				#ifdef OF_USE_GST_GL
				if(backTexture.isAllocated()){
					frontTexture.getTextureData() = backTexture.getTextureData();
					frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
					frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
				}
				#endif
				if(!copyPixels){
					frontBuffer = backBuffer;
				}
			}
		}else{
#if GST_VERSION_MAJOR==0
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = shared_ptr<GstBuffer>(buffer,gst_buffer_unref);
					bHavePixelsChanged=true;
				}
			}
		}
#else
			GstBuffer * buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
		}
#endif
	}else{
		ofLogWarning("ofGstVideoUtils") << "update(): ofGstVideoUtils not loaded";
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 3
static GstFlowReturn on_new_preroll_from_source (GstAppSink * elt, void * data){
#if GST_VERSION_MAJOR==0
	shared_ptr<GstBuffer> buffer(gst_app_sink_pull_preroll(GST_APP_SINK (elt)),&gst_buffer_unref);
#else
	shared_ptr<GstSample> buffer(gst_app_sink_pull_preroll(GST_APP_SINK (elt)),&gst_sample_unref);
#endif
	return ((ofGstUtils*)data)->preroll_cb(buffer);
}
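For context, a minimal sketch of how a preroll callback like the one above is typically attached to an appsink. The names my_new_preroll, setup_sink and self are illustrative, not from the example:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

// Illustrative preroll handler: pull the sample and release it right away.
// A real handler would forward it to user_data, as in the examples here.
static GstFlowReturn my_new_preroll (GstAppSink * sink, gpointer user_data){
	GstSample * sample = gst_app_sink_pull_preroll(sink);
	if(sample) gst_sample_unref(sample);
	return GST_FLOW_OK;
}

static void setup_sink (GstAppSink * sink, gpointer self){
	GstAppSinkCallbacks callbacks = { 0 };
	callbacks.new_preroll = my_new_preroll;
	// appsink copies the struct, so a stack-allocated one is fine.
	gst_app_sink_set_callbacks(sink, &callbacks, self, NULL);
}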
Example No. 4
static GstFlowReturn on_new_preroll_from_source (GstAppSink * elt, void * data)
{
  guint size;
  GstBuffer *buffer;

  ofGstVideoData * gstData = (ofGstVideoData *) data;

  //get the buffer from appsink
  buffer = gst_app_sink_pull_preroll (GST_APP_SINK (elt));

  size = GST_BUFFER_SIZE (buffer);
  /*if(size < data->width*data->height*3){
	  gst_buffer_unref (buffer);
	  return;
  }*/
  ofGstDataLock(gstData);
	  if(gstData->pixels){
		  memcpy (gstData->pixels, GST_BUFFER_DATA (buffer), size);

			  gstData->bHavePixelsChanged=true;

	  }
  ofGstDataUnlock(gstData);


  /// we don't need the appsink buffer anymore
  gst_buffer_unref (buffer);

  return GST_FLOW_OK;
}
Example No. 5
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();

				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels = backPixels;
				}

			mutex.unlock();
		}else{
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				guint size = GST_BUFFER_SIZE (buffer);
				if(pixels.isAllocated()){
					memcpy (pixels.getPixels(), GST_BUFFER_DATA (buffer), size);
					bHavePixelsChanged=true;
				}
				/// we don't need the appsink buffer anymore
				gst_buffer_unref (buffer);
			}
		}
	}else{
		ofLog(OF_LOG_WARNING,"not loaded");
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 6
GstFlowReturn GstAppSinkPipeline::NewPrerollCallback(GstAppSink* appsink, gpointer user_data)
{
	GstSample* sample = gst_app_sink_pull_preroll(appsink);
	gst_sample_unref(sample);
	return GST_FLOW_OK;
}
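A signal-based alternative to the callbacks struct, sketched here on the assumption that NewPrerollCallback is declared static in GstAppSinkPipeline so it can be used as a plain C callback; note the "new-preroll" signal is only emitted while the appsink's "emit-signals" property is enabled:

// Hypothetical registration helper (attach_preroll_handler is an invented name).
static void attach_preroll_handler (GstElement * appsink, gpointer user_data){
	// Without this, appsink never emits "new-preroll".
	g_object_set(appsink, "emit-signals", TRUE, NULL);
	g_signal_connect(appsink, "new-preroll",
	                 G_CALLBACK(GstAppSinkPipeline::NewPrerollCallback), user_data);
}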
Example No. 7
BufferPtr ApplicationSink::pullPreroll()
{
    BufferPtr buf;
    if (d->appSink()) {
        buf = BufferPtr::wrap(gst_app_sink_pull_preroll(d->appSink()), false);
    }
    return buf;
}
Example No. 8
GstFlowReturn GStreamerWrapper::onNewPrerollFromAudioSource( GstAppSink* appsink, void* listener )
{
	GstBuffer* gstAudioSinkBuffer = gst_app_sink_pull_preroll( GST_APP_SINK( appsink ) );
	( ( GStreamerWrapper * )listener )->newAudioSinkPrerollCallback( gstAudioSinkBuffer );
	gst_buffer_unref( gstAudioSinkBuffer );

	return GST_FLOW_OK;
}
Example No. 9
static GstFlowReturn on_new_preroll_from_source (GstAppSink * elt, void * data){
#if GST_VERSION_MAJOR==0
	GstBuffer *buffer;
#else
	GstSample *buffer;
#endif
	buffer = gst_app_sink_pull_preroll(GST_APP_SINK (elt));
	return ((ofGstUtils*)data)->preroll_cb(buffer);
}
Example No. 10
static GstFlowReturn new_preroll(GstAppSink * sink, gpointer data)
{
	GStreamerFramesReceiver * pClass = (GStreamerFramesReceiver*) data;

	GstSample * sample = gst_app_sink_pull_preroll(sink);

	GstFlowReturn res = frame_handler(sample, pClass);

	gst_sample_unref(sample);

	return res;
}
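The frame_handler() referenced above is not shown in the example. A purely hypothetical sketch of a compatible implementation; the onFrame() method is invented for illustration:

static GstFlowReturn frame_handler(GstSample * sample, GStreamerFramesReceiver * receiver)
{
	GstBuffer * buffer = gst_sample_get_buffer(sample);
	GstMapInfo info;
	if (!buffer || !gst_buffer_map(buffer, &info, GST_MAP_READ))
		return GST_FLOW_ERROR;

	// onFrame() is invented for this sketch; the mapped data still belongs
	// to the sample, so the receiver must copy anything it keeps.
	receiver->onFrame(info.data, info.size);

	gst_buffer_unmap(buffer, &info);
	return GST_FLOW_OK;
}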
Example No. 11
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			std::unique_lock<std::mutex> lock(mutex);
			bHavePixelsChanged = bBackPixelsChanged;
			if (bHavePixelsChanged){
				bBackPixelsChanged=false;
				swap(pixels,backPixels);
				#ifdef OF_USE_GST_GL
				if(backTexture.isAllocated()){
					frontTexture.getTextureData() = backTexture.getTextureData();
					frontTexture.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
					frontTexture.setTextureWrap(GL_CLAMP_TO_EDGE,GL_CLAMP_TO_EDGE);
				}
				#endif
				if(!copyPixels){
					frontBuffer = backBuffer;
				}
			}
		}else{
#if GST_VERSION_MAJOR==0
			ofLogError() << "frame by frame doesn't work any more in 0.10";
#else
			GstBuffer * buffer;
			GstSample * sample;

			//get the buffer from appsink
			if(isPaused()){
				sample = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			}else{
				sample = gst_app_sink_pull_sample (GST_APP_SINK (getSink()));
			}
			buffer = gst_sample_get_buffer(sample);

			if(buffer){
				if(pixels.isAllocated()){
					gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
					//TODO: stride = mapinfo.size / height;
					pixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					backBuffer = shared_ptr<GstSample>(sample,gst_sample_unref);
					bHavePixelsChanged=true;
					gst_buffer_unmap(buffer,&mapinfo);
				}
			}
#endif
		}
	}else{
		ofLogWarning("ofGstVideoUtils") << "update(): ofGstVideoUtils not loaded";
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 12
GstFlowReturn GStreamerImageStream::on_new_preroll(GstAppSink *appsink, GStreamerImageStream *user_data)
{
    // get the sample from appsink

    GstSample *sample = gst_app_sink_pull_preroll(appsink);

    // get sample info

    GstCaps *caps = gst_sample_get_caps(sample);
    GstStructure *structure = gst_caps_get_structure(caps, 0);

    int width;
    int height;

    gst_structure_get_int(structure, "width", &width);
    gst_structure_get_int(structure, "height", &height);

    if (width<=0 || height<=0)
    {
        OSG_NOTICE<<"Error: video size invalid width="<<width<<", height="<<height<<std::endl;
        return GST_FLOW_ERROR;
    }

    if (user_data->_width != width || user_data->_height != height)
    {
        user_data->_width = width;
        user_data->_height = height;


        int row_width = width*3;
        if ((row_width%4)!=0)
        {
            row_width += (4-(row_width%4));
        }

        // if buffer previously assigned free it before allocating new buffer.
        if (user_data->_internal_buffer) free(user_data->_internal_buffer);

        // allocate buffer
        user_data->_internal_buffer = (unsigned char*)malloc(sizeof(unsigned char)*row_width*height);

        // assign buffer to image
        user_data->setImage(user_data->_width, user_data->_height, 1, GL_RGB, GL_RGB, GL_UNSIGNED_BYTE, user_data->_internal_buffer, osg::Image::NO_DELETE, 4);
    }

    // clean resources
    gst_sample_unref(sample);

    return GST_FLOW_OK;
}
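The preroll handler above only reads the caps and (re)allocates the image. A hedged sketch of what the matching new-sample handler might look like, assuming GStreamerImageStream derives from osg::Image as its setImage() call suggests:

GstFlowReturn GStreamerImageStream::on_new_sample(GstAppSink *appsink, GStreamerImageStream *user_data)
{
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if (!sample) return GST_FLOW_ERROR;

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo info;
    if (buffer && gst_buffer_map(buffer, &info, GST_MAP_READ))
    {
        // mirror the 4-byte row alignment used by the preroll allocation
        int row_width = user_data->_width*3;
        if ((row_width%4)!=0) row_width += (4-(row_width%4));
        gsize expected = (gsize)row_width*user_data->_height;

        memcpy(user_data->_internal_buffer, info.data,
               info.size < expected ? info.size : expected);
        gst_buffer_unmap(buffer, &info);

        user_data->dirty(); // osg::Image repaint hint, assuming that base class
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}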
Example No. 13
static gboolean query_data(struct input_handle* ih) {
  GstBuffer *preroll;
  GstCaps *src_caps;
  GstStructure *s;
  int i;

  ih->n_channels = 0;
  ih->sample_rate = 0;
  ih->channel_positions = NULL;

  preroll = gst_app_sink_pull_preroll(GST_APP_SINK(ih->appsink));
  src_caps = gst_buffer_get_caps(preroll);

  s = gst_caps_get_structure(src_caps, 0);
  gst_structure_get_int(s, "rate", &(ih->sample_rate));
  gst_structure_get_int(s, "channels", &(ih->n_channels));
  if (!ih->sample_rate || !ih->n_channels) {
    gst_caps_unref(src_caps);
    gst_buffer_unref(preroll);
    return FALSE;
  }

  ih->channel_positions = gst_audio_get_channel_positions(s);
  if (verbose) {
    if (ih->channel_positions) {
      for (i = 0; i < ih->n_channels; ++i) {
        printf("Channel %d: %d\n", i, ih->channel_positions[i]);
      }
    }
    g_print ("%d channels @ %d Hz\n", ih->n_channels, ih->sample_rate);
  }

  gst_caps_unref(src_caps);
  gst_buffer_unref(preroll);

  return TRUE;
}
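query_data() above is written against the GStreamer 0.10 API: pull_preroll returns a GstBuffer, and gst_buffer_get_caps() and gst_audio_get_channel_positions() no longer exist in 1.x. A hedged sketch of a 1.x equivalent using GstSample and GstAudioInfo; query_data_1x is an illustrative name:

#include <gst/app/gstappsink.h>
#include <gst/audio/audio.h>

static gboolean query_data_1x(struct input_handle* ih) {
  ih->n_channels = 0;
  ih->sample_rate = 0;
  ih->channel_positions = NULL;

  // In 1.x the preroll is a GstSample that carries its caps along.
  GstSample *preroll = gst_app_sink_pull_preroll(GST_APP_SINK(ih->appsink));
  if (!preroll) return FALSE;

  GstCaps *src_caps = gst_sample_get_caps(preroll); // owned by the sample
  GstAudioInfo info;
  if (!src_caps || !gst_audio_info_from_caps(&info, src_caps)) {
    gst_sample_unref(preroll);
    return FALSE;
  }

  ih->sample_rate = GST_AUDIO_INFO_RATE(&info);
  ih->n_channels = GST_AUDIO_INFO_CHANNELS(&info);
  // Channel positions now live in info.position; they would have to be
  // copied out of the GstAudioInfo if the caller still needs them.

  gst_sample_unref(preroll);
  return ih->sample_rate && ih->n_channels;
}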
Example No. 14
void ofGstVideoUtils::update(){
	if (isLoaded()){
		if(!isFrameByFrame()){
			mutex.lock();
				bHavePixelsChanged = bBackPixelsChanged;
				if (bHavePixelsChanged){
					bBackPixelsChanged=false;
					pixels.swap(backPixels);
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					prevBuffer = buffer;
				}

			mutex.unlock();
		}else{
			GstBuffer *buffer;

			//get the buffer from appsink
			if(isPaused()) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (getSink()));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (getSink()));

			if(buffer){
				if(pixels.isAllocated()){
					if(prevBuffer) gst_buffer_unref (prevBuffer);
					//memcpy (pixels.getPixels(), GST_BUFFER_DATA (buffer), size);
					pixels.setFromExternalPixels(GST_BUFFER_DATA (buffer),pixels.getWidth(),pixels.getHeight(),pixels.getNumChannels());
					prevBuffer = buffer;
					bHavePixelsChanged=true;
				}
			}
		}
	}else{
		ofLog(OF_LOG_WARNING,"ofGstVideoUtils not loaded");
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 15
void ofGstUtils::update(){
	gstHandleMessage();
	if (bLoaded == true){
		if(!bFrameByFrame){
			ofGstDataLock(&gstData);

				bHavePixelsChanged = gstData.bHavePixelsChanged;
				if (bHavePixelsChanged){
					gstData.bHavePixelsChanged=false;
					bIsMovieDone = false;
					memcpy(pixels,gstData.pixels,width*height*bpp);
				}

			ofGstDataUnlock(&gstData);
		}else{
			GstBuffer *buffer;


			//get the buffer from appsink
			if(bPaused) buffer = gst_app_sink_pull_preroll (GST_APP_SINK (gstSink));
			else buffer = gst_app_sink_pull_buffer (GST_APP_SINK (gstSink));

			if(buffer){
				guint size = GST_BUFFER_SIZE (buffer);
				if(pixels){
					memcpy (pixels, GST_BUFFER_DATA (buffer), size);
					bHavePixelsChanged=true;
				}
				/// we don't need the appsink buffer anymore
				gst_buffer_unref (buffer);
			}
		}
	}
	bIsFrameNew = bHavePixelsChanged;
	bHavePixelsChanged = false;
}
Example No. 16
/*
 *  PsychGSGetTextureFromMovie() -- Create an OpenGL texture map from a specific videoframe from given movie object.
 *
 *  win = Window pointer of onscreen window for which a OpenGL texture should be created.
 *  moviehandle = Handle to the movie object.
 *  checkForImage = true == Just check if new image available, false == really retrieve the image, blocking if necessary.
 *  timeindex = When not in playback mode, this allows specification of a requested frame by presentation time.
 *              If set to -1, or if in realtime playback mode, this parameter is ignored and the next video frame is returned.
 *  out_texture = Pointer to the Psychtoolbox texture-record where the new texture should be stored.
 *  presentation_timestamp = A ptr to a double variable, where the presentation timestamp of the returned frame should be stored.
 *
 *  Returns true (1) on success, false (0) if no new image available, -1 if no new image available and there won't be any in future.
 */
int PsychGSGetTextureFromMovie(PsychWindowRecordType *win, int moviehandle, int checkForImage, double timeindex,
			     PsychWindowRecordType *out_texture, double *presentation_timestamp)
{
    GstElement			*theMovie;
    unsigned int		failcount=0;
    double			rate;
    double			targetdelta, realdelta, frames;
    // PsychRectType		outRect;
    GstBuffer                   *videoBuffer = NULL;
    gint64		        bufferIndex;
    double                      deltaT = 0;
    GstEvent                    *event;

    if (!PsychIsOnscreenWindow(win)) {
        PsychErrorExitMsg(PsychError_user, "Need onscreen window ptr!!!");
    }
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided.");
    }
    
    if ((timeindex!=-1) && (timeindex < 0 || timeindex >= 10000.0)) {
        PsychErrorExitMsg(PsychError_user, "Invalid timeindex provided.");
    }
    
    if (NULL == out_texture && !checkForImage) {
        PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of out_texture ptr passed!!!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle.");
    }

    // Allow context task to do its internal bookkeeping and cleanup work:
    PsychGSProcessMovieContext(movieRecordBANK[moviehandle].MovieContext, FALSE);

    // If this is a pure audio "movie" with no video tracks, we always return failed,
    // as those certainly don't have movie frames associated.

    if (movieRecordBANK[moviehandle].nrVideoTracks == 0) return((checkForImage) ? -1 : FALSE);

    // Get current playback rate:
    rate = movieRecordBANK[moviehandle].rate;

    // Is movie actively playing (automatic async playback, possibly with synced sound)?
    // If so, then we ignore the 'timeindex' parameter, because the automatic playback
    // process determines which frames should be delivered to PTB when. This function will
    // simply wait or poll for arrival/presence of a new frame that hasn't been fetched
    // in previous calls.
    if (0 == rate) {
        // Movie playback inactive. We are in "manual" mode: No automatic async playback,
        // no synced audio output. The user just wants to manually fetch movie frames into
        // textures for manual playback in a standard Matlab-loop.

	// First pass - checking for new image?
	if (checkForImage) {
		// Image for specific point in time requested?
		if (timeindex >= 0) {
			// Yes. We try to retrieve the next possible image for requested timeindex.
			// Seek to target timeindex:
			PsychGSSetMovieTimeIndex(moviehandle, timeindex, FALSE);
		}
		else {
			// No. We just retrieve the next frame, given the current position.
			// Nothing to do so far...
		}

		// Check for frame availability happens down there in the shared check code...
	}
    }

    // Should we just check for new image? If so, just return availability status:
    if (checkForImage) {
	PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
	if ((((0 != rate) && movieRecordBANK[moviehandle].frameAvail) || ((0 == rate) && movieRecordBANK[moviehandle].preRollAvail)) &&
	    !gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink))) {
		// New frame available. Unlock and report success:
		//printf("PTB-DEBUG: NEW FRAME %d\n", movieRecordBANK[moviehandle].frameAvail);
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(true);
	}

	// Is this the special case of a movie without video, but only sound? In that case
	// we always return a 'false' because there ain't no image to return. We check this
	// indirectly - If the imageBuffer is NULL then the video callback hasn't been called.
	if (oldstyle && (NULL == movieRecordBANK[moviehandle].imageBuffer)) {
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(false);
	}

	// None available. Any chance there will be one in the future?
        if (gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) && movieRecordBANK[moviehandle].loopflag == 0) {
		// No new frame available and there won't be any in the future, because this is a non-looping
		// movie that has reached its end.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(-1);
        }
        else {
		// No new frame available yet:
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		//printf("PTB-DEBUG: NO NEW FRAME\n");
		return(false);
        }
    }

    // If we reach this point, then an image fetch is requested. If no new data
    // is available we shall block:

    PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
    // printf("PTB-DEBUG: Blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);

    if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
	// No new frame available. Perform a blocking wait:
	PsychTimedWaitCondition(&movieRecordBANK[moviehandle].condition, &movieRecordBANK[moviehandle].mutex, 10.0);

	// Recheck:
	if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	    ((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
		// Game over! Wait timed out after 10 secs.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		printf("PTB-ERROR: No new video frame received after timeout of 10 seconds! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}

	// At this point we should have at least one frame available.
        // printf("PTB-DEBUG: After blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);
    }

    // We're here with at least one frame available and the mutex lock held.

    // Preroll case is simple:
    movieRecordBANK[moviehandle].preRollAvail = 0;

    // Perform texture fetch & creation:
    if (oldstyle) {
	// Reset frame available flag:
	movieRecordBANK[moviehandle].frameAvail = 0;

	// This will retrieve an OpenGL compatible pointer to the pixel data and assign it to our texmemptr:
	out_texture->textureMemory = (GLuint*) movieRecordBANK[moviehandle].imageBuffer;
    } else {
	// Active playback mode?
	if (0 != rate) {
		// Active playback mode: One less frame available after our fetch:
		movieRecordBANK[moviehandle].frameAvail--;
		if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: Pulling from videosink, %d buffers avail...\n", movieRecordBANK[moviehandle].frameAvail);

		// Clamp frameAvail to queue lengths:
		if ((int) gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) < movieRecordBANK[moviehandle].frameAvail) {
			movieRecordBANK[moviehandle].frameAvail = gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
		}

		// This will pull the oldest video buffer from the videosink. It would block if none were available,
		// but that won't happen as we wouldn't reach this statement if none were available. It would return
		// NULL if the stream would be EOS or the pipeline off, but that shouldn't ever happen:
		videoBuffer = gst_app_sink_pull_buffer(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	} else {
		// Passive fetch mode: Use prerolled buffers after seek:
		// These are available even after eos...
		videoBuffer = gst_app_sink_pull_preroll(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	}

	// We can unlock early, thanks to videosink's internal buffering:
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);

	if (videoBuffer) {
		// Assign pointer to videoBuffer's data directly: Avoids one full data copy compared to oldstyle method.
		out_texture->textureMemory = (GLuint*) GST_BUFFER_DATA(videoBuffer);

		// Assign pts presentation timestamp in pipeline stream time and convert to seconds:
		movieRecordBANK[moviehandle].pts = (double) GST_BUFFER_TIMESTAMP(videoBuffer) / (double) 1e9;
		if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(videoBuffer)))
			deltaT = (double) GST_BUFFER_DURATION(videoBuffer) / (double) 1e9;
		bufferIndex = GST_BUFFER_OFFSET(videoBuffer);
	} else {
		printf("PTB-ERROR: No new video frame received in gst_app_sink_pull_buffer! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}
	if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: ...done.\n");
    }

    // Assign presentation_timestamp:
    if (presentation_timestamp) *presentation_timestamp = movieRecordBANK[moviehandle].pts;

    // Activate OpenGL context of target window:
    PsychSetGLContext(win);

    #if PSYCH_SYSTEM == PSYCH_OSX
    // Explicitly disable Apple's Client storage extensions. For now they are not really useful to us.
    glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
    #endif

    // Build a standard PTB texture record:
    PsychMakeRect(out_texture->rect, 0, 0, movieRecordBANK[moviehandle].width, movieRecordBANK[moviehandle].height);    
        
    // Set NULL - special texture object as part of the PTB texture record:
    out_texture->targetSpecific.QuickTimeGLTexture = NULL;

    // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
    out_texture->textureOrientation = 3;
        
    // We use zero client storage memory bytes:
    out_texture->textureMemorySizeBytes = 0;

    // Textures are aligned on 4 Byte boundaries because texels are RGBA8:
    out_texture->textureByteAligned = 4;

	// Assign texturehandle of our cached texture, if any, so it gets recycled now:
	out_texture->textureNumber = movieRecordBANK[moviehandle].cached_texture;

    // Let PsychCreateTexture() do the rest of the job of creating, setting up and
    // filling an OpenGL texture with content:
    PsychCreateTexture(out_texture);

	// After PsychCreateTexture() the cached texture object from our cache is used
	// and no longer available for recycling. We mark the cache as empty:
	// It will be filled with a new textureid for recycling if a texture gets
	// deleted in PsychMovieDeleteTexture()....
	movieRecordBANK[moviehandle].cached_texture = 0;

    // Detection of dropped frames: This is a heuristic. We'll see how well it works out...
    // TODO: GstBuffer videoBuffer provides special flags that should allow to do a more
    // robust job, although nothing's wrong with the current approach per se...
    if (rate && presentation_timestamp) {
        // Try to check for dropped frames in playback mode:

        // Expected delta between successive presentation timestamps:
        targetdelta = 1.0f / (movieRecordBANK[moviehandle].fps * rate);

        // Compute real delta, given rate and playback direction:
        if (rate > 0) {
            realdelta = *presentation_timestamp - movieRecordBANK[moviehandle].last_pts;
            if (realdelta < 0) realdelta = 0;
        }
        else {
            realdelta = -1.0 * (*presentation_timestamp - movieRecordBANK[moviehandle].last_pts);
            if (realdelta < 0) realdelta = 0;
        }
        
        frames = realdelta / targetdelta;
        // Dropped frames?
        if (frames > 1 && movieRecordBANK[moviehandle].last_pts >= 0) {
            movieRecordBANK[moviehandle].nr_droppedframes += (int) (frames - 1 + 0.5);
        }

        movieRecordBANK[moviehandle].last_pts = *presentation_timestamp;
    }

    // Unlock.
    if (oldstyle) {
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
    } else {
	gst_buffer_unref(videoBuffer);
	videoBuffer = NULL;
    }
    
    // Manually advance movie time, if in fetch mode:
    if (0 == rate) {
        // We are in manual fetch mode: Need to manually advance movie to next
        // media sample:
	event = gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1.0, TRUE, FALSE);
	gst_element_send_event(theMovie, event);

	// Block until seek completed, failed, or timeout of 30 seconds reached:
        gst_element_get_state(theMovie, NULL, NULL, (GstClockTime) (30 * 1e9));
    }

    return(TRUE);
}
Example No. 17
static GstFlowReturn
gst_nle_source_on_preroll_buffer (GstAppSink * appsink, gpointer data)
{
  gst_buffer_unref (gst_app_sink_pull_preroll (appsink));
  return GST_FLOW_OK;
}
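This is the 0.10 idiom; under 1.x gst_app_sink_pull_preroll() returns a GstSample, so the unref changes accordingly. A sketch, with the _sample suffix in the name being illustrative:

static GstFlowReturn
gst_nle_source_on_preroll_sample (GstAppSink * appsink, gpointer data)
{
  // 1.x variant: the preroll is a GstSample, not a GstBuffer.
  gst_sample_unref (gst_app_sink_pull_preroll (appsink));
  return GST_FLOW_OK;
}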
Example No. 18
GstFlowReturn on_new_preroll_from_source (GstAppSink * elt, void * data){
	GstBuffer *buffer = gst_app_sink_pull_preroll(GST_APP_SINK (elt));
	return ((ofGstUtils*)data)->preroll_cb(buffer);
}
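Finally, a minimal self-contained usage sketch, not taken from any of the projects above, showing the blocking pull-preroll pattern end to end under GStreamer 1.x:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

int main (int argc, char ** argv)
{
  gst_init (&argc, &argv);

  // Build a throwaway pipeline with a named appsink we can pull from.
  GError *err = NULL;
  GstElement *pipeline = gst_parse_launch
      ("videotestsrc num-buffers=1 ! videoconvert ! appsink name=sink", &err);
  if (!pipeline) {
    g_printerr ("parse error: %s\n", err->message);
    return 1;
  }

  GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  // PAUSED is enough to preroll; pull_preroll() then blocks until the
  // first buffer arrives (or returns NULL on EOS/shutdown).
  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  GstSample *sample = gst_app_sink_pull_preroll (GST_APP_SINK (sink));

  if (sample) {
    GstCaps *caps = gst_sample_get_caps (sample);
    gchar *desc = gst_caps_to_string (caps);
    g_print ("prerolled caps: %s\n", desc);
    g_free (desc);
    gst_sample_unref (sample);
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (sink);
  gst_object_unref (pipeline);
  return 0;
}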