Example #1
static void
gst_app_sink_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAppSink *appsink = GST_APP_SINK_CAST (object);

  switch (prop_id) {
    case PROP_CAPS:
    {
      GstCaps *caps;

      caps = gst_app_sink_get_caps (appsink);
      gst_value_set_caps (value, caps);
      if (caps)
        gst_caps_unref (caps);
      break;
    }
    case PROP_EOS:
      g_value_set_boolean (value, gst_app_sink_is_eos (appsink));
      break;
    case PROP_EMIT_SIGNALS:
      g_value_set_boolean (value, gst_app_sink_get_emit_signals (appsink));
      break;
    case PROP_MAX_BUFFERS:
      g_value_set_uint (value, gst_app_sink_get_max_buffers (appsink));
      break;
    case PROP_DROP:
      g_value_set_boolean (value, gst_app_sink_get_drop (appsink));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
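Because the getter above wires each PROP_* ID to the matching gst_app_sink_* accessor, the same values can also be read through the generic GObject property API. A minimal sketch, assuming an appsink element created elsewhere (the helper name is hypothetical):

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

/* Hypothetical helper: reads back appsink state via g_object_get(),
 * which dispatches into gst_app_sink_get_property() above. */
static void
print_appsink_state (GstElement * appsink)
{
  guint max_buffers = 0;
  gboolean eos = FALSE, drop = FALSE;

  /* Each property name corresponds to a PROP_* case in the switch above. */
  g_object_get (G_OBJECT (appsink),
      "max-buffers", &max_buffers,
      "eos", &eos,
      "drop", &drop,
      NULL);

  g_print ("max-buffers=%u eos=%d drop=%d\n", max_buffers, eos, drop);
}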
Example #2
/*!
 * \brief OpenIMAJCapGStreamer::getProperty retrieves the requested property from the pipeline
 * \param propId requested property
 * \return property value
 *
 * There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
 * For frame-based properties, we use the caps of the last received sample. This means that some properties
 * are not available until the first frame has been received.
 */
double OpenIMAJCapGStreamer::getProperty( int propId )
{
    GstFormat format;
    gint64 value;
    gboolean status;
    
/* FORMAT abstracts the format argument of the position queries below; it is
 * kept as a macro so the same call sites can also compile against GStreamer
 * API versions that take the format by pointer rather than by value. */
#define FORMAT format
    
    if(!pipeline) {
        WARN("GStreamer: no pipeline");
        return false;
    }
    
    switch(propId) {
        case CAP_PROP_POS_MSEC:
            format = GST_FORMAT_TIME;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return value * 1e-6; // nanoseconds to milliseconds
        case CAP_PROP_POS_FRAMES:
            format = GST_FORMAT_DEFAULT;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return value;
        case CAP_PROP_POS_AVI_RATIO:
            format = GST_FORMAT_PERCENT;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return ((double) value) / GST_FORMAT_PERCENT_MAX;
        case CAP_PROP_FRAME_WIDTH: {
            if (!buffer_caps){
                WARN("GStreamer: unable to query width of frame; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint width = 0;
            if(!gst_structure_get_int(structure, "width", &width)){
                WARN("GStreamer: unable to query width of frame");
                return 0;
            }
            return width;
        }
        case CAP_PROP_FRAME_HEIGHT: {
            if (!buffer_caps){
                WARN("GStreamer: unable to query height of frame; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint height = 0;
            if(!gst_structure_get_int(structure, "height", &height)){
                WARN("GStreamer: unable to query height of frame");
                return 0;
            }
            return height;
        }
        case CAP_PROP_FPS: {
            if (!buffer_caps){
                WARN("GStreamer: unable to query framerate of stream; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint num = 0, denom=1;
            if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)){
                WARN("GStreamer: unable to query framerate of stream");
                return 0;
            }
            return (double)num/(double)denom;
        }
        case CAP_GSTREAMER_QUEUE_LENGTH:
            if(!sink) {
                WARN("GStreamer: there is no sink yet");
                return false;
            }
            return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
        default:
            WARN("GStreamer: unhandled property");
            break;
    }
    
#undef FORMAT
    
    return false;
}
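As the doc comment notes, the two retrieval paths behave differently before the first frame arrives: pipeline queries work as soon as the pipeline exists, while caps-derived properties return 0 until a sample has been received. A minimal usage sketch, assuming a constructed and opened capture instance (the helper name is hypothetical):

#include <cstdio>

/* Hypothetical helper: the capture instance is assumed to be
 * constructed and opened elsewhere. */
static void report_stream_info(OpenIMAJCapGStreamer& cap)
{
    double ms    = cap.getProperty(CAP_PROP_POS_MSEC);     // seek-based: pipeline query
    double width = cap.getProperty(CAP_PROP_FRAME_WIDTH);  // frame-based: 0 before first frame
    double fps   = cap.getProperty(CAP_PROP_FPS);          // frame-based: 0 before first frame
    std::printf("pos=%.1f ms, width=%.0f px, fps=%.2f\n", ms, width, fps);
}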
Example #3
uint ApplicationSink::maxBuffers() const
{
    return d->appSink() ? gst_app_sink_get_max_buffers(d->appSink()) : 0;
}
Example #4
/*
 *  PsychGSGetTextureFromMovie() -- Create an OpenGL texture map from a specific video frame of a given movie object.
 *
 *  win = Window pointer of onscreen window for which an OpenGL texture should be created.
 *  moviehandle = Handle to the movie object.
 *  checkForImage = true == Just check if new image available, false == really retrieve the image, blocking if necessary.
 *  timeindex = When not in playback mode, this allows specification of a requested frame by presentation time.
 *              If set to -1, or if in realtime playback mode, this parameter is ignored and the next video frame is returned.
 *  out_texture = Pointer to the Psychtoolbox texture-record where the new texture should be stored.
 *  presentation_timestamp = A ptr to a double variable, where the presentation timestamp of the returned frame should be stored.
 *
 *  Returns true (1) on success, false (0) if no new image is available, -1 if no new image is available and there won't be any in the future.
 */
int PsychGSGetTextureFromMovie(PsychWindowRecordType *win, int moviehandle, int checkForImage, double timeindex,
			     PsychWindowRecordType *out_texture, double *presentation_timestamp)
{
    GstElement			*theMovie;
    unsigned int		failcount=0;
    double			rate;
    double			targetdelta, realdelta, frames;
    // PsychRectType		outRect;
    GstBuffer                   *videoBuffer = NULL;
    gint64		        bufferIndex;
    double                      deltaT = 0;
    GstEvent                    *event;

    if (!PsychIsOnscreenWindow(win)) {
        PsychErrorExitMsg(PsychError_user, "Need onscreen window ptr!!!");
    }
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided.");
    }
    
    if ((timeindex!=-1) && (timeindex < 0 || timeindex >= 10000.0)) {
        PsychErrorExitMsg(PsychError_user, "Invalid timeindex provided.");
    }
    
    if (NULL == out_texture && !checkForImage) {
        PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of out_texture ptr passed!!!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle.");
    }

    // Allow context task to do its internal bookkeeping and cleanup work:
    PsychGSProcessMovieContext(movieRecordBANK[moviehandle].MovieContext, FALSE);

    // If this is a pure audio "movie" with no video tracks, we always return failure,
    // as those certainly don't have any movie frames associated with them.

    if (movieRecordBANK[moviehandle].nrVideoTracks == 0) return((checkForImage) ? -1 : FALSE);

    // Get current playback rate:
    rate = movieRecordBANK[moviehandle].rate;

    // Is movie actively playing (automatic async playback, possibly with synced sound)?
    // If so, then we ignore the 'timeindex' parameter, because the automatic playback
    // process determines which frames should be delivered to PTB when. This function will
    // simply wait or poll for arrival/presence of a new frame that hasn't been fetched
    // in previous calls.
    if (0 == rate) {
        // Movie playback inactive. We are in "manual" mode: No automatic async playback,
        // no synced audio output. The user just wants to manually fetch movie frames into
        // textures for manual playback in a standard Matlab-loop.

	// First pass - checking for new image?
	if (checkForImage) {
		// Image for specific point in time requested?
		if (timeindex >= 0) {
			// Yes. We try to retrieve the next possible image for requested timeindex.
			// Seek to target timeindex:
			PsychGSSetMovieTimeIndex(moviehandle, timeindex, FALSE);
		}
		else {
			// No. We just retrieve the next frame, given the current position.
			// Nothing to do so far...
		}

		// Check for frame availability happens down there in the shared check code...
	}
    }

    // Should we just check for new image? If so, just return availability status:
    if (checkForImage) {
	PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
	if ((((0 != rate) && movieRecordBANK[moviehandle].frameAvail) || ((0 == rate) && movieRecordBANK[moviehandle].preRollAvail)) &&
	    !gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink))) {
		// New frame available. Unlock and report success:
		//printf("PTB-DEBUG: NEW FRAME %d\n", movieRecordBANK[moviehandle].frameAvail);
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(true);
	}

	// Is this the special case of a movie without video, but only sound? In that case
	// we always return 'false' because there is no image to return. We check this
	// indirectly - if the imageBuffer is NULL then the video callback hasn't been called.
	if (oldstyle && (NULL == movieRecordBANK[moviehandle].imageBuffer)) {
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(false);
	}

	// None available. Any chance there will be one in the future?
        if (gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) && movieRecordBANK[moviehandle].loopflag == 0) {
		// No new frame available and there won't be any in the future, because this is a non-looping
		// movie that has reached its end.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(-1);
        }
        else {
		// No new frame available yet:
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		//printf("PTB-DEBUG: NO NEW FRAME\n");
		return(false);
        }
    }

    // If we reach this point, then an image fetch is requested. If no new data
    // is available we shall block:

    PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
    // printf("PTB-DEBUG: Blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);

    if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
	// No new frame available. Perform a blocking wait:
	PsychTimedWaitCondition(&movieRecordBANK[moviehandle].condition, &movieRecordBANK[moviehandle].mutex, 10.0);

	// Recheck:
	if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	    ((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
		// Game over! Wait timed out after 10 secs.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		printf("PTB-ERROR: No new video frame received after timeout of 10 seconds! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}

	// At this point we should have at least one frame available.
        // printf("PTB-DEBUG: After blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);
    }

    // We're here with at least one frame available and the mutex lock held.

    // Preroll case is simple:
    movieRecordBANK[moviehandle].preRollAvail = 0;

    // Perform texture fetch & creation:
    if (oldstyle) {
	// Reset frame available flag:
	movieRecordBANK[moviehandle].frameAvail = 0;

	// This will retrieve an OpenGL compatible pointer to the pixel data and assign it to our texmemptr:
	out_texture->textureMemory = (GLuint*) movieRecordBANK[moviehandle].imageBuffer;
    } else {
	// Active playback mode?
	if (0 != rate) {
		// Active playback mode: One less frame available after our fetch:
		movieRecordBANK[moviehandle].frameAvail--;
		if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: Pulling from videosink, %d buffers avail...\n", movieRecordBANK[moviehandle].frameAvail);

		// Clamp frameAvail to queue lengths:
		if ((int) gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) < movieRecordBANK[moviehandle].frameAvail) {
			movieRecordBANK[moviehandle].frameAvail = gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
		}

		// This will pull the oldest video buffer from the videosink. It would block if none were available,
		// but that won't happen as we wouldn't reach this statement if none were available. It would return
		// NULL if the stream would be EOS or the pipeline off, but that shouldn't ever happen:
		videoBuffer = gst_app_sink_pull_buffer(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	} else {
		// Passive fetch mode: Use prerolled buffers after seek:
		// These are available even after eos...
		videoBuffer = gst_app_sink_pull_preroll(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	}

	// We can unlock early, thanks to videosink's internal buffering:
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);

	if (videoBuffer) {
		// Assign pointer to videoBuffer's data directly: Avoids one full data copy compared to oldstyle method.
		out_texture->textureMemory = (GLuint*) GST_BUFFER_DATA(videoBuffer);

		// Assign pts presentation timestamp in pipeline stream time and convert to seconds:
		movieRecordBANK[moviehandle].pts = (double) GST_BUFFER_TIMESTAMP(videoBuffer) / (double) 1e9;
		if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(videoBuffer)))
			deltaT = (double) GST_BUFFER_DURATION(videoBuffer) / (double) 1e9;
		bufferIndex = GST_BUFFER_OFFSET(videoBuffer);
	} else {
		printf("PTB-ERROR: No new video frame received in gst_app_sink_pull_buffer! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}
	if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: ...done.\n");
    }

    // Assign presentation_timestamp:
    if (presentation_timestamp) *presentation_timestamp = movieRecordBANK[moviehandle].pts;

    // Activate OpenGL context of target window:
    PsychSetGLContext(win);

    #if PSYCH_SYSTEM == PSYCH_OSX
    // Explicitly disable Apple's client storage extensions. For now they are not really useful to us.
    glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
    #endif

    // Build a standard PTB texture record:
    PsychMakeRect(out_texture->rect, 0, 0, movieRecordBANK[moviehandle].width, movieRecordBANK[moviehandle].height);    
        
    // Set NULL - special texture object as part of the PTB texture record:
    out_texture->targetSpecific.QuickTimeGLTexture = NULL;

    // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
    out_texture->textureOrientation = 3;
        
    // We use zero client storage memory bytes:
    out_texture->textureMemorySizeBytes = 0;

    // Textures are aligned on 4 Byte boundaries because texels are RGBA8:
    out_texture->textureByteAligned = 4;

	// Assign texturehandle of our cached texture, if any, so it gets recycled now:
	out_texture->textureNumber = movieRecordBANK[moviehandle].cached_texture;

    // Let PsychCreateTexture() do the rest of the job of creating, setting up and
    // filling an OpenGL texture with content:
    PsychCreateTexture(out_texture);

	// After PsychCreateTexture() the cached texture object from our cache is used
	// and no longer available for recycling. We mark the cache as empty:
	// It will be filled with a new textureid for recycling if a texture gets
	// deleted in PsychMovieDeleteTexture()....
	movieRecordBANK[moviehandle].cached_texture = 0;

    // Detection of dropped frames: This is a heuristic. We'll see how well it works out...
    // TODO: GstBuffer videoBuffer provides special flags that should allow for a more
    // robust detection, although nothing's wrong with the current approach per se...
    if (rate && presentation_timestamp) {
        // Try to check for dropped frames in playback mode:

        // Expected delta between successive presentation timestamps:
        targetdelta = 1.0f / (movieRecordBANK[moviehandle].fps * rate);

        // Compute real delta, given rate and playback direction:
        if (rate > 0) {
            realdelta = *presentation_timestamp - movieRecordBANK[moviehandle].last_pts;
            if (realdelta < 0) realdelta = 0;
        }
        else {
            realdelta = -1.0 * (*presentation_timestamp - movieRecordBANK[moviehandle].last_pts);
            if (realdelta < 0) realdelta = 0;
        }
        
        frames = realdelta / targetdelta;
        // Dropped frames?
        if (frames > 1 && movieRecordBANK[moviehandle].last_pts >= 0) {
            movieRecordBANK[moviehandle].nr_droppedframes += (int) (frames - 1 + 0.5);
        }

        movieRecordBANK[moviehandle].last_pts = *presentation_timestamp;
    }

    // Unlock.
    if (oldstyle) {
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
    } else {
	gst_buffer_unref(videoBuffer);
	videoBuffer = NULL;
    }
    
    // Manually advance movie time, if in fetch mode:
    if (0 == rate) {
        // We are in manual fetch mode: Need to manually advance movie to next
        // media sample:
	event = gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1.0, TRUE, FALSE);
	gst_element_send_event(theMovie, event);

	// Block until seek completed, failed, or timeout of 30 seconds reached:
        gst_element_get_state(theMovie, NULL, NULL, (GstClockTime) (30 * 1e9));
    }

    return(TRUE);
}
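The function implements a two-phase protocol: callers poll with checkForImage = true (out_texture may be NULL) until a frame is reported, then fetch it with checkForImage = false. A minimal sketch of that calling pattern, assuming win and moviehandle are set up elsewhere (the wrapper name is hypothetical):

/* Hypothetical wrapper around the poll-then-fetch protocol above. */
static void fetch_next_movie_frame(PsychWindowRecordType *win, int moviehandle)
{
    PsychWindowRecordType tex;  /* texture record filled by the fetch */
    double pts = 0;
    int rc;

    /* Phase 1: poll for availability (checkForImage = true, out_texture may be NULL). */
    rc = PsychGSGetTextureFromMovie(win, moviehandle, TRUE, -1, NULL, NULL);
    if (rc == -1) return;   /* non-looping movie at EOS: no frame now or ever */
    if (rc != TRUE) return; /* no new frame yet: retry later */

    /* Phase 2: blocking fetch of the frame into the texture record. */
    rc = PsychGSGetTextureFromMovie(win, moviehandle, FALSE, -1, &tex, &pts);
    if (rc == TRUE) {
        /* tex now holds the texture record; pts is its presentation time in seconds. */
    }
}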
Example #5
double CvCapture_GStreamer::getProperty( int propId )
{
    GstFormat format;
    //GstQuery q;
    gint64 value;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        if(!gst_element_query_position(sink, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value * 1e-6; // nanoseconds to milliseconds
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_position(sink, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        if(!gst_element_query_position(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH:
    case CV_CAP_PROP_FRAME_HEIGHT:
    case CV_CAP_PROP_FPS:
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FRAME_COUNT:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_duration(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink) {
            CV_WARN("GStreamer: there is no sink yet");
            return false;
        }
        return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }
    return false;
}
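This backend sits behind OpenCV's generic capture API, so property requests on a capture handle are routed into the switch above; note that width, height, FPS and FOURCC fall through here and yield 0. A minimal sketch using the C API of the same era ("video.avi" is a placeholder path, and the helper name is hypothetical):

#include <cstdio>
#include <opencv2/highgui/highgui_c.h>

static void dump_capture_properties(void)
{
    CvCapture *cap = cvCreateFileCapture("video.avi");  /* placeholder path */
    if (!cap) return;

    /* These IDs are handled by CvCapture_GStreamer::getProperty() above. */
    double frames = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT);
    double msec   = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC);
    double ratio  = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_AVI_RATIO);
    std::printf("frames=%.0f pos=%.1f ms ratio=%.3f\n", frames, msec, ratio);

    cvReleaseCapture(&cap);
}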