/* PsychHIDReturnEventFromEventBuffer() - Fetch the oldest pending event, if any, from the
 * event buffer of device 'deviceIndex' and return it as a struct in output argument
 * 'outArgIndex'. If the buffer is empty, wait up to 'maxWaitTimeSecs' seconds for an event
 * to arrive. Returns the number of events still pending after this fetch, or zero if
 * nothing was available.
 */
int PsychHIDReturnEventFromEventBuffer(int deviceIndex, int outArgIndex, double maxWaitTimeSecs)
{
	unsigned int navail;
	PsychHIDEventRecord evt;
	PsychGenericScriptType *retevent;
	double* foo = NULL;
	const char *FieldNames[] = { "Time", "Pressed", "Keycode", "CookedKey" };

	if (deviceIndex < 0) deviceIndex = PsychHIDGetDefaultKbQueueDevice();	
	if (!hidEventBuffer[deviceIndex]) return(0);
	
	PsychLockMutex(&hidEventBufferMutex[deviceIndex]);
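	// Number of pending events: the write and read positions are monotonically increasing
	// counters that only get wrapped modulo the buffer capacity when indexing into the ring
	// buffer, so their difference is the number of not-yet-fetched events.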
	navail = hidEventBufferWritePos[deviceIndex] - hidEventBufferReadPos[deviceIndex];	

	// If nothing available and we're asked to wait for something, then wait:
	if ((navail == 0) && (maxWaitTimeSecs > 0)) {
		// Wait for something:
		PsychTimedWaitCondition(&hidEventBufferCondition[deviceIndex], &hidEventBufferMutex[deviceIndex], maxWaitTimeSecs);

		// Recompute number of available events:
		navail = hidEventBufferWritePos[deviceIndex] - hidEventBufferReadPos[deviceIndex];	
	}

	// Check if anything available, copy it if so:
	if (navail) {
		memcpy(&evt, &(hidEventBuffer[deviceIndex][hidEventBufferReadPos[deviceIndex] % hidEventBufferCapacity[deviceIndex]]), sizeof(PsychHIDEventRecord));
		hidEventBufferReadPos[deviceIndex]++;
	}
	PsychUnlockMutex(&hidEventBufferMutex[deviceIndex]);

	if (navail) {
		// Return event struct:
		PsychAllocOutStructArray(outArgIndex, kPsychArgOptional, 1, 4, FieldNames, &retevent);
		PsychSetStructArrayDoubleElement("Time", 0, evt.timestamp, retevent);
		PsychSetStructArrayDoubleElement("Pressed", 0, (double) (evt.status & (1<<0)) ? 1 : 0, retevent);
		PsychSetStructArrayDoubleElement("Keycode", 0, (double) evt.rawEventCode, retevent);
		PsychSetStructArrayDoubleElement("CookedKey", 0, (double) evt.cookedEventCode, retevent);
		return(navail - 1);
	}
	else {
		// Return empty matrix:
		PsychCopyOutDoubleMatArg(outArgIndex, kPsychArgOptional, 0, 0, 0, foo);
		return(0);
	}
}
/*
 *  PsychGSGetTextureFromMovie() -- Create an OpenGL texture map from a specific videoframe from given movie object.
 *
 *  win = Window pointer of onscreen window for which an OpenGL texture should be created.
 *  moviehandle = Handle to the movie object.
 *  checkForImage = true == Just check if new image available, false == really retrieve the image, blocking if necessary.
 *  timeindex = When not in playback mode, this allows specification of a requested frame by presentation time.
 *              If set to -1, or if in realtime playback mode, this parameter is ignored and the next video frame is returned.
 *  out_texture = Pointer to the Psychtoolbox texture-record where the new texture should be stored.
 *  presentation_timestamp = A ptr to a double variable, where the presentation timestamp of the returned frame should be stored.
 *
 *  Returns true (1) on success, false (0) if no new image available, -1 if no new image available and there won't be any in future.
 */
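/*
 *  Illustrative calling pattern (a sketch only; the actual call site, e.g. the
 *  Screen('GetMovieImage') implementation, may differ in detail):
 *
 *      // Poll: is a new frame available? out_texture may be NULL in this pass.
 *      rc = PsychGSGetTextureFromMovie(win, moviehandle, TRUE, timeindex, NULL, NULL);
 *      if (rc == -1) { ... end of non-looping movie, no further frames ... }
 *      else if (rc == TRUE) {
 *          // Really fetch the frame into a texture record, blocking if needed:
 *          PsychGSGetTextureFromMovie(win, moviehandle, FALSE, timeindex, &texRecord, &pts);
 *      }
 */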
int PsychGSGetTextureFromMovie(PsychWindowRecordType *win, int moviehandle, int checkForImage, double timeindex,
			     PsychWindowRecordType *out_texture, double *presentation_timestamp)
{
    GstElement			*theMovie;
    unsigned int		failcount=0;
    double			rate;
    double			targetdelta, realdelta, frames;
    // PsychRectType		outRect;
    GstBuffer                   *videoBuffer = NULL;
    gint64		        bufferIndex;
    double                      deltaT = 0;
    GstEvent                    *event;

    if (!PsychIsOnscreenWindow(win)) {
        PsychErrorExitMsg(PsychError_user, "Need onscreen window ptr!!!");
    }
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided.");
    }
    
    if ((timeindex!=-1) && (timeindex < 0 || timeindex >= 10000.0)) {
        PsychErrorExitMsg(PsychError_user, "Invalid timeindex provided.");
    }
    
    if (NULL == out_texture && !checkForImage) {
        PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of out_texture ptr passed!!!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle.");
    }

    // Allow context task to do its internal bookkeeping and cleanup work:
    PsychGSProcessMovieContext(movieRecordBANK[moviehandle].MovieContext, FALSE);

    // If this is a pure audio "movie" with no video tracks, we always return failed,
    // as those certainly don't have movie frames associated.

    if (movieRecordBANK[moviehandle].nrVideoTracks == 0) return((checkForImage) ? -1 : FALSE);

    // Get current playback rate:
    rate = movieRecordBANK[moviehandle].rate;

    // Is movie actively playing (automatic async playback, possibly with synced sound)?
    // If so, then we ignore the 'timeindex' parameter, because the automatic playback
    // process determines which frames should be delivered to PTB when. This function will
    // simply wait or poll for arrival/presence of a new frame that hasn't been fetched
    // in previous calls.
    if (0 == rate) {
        // Movie playback inactive. We are in "manual" mode: No automatic async playback,
        // no synced audio output. The user just wants to manually fetch movie frames into
        // textures for manual playback in a standard Matlab-loop.

	// First pass - checking for new image?
	if (checkForImage) {
		// Image for specific point in time requested?
		if (timeindex >= 0) {
			// Yes. We try to retrieve the next possible image for requested timeindex.
			// Seek to target timeindex:
			PsychGSSetMovieTimeIndex(moviehandle, timeindex, FALSE);
		}
		else {
			// No. We just retrieve the next frame, given the current position.
			// Nothing to do so far...
		}

		// Check for frame availability happens down there in the shared check code...
	}
    }

    // Should we just check for new image? If so, just return availability status:
    if (checkForImage) {
	PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
	if ((((0 != rate) && movieRecordBANK[moviehandle].frameAvail) || ((0 == rate) && movieRecordBANK[moviehandle].preRollAvail)) &&
	    !gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink))) {
		// New frame available. Unlock and report success:
		//printf("PTB-DEBUG: NEW FRAME %d\n", movieRecordBANK[moviehandle].frameAvail);
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(true);
	}

	// Is this the special case of a movie without video, but only sound? In that case
	// we always return 'false', because there is no image to return. We check this
	// indirectly: if the imageBuffer is NULL, then the video callback hasn't been called.
	if (oldstyle && (NULL == movieRecordBANK[moviehandle].imageBuffer)) {
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(false);
	}

	// None available. Any chance there will be one in the future?
        if (gst_app_sink_is_eos(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) && movieRecordBANK[moviehandle].loopflag == 0) {
		// No new frame available and there won't be any in the future, because this is a non-looping
		// movie that has reached its end.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		return(-1);
        }
        else {
		// No new frame available yet:
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		//printf("PTB-DEBUG: NO NEW FRAME\n");
		return(false);
        }
    }

    // If we reach this point, then an image fetch is requested. If no new data
    // is available we shall block:

    PsychLockMutex(&movieRecordBANK[moviehandle].mutex);
    // printf("PTB-DEBUG: Blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);

    if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
	// No new frame available. Perform a blocking wait:
	PsychTimedWaitCondition(&movieRecordBANK[moviehandle].condition, &movieRecordBANK[moviehandle].mutex, 10.0);

	// Recheck:
	if (((0 != rate) && !movieRecordBANK[moviehandle].frameAvail) ||
	    ((0 == rate) && !movieRecordBANK[moviehandle].preRollAvail)) {
		// Game over! Wait timed out after 10 secs.
		PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
		printf("PTB-ERROR: No new video frame received after timeout of 10 seconds! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}

	// At this point we should have at least one frame available.
        // printf("PTB-DEBUG: After blocking fetch start %d\n", movieRecordBANK[moviehandle].frameAvail);
    }

    // We're here with at least one frame available and the mutex lock held.

    // Preroll case is simple:
    movieRecordBANK[moviehandle].preRollAvail = 0;

    // Perform texture fetch & creation:
    if (oldstyle) {
	// Reset frame available flag:
	movieRecordBANK[moviehandle].frameAvail = 0;

	// This will retrieve an OpenGL compatible pointer to the pixel data and assign it to our texmemptr:
	out_texture->textureMemory = (GLuint*) movieRecordBANK[moviehandle].imageBuffer;
    } else {
	// Active playback mode?
	if (0 != rate) {
		// Active playback mode: One less frame available after our fetch:
		movieRecordBANK[moviehandle].frameAvail--;
		if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: Pulling from videosink, %d buffers avail...\n", movieRecordBANK[moviehandle].frameAvail);

		// Clamp frameAvail to queue lengths:
		if ((int) gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink)) < movieRecordBANK[moviehandle].frameAvail) {
			movieRecordBANK[moviehandle].frameAvail = gst_app_sink_get_max_buffers(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
		}
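
		// Note: frameAvail is a free-running counter maintained by our appsink callback;
		// it can exceed the number of buffers the sink actually still holds (e.g., if the
		// sink had to drop buffers once its queue filled up), hence the clamping above.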

		// This pulls the oldest video buffer from the videosink. It would block if none were
		// available, but that can't happen here, as we wouldn't reach this statement without
		// at least one buffer pending. It would return NULL if the stream were at EOS or the
		// pipeline stopped, but that shouldn't ever happen:
		videoBuffer = gst_app_sink_pull_buffer(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	} else {
		// Passive fetch mode: Use prerolled buffers after seek:
		// These are available even after eos...
		videoBuffer = gst_app_sink_pull_preroll(GST_APP_SINK(movieRecordBANK[moviehandle].videosink));
	}

	// We can unlock early, thanks to videosink's internal buffering:
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);

	if (videoBuffer) {
		// Assign pointer to videoBuffer's data directly: Avoids one full data copy compared to oldstyle method.
		out_texture->textureMemory = (GLuint*) GST_BUFFER_DATA(videoBuffer);

		// Assign pts presentation timestamp in pipeline stream time and convert to seconds:
		movieRecordBANK[moviehandle].pts = (double) GST_BUFFER_TIMESTAMP(videoBuffer) / (double) 1e9;
		if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(videoBuffer)))
			deltaT = (double) GST_BUFFER_DURATION(videoBuffer) / (double) 1e9;
		bufferIndex = GST_BUFFER_OFFSET(videoBuffer);
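		// bufferIndex now holds the buffer's media offset (the frame number, if the decoder
		// provides one); deltaT holds the frame duration in seconds, if a valid duration
		// was attached to the buffer.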
	} else {
		printf("PTB-ERROR: No new video frame received in gst_app_sink_pull_buffer! Something's wrong. Aborting fetch.\n");
		return(FALSE);
	}
	if (PsychPrefStateGet_Verbosity()>4) printf("PTB-DEBUG: ...done.\n");
    }

    // Assign presentation_timestamp:
    if (presentation_timestamp) *presentation_timestamp = movieRecordBANK[moviehandle].pts;

    // Activate OpenGL context of target window:
    PsychSetGLContext(win);

    #if PSYCH_SYSTEM == PSYCH_OSX
    // Explicitly disable Apple's Client storage extensions. For now they are not really useful to us.
    glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
    #endif

    // Build a standard PTB texture record:
    PsychMakeRect(out_texture->rect, 0, 0, movieRecordBANK[moviehandle].width, movieRecordBANK[moviehandle].height);    
        
    // Set NULL - special texture object as part of the PTB texture record:
    out_texture->targetSpecific.QuickTimeGLTexture = NULL;

    // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
    out_texture->textureOrientation = 3;
        
    // We use zero client storage memory bytes:
    out_texture->textureMemorySizeBytes = 0;

    // Textures are aligned on 4 Byte boundaries because texels are RGBA8:
    out_texture->textureByteAligned = 4;

    // Assign texturehandle of our cached texture, if any, so it gets recycled now:
    out_texture->textureNumber = movieRecordBANK[moviehandle].cached_texture;

    // Let PsychCreateTexture() do the rest of the job of creating, setting up and
    // filling an OpenGL texture with content:
    PsychCreateTexture(out_texture);

    // After PsychCreateTexture() the cached texture object from our cache is used
    // and no longer available for recycling. We mark the cache as empty:
    // It will be filled with a new textureid for recycling if a texture gets
    // deleted in PsychMovieDeleteTexture()....
    movieRecordBANK[moviehandle].cached_texture = 0;

    // Detection of dropped frames: This is a heuristic. We'll see how well it works out...
    // TODO: The GstBuffer 'videoBuffer' provides special flags that should allow us to do a more
    // robust job, although nothing's wrong with the current approach per se...
    if (rate && presentation_timestamp) {
        // Try to check for dropped frames in playback mode:

        // Expected delta between successive presentation timestamps:
        targetdelta = 1.0f / (movieRecordBANK[moviehandle].fps * rate);

        // Compute real delta, given rate and playback direction:
        if (rate > 0) {
            realdelta = *presentation_timestamp - movieRecordBANK[moviehandle].last_pts;
            if (realdelta < 0) realdelta = 0;
        }
        else {
            realdelta = -1.0 * (*presentation_timestamp - movieRecordBANK[moviehandle].last_pts);
            if (realdelta < 0) realdelta = 0;
        }
        
        frames = realdelta / targetdelta;
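        // Worked example: at fps = 25 and rate = 1, targetdelta is 0.04 secs. A measured
        // realdelta of 0.12 secs gives frames = 3, so (int) (3 - 1 + 0.5) = 2 frames are
        // counted as dropped below.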
        // Dropped frames?
        if (frames > 1 && movieRecordBANK[moviehandle].last_pts >= 0) {
            movieRecordBANK[moviehandle].nr_droppedframes += (int) (frames - 1 + 0.5);
        }

        movieRecordBANK[moviehandle].last_pts = *presentation_timestamp;
    }

    // Unlock.
    if (oldstyle) {
	PsychUnlockMutex(&movieRecordBANK[moviehandle].mutex);
    } else {
	gst_buffer_unref(videoBuffer);
	videoBuffer = NULL;
    }
    
    // Manually advance movie time, if in fetch mode:
    if (0 == rate) {
        // We are in manual fetch mode: Need to manually advance movie to next
        // media sample:
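	// The step event asks the pipeline to advance by exactly one buffer (i.e., one
	// video frame) at normal rate; flush == TRUE, intermediate == FALSE: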
	event = gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1.0, TRUE, FALSE);
	gst_element_send_event(theMovie, event);

	// Block until the single-step operation completed, failed, or a timeout of 30 seconds is reached:
        gst_element_get_state(theMovie, NULL, NULL, (GstClockTime) (30 * 1e9));
    }

    return(TRUE);
}
int PsychHIDReturnEventFromEventBuffer(int deviceIndex, int outArgIndex, double maxWaitTimeSecs)
{
    unsigned int navail, j;
    PsychHIDEventRecord evt;
    PsychGenericScriptType *retevent;
    double* foo = NULL;
    PsychGenericScriptType *outMat;
    double *v;
    const char *FieldNames[] = { "Type", "Time", "Pressed", "Keycode", "CookedKey", "ButtonStates", "Motion", "X", "Y", "NormX", "NormY", "Valuators" };
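    // Extended event record: besides key press/release info, this also carries pointing
    // device state (ButtonStates bitmask, Motion flag, absolute X/Y and normalized
    // NormX/NormY positions) and the raw valuator values of the device.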

    if (deviceIndex < 0) deviceIndex = PsychHIDGetDefaultKbQueueDevice();
    if (!hidEventBuffer[deviceIndex]) return(0);

    PsychLockMutex(&hidEventBufferMutex[deviceIndex]);
    navail = hidEventBufferWritePos[deviceIndex] - hidEventBufferReadPos[deviceIndex];

    // If nothing available and we're asked to wait for something, then wait:
    if ((navail == 0) && (maxWaitTimeSecs > 0)) {
        // Wait for something:
        PsychTimedWaitCondition(&hidEventBufferCondition[deviceIndex], &hidEventBufferMutex[deviceIndex], maxWaitTimeSecs);

        // Recompute number of available events:
        navail = hidEventBufferWritePos[deviceIndex] - hidEventBufferReadPos[deviceIndex];
    }

    // Check if anything available, copy it if so:
    if (navail) {
        memcpy(&evt, &(hidEventBuffer[deviceIndex][hidEventBufferReadPos[deviceIndex] % hidEventBufferCapacity[deviceIndex]]), sizeof(PsychHIDEventRecord));
        hidEventBufferReadPos[deviceIndex]++;
    }

    PsychUnlockMutex(&hidEventBufferMutex[deviceIndex]);

    if (navail) {
        // Return event struct:
        switch (evt.type) {
            case 0: // Press/Release
            case 1: // Motion/Valuator change
                PsychAllocOutStructArray(outArgIndex, kPsychArgOptional, 1, 12, FieldNames, &retevent);
                break;

            case 2: // Touch begin
            case 3: // Touch update/move
            case 4: // Touch end
            case 5: // Touch sequence compromised marker. If this one shows up (with the magic touch point
                    // id 0xffffffff), the user script knows the sequence was cut short / aborted
                    // by some higher priority consumer, e.g., a global gesture recognizer.
                PsychAllocOutStructArray(outArgIndex, kPsychArgOptional, 1, 12, FieldNames, &retevent);
                break;

            default:
                PsychErrorExitMsg(PsychError_internal, "Unhandled keyboard queue event type!");
        }
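        // All event types currently share the same 12-field struct layout; the 'Type'
        // field (0 = press/release, 1 = motion/valuator change, 2-5 = touch events)
        // tells the consuming script how to interpret the remaining fields.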

        PsychSetStructArrayDoubleElement("Type",         0, (double) evt.type,                        retevent);
        PsychSetStructArrayDoubleElement("Time",         0, evt.timestamp,                            retevent);
        PsychSetStructArrayDoubleElement("Pressed",      0, (double) (evt.status & (1 << 0)) ? 1 : 0, retevent);
        PsychSetStructArrayDoubleElement("Keycode",      0, (double) evt.rawEventCode,                retevent);
        PsychSetStructArrayDoubleElement("CookedKey",    0, (double) evt.cookedEventCode,             retevent);
        PsychSetStructArrayDoubleElement("ButtonStates", 0, (double) evt.buttonStates,                retevent);
        PsychSetStructArrayDoubleElement("Motion",       0, (double) (evt.status & (1 << 1)) ? 1 : 0, retevent);
        PsychSetStructArrayDoubleElement("X",            0, (double) evt.X,                           retevent);
        PsychSetStructArrayDoubleElement("Y",            0, (double) evt.Y,                           retevent);
        PsychSetStructArrayDoubleElement("NormX",        0, (double) evt.normX,                       retevent);
        PsychSetStructArrayDoubleElement("NormY",        0, (double) evt.normY,                       retevent);

        // Copy out all valuators, including the redundant (X,Y) values again:
        PsychAllocateNativeDoubleMat(1, evt.numValuators, 1, &v, &outMat);
        for (j = 0; j < evt.numValuators; j++)
            *(v++) = (double) evt.valuators[j];
        PsychSetStructArrayNativeElement("Valuators", 0, outMat, retevent);

        return(navail - 1);
    }
    else {
        // Return empty matrix:
        PsychCopyOutDoubleMatArg(outArgIndex, kPsychArgOptional, 0, 0, 0, foo);
        return(0);
    }
}