/* PsychWaitIntervalSeconds() - Sleep for 'delaySecs' seconds.
 *
 * Converts the relative delay into an absolute deadline in system time
 * and delegates the actual waiting to PsychWaitUntilSeconds().
 * Non-positive delays return immediately without waiting.
 */
void PsychWaitIntervalSeconds(double delaySecs)
{
    double wakeupTime;

    // Nothing to do for zero or negative delays:
    if (delaySecs <= 0)
        return;

    // Sample current system time:
    PsychGetPrecisionTimerSeconds(&wakeupTime);

    // Translate relative delay into an absolute deadline:
    wakeupTime += delaySecs;

    // Block until that deadline has been reached:
    PsychWaitUntilSeconds(wakeupTime);
}
PsychError WAITSECSWaitUntilSecs(void) { static char useString[] = "[realWakeupTimeSecs] = WaitSecs('UntilTime', whenSecs);"; // 1 static char synopsisString[] = "Wait until at least system time \"whenSecs\" has been reached. " "Optionally, return the real wakeup time \"realWakeupTimeSecs\".\n" "This allows conveniently waiting until an absolute point in time " "has been reached, or to allow drift-free waiting for a well defined " "interval, more accurate than the standard WaitSecs() call.\n" "Example:\n" "Wait until 0.6 secs after last stimulus onset, if vbl=Screen('Flip', window); " "was the onset timestamp vbl from a previous flip:\n" "realwakeup = WaitSecs('UntilTime', vbl + 0.6);\n\n" "In a perfect world, realwakeup == vbl + 0.6, in reality it will be\n" "realwakeup == vbl + 0.6 + randomjitter; with randomjitter being the " "hopefully small scheduling delay of your operating system. If the " "delay is high or varies a lot between trials then your system has " "noisy timing or real timing problems.\n"; static char seeAlsoString[] = ""; double waitUntilSecs; double now; //all sub functions should have these two lines PsychPushHelp(useString, synopsisString,seeAlsoString); if(PsychIsGiveHelp()){PsychGiveHelp();return(PsychError_none);}; //check to see if the user supplied superfluous arguments PsychErrorExit(PsychCapNumOutputArgs(1)); PsychErrorExit(PsychCapNumInputArgs(1)); PsychCopyInDoubleArg(1,TRUE,&waitUntilSecs); PsychWaitUntilSeconds(waitUntilSecs); // Return current system time at end of sleep: PsychGetAdjustedPrecisionTimerSeconds(&now); PsychCopyOutDoubleArg(1, FALSE, now); return(PsychError_none); }
/* CHECKED * PsychARVideoCaptureRate() - Start- and stop video capture. * * capturehandle = Grabber to start-/stop. * playbackrate = zero == Stop capture, non-zero == Capture * dropframes = 0 - Always deliver oldest frame in DMA ringbuffer. 1 - Always deliver newest frame. * --> 1 == drop frames in ringbuffer if behind -- low-latency capture. * startattime = Deadline (in system time) for which to wait before real start of capture. * Returns Number of dropped frames during capture. */ int PsychARVideoCaptureRate(int capturehandle, double capturerate, int dropframes, double* startattime) { int dropped = 0; float framerate = 0; // Retrieve device record for handle: PsychVidcapRecordType* capdev = PsychGetARVidcapRecord(capturehandle); // Start- or stop capture? if (capturerate > 0) { // Start capture: if (capdev->grabber_active) PsychErrorExitMsg(PsychError_user, "You tried to start video capture, but capture is already started!"); // Reset statistics: capdev->last_pts = -1.0; capdev->nr_droppedframes = 0; capdev->frame_ready = 0; // Framedropping is not supported by libARVideo, so we implement it ourselves. // Store the 'dropframes' flag in our capdev struct, so the PsychARGetTextureFromCapture() // knows how to handle this: capdev->dropframes = (dropframes > 0) ? 1 : 0; // Ready to go! Now we just need to tell the camera to start its capture cycle: // Wait until start deadline reached: if (*startattime != 0) PsychWaitUntilSeconds(*startattime); // Start DMA driven isochronous data transfer: if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Starting capture...\n"); fflush(NULL); // Start the video capture for this camera. if (ar2VideoCapStart(capdev->camera) !=DC1394_SUCCESS) { // Failed! 
PsychErrorExitMsg(PsychError_user, "Unable to start capture on camera via ar2VideoCapStart() - Start of video capture failed!"); } // Record real start time: PsychGetAdjustedPrecisionTimerSeconds(startattime); if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Capture engine fully running...\n"); fflush(NULL); // Query framerate and convert to floating point value and assign it: #if PSYCH_SYSTEM == PSYCH_WINDOWS ar2VideoInqFreq(capdev->camera, &framerate); #else // TODO: Implement for non-Win32: framerate = (float) capturerate; #endif capdev->fps = (double) framerate; // Ok, capture is now started: capdev->grabber_active = 1; // Allocate conversion buffer if needed for YUV->RGB conversions. if (capdev->pixeldepth == -1) { // Not used at the moment!! // Software conversion of YUV -> RGB needed. Allocate a proper scratch-buffer: capdev->scratchbuffer = malloc(capdev->width * capdev->height * 3); } if(PsychPrefStateGet_Verbosity()>1) { printf("PTB-INFO: Capture started on device %i - Width x Height = %i x %i - Framerate: %f fps.\n", capturehandle, capdev->width, capdev->height, capdev->fps); } } else { // Stop capture: if (capdev->grabber_active) { // Stop isochronous data transfer from camera: if (ar2VideoCapStop(capdev->camera) !=DC1394_SUCCESS) { PsychErrorExitMsg(PsychError_user, "Unable to stop video transfer on camera! (ar2VideoCapStop() failed)!"); } // Ok, capture is now stopped. 
capdev->frame_ready = 0; capdev->grabber_active = 0; if (capdev->scratchbuffer) { // Release scratch-buffer: free(capdev->scratchbuffer); capdev->scratchbuffer = NULL; } if(PsychPrefStateGet_Verbosity()>1){ // Output count of dropped frames: if ((dropped=capdev->nr_droppedframes) > 0) { printf("PTB-INFO: Video capture dropped %i frames on device %i to keep capture running in sync with realtime.\n", dropped, capturehandle); } if (capdev->nrframes>0) capdev->avg_decompresstime/= (double) capdev->nrframes; printf("PTB-INFO: Average time spent in video decompressor (waiting/polling for new frames) was %f milliseconds.\n", (float) capdev->avg_decompresstime * 1000.0f); if (capdev->nrgfxframes>0) capdev->avg_gfxtime/= (double) capdev->nrgfxframes; printf("PTB-INFO: Average time spent in GetCapturedImage (intensity calculation Video->OpenGL texture conversion) was %f milliseconds.\n", (float) capdev->avg_gfxtime * 1000.0f); } } } fflush(NULL); // Reset framecounters and statistics: capdev->nrframes = 0; capdev->avg_decompresstime = 0; capdev->nrgfxframes = 0; capdev->avg_gfxtime = 0; // Return either the real capture framerate (at start of capture) or count of dropped frames - at end of capture. return((capturerate!=0) ? (int) (capdev->fps + 0.5) : dropped); }