Example No. 1
static void cleanup(void)
{
    // Stop capture on both stereo streams, then close them.
    ar2VideoCapStop(vidL);
    ar2VideoCapStop(vidR);
    ar2VideoClose(vidL);
    ar2VideoClose(vidR);
    argCleanup();
    exit(0);
}
Example No. 2
bool ARToolKitVideoSource::close() {
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): called");
    if (deviceState == DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): if (deviceState == DEVICE_CLOSED) true, exiting returning true");
        return true;
    }

    if (deviceState == DEVICE_RUNNING) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): stopping video, calling ar2VideoCapStop(gVid)");
        int err = ar2VideoCapStop(gVid);
        if (err != 0)
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): Error \"%d\" stopping video", err);

        if (cparamLT) arParamLTFree(&cparamLT);

        deviceState = DEVICE_OPEN;
    }

    frameBuffer = NULL;
    frameBuffer2 = NULL;

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): closing video, calling ar2VideoClose(gVid)");
    if (ar2VideoClose(gVid) != 0)
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): error closing video");

    gVid = NULL;
    deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened.

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): exiting returning true");
    return true;
}
Example No. 3
bool ARToolKitVideoSource::close() {

    if (deviceState == DEVICE_CLOSED) return true;

    if (deviceState == DEVICE_RUNNING) {
        ARController::logv("Stopping video.");
        int err = ar2VideoCapStop(gVid);
        if (err != 0) ARController::logv("Error %d stopping video.", err);

        if (cparamLT) arParamLTFree(&cparamLT);

        deviceState = DEVICE_OPEN;
    }

    frameBuffer = NULL;
    frameBufferSize = 0;

    ARController::logv("Closing video.");
    if (ar2VideoClose(gVid) != 0) ARController::logv("Error closing video.");

    gVid = NULL;

    deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened.

    return true;
}
Example No. 4
int ar2VideoClose( AR2VideoParamT *vid )
{
    if (vid == NULL) return -1;

    // Stop capture first if it is still running.
    if (vid->video_cont_num >= 0) {
        ar2VideoCapStop( vid );
    }
    close(vid->fd);                     // Release the device file descriptor.
    if (vid->videoBuffer != NULL)
        free(vid->videoBuffer);
    free( vid );

    return 0;
}
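For context, ar2VideoClose() is the final step of the ar2Video* lifecycle used throughout these examples. Below is a minimal sketch of that lifecycle, assuming the ARToolKit5-style API visible above (ar2VideoOpen() taking a configuration string, ar2VideoGetImage() returning an AR2VideoBufferT*); it is an illustration, not code from any of the projects quoted here.

#include <stdio.h>
#include <AR/video.h>

int main(void)
{
    AR2VideoParamT *vid = ar2VideoOpen(""); // Empty string selects the default device/config.
    if (vid == NULL) {
        fprintf(stderr, "ar2VideoOpen failed.\n");
        return -1;
    }
    if (ar2VideoCapStart(vid) != 0) {       // Begin frame delivery.
        fprintf(stderr, "ar2VideoCapStart failed.\n");
        ar2VideoClose(vid);
        return -1;
    }
    AR2VideoBufferT *buffer = ar2VideoGetImage(vid); // Poll for a frame; may be NULL.
    if (buffer && buffer->fillFlag) {
        // ... process buffer->buff here ...
    }
    ar2VideoCapStop(vid);  // Always stop capture before closing, as the examples above do.
    ar2VideoClose(vid);    // Frees the device handle and its buffers.
    return 0;
}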
Example No. 5
static void cleanup(void)
{
    // Dispose of movie drawing structures, and stop and close the movie file.
    arglCleanup(gMovieArglSettings);
    gMovieArglSettings = NULL;
    if (gMovieVideo) {
        ar2VideoCapStop(gMovieVideo);
        ar2VideoClose(gMovieVideo);
    }
    // Tear down the main camera pipeline: graphics, pattern handles, capture,
    // pose/detection handles, camera parameters, and finally the video device.
    arglCleanup(gArglSettings);
    gArglSettings = NULL;
    arPattDetach(gARHandle);
    arPattDeleteHandle(gARPattHandle);
    arVideoCapStop();
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);
    arVideoClose();
}
Example No. 6
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    AR2VideoBufferT *movieBuffer;
    ARdouble err;
    int j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Grab a movie frame (if available).
    if ((movieBuffer = ar2VideoGetImage(gMovieVideo)) != NULL) {
        if (movieBuffer->buff && movieBuffer->fillFlag)
            gMovieImage = movieBuffer->buff;
    }

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image; // Save the fetched image.

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            fprintf(stderr, "mainLoop(): arDetectMarker failed.\n");
            exit(-1);
        }

        // Check through the marker_info array for the highest-confidence
        // visible marker matching our preferred pattern.
        k = -1;
        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].id == gPatt_id) {
                if (k == -1) k = j; // First marker detected.
                else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
            }
        }

        if (k != -1) {
            // Get the transformation between the marker and the real camera into gPatt_trans.
            if (gPatt_found && useContPoseEstimation) {
                err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_trans, gPatt_width, gPatt_trans);
            } else {
                err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
                // Marker has appeared, so un-pause the movie.
                ar2VideoCapStart(gMovieVideo);
            }
            gPatt_found = TRUE;
        } else {
            if (gPatt_found) {
                // Marker has disappeared, so pause the movie.
                ar2VideoCapStop(gMovieVideo);
            }
            gPatt_found = FALSE;
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
Example No. 7
int arVideoCapStop( void )
{
    if( vid == NULL ) return -1;

    return ar2VideoCapStop( vid );
}
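This wrapper exposes the single default camera held in the module-level vid pointer. A companion arVideoClose() would plausibly follow the same pattern; the following is a sketch of that shape, not ARToolKit's verbatim implementation:

int arVideoClose( void )
{
    int result;

    if( vid == NULL ) return -1;

    result = ar2VideoClose( vid );
    vid = NULL; // Drop the handle so later calls fail cleanly instead of double-freeing.

    return result;
}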
Example No. 8
void CWebCam::Quit(void)
{
    // Stop capture and release the ARToolKit video device.
    ar2VideoCapStop(ARTVideo);
    ar2VideoClose(ARTVideo);
}
/* CHECKED
*  PsychARVideoCaptureRate() - Start and stop video capture.
*
*  capturehandle = Grabber to start/stop.
*  capturerate = zero == Stop capture, non-zero == Capture at the given rate.
*  dropframes = 0 - Always deliver oldest frame in DMA ringbuffer. 1 - Always deliver newest frame.
*               --> 1 == drop frames in ringbuffer if behind -- low-latency capture.
*  startattime = Deadline (in system time) to wait for before the real start of capture.
*  Returns the realized capture framerate when starting, or the number of frames dropped during capture when stopping.
*/
int PsychARVideoCaptureRate(int capturehandle, double capturerate, int dropframes, double* startattime)
{
	int dropped = 0;
	float framerate = 0;
	
	// Retrieve device record for handle:
	PsychVidcapRecordType* capdev = PsychGetARVidcapRecord(capturehandle);
	
	// Start- or stop capture?
	if (capturerate > 0) {
		// Start capture:
		if (capdev->grabber_active) PsychErrorExitMsg(PsychError_user, "You tried to start video capture, but capture is already started!");
		
		// Reset statistics:
		capdev->last_pts = -1.0;
		capdev->nr_droppedframes = 0;
		capdev->frame_ready = 0;
		
		// Framedropping is not supported by libARVideo, so we implement it ourselves.
		// Store the 'dropframes' flag in our capdev struct, so the PsychARGetTextureFromCapture()
		// knows how to handle this:
		capdev->dropframes = (dropframes > 0) ? 1 : 0;

		// Ready to go! Now we just need to tell the camera to start its capture cycle:
		
		// Wait until start deadline reached:
		if (*startattime != 0) PsychWaitUntilSeconds(*startattime);
		
		// Start DMA driven isochronous data transfer:
		if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Starting capture...\n"); fflush(NULL);

		// Start the video capture for this camera.
		if (ar2VideoCapStart(capdev->camera) != DC1394_SUCCESS) {
			// Failed!
			PsychErrorExitMsg(PsychError_user, "Unable to start capture on camera via ar2VideoCapStart() - Start of video capture failed!");
		}
		
		// Record real start time:
		PsychGetAdjustedPrecisionTimerSeconds(startattime);
		
		if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Capture engine fully running...\n"); fflush(NULL);
		
		// Query framerate and convert to floating point value and assign it:
		#if PSYCH_SYSTEM == PSYCH_WINDOWS
		ar2VideoInqFreq(capdev->camera, &framerate);
		#else
		// TODO: Implement for non-Win32:
		framerate = (float) capturerate;
		#endif

		capdev->fps = (double) framerate;

		// Ok, capture is now started:
		capdev->grabber_active = 1;
		
		// Allocate conversion buffer if needed for YUV->RGB conversions.
		if (capdev->pixeldepth == -1) {
			// Not used at the moment!!
			// Software conversion of YUV -> RGB needed. Allocate a proper scratch-buffer:
			capdev->scratchbuffer = malloc(capdev->width * capdev->height * 3);
		}
		
		if(PsychPrefStateGet_Verbosity()>1) {
			printf("PTB-INFO: Capture started on device %i - Width x Height = %i x %i - Framerate: %f fps.\n", capturehandle, capdev->width, capdev->height, capdev->fps);
		}
	}
	else {
		// Stop capture:
		if (capdev->grabber_active) {
			// Stop isochronous data transfer from camera:
			if (ar2VideoCapStop(capdev->camera) != DC1394_SUCCESS) {
				PsychErrorExitMsg(PsychError_user, "Unable to stop video transfer on camera! (ar2VideoCapStop() failed)!");
			}
			
			// Ok, capture is now stopped.
			capdev->frame_ready = 0;
			capdev->grabber_active = 0;
			
			if (capdev->scratchbuffer) {
				// Release scratch-buffer:
				free(capdev->scratchbuffer);
				capdev->scratchbuffer = NULL;
			}

			if(PsychPrefStateGet_Verbosity()>1){
				// Output count of dropped frames:
				if ((dropped=capdev->nr_droppedframes) > 0) {
					printf("PTB-INFO: Video capture dropped %i frames on device %i to keep capture running in sync with realtime.\n", dropped, capturehandle); 
				}
				
				if (capdev->nrframes>0) capdev->avg_decompresstime/= (double) capdev->nrframes;
				printf("PTB-INFO: Average time spent in video decompressor (waiting/polling for new frames) was %f milliseconds.\n", (float) capdev->avg_decompresstime * 1000.0f);
				if (capdev->nrgfxframes>0) capdev->avg_gfxtime/= (double) capdev->nrgfxframes;
				printf("PTB-INFO: Average time spent in GetCapturedImage (intensity calculation Video->OpenGL texture conversion) was %f milliseconds.\n",  (float) capdev->avg_gfxtime * 1000.0f);
			}
		}
	}
	
	fflush(NULL);
    
	// Reset framecounters and statistics:
	capdev->nrframes = 0;
	capdev->avg_decompresstime = 0;
	capdev->nrgfxframes = 0;
	capdev->avg_gfxtime = 0;
	
	// Return either the real capture framerate (at start of capture) or count of dropped frames - at end of capture.
	return((capturerate!=0) ? (int) (capdev->fps + 0.5) : dropped);
}
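A hedged usage sketch of the start/stop protocol described in the comment block above; the handle value and the 30 fps rate are illustrative, and capturehandle is assumed to come from the module's earlier device-open call:

int    capturehandle = 0;  // Hypothetical: obtained when the grabber was opened.
double startattime = 0;    // 0 == no start deadline; updated to the real start time.
int    dropped;

// Start capturing at 30 fps, always delivering the newest frame (dropframes == 1).
PsychARVideoCaptureRate(capturehandle, 30.0, 1, &startattime);

// ... fetch and process frames ...

// Stop capture (capturerate == 0); the return value is now the dropped-frame count.
dropped = PsychARVideoCaptureRate(capturehandle, 0.0, 1, &startattime);
printf("Capture stopped, %d frames dropped.\n", dropped);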