Example #1
int main( int argc, char *argv[] )
{
    glutInit(&argc, argv);
    init(argc, argv);               // Application-specific setup (video paths, camera parameters, GL state).
    argSetDispFunc( mainLoop, 1 );  // Register the per-frame display callback.
    argSetKeyFunc( keyEvent );      // Register the keyboard handler.
    ar2VideoCapStart(vidL);         // Start capture on the left camera.
    ar2VideoCapStart(vidR);         // Start capture on the right camera.
    argMainLoop();                  // Hand control to the ARToolKit/GLUT event loop.
    return 0;
}
Example #2
int CWebCam::SetupWebCam(const char *cparam_names, char *vconfs)
{
	int			xsize, ysize;
	ARParam		wparam;

	// Open the video path, query the capture frame size, and load the camera parameters.
	if((ARTVideo = ar2VideoOpen(vconfs)) == 0) return(0);
	if(ar2VideoInqSize(ARTVideo, &xsize, &ysize) < 0) return(0);
	if(arParamLoad(cparam_names, 1, &wparam) < 0) return(0);

	// Scale the camera parameters to the capture size and initialise the tracker with them.
	arParamChangeSize(&wparam, xsize, ysize, &ARTCparam);
	arInitCparam(&ARTCparam);
	arParamDisp(&ARTCparam);
	ARTThreshhold = 100;	// Binarization threshold used by marker detection.

	// Build an OpenGL projection matrix from the camera parameters.
	arglCameraFrustumRH(&ARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, projectionMat);

	// Start video capture and request the first frame.
	if(ar2VideoCapStart(ARTVideo) != 0) return(0);

	ar2VideoCapNext(ARTVideo);

	return(1);
}
Example #3
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;
    AR2VideoBufferT *movieBuffer;
	ARdouble err;

    int             j, k;
	
	// Find out how long since mainLoop() last ran.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
	ms_prev = ms;
	
	// Grab a movie frame (if available).
    if ((movieBuffer = ar2VideoGetImage(gMovieVideo)) != NULL) {
        if (movieBuffer->buff && movieBuffer->fillFlag)
            gMovieImage = movieBuffer->buff;
    }
    
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
        
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARHandle, gARTImage) < 0) {
			exit(-1);
		}
		
		// Check through the marker_info array for highest confidence
		// visible marker matching our preferred pattern.
		k = -1;
		for (j = 0; j < gARHandle->marker_num; j++) {
			if (gARHandle->markerInfo[j].id == gPatt_id) {
				if (k == -1) k = j; // First marker detected.
				else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
			}
		}
		
		if (k != -1) {
			// Get the transformation between the marker and the real camera into gPatt_trans.
            if (gPatt_found && useContPoseEstimation) {
                err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_trans, gPatt_width, gPatt_trans);
            } else {
                err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
                // Marker has appeared, so un-pause movie.
                ar2VideoCapStart(gMovieVideo);
            }
			gPatt_found = TRUE;
		} else {
            if (gPatt_found) {
                // Marker has disappeared, so pause movie.
                ar2VideoCapStop(gMovieVideo);
            }
			gPatt_found = FALSE;
		}
		
		// Tell GLUT the display has changed.
		glutPostRedisplay();
	}
}
Example #4
bool ARToolKitVideoSource::open() {
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): called, opening ARToolKit video");
    
    if (deviceState != DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: device is already open, exiting returning false");
        return false;
    }

	// Open the video path
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): arVideoOpen unable to open connection to camera using configuration '%s', exiting returning false", videoConfiguration);
    	return false;
	}

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Opened connection to camera using configuration '%s'", videoConfiguration);
	deviceState = DEVICE_OPEN;
    
    // Find the size of the video
	if (ar2VideoGetSize(gVid, &videoWidth, &videoHeight) < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get video size, calling close(), exiting returning false");
        this->close();
		return false;
	}
	
	// Get the format in which the camera is returning pixels
	pixelFormat = ar2VideoGetPixelFormat(gVid);
	if (pixelFormat < 0 ) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get pixel format, calling close(), exiting returning false");
        this->close();
		return false;
	}
    
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Video %dx%d@%dBpp (%s)", videoWidth, videoHeight, arUtilGetPixelSize(pixelFormat), arUtilGetPixelFormatName(pixelFormat));

#ifndef _WINRT
    // Translate pixel format into OpenGL texture internal format, format, and type.
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_RGBA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_BGRA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
		case AR_PIXEL_FORMAT_ABGR:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_ABGR_EXT;
            glPixType = GL_UNSIGNED_BYTE;
			break;
		case AR_PIXEL_FORMAT_ARGB:
				glPixIntFormat = GL_RGBA;
				glPixFormat = GL_BGRA;
#ifdef AR_BIG_ENDIAN
				glPixType = GL_UNSIGNED_INT_8_8_8_8_REV;
#else
				glPixType = GL_UNSIGNED_INT_8_8_8_8;
#endif
			break;
		case AR_PIXEL_FORMAT_BGR:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_BGR;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            glPixIntFormat = GL_LUMINANCE;
            glPixFormat = GL_LUMINANCE;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB_565:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_SHORT_5_6_5;
            break;
        case AR_PIXEL_FORMAT_RGBA_5551:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_5_5_5_1;
            break;
        case AR_PIXEL_FORMAT_RGBA_4444:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_4_4_4_4;
            break;
        default:
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unsupported pixel format, calling close(), exiting returning false");
            this->close();
            return false;
    }
#endif // !_WINRT

#if TARGET_PLATFORM_IOS
    // Tell arVideo what the typical focal distance will be. Note that this does NOT
    // change the actual focus, but on devices with non-fixed focus, it lets arVideo
    // choose a better set of camera parameters.
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_IOS_FOCUS, AR_VIDEO_IOS_FOCUS_0_3M); // Default is 0.3 metres. See <AR/sys/videoiPhone.h> for allowable values.
#endif
    
    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    // Prefer internal camera parameters.
    if (ar2VideoGetCParam(gVid, &cparam) == 0) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Using internal camera parameters.");
    } else {
        const char cparam_name_default[] = "camera_para.dat"; // Default name for the camera parameters.
        if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters from buffer, calling close(), exiting returning false");
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from buffer");
            }
        } else {
            if (arParamLoad((cameraParam ? cameraParam : cparam_name_default), 1, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters %s, calling close(), exiting returning false",
                                   (cameraParam ? cameraParam : cparam_name_default));        
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open():Camera parameters loaded from %s", (cameraParam ? cameraParam : cparam_name_default));
            }
        }
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_ERROR, "*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
	if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to create camera parameters lookup table, calling close(), exiting returning false");
        this->close();
		return false;
	}

	int err = ar2VideoCapStart(gVid);
	if (err != 0) {
        if (err == -2) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error starting video: device unavailable (error %d), setting ARW_ERROR_DEVICE_UNAVAILABLE error state", err);
            setError(ARW_ERROR_DEVICE_UNAVAILABLE);
        } else {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error \"%d\" starting video capture", err);
        }
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): calling close(), exiting returning false");
        this->close();
		return false;		
	}

	deviceState = DEVICE_RUNNING;

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): exiting returning true, deviceState = DEVICE_RUNNING, video capture started");
	return true;
}
Example #5
int arVideoCapStart( void )
{
    if( vid == NULL ) return -1;

    return ar2VideoCapStart( vid );
}
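arVideoCapStart() is the single-camera wrapper around ar2VideoCapStart() for the module-global video handle opened by arVideoOpen(). Below is a minimal sketch of how this wrapper-level API is typically driven; the empty configuration string and the polling loop are illustrative assumptions rather than part of the example above.

#include <AR/ar.h>
#include <AR/video.h>

int videoSmokeTest(void)
{
    ARUint8 *frame;

    if (arVideoOpen("") < 0) return -1;          // Open the default video device.
    if (arVideoCapStart() != 0) {                // Begin streaming (wraps ar2VideoCapStart()).
        arVideoClose();
        return -1;
    }
    while ((frame = arVideoGetImage()) == NULL) arUtilSleep(2);  // Poll until the first frame arrives.
    // ... hand 'frame' to the marker detector, display it, etc. ...
    arVideoCapNext();                            // Release the buffer so the next frame can be captured.
    arVideoCapStop();                            // Stop streaming.
    arVideoClose();                              // Release the device.
    return 0;
}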
/* CHECKED
*  PsychARVideoCaptureRate() - Start and stop video capture.
*
*  capturehandle = Grabber to start or stop.
*  capturerate = zero == stop capture, non-zero == start capture.
*  dropframes = 0 - Always deliver the oldest frame in the DMA ringbuffer. 1 - Always deliver the newest frame.
*               --> 1 == drop frames in the ringbuffer if we fall behind -- low-latency capture.
*  startattime = Deadline (in system time) to wait for before the real start of capture.
*  Returns the real capture framerate when starting capture, or the number of dropped frames when stopping.
*/
int PsychARVideoCaptureRate(int capturehandle, double capturerate, int dropframes, double* startattime)
{
	int dropped = 0;
	float framerate = 0;
	
	// Retrieve device record for handle:
	PsychVidcapRecordType* capdev = PsychGetARVidcapRecord(capturehandle);
	
	// Start or stop capture?
	if (capturerate > 0) {
		// Start capture:
		if (capdev->grabber_active) PsychErrorExitMsg(PsychError_user, "You tried to start video capture, but capture is already started!");
		
		// Reset statistics:
		capdev->last_pts = -1.0;
		capdev->nr_droppedframes = 0;
		capdev->frame_ready = 0;
		
		// Framedropping is not supported by libARVideo, so we implement it ourselves.
		// Store the 'dropframes' flag in our capdev struct, so the PsychARGetTextureFromCapture()
		// knows how to handle this:
		capdev->dropframes = (dropframes > 0) ? 1 : 0;

		// Ready to go! Now we just need to tell the camera to start its capture cycle:
		
		// Wait until start deadline reached:
		if (*startattime != 0) PsychWaitUntilSeconds(*startattime);
		
		// Start DMA driven isochronous data transfer:
		if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Starting capture...\n"); fflush(NULL);

		// Start the video capture for this camera.
		if (ar2VideoCapStart(capdev->camera) !=DC1394_SUCCESS) {
			// Failed!
			PsychErrorExitMsg(PsychError_user, "Unable to start capture on camera via ar2VideoCapStart() - Start of video capture failed!");
		}
		
		// Record real start time:
		PsychGetAdjustedPrecisionTimerSeconds(startattime);
		
		if(PsychPrefStateGet_Verbosity()>5) printf("PTB-DEBUG: Capture engine fully running...\n"); fflush(NULL);
		
		// Query framerate and convert to floating point value and assign it:
		#if PSYCH_SYSTEM == PSYCH_WINDOWS
		ar2VideoInqFreq(capdev->camera, &framerate);
		#else
		// TODO: Implement for non-Win32:
		framerate = (float) capturerate;
		#endif

		capdev->fps = (double) framerate;

		// Ok, capture is now started:
		capdev->grabber_active = 1;
		
		// Allocate conversion buffer if needed for YUV->RGB conversions.
		if (capdev->pixeldepth == -1) {
			// Not used at the moment!!
			// Software conversion of YUV -> RGB needed. Allocate a proper scratch-buffer:
			capdev->scratchbuffer = malloc(capdev->width * capdev->height * 3);
		}
		
		if(PsychPrefStateGet_Verbosity()>1) {
			printf("PTB-INFO: Capture started on device %i - Width x Height = %i x %i - Framerate: %f fps.\n", capturehandle, capdev->width, capdev->height, capdev->fps);
		}
	}
	else {
		// Stop capture:
		if (capdev->grabber_active) {
			// Stop isochronous data transfer from camera:
			if (ar2VideoCapStop(capdev->camera) !=DC1394_SUCCESS) {
				PsychErrorExitMsg(PsychError_user, "Unable to stop video transfer on camera! (ar2VideoCapStop() failed)!");
			}
			
			// Ok, capture is now stopped.
			capdev->frame_ready = 0;
			capdev->grabber_active = 0;
			
			if (capdev->scratchbuffer) {
				// Release scratch-buffer:
				free(capdev->scratchbuffer);
				capdev->scratchbuffer = NULL;
			}

			if(PsychPrefStateGet_Verbosity()>1){
				// Output count of dropped frames:
				if ((dropped=capdev->nr_droppedframes) > 0) {
					printf("PTB-INFO: Video capture dropped %i frames on device %i to keep capture running in sync with realtime.\n", dropped, capturehandle); 
				}
				
				if (capdev->nrframes>0) capdev->avg_decompresstime/= (double) capdev->nrframes;
				printf("PTB-INFO: Average time spent in video decompressor (waiting/polling for new frames) was %f milliseconds.\n", (float) capdev->avg_decompresstime * 1000.0f);
				if (capdev->nrgfxframes>0) capdev->avg_gfxtime/= (double) capdev->nrgfxframes;
				printf("PTB-INFO: Average time spent in GetCapturedImage (intensity calculation Video->OpenGL texture conversion) was %f milliseconds.\n",  (float) capdev->avg_gfxtime * 1000.0f);
			}
		}
	}
	
	fflush(NULL);
    
	// Reset framecounters and statistics:
	capdev->nrframes = 0;
	capdev->avg_decompresstime = 0;
	capdev->nrgfxframes = 0;
	capdev->avg_gfxtime = 0;
	
	// Return either the real capture framerate (at start of capture) or count of dropped frames - at end of capture.
	return((capturerate!=0) ? (int) (capdev->fps + 0.5) : dropped);
}
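For context, a brief usage sketch for PsychARVideoCaptureRate(); the capture handle value is an assumption and would come from the engine's device-open call.

	double when = 0.0;                                  // 0 == start capturing immediately.
	int handle = 0;                                     // Capture handle from the device-open step (assumed).
	int dropped;

	PsychARVideoCaptureRate(handle, 30.0, 1, &when);    // Start capture at ~30 fps, prefer newest frames (low latency).
	// ... fetch frames via PsychARGetTextureFromCapture() ...
	dropped = PsychARVideoCaptureRate(handle, 0.0, 0, &when);  // Stop capture; returns the number of dropped frames.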