Example #1
0
bool AndroidVideoSource::open() {
    
	ARController::logv("Opening Android Video Source.");
    
    if (deviceState != DEVICE_CLOSED) {
        ARController::logv("Error: device is already open.");
        return false;
    }
    
	// On Android, ARVideo doesn't actually provide the frames, but it is needed to handle
    // fetching of the camera parameters. Note that if the current working directory
    // isn't already the directory where the camera parametere cache should be created,
    // then the videoconfiguration should include the option 'cachedir="/path/to/cache"'.
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
		ARController::logv("arVideoOpen unable to open connection to camera.");
    	return false;
	}
	//ARController::logv("Opened connection to camera.");

    pixelFormat = ar2VideoGetPixelFormat(gVid);
    if (pixelFormat == AR_PIXEL_FORMAT_INVALID) {
        ARController::logv("AndroidVideoSource::getVideoReadyAndroid: Error: No pixel format set.\n");
        goto bail;
    }
    
	deviceState = DEVICE_OPEN;
	return true;
    
bail:
    ar2VideoClose(gVid);
    gVid = NULL;
    return false;
}
Example #2
0
// Opens the QuickTime movie at 'path' as a video source, records its size and
// pixel format, and sets up gsub_lite for drawing its frames.
// Returns TRUE on success; FALSE on any failure, with the video connection
// closed again (gMovieVideo reset to NULL) so a later call can retry.
static int setupMovie(const char *path)
{
    char            *movieVconf;
    int              xsize, ysize;
    AR_PIXEL_FORMAT  pixFormat;

    // Construct the vconf string; 2Kb is ample for the URL plus options.
    // Use the QuickTime video input, loop the movie, and start it paused.
    // (It will be unpaused in mainLoop().) snprintf truncates safely if the
    // path is pathologically long.
    arMalloc(movieVconf, char, 2048);
    snprintf(movieVconf, 2048, "-device=QUICKTIME -movie=\"%s\" -loop -pause", path);

    // Open the movie.
    gMovieVideo = ar2VideoOpen(movieVconf);
    free(movieVconf);
    if (!gMovieVideo)
    {
        ARLOGe("setupMovie(): Unable to open movie.\n");
        return (FALSE);
    }

    // Find the size of the movie. Close the connection on failure rather
    // than leaking it (the original code returned with gMovieVideo open).
    if (ar2VideoGetSize(gMovieVideo, &xsize, &ysize) < 0)
    {
        ARLOGe("setupMovie(): Unable to determine movie size.\n");
        ar2VideoClose(gMovieVideo);
        gMovieVideo = NULL;
        return (FALSE);
    }

    // Get the pixel format of the movie.
    pixFormat = ar2VideoGetPixelFormat(gMovieVideo);
    if (pixFormat == AR_PIXEL_FORMAT_INVALID)
    {
        ARLOGe("setupMovie(): Movie is using unsupported pixel format.\n");
        ar2VideoClose(gMovieVideo);
        gMovieVideo = NULL;
        return (FALSE);
    }

    // Set up an ARParam object for the movie input.
    arParamClear(&gMovieCparam, xsize, ysize, AR_DIST_FUNCTION_VERSION_DEFAULT);

    // For convenience, we will use gsub_lite to draw the actual pixels. Set it up now.
    gMovieArglSettings = arglSetupForCurrentContext(&gMovieCparam, pixFormat);
    if (!gMovieArglSettings)
    {
        ARLOGe("setupMovie(): Unable to set up argl.\n");
        ar2VideoClose(gMovieVideo);
        gMovieVideo = NULL;
        return (FALSE);
    }
    arglDistortionCompensationSet(gMovieArglSettings, 0);

    return (TRUE);
}
Example #3
0
// Parses command-line options, opens both cameras, loads and rescales their
// calibration parameters, creates the side-by-side display viewports, and
// allocates the OpenCV/ICP buffers used for stereo chessboard calibration.
// Writes results into module globals (vidL/vidR, paramL/paramR, vpL/vpR,
// calibImage*, corners*, worldCoord, calibData, ...). Calls exit() on any
// unrecoverable error, and usage() (which presumably exits) on bad arguments.
static void init(int argc, char *argv[])
{
    char              *vconfL = NULL;
    char              *vconfR = NULL;
    char              *cparaL = NULL;
    char              *cparaR = NULL;
    char               cparaLDefault[] = "Data/cparaL.dat";
    char               cparaRDefault[] = "Data/cparaR.dat";

    ARParam            wparam;
    ARGViewport        viewport;
    int                i, j;
    int                gotTwoPartOption;
    int                screenWidth, screenHeight, screenMargin;
    double             wscalef, hscalef, scalef;

    // Zero means "not set on the command line"; compile-time defaults are
    // applied after the option loop below.
    chessboardCornerNumX = 0;
    chessboardCornerNumY = 0;
    calibImageNum        = 0;
    patternWidth         = 0.0f;

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconfL") == 0) {
                i++;
                vconfL = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--vconfR") == 0) {
                i++;
                vconfR = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cparaL") == 0) {
                i++;
                cparaL = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cparaR") == 0) {
                i++;
                cparaR = argv[i];
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if( strncmp(argv[i], "-cornerx=", 9) == 0 ) {
                // "-opt=value" forms: parse the value after the '=' and
                // reject non-numeric or non-positive values via usage().
                if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumX) != 1 ) usage(argv[0]);
                if( chessboardCornerNumX <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-cornery=", 9) == 0 ) {
                if( sscanf(&(argv[i][9]), "%d", &chessboardCornerNumY) != 1 ) usage(argv[0]);
                if( chessboardCornerNumY <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-imagenum=", 10) == 0 ) {
                if( sscanf(&(argv[i][10]), "%d", &calibImageNum) != 1 ) usage(argv[0]);
                if( calibImageNum <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-pattwidth=", 11) == 0 ) {
                if( sscanf(&(argv[i][11]), "%f", &patternWidth) != 1 ) usage(argv[0]);
                if( patternWidth <= 0 ) usage(argv[0]);
            } else if( strncmp(argv[i], "-cparaL=", 8) == 0 ) {
                cparaL = &(argv[i][8]);
            } else if( strncmp(argv[i], "-cparaR=", 8) == 0 ) {
                cparaR = &(argv[i][8]);
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    // Apply compile-time defaults for anything not specified on the command line.
    if( chessboardCornerNumX == 0 ) chessboardCornerNumX = CHESSBOARD_CORNER_NUM_X;
    if( chessboardCornerNumY == 0 ) chessboardCornerNumY = CHESSBOARD_CORNER_NUM_Y;
    if( calibImageNum == 0 )        calibImageNum = CALIB_IMAGE_NUM;
    if( patternWidth == 0.0f )      patternWidth = (float)CHESSBOARD_PATTERN_WIDTH;
    if (!cparaL) cparaL = cparaLDefault;
    if (!cparaR) cparaR = cparaRDefault;
    ARLOG("CHESSBOARD_CORNER_NUM_X = %d\n", chessboardCornerNumX);
    ARLOG("CHESSBOARD_CORNER_NUM_Y = %d\n", chessboardCornerNumY);
    ARLOG("CHESSBOARD_PATTERN_WIDTH = %f\n", patternWidth);
    ARLOG("CALIB_IMAGE_NUM = %d\n", calibImageNum);
    // NOTE(review): vconfL/vconfR may still be NULL here; passing NULL to a
    // "%s" conversion is undefined behavior in standard printf — confirm
    // ARLOG's implementation tolerates it.
    ARLOG("Video parameter Left : %s\n", vconfL);
    ARLOG("Video parameter Right: %s\n", vconfR);
    ARLOG("Camera parameter Left : %s\n", cparaL);
    ARLOG("Camera parameter Right: %s\n", cparaR);

    // Open both video streams and query their frame sizes and pixel formats.
    // A NULL vconf asks ar2VideoOpen for the default device.
    if( (vidL=ar2VideoOpen(vconfL)) == NULL ) {
        ARLOGe("Cannot found the first camera.\n");
        exit(0);
    }
    if( (vidR=ar2VideoOpen(vconfR)) == NULL ) {
        ARLOGe("Cannot found the second camera.\n");
        exit(0);
    }
    if( ar2VideoGetSize(vidL, &xsizeL, &ysizeL) < 0 ) exit(0);
    if( ar2VideoGetSize(vidR, &xsizeR, &ysizeR) < 0 ) exit(0);
    if( (pixFormatL=ar2VideoGetPixelFormat(vidL)) < 0 ) exit(0);
    if( (pixFormatR=ar2VideoGetPixelFormat(vidR)) < 0 ) exit(0);
    ARLOG("Image size for the left camera  = (%d,%d)\n", xsizeL, ysizeL);
    ARLOG("Image size for the right camera = (%d,%d)\n", xsizeR, ysizeR);

    // Load each camera's lens parameters and rescale them to the actual
    // capture resolution (wparam is a scratch ARParam reused for both loads).
    if( arParamLoad(cparaL, 1, &wparam) < 0 ) {
        ARLOGe("Camera parameter load error !!   %s\n", cparaL);
        exit(0);
    }
    arParamChangeSize( &wparam, xsizeL, ysizeL, &paramL );
    ARLOG("*** Camera Parameter for the left camera ***\n");
    arParamDisp( &paramL );
    if( arParamLoad(cparaR, 1, &wparam) < 0 ) {
        ARLOGe("Camera parameter load error !!   %s\n", cparaR);
        exit(0);
    }
    arParamChangeSize( &wparam, xsizeR, ysizeR, &paramR );
    ARLOG("*** Camera Parameter for the right camera ***\n");
    arParamDisp( &paramR );

    // Compute a uniform scale factor so the side-by-side window (left width +
    // right width, max of heights) fits on screen with SCREEN_SIZE_MARGIN.
    // glutGet may return 0 if the screen size is unknown; scale 1.0 then.
    screenWidth = glutGet(GLUT_SCREEN_WIDTH);
    screenHeight = glutGet(GLUT_SCREEN_HEIGHT);
    if (screenWidth > 0 && screenHeight > 0) {
        screenMargin = (int)(MAX(screenWidth, screenHeight) * SCREEN_SIZE_MARGIN);
        if ((screenWidth - screenMargin) < (xsizeL + xsizeR) || (screenHeight - screenMargin) < MAX(ysizeL, ysizeR)) {
            wscalef = (double)(screenWidth - screenMargin) / (double)(xsizeL + xsizeR);
            hscalef = (double)(screenHeight - screenMargin) / (double)MAX(ysizeL, ysizeR);
            scalef = MIN(wscalef, hscalef);
            ARLOG("Scaling %dx%d window by %0.3f to fit onto %dx%d screen (with %2.0f%% margin).\n", xsizeL + xsizeR, MAX(ysizeL, ysizeR), scalef, screenWidth, screenHeight, SCREEN_SIZE_MARGIN*100.0);
        } else {
            scalef = 1.0;
        }
    } else {
        scalef = 1.0;
    }

    /* Open the graphics window and create one viewport per camera: the left
       camera at x=0, the right camera immediately to its right. The single
       'viewport' struct is filled twice, once per argCreateViewport call. */
    if( argCreateWindow((int)((xsizeL + xsizeR)*scalef), (int)(MAX(ysizeL, ysizeR)*scalef)) < 0 ) {
        ARLOGe("Error: argCreateWindow.\n");
        exit(0);
    }
    viewport.sx = 0;
    viewport.sy = 0;
    viewport.xsize = (int)(xsizeL*scalef);
    viewport.ysize = (int)(ysizeL*scalef);
    if( (vpL=argCreateViewport(&viewport)) == NULL ) {
        ARLOGe("Error: argCreateViewport.\n");
        exit(0);
    }
    viewport.sx = (int)(xsizeL*scalef);
    viewport.sy = 0;
    viewport.xsize = (int)(xsizeR*scalef);
    viewport.ysize = (int)(ysizeR*scalef);
    if( (vpR=argCreateViewport(&viewport)) == NULL ) {
        ARLOGe("Error: argCreateViewport.\n");
        exit(0);
    }
    argViewportSetPixFormat( vpL, pixFormatL );
    argViewportSetPixFormat( vpR, pixFormatR );
    argViewportSetCparam( vpL, &paramL );
    argViewportSetCparam( vpR, &paramR );
    argViewportSetDispMethod( vpL, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
    argViewportSetDispMethod( vpR, AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
    argViewportSetDispMode(vpL, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO);
    argViewportSetDispMode(vpR, AR_GL_DISP_MODE_FIT_TO_VIEWPORT_KEEP_ASPECT_RATIO);


    // Allocate the grayscale working images used by the OpenCV corner finder
    // and the per-corner buffers for the ICP calibration.
    calibImageL = cvCreateImage( cvSize(xsizeL, ysizeL), IPL_DEPTH_8U, 1);
    calibImageR = cvCreateImage( cvSize(xsizeR, ysizeR), IPL_DEPTH_8U, 1);
    arMalloc(cornersL, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY);
    arMalloc(cornersR, CvPoint2D32f, chessboardCornerNumX*chessboardCornerNumY);
    arMalloc(worldCoord, ICP3DCoordT, chessboardCornerNumX*chessboardCornerNumY);
    // Pre-compute the 3D world coordinates of each chessboard corner on the
    // Z = 0 plane, spaced patternWidth apart, in row-major (X-major) order.
    for( i = 0; i < chessboardCornerNumX; i++ ) {
        for( j = 0; j < chessboardCornerNumY; j++ ) {
            worldCoord[i*chessboardCornerNumY+j].x = patternWidth*i;
            worldCoord[i*chessboardCornerNumY+j].y = patternWidth*j;
            worldCoord[i*chessboardCornerNumY+j].z = 0.0;
        }
    }
    // One calibration record per captured image; every record shares the same
    // worldCoord array since the chessboard geometry never changes.
    arMalloc(calibData, ICPCalibDataT, calibImageNum);
    for( i = 0; i < calibImageNum; i++ ) {
        arMalloc(calibData[i].screenCoordL, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY);
        arMalloc(calibData[i].screenCoordR, ICP2DCoordT, chessboardCornerNumX*chessboardCornerNumY);
        calibData[i].worldCoordL = worldCoord;
        calibData[i].worldCoordR = worldCoord;
        calibData[i].numL = chessboardCornerNumX*chessboardCornerNumY;
        calibData[i].numR = chessboardCornerNumX*chessboardCornerNumY;
    }

    return;
}
bool ARToolKitVideoSource::open() {
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): called, opening ARToolKit video");
    
    if (deviceState != DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: device is already open, exiting returning false");
        return false;
    }

	// Open the video path
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): arVideoOpen unable to open connection to camera using configuration '%s', exiting returning false", videoConfiguration);
    	return false;
	}

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Opened connection to camera using configuration '%s'", videoConfiguration);
	deviceState = DEVICE_OPEN;
    
    // Find the size of the video
	if (ar2VideoGetSize(gVid, &videoWidth, &videoHeight) < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get video size, calling close(), exiting returning false");
        this->close();
		return false;
	}
	
	// Get the format in which the camera is returning pixels
	pixelFormat = ar2VideoGetPixelFormat(gVid);
	if (pixelFormat < 0 ) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get pixel format, calling close(), exiting returning false");
        this->close();
		return false;
	}
    
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Video %dx%d@%dBpp (%s)", videoWidth, videoHeight, arUtilGetPixelSize(pixelFormat), arUtilGetPixelFormatName(pixelFormat));

#ifndef _WINRT
    // Translate pixel format into OpenGL texture intformat, format, and type.
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_RGBA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_BGRA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
		case AR_PIXEL_FORMAT_ABGR:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_ABGR_EXT;
            glPixType = GL_UNSIGNED_BYTE;
			break;
		case AR_PIXEL_FORMAT_ARGB:
				glPixIntFormat = GL_RGBA;
				glPixFormat = GL_BGRA;
#ifdef AR_BIG_ENDIAN
				glPixType = GL_UNSIGNED_INT_8_8_8_8_REV;
#else
				glPixType = GL_UNSIGNED_INT_8_8_8_8;
#endif
			break;
		case AR_PIXEL_FORMAT_BGR:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_BGR;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            glPixIntFormat = GL_LUMINANCE;
            glPixFormat = GL_LUMINANCE;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB_565:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_SHORT_5_6_5;
            break;
        case AR_PIXEL_FORMAT_RGBA_5551:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_5_5_5_1;
            break;
        case AR_PIXEL_FORMAT_RGBA_4444:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_4_4_4_4;
            break;
        default:
            ARController::logv("Error: Unsupported pixel format.\n");
            this->close();
			return false;
            break;
    }
#endif // !_WINRT

#if TARGET_PLATFORM_IOS
    // Tell arVideo what the typical focal distance will be. Note that this does NOT
    // change the actual focus, but on devices with non-fixed focus, it lets arVideo
    // choose a better set of camera parameters.
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_IOS_FOCUS, AR_VIDEO_IOS_FOCUS_0_3M); // Default is 0.3 metres. See <AR/sys/videoiPhone.h> for allowable values.
#endif
    
    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    // Prefer internal camera parameters.
    if (ar2VideoGetCParam(gVid, &cparam) == 0) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Using internal camera parameters.");
    } else {
        const char cparam_name_default[] = "camera_para.dat"; // Default name for the camera parameters.
        if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters from buffer, calling close(), exiting returning false");
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from buffer");
            }
        } else {
            if (arParamLoad((cameraParam ? cameraParam : cparam_name_default), 1, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters %s, calling close(), exiting returning false",
                                   (cameraParam ? cameraParam : cparam_name_default));        
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open():Camera parameters loaded from %s", (cameraParam ? cameraParam : cparam_name_default));
            }
        }
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_ERROR, "*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
	if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to create camera parameters lookup table, calling close(), exiting returning false");
        this->close();
		return false;
	}

	int err = ar2VideoCapStart(gVid);
	if (err != 0) {
        if (err == -2) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error starting video-device unavailable \"%d,\" setting ARW_ERROR_DEVICE_UNAVAILABLE error state", err);
            setError(ARW_ERROR_DEVICE_UNAVAILABLE);
        } else {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error \"%d\" starting video capture", err);
        }
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): calling close(), exiting returning false");
        this->close();
		return false;		
	}

	deviceState = DEVICE_RUNNING;

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): exiting returning true, deviceState = DEVICE_RUNNING, video capture started");
	return true;
}