bool AndroidVideoSource::getVideoReadyAndroid2(const ARParam *cparam_p)
{
    // Load camera parameters.
    ARParam cparam;
    if (cparam_p) cparam = *cparam_p;
    else {
        ARController::logv("Unable to automatically determine camera parameters. Using supplied default.\n");
        if (cameraParam) {
            if (arParamLoad(cameraParam, 1, &cparam) < 0) {
                ARController::logv("Error: Unable to load camera parameters from file '%s'.", cameraParam);
                goto bail;
            }
        } else if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv("Error: Unable to load camera parameters from buffer.");
                goto bail;
            }
        } else {
            ARController::logv("Error: video source must be configured before opening.");
            goto bail;
        }
    }
    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv("*** Camera parameters resized from %d, %d. ***", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv("Error: Failed to create camera parameters lookup table.");
        goto bail;
    }

    // Allocate local buffer for video frame after copy or conversion.
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        // Biplanar formats: full-resolution luma plane plus a half-resolution interleaved chroma plane.
        frameBufferSize = videoWidth * videoHeight + 2 * videoWidth/2 * videoHeight/2;
    } else {
        frameBufferSize = videoWidth * videoHeight * arUtilGetPixelSize(pixelFormat);
    }
    localFrameBuffer = (ARUint8 *)calloc(frameBufferSize, sizeof(ARUint8));
    if (!localFrameBuffer) {
        ARController::logv("Error: Unable to allocate memory for local video frame buffer.");
        goto bail;
    }
    frameBuffer = localFrameBuffer;
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        // Second plane (interleaved CbCr) starts immediately after the luma plane.
        frameBuffer2 = localFrameBuffer + videoWidth*videoHeight;
    } else {
        frameBuffer2 = NULL;
    }

    ARController::logv("Android Video Source running %dx%d.", videoWidth, videoHeight);
    deviceState = DEVICE_RUNNING;
    return true;

bail:
    deviceState = DEVICE_OPEN;
    return false;
}
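// Illustrative sketch only (not part of the AndroidVideoSource API): given the biplanar layout
// allocated above for AR_PIXEL_FORMAT_NV21/AR_PIXEL_FORMAT_420f -- a full-resolution luma plane in
// frameBuffer followed by a half-resolution interleaved chroma plane in frameBuffer2 -- this shows
// how a caller might address a single pixel. The helper name and the ordering of the two chroma
// bytes (Cr/Cb for NV21 vs. Cb/Cr for 420f) are assumptions for illustration.
static inline void sampleBiplanarPixel(const ARUint8 *lumaPlane, const ARUint8 *chromaPlane,
                                       int width, int x, int y,
                                       ARUint8 *yOut, ARUint8 *c0Out, ARUint8 *c1Out)
{
    *yOut  = lumaPlane[y*width + x];                         // Luma: one byte per pixel.
    const ARUint8 *c = &chromaPlane[(y/2)*width + (x/2)*2];  // Chroma: one byte pair per 2x2 block; row stride equals width.
    *c0Out = c[0];
    *c1Out = c[1];
}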
bool ARToolKitVideoSource::open()
{
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): called, opening ARToolKit video");

    if (deviceState != DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: device is already open, exiting returning false");
        return false;
    }

    // Open the video path.
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): arVideoOpen unable to open connection to camera using configuration '%s', exiting returning false", videoConfiguration);
        return false;
    }
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Opened connection to camera using configuration '%s'", videoConfiguration);
    deviceState = DEVICE_OPEN;

    // Find the size of the video.
    if (ar2VideoGetSize(gVid, &videoWidth, &videoHeight) < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get video size, calling close(), exiting returning false");
        this->close();
        return false;
    }

    // Get the format in which the camera is returning pixels.
    pixelFormat = ar2VideoGetPixelFormat(gVid);
    if (pixelFormat < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get pixel format, calling close(), exiting returning false");
        this->close();
        return false;
    }
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Video %dx%d@%dBpp (%s)", videoWidth, videoHeight, arUtilGetPixelSize(pixelFormat), arUtilGetPixelFormatName(pixelFormat));

#ifndef _WINRT
    // Translate pixel format into OpenGL texture intformat, format, and type.
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_RGBA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_BGRA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ABGR:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_ABGR_EXT;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ARGB:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
#ifdef AR_BIG_ENDIAN
            glPixType = GL_UNSIGNED_INT_8_8_8_8_REV;
#else
            glPixType = GL_UNSIGNED_INT_8_8_8_8;
#endif
            break;
        case AR_PIXEL_FORMAT_BGR:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_BGR;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            glPixIntFormat = GL_LUMINANCE;
            glPixFormat = GL_LUMINANCE;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB_565:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_SHORT_5_6_5;
            break;
        case AR_PIXEL_FORMAT_RGBA_5551:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_5_5_5_1;
            break;
        case AR_PIXEL_FORMAT_RGBA_4444:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_4_4_4_4;
            break;
        default:
            ARController::logv("Error: Unsupported pixel format.\n");
            this->close();
            return false;
    }
#endif // !_WINRT

#if TARGET_PLATFORM_IOS
    // Tell arVideo what the typical focal distance will be. Note that this does NOT
    // change the actual focus, but on devices with non-fixed focus, it lets arVideo
    // choose a better set of camera parameters.
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_IOS_FOCUS, AR_VIDEO_IOS_FOCUS_0_3M); // Default is 0.3 metres. See <AR/sys/videoiPhone.h> for allowable values.
#endif

    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    // Prefer internal camera parameters.
    if (ar2VideoGetCParam(gVid, &cparam) == 0) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Using internal camera parameters.");
    } else {
        const char cparam_name_default[] = "camera_para.dat"; // Default name for the camera parameters.
        if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to load camera parameters from buffer, calling close(), exiting returning false");
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from buffer");
            }
        } else {
            if (arParamLoad((cameraParam ? cameraParam : cparam_name_default), 1, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to load camera parameters %s, calling close(), exiting returning false", (cameraParam ? cameraParam : cparam_name_default));
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from %s", (cameraParam ? cameraParam : cparam_name_default));
            }
        }
    }
    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_ERROR, "*** Camera parameters resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to create camera parameters lookup table, calling close(), exiting returning false");
        this->close();
        return false;
    }

    int err = ar2VideoCapStart(gVid);
    if (err != 0) {
        if (err == -2) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error starting video: device unavailable \"%d\", setting ARW_ERROR_DEVICE_UNAVAILABLE error state", err);
            setError(ARW_ERROR_DEVICE_UNAVAILABLE);
        } else {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error \"%d\" starting video capture", err);
        }
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): calling close(), exiting returning false");
        this->close();
        return false;
    }

    deviceState = DEVICE_RUNNING;
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): exiting returning true, deviceState = DEVICE_RUNNING, video capture started");
    return true;
}
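// Illustrative sketch only (not ARWrapper's actual upload path): the glPixIntFormat/glPixFormat/
// glPixType triplet chosen in open() above is exactly what an OpenGL texture upload needs. A caller
// holding a bound GL_TEXTURE_2D could push the latest frame roughly like this; the helper name and
// the idea that frameBuffer holds the current frame are assumptions for illustration.
static void uploadFrameToBoundTexture(GLint intFormat, GLenum format, GLenum type,
                                      int width, int height, const ARUint8 *pixels)
{
    // Allocate (or replace) the texture storage and copy the frame in a single call.
    glTexImage2D(GL_TEXTURE_2D, 0, intFormat, width, height, 0, format, type, pixels);
}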
bool AndroidVideoSource::getVideoReadyAndroid(const int width, const int height, const int cameraIndex, const bool cameraIsFrontFacing)
{
    if (deviceState == DEVICE_GETTING_READY) return true; // This path will be exercised if another frame arrives while we're waiting for the callback.
    else if (deviceState != DEVICE_OPEN) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid: Error: device not open.\n");
        return false;
    }
    deviceState = DEVICE_GETTING_READY;

#ifdef DEBUG
    ARController::logv(AR_LOG_LEVEL_DEBUG, "AndroidVideoSource::getVideoReadyAndroid: width=%d, height=%d, cameraIndex=%d, cameraIsFrontFacing=%s.\n", width, height, cameraIndex, (cameraIsFrontFacing ? "true" : "false"));
#endif

    videoWidth = width;
    videoHeight = height;
    gCameraIndex = cameraIndex;
    gCameraIsFrontFacing = cameraIsFrontFacing;

    // Translate pixel format into OpenGL texture intformat, format, and type.
    if (pixelFormat == AR_PIXEL_FORMAT_RGBA) {
        glPixIntFormat = GL_RGBA;
        glPixFormat = GL_RGBA;
        glPixType = GL_UNSIGNED_BYTE;
    } else if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        glPixIntFormat = GL_LUMINANCE; // Use only luma channel.
        glPixFormat = GL_LUMINANCE;
        glPixType = GL_UNSIGNED_BYTE;
    } else {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Unsupported video format '%s'.\n", arUtilGetPixelFormatName(pixelFormat));
        return false;
    }

    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing);
    //ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState);

    // User-supplied camera parameters take precedence (buffer first, then file), then internal,
    // then fetched. Otherwise, a default will be created.
    ARParam cparam;
    if (cameraParamBuffer) {
        if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error: failed to load camera parameters from buffer, calling close(), exiting returning false");
            this->close();
            return false;
        } else {
            ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from buffer.");
            return (getVideoReadyAndroid2(&cparam));
        }
    } else if (cameraParam) {
        if (arParamLoad(cameraParam, 1, &cparam) < 0) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error: failed to load camera parameters from file '%s', calling close(), exiting returning false", cameraParam);
            this->close();
            return false;
        } else {
            ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from file '%s'.", cameraParam);
            return (getVideoReadyAndroid2(&cparam));
        }
    } else {
        // Search the camera parameters database.
        if (ar2VideoGetCParamAsync(gVid, getVideoReadyAndroidCparamCallback, (void *)this) == 0) {
            ARController::logv(AR_LOG_LEVEL_INFO, "Fetching external camera parameters.");
            return true;
        }
        return getVideoReadyAndroid2(NULL);
    }
}
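// Minimal sketch (the callback registered with ar2VideoGetCParamAsync() above is not shown in this
// excerpt): assuming the async fetch delivers its result as (const ARParam *, void *userdata) and
// that getVideoReadyAndroid2() is reachable from here, the callback would simply unwrap the
// AndroidVideoSource instance passed as userdata and hand over the fetched parameters. A NULL
// cparam_p makes getVideoReadyAndroid2() fall back to the supplied defaults, as shown above.
static void getVideoReadyAndroidCparamCallback(const ARParam *cparam_p, void *userdata)
{
    if (!userdata) return;
    AndroidVideoSource *vs = reinterpret_cast<AndroidVideoSource *>(userdata);
    vs->getVideoReadyAndroid2(cparam_p);
}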