bool AndroidVideoSource::getVideoReadyAndroid(const int width, const int height, const int cameraIndex, const bool cameraIsFrontFacing) { char *a, b[1024]; int err_i; if (deviceState == DEVICE_GETTING_READY) return true; else if (deviceState != DEVICE_OPEN) { ARController::logv("AndroidVideoSource::getVideoReadyAndroid: Error: device not open.\n"); return false; } deviceState = DEVICE_GETTING_READY; #ifdef DEBUG ARController::logv("AndroidVideoSource::getVideoReadyAndroid: width=%d, height=%d, cameraIndex=%d, cameraIsFrontFacing=%s.\n", width, height, cameraIndex, (cameraIsFrontFacing ? "true" : "false")); #endif videoWidth = width; videoHeight = height; gCameraIndex = cameraIndex; gCameraIsFrontFacing = cameraIsFrontFacing; if (pixelFormat == AR_PIXEL_FORMAT_RGBA) { glPixIntFormat = GL_RGBA; glPixFormat = GL_RGBA; glPixType = GL_UNSIGNED_BYTE; } else if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) { glPixIntFormat = GL_LUMINANCE; // Use only luma channel. glPixFormat = GL_LUMINANCE; glPixType = GL_UNSIGNED_BYTE; } else { ARController::logv("Unsupported video format '%s'.\n", arUtilGetPixelFormatName(pixelFormat)); return false; } ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing); //ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState); if (ar2VideoGetCParamAsync(gVid, getVideoReadyAndroidCparamCallback, (void *)this) < 0) { ARController::logv("Error getting cparam.\n"); getVideoReadyAndroid2(NULL); } return (true); }
// JNI entry point called from Java once the camera size and facing are known.
// Allocates the native NV21 frame buffer and the AR2VideoBufferT plane
// descriptors, pushes the configuration to the video module, and starts an
// asynchronous camera-parameter fetch. Returns false on allocation failure.
// (Fix: the two calloc() results were dereferenced unchecked; on failure the
// original would crash and leak gVideoFrame. Now checked, with cleanup.)
JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeVideoInit(JNIEnv* env, jobject object, jint w, jint h, jint cameraIndex, jboolean cameraIsFrontFacing))
{
#ifdef DEBUG
    LOGI("nativeVideoInit\n");
#endif
    // As of ARToolKit v5.0, NV21 format video frames are handled natively,
    // and no longer require colour conversion to RGBA. A buffer (gVideoFrame)
    // must be set aside to copy the frame from the Java side.
    // If you still require RGBA format information from the video,
    // you can create your own additional buffer, and then unpack the NV21
    // frames into it in nativeVideoFrame() below.
    // Here is where you'd allocate the buffer:
    // ARUint8 *myRGBABuffer = (ARUint8 *)malloc(videoWidth * videoHeight * 4);
    gPixFormat = AR_PIXEL_FORMAT_NV21;
    // NV21 layout: full-resolution luma plane plus a half-resolution
    // interleaved chroma plane: w*h + 2*(w/2)*(h/2) bytes.
    gVideoFrameSize = (sizeof(ARUint8)*(w*h + 2*w/2*h/2));
    gVideoFrame = (ARUint8 *)malloc(gVideoFrameSize);
    if (!gVideoFrame) {
        gVideoFrameSize = 0;
        LOGE("Error allocating frame buffer");
        return false;
    }
    videoWidth = w;
    videoHeight = h;

    gVideoBuffer = (AR2VideoBufferT *)calloc(1, sizeof(AR2VideoBufferT));
    if (gVideoBuffer) gVideoBuffer->bufPlanes = (ARUint8 **)calloc(2, sizeof(ARUint8 *));
    if (!gVideoBuffer || !gVideoBuffer->bufPlanes) {
        LOGE("Error allocating frame buffer");
        free(gVideoBuffer);
        gVideoBuffer = NULL;
        free(gVideoFrame);
        gVideoFrame = NULL;
        gVideoFrameSize = 0;
        return false;
    }
    gVideoBuffer->bufPlaneCount = 2;
    gVideoBuffer->bufPlanes[0] = gVideoFrame;                             // Luma plane.
    gVideoBuffer->bufPlanes[1] = gVideoFrame + videoWidth*videoHeight;    // Interleaved chroma plane.
    gVideoBuffer->buff = gVideoBuffer->buffLuma = gVideoFrame;
    gCameraIndex = cameraIndex;
    gCameraIsFrontFacing = cameraIsFrontFacing;
    LOGI("Video camera %d (%s), %dx%d format %s, %d-byte buffer.", gCameraIndex, (gCameraIsFrontFacing ? "front" : "rear"), w, h, arUtilGetPixelFormatName(gPixFormat), gVideoFrameSize);

    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_PIXELFORMAT, (int)gPixFormat);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState);

    // Start the async calibration fetch; on immediate failure invoke the
    // callback by hand with no parameters so setup still completes.
    if (ar2VideoGetCParamAsync(gVid, nativeVideoGetCparamCallback, NULL) < 0) {
        LOGE("Error getting cparam.\n");
        nativeVideoGetCparamCallback(NULL, NULL);
    }
    return (true);
}
bool AndroidVideoSource::getVideoReadyAndroid(const int width, const int height, const int cameraIndex, const bool cameraIsFrontFacing) { char *a, b[1024]; int err_i; if (deviceState == DEVICE_GETTING_READY) return true; // This path will be exercised if another frame arrives while we're waiting for the callback. else if (deviceState != DEVICE_OPEN) { ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid: Error: device not open.\n"); return false; } deviceState = DEVICE_GETTING_READY; #ifdef DEBUG ARController::logv(AR_LOG_LEVEL_DEBUG, "AndroidVideoSource::getVideoReadyAndroid: width=%d, height=%d, cameraIndex=%d, cameraIsFrontFacing=%s.\n", width, height, cameraIndex, (cameraIsFrontFacing ? "true" : "false")); #endif videoWidth = width; videoHeight = height; gCameraIndex = cameraIndex; gCameraIsFrontFacing = cameraIsFrontFacing; if (pixelFormat == AR_PIXEL_FORMAT_RGBA) { glPixIntFormat = GL_RGBA; glPixFormat = GL_RGBA; glPixType = GL_UNSIGNED_BYTE; } else if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) { glPixIntFormat = GL_LUMINANCE; // Use only luma channel. glPixFormat = GL_LUMINANCE; glPixType = GL_UNSIGNED_BYTE; } else { ARController::logv(AR_LOG_LEVEL_ERROR, "Unsupported video format '%s'.\n", arUtilGetPixelFormatName(pixelFormat)); return false; } ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex); ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing); //ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState); // User-supplied camera parameters take precedence, then internal, then fetched. Otherwise, a default will be created. 
ARParam cparam; if (cameraParamBuffer) { if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) { ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error-failed to load camera parameters from buffer, calling close(), exiting returning false"); this->close(); return false; } else { ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from buffer."); return (getVideoReadyAndroid2(&cparam)); } } else if (cameraParam) { if (arParamLoad(cameraParam, 1, &cparam) < 0) { ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error-failed to load camera parameters from file '%s', calling close(), exiting returning false", cameraParam); this->close(); return false; } else { ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from file '%s'.", cameraParam); return (getVideoReadyAndroid2(&cparam)); } } else { // Search database. if (ar2VideoGetCParamAsync(gVid, getVideoReadyAndroidCparamCallback, (void *)this) == 0) { ARController::logv(AR_LOG_LEVEL_INFO, "Fetching external camera parameters."); return true; } return getVideoReadyAndroid2(NULL); } }