EXPORT_API bool arwGetVideoParamsStereo(int *widthL, int *heightL, int *pixelSizeL, char *pixelFormatStringBufferL, int pixelFormatStringBufferLenL, int *widthR, int *heightR, int *pixelSizeR, char *pixelFormatStringBufferR, int pixelFormatStringBufferLenR)
{
    AR_PIXEL_FORMAT pfL, pfR;
    
    if (!gARTK) return false;
    if (!gARTK->videoParameters(0, widthL, heightL, &pfL)) return false;
    if (!gARTK->videoParameters(1, widthR, heightR, &pfR)) return false;
    if (pixelSizeL) *pixelSizeL = arUtilGetPixelSize(pfL);
    if (pixelSizeR) *pixelSizeR = arUtilGetPixelSize(pfR);
    if (pixelFormatStringBufferL && pixelFormatStringBufferLenL > 0) {
        strncpy(pixelFormatStringBufferL, arUtilGetPixelFormatName(pfL), pixelFormatStringBufferLenL);
        pixelFormatStringBufferL[pixelFormatStringBufferLenL - 1] = '\0'; // guarantee nul termination.
    }
    if (pixelFormatStringBufferR && pixelFormatStringBufferLenR > 0) {
        strncpy(pixelFormatStringBufferR, arUtilGetPixelFormatName(pfR), pixelFormatStringBufferLenR);
        pixelFormatStringBufferR[pixelFormatStringBufferLenR - 1] = '\0'; // guarantee nul termination.
    }
    return true;
}
EXPORT_API bool arwGetVideoParams(int *width, int *height, int *pixelSize, char *pixelFormatStringBuffer, int pixelFormatStringBufferLen)
{
    AR_PIXEL_FORMAT pf;
    
    if (!gARTK) return false;
    if (!gARTK->videoParameters(0, width, height, &pf)) return false;
    if (pixelSize) *pixelSize = arUtilGetPixelSize(pf);
    if (pixelFormatStringBuffer && pixelFormatStringBufferLen > 0) {
        strncpy(pixelFormatStringBuffer, arUtilGetPixelFormatName(pf), pixelFormatStringBufferLen);
        pixelFormatStringBuffer[pixelFormatStringBufferLen - 1] = '\0'; // guarantee nul termination.
    }
    return true;
}
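For context, here is a minimal sketch of how a caller might use arwGetVideoParams(). The printVideoParams name and the 64-byte buffer are illustrative assumptions, and the wrapper is assumed to have been initialised and started (e.g. via arwStartRunning()) before this is called:

#include <stdio.h>

// Hypothetical caller: query the video stream's geometry and pixel format.
// arwGetVideoParams() returns false until gARTK exists and video parameters
// are available, so polling it like this is safe at any point in the lifecycle.
static void printVideoParams(void)
{
    int width, height, pixelSize;
    char pixelFormatName[64]; // Assumed size; the wrapper nul-terminates for us.

    if (arwGetVideoParams(&width, &height, &pixelSize, pixelFormatName, (int)sizeof(pixelFormatName))) {
        printf("Video is %dx%d at %d bytes/pixel (%s).\n", width, height, pixelSize, pixelFormatName);
    } else {
        printf("Video parameters not available yet.\n");
    }
}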
Example #3
bool AndroidVideoSource::getVideoReadyAndroid(const int width, const int height, const int cameraIndex, const bool cameraIsFrontFacing) {
	
    if (deviceState == DEVICE_GETTING_READY) return true;
    else if (deviceState != DEVICE_OPEN) {
        ARController::logv("AndroidVideoSource::getVideoReadyAndroid: Error: device not open.\n");
        return false;
    }
    deviceState = DEVICE_GETTING_READY;

#ifdef DEBUG
    ARController::logv("AndroidVideoSource::getVideoReadyAndroid: width=%d, height=%d, cameraIndex=%d, cameraIsFrontFacing=%s.\n", width, height, cameraIndex, (cameraIsFrontFacing ? "true" : "false"));
#endif
    
    videoWidth = width;
    videoHeight = height;
    gCameraIndex = cameraIndex;
    gCameraIsFrontFacing = cameraIsFrontFacing;

    if (pixelFormat == AR_PIXEL_FORMAT_RGBA) {
        glPixIntFormat = GL_RGBA;
        glPixFormat = GL_RGBA;
        glPixType = GL_UNSIGNED_BYTE;
    } else if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        glPixIntFormat = GL_LUMINANCE; // Use only luma channel.
        glPixFormat = GL_LUMINANCE;
        glPixType = GL_UNSIGNED_BYTE;
    } else {
        ARController::logv("Unsupported video format '%s'.\n", arUtilGetPixelFormatName(pixelFormat));
        return false;
    }
    
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing);
    //ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState);

    if (ar2VideoGetCParamAsync(gVid, getVideoReadyAndroidCparamCallback, (void *)this) < 0) {
        ARController::logv("Error getting cparam.\n");
        getVideoReadyAndroid2(NULL);
    }
    
    return (true);
}
Example #4
JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeVideoInit(JNIEnv * env, jobject object, jint w, jint h, jint cameraIndex, jboolean cameraIsFrontFacing))
{
    LOGD("nativeVideoInit\n");

    // As of ARToolKit v5.0, NV21 format video frames are handled natively,
    // and no longer require color conversion to RGBA. A buffer (gVideoFrame)
    // must be set aside to copy the frame from the Java side.
    // If you still require RGBA format information from the video,
    // you can create your own additional buffer, and then unpack the NV21
    // frames into it in nativeVideoFrame() below.
    // Here is where you'd allocate the buffer:
    // ARUint8 *myRGBABuffer = (ARUint8 *)malloc(videoWidth * videoHeight * 4);
    gPixFormat      = AR_PIXEL_FORMAT_NV21;
    gVideoFrameSize = (sizeof(ARUint8) * (w * h + 2 * w / 2 * h / 2));
    gVideoFrame     = (ARUint8*) (malloc(gVideoFrameSize));
    if (!gVideoFrame)
    {
        gVideoFrameSize = 0;
        LOGE("Error allocating frame buffer");
        return JNI_FALSE;
    }

    g_videoWidth         = w;
    g_videoHeight        = h;
    gCameraIndex         = cameraIndex;
    gCameraIsFrontFacing = cameraIsFrontFacing;
    LOGI("Video camera %d (%s), %dx%d format %s, %d-byte buffer.",
         gCameraIndex, (gCameraIsFrontFacing ? "front" : "rear"), w, h, arUtilGetPixelFormatName(gPixFormat), gVideoFrameSize);

    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_WIDTH, g_videoWidth);
    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_HEIGHT, g_videoHeight);
    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_PIXELFORMAT, (int)gPixFormat);
    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex);
    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing);
    ar2VideoSetParami(g_Vid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState);

    if (ar2VideoGetCParamAsync(g_Vid, nativeVideoGetCparamCallback, NULL) < 0)
    {
        LOGE("Error getting cparam.\n");
        nativeVideoGetCparamCallback(NULL, NULL);
    }

    return JNI_TRUE;
}
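As the comments in nativeVideoInit() note, NV21 frames are handled natively, but an RGBA copy can still be produced by hand in nativeVideoFrame(). A minimal sketch of such an unpack follows, writing into the hypothetical myRGBABuffer mentioned above; the integer BT.601-style coefficients are one common choice, not a requirement:

// Hypothetical NV21 -> RGBA unpack, per the comment in nativeVideoInit().
// 'src' is a w*h luma plane followed by an interleaved V,U plane (NV21 order).
// Assumes even w and h, as camera frame sizes invariably are.
static void unpackNV21ToRGBA(const ARUint8 *src, int w, int h, ARUint8 *dstRGBA)
{
    const ARUint8 *yPlane  = src;
    const ARUint8 *vuPlane = src + w * h;

    for (int j = 0; j < h; j++) {
        for (int i = 0; i < w; i++) {
            int y = yPlane[j * w + i];
            int vuIndex = (j / 2) * w + (i & ~1); // One V,U pair per 2x2 block.
            int v = vuPlane[vuIndex] - 128;
            int u = vuPlane[vuIndex + 1] - 128;

            int r = y + ((359 * v) >> 8);          // 1.402 * V
            int g = y - ((88 * u + 183 * v) >> 8); // 0.344 * U + 0.714 * V
            int b = y + ((454 * u) >> 8);          // 1.772 * U

            ARUint8 *out = dstRGBA + 4 * (j * w + i);
            out[0] = (ARUint8)(r < 0 ? 0 : (r > 255 ? 255 : r));
            out[1] = (ARUint8)(g < 0 ? 0 : (g > 255 ? 255 : g));
            out[2] = (ARUint8)(b < 0 ? 0 : (b > 255 ? 255 : b));
            out[3] = 255; // Opaque alpha.
        }
    }
}

From nativeVideoFrame() this would be called as unpackNV21ToRGBA(gVideoFrame, g_videoWidth, g_videoHeight, myRGBABuffer).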
Example #5
bool ARToolKitVideoSource::open() {
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): called, opening ARToolKit video");
    
    if (deviceState != DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: device is already open, exiting returning false");
        return false;
    }

    // Open the video path.
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): ar2VideoOpen unable to open connection to camera using configuration '%s', exiting returning false", videoConfiguration);
        return false;
    }

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Opened connection to camera using configuration '%s'", videoConfiguration);
    deviceState = DEVICE_OPEN;

    // Find the size of the video.
    if (ar2VideoGetSize(gVid, &videoWidth, &videoHeight) < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get video size, calling close(), exiting returning false");
        this->close();
        return false;
    }

    // Get the format in which the camera is returning pixels.
    pixelFormat = ar2VideoGetPixelFormat(gVid);
    if (pixelFormat < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get pixel format, calling close(), exiting returning false");
        this->close();
        return false;
    }
    
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Video %dx%d@%dBpp (%s)", videoWidth, videoHeight, arUtilGetPixelSize(pixelFormat), arUtilGetPixelFormatName(pixelFormat));

#ifndef _WINRT
    // Translate pixel format into OpenGL texture intformat, format, and type.
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_RGBA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_BGRA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ABGR:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_ABGR_EXT;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ARGB:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
#ifdef AR_BIG_ENDIAN
            glPixType = GL_UNSIGNED_INT_8_8_8_8_REV;
#else
            glPixType = GL_UNSIGNED_INT_8_8_8_8;
#endif
            break;
        case AR_PIXEL_FORMAT_BGR:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_BGR;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            glPixIntFormat = GL_LUMINANCE;
            glPixFormat = GL_LUMINANCE;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB_565:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_SHORT_5_6_5;
            break;
        case AR_PIXEL_FORMAT_RGBA_5551:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_5_5_5_1;
            break;
        case AR_PIXEL_FORMAT_RGBA_4444:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_4_4_4_4;
            break;
        default:
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unsupported pixel format.\n");
            this->close();
            return false;
    }
#endif // !_WINRT

#if TARGET_PLATFORM_IOS
    // Tell arVideo what the typical focal distance will be. Note that this does NOT
    // change the actual focus, but on devices with non-fixed focus, it lets arVideo
    // choose a better set of camera parameters.
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_IOS_FOCUS, AR_VIDEO_IOS_FOCUS_0_3M); // Default is 0.3 metres. See <AR/sys/videoiPhone.h> for allowable values.
#endif
    
    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    // Prefer internal camera parameters.
    if (ar2VideoGetCParam(gVid, &cparam) == 0) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Using internal camera parameters.");
    } else {
        const char cparam_name_default[] = "camera_para.dat"; // Default name for the camera parameters.
        if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to load camera parameters from buffer, calling close(), exiting returning false");
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from buffer");
            }
        } else {
            if (arParamLoad((cameraParam ? cameraParam : cparam_name_default), 1, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to load camera parameters %s, calling close(), exiting returning false",
                                   (cameraParam ? cameraParam : cparam_name_default));
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from %s", (cameraParam ? cameraParam : cparam_name_default));
            }
        }
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_ERROR, "*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: failed to create camera parameters lookup table, calling close(), exiting returning false");
        this->close();
        return false;
    }

    int err = ar2VideoCapStart(gVid);
    if (err != 0) {
        if (err == -2) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error %d starting video capture: device unavailable, setting ARW_ERROR_DEVICE_UNAVAILABLE error state", err);
            setError(ARW_ERROR_DEVICE_UNAVAILABLE);
        } else {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error %d starting video capture", err);
        }
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): calling close(), exiting returning false");
        this->close();
        return false;
    }

    deviceState = DEVICE_RUNNING;

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): exiting returning true, deviceState = DEVICE_RUNNING, video capture started");
    return true;
}
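The close() that open() leans on is not part of this listing, so the following is only a sketch of the teardown it presumably performs, built from the member names used above (gVid, cparamLT, deviceState) and the standard ar2Video teardown calls:

// Sketch of the teardown mirroring open(): stop capture, free the parameter
// lookup table, and release the video device, in that order.
bool ARToolKitVideoSource::close() {
    if (deviceState == DEVICE_RUNNING) {
        ar2VideoCapStop(gVid);    // Stop frame delivery before releasing anything.
    }
    if (cparamLT) {
        arParamLTFree(&cparamLT); // Free the camera parameter lookup table.
    }
    if (gVid) {
        ar2VideoClose(gVid);      // Release the video device.
        gVid = NULL;
    }
    deviceState = DEVICE_CLOSED;
    return true;
}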
Example #6
bool AndroidVideoSource::getVideoReadyAndroid(const int width, const int height, const int cameraIndex, const bool cameraIsFrontFacing) {
	
    if (deviceState == DEVICE_GETTING_READY) return true; // This path will be exercised if another frame arrives while we're waiting for the callback.
    else if (deviceState != DEVICE_OPEN) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid: Error: device not open.\n");
        return false;
    }
    deviceState = DEVICE_GETTING_READY;

#ifdef DEBUG
    ARController::logv(AR_LOG_LEVEL_DEBUG, "AndroidVideoSource::getVideoReadyAndroid: width=%d, height=%d, cameraIndex=%d, cameraIsFrontFacing=%s.\n", width, height, cameraIndex, (cameraIsFrontFacing ? "true" : "false"));
#endif
    
    videoWidth = width;
    videoHeight = height;
    gCameraIndex = cameraIndex;
    gCameraIsFrontFacing = cameraIsFrontFacing;

    if (pixelFormat == AR_PIXEL_FORMAT_RGBA) {
        glPixIntFormat = GL_RGBA;
        glPixFormat = GL_RGBA;
        glPixType = GL_UNSIGNED_BYTE;
    } else if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        glPixIntFormat = GL_LUMINANCE; // Use only luma channel.
        glPixFormat = GL_LUMINANCE;
        glPixType = GL_UNSIGNED_BYTE;
    } else {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Unsupported video format '%s'.\n", arUtilGetPixelFormatName(pixelFormat));
        return false;
    }
    
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_WIDTH, videoWidth);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_HEIGHT, videoHeight);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_INDEX, gCameraIndex);
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_CAMERA_FACE, gCameraIsFrontFacing);
    //ar2VideoSetParami(gVid, AR_VIDEO_PARAM_ANDROID_INTERNET_STATE, gInternetState);

    // User-supplied camera parameters take precedence, then internal, then fetched. Otherwise, a default will be created.
    ARParam cparam;
    if (cameraParamBuffer) {
        if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error: failed to load camera parameters from buffer, calling close(), exiting returning false");
            this->close();
            return false;
        } else {
            ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from buffer.");
            return (getVideoReadyAndroid2(&cparam));
        }
    } else if (cameraParam) {
        if (arParamLoad(cameraParam, 1, &cparam) < 0) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "AndroidVideoSource::getVideoReadyAndroid(): error: failed to load camera parameters from file '%s', calling close(), exiting returning false", cameraParam);
            this->close();
            return false;
        } else {
            ARController::logv(AR_LOG_LEVEL_INFO, "Camera parameters loaded from file '%s'.", cameraParam);
            return (getVideoReadyAndroid2(&cparam));
        }
    } else {
        // Search database.
        if (ar2VideoGetCParamAsync(gVid, getVideoReadyAndroidCparamCallback, (void *)this) == 0) {
            ARController::logv(AR_LOG_LEVEL_INFO, "Fetching external camera parameters.");
            return true;
        }
        return getVideoReadyAndroid2(NULL);
    }
}
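The asynchronous branch above completes in getVideoReadyAndroidCparamCallback, whose body is not shown in this listing. Given that it is registered with this as the userdata pointer, it is presumably a static trampoline along these lines; the exact signature here is an assumption matching how ar2VideoGetCParamAsync() is called above:

// Hypothetical shape of the callback handed to ar2VideoGetCParamAsync():
// 'cparam_p' is the fetched camera parameter set, or NULL if the fetch failed;
// 'userdata' carries the AndroidVideoSource instance passed at registration.
static void getVideoReadyAndroidCparamCallback(const ARParam *cparam_p, void *userdata)
{
    AndroidVideoSource *vs = reinterpret_cast<AndroidVideoSource *>(userdata);
    if (!vs) return;
    vs->getVideoReadyAndroid2(cparam_p); // NULL falls through to default parameters.
}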
Example #7
ARGL_CONTEXT_SETTINGS_REF arglSetupForCurrentContext(ARParam *cparam, AR_PIXEL_FORMAT pixelFormat)
{
    ARGL_CONTEXT_SETTINGS_REF contextSettings;

    contextSettings = (ARGL_CONTEXT_SETTINGS_REF)calloc(1, sizeof(ARGL_CONTEXT_SETTINGS));
    if (!contextSettings) return (NULL); // calloc can fail; don't dereference NULL.
    contextSettings->arParam = *cparam; // Copy it.
    contextSettings->arhandle = NULL;
	
    // Use the supplied pixel format.
    // This sets pixIntFormat, pixFormat, pixType, pixSize, and sets initPlease.
    if (!arglPixelFormatSet(contextSettings, pixelFormat)) {
        ARLOGe("arglSetupForCurrentContext() Error: Unknown default pixel format %s (%d).\n", arUtilGetPixelFormatName(pixelFormat), pixelFormat);
        free(contextSettings);
        return (NULL);
    }

    arglDrawModeSet(contextSettings, AR_DRAW_BY_TEXTURE_MAPPING);
    arglTexmapModeSet(contextSettings, AR_DRAW_TEXTURE_FULL_IMAGE);
    arglTexRectangleSet(contextSettings, TRUE);

    return (contextSettings);
}
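Typical usage of the returned settings, sketched on the assumption of ARToolKit 5's argl API (arglPixelBufferDataUpload(), arglDispImage(), arglCleanup()); frameBuffer stands in for whatever ARUint8 * frame the video source delivers:

// Hypothetical per-frame use of the context settings created above.
// Setup happens once per GL context; each frame is uploaded and drawn
// as the video background, and everything is freed with the context.
ARGL_CONTEXT_SETTINGS_REF argl = arglSetupForCurrentContext(&cparam, pixelFormat);
if (argl) {
    arglPixelBufferDataUpload(argl, frameBuffer); // Push the latest video frame.
    arglDispImage(argl);                          // Draw it as the background.
    // ... render 3D content over the top ...
    arglCleanup(argl);                            // When the GL context goes away.
}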