Example #1
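
This callback appears to be from one of ARToolKit's Android NFT samples. It receives the camera calibration once video capture starts: if no ARParam was supplied it falls back to a default parameter set with a 45-degree vertical field of view, resizes the parameters to match the video frame, builds the lookup table the tracker needs, derives the OpenGL projection matrix, and finally starts NFT data loading on a worker thread.
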
static void nativeVideoGetCparamCallback(const ARParam *cparam_p, void *userdata)
{
	// Load the camera parameters, resize for the window and init.
	ARParam cparam;
	if (cparam_p) cparam = *cparam_p;
	else {
		arParamClearWithFOVy(&cparam, videoWidth, videoHeight, M_PI_4); // M_PI_4 radians = 45 degrees.
		LOGE("Using default camera parameters for %dx%d image size, 45 degrees vertical field-of-view.", videoWidth, videoHeight);
	}
	if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
		LOGI("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
		arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
	}
#ifdef DEBUG
	LOGI("*** Camera Parameter ***\n");
	arParamDisp(&cparam);
#endif
	if ((gCparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
		LOGE("Error: arParamLTCreate.\n");
		return;
	}
	videoInited = true;

	//
	// AR init.
	//
    
	// Create the OpenGL projection from the calibrated camera parameters.
	arglCameraFrustumRHf(&gCparamLT->param, NEAR_PLANE, FAR_PLANE, cameraLens);
	cameraPoseValid = FALSE;

	if (!initNFT(gCparamLT, gPixFormat)) {
		LOGE("Error initialising NFT.\n");
		arParamLTFree(&gCparamLT);
		return;
	}

	// Marker data has already been loaded, so now load NFT data on a second thread.
	nftDataLoadingThreadHandle = threadInit(0, NULL, loadNFTDataAsync);
	if (!nftDataLoadingThreadHandle) {
		LOGE("Error starting NFT loading thread.\n");
		arParamLTFree(&gCparamLT);
		return;
	}
	threadStartSignal(nftDataLoadingThreadHandle);
}
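
A thread created with ARToolKit's threadInit() sits idle until threadStartSignal() is called, and the main loop is expected to poll for completion rather than block. Below is a minimal sketch of what the worker entry point and a per-frame completion check might look like, assuming the thread_sub API from <AR/sys/thread_sub.h>; loadNFTData(), nftDataLoaded, and checkNFTDataLoaded() are hypothetical names standing in for the sample's own loader, flag, and polling hook.

#include <stdbool.h>
#include <AR/sys/thread_sub.h>

// Declarations for names defined elsewhere in the sample (hypothetical here).
extern THREAD_HANDLE_T *nftDataLoadingThreadHandle;
extern void loadNFTData(void);
static bool nftDataLoaded = false;

// Worker entry point passed to threadInit(). threadStartWait() pairs with
// the threadStartSignal() call above; threadEndSignal() is what
// threadGetStatus() and threadWaitQuit() observe on the main thread.
static void *loadNFTDataAsync(THREAD_HANDLE_T *threadHandle)
{
    threadStartWait(threadHandle);  // Block until the main thread signals start.
    loadNFTData();                  // Hypothetical helper that loads the NFT datasets.
    threadEndSignal(threadHandle);  // Mark the work as finished.
    return NULL;
}

// Called once per frame from the main loop to pick up the result.
static void checkNFTDataLoaded(void)
{
    if (nftDataLoadingThreadHandle && threadGetStatus(nftDataLoadingThreadHandle)) {
        threadWaitQuit(nftDataLoadingThreadHandle);  // Join the worker.
        threadFree(&nftDataLoadingThreadHandle);     // Free the handle and NULL the pointer.
        nftDataLoaded = true;                        // Hypothetical "ready" flag.
    }
}
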
Example #2
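
This method appears to be the second stage of video initialisation in the AndroidVideoSource class of ARToolKit's ARWrapper, run once the frame size and pixel format are known. It builds the camera parameter lookup table, allocates a pair of raw buffers for incoming frames, and wraps them in an AR2VideoBufferT: the 2x2-subsampled YUV formats (NV21/420f) are described as two planes in place, while other formats additionally get buffers for pixel-format-converted output. On any failure it frees everything it allocated and drops the device back to the opened-but-not-running state.
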
bool AndroidVideoSource::getVideoReadyAndroid2(const ARParam *cparam_p)
{
    ARParam cparam;

    if (cparam_p) {
        cparam = *cparam_p;
    } else {
        arParamClearWithFOVy(&cparam, videoWidth, videoHeight, M_PI_4); // M_PI_4 radians = 45 degrees.
        ARController::logv(AR_LOG_LEVEL_WARN, "Using default camera parameters for %dx%d image size, 45 degrees vertical field-of-view.", videoWidth, videoHeight);
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_WARN, "*** Camera Parameter resized from %d, %d. ***", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Failed to create camera parameters lookup table.");
        goto bail;
    }

    // Allocate buffers for incoming video frames. For a 2x2-subsampled YUV
    // frame this is the full-resolution luma plane plus an interleaved chroma
    // plane at quarter resolution, i.e. 1.5 bytes per pixel.
    incomingFrameRawBufferSize = videoWidth * videoHeight + 2 * videoWidth/2 * videoHeight/2;
    incomingFrameRawBuffer[0] = (unsigned char *)calloc(incomingFrameRawBufferSize, sizeof(unsigned char));
    incomingFrameRawBuffer[1] = (unsigned char *)calloc(incomingFrameRawBufferSize, sizeof(unsigned char));
    if (!incomingFrameRawBuffer[0] || !incomingFrameRawBuffer[1]) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for incoming frame raw buffer.");
        goto bail;
    }
    
    // Next, an AR2VideoBufferT.
    localFrameBuffer = (AR2VideoBufferT *)calloc(1, sizeof(AR2VideoBufferT));
    if (!localFrameBuffer) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for local video frame buffer");
        goto bail;
    }
    
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        localFrameBuffer->buff = incomingFrameRawBuffer[0];
        localFrameBuffer->buffLuma = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlaneCount = 2;
        localFrameBuffer->bufPlanes = (ARUint8 **)calloc(2, sizeof(ARUint8 *));
        if (!localFrameBuffer->bufPlanes) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for buffer plane pointers.");
            goto bail;
        }
        localFrameBuffer->bufPlanes[0] = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlanes[1] = incomingFrameRawBuffer[0] + videoWidth*videoHeight;
    } else {
        convertedFrameRawBufferSize = videoWidth * videoHeight * arUtilGetPixelSize(pixelFormat);
        convertedFrameRawBuffer[0] = (ARUint8 *)calloc(convertedFrameRawBufferSize, sizeof(ARUint8));
        convertedFrameRawBuffer[1] = (ARUint8 *)calloc(convertedFrameRawBufferSize, sizeof(ARUint8));
        if (!convertedFrameRawBuffer[0] || !convertedFrameRawBuffer[1]) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for converted video frame buffer.");
            goto bail;
        }
        localFrameBuffer->buff = convertedFrameRawBuffer[0];
        localFrameBuffer->buffLuma = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlaneCount = 0;
    }
    
    frameBuffer = localFrameBuffer;

    ARController::logv(AR_LOG_LEVEL_INFO, "Android Video Source running %dx%d.", videoWidth, videoHeight);

    deviceState = DEVICE_RUNNING;
    return true;
    
bail:
    if (localFrameBuffer) {
        free(localFrameBuffer->bufPlanes);
        free(localFrameBuffer);
        localFrameBuffer = NULL;
    }
    if (incomingFrameRawBuffer[0]) {
        free(incomingFrameRawBuffer[0]);
        incomingFrameRawBuffer[0] = NULL;
    }
    if (incomingFrameRawBuffer[1]) {
        free(incomingFrameRawBuffer[1]);
        incomingFrameRawBuffer[1] = NULL;
    }
    if (convertedFrameRawBuffer[0]) {
        free(convertedFrameRawBuffer[0]);
        convertedFrameRawBuffer[0] = NULL;
    }
    if (convertedFrameRawBuffer[1]) {
        free(convertedFrameRawBuffer[1]);
        convertedFrameRawBuffer[1] = NULL;
    }
    incomingFrameRawBufferSize = 0;
    convertedFrameRawBufferSize = 0;
    frameBuffer = NULL;
    
    deviceState = DEVICE_OPEN;
    return false;
}
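
The buffer sizes computed above follow directly from the pixel formats. An NV21/420f frame is a full-resolution 8-bit luma plane followed by one interleaved chroma plane subsampled 2x2, i.e. 1.5 bytes per pixel, while a packed format is simply width times height times its per-pixel size. A small sketch of that arithmetic (the helper name is illustrative, not part of the ARToolKit API):

#include <stddef.h>

// Bytes in one NV21/420f frame: a w*h luma plane plus an interleaved
// CbCr plane carrying two bytes for every 2x2 block of pixels.
static size_t yuv420spFrameSize(size_t w, size_t h)
{
    return w * h + 2 * (w / 2) * (h / 2);  // == w*h*3/2 when w and h are even.
}

// For example, a 640x480 frame needs 307200 luma bytes plus
// 153600 chroma bytes, 460800 bytes in all.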