static void nativeVideoGetCparamCallback(const ARParam *cparam_p, void *userdata) { // Load the camera parameters, resize for the window and init. ARParam cparam; if (cparam_p) cparam = *cparam_p; else { LOGE("Unable to automatically determine camera parameters. Using default.\n"); if (arParamLoad(cparaName, 1, &cparam) < 0) { LOGE("Error: Unable to load parameter file %s for camera.\n", cparaName); return; } } if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) { #ifdef DEBUG LOGI("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize); #endif arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam); } #ifdef DEBUG LOGI("*** Camera Parameter ***\n"); arParamDisp(&cparam); #endif if ((gCparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) { LOGE("Error: arParamLTCreate.\n"); return; } videoInited = true; // // AR init. // // Create the OpenGL projection from the calibrated camera parameters. arglCameraFrustumRHf(&gCparamLT->param, NEAR_PLANE, FAR_PLANE, cameraLens); cameraPoseValid = FALSE; if (!initNFT(gCparamLT, gPixFormat)) { LOGE("Error initialising NFT.\n"); arParamLTFree(&gCparamLT); return; } // Marker data has already been loaded, so now load NFT data on a second thread. nftDataLoadingThreadHandle = threadInit(0, NULL, loadNFTDataAsync); if (!nftDataLoadingThreadHandle) { LOGE("Error starting NFT loading thread.\n"); arParamLTFree(&gCparamLT); return; } threadStartSignal(nftDataLoadingThreadHandle); }
// Releases all tracking resources for the currently active mode.
// ART_PATTERN: detaches and deletes the pattern handle, the 3D handle, the
// main AR handle, and the camera parameter lookup table.
// ART_NFT: deletes the NFT marker list (if any), unloads NFT data, and
// deletes the AR2/KPM handles plus the camera parameter lookup table.
// NOTE(review): call order follows ARToolKit teardown convention — detach
// before delete; do not reorder.
void ofxArtool5::cleanup(){
    if(artMode==ART_PATTERN){
        arPattDetach(gARHandle);
        arPattDeleteHandle(gARPattHandle);
        ar3DDeleteHandle(&gAR3DHandle);
        arDeleteHandle(gARHandle);
        arParamLTFree(&gCparamLT);
    }else if(artMode==ART_NFT){
        // Marker list may never have been loaded, so guard the delete.
        if(markersNFT)deleteMarkers(&markersNFT, &markersNFTCount);
        unloadNFTData();
        ar2DeleteHandle(&ar2Handle);
        kpmDeleteHandle(&kpmHandle);
        arParamLTFree(&gCparamLT);
    }
}
/* cleanup function called when program exits */
static void cleanup(void)
{
    arParamLTFree(&gCparamLT); // free the camera parameter lookup table
    arVideoCapStop();          // stop capture before closing the device
    arVideoClose();
    argCleanup();              // graphics (gsub) teardown
}
bool ARToolKitVideoSource::close() { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): called"); if (deviceState == DEVICE_CLOSED) { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): if (deviceState == DEVICE_CLOSED) true, exiting returning true"); return true; } if (deviceState == DEVICE_RUNNING) { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): stopping video, calling ar2VideoCapStop(gVid)"); int err = ar2VideoCapStop(gVid); if (err != 0) ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): Error \"%d\" stopping video", err); if (cparamLT) arParamLTFree(&cparamLT); deviceState = DEVICE_OPEN; } frameBuffer = NULL; frameBuffer2 = NULL; ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): closing video, calling ar2VideoClose(gVid)"); if (ar2VideoClose(gVid) != 0) ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): error closing video"); gVid = NULL; deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened. ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): exiting returning true"); return true; }
bool ARToolKitVideoSource::close() { if (deviceState == DEVICE_CLOSED) return true; if (deviceState == DEVICE_RUNNING) { ARController::logv("Stopping video."); int err = ar2VideoCapStop(gVid); if (err != 0) ARController::logv("Error %d stopping video.", err); if (cparamLT) arParamLTFree(&cparamLT); deviceState = DEVICE_OPEN; } frameBuffer = NULL; frameBufferSize = 0; ARController::logv("Closing video."); if (ar2VideoClose(gVid) != 0) ARController::logv("Error closing video."); gVid = NULL; deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened. return true; }
// cleanup(); JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeStop(JNIEnv* env, jobject object)) { #ifdef DEBUG LOGI("nativeStop\n"); #endif int i, j; // Can't call arglCleanup() here, because nativeStop is not called on rendering thread. // Clean up ARToolKit data. if (ar3DHandle) ar3DDeleteHandle(&ar3DHandle); if (arHandle) { arPattDetach(arHandle); arDeleteHandle(arHandle); arHandle = NULL; } arParamLTFree(&gCparamLT); // OpenGL cleanup -- not done here. // Video cleanup. if (gVideoFrame) { free(gVideoFrame); gVideoFrame = NULL; gVideoFrameSize = 0; } ar2VideoClose(gVid); gVid = NULL; videoInited = false; return (true); }
// cleanup(); JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeStop(JNIEnv* env, jobject object)) { #ifdef DEBUG LOGI("nativeStop\n"); #endif int i, j; // Can't call arglCleanup() here, because nativeStop is not called on rendering thread. // NFT cleanup. if (trackingThreadHandle) { #ifdef DEBUG LOGI("Stopping NFT2 tracking thread."); #endif trackingInitQuit(&trackingThreadHandle); detectedPage = -2; } j = 0; for (i = 0; i < surfaceSetCount; i++) { if (surfaceSet[i]) { #ifdef DEBUG if (j == 0) LOGI("Unloading NFT tracking surfaces."); #endif ar2FreeSurfaceSet(&surfaceSet[i]); // Sets surfaceSet[i] to NULL. j++; } } #ifdef DEBUG if (j > 0) LOGI("Unloaded %d NFT tracking surfaces.", j); #endif surfaceSetCount = 0; nftDataLoaded = false; #ifdef DEBUG LOGI("Cleaning up ARToolKit NFT handles."); #endif ar2DeleteHandle(&ar2Handle); kpmDeleteHandle(&kpmHandle); arParamLTFree(&gCparamLT); // OpenGL cleanup -- not done here. // Video cleanup. if (gVideoFrame) { free(gVideoFrame); gVideoFrame = NULL; gVideoFrameSize = 0; pthread_mutex_destroy(&gVideoFrameLock); } ar2VideoClose(gVid); gVid = NULL; videoInited = false; return (true); }
// Full teardown for the square-tracking sample: OpenGL drawing state, pattern
// and tracking handles, camera parameters, and finally the video device.
// NOTE(review): call order matters — capture is stopped before the tracking
// handles are deleted, and the device is closed last; do not reorder.
static void cleanup(void)
{
    arglCleanup(gArglSettings);
    gArglSettings = NULL;
    arPattDetach(gARHandle);
    arPattDeleteHandle(gARPattHandle);
    arVideoCapStop();
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);
    arVideoClose();
}
// Destructor: releases pattern/3D/AR tracking handles, the camera parameter
// lookup table, the video device, and the loaded OBJ model (if any).
// NOTE(review): assumes construction succeeded and the handles are valid;
// teardown order mirrors the ARToolKit samples — do not reorder.
ARTApp::~ARTApp()
{
    arPattDetach(arHandle);
    arPattDeleteHandle(pattHandle);
    arVideoCapStop();
    ar3DDeleteHandle(&ar3DHandle);
    arDeleteHandle(arHandle);
    arParamLTFree(&cParam);
    arVideoClose();
    if (objModel)
        glmDelete(objModel); // GLM model loaded during init, if any
}
// Teardown for the object-tracking sample: pattern data (only if object data
// was loaded), OpenGL state, tracking handles, camera parameters, and the
// video device.
static void cleanup(void)
{
    // The pattern handle lives on gObjectData, so only detach/delete when
    // object data was actually loaded.
    if (gObjectData) {
        arPattDetach(gARHandle);
        arPattDeleteHandle(gObjectData->pattHandle);
    }
    arglCleanup(gArglSettings);
    gArglSettings = NULL;
    arVideoCapStop();
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);
    arVideoClose();
#ifdef _WIN32
    // NOTE(review): presumably balances a CoInitialize() done at startup for
    // the Windows video backend — confirm against the init path.
    CoUninitialize();
#endif
}
// Teardown for the movie-overlay sample: movie playback resources first, then
// the standard camera/tracking/OpenGL teardown.
static void cleanup(void)
{
    // Dispose of movie drawing structures, and stop and close the movie file.
    arglCleanup(gMovieArglSettings);
    gMovieArglSettings = NULL;
    if (gMovieVideo) {
        ar2VideoCapStop(gMovieVideo);
        ar2VideoClose(gMovieVideo);
    }
    // Camera/tracking teardown (order matters; mirrors the other samples).
    arglCleanup(gArglSettings);
    gArglSettings = NULL;
    arPattDetach(gARHandle);
    arPattDeleteHandle(gARPattHandle);
    arVideoCapStop();
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);
    arVideoClose();
}
// Teardown in dependency order: virtual environment, NFT markers and data,
// NFT/tracking handles, OpenGL state, then the camera.
static void cleanup(void)
{
    VirtualEnvironmentFinal();

    if (markersNFT) deleteMarkers(&markersNFT, &markersNFTCount);

    // NFT cleanup.
    unloadNFTData();
    ARLOGd("Cleaning up ARToolKit NFT handles.\n");
    ar2DeleteHandle(&ar2Handle);
    kpmDeleteHandle(&kpmHandle);
    arParamLTFree(&gCparamLT);

    // OpenGL cleanup.
    arglCleanup(gArglSettings);
    gArglSettings = NULL;

    // Camera cleanup.
    arVideoCapStop();
    arVideoClose();
}
bool AndroidVideoSource::close() { if (deviceState == DEVICE_CLOSED) return true; if (cparamLT) arParamLTFree(&cparamLT); if (localFrameBuffer) { free(localFrameBuffer->bufPlanes); free(localFrameBuffer); localFrameBuffer = NULL; } if (incomingFrameRawBuffer[0]) { free(incomingFrameRawBuffer[0]); incomingFrameRawBuffer[0] = NULL; } if (incomingFrameRawBuffer[1]) { free(incomingFrameRawBuffer[1]); incomingFrameRawBuffer[1] = NULL; } if (convertedFrameRawBuffer[0]) { free(convertedFrameRawBuffer[0]); convertedFrameRawBuffer[0] = NULL; } if (convertedFrameRawBuffer[1]) { free(convertedFrameRawBuffer[1]); convertedFrameRawBuffer[1] = NULL; } incomingFrameRawBufferSize = 0; convertedFrameRawBufferSize = 0; newFrameArrived = false; ar2VideoClose(gVid); gVid = NULL; frameBuffer = NULL; deviceState = DEVICE_CLOSED; ARController::logv(AR_LOG_LEVEL_INFO, "Android Video Source closed."); return true; }
// Closes the Android video source: frees the camera parameter lookup table
// and the local frame buffer, closes the video module, and marks the device
// closed. Idempotent; always returns true.
bool AndroidVideoSource::close()
{
    if (deviceState == DEVICE_CLOSED) return true;
    if (cparamLT) arParamLTFree(&cparamLT);
    if (localFrameBuffer) {
        free(localFrameBuffer);
        localFrameBuffer = NULL;
        // NOTE(review): frameBuffer/frameBuffer2 presumably point into
        // localFrameBuffer, so they are cleared alongside it — confirm.
        frameBuffer = NULL;
        frameBuffer2 = NULL;
        frameBufferSize = 0;
    }
    newFrameArrived = false;
    ar2VideoClose(gVid);
    gVid = NULL;
    deviceState = DEVICE_CLOSED;
    ARController::logv("Android Video Source closed.");
    return true;
}
// Teardown for the NFT sample: markers and NFT data, tracking handles,
// OpenGL state, then the camera.
static void cleanup(void)
{
    if (markersNFT) deleteMarkers(&markersNFT, &markersNFTCount);

    // NFT cleanup.
    unloadNFTData();
    ARLOGd("Cleaning up ARToolKit NFT handles.\n");
    ar2DeleteHandle(&ar2Handle);
    kpmDeleteHandle(&kpmHandle);
    arParamLTFree(&gCparamLT);

    // OpenGL cleanup.
    arglCleanup(gArglSettings);
    gArglSettings = NULL;

    // Camera cleanup.
    arVideoCapStop();
    arVideoClose();
#ifdef _WIN32
    // NOTE(review): presumably balances a CoInitialize() done at startup for
    // the Windows video backend — confirm against the init path.
    CoUninitialize();
#endif
}
static void cleanup(void) { int i; arglCleanup(gArglSettings); gArglSettings = NULL; arPattDetach(gARHandle); for (i = 0; i < gMultiConfigCount; i++) { arMultiFreeConfig(gMultiConfigs[i]); } if (gARPattHandle) arPattDeleteHandle(gARPattHandle); arVideoCapStop(); arDeleteHandle(gARHandle); arParamLTFree(&gCparamLT); arVideoClose(); }
// Teardown for the square-marker sample with a virtual environment: the
// environment first, then markers, tracking handles, OpenGL state, and the
// camera.
static void cleanup(void)
{
    VirtualEnvironmentFinal();

    if (markersSquare) deleteMarkers(&markersSquare, &markersSquareCount, gARPattHandle);

    // Tracking cleanup. Pattern handle may never have been created, so guard it.
    if (gARPattHandle) {
        arPattDetach(gARHandle);
        arPattDeleteHandle(gARPattHandle);
    }
    ar3DDeleteHandle(&gAR3DHandle);
    arDeleteHandle(gARHandle);
    arParamLTFree(&gCparamLT);

    // OpenGL cleanup.
    arglCleanup(gArglSettings);
    gArglSettings = NULL;

    // Camera cleanup.
    arVideoCapStop();
    arVideoClose();
}