// Shut down both capture streams (left then right), release the two video
// devices, tear down the AR graphics state, and terminate the process.
static void cleanup(void)
{
    // Stop capture on both devices before closing them.
    ar2VideoCapStop(vidL);
    ar2VideoCapStop(vidR);
    ar2VideoClose(vidL);
    ar2VideoClose(vidR);
    argCleanup();
    exit(0);
}
bool ARToolKitVideoSource::close() { if (deviceState == DEVICE_CLOSED) return true; if (deviceState == DEVICE_RUNNING) { ARController::logv("Stopping video."); int err = ar2VideoCapStop(gVid); if (err != 0) ARController::logv("Error %d stopping video.", err); if (cparamLT) arParamLTFree(&cparamLT); deviceState = DEVICE_OPEN; } frameBuffer = NULL; frameBufferSize = 0; ARController::logv("Closing video."); if (ar2VideoClose(gVid) != 0) ARController::logv("Error closing video."); gVid = NULL; deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened. return true; }
bool ARToolKitVideoSource::close() { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): called"); if (deviceState == DEVICE_CLOSED) { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): if (deviceState == DEVICE_CLOSED) true, exiting returning true"); return true; } if (deviceState == DEVICE_RUNNING) { ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): stopping video, calling ar2VideoCapStop(gVid)"); int err = ar2VideoCapStop(gVid); if (err != 0) ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): Error \"%d\" stopping video", err); if (cparamLT) arParamLTFree(&cparamLT); deviceState = DEVICE_OPEN; } frameBuffer = NULL; frameBuffer2 = NULL; ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): closing video, calling ar2VideoClose(gVid)"); if (ar2VideoClose(gVid) != 0) ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::close(): error closing video"); gVid = NULL; deviceState = DEVICE_CLOSED; // ARToolKit video source is always ready to be opened. ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::close(): exiting returning true"); return true; }
// cleanup(); JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeStop(JNIEnv* env, jobject object)) { #ifdef DEBUG LOGI("nativeStop\n"); #endif int i, j; // Can't call arglCleanup() here, because nativeStop is not called on rendering thread. // Clean up ARToolKit data. if (ar3DHandle) ar3DDeleteHandle(&ar3DHandle); if (arHandle) { arPattDetach(arHandle); arDeleteHandle(arHandle); arHandle = NULL; } arParamLTFree(&gCparamLT); // OpenGL cleanup -- not done here. // Video cleanup. if (gVideoFrame) { free(gVideoFrame); gVideoFrame = NULL; gVideoFrameSize = 0; } ar2VideoClose(gVid); gVid = NULL; videoInited = false; return (true); }
bool AndroidVideoSource::open() { ARController::logv("Opening Android Video Source."); if (deviceState != DEVICE_CLOSED) { ARController::logv("Error: device is already open."); return false; } // On Android, ARVideo doesn't actually provide the frames, but it is needed to handle // fetching of the camera parameters. Note that if the current working directory // isn't already the directory where the camera parametere cache should be created, // then the videoconfiguration should include the option 'cachedir="/path/to/cache"'. gVid = ar2VideoOpen(videoConfiguration); if (!gVid) { ARController::logv("arVideoOpen unable to open connection to camera."); return false; } //ARController::logv("Opened connection to camera."); pixelFormat = ar2VideoGetPixelFormat(gVid); if (pixelFormat == AR_PIXEL_FORMAT_INVALID) { ARController::logv("AndroidVideoSource::getVideoReadyAndroid: Error: No pixel format set.\n"); goto bail; } deviceState = DEVICE_OPEN; return true; bail: ar2VideoClose(gVid); gVid = NULL; return false; }
// Close the module-global video device, if one is open.
// Returns the result of ar2VideoClose(), or -1 if no device was open.
int arVideoClose( void )
{
    if (gVid == NULL) return -1;

    const int result = ar2VideoClose(gVid);
    gVid = NULL; // handle released; a subsequent call safely returns -1
    return result;
}
// cleanup(); JNIEXPORT jboolean JNICALL JNIFUNCTION_NATIVE(nativeStop(JNIEnv* env, jobject object)) { #ifdef DEBUG LOGI("nativeStop\n"); #endif int i, j; // Can't call arglCleanup() here, because nativeStop is not called on rendering thread. // NFT cleanup. if (trackingThreadHandle) { #ifdef DEBUG LOGI("Stopping NFT2 tracking thread."); #endif trackingInitQuit(&trackingThreadHandle); detectedPage = -2; } j = 0; for (i = 0; i < surfaceSetCount; i++) { if (surfaceSet[i]) { #ifdef DEBUG if (j == 0) LOGI("Unloading NFT tracking surfaces."); #endif ar2FreeSurfaceSet(&surfaceSet[i]); // Sets surfaceSet[i] to NULL. j++; } } #ifdef DEBUG if (j > 0) LOGI("Unloaded %d NFT tracking surfaces.", j); #endif surfaceSetCount = 0; nftDataLoaded = false; #ifdef DEBUG LOGI("Cleaning up ARToolKit NFT handles."); #endif ar2DeleteHandle(&ar2Handle); kpmDeleteHandle(&kpmHandle); arParamLTFree(&gCparamLT); // OpenGL cleanup -- not done here. // Video cleanup. if (gVideoFrame) { free(gVideoFrame); gVideoFrame = NULL; gVideoFrameSize = 0; pthread_mutex_destroy(&gVideoFrameLock); } ar2VideoClose(gVid); gVid = NULL; videoInited = false; return (true); }
static void cleanup(void) { // Dispose of movie drawing structures, and stop and close the movie file. arglCleanup(gMovieArglSettings); gMovieArglSettings = NULL; if (gMovieVideo) { ar2VideoCapStop(gMovieVideo); ar2VideoClose(gMovieVideo); } arglCleanup(gArglSettings); gArglSettings = NULL; arPattDetach(gARHandle); arPattDeleteHandle(gARPattHandle); arVideoCapStop(); ar3DDeleteHandle(&gAR3DHandle); arDeleteHandle(gARHandle); arParamLTFree(&gCparamLT); arVideoClose(); }
/* CHECKED * PsychARCloseVideoCaptureDevice() -- Close a capture device and release all associated ressources. */ void PsychARCloseVideoCaptureDevice(int capturehandle) { // Retrieve device record for handle: PsychVidcapRecordType* capdev = PsychGetARVidcapRecord(capturehandle); // Stop capture immediately if it is still running: PsychARVideoCaptureRate(capturehandle, 0, 0, NULL); // Close & Shutdown camera, release ressources: ar2VideoClose(capdev->camera); capdev->camera = NULL; // Invalidate device record to free up this slot in the array: capdev->valid = 0; // Decrease counter of open capture devices: if (numCaptureRecords>0) numCaptureRecords--; // Done. return; }
bool AndroidVideoSource::close() { if (deviceState == DEVICE_CLOSED) return true; if (cparamLT) arParamLTFree(&cparamLT); if (localFrameBuffer) { free(localFrameBuffer->bufPlanes); free(localFrameBuffer); localFrameBuffer = NULL; } if (incomingFrameRawBuffer[0]) { free(incomingFrameRawBuffer[0]); incomingFrameRawBuffer[0] = NULL; } if (incomingFrameRawBuffer[1]) { free(incomingFrameRawBuffer[1]); incomingFrameRawBuffer[1] = NULL; } if (convertedFrameRawBuffer[0]) { free(convertedFrameRawBuffer[0]); convertedFrameRawBuffer[0] = NULL; } if (convertedFrameRawBuffer[1]) { free(convertedFrameRawBuffer[1]); convertedFrameRawBuffer[1] = NULL; } incomingFrameRawBufferSize = 0; convertedFrameRawBufferSize = 0; newFrameArrived = false; ar2VideoClose(gVid); gVid = NULL; frameBuffer = NULL; deviceState = DEVICE_CLOSED; ARController::logv(AR_LOG_LEVEL_INFO, "Android Video Source closed."); return true; }
bool AndroidVideoSource::close() { if (deviceState == DEVICE_CLOSED) return true; if (cparamLT) arParamLTFree(&cparamLT); if (localFrameBuffer) { free(localFrameBuffer); localFrameBuffer = NULL; frameBuffer = NULL; frameBuffer2 = NULL; frameBufferSize = 0; } newFrameArrived = false; ar2VideoClose(gVid); gVid = NULL; deviceState = DEVICE_CLOSED; ARController::logv("Android Video Source closed."); return true; }
// Close the module-global video device, if one is open.
// Returns the result of ar2VideoClose(), or -1 if no device was open.
int arVideoClose( void )
{
    int result;

    if( vid == NULL ) return -1;

    result = ar2VideoClose( vid );
    // Clear the handle so a second arVideoClose() returns -1 instead of
    // closing an already-released device (use-after-free / double-close).
    vid = NULL;
    return result;
}
void CWebCam::Quit(void) { ar2VideoCapStop(ARTVideo); ar2VideoClose(ARTVideo); }