void CameraHandler::applyProperties(CameraHandler** ppcameraHandler) { LOGD("CameraHandler::applyProperties()"); if (NULL == ppcameraHandler) { LOGE("applyProperties: Passed NULL ppcameraHandler"); return; } if (NULL == *ppcameraHandler) { LOGE("applyProperties: Passed null *ppcameraHandler"); return; } LOGD("CameraHandler::applyProperties()"); #if !defined(ANDROID_r2_2_0) LOGD("Reconnect camera"); (*ppcameraHandler)->camera->reconnect(); (*ppcameraHandler)->params = (*ppcameraHandler)->camera->getParameters(); #else CameraHandler* previousCameraHandler = *ppcameraHandler; CameraParameters curCameraParameters(previousCameraHandler->params.flatten()); CameraCallback cameraCallback = previousCameraHandler->cameraCallback; void* userData = previousCameraHandler->userData; int cameraId = previousCameraHandler->cameraId; LOGD("CameraHandler::applyProperties(): before previousCameraHandler->closeCameraConnect"); previousCameraHandler->closeCameraConnect(); LOGD("CameraHandler::applyProperties(): after previousCameraHandler->closeCameraConnect"); LOGD("CameraHandler::applyProperties(): before initCameraConnect"); CameraHandler* handler = initCameraConnect(cameraCallback, cameraId, userData, &curCameraParameters); LOGD("CameraHandler::applyProperties(): after initCameraConnect, handler=0x%x", (int)handler); if (handler == NULL) { LOGE("ERROR in applyProperties --- cannot reinit camera"); handler = initCameraConnect(cameraCallback, cameraId, userData, NULL); LOGD("CameraHandler::applyProperties(): repeate initCameraConnect after ERROR, handler=0x%x", (int)handler); if (handler == NULL) { LOGE("ERROR in applyProperties --- cannot reinit camera AGAIN --- cannot do anything else"); } } (*ppcameraHandler) = handler; #endif }
/**
 * Connects to the Android CameraService via libcamera_client.so, resolving
 * the release-specific Camera::connect entry point at runtime with dlsym
 * (the mangled signature changed between the 2.2, 2.3 and 3D-capable
 * builds).  On success a CameraHandler is created, registered as the
 * camera listener, configured (focus mode, preview format, preview sink,
 * callback flags), and the preview is started.
 *
 * @param callback             frame callback invoked for each preview frame
 * @param cameraId             index of the camera to open (ignored by the
 *                             argument-less 2.2 connect API)
 * @param userData             opaque pointer passed back to the callback
 * @param prevCameraParameters if non-NULL, parameters to restore instead of
 *                             querying (and tuning) the camera defaults
 * @return a new CameraHandler, or NULL/0 if connection or preview startup
 *         failed
 */
CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback, int cameraId, void* userData, CameraParameters* prevCameraParameters)
{
    // Function-pointer types matching the differently mangled
    // Camera::connect overloads across Android releases.
    typedef sp<Camera> (*Android22ConnectFuncType)();
    typedef sp<Camera> (*Android23ConnectFuncType)(int);
    typedef sp<Camera> (*Android3DConnectFuncType)(int, int);

    enum {
        CAMERA_SUPPORT_MODE_2D = 0x01, /* Camera Sensor supports 2D mode. */
        CAMERA_SUPPORT_MODE_3D = 0x02, /* Camera Sensor supports 3D mode. */
        CAMERA_SUPPORT_MODE_NONZSL = 0x04, /* Camera Sensor in NON-ZSL mode. */
        CAMERA_SUPPORT_MODE_ZSL = 0x08 /* Camera Sensor supports ZSL mode. */
    };

    // Mangled symbol names of the three known Camera::connect variants.
    const char Android22ConnectName[] = "_ZN7android6Camera7connectEv";
    const char Android23ConnectName[] = "_ZN7android6Camera7connectEi";
    const char Android3DConnectName[] = "_ZN7android6Camera7connectEii";

    LOGD("CameraHandler::initCameraConnect(%p, %d, %p, %p)", callback, cameraId, userData, prevCameraParameters);

    sp<Camera> camera = 0;

    void* CameraHALHandle = dlopen("libcamera_client.so", RTLD_LAZY);
    if (!CameraHALHandle)
    {
        LOGE("Cannot link to \"libcamera_client.so\"");
        return NULL;
    }

    // reset errors
    dlerror();

    // Probe for the connect symbol, oldest ABI first.
    if (Android22ConnectFuncType Android22Connect = (Android22ConnectFuncType)dlsym(CameraHALHandle, Android22ConnectName))
    {
        LOGD("Connecting to CameraService v 2.2");
        camera = Android22Connect();
        LOGD("Connection to CameraService v 2.2 established");
    }
    else if (Android23ConnectFuncType Android23Connect = (Android23ConnectFuncType)dlsym(CameraHALHandle, Android23ConnectName))
    {
        LOGD("Connecting to CameraService v 2.3");
        camera = Android23Connect(cameraId);
        LOGD("Connection to CameraService v 2.3 established");
    }
    else if (Android3DConnectFuncType Android3DConnect = (Android3DConnectFuncType)dlsym(CameraHALHandle, Android3DConnectName))
    {
        LOGD("Connecting to CameraService v 3D");
        camera = Android3DConnect(cameraId, CAMERA_SUPPORT_MODE_2D);
        LOGD("Connection to CameraService v 3D established");
    }
    else
    {
        dlclose(CameraHALHandle);
        LOGE("Cannot connect to CameraService. Connect method was not found!");
        return NULL;
    }

    // The sp<Camera> keeps the service connection alive on its own; the
    // library handle is no longer needed.
    dlclose(CameraHALHandle);

    if ( NULL == camera.get() )
    {
        LOGE("initCameraConnect: Unable to connect to CameraService\n");
        return 0;
    }

    LOGD("Creating camera handler");
    CameraHandler* handler = new CameraHandler(callback, userData);

    LOGD("Setting camera listener");
    camera->setListener(handler);
    LOGD("Updating camera handler");
    handler->camera = camera;
    handler->cameraId = cameraId;

    LOGD("Checking previous camera parameters");
    if (NULL != prevCameraParameters)
    {
        // Restoring a previous session: push the saved parameters to the
        // camera and mirror them into the handler.
        LOGI("initCameraConnect: Setting paramers from previous camera handler");
        camera->setParameters(prevCameraParameters->flatten());
        handler->params.unflatten(prevCameraParameters->flatten());
    }
    else
    {
        // Fresh session: read the camera defaults, log its capabilities,
        // then tune focus mode and preview format.
        android::String8 params_str = camera->getParameters();
        LOGI("initCameraConnect: [%s]", params_str.string());
        handler->params.unflatten(params_str);

        LOGD("Supported Cameras: %s", handler->params.get("camera-indexes"));
        LOGD("Supported Picture Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
        LOGD("Supported Picture Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
        LOGD("Supported Preview Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
        LOGD("Supported Preview Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
        LOGD("Supported Preview Frame Rates: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
        LOGD("Supported Thumbnail Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
        LOGD("Supported Whitebalance Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
        LOGD("Supported Effects: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_EFFECTS));
        LOGD("Supported Scene Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
        LOGD("Supported Focus Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
        LOGD("Supported Antibanding Options: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
        LOGD("Supported Flash Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

#if !defined(ANDROID_r2_2_0)
        // Set focus mode to continuous-video if supported
        const char* available_focus_modes = handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
        if (NULL != available_focus_modes)
        {
            if (strstr(available_focus_modes, "continuous-video") != NULL)
            {
                handler->params.set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
                status_t resParams = handler->camera->setParameters(handler->params.flatten());
                if (0 != resParams)
                {
                    LOGE("initCameraConnect: failed to set autofocus mode to \"continuous-video\"");
                }
                else
                {
                    LOGD("initCameraConnect: autofocus is set to mode \"continuous-video\"");
                }
            }
        }
#endif

        //check if yuv420sp format available. Set this format as preview format.
        const char* available_formats = handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
        if (available_formats != 0)
        {
            const char* format_to_set = 0;
            const char* pos = available_formats;
            const char* ptr = pos;
            // Walk the comma-separated format list: prefer yuv420sp
            // (stop immediately), otherwise fall back to yvu420sp.
            while(true)
            {
                while(*ptr != 0 && *ptr != ',') ++ptr;
                if (ptr != pos)
                {
                    if (0 == strncmp(pos, "yuv420sp", ptr - pos))
                    {
                        format_to_set = "yuv420sp";
                        break;
                    }
                    if (0 == strncmp(pos, "yvu420sp", ptr - pos))
                        format_to_set = "yvu420sp";
                }
                if (*ptr == 0)
                    break;
                pos = ++ptr;
            }

            if (0 != format_to_set)
            {
                handler->params.setPreviewFormat(format_to_set);
                status_t resParams = handler->camera->setParameters(handler->params.flatten());
                if (resParams != 0)
                    LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
                else
                    LOGD("initCameraConnect: preview format is set to %s", format_to_set);
            }
        }
    }

    // Attach the release-specific preview sink; without one the camera
    // delivers no frames.
    status_t pdstatus;
#if defined(ANDROID_r2_2_0)
    pdstatus = camera->setPreviewDisplay(sp<ISurface>(0 /*new DummySurface*/));
    if (pdstatus != 0)
        LOGE("initCameraConnect: failed setPreviewDisplay(0) call; camera migth not work correctly on some devices");
#elif defined(ANDROID_r2_3_3)
    /* Do nothing in case of 2.3 for now */
#elif defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
    sp<SurfaceTexture> surfaceTexture = new SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
    pdstatus = camera->setPreviewTexture(surfaceTexture);
    if (pdstatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call; camera migth not work correctly");
#elif defined(ANDROID_r4_1_1)
    sp<BufferQueue> bufferQueue = new BufferQueue();
    sp<BufferQueue::ConsumerListener> queueListener = new ConsumerListenerStub();
    bufferQueue->consumerConnect(queueListener);
    pdstatus = camera->setPreviewTexture(bufferQueue);
    if (pdstatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call; camera migth not work correctly");
#endif

#if (defined(ANDROID_r2_2_0) || defined(ANDROID_r2_3_3) || defined(ANDROID_r3_0_1))
# if 1
    ////ATTENTION: switching between two versions: with and without copying memory inside Android OS
    //// see the method CameraService::Client::copyFrameAndPostCopiedFrame and where it is used
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK | FRAME_CALLBACK_FLAG_COPY_OUT_MASK);//with copy
# else
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK );//without copy
# endif
#else
    camera->setPreviewCallbackFlags( CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK | CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK);//with copy
#endif //!(defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3))

    LOGD("Starting preview");
    status_t resStart = camera->startPreview();
    if (resStart != 0)
    {
        // Preview could not start: release everything created above.
        LOGE("initCameraConnect: startPreview() fails. Closing camera connection...");
        handler->closeCameraConnect();
        handler = 0;
    }
    else
    {
        LOGD("Preview started successfully");
    }

    return handler;
}
int main(int argc, char** argv) { // camera CameraHandler camera; // getopt getopt_t* gopt = getopt_create(); getopt_add_string(gopt, 'f', "file", "", "Use static camera image"); if (!getopt_parse(gopt, argc, argv, 1)) { getopt_do_usage(gopt); exit(1); } const char* fileName = getopt_get_string(gopt, "file"); if (strncmp(fileName, "", 1)) { // if fileName is not empty camera.setStaticImage(fileName); } getopt_destroy(gopt); // initialize with first image image_u32_t* tempIm = camera.getImage(); CalibrationHandler::instance()->calibrateImageSize(tempIm->height, tempIm->width, true); image_u32_destroy(tempIm); // lcm LcmHandler::instance()->launchThreads(); // vx VxHandler vx(1024, 768); vx.launchThreads(); while (1) { CalibrationInfo calibrationInfo = CalibrationHandler::instance()->getCalibration(); RenderInfo render; render.im = camera.getImage(); CalibrationHandler::instance()->clipImage(render.im); // std::array<float, 2> pos; // if (Arm::instance()->forwardKinematics(pos)) { // printf("pos: %f, %f\n", pos[0], pos[1]); // } if (buttonStates.blobDetect) { std::vector<BlobDetector::Blob> blobs = BlobDetector::findBlobs(render.im, calibrationInfo, blobMinPixels); for (const auto& blob : blobs) { std::array<int, 2> imageCoords{{blob.x, blob.y}}; std::array<float, 2> screenCoords = CoordinateConverter::imageToScreen(imageCoords); switch (blob.type) { case REDBALL: render.redBlobs.push_back(screenCoords); break; case GREENBALL: render.greenBlobs.push_back(screenCoords); break; case BLUESQUARE: render.blueBlobs.push_back(screenCoords); break; default: break; } } } VxButtonStates buttonStates = vx.getButtonStates(); if (buttonStates.colorMask) { maskWithColors(render.im, calibrationInfo); } vx.changeRenderInfo(render); usleep(1e3); } }
/**
 * Connects to the Android CameraService (2.2 or 2.3, selected at compile
 * time by the ANDROID_r* macro) and builds a configured CameraHandler with
 * the preview running.
 *
 * @param callback             frame callback invoked for each preview frame
 * @param cameraId             index of the camera to open (ignored by the
 *                             argument-less 2.2 connect API)
 * @param userData             opaque pointer passed back to the callback
 * @param prevCameraParameters if non-null, parameters to restore instead of
 *                             querying the camera defaults
 * @return a new CameraHandler, or 0 if connection or preview startup failed
 */
CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback, int cameraId, void* userData, CameraParameters* prevCameraParameters)
{
    LOGD("CameraHandler::initCameraConnect(%p, %d, %p, %p)", callback, cameraId, userData, prevCameraParameters);
    sp<Camera> camera = 0;

    // The Camera::connect signature differs between releases; the right
    // call is chosen by the platform macro defined at build time.
#ifdef ANDROID_r2_2_2
    camera = Camera::connect();
#endif
#ifdef ANDROID_r2_3_3
    camera = Camera::connect(cameraId);
#endif

    if ( 0 == camera.get() )
    {
        LOGE("initCameraConnect: Unable to connect to CameraService\n");
        return 0;
    }

    CameraHandler* handler = new CameraHandler(callback, userData);
    camera->setListener(handler);

    handler->camera = camera;
    handler->cameraId = cameraId;

    if (prevCameraParameters != 0)
    {
        // Restoring a previous session: reuse its parameters.
        LOGI("initCameraConnect: Setting paramers from previous camera handler");
        camera->setParameters(prevCameraParameters->flatten());
        handler->params.unflatten(prevCameraParameters->flatten());
    }
    else
    {
        // Fresh session: read the camera defaults, log its capabilities,
        // then tune the preview format.
        android::String8 params_str = camera->getParameters();
        LOGI("initCameraConnect: [%s]", params_str.string());
        handler->params.unflatten(params_str);

        LOGD("Supported Cameras: %s", handler->params.get("camera-indexes"));
        LOGD("Supported Picture Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
        LOGD("Supported Picture Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
        LOGD("Supported Preview Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
        LOGD("Supported Preview Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
        LOGD("Supported Preview Frame Rates: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
        LOGD("Supported Thumbnail Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
        LOGD("Supported Whitebalance Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
        LOGD("Supported Effects: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_EFFECTS));
        LOGD("Supported Scene Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
        LOGD("Supported Focus Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
        LOGD("Supported Antibanding Options: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
        LOGD("Supported Flash Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

        //check if yuv420sp format available. Set this format as preview format.
        const char* available_formats = handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
        if (available_formats != 0)
        {
            const char* format_to_set = 0;
            const char* pos = available_formats;
            const char* ptr = pos;
            // Walk the comma-separated format list: prefer yuv420sp
            // (stop immediately), otherwise fall back to yuv420i.
            while(true)
            {
                while(*ptr != 0 && *ptr != ',') ++ptr;
                if (ptr != pos)
                {
                    if (0 == strncmp(pos, "yuv420sp", ptr - pos))
                    {
                        format_to_set = "yuv420sp";
                        break;
                    }
                    if (0 == strncmp(pos, "yuv420i", ptr - pos))
                        format_to_set = "yuv420i";
                }
                if (*ptr == 0)
                    break;
                pos = ++ptr;
            }

            if (0 != format_to_set)
            {
                handler->params.setPreviewFormat(format_to_set);
                status_t resParams = handler->camera->setParameters(handler->params.flatten());
                if (resParams != 0)
                    LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
                else
                    LOGD("initCameraConnect: preview format is set to %s", format_to_set);
            }
        }
    }

#ifdef ANDROID_r2_2_2
    // 2.2 needs a (dummy) preview display before frames are delivered.
    status_t pdstatus = camera->setPreviewDisplay(sp<ISurface>(0 /*new DummySurface*/));
    if (pdstatus != 0)
        LOGE("initCameraConnect: failed setPreviewDisplay(0) call; camera migth not work correcttly on some devices");
#endif

    ////ATTENTION: switching between two versions: with and without copying memory inside Android OS
    //// see the method CameraService::Client::copyFrameAndPostCopiedFrame and where it is used
#if 1
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK | FRAME_CALLBACK_FLAG_COPY_OUT_MASK);//with copy
#else
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK );//without copy
#endif

    status_t resStart = camera->startPreview();
    if (resStart != 0)
    {
        // Preview could not start: tear down the connection.
        LOGE("initCameraConnect: startPreview() fails. Closing camera connection...");
        handler->closeCameraConnect();
        handler = 0;
    }

    return handler;
}
/**
 * Connects to the Android CameraService via libcamera_client.so, resolving
 * the release-specific Camera::connect entry point at runtime with dlsym
 * (signatures changed across the 2.2, 2.3, 3D-capable and 4.3 builds),
 * then builds a fully configured CameraHandler with the preview running.
 *
 * Special cameraId values: -1 (any) opens camera 0; 99 / 98 select the
 * first back- / front-facing camera on platforms newer than 2.2.
 *
 * @param callback             frame callback invoked for each preview frame
 * @param cameraId             camera index or one of the special selectors
 * @param userData             opaque pointer passed back to the callback
 * @param prevCameraParameters if non-NULL, parameters to restore instead of
 *                             querying (and tuning) the camera defaults
 * @return a new CameraHandler, or NULL/0 on failure
 */
CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback, int cameraId, void* userData, CameraParameters* prevCameraParameters)
{
    // Function-pointer types for the differently mangled Camera::connect
    // overloads across Android releases.
    typedef sp<Camera> (*Android22ConnectFuncType)();
    typedef sp<Camera> (*Android23ConnectFuncType)(int);
    typedef sp<Camera> (*Android3DConnectFuncType)(int, int);
    typedef sp<Camera> (*Android43ConnectFuncType)(int, const String16&, int);

    // Special cameraId selectors understood by this wrapper.
    const int ANY_CAMERA_INDEX = -1;
    const int BACK_CAMERA_INDEX = 99;
    const int FRONT_CAMERA_INDEX = 98;

    enum {
        CAMERA_SUPPORT_MODE_2D = 0x01, /* Camera Sensor supports 2D mode. */
        CAMERA_SUPPORT_MODE_3D = 0x02, /* Camera Sensor supports 3D mode. */
        CAMERA_SUPPORT_MODE_NONZSL = 0x04, /* Camera Sensor in NON-ZSL mode. */
        CAMERA_SUPPORT_MODE_ZSL = 0x08 /* Camera Sensor supports ZSL mode. */
    };

    // used for Android 4.3
    enum {
        USE_CALLING_UID = -1
    };

    // Mangled symbol names of the known Camera::connect variants.
    const char Android22ConnectName[] = "_ZN7android6Camera7connectEv";
    const char Android23ConnectName[] = "_ZN7android6Camera7connectEi";
    const char Android3DConnectName[] = "_ZN7android6Camera7connectEii";
    const char Android43ConnectName[] = "_ZN7android6Camera7connectEiRKNS_8String16Ei";

    int localCameraIndex = cameraId;
    if (cameraId == ANY_CAMERA_INDEX)
    {
        localCameraIndex = 0;
    }
#if !defined(ANDROID_r2_2_0)
    else if (cameraId == BACK_CAMERA_INDEX)
    {
        // Resolve the first back-facing camera.
        LOGD("Back camera selected");
        for (int i = 0; i < Camera::getNumberOfCameras(); i++)
        {
            CameraInfo info;
            Camera::getCameraInfo(i, &info);
            if (info.facing == CAMERA_FACING_BACK)
            {
                localCameraIndex = i;
                break;
            }
        }
    }
    else if (cameraId == FRONT_CAMERA_INDEX)
    {
        // Resolve the first front-facing camera.
        LOGD("Front camera selected");
        for (int i = 0; i < Camera::getNumberOfCameras(); i++)
        {
            CameraInfo info;
            Camera::getCameraInfo(i, &info);
            if (info.facing == CAMERA_FACING_FRONT)
            {
                localCameraIndex = i;
                break;
            }
        }
    }

    // Still holding a selector value means no matching camera was found.
    if (localCameraIndex == BACK_CAMERA_INDEX)
    {
        LOGE("Back camera not found!");
        return NULL;
    }
    else if (localCameraIndex == FRONT_CAMERA_INDEX)
    {
        LOGE("Front camera not found!");
        return NULL;
    }
#endif

    LOGD("CameraHandler::initCameraConnect(%p, %d, %p, %p)", callback, localCameraIndex, userData, prevCameraParameters);

    sp<Camera> camera = 0;

    void* CameraHALHandle = dlopen("libcamera_client.so", RTLD_LAZY);
    if (!CameraHALHandle)
    {
        LOGE("Cannot link to \"libcamera_client.so\"");
        return NULL;
    }

    // reset errors
    dlerror();

    // Probe for the connect symbol, oldest ABI first.
    if (Android22ConnectFuncType Android22Connect = (Android22ConnectFuncType)dlsym(CameraHALHandle, Android22ConnectName))
    {
        LOGD("Connecting to CameraService v 2.2");
        camera = Android22Connect();
    }
    else if (Android23ConnectFuncType Android23Connect = (Android23ConnectFuncType)dlsym(CameraHALHandle, Android23ConnectName))
    {
        LOGD("Connecting to CameraService v 2.3");
        camera = Android23Connect(localCameraIndex);
    }
    else if (Android3DConnectFuncType Android3DConnect = (Android3DConnectFuncType)dlsym(CameraHALHandle, Android3DConnectName))
    {
        LOGD("Connecting to CameraService v 3D");
        camera = Android3DConnect(localCameraIndex, CAMERA_SUPPORT_MODE_2D);
    }
    else if (Android43ConnectFuncType Android43Connect = (Android43ConnectFuncType)dlsym(CameraHALHandle, Android43ConnectName))
    {
        // Android 4.3 requires the connecting client's process name.
        std::string currentProcName = getProcessName();
        LOGD("Current process name for camera init: %s", currentProcName.c_str());
        camera = Android43Connect(localCameraIndex, String16(currentProcName.c_str()), USE_CALLING_UID);
    }
    else
    {
        dlclose(CameraHALHandle);
        LOGE("Cannot connect to CameraService. Connect method was not found!");
        return NULL;
    }

    // The sp<Camera> keeps the service connection alive on its own; the
    // library handle is no longer needed.
    dlclose(CameraHALHandle);

    if ( 0 == camera.get() )
    {
        LOGE("initCameraConnect: Unable to connect to CameraService\n");
        return 0;
    }

    CameraHandler* handler = new CameraHandler(callback, userData);
    camera->setListener(handler);

    handler->camera = camera;
    handler->cameraId = localCameraIndex;

    if (prevCameraParameters != NULL)
    {
        // Restoring a previous session: reuse its parameters.
        LOGI("initCameraConnect: Setting paramers from previous camera handler");
        camera->setParameters(prevCameraParameters->flatten());
        handler->params->unflatten(prevCameraParameters->flatten());
    }
    else
    {
        // Fresh session: read the camera defaults, log its capabilities,
        // then tune focus mode, preview format and preview size.
        android::String8 params_str = camera->getParameters();
        LOGI("initCameraConnect: [%s]", params_str.string());
        handler->params->unflatten(params_str);

        LOGD("Supported Cameras: %s", handler->params->get("camera-indexes"));
        LOGD("Supported Picture Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
        LOGD("Supported Picture Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
        LOGD("Supported Preview Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
        LOGD("Supported Preview Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
        LOGD("Supported Preview Frame Rates: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
        LOGD("Supported Thumbnail Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
        LOGD("Supported Whitebalance Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
        LOGD("Supported Effects: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_EFFECTS));
        LOGD("Supported Scene Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
        LOGD("Supported Focus Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
        LOGD("Supported Antibanding Options: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
        LOGD("Supported Flash Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

#if !defined(ANDROID_r2_2_0)
        // Set focus mode to continuous-video if supported
        const char* available_focus_modes = handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
        if (available_focus_modes != 0)
        {
            if (strstr(available_focus_modes, "continuous-video") != NULL)
            {
                handler->params->set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
                status_t resParams = handler->camera->setParameters(handler->params->flatten());
                if (resParams != 0)
                {
                    LOGE("initCameraConnect: failed to set autofocus mode to \"continuous-video\"");
                }
                else
                {
                    LOGD("initCameraConnect: autofocus is set to mode \"continuous-video\"");
                }
            }
        }
#endif

        //check if yuv420sp format available. Set this format as preview format.
        const char* available_formats = handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
        if (available_formats != 0)
        {
            const char* format_to_set = 0;
            const char* pos = available_formats;
            const char* ptr = pos;
            // Walk the comma-separated format list: prefer yuv420sp
            // (stop immediately), otherwise fall back to yvu420sp.
            while(true)
            {
                while(*ptr != 0 && *ptr != ',') ++ptr;
                if (ptr != pos)
                {
                    if (0 == strncmp(pos, "yuv420sp", ptr - pos))
                    {
                        format_to_set = "yuv420sp";
                        break;
                    }
                    if (0 == strncmp(pos, "yvu420sp", ptr - pos))
                        format_to_set = "yvu420sp";
                }
                if (*ptr == 0)
                    break;
                pos = ++ptr;
            }

            if (0 != format_to_set)
            {
                handler->params->setPreviewFormat(format_to_set);
                status_t resParams = handler->camera->setParameters(handler->params->flatten());
                if (resParams != 0)
                    LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
                else
                    LOGD("initCameraConnect: preview format is set to %s", format_to_set);
            }
        }

        // Force a fixed 640x480 preview resolution.
        handler->params->setPreviewSize(640, 480);
        status_t resParams = handler->camera->setParameters(handler->params->flatten());
        if (resParams != 0)
            LOGE("initCameraConnect: failed to set preview resolution to 640x480");
        else
            LOGD("initCameraConnect: preview format is set to 640x480");
    }

    // Attach the release-specific preview sink; without one the camera
    // delivers no frames.  NOTE(review): the `operator new(sizeof(X) +
    // MAGIC_TAIL)` over-allocation looks like a workaround for framework
    // class size/ABI mismatches across devices -- confirm before changing.
    status_t bufferStatus;
#if defined(ANDROID_r2_2_0)
    bufferStatus = camera->setPreviewDisplay(sp<ISurface>(0 /*new DummySurface*/));
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewDisplay(0) call (status %d); camera might not work correctly on some devices", bufferStatus);
#elif defined(ANDROID_r2_3_3)
    /* Do nothing in case of 2.3 for now */
#elif defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
    void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
    handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
    bufferStatus = camera->setPreviewTexture(handler->surface);
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
#elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    handler->queue = new(buffer_queue_obj) BufferQueue();
    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
    handler->queue->consumerConnect(handler->listener);
    bufferStatus = camera->setPreviewTexture(handler->queue);
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call; camera might not work correctly");
# elif defined(ANDROID_r4_4_0)
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    handler->queue = new(buffer_queue_obj) BufferQueue();
    handler->queue->setConsumerUsageBits(GraphicBuffer::USAGE_HW_TEXTURE);
    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
    handler->queue->consumerConnect(handler->listener, true);
    bufferStatus = handler->camera->setPreviewTarget(handler->queue);
    if (bufferStatus != 0)
        LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# endif

#if (defined(ANDROID_r2_2_0) || defined(ANDROID_r2_3_3) || defined(ANDROID_r3_0_1))
# if 1
    ////ATTENTION: switching between two versions: with and without copying memory inside Android OS
    //// see the method CameraService::Client::copyFrameAndPostCopiedFrame and where it is used
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK | FRAME_CALLBACK_FLAG_COPY_OUT_MASK);//with copy
# else
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK );//without copy
# endif
#else
    camera->setPreviewCallbackFlags( CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK | CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK);//with copy
#endif //!(defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3))

    LOGD("Starting preview");
    status_t previewStatus = camera->startPreview();
    if (previewStatus != 0)
    {
        // Preview could not start: release everything created above.
        LOGE("initCameraConnect: startPreview() fails. Closing camera connection...");
        handler->closeCameraConnect();
        handler = 0;
    }
    else
    {
        LOGD("Preview started successfully");
    }

    return handler;
}