/**
 * Queries the supported variable-framerate ranges from the OMX capabilities
 * and publishes them into the CameraProperties set.
 *
 * @param params  Destination property table (FRAMERATE_RANGE_* keys).
 * @param caps    OMX capability structure previously fetched from the component.
 * @return NO_ERROR on success, or the error code from encodeVFramerateCap().
 */
status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];
    char defaultRange[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    // Encode the supported (min,max) fps ranges into a comma-separated string.
    ret = encodeVFramerateCap(caps,
                              mVarFramerates,
                              ARRAY_SIZE(mVarFramerates),
                              supported,
                              defaultRange,
                              MAX_PROP_VALUE_LENGTH);

    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
    } else {
        params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
        CAMHAL_LOGDB("framerate ranges %s", supported);
        // Image-capture range doubles as the overall default range.
        params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
        params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO);
        params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
        CAMHAL_LOGDB("Default framerate range: [%s]", DEFAULT_FRAMERATE_RANGE_IMAGE);
    }

    // fix: was LOG_FUNCTION_NAME (entry trace) on the exit path
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
/**
 * Fetches the camera capability structure from the OMX component via a shared
 * (tiler/ion) buffer and translates it into CameraProperties.
 *
 * @param params  Destination property table.
 * @param handle  Handle of the OMX camera component to query.
 * @return NO_ERROR on success, -ENOMEM if the shared buffer could not be
 *         allocated, or an error from insertCapabilities().
 */
status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
    status_t ret = NO_ERROR;
    int caps_size = 0;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_TI_CAPTYPE** caps = NULL; // fix: removed stray double semicolon
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
    MemoryManager memMgr;

    LOG_FUNCTION_NAME;

    // allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
    caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
    caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);

    if (!caps) {
        CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
        ret = -ENOMEM;
        goto EXIT;
    }

    // initialize structures to be passed to OMX Camera
    OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
    caps[0]->nPortIndex = OMX_ALL;

    OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
    sharedBuffer.nPortIndex = OMX_ALL;
    sharedBuffer.nSharedBuffSize = caps_size;
    sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];

    // Get capabilities from OMX Camera
    // fix: informational trace demoted from error (LOGEB) to debug severity
    CAMHAL_LOGDB("Calling OMX_GetConfig() for OMX_TI_IndexConfigCamCapabilities %d", 0);
    /* FIXME-HASH: Fix this */
    eError = OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error during capabilities query 0x%x", eError);
        /* FIXME-HASH: Removed the query as it will fail for GB syslink */
        // ret = UNKNOWN_ERROR;
        // goto EXIT;
    } else {
        CAMHAL_LOGDA("OMX capability query success");
    }

    // Translate and insert Ducati capabilities to CameraProperties
    if ( NO_ERROR == ret ) {
        ret = insertCapabilities(params, *caps[0]);
    }

    CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);

EXIT:
    if (caps) {
        memMgr.freeBuffer((void*) caps);
        caps = NULL;
    }

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
void CameraProperties::Properties::dump() { for (size_t i = 0; i < mProperties->size(); i++) { CAMHAL_LOGDB("%s = %s\n", mProperties->keyAt(i).string(), mProperties->valueAt(i).string()); } }
/**
 * Enables or disables the OMX callback for focus-status change events
 * (OMX_IndexConfigCommonFocusStatus).
 *
 * @param enabled  true to register the callback, false to unregister it.
 * @return NO_ERROR on success (or when the component is not executing),
 *         -EINVAL if the component is in Invalid state, -1 on OMX errors.
 */
status_t OMXCameraAdapter::setFocusCallback(bool enabled) {
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_CALLBACKREQUESTTYPE focusRequstCallback;

    LOG_FUNCTION_NAME;

    // fix: the original assigned to `ret` in both state checks, so the
    // "not executing" branch overwrote the -EINVAL from the invalid-state
    // branch and OMX_SetConfig was issued regardless of state. Use early
    // returns, matching the pattern in doAutoFocus().
    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component in Invalid state");
        LOG_FUNCTION_NAME_EXIT;
        return -EINVAL;
    }

    if ( OMX_StateExecuting != mComponentState ) {
        CAMHAL_LOGEA("OMX component not in executing state");
        LOG_FUNCTION_NAME_EXIT;
        return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (&focusRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE);
    focusRequstCallback.nPortIndex = OMX_ALL;
    focusRequstCallback.nIndex = OMX_IndexConfigCommonFocusStatus;
    focusRequstCallback.bEnable = enabled ? OMX_TRUE : OMX_FALSE;

    eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                           (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest,
                           &focusRequstCallback);
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error registering focus callback 0x%x", eError);
        ret = -1;
    } else {
        CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully",
                     OMX_IndexConfigCommonFocusStatus);
    }

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
/**
 * Publishes the maximum supported focus/metering area counts into the
 * CameraProperties set.
 *
 * Currently a no-op: the body is compiled out (no Gingerbread support for the
 * AlgoAreas capability fields) and the function always returns NO_ERROR.
 *
 * @param params  Destination property table (unused while compiled out).
 * @param caps    OMX capability structure (unused while compiled out).
 * @return NO_ERROR always.
 */
status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    // Kept (although currently unused) because the #if 0 block below uses it.
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    /* FIXME-HASH: Removed, no GB support */
#if 0
    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
    sprintf(supported, "%d", caps.ulAlgoAreasFocusCount);
    params->set(CameraProperties::MAX_FOCUS_AREAS, supported);
    CAMHAL_LOGDB("Maximum supported focus areas %s", supported);

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);
    sprintf(supported, "%d", caps.ulAlgoAreasExposureCount);
    params->set(CameraProperties::MAX_NUM_METERING_AREAS, supported);
    CAMHAL_LOGDB("Maximum supported exposure areas %s", supported);
#endif

    // fix: was LOG_FUNCTION_NAME (entry trace) on the exit path;
    // also removed the unused local `const char *p`.
    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
/**
 * Retrieves the current autofocus status from the OMX component.
 *
 * @param eFocusStatus  Out-parameter filled with the component's focus status.
 * @return NO_ERROR on success (or when the component is not executing),
 *         -EINVAL on bad arguments/state, -1 on OMX query failure.
 */
status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus) {
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    // fix: the original chained these checks by assigning to `ret`, so a later
    // check could reset an earlier error — e.g. a NULL eFocusStatus combined
    // with a non-executing component reset ret to NO_ERROR and then
    // OMX_INIT_STRUCT_PTR dereferenced the NULL pointer. Early returns make
    // each failure terminal.
    if ( NULL == eFocusStatus ) {
        CAMHAL_LOGEA("Invalid focus status");
        LOG_FUNCTION_NAME_EXIT;
        return -EINVAL;
    }

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component in Invalid state");
        LOG_FUNCTION_NAME_EXIT;
        return -EINVAL;
    }

    if ( OMX_StateExecuting != mComponentState ) {
        CAMHAL_LOGEA("OMX component not in executing state");
        LOG_FUNCTION_NAME_EXIT;
        return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                           OMX_IndexConfigCommonFocusStatus,
                           eFocusStatus);
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError);
        ret = -1;
    }

    if ( NO_ERROR == ret ) {
        CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus);
    }

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
/**
 * Unsubscribes this listener from the given sensor type(s).
 *
 * Only SENSOR_ORIENTATION (backed by the accelerometer) is handled; the
 * request is ignored if that sensor is not currently enabled.
 *
 * @param type  Bitmask of sensor_type_t values to disable.
 */
void SensorListener::disableSensor(sensor_type_t type) {
    Sensor const* sensor;
    SensorManager& mgr(SensorManager::getInstance());

    LOG_FUNCTION_NAME;

    Mutex::Autolock lock(&mLock);

    if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
        sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
        // fix: getDefaultSensor() returns NULL when the device lacks the
        // sensor; the original dereferenced it unconditionally.
        if (NULL == sensor) {
            CAMHAL_LOGEA("No default accelerometer sensor available");
        } else {
            CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
            mSensorEventQueue->disableSensor(sensor);
            sensorsEnabled &= ~SENSOR_ORIENTATION;
        }
    }

    LOG_FUNCTION_NAME_EXIT;
}
/**
 * Subscribes this listener to the given sensor type(s).
 *
 * Only SENSOR_ORIENTATION (backed by the accelerometer, sampled at 100 ms)
 * is handled; the request is ignored if already enabled.
 *
 * @param type  Bitmask of sensor_type_t values to enable.
 */
void SensorListener::enableSensor(sensor_type_t type) {
    android::Sensor const* sensor;
    android::SensorManager& mgr(android::SensorManager::getInstance());

    LOG_FUNCTION_NAME;

    android::AutoMutex lock(&mLock);

    if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
        sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
        // fix: getDefaultSensor() returns NULL when the device lacks the
        // sensor; the original dereferenced it unconditionally.
        if (NULL == sensor) {
            CAMHAL_LOGEA("No default accelerometer sensor available");
        } else {
            CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
            mSensorEventQueue->enableSensor(sensor);
            mSensorEventQueue->setEventRate(sensor, ms2ns(100));
            sensorsEnabled |= SENSOR_ORIENTATION;
        }
    }

    LOG_FUNCTION_NAME_EXIT;
}
/**
 * Enumerates the V4L2 driver's pixel formats, frame sizes and frame intervals
 * and inserts the results into the CameraProperties set.
 *
 * Enumeration is done with VIDIOC_ENUM_FMT / VIDIOC_ENUM_FRAMESIZES /
 * VIDIOC_ENUM_FRAMEINTERVALS against the YUYV format only (see TODOs).
 * Preview sizes are sorted ascending and frame rates are probed from the
 * largest resolution downward until one supporting 30 fps is found.
 *
 * @param sensorId  Sensor index (currently unused by this implementation).
 * @param params    Destination property table.
 * @param handle    Open V4L2 device handle used for the ioctls.
 * @return NO_ERROR always (enumeration failures just terminate the loops).
 */
status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle) {
    status_t status = NO_ERROR;
    V4L_TI_CAPTYPE caps;
    int i = 0;
    int j = 0;
    struct v4l2_fmtdesc fmtDesc;
    struct v4l2_frmsizeenum frmSizeEnum;
    struct v4l2_frmivalenum frmIvalEnum;

    // fix: zero-initialize the caps aggregate and the ioctl argument structs.
    // The V4L2 spec requires reserved fields of the enum structs to be
    // cleared, and the originals were passed to ioctl() uninitialized.
    memset(&caps, 0, sizeof(caps));
    memset(&fmtDesc, 0, sizeof(fmtDesc));
    memset(&frmSizeEnum, 0, sizeof(frmSizeEnum));
    memset(&frmIvalEnum, 0, sizeof(frmIvalEnum));

    //get supported pixel formats
    for ( i = 0; status == NO_ERROR; i++) {
        fmtDesc.index = i;
        fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        status = ioctl (handle, VIDIOC_ENUM_FMT, &fmtDesc);
        if (status == NO_ERROR) {
            CAMHAL_LOGDB("fmtDesc[%d].description::pixelformat::flags== (%s::%d::%d)",i, fmtDesc.description,fmtDesc.pixelformat,fmtDesc.flags);
            caps.ePreviewFormats[i] = fmtDesc.pixelformat;
        } else {
            // fix: record the count at the first failing index. The original
            // assigned `i` after the loop, which over-counted by one because
            // the for-increment runs once more after the failing iteration.
            caps.ulPreviewFormatCount = i;
        }
    }

    //get preview sizes & capture image sizes
    status = NO_ERROR;
    for ( i = 0; status == NO_ERROR; i++) {
        frmSizeEnum.index = i;
        //Check for frame sizes for default pixel format
        //TODO: Check for frame sizes for all supported pixel formats
        frmSizeEnum.pixel_format = V4L2_PIX_FMT_YUYV;

        status = ioctl (handle, VIDIOC_ENUM_FRAMESIZES, &frmSizeEnum);
        if (status == NO_ERROR) {
            int width;
            int height;

            if(frmSizeEnum.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
                CAMHAL_LOGDB("\nfrmSizeEnum.type = %d", frmSizeEnum.type);
                CAMHAL_LOGDB("\nmin_width x height = %d x %d ",frmSizeEnum.stepwise.min_width, frmSizeEnum.stepwise.min_height);
                CAMHAL_LOGDB("\nmax_width x height = %d x %d ",frmSizeEnum.stepwise.max_width, frmSizeEnum.stepwise.max_height);
                CAMHAL_LOGDB("\nstep width x height = %d x %d ",frmSizeEnum.stepwise.step_width,frmSizeEnum.stepwise.step_height);
                //TODO: validate populating the sizes when type = V4L2_FRMSIZE_TYPE_STEPWISE
                width = frmSizeEnum.stepwise.max_width;
                height = frmSizeEnum.stepwise.max_height;
            } else {
                CAMHAL_LOGDB("frmSizeEnum.index[%d].width x height == (%d x %d)", i, frmSizeEnum.discrete.width, frmSizeEnum.discrete.height);
                width = frmSizeEnum.discrete.width;
                height = frmSizeEnum.discrete.height;
            }

            caps.tCaptureRes[i].width = width;
            caps.tCaptureRes[i].height = height;
            caps.tPreviewRes[i].width = width;
            caps.tPreviewRes[i].height = height;

            snprintf(caps.tPreviewRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[i].width,caps.tPreviewRes[i].height);
            snprintf(caps.tCaptureRes[i].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tCaptureRes[i].width,caps.tCaptureRes[i].height);
        } else {
            caps.ulCaptureResCount = i;
            caps.ulPreviewResCount = i;
        }
    }

    //sort the preview sizes in ascending order
    sortAscend(caps, caps.ulPreviewResCount);

    //get supported frame rates
    bool fps30 = false;
    for ( j=caps.ulPreviewResCount-1; j >= 0; j--) {
        CAMHAL_LOGDB(" W x H = %d x %d", caps.tPreviewRes[j].width, caps.tPreviewRes[j].height);
        status = NO_ERROR;
        for ( i = 0; status == NO_ERROR; i++) {
            frmIvalEnum.index = i;
            //Check for supported frame rates for the default pixel format.
            frmIvalEnum.pixel_format = V4L2_PIX_FMT_YUYV;
            frmIvalEnum.width = caps.tPreviewRes[j].width;
            frmIvalEnum.height = caps.tPreviewRes[j].height;

            status = ioctl (handle, VIDIOC_ENUM_FRAMEINTERVALS, &frmIvalEnum);
            if (status == NO_ERROR) {
                if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
                    CAMHAL_LOGDB("frmIvalEnum[%d].type = %d)", i, frmIvalEnum.type);
                    CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.min = %d/%d)", i, frmIvalEnum.stepwise.min.denominator, frmIvalEnum.stepwise.min.numerator);
                    CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.max = %d/%d)", i, frmIvalEnum.stepwise.max.denominator, frmIvalEnum.stepwise.max.numerator);
                    CAMHAL_LOGDB("frmIvalEnum[%d].stepwise.step = %d/%d)", i, frmIvalEnum.stepwise.step.denominator, frmIvalEnum.stepwise.step.numerator);
                    caps.ulFrameRates[i] = (frmIvalEnum.stepwise.max.denominator/frmIvalEnum.stepwise.max.numerator);
                } else {
                    CAMHAL_LOGDB("frmIvalEnum[%d].frame rate= %d)",i, (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator));
                    caps.ulFrameRates[i] = (frmIvalEnum.discrete.denominator/frmIvalEnum.discrete.numerator);
                }
                if (caps.ulFrameRates[i] == 30) {
                    fps30 = true;
                }
            } else if (i == 0) {
                // Framerate reporting is not guaranteed in V4L2 implementation.
                caps.ulFrameRates[i] = 30;
                fps30 = true;
                caps.ulFrameRateCount = 1;
            } else {
                CAMHAL_LOGE("caps.ulFrameRateCount = %d",i);
                caps.ulFrameRateCount = i;
            }
        }
        if(fps30) {
            break;
        }
    }

    if(frmIvalEnum.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
        //TODO: populate the frame rates when type = V4L2_FRMIVAL_TYPE_STEPWISE;
    }

    //update the preview resolution with the highest resolution which supports 30fps.
    /*
    // for video preview the application choose the resolution from the mediaprofiles.xml.
    // so populating all supported preview resolution is required for video mode.
    caps.tPreviewRes[0].width = caps.tPreviewRes[j].width;
    caps.tPreviewRes[0].height = caps.tPreviewRes[j].height;
    snprintf(caps.tPreviewRes[0].param, MAX_RES_STRING_LENGTH,"%dx%d",caps.tPreviewRes[j].width,caps.tPreviewRes[j].height);
    caps.ulPreviewResCount = 1;
    */

    insertCapabilities (params, caps);
    return NO_ERROR;
}
/*--------------------MemoryManager Class STARTS here-----------------------------*/

/**
 * Allocates `numBufs` ION buffers of `bytes` bytes each and returns them as a
 * NULL-terminated array of mapped buffer addresses (stored as uint32_t,
 * returned as void*).
 *
 * Width/height/format are accepted for the (unimplemented) 2-D tiler path and
 * are ignored for 1-D allocations. Per-buffer ION handle, mmap fd and length
 * are recorded in the member maps so freeBuffer() can undo everything.
 *
 * Returns NULL on any failure; partially-allocated buffers are released via
 * freeBuffer() and the error notifier (if set) is informed with -ENOMEM.
 */
void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;

    // Lazily open the ION device on first use.
    // NOTE(review): success/failure is detected by comparing against 0 here;
    // ion_open() conventionally returns a negative fd on error — confirm
    // against the ion library in use.
    if(mIonFd == 0)
    {
        mIonFd = ion_open();
        if(mIonFd == 0)
        {
            CAMHAL_LOGEA("ion_open failed!!!");
            return NULL;
        }
    }

    ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
    ///the buffers
    const uint numArrayEntriesC = (uint)(numBufs+1);

    ///Allocate a buffer array
    uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
    if(!bufsArr)
    {
        // NOTE(review): plain operator new throws std::bad_alloc rather than
        // returning NULL, so this branch is effectively dead unless the build
        // disables exceptions.
        CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
        LOG_FUNCTION_NAME_EXIT;
        return NULL;
    }

    ///Initialize the array with zeros - this will help us while freeing the array in case of error
    ///If a value of an array element is NULL, it means we didnt allocate it
    memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);

    //2D Allocations are not supported currently
    if(bytes != 0)
    {
        struct ion_handle *handle;
        int mmap_fd;

        ///1D buffers: allocate from the carveout heap, then map each one into
        ///this process and record the bookkeeping needed by freeBuffer().
        for (int i = 0; i < numBufs; i++)
        {
            int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
            if(ret < 0)
            {
                CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
                goto error;
            }

            CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
            if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
                          (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
            {
                CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
                ion_free(mIonFd, handle);
                goto error;
            }

            // Keyed by the mapped virtual address so freeBuffer() can look up
            // the handle/fd/length for unmap + ion_free.
            mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
            mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
            mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
        }

    }
    else // bytes == 0 denotes a 2-D tiler buffer request, which is not implemented
    {
    }

    LOG_FUNCTION_NAME_EXIT;

    return (void*)bufsArr;

error:
    // Roll back every buffer allocated so far (zeroed entries are skipped by
    // freeBuffer) and report the failure to the error notifier if one exists.
    LOGE("Freeing buffers already allocated after error occurred");
    freeBuffer(bufsArr);

    if ( NULL != mErrorNotifier.get() )
    {
        mErrorNotifier->errorNotify(-ENOMEM);
    }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;
}
/**
 * Parses an Android camera-area string of the form
 * "(left,top,right,bottom,weight)(...)..." into a vector of CameraArea objects.
 *
 * The input is copied into a scratch buffer, split on '(' with strtok_r, and
 * each field is consumed with strtol, verifying the expected separator after
 * every field. Each parsed tuple is validated with checkArea() before being
 * appended to `areas`.
 *
 * @param area        Source string (need not be NUL-terminated beyond areaLength).
 * @param areaLength  Number of bytes to copy/parse from `area`.
 * @param areas       Output vector; parsed areas are appended.
 * @return NO_ERROR on success, -EINVAL on malformed input, -ENOMEM on
 *         allocation failure, or the error from checkArea().
 */
status_t CameraArea::parseAreas(const char *area, size_t areaLength, Vector< sp<CameraArea> > &areas)
{
    status_t ret = NO_ERROR;
    char *ctx;
    char *pArea = NULL;
    char *pStart = NULL;
    char *pEnd = NULL;
    const char *startToken = "(";
    const char endToken = ')';
    const char sep = ',';
    ssize_t top, left, bottom, right, weight;
    char *tmpBuffer = NULL;
    sp<CameraArea> currentArea;

    LOG_FUNCTION_NAME

    if ( ( NULL == area ) || ( 0 >= areaLength ) )
        {
        return -EINVAL;
        }

    // Work on a private copy because strtok_r mutates the buffer.
    tmpBuffer = ( char * ) malloc(areaLength);
    if ( NULL == tmpBuffer )
        {
        return -ENOMEM;
        }

    memcpy(tmpBuffer, area, areaLength);

    pArea = strtok_r(tmpBuffer, startToken, &ctx);

    do
        {
        // pEnd is advanced by each strtol call; after every field we check
        // that the character it stopped on is the expected separator.
        // NOTE(review): field order consumed here is left,top,right,bottom —
        // the first two log messages are swapped relative to the variables
        // they report on.
        pStart = pArea;
        if ( NULL == pStart )
            {
            CAMHAL_LOGEA("Parsing of the left area coordinate failed!");
            ret = -EINVAL;
            break;
            }
        else
            {
            left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
            }

        if ( sep != *pEnd )
            {
            CAMHAL_LOGEA("Parsing of the top area coordinate failed!");
            ret = -EINVAL;
            break;
            }
        else
            {
            top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
            }

        if ( sep != *pEnd )
            {
            CAMHAL_LOGEA("Parsing of the right area coordinate failed!");
            ret = -EINVAL;
            break;
            }
        else
            {
            right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
            }

        if ( sep != *pEnd )
            {
            CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!");
            ret = -EINVAL;
            break;
            }
        else
            {
            bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
            }

        if ( sep != *pEnd )
            {
            CAMHAL_LOGEA("Parsing of the weight area coordinate failed!");
            ret = -EINVAL;
            break;
            }
        else
            {
            weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
            }

        // The tuple must be terminated by ')'.
        if ( endToken != *pEnd )
            {
            CAMHAL_LOGEA("Malformed area!");
            ret = -EINVAL;
            break;
            }

        // Range/consistency validation of the parsed rectangle and weight.
        ret = checkArea(top, left, bottom, right, weight);
        if ( NO_ERROR != ret )
            {
            break;
            }

        currentArea = new CameraArea(top, left, bottom, right, weight);
        CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d",
                     ( int ) top,
                     ( int ) left,
                     ( int ) bottom,
                     ( int ) right,
                     ( int ) weight);
        if ( NULL != currentArea.get() )
            {
            areas.add(currentArea);
            }
        else
            {
            ret = -ENOMEM;
            break;
            }

        // Advance to the next "(...)" group, if any.
        pArea = strtok_r(NULL, startToken, &ctx);

        }
    while ( NULL != pArea );

    if ( NULL != tmpBuffer )
        {
        free(tmpBuffer);
        }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
/**
 * Runs a single autofocus cycle on the OMX camera component.
 *
 * Flow: validate component state and that AF was not already canceled; pause
 * face-detection callbacks; switch algorithm priority to region mode if the
 * application supplied focus areas; depending on the current focus mode either
 * unlock 3A (FocusLock case) or query CAF status; if a scan is required,
 * enable focus scanning, trigger it, and block on mDoAFCond until the focus
 * callback fires or the mode-dependent timeout elapses; finally report the
 * result via returnFocusStatus().
 *
 * @return NO_ERROR on success or benign early-out, -EINVAL on invalid
 *         component state, INVALID_OPERATION on OMX errors, or an error from
 *         checkFocus()/returnFocusStatus().
 */
status_t OMXCameraAdapter::doAutoFocus() {
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
    OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
    OMX_CONFIG_BOOLEANTYPE bOMX;
    nsecs_t timeout = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        // Report AF failure to the client before bailing out.
        returnFocusStatus(false);
        return -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
        CAMHAL_LOGEA("OMX component not in executing state");
        returnFocusStatus(false);
        return NO_ERROR;
        }

    // AF may have been canceled between the request being queued and this
    // method running; in that case there is nothing to do.
    if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) )
       {
        CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
        return NO_ERROR;
       }

    OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    // If the app calls autoFocus, the camera will stop sending face callbacks.
    pauseFaceDetection(true);

    // This is needed for applying FOCUS_REGION correctly
    if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
    {
    //Disable face priority
    setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);

    //Enable region algorithm priority
    setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
    }

    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus;

    if (mParameters3A.FocusLock) {
        // this basically means user never called cancelAutoFocus after a scan...
        // if this is the case we need to unlock AF to ensure we will do a scan
        if (set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) {
            CAMHAL_LOGEA("Error Unlocking 3A locks");
        } else {
            CAMHAL_LOGDA("AE/AWB unlocked successfully");
        }
    } else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) {
        // In case we have CAF running we should first check the AF status.
        // If it has managed to lock, then do as usual and return status
        // immediately.
        ret = checkFocus(&focusStatus);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Focus status check failed 0x%x!", ret);
            return ret;
        } else {
            CAMHAL_LOGDB("Focus status check 0x%x!", focusStatus.eFocusStatus);
        }
    }

    // A new scan is needed either when CAF has not yet reached a usable lock
    // (Request/UnableToReach/Lost) or when the mode is not plain auto.
    if ( (focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
         ( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
           focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
           focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
            (mParameters3A.Focus !=  (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
        {
        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
        bOMX.bEnabled = OMX_TRUE;

        //Enable focus scanning
        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
                               &bOMX);

        // force AF, Ducati will take care of whether CAF
        // or AF will be performed, depending on light conditions
        if ( focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
             ( focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
               focusStatus.eFocusStatus == OMX_FocusStatusLost ) )
            {
            focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
            }

        if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
            {
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                    OMX_IndexConfigFocusControl,
                                    &focusControl);
            }

        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
            return INVALID_OPERATION;
            }
        else
            {
            CAMHAL_LOGDA("Autofocus started successfully");
            }

        // configure focus timeout based on capture mode
        timeout = (mCapMode == VIDEO_MODE) ?
                        ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
                        ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );

        // Block until the AF event handler signals completion or the
        // timeout elapses.
            {
            Mutex::Autolock lock(mDoAFMutex);
            ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
            }

        //If something bad happened while we wait
        if (mComponentState == OMX_StateInvalid) {
            CAMHAL_LOGEA("Invalid State after Auto Focus Exitting!!!");
            return -EINVAL;
        }

        // A timed-out wait is reported as a completed (best-effort) focus;
        // a signaled wait reports the real status from the component.
        if(ret != NO_ERROR) {
            CAMHAL_LOGEA("Autofocus callback timeout expired");
            ret = returnFocusStatus(true);
        } else {
            ret = returnFocusStatus(false);
        }
        }
    else // Focus mode in continuous
        {
        if ( NO_ERROR == ret )
            {
            ret = returnFocusStatus(true);
            mPending3Asettings |= SetFocus;
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
/**
 * Sends the application-supplied touch-focus areas (mFocusAreas) to the OMX
 * component via the OMX_TI_IndexConfigAlgoAreas shared-buffer config.
 *
 * Each area is transformed from Android's normalized coordinates into preview
 * pixel coordinates and then rescaled to the 0..TOUCH_FOCUS_RANGE grid the
 * Ducati algorithm expects. The area list is marshalled through a 4K-aligned
 * ion buffer which is freed before returning.
 *
 * NOTE(review): mFocusAreas.itemAt(0) is read unconditionally — this appears
 * to assume callers only invoke setTouchFocus() with a non-empty area list;
 * confirm against the callers.
 *
 * @return NO_ERROR on success, -1 on invalid component state,
 *         -ENOMEM on allocation failure, -EINVAL on OMX config failure.
 */
status_t OMXCameraAdapter::setTouchFocus()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    OMX_ALGOAREASTYPE **focusAreas;
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
    MemoryManager memMgr;
    int areasSize = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -1;
        }

    if ( NO_ERROR == ret )
        {

        // Shared buffer must be 4K-aligned/sized for the remote core.
        areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
        focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);

        OMXCameraPortParameters * mPreviewData = NULL;
        mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

        if (!focusAreas)
            {
            CAMHAL_LOGEB("Error allocating buffer for focus areas %d", eError);
            return -ENOMEM;
            }

        OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);

        focusAreas[0]->nPortIndex = OMX_ALL;
        focusAreas[0]->nNumAreas = mFocusAreas.size();
        focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;

        // If the area is the special case of (0, 0, 0, 0, 0), then
        // the algorithm needs nNumAreas to be set to 0,
        // in order to automatically choose the best fitting areas.
        if ( mFocusAreas.itemAt(0)->isZeroArea() )
            {
            focusAreas[0]->nNumAreas = 0;
            }

        for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
            {
            // transform the coordinates to 3A-type coordinates
            mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth,
                                            (size_t)mPreviewData->mHeight,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nTop,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nLeft,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nWidth,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nHeight);

            // Rescale from preview pixels to the algorithm's 0..TOUCH_FOCUS_RANGE grid.
            focusAreas[0]->tAlgoAreas[n].nLeft =
                    ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nTop =
                    ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nWidth =
                    ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nHeight =
                    ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();

            CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
                    n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
                    (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
                    (int)focusAreas[0]->tAlgoAreas[n].nPriority);
            }

        OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);

        sharedBuffer.nPortIndex = OMX_ALL;
        sharedBuffer.nSharedBuffSize = areasSize;
        sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];

        if ( NULL == sharedBuffer.pSharedBuff )
            {
            CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
            ret = -ENOMEM;
            goto EXIT;
            }

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                  (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer);

        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError);
            ret = -EINVAL;
            }

    EXIT:
        // Always release the shared ion buffer, on both success and error paths.
        if (NULL != focusAreas)
            {
            memMgr.freeBuffer((void*) focusAreas);
            focusAreas = NULL;
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}