size_t CameraHal::calculateBufferSize(const char* parametersFormat, int width, int height)
{
    int bufferSize = -1;

    if ( NULL != parametersFormat ) {
        if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
            bufferSize = width * height * 2;
        } else if ( (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
                    (0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
            bufferSize = width * height * 3 / 2;
        } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
            bufferSize = width * height * 2;
        } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
            bufferSize = width * height * 2;
        } else {
            CAMHAL_LOGEA("Invalid format");
            bufferSize = 0;
        }
    } else {
        CAMHAL_LOGEA("Preview format is NULL");
        bufferSize = 0;
    }

    return bufferSize;
}
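
// Worked example (illustrative, not part of the original source): for a
// 1280x720 preview the NV12/YV12 branch above yields 1280 * 720 * 3 / 2 =
// 1382400 bytes, while the YUV422I and RGB565 branches both yield
// 1280 * 720 * 2 = 1843200 bytes. A hypothetical caller could use it as:
//
//     size_t previewBytes = calculateBufferSize(
//             android::CameraParameters::PIXEL_FORMAT_YUV420SP, 1280, 720);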
const char* CameraHal::getPixelFormatConstant(const char* parametersFormat)
{
    const char *pixelFormat = NULL;

    if ( NULL != parametersFormat ) {
        if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I) ) {
            CAMHAL_LOGVA("CbYCrY format selected");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV422I;
        } else if ( (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP)) ||
                    (0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420P)) ) {
            // TODO(XXX): We are treating YV12 the same as YUV420SP
            CAMHAL_LOGVA("YUV420SP format selected");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
        } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565) ) {
            CAMHAL_LOGVA("RGB565 format selected");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_RGB565;
        } else if ( 0 == strcmp(parametersFormat, (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ) {
            CAMHAL_LOGVA("BAYER format selected");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
        } else if ( 0 == strcmp(parametersFormat, android::CameraParameters::PIXEL_FORMAT_JPEG) ) {
            CAMHAL_LOGVA("JPEG format selected");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_JPEG;
        } else {
            CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
            pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
        }
    } else {
        CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
        pixelFormat = (const char *) android::CameraParameters::PIXEL_FORMAT_YUV420SP;
    }

    return pixelFormat;
}
status_t CameraArea::checkArea(ssize_t top,
                               ssize_t left,
                               ssize_t bottom,
                               ssize_t right,
                               ssize_t weight)
{

    //Handles the invalid region corner case.
    if ( ( 0 == top ) && ( 0 == left ) && ( 0 == bottom ) && ( 0 == right ) && ( 0 == weight ) )
    {
        return NO_ERROR;
    }

    if ( ( CameraArea::WEIGHT_MIN > weight ) ||  ( CameraArea::WEIGHT_MAX < weight ) )
    {
        CAMHAL_LOGEB("Camera area weight is invalid %d", static_cast<int>(weight));
        return -EINVAL;
    }

    if ( ( CameraArea::TOP > top ) || ( CameraArea::BOTTOM < top ) )
    {
        CAMHAL_LOGEB("Camera area top coordinate is invalid %d", static_cast<int>(top) );
        return -EINVAL;
    }

    if ( ( CameraArea::TOP > bottom ) || ( CameraArea::BOTTOM < bottom ) )
    {
        CAMHAL_LOGEB("Camera area bottom coordinate is invalid %d", static_cast<int>(bottom) );
        return -EINVAL;
    }

    if ( ( CameraArea::LEFT > left ) || ( CameraArea::RIGHT < left ) )
    {
        CAMHAL_LOGEB("Camera area left coordinate is invalid %d", static_cast<int>(left) );
        return -EINVAL;
    }

    if ( ( CameraArea::LEFT > right ) || ( CameraArea::RIGHT < right ) )
    {
        CAMHAL_LOGEB("Camera area right coordinate is invalid %d", static_cast<int>(right) );
        return -EINVAL;
    }

    if ( left >= right )
    {
        CAMHAL_LOGEA("Camera area left larger than right");
        return -EINVAL;
    }

    if ( top >= bottom )
    {
        CAMHAL_LOGEA("Camera area top larger than bottom");
        return -EINVAL;
    }

    return NO_ERROR;
}
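
// Illustrative note (assumption, not stated in the source above): TOP/BOTTOM,
// LEFT/RIGHT and WEIGHT_MIN/WEIGHT_MAX are taken here to follow the usual
// Android camera-area convention of a [-1000, 1000] coordinate space with
// weights in [1, 1000]. Under that assumption:
//
//     checkArea(-250, -250, 250, 250, 500); // centered region    -> NO_ERROR
//     checkArea(0, 0, 0, 0, 0);             // "no area" sentinel -> NO_ERROR
//     checkArea(100, 100, -100, -100, 500); // inverted rectangle -> -EINVAL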
status_t OMXCameraAdapter::setFocusCallback(bool enabled)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_CALLBACKREQUESTTYPE focusRequstCallback;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        ret = -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
          CAMHAL_LOGEA("OMX component not in executing state");
        ret = NO_ERROR;
        }

    if ( NO_ERROR == ret )
        {

        OMX_INIT_STRUCT_PTR (&focusRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE);
        focusRequstCallback.nPortIndex = OMX_ALL;
        focusRequstCallback.nIndex = OMX_IndexConfigCommonFocusStatus;

        if ( enabled )
            {
            focusRequstCallback.bEnable = OMX_TRUE;
            }
        else
            {
            focusRequstCallback.bEnable = OMX_FALSE;
            }

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest,
                                &focusRequstCallback);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error registering focus callback 0x%x", eError);
            ret = -1;
            }
        else
            {
            CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully",
                         OMX_IndexConfigCommonFocusStatus);
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::stopAutoFocus()
{
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        returnFocusStatus(false);
        return -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
          CAMHAL_LOGEA("OMX component not in executing state");
        return NO_ERROR;
        }

    if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
        // No need to stop focus if we are in infinity mode. Nothing to stop.
        return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusControl.eFocusControl = OMX_IMAGE_FocusControlOff;

    eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                            OMX_IndexConfigFocusControl,
                            &focusControl);
    if ( OMX_ErrorNone != eError )
        {
        CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
        return ErrorUtils::omxToAndroidError(eError);
    } else {
        // This is a WA. Usually the OMX Camera component should
        // generate AF status change OMX event fairly quickly
        // ( after one preview frame ) and this notification should
        // actually come from 'handleFocusCallback()'.
        Mutex::Autolock lock(mDoAFMutex);
        mDoAFCond.broadcast();
    }


    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
status_t OMXCameraAdapter::encodeSizeCap(OMX_TI_CAPRESTYPE &res,
                       const CapResolution *cap,
                       size_t capCount,
                       char * buffer,
                       size_t bufferSize) {
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( (NULL == buffer) || (NULL == cap) ) {
        CAMHAL_LOGEA("Invalid input arguments");
        return -EINVAL;
    }

    for ( unsigned int i = 0 ; i < capCount ; i++ ) {
        if ( (cap[i].width <= res.nWidthMax) &&
             (cap[i].height <= res.nHeightMax) &&
             (cap[i].width >= res.nWidthMin) &&
             (cap[i].height >= res.nHeightMin) ) {
                strncat(buffer, cap[i].param, bufferSize - 1);
                strncat(buffer, PARAM_SEP, bufferSize - 1);
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::encodeISOCap(OMX_U32 maxISO,
                      const CapISO *cap,
                      size_t capCount,
                      char * buffer,
                      size_t bufferSize) {
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( (NULL == buffer) || (NULL == cap) ) {
        CAMHAL_LOGEA("Invalid input arguments");
        return -EINVAL;
    }

    for ( unsigned int i = 0; i < capCount; i++ ) {
        if ( cap[i].num <= maxISO) {
            strncat(buffer, cap[i].param, bufferSize - 1);
            strncat(buffer, PARAM_SEP, bufferSize - 1);
        }
    }
    remove_last_sep(buffer);

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
                              const CapPixelformat *cap,
                              size_t capCount,
                              char * buffer,
                              size_t bufferSize) {
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( ( NULL == buffer ) || ( NULL == cap ) ) {
        CAMHAL_LOGEA("Invalid input arguments");
        return -EINVAL;
    }

    for ( unsigned int i = 0; i < capCount; i++ ) {
        if ( format == cap[i].pixelformat ) {
            strncat(buffer, cap[i].param, bufferSize - 1);
            strncat(buffer, PARAM_SEP, bufferSize - 1);
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focusMode)
{
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        return NO_INIT;
    }

    OMX_INIT_STRUCT_PTR (&focusMode, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusMode.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;

    eError =  OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                            OMX_IndexConfigFocusControl,
                            &focusMode);

    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error while retrieving focus mode 0x%x", eError);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ErrorUtils::omxToAndroidError(eError);
}
int MemoryManager::freeBuffer(void* buf)
{
    status_t ret = NO_ERROR;
    LOG_FUNCTION_NAME;

    uint32_t *bufEntry = (uint32_t*)buf;

    if(!bufEntry)
        {
        CAMHAL_LOGEA("NULL pointer passed to freebuffer");
        LOG_FUNCTION_NAME_EXIT;
        return BAD_VALUE;
        }

    while(*bufEntry)
        {
        unsigned int ptr = (unsigned int) *bufEntry++;
        if(mIonBufLength.valueFor(ptr))
            {
            munmap((void *)ptr, mIonBufLength.valueFor(ptr));
            close(mIonFdMap.valueFor(ptr));
            ion_free(mIonFd, (ion_handle*)mIonHandleMap.valueFor(ptr));
            mIonHandleMap.removeItem(ptr);
            mIonBufLength.removeItem(ptr);
            mIonFdMap.removeItem(ptr);
            }
        else
            {
            CAMHAL_LOGEA("Not a valid Memory Manager buffer");
            }
        }

    ///@todo Check if this way of deleting array is correct, else use malloc/free
    uint32_t * bufArr = (uint32_t*)buf;
    delete [] bufArr;

    if(mIonBufLength.size() == 0)
        {
        if(mIonFd)
            {
            ion_close(mIonFd);
            mIonFd = 0;
            }
        }
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    if ( NULL == eFocusStatus )
        {
        CAMHAL_LOGEA("Invalid focus status");
        ret = -EINVAL;
        }

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        ret = -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
        CAMHAL_LOGEA("OMX component not in executing state");
        ret = NO_ERROR;
        }

    if ( NO_ERROR == ret )
        {
        OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                               OMX_IndexConfigCommonFocusStatus,
                               eFocusStatus);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError);
            ret = -1;
            }
        }

    if ( NO_ERROR == ret )
        {
        CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus);
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::encodeFramerateCap(OMX_U32 framerateMax, OMX_U32 framerateMin, const CapFramerate *cap, size_t capCount, char * buffer, size_t bufferSize)
{
    status_t ret = NO_ERROR;
    bool minInserted = false;
    bool maxInserted = false;
    char tmpBuffer[FRAMERATE_COUNT];

    LOG_FUNCTION_NAME

    if ( ( NULL == buffer ) ||
         ( NULL == cap ) )
        {
        CAMHAL_LOGEA("Invalid input arguments");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        for ( unsigned int i = 0 ; i < capCount ; i++ )
            {
            if ( ( framerateMax >= cap[i].framerate ) &&
                 ( framerateMin <= cap[i].framerate ) )
                {
                strncat(buffer, cap[i].param, bufferSize - 1);
                strncat(buffer, PARAM_SEP, bufferSize - 1);

                if ( cap[i].framerate ==  framerateMin )
                    {
                    minInserted = true;
                    }
                }

            if ( cap[i].framerate ==  framerateMax )
                {
                maxInserted = true;
                }
            }

        if ( !maxInserted )
            {
            memset(tmpBuffer, 0, FRAMERATE_COUNT);
            snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMax);
            strncat(buffer, tmpBuffer, bufferSize - 1);
            strncat(buffer, PARAM_SEP, bufferSize - 1);
            }

        if ( !minInserted )
            {
            memset(tmpBuffer, 0, FRAMERATE_COUNT);
            snprintf(tmpBuffer, FRAMERATE_COUNT - 1, "%u,", ( unsigned int ) framerateMin);
            strncat(buffer, tmpBuffer, bufferSize - 1);
            strncat(buffer, PARAM_SEP, bufferSize - 1);
            }

        }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps, char * buffer, size_t bufferSize)
{
    status_t ret = NO_ERROR;
    uint32_t minVFR, maxVFR;
    char tmpBuffer[MAX_PROP_VALUE_LENGTH];
    bool skipLast = false;

    LOG_FUNCTION_NAME

    if ( NULL == buffer )
        {
        CAMHAL_LOGEA("Invalid input arguments");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        unsigned int count = caps.ulPrvVarFPSModesCount;
        if ( count > 10 )
        {
            count = 10;
        }
        for ( unsigned int i = 0 ; i < count ; i++ )
            {

            if ( 0 < i )
                {
                if ( ( caps.tPrvVarFPSModes[i-1].nVarFPSMin == caps.tPrvVarFPSModes[i].nVarFPSMin ) &&
                     ( caps.tPrvVarFPSModes[i-1].nVarFPSMax == caps.tPrvVarFPSModes[i].nVarFPSMax ) )
                    {
                    continue;
                    }
                else if (!skipLast)
                    {
                    strncat(buffer, PARAM_SEP, bufferSize - 1);
                    }
                }
            if ( caps.tPrvVarFPSModes[i].nVarFPSMin == caps.tPrvVarFPSModes[i].nVarFPSMax )
                {
                skipLast = true;
                continue;
                }
            else
                {
                skipLast = false;
                }

            CAMHAL_LOGEB("Min fps 0x%x, Max fps 0x%x", ( unsigned int ) caps.tPrvVarFPSModes[i].nVarFPSMin,
                                                       ( unsigned int ) caps.tPrvVarFPSModes[i].nVarFPSMax);

            minVFR = caps.tPrvVarFPSModes[i].nVarFPSMin >> VFR_OFFSET;
            minVFR *= CameraHal::VFR_SCALE;
            maxVFR = caps.tPrvVarFPSModes[i].nVarFPSMax >> VFR_OFFSET;
            maxVFR *= CameraHal::VFR_SCALE;
            snprintf(tmpBuffer, ( MAX_PROP_VALUE_LENGTH - 1 ), "(%d,%d)", minVFR, maxVFR);
            strncat(buffer, tmpBuffer, ( bufferSize - 1 ));
            }
        }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
static void libjpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
    libjpeg_source_mgr*  src = (libjpeg_source_mgr*)cinfo->src;

    if (num_bytes > (long)src->bytes_in_buffer) {
        CAMHAL_LOGEA("\n\n\n libjpeg_skip_input_data - num_bytes > (long)src->bytes_in_buffer \n\n\n");
    } else {
        src->next_input_byte += num_bytes;
        src->bytes_in_buffer -= num_bytes;
    }
}
status_t SensorListener::initialize() {
    status_t ret = NO_ERROR;
    SensorManager& mgr(SensorManager::getInstance());

    LOG_FUNCTION_NAME;

    sp<Looper> mLooper;

    mSensorEventQueue = mgr.createEventQueue();
    if (mSensorEventQueue == NULL) {
        CAMHAL_LOGEA("createEventQueue returned NULL");
        ret = NO_INIT;
        goto out;
    }

    mLooper = new Looper(false);
    mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);

    if (mSensorLooperThread.get() == NULL)
            mSensorLooperThread = new SensorLooperThread(mLooper.get());

    if (mSensorLooperThread.get() == NULL) {
        CAMHAL_LOGEA("Couldn't create sensor looper thread");
        ret = NO_MEMORY;
        goto out;
    }

    ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
    if (ret == INVALID_OPERATION){
        CAMHAL_LOGDA("thread already running ?!?");
    } else if (ret != NO_ERROR) {
        CAMHAL_LOGEA("couldn't run thread");
        goto out;
    }

 out:
    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
int Decoder_libjpeg::appendDHT(unsigned char *jpeg_src, int filled_len, unsigned char *jpeg_with_dht_buffer, int buff_size)
{
    /* Appending DHT to JPEG */

    int len = filled_len + sizeof(jpeg_odml_dht) - 2; // final length of jpeg data
    if (len > buff_size)  {
        CAMHAL_LOGEA("\n\n\n Buffer size too small. filled_len=%d, buff_size=%d, sizeof(jpeg_odml_dht)=%d\n\n\n", filled_len, buff_size, sizeof(jpeg_odml_dht));
        return 0;
    }

    memcpy(jpeg_with_dht_buffer, jpeg_odml_dht, sizeof(jpeg_odml_dht));
    memcpy((jpeg_with_dht_buffer + sizeof(jpeg_odml_dht)), jpeg_src + 2, (filled_len - 2));
    return len;
}
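
// Worked example of the length math above (figures are illustrative): if the
// source JPEG is filled_len = 100000 bytes and the jpeg_odml_dht blob is 432
// bytes, the output is that blob followed by the source minus its 2-byte SOI
// marker, so len = 100000 + 432 - 2 = 100430 bytes. This assumes jpeg_odml_dht
// itself begins with an SOI marker, which keeps the result a well-formed JPEG.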
void OMXCameraAdapter::handleFocusCallback() {
    OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus;
    CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
    status_t ret = NO_ERROR;
    BaseCameraAdapter::AdapterState nextState;
    BaseCameraAdapter::getNextState(nextState);

    OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    ret = checkFocus(&eFocusStatus);

    if (NO_ERROR != ret) {
        CAMHAL_LOGEA("Focus status check failed!");
        // signal and unblock doAutoFocus
        if (AF_ACTIVE & nextState) {
            Mutex::Autolock lock(mDoAFMutex);
            mDoAFCond.broadcast();
        }
        return;
    }

    if ( ( eFocusStatus.eFocusStatus != OMX_FocusStatusRequest ) &&
         ( eFocusStatus.eFocusStatus != OMX_FocusStatusOff ) ) {
        // signal doAutoFocus when a end of scan message comes
        // ignore start of scan
        Mutex::Autolock lock(mDoAFMutex);
        mDoAFCond.broadcast();
    }

    if (mParameters3A.Focus != (OMX_IMAGE_FOCUSCONTROLTYPE) OMX_IMAGE_FocusControlAuto) {
       CAMHAL_LOGDA("unregistered focus callback when not in CAF or doAutoFocus... not handling");
       return;
    }

    // Handling for CAF Callbacks
    switch (eFocusStatus.eFocusStatus) {
        case OMX_FocusStatusRequest:
            focusStatus = CameraHalEvent::FOCUS_STATUS_PENDING;
            break;
        case OMX_FocusStatusReached:
        case OMX_FocusStatusOff:
        case OMX_FocusStatusUnableToReach:
        default:
            focusStatus = CameraHalEvent::FOCUS_STATUS_DONE;
            break;
    }

    notifyFocusSubscribers(focusStatus);
}
status_t OMXCameraAdapter::initFaceDetection()
{
    status_t ret = NO_ERROR;
#ifdef CAMERA_VCE_OMX_FD
    mVceFaceDetect = new OMXVceFaceDetect();
    mVceFaceDetect->setVceObserver(this);
    ret = mVceFaceDetect->init();
    if ( NO_ERROR != ret )
    {
        CAMHAL_LOGEA("Couldn't mVceFaceDetect.init!");
    }

    mFDFrameProvider = new FrameProvider(this, this, FDFrameCallbackRelay);

#endif
    return ret;

}
status_t OMXCameraAdapter::getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError;

    OMX_TI_CONFIG_FOCUSDISTANCETYPE focusDist;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = UNKNOWN_ERROR;
        }

    if ( NO_ERROR == ret )
        {
        OMX_INIT_STRUCT_PTR(&focusDist, OMX_TI_CONFIG_FOCUSDISTANCETYPE);
        focusDist.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;

        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFocusDistance,
                               &focusDist);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while querying focus distances 0x%x", eError);
            ret = UNKNOWN_ERROR;
            }

        }

    if ( NO_ERROR == ret )
        {
        near = focusDist.nFocusDistanceNear;
        optimal = focusDist.nFocusDistanceOptimal;
        far = focusDist.nFocusDistanceFar;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( NULL == errorNotifier )
        {
        CAMHAL_LOGEA("Invalid Error Notifier reference");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        mErrorNotifier = errorNotifier;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME

    if ( NULL == errorNotifier )
        {
        CAMHAL_LOGEA("Invalid Error Notifier reference");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        mErrorNotifier = errorNotifier;
        }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
/*--------------------MemoryManager Class STARTS here-----------------------------*/
void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;

    if(mIonFd == 0)
        {
        mIonFd = ion_open();
        if(mIonFd == 0)
            {
            CAMHAL_LOGEA("ion_open failed!!!");
            return NULL;
            }
        }

    ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
    ///the buffers
    const uint numArrayEntriesC = (uint)(numBufs+1);

    ///Allocate a buffer array
    uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
    if(!bufsArr)
        {
        CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
        LOG_FUNCTION_NAME_EXIT;
        return NULL;
        }

    ///Initialize the array with zeros - this will help us while freeing the array in case of error
    ///If a value of an array element is NULL, it means we didn't allocate it
    memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);

    //2D Allocations are not supported currently
    if(bytes != 0)
        {
        struct ion_handle *handle;
        int mmap_fd;

        ///1D buffers
        for (int i = 0; i < numBufs; i++)
            {
            int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
            if(ret < 0)
                {
                CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
                goto error;
                }

            CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
            if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
                          (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
                {
                CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
                ion_free(mIonFd, handle);
                goto error;
                }

            mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
            mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
            mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
            }

        }
    else // If bytes is zero, then it is a 2-D tiler buffer request
        {
        }

        LOG_FUNCTION_NAME_EXIT;

        return (void*)bufsArr;

error:
    LOGE("Freeing buffers already allocated after error occurred");
    freeBuffer(bufsArr);

    if ( NULL != mErrorNotifier.get() )
        {
        mErrorNotifier->errorNotify(-ENOMEM);
        }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;
}
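
// Layout sketch (illustrative): for numBufs == 3 the returned array holds
// numBufs + 1 entries; the last one stays 0 from the memset above and is the
// terminator that freeBuffer()'s while(*bufEntry) loop stops on:
//
//     bufsArr[0] = <mapped address of buffer 0>
//     bufsArr[1] = <mapped address of buffer 1>
//     bufsArr[2] = <mapped address of buffer 2>
//     bufsArr[3] = 0   // end-of-array marker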
bool Decoder_libjpeg::decode(unsigned char *jpeg_src, int filled_len, unsigned char *nv12_buffer, int stride)
{
    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;
    struct libjpeg_source_mgr s_mgr(jpeg_src, filled_len);

    if (filled_len == 0)
        return false;

    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_decompress(&cinfo);

    cinfo.src = &s_mgr;
    int status = jpeg_read_header(&cinfo, true);
    if (status != JPEG_HEADER_OK) {
        CAMHAL_LOGEA("jpeg header corrupted");
        return false;
    }

    cinfo.out_color_space = JCS_YCbCr;
    cinfo.raw_data_out = true;
    status = jpeg_start_decompress(&cinfo);
    if (!status){
        CAMHAL_LOGEA("jpeg_start_decompress failed");
        return false;
    }

    if (mWidth == 0){
        mWidth = cinfo.output_width;
        mHeight = cinfo.output_height;
        CAMHAL_LOGEA("w x h = %d x %d. stride=%d", cinfo.output_width, cinfo.output_height, stride);
    }
    else if ((cinfo.output_width > mWidth) || (cinfo.output_height > mHeight)) {
        CAMHAL_LOGEA(" Free the existing buffers so that they are reallocated for new w x h. Old WxH = %dx%d. New WxH = %dx%d",
        mWidth, mHeight, cinfo.output_width, cinfo.output_height);
        release();
    }

    unsigned int decoded_uv_buffer_size = cinfo.output_width * cinfo.output_height / 2;
    if (Y_Plane == NULL) Y_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
    if (U_Plane == NULL) U_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
    if (V_Plane == NULL) V_Plane = (unsigned char **)malloc(cinfo.output_height * sizeof(unsigned char *));
    if (UV_Plane == NULL) UV_Plane = (unsigned char *)malloc(decoded_uv_buffer_size);

    unsigned char **YUV_Planes[NUM_COMPONENTS_IN_YUV];
    YUV_Planes[0] = Y_Plane;
    YUV_Planes[1] = U_Plane;
    YUV_Planes[2] = V_Plane;

    unsigned char *row = &nv12_buffer[0];

    // Y Component
    for (unsigned int j = 0; j < cinfo.output_height; j++, row += stride)
        YUV_Planes[0][j] = row;

    row = &UV_Plane[0];

    // U Component
    for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
        YUV_Planes[1][j+0] = row;
        YUV_Planes[1][j+1] = row;
    }

    // V Component
    for (unsigned int j = 0; j < cinfo.output_height; j+=2, row += cinfo.output_width / 2){
        YUV_Planes[2][j+0] = row;
        YUV_Planes[2][j+1] = row;
    }

    // Decode 8 scanlines at a time: Y rows go straight into nv12_buffer, U/V rows into UV_Plane
    for (unsigned int i = 0; i < cinfo.output_height; i += 8) {
        jpeg_read_raw_data(&cinfo, YUV_Planes, 8);
        YUV_Planes[0] += 8;
        YUV_Planes[1] += 8;
        YUV_Planes[2] += 8;
    }

    // Interleave the decoded U and V samples into the NV12 chroma plane
    unsigned char *uv_ptr = nv12_buffer + (stride * cinfo.output_height);
    unsigned char *u_ptr = UV_Plane;
    unsigned char *v_ptr = UV_Plane + (decoded_uv_buffer_size / 2);
    for(unsigned int i = 0; i < cinfo.output_height / 2; i++){
        for(unsigned int j = 0; j < cinfo.output_width; j+=2){
            *(uv_ptr + j) = *u_ptr; u_ptr++;
            *(uv_ptr + j + 1) = *v_ptr; v_ptr++;
        }
        uv_ptr = uv_ptr + stride;
    }

    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);

    return true;
}
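
// Output layout recap (follows from the code above): the decoded frame ends up
// in NV12 order inside nv12_buffer - a luma plane of stride * output_height
// bytes, followed at nv12_buffer + stride * output_height by a chroma plane in
// which the U and V samples from UV_Plane are interleaved as U0 V0 U1 V1 ...,
// one chroma row for every two luma rows.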
status_t CameraArea::parseAreas(const char *area,
                                size_t areaLength,
                                Vector< sp<CameraArea> > &areas)
{
    status_t ret = NO_ERROR;
    char *ctx;
    char *pArea = NULL;
    char *pStart = NULL;
    char *pEnd = NULL;
    const char *startToken = "(";
    const char endToken = ')';
    const char sep = ',';
    ssize_t top, left, bottom, right, weight;
    char *tmpBuffer = NULL;
    sp<CameraArea> currentArea;

    LOG_FUNCTION_NAME

    if ( ( NULL == area ) ||
            ( 0 >= areaLength ) )
    {
        return -EINVAL;
    }

    tmpBuffer = ( char * ) malloc(areaLength);
    if ( NULL == tmpBuffer )
    {
        return -ENOMEM;
    }

    memcpy(tmpBuffer, area, areaLength);

    pArea = strtok_r(tmpBuffer, startToken, &ctx);

    do
    {

        pStart = pArea;
        if ( NULL == pStart )
        {
            CAMHAL_LOGEA("Parsing of the left area coordinate failed!");
            ret = -EINVAL;
            break;
        }
        else
        {
            left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
        }

        if ( sep != *pEnd )
        {
            CAMHAL_LOGEA("Parsing of the top area coordinate failed!");
            ret = -EINVAL;
            break;
        }
        else
        {
            top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
        }

        if ( sep != *pEnd )
        {
            CAMHAL_LOGEA("Parsing of the right area coordinate failed!");
            ret = -EINVAL;
            break;
        }
        else
        {
            right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
        }

        if ( sep != *pEnd )
        {
            CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!");
            ret = -EINVAL;
            break;
        }
        else
        {
            bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
        }

        if ( sep != *pEnd )
        {
            CAMHAL_LOGEA("Parsing of the weight area coordinate failed!");
            ret = -EINVAL;
            break;
        }
        else
        {
            weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
        }

        if ( endToken != *pEnd )
        {
            CAMHAL_LOGEA("Malformed area!");
            ret = -EINVAL;
            break;
        }

        ret = checkArea(top, left, bottom, right, weight);
        if ( NO_ERROR != ret )
        {
            break;
        }

        currentArea = new CameraArea(top, left, bottom, right, weight);
        CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d",
                     ( int ) top,
                     ( int ) left,
                     ( int ) bottom,
                     ( int ) right,
                     ( int ) weight);
        if ( NULL != currentArea.get() )
        {
            areas.add(currentArea);
        }
        else
        {
            ret = -ENOMEM;
            break;
        }

        pArea = strtok_r(NULL, startToken, &ctx);

    }
    while ( NULL != pArea );

    if ( NULL != tmpBuffer )
    {
        free(tmpBuffer);
    }

    LOG_FUNCTION_NAME_EXIT

    return ret;
}
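
// Usage sketch (hypothetical input, following the "(left,top,right,bottom,weight)"
// area string format that the parser above expects). The length passed includes
// the terminating NUL, matching the memcpy/strtok_r usage inside the parser:
//
//     const char *str = "(-400,-400,400,400,1000),(0,0,0,0,0)";
//     Vector< sp<CameraArea> > areas;
//     status_t err = CameraArea::parseAreas(str, strlen(str) + 1, areas);
//     // on success, areas holds two CameraArea objects parsed in order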
status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps,
                                               const CapU32Pair *cap,
                                               size_t capCount,
                                               char *buffer,
                                               char *defaultRange,
                                               size_t bufferSize) {
    status_t ret = NO_ERROR;
    uint32_t minVFR, maxVFR;
    int default_index = -1;

    LOG_FUNCTION_NAME;

    if ( (NULL == buffer) || (NULL == cap) ) {
        CAMHAL_LOGEA("Invalid input arguments");
        return -EINVAL;
    }

    if(caps.ulPrvVarFPSModesCount < 1) {
        return NO_ERROR;
    }

    // Assumption: last range in tPrvVarFPSModes will be for S30FPSHD mode
    minVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMin >> VFR_OFFSET;
    maxVFR = caps.tPrvVarFPSModes[caps.ulPrvVarFPSModesCount-1].nVarFPSMax >> VFR_OFFSET;

    if (minVFR < FPS_MIN) {
        minVFR = FPS_MIN;
    }

    for (unsigned int i = 0; i < capCount; i++) {
        // add cap[i] if it is in range and maxVFR != minVFR
        if ((maxVFR >= cap[i].num1) && (minVFR <= cap[i].num2)) {
            if (buffer[0] != '\0') {
                strncat(buffer, PARAM_SEP, bufferSize - 1);
            }
            strncat(buffer, cap[i].param, bufferSize - 1);

            // choose the max variable framerate as default
            if (cap[i].num1 != cap[i].num2) {
                default_index = i;
            }
        }
    }

    // if we haven't found any caps in the list to populate
    // just use the min and max
    if (buffer[0] == '\0') {
        snprintf(buffer, bufferSize - 1,
             "(%u,%u)",
             minVFR * CameraHal::VFR_SCALE,
             maxVFR * CameraHal::VFR_SCALE);
    }

    if (default_index != -1) {
        snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%lu,%lu",
                 cap[default_index].num2 * CameraHal::VFR_SCALE,
                 cap[default_index].num1 * CameraHal::VFR_SCALE);
    } else {
        snprintf(defaultRange, (MAX_PROP_VALUE_LENGTH - 1), "%u,%u",
                 minVFR * CameraHal::VFR_SCALE, maxVFR * CameraHal::VFR_SCALE);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
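
// Illustrative note (assumptions about the fixed-point encoding): the OMX caps
// consumed above report variable frame rates as fixed-point values, so
// nVarFPSMax >> VFR_OFFSET recovers the integer fps, and CameraHal::VFR_SCALE
// is assumed to convert fps to the milli-fps units Android expects (scale 1000).
// Under those assumptions a supported 15..30 fps variable range would be
// published to the framework as the preview-fps-range entry "(15000,30000)".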
status_t OMXCameraAdapter::doAutoFocus()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
    OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
    OMX_CONFIG_BOOLEANTYPE bOMX;
    nsecs_t timeout = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        returnFocusStatus(false);
        return -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
        CAMHAL_LOGEA("OMX component not in executing state");
        returnFocusStatus(false);
        return NO_ERROR;
        }


    if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
       CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
       return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    // If the app calls autoFocus, the camera will stop sending face callbacks.
    pauseFaceDetection(true);

    // This is needed for applying FOCUS_REGION correctly
    if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
    {
    //Disable face priority
    setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);

    //Enable region algorithm priority
    setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
    }

    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus;

    if (mParameters3A.FocusLock) {
        // this basically means user never called cancelAutoFocus after a scan...
        // if this is the case we need to unlock AF to ensure we will do a scan
        if (set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) {
            CAMHAL_LOGEA("Error Unlocking 3A locks");
        } else {
            CAMHAL_LOGDA("AE/AWB unlocked successfully");
        }

    } else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) {
        // In case we have CAF running we should first check the AF status.
        // If it has managed to lock, then do as usual and return status
        // immediately.
        ret = checkFocus(&focusStatus);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Focus status check failed 0x%x!", ret);
            return ret;
        } else {
            CAMHAL_LOGDB("Focus status check 0x%x!", focusStatus.eFocusStatus);
        }
    }

    if ( (focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
         ( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
           focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
           focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
            (mParameters3A.Focus !=  (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
        {
        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
        bOMX.bEnabled = OMX_TRUE;

        //Enable focus scanning
        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
                               &bOMX);

        // force AF, Ducati will take care of whether CAF
        // or AF will be performed, depending on light conditions
        if ( focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
             ( focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
               focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) {
            focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
        }

        if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
            {
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                    OMX_IndexConfigFocusControl,
                                    &focusControl);
            }

        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
            return INVALID_OPERATION;
        } else {
            CAMHAL_LOGDA("Autofocus started successfully");
        }

        // configure focus timeout based on capture mode
        timeout = (mCapMode == VIDEO_MODE) ?
                        ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
                        ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );

            {
            Mutex::Autolock lock(mDoAFMutex);
            ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
            }

        //If something bad happened while we wait
        if (mComponentState == OMX_StateInvalid) {
          CAMHAL_LOGEA("Invalid State after Auto Focus. Exiting!!!");
          return -EINVAL;
        }

        if(ret != NO_ERROR) {
            CAMHAL_LOGEA("Autofocus callback timeout expired");
            ret = returnFocusStatus(true);
        } else {
            ret = returnFocusStatus(false);
        }
    } else { // Focus mode in continuous
        if ( NO_ERROR == ret ) {
            ret = returnFocusStatus(true);
            mPending3Asettings |= SetFocus;
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::returnFocusStatus(bool timeoutReached)
{
    status_t ret = NO_ERROR;
    OMX_PARAM_FOCUSSTATUSTYPE eFocusStatus;
    CameraHalEvent::FocusStatus focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
    BaseCameraAdapter::AdapterState state, nextState;
    BaseCameraAdapter::getState(state);
    BaseCameraAdapter::getNextState(nextState);

    LOG_FUNCTION_NAME;

    OMX_INIT_STRUCT(eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    if( ((AF_ACTIVE & state ) != AF_ACTIVE) && ((AF_ACTIVE & nextState ) != AF_ACTIVE) )
       {
        /// We don't send focus callback if focus was not started
       CAMHAL_LOGDA("Not sending focus callback because focus was not started");
       return NO_ERROR;
       }

    if ( NO_ERROR == ret )
        {

        if ( !timeoutReached )
            {
            ret = checkFocus(&eFocusStatus);

            if ( NO_ERROR != ret )
                {
                CAMHAL_LOGEA("Focus status check failed!");
                }
            }
        }

    if ( NO_ERROR == ret )
        {

        if ( timeoutReached )
            {
            focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
            }
        else
            {
            switch (eFocusStatus.eFocusStatus)
                {
                    case OMX_FocusStatusReached:
                        {
                        focusStatus = CameraHalEvent::FOCUS_STATUS_SUCCESS;
                        break;
                        }
                    case OMX_FocusStatusOff: // AF got canceled
                        return NO_ERROR;
                    case OMX_FocusStatusUnableToReach:
                    case OMX_FocusStatusRequest:
                    default:
                        {
                        focusStatus = CameraHalEvent::FOCUS_STATUS_FAIL;
                        break;
                        }
                }
            // Lock CAF after AF call
            if( set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_TRUE) != NO_ERROR) {
                CAMHAL_LOGEA("Error Applying 3A locks");
            } else {
                CAMHAL_LOGDA("Focus locked. Applied focus locks successfully");
            }
            stopAutoFocus();
            }

        //Query current focus distance after AF is complete
        updateFocusDistances(mParameters);
       }

    ret =  BaseCameraAdapter::setState(CAMERA_CANCEL_AUTOFOCUS);
    if ( NO_ERROR == ret )
        {
        ret = BaseCameraAdapter::commitState();
        }
    else
        {
        ret |= BaseCameraAdapter::rollbackState();
        }

    if ( NO_ERROR == ret )
        {
        notifyFocusSubscribers(focusStatus);
        }

    // After focus, face detection will resume sending face callbacks
    pauseFaceDetection(false);

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::setTouchFocus()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    OMX_ALGOAREASTYPE **focusAreas;
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
    MemoryManager memMgr;
    int areasSize = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -1;
        }

    if ( NO_ERROR == ret )
        {

        areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
        focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);

        OMXCameraPortParameters * mPreviewData = NULL;
        mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

        if (!focusAreas)
            {
            CAMHAL_LOGEB("Error allocating buffer for focus areas %d", eError);
            return -ENOMEM;
            }

        OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);

        focusAreas[0]->nPortIndex = OMX_ALL;
        focusAreas[0]->nNumAreas = mFocusAreas.size();
        focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;

        // If the area is the special case of (0, 0, 0, 0, 0), then
        // the algorithm needs nNumAreas to be set to 0,
        // in order to automatically choose the best fitting areas.
        if ( mFocusAreas.itemAt(0)->isZeroArea() )
            {
            focusAreas[0]->nNumAreas = 0;
            }

        for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
            {
            // transform the coordinates to 3A-type coordinates
            mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth,
                                            (size_t)mPreviewData->mHeight,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nTop,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nLeft,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nWidth,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nHeight);

            focusAreas[0]->tAlgoAreas[n].nLeft =
                    ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nTop =
                    ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nWidth =
                    ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nHeight =
                    ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();

             CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
                    n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
                    (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
                    (int)focusAreas[0]->tAlgoAreas[n].nPriority);
             }

        OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);

        sharedBuffer.nPortIndex = OMX_ALL;
        sharedBuffer.nSharedBuffSize = areasSize;
        sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];

        if ( NULL == sharedBuffer.pSharedBuff )
            {
            CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
            ret = -ENOMEM;
            goto EXIT;
            }

            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                      (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer);

        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError);
            ret = -EINVAL;
            }

    EXIT:
        if (NULL != focusAreas)
            {
            memMgr.freeBuffer((void*) focusAreas);
            focusAreas = NULL;
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
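
// Worked example of the scaling above (illustrative numbers, and assuming
// TOUCH_FOCUS_RANGE == 255): with a 1280x720 preview, after transfrom() has
// mapped an area to preview-pixel coordinates, a region whose nLeft is 640
// (the horizontal centre) becomes 640 * 255 / 1280 = 127, i.e. the midpoint of
// the algorithm's 0..TOUCH_FOCUS_RANGE space; widths and heights scale the
// same way.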