status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
{
    OMX_U32 focusNear, focusOptimal, focusFar;
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    ret = getFocusDistances(focusNear, focusOptimal, focusFar);
    if ( NO_ERROR == ret)
        {
        ret = addFocusDistances(focusNear, focusOptimal, focusFar, params);
            if ( NO_ERROR != ret )
                {
                CAMHAL_LOGEB("Error in call to addFocusDistances() 0x%x", ret);
                }
        }
    else
        {
        CAMHAL_LOGEB("Error in call to getFocusDistances() 0x%x", ret);
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
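// Queries the OMX camera component for its capability structure
// (OMX_TI_CAPTYPE) through a 4K-aligned shared ion/tiler buffer and, on
// success, translates the result into CameraProperties via insertCapabilities().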
status_t OMXCameraAdapter::getCaps(CameraProperties::Properties* params, OMX_HANDLETYPE handle) {
    status_t ret = NO_ERROR;
    int caps_size = 0;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_TI_CAPTYPE** caps = NULL;
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
    MemoryManager memMgr;

    LOG_FUNCTION_NAME;

    // allocate tiler (or ion) buffer for caps (size is always a multiple of 4K)
    caps_size = ((sizeof(OMX_TI_CAPTYPE)+4095)/4096)*4096;
    caps = (OMX_TI_CAPTYPE**) memMgr.allocateBuffer(0, 0, NULL, caps_size, 1);

    if (!caps) {
        CAMHAL_LOGEB("Error allocating buffer for caps %d", eError);
        ret = -ENOMEM;
        goto EXIT;
    }

    // initialize structures to be passed to OMX Camera
    OMX_INIT_STRUCT_PTR (caps[0], OMX_TI_CAPTYPE);
    caps[0]->nPortIndex = OMX_ALL;

    OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);
    sharedBuffer.nPortIndex = OMX_ALL;
    sharedBuffer.nSharedBuffSize = caps_size;
    sharedBuffer.pSharedBuff = (OMX_U8 *) caps[0];

    // Get capabilities from OMX Camera
    CAMHAL_LOGEB("Calling OMX_GetConfig() for OMX_TI_IndexConfigCamCapabilities %d", 0);
    /* FIXME-HASH: Fix this */
    eError =  OMX_GetConfig(handle, (OMX_INDEXTYPE) OMX_TI_IndexConfigCamCapabilities, &sharedBuffer);
    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error during capabilities query 0x%x", eError);
        /* FIXME-HASH: Removed the query as it will fail for GB syslink */
        // ret = UNKNOWN_ERROR;
        // goto EXIT;
    } else {
        CAMHAL_LOGDA("OMX capability query success");
    }

    // Translate and insert Ducati capabilities to CameraProperties
    if ( NO_ERROR == ret ) {
        ret = insertCapabilities(params, *caps[0]);
    }

    CAMHAL_LOGDB("sen mount id=%u", (unsigned int)caps[0]->tSenMounting.nSenId);


 EXIT:
    if (caps) {
        memMgr.freeBuffer((void*) caps);
        caps = NULL;
    }

    LOG_FUNCTION_NAME_EXIT;
    return ret;
}
status_t CameraArea::checkArea(ssize_t top,
                               ssize_t left,
                               ssize_t bottom,
                               ssize_t right,
                               ssize_t weight)
{

    //Handles the invalid region corner case.
    if ( ( 0 == top ) && ( 0 == left ) && ( 0 == bottom ) && ( 0 == right ) && ( 0 == weight ) )
    {
        return NO_ERROR;
    }

    if ( ( CameraArea::WEIGHT_MIN > weight ) ||  ( CameraArea::WEIGHT_MAX < weight ) )
    {
        CAMHAL_LOGEB("Camera area weight is invalid %d", static_cast<int>(weight));
        return -EINVAL;
    }

    if ( ( CameraArea::TOP > top ) || ( CameraArea::BOTTOM < top ) )
    {
        CAMHAL_LOGEB("Camera area top coordinate is invalid %d", static_cast<int>(top) );
        return -EINVAL;
    }

    if ( ( CameraArea::TOP > bottom ) || ( CameraArea::BOTTOM < bottom ) )
    {
        CAMHAL_LOGEB("Camera area bottom coordinate is invalid %d", static_cast<int>(bottom) );
        return -EINVAL;
    }

    if ( ( CameraArea::LEFT > left ) || ( CameraArea::RIGHT < left ) )
    {
        CAMHAL_LOGEB("Camera area left coordinate is invalid %d", static_cast<int>(left) );
        return -EINVAL;
    }

    if ( ( CameraArea::LEFT > right ) || ( CameraArea::RIGHT < right ) )
    {
        CAMHAL_LOGEB("Camera area right coordinate is invalid %d", static_cast<int>(right) );
        return -EINVAL;
    }

    if ( left >= right )
    {
        CAMHAL_LOGEA("Camera area left larger than right");
        return -EINVAL;
    }

    if ( top >= bottom )
    {
        CAMHAL_LOGEA("Camera area top larger than bottom");
        return -EINVAL;
    }

    return NO_ERROR;
}
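// Minimal usage sketch for checkArea() (illustrative coordinates and weight
// only, assuming the usual -1000..1000 area range and 1..1000 weight range
// behind the class constants referenced above):
//
//     status_t err = CameraArea::checkArea(-200, -200, 200, 200, 500);
//     if ( NO_ERROR != err ) {
//         CAMHAL_LOGEA("Rejecting malformed camera area");
//     }

// Encodes the near/optimal/far focus distances into strings and publishes
// them as the comma-separated KEY_FOCUS_DISTANCES parameter.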
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
                                             OMX_U32 &optimal,
                                             OMX_U32 &far,
                                             CameraParameters& params)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(near, mFocusDistNear, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(optimal, mFocusDistOptimal, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(far, mFocusDistFar, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        snprintf(mFocusDistBuffer, ( FOCUS_DIST_BUFFER_SIZE - 1) ,"%s,%s,%s", mFocusDistNear,
                                                                              mFocusDistOptimal,
                                                                              mFocusDistFar);

        params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertVideoSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    ret = encodeSizeCap(caps.tPreviewResRange,
                        mPreviewRes,
                        ARRAY_SIZE(mPreviewRes),
                        supported,
                        MAX_PROP_VALUE_LENGTH);

    if ( NO_ERROR != ret ) {
      CAMHAL_LOGEB("Error inserting supported video sizes 0x%x", ret);
    } else {
      remove_last_sep(supported);
      params->set(CameraProperties::SUPPORTED_VIDEO_SIZES, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
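// Publishes the supported variable-framerate ranges (as scaled "(min,max)"
// integer pairs) together with the default image/video framerate ranges.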
status_t OMXCameraAdapter::insertVFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];
    char defaultRange[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    ret = encodeVFramerateCap(caps,
                              mVarFramerates,
                              ARRAY_SIZE(mVarFramerates),
                              supported,
                              defaultRange,
                              MAX_PROP_VALUE_LENGTH);

    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Error inserting supported preview framerate ranges 0x%x", ret);
    } else {
        params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, supported);
        CAMHAL_LOGDB("framerate ranges %s", supported);
        params->set(CameraProperties::FRAMERATE_RANGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
        params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, DEFAULT_FRAMERATE_RANGE_VIDEO);
        params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, DEFAULT_FRAMERATE_RANGE_IMAGE);
        CAMHAL_LOGDB("Default framerate range: [%s]", DEFAULT_FRAMERATE_RANGE_IMAGE);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    ret = encodeFramerateCap(caps.xFramerateMax >> VFR_OFFSET,
                             caps.xFramerateMin >> VFR_OFFSET,
                             mFramerates,
                             ARRAY_SIZE(mFramerates),
                             supported,
                             MAX_PROP_VALUE_LENGTH);

    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Error inserting supported preview framerates 0x%x", ret);
    } else {
        params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    for ( int i = 0 ; i < caps.ulPreviewFormatCount; i++ ) {
        ret = encodePixelformatCap(caps.ePreviewFormats[i],
                                   mPixelformats,
                                   ARRAY_SIZE(mPixelformats),
                                   supported,
                                   MAX_PROP_VALUE_LENGTH);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Error inserting supported preview formats 0x%x", ret);
            break;
        }
    }

    if ( NO_ERROR == ret ) {
        // need to advertise we support YV12 format
        // We will program preview port with NV21 when we see application set YV12
        strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
        params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertImageFormats(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    for ( int i = 0 ; i < caps.ulImageFormatCount ; i++ ) {
        ret = encodePixelformatCap(caps.eImageFormats[i],
                                   mPixelformats,
                                   ARRAY_SIZE(mPixelformats),
                                   supported,
                                   MAX_PROP_VALUE_LENGTH);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Error inserting supported picture formats 0x%x", ret);
            break;
        }
    }

    if ( NO_ERROR == ret ) {
        //jpeg is not supported in OMX capabilies yet
        strncat(supported, CameraParameters::PIXEL_FORMAT_JPEG, MAX_PROP_VALUE_LENGTH - 1);
        params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertThumbSizes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    ret = encodeSizeCap(caps.tThumbResRange,
                        mThumbRes,
                        ARRAY_SIZE(mThumbRes),
                        supported,
                        MAX_PROP_VALUE_LENGTH);

    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Error inserting supported thumbnail sizes 0x%x", ret);
    } else {
        //CTS Requirement: 0x0 should always be supported
        strncat(supported, "0x0", MAX_PROP_NAME_LENGTH);
        params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focusMode)
{
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState ) {
        CAMHAL_LOGEA("OMX component is in invalid state");
        return NO_INIT;
    }

    OMX_INIT_STRUCT_PTR (&focusMode, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusMode.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;

    eError =  OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                            OMX_IndexConfigFocusControl,
                            &focusMode);

    if ( OMX_ErrorNone != eError ) {
        CAMHAL_LOGEB("Error while retrieving focus mode 0x%x", eError);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ErrorUtils::omxToAndroidError(eError);
}
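// Converts the variable-framerate modes reported in the OMX capabilities into
// a "(min,max)" range list, skipping duplicate entries and fixed-rate modes
// (min == max). Each fps value is shifted down by VFR_OFFSET and scaled by
// CameraHal::VFR_SCALE before being appended to the output buffer.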
status_t OMXCameraAdapter::encodeVFramerateCap(OMX_TI_CAPTYPE &caps, char * buffer, size_t bufferSize)
{
    status_t ret = NO_ERROR;
    uint32_t minVFR, maxVFR;
    char tmpBuffer[MAX_PROP_VALUE_LENGTH];
    bool skipLast = false;

    LOG_FUNCTION_NAME;

    if ( NULL == buffer )
        {
        CAMHAL_LOGEA("Invalid input arguments");
        ret = -EINVAL;
        }

    if ( NO_ERROR == ret )
        {
        unsigned int count = caps.ulPrvVarFPSModesCount;
        if ( count > 10 )
        {
            count = 10;
        }
        for ( unsigned int i = 0 ; i < count ; i++ )
            {

            if ( 0 < i )
                {
                if ( ( caps.tPrvVarFPSModes[i-1].nVarFPSMin == caps.tPrvVarFPSModes[i].nVarFPSMin ) &&
                     ( caps.tPrvVarFPSModes[i-1].nVarFPSMax == caps.tPrvVarFPSModes[i].nVarFPSMax ) )
                    {
                    continue;
                    }
                else if (!skipLast)
                    {
                    strncat(buffer, PARAM_SEP, bufferSize - 1);
                    }
                }
            if ( caps.tPrvVarFPSModes[i].nVarFPSMin == caps.tPrvVarFPSModes[i].nVarFPSMax )
                {
                skipLast = true;
                continue;
                }
            else
                {
                skipLast = false;
                }

            CAMHAL_LOGEB("Min fps 0x%x, Max fps 0x%x", ( unsigned int ) caps.tPrvVarFPSModes[i].nVarFPSMin,
                                                       ( unsigned int ) caps.tPrvVarFPSModes[i].nVarFPSMax);

            minVFR = caps.tPrvVarFPSModes[i].nVarFPSMin >> VFR_OFFSET;
            minVFR *= CameraHal::VFR_SCALE;
            maxVFR = caps.tPrvVarFPSModes[i].nVarFPSMax >> VFR_OFFSET;
            maxVFR *= CameraHal::VFR_SCALE;
            snprintf(tmpBuffer, ( MAX_PROP_VALUE_LENGTH - 1 ), "(%d,%d)", minVFR, maxVFR);
            strncat(buffer, tmpBuffer, ( bufferSize - 1 ));
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
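// Enables or disables the OMX callback request for
// OMX_IndexConfigCommonFocusStatus, so that autofocus status changes are
// delivered to the adapter as OMX events.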
status_t OMXCameraAdapter::setFocusCallback(bool enabled)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_CONFIG_CALLBACKREQUESTTYPE focusRequstCallback;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        ret = -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
          CAMHAL_LOGEA("OMX component not in executing state");
        ret = NO_ERROR;
        }

    if ( NO_ERROR == ret )
        {

        OMX_INIT_STRUCT_PTR (&focusRequstCallback, OMX_CONFIG_CALLBACKREQUESTTYPE);
        focusRequstCallback.nPortIndex = OMX_ALL;
        focusRequstCallback.nIndex = OMX_IndexConfigCommonFocusStatus;

        if ( enabled )
            {
            focusRequstCallback.bEnable = OMX_TRUE;
            }
        else
            {
            focusRequstCallback.bEnable = OMX_FALSE;
            }

        eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                (OMX_INDEXTYPE) OMX_IndexConfigCallbackRequest,
                                &focusRequstCallback);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error registering focus callback 0x%x", eError);
            ret = -1;
            }
        else
            {
            CAMHAL_LOGDB("Autofocus callback for index 0x%x registered successfully",
                         OMX_IndexConfigCommonFocusStatus);
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
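// Drains pending sensor events and, for accelerometer samples, derives the
// device tilt and a quantized orientation (0/90/180/270 degrees), which are
// forwarded to the owning SensorListener; gyroscope events are only logged.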
static int sensor_events_listener(int fd, int events, void* data)
{
    SensorListener* listener = (SensorListener*) data;
    ssize_t num_sensors;
    ASensorEvent sen_events[8];
    while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
        for (int i = 0; i < num_sensors; i++) {
            if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
                float x = sen_events[i].vector.azimuth;
                float y = sen_events[i].vector.pitch;
                float z = sen_events[i].vector.roll;
                float radius = 0;
                int tilt = 0, orient = 0;

                CAMHAL_LOGVA("ACCELEROMETER EVENT");
                CAMHAL_LOGVB(" azimuth = %f pitch = %f roll = %f",
                             sen_events[i].vector.azimuth,
                             sen_events[i].vector.pitch,
                             sen_events[i].vector.roll);
                // see http://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
                // about conversion from cartesian to spherical for orientation calculations
                radius = (float) sqrt(x * x + y * y + z * z);
                tilt = (int) (asinf(z / radius) * RADIANS_2_DEG);
                orient = (int) (atan2f(-x, y) * RADIANS_2_DEG);

                if (orient < 0) {
                    orient += 360;
                }

                if (orient >= DEGREES_270_THRESH) {
                    orient = 270;
                } else if (orient >= DEGREES_180_THRESH) {
                    orient = 180;
                } else if (orient >= DEGREES_90_THRESH) {
                    orient = 90;
                } else {
                    orient = 0;
                }
                listener->handleOrientation(orient, tilt);
                CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
            } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
                CAMHAL_LOGVA("GYROSCOPE EVENT");
            }
        }
    }

    if (num_sensors < 0 && num_sensors != -EAGAIN) {
        CAMHAL_LOGEB("reading events failed: %s", strerror(-num_sensors));
    }

    return 1;
}
status_t OMXCameraAdapter::stopAutoFocus()
{
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        returnFocusStatus(false);
        return -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
          CAMHAL_LOGEA("OMX component not in executing state");
        return NO_ERROR;
        }

    if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAutoInfinity ) {
        // No need to stop focus if we are in infinity mode. Nothing to stop.
        return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusControl.eFocusControl = OMX_IMAGE_FocusControlOff;

    eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                            OMX_IndexConfigFocusControl,
                            &focusControl);
    if ( OMX_ErrorNone != eError )
        {
        CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
        return ErrorUtils::omxToAndroidError(eError);
    } else {
        // This is a WA. Usually the OMX Camera component should
        // generate AF status change OMX event fairly quickly
        // ( after one preview frame ) and this notification should
        // actually come from 'handleFocusCallback()'.
        Mutex::Autolock lock(mDoAFMutex);
        mDoAFCond.broadcast();
    }


    LOG_FUNCTION_NAME_EXIT;

    return NO_ERROR;
}
status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    LOG_FUNCTION_NAME;

    if ( NULL == eFocusStatus )
        {
        CAMHAL_LOGEA("Invalid focus status");
        ret = -EINVAL;
        }

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        ret = -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
        CAMHAL_LOGEA("OMX component not in executing state");
        ret = NO_ERROR;
        }

    if ( NO_ERROR == ret )
        {
        OMX_INIT_STRUCT_PTR (eFocusStatus, OMX_PARAM_FOCUSSTATUSTYPE);
        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                               OMX_IndexConfigCommonFocusStatus,
                               eFocusStatus);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while retrieving focus status: 0x%x", eError);
            ret = -1;
            }
        }

    if ( NO_ERROR == ret )
        {
        CAMHAL_LOGDB("Focus Status: %d", eFocusStatus->eFocusStatus);
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::setFaceDetectionOrientation(int orientation)
{
    status_t ret = NO_ERROR;
#ifdef CAMERA_VCE_OMX_FD
    Mutex::Autolock lock(mFaceDetectionLock);
    if(!mFaceDetectionRunning || mFaceDetectionPaused)
    {
        return ret;
    }
    ret = mVceFaceDetect->setFaceDetectInfo(mDeviceType, orientation);
    if(ret < 0)
    {
        CAMHAL_LOGEB("setFaceDetectInfo error, %d,%d\n",mDeviceType, mDeviceOrientation);
    }
#endif
    return ret;
}
status_t OMXCameraAdapter::startFaceDetection()
{
    status_t ret = NO_ERROR;
    Mutex::Autolock lock(mFaceDetectionLock);
    if(mFaceDetectionRunning)
    {
        return ret;
    }

    //add this for cts, if not support fd, just return error
    if(atoi(mCapabilities->get(CameraProperties::MAX_FD_HW_FACES))==0)
    {
        ret = BAD_VALUE;
        return ret;
    }

#ifdef CAMERA_VCE_OMX_FD
    OMXCameraPortParameters *cap;
    cap = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

    ret = mVceFaceDetect->setFaceDetectInfo(mDeviceType, mDeviceOrientation);
    if(ret < 0)
    {
        CAMHAL_LOGEB("setFaceDetectInfo error, %d,%d\n",mDeviceType, mDeviceOrientation);
    }

    ret = mVceFaceDetect->setImageSize(cap->mWidth, cap->mHeight);
    ret |= mVceFaceDetect->setOutputImageSize(cap->mWidth, cap->mHeight);
    ret |= mVceFaceDetect->setImageFormat(cap->mColorFormat);
    ret |= mVceFaceDetect->setImageInputCnt((OMX_U32)cap->mNumBufs);
    ret |= mVceFaceDetect->useBuffers(OMXVceImageInputPort, cap->mBufSize,reinterpret_cast<void **>(&(cap->mHostBufaddr)),cap->mNumBufs);
    ret |= mVceFaceDetect->startFaceDetect();

    if ( NULL != mFDFrameProvider )
    {
        mFDFrameProvider->enableFrameNotification(CameraFrame::FD_FRAME_SYNC);
    }

#endif
    mFaceDetectionRunning = true;
out:
    return ret;
}
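// Reads the near/optimal/far focus distances from the OMX camera component
// via OMX_TI_IndexConfigFocusDistance on the preview port.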
status_t OMXCameraAdapter::getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far)
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError;

    OMX_TI_CONFIG_FOCUSDISTANCETYPE focusDist;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = UNKNOWN_ERROR;
        }

    if ( NO_ERROR == ret )
        {
        OMX_INIT_STRUCT_PTR(&focusDist, OMX_TI_CONFIG_FOCUSDISTANCETYPE);
        focusDist.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;

        eError = OMX_GetConfig(mCameraAdapterParameters.mHandleComp,
                               ( OMX_INDEXTYPE ) OMX_TI_IndexConfigFocusDistance,
                               &focusDist);
        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while querying focus distances 0x%x", eError);
            ret = UNKNOWN_ERROR;
            }

        }

    if ( NO_ERROR == ret )
        {
        near = focusDist.nFocusDistanceNear;
        optimal = focusDist.nFocusDistanceOptimal;
        far = focusDist.nFocusDistanceFar;
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
    status_t ret = NO_ERROR;
    Mutex::Autolock lock(mFaceDetectionLock);
    // pausing will only take affect if fd is already running
    if (mFaceDetectionRunning)
    {
        mFaceDetectionPaused = pause;
#ifdef CAMERA_VCE_OMX_FD
        if(!mFaceDetectionPaused)
        {
            ret = mVceFaceDetect->setFaceDetectInfo(mDeviceType, mDeviceOrientation);
            if(ret < 0)
            {
                CAMHAL_LOGEB("setFaceDetectInfo error, %d,%d\n",mDeviceType, mDeviceOrientation);
            }
            
        }
#endif
    }
}
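// Parses KEY_FOCUS_AREAS, validates the count against
// KEY_MAX_NUM_FOCUS_AREAS and, if the areas changed, applies them through
// setTouchFocus().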
status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
                                              BaseCameraAdapter::AdapterState state)
{
    status_t ret = NO_ERROR;
    const char *str = NULL;
    Vector< sp<CameraArea> > tempAreas;
    size_t MAX_FOCUS_AREAS;

    LOG_FUNCTION_NAME;

    Mutex::Autolock lock(mFocusAreasLock);

    str = params.get(CameraParameters::KEY_FOCUS_AREAS);

    MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));

    if ( NULL != str ) {
        ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
    }

    if ( (NO_ERROR == ret) && CameraArea::areAreasDifferent(mFocusAreas, tempAreas) ) {
        mFocusAreas.clear();
        mFocusAreas = tempAreas;
        if ( MAX_FOCUS_AREAS < mFocusAreas.size() ) {
            CAMHAL_LOGEB("Focus areas supported %d, focus areas set %d",
                         MAX_FOCUS_AREAS,
                         mFocusAreas.size());
            ret = -EINVAL;
        }
        else {
            if ( !mFocusAreas.isEmpty() ) {
                setTouchFocus();
            }
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
status_t OMXCameraAdapter::insertISOModes(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
    status_t ret = NO_ERROR;
    char supported[MAX_PROP_VALUE_LENGTH];

    LOG_FUNCTION_NAME;

    memset(supported, '\0', MAX_PROP_VALUE_LENGTH);

    ret = encodeISOCap(caps.nSensitivityMax,
                       mISOStages,
                       ARRAY_SIZE(mISOStages),
                       supported,
                       MAX_PROP_VALUE_LENGTH);
    if ( NO_ERROR != ret ) {
        CAMHAL_LOGEB("Error inserting supported ISO modes 0x%x", ret);
    } else {
        params->set(CameraProperties::SUPPORTED_ISO_VALUES, supported);
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
/*--------------------MemoryManager Class STARTS here-----------------------------*/
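// allocateBuffer() returns a NULL-terminated array of 1D ion buffer pointers
// (2D tiler allocations are not implemented here). Minimal caller-side
// sketch with illustrative values only:
//
//     MemoryManager memMgr;
//     int bufSize = 4096;   // illustrative size; callers pass their real frame size
//     void **bufs = (void **) memMgr.allocateBuffer(0, 0, NULL, bufSize, 3);
//     if ( NULL != bufs ) {
//         // ... use bufs[0..2] ...
//         memMgr.freeBuffer((void *) bufs);
//     }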
void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
{
    LOG_FUNCTION_NAME;

    if(mIonFd == 0)
        {
        mIonFd = ion_open();
        if(mIonFd == 0)
            {
            CAMHAL_LOGEA("ion_open failed!!!");
            return NULL;
            }
        }

    ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
    ///the buffers
    const uint numArrayEntriesC = (uint)(numBufs+1);

    ///Allocate a buffer array
    uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
    if(!bufsArr)
        {
        CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
        LOG_FUNCTION_NAME_EXIT;
        return NULL;
        }

    ///Initialize the array with zeros - this will help us while freeing the array in case of error
    ///If a value of an array element is NULL, it means we didnt allocate it
    memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);

    //2D Allocations are not supported currently
    if(bytes != 0)
        {
        struct ion_handle *handle;
        int mmap_fd;

        ///1D buffers
        for (int i = 0; i < numBufs; i++)
            {
            int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
            if(ret < 0)
                {
                CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
                goto error;
                }

            CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
            if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
                          (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
                {
                CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
                ion_free(mIonFd, handle);
                goto error;
                }

            mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
            mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
            mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
            }

        }
    else // bytes is zero, which would be a 2-D tiler buffer request
        {
        }

    LOG_FUNCTION_NAME_EXIT;

    return (void*)bufsArr;

error:
    LOGE("Freeing buffers already allocated after error occurred");
    freeBuffer(bufsArr);

    if ( NULL != mErrorNotifier.get() )
        {
        mErrorNotifier->errorNotify(-ENOMEM);
        }

    LOG_FUNCTION_NAME_EXIT;
    return NULL;
}
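// Runs a single autofocus cycle: face callbacks are paused, region priority
// is applied when focus areas are set, a locked 3A state is released if
// needed, the focus scan is triggered and the function then waits (with a
// capture-mode dependent timeout) for the AF status callback before
// reporting the result.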
status_t OMXCameraAdapter::doAutoFocus()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;
    OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE focusControl;
    OMX_PARAM_FOCUSSTATUSTYPE focusStatus;
    OMX_CONFIG_BOOLEANTYPE bOMX;
    nsecs_t timeout = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
      {
        CAMHAL_LOGEA("OMX component in Invalid state");
        returnFocusStatus(false);
        return -EINVAL;
      }

    if ( OMX_StateExecuting != mComponentState )
        {
        CAMHAL_LOGEA("OMX component not in executing state");
        returnFocusStatus(false);
        return NO_ERROR;
        }


    if( ((AF_ACTIVE & getState()) != AF_ACTIVE) && ((AF_ACTIVE & getNextState()) != AF_ACTIVE) ) {
       CAMHAL_LOGDA("Auto focus got canceled before doAutoFocus could be called");
       return NO_ERROR;
    }

    OMX_INIT_STRUCT_PTR (&focusStatus, OMX_PARAM_FOCUSSTATUSTYPE);

    // If the app calls autoFocus, the camera will stop sending face callbacks.
    pauseFaceDetection(true);

    // This is needed for applying FOCUS_REGION correctly
    if ( (!mFocusAreas.isEmpty()) && (!mFocusAreas.itemAt(0)->isZeroArea()))
    {
    //Disable face priority
    setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO, false);

    //Enable region algorithm priority
    setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, true);
    }

    OMX_INIT_STRUCT_PTR (&focusControl, OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE);
    focusControl.eFocusControl = ( OMX_IMAGE_FOCUSCONTROLTYPE ) mParameters3A.Focus;

    if (mParameters3A.FocusLock) {
        // this basically means user never called cancelAutoFocus after a scan...
        // if this is the case we need to unlock AF to ensure we will do a scan
        if (set3ALock(mUserSetExpLock, mUserSetWbLock, OMX_FALSE) != NO_ERROR) {
            CAMHAL_LOGEA("Error Unlocking 3A locks");
        } else {
            CAMHAL_LOGDA("AE/AWB unlocked successfully");
        }

    } else if ( mParameters3A.Focus == OMX_IMAGE_FocusControlAuto ) {
        // In case we have CAF running we should first check the AF status.
        // If it has managed to lock, then do as usual and return status
        // immediately.
        ret = checkFocus(&focusStatus);
        if ( NO_ERROR != ret ) {
            CAMHAL_LOGEB("Focus status check failed 0x%x!", ret);
            return ret;
        } else {
            CAMHAL_LOGDB("Focus status check 0x%x!", focusStatus.eFocusStatus);
        }
    }

    if ( (focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
         ( focusStatus.eFocusStatus == OMX_FocusStatusRequest ||
           focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
           focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) ||
            (mParameters3A.Focus !=  (OMX_IMAGE_FOCUSCONTROLTYPE)OMX_IMAGE_FocusControlAuto) )
        {
        OMX_INIT_STRUCT_PTR (&bOMX, OMX_CONFIG_BOOLEANTYPE);
        bOMX.bEnabled = OMX_TRUE;

        //Enable focus scanning
        eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                               (OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
                               &bOMX);

        // force AF, Ducati will take care of whether CAF
        // or AF will be performed, depending on light conditions
        if ( focusControl.eFocusControl == OMX_IMAGE_FocusControlAuto &&
             ( focusStatus.eFocusStatus == OMX_FocusStatusUnableToReach ||
               focusStatus.eFocusStatus == OMX_FocusStatusLost ) ) {
            focusControl.eFocusControl = OMX_IMAGE_FocusControlAutoLock;
        }

        if ( focusControl.eFocusControl != OMX_IMAGE_FocusControlAuto )
            {
            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                    OMX_IndexConfigFocusControl,
                                    &focusControl);
            }

        if ( OMX_ErrorNone != eError ) {
            CAMHAL_LOGEB("Error while starting focus 0x%x", eError);
            return INVALID_OPERATION;
        } else {
            CAMHAL_LOGDA("Autofocus started successfully");
        }

        // configure focus timeout based on capture mode
        timeout = (mCapMode == VIDEO_MODE) ?
                        ( ( nsecs_t ) AF_VIDEO_CALLBACK_TIMEOUT * 1000 ) :
                        ( ( nsecs_t ) AF_IMAGE_CALLBACK_TIMEOUT * 1000 );

            {
            Mutex::Autolock lock(mDoAFMutex);
            ret = mDoAFCond.waitRelative(mDoAFMutex, timeout);
            }

        //If something bad happened while we were waiting
        if (mComponentState == OMX_StateInvalid) {
          CAMHAL_LOGEA("Invalid state after autofocus, exiting!");
          return -EINVAL;
        }

        if(ret != NO_ERROR) {
            CAMHAL_LOGEA("Autofocus callback timeout expired");
            ret = returnFocusStatus(true);
        } else {
            ret = returnFocusStatus(false);
        }
    } else { // Focus mode in continuous
        if ( NO_ERROR == ret ) {
            ret = returnFocusStatus(true);
            mPending3Asettings |= SetFocus;
        }
    }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
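// Packs the currently configured focus areas into an OMX_ALGOAREASTYPE
// shared buffer, scales the coordinates to TOUCH_FOCUS_RANGE and pushes the
// result to the component via OMX_TI_IndexConfigAlgoAreas.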
status_t OMXCameraAdapter::setTouchFocus()
{
    status_t ret = NO_ERROR;
    OMX_ERRORTYPE eError = OMX_ErrorNone;

    OMX_ALGOAREASTYPE **focusAreas;
    OMX_TI_CONFIG_SHAREDBUFFER sharedBuffer;
    MemoryManager memMgr;
    int areasSize = 0;

    LOG_FUNCTION_NAME;

    if ( OMX_StateInvalid == mComponentState )
        {
        CAMHAL_LOGEA("OMX component is in invalid state");
        ret = -1;
        }

    if ( NO_ERROR == ret )
        {

        areasSize = ((sizeof(OMX_ALGOAREASTYPE)+4095)/4096)*4096;
        focusAreas = (OMX_ALGOAREASTYPE**) memMgr.allocateBuffer(0, 0, NULL, areasSize, 1);

        OMXCameraPortParameters * mPreviewData = NULL;
        mPreviewData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mPrevPortIndex];

        if (!focusAreas)
            {
            CAMHAL_LOGEB("Error allocating buffer for focus areas %d", eError);
            return -ENOMEM;
            }

        OMX_INIT_STRUCT_PTR (focusAreas[0], OMX_ALGOAREASTYPE);

        focusAreas[0]->nPortIndex = OMX_ALL;
        focusAreas[0]->nNumAreas = mFocusAreas.size();
        focusAreas[0]->nAlgoAreaPurpose = OMX_AlgoAreaFocus;

        // If the area is the special case of (0, 0, 0, 0, 0), then
        // the algorithm needs nNumAreas to be set to 0,
        // in order to automatically choose the best fitting areas.
        if ( mFocusAreas.itemAt(0)->isZeroArea() )
            {
            focusAreas[0]->nNumAreas = 0;
            }

        for ( unsigned int n = 0; n < mFocusAreas.size(); n++)
            {
            // transform the coordinates to 3A-type coordinates
            mFocusAreas.itemAt(n)->transfrom((size_t)mPreviewData->mWidth,
                                            (size_t)mPreviewData->mHeight,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nTop,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nLeft,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nWidth,
                                            (size_t&)focusAreas[0]->tAlgoAreas[n].nHeight);

            focusAreas[0]->tAlgoAreas[n].nLeft =
                    ( focusAreas[0]->tAlgoAreas[n].nLeft * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nTop =
                    ( focusAreas[0]->tAlgoAreas[n].nTop* TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nWidth =
                    ( focusAreas[0]->tAlgoAreas[n].nWidth * TOUCH_FOCUS_RANGE ) / mPreviewData->mWidth;
            focusAreas[0]->tAlgoAreas[n].nHeight =
                    ( focusAreas[0]->tAlgoAreas[n].nHeight * TOUCH_FOCUS_RANGE ) / mPreviewData->mHeight;
            focusAreas[0]->tAlgoAreas[n].nPriority = mFocusAreas.itemAt(n)->getWeight();

             CAMHAL_LOGDB("Focus area %d : top = %d left = %d width = %d height = %d prio = %d",
                    n, (int)focusAreas[0]->tAlgoAreas[n].nTop, (int)focusAreas[0]->tAlgoAreas[n].nLeft,
                    (int)focusAreas[0]->tAlgoAreas[n].nWidth, (int)focusAreas[0]->tAlgoAreas[n].nHeight,
                    (int)focusAreas[0]->tAlgoAreas[n].nPriority);
             }

        OMX_INIT_STRUCT_PTR (&sharedBuffer, OMX_TI_CONFIG_SHAREDBUFFER);

        sharedBuffer.nPortIndex = OMX_ALL;
        sharedBuffer.nSharedBuffSize = areasSize;
        sharedBuffer.pSharedBuff = (OMX_U8 *) focusAreas[0];

        if ( NULL == sharedBuffer.pSharedBuff )
            {
            CAMHAL_LOGEA("No resources to allocate OMX shared buffer");
            ret = -ENOMEM;
            goto EXIT;
            }

            eError =  OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
                                      (OMX_INDEXTYPE) OMX_TI_IndexConfigAlgoAreas, &sharedBuffer);

        if ( OMX_ErrorNone != eError )
            {
            CAMHAL_LOGEB("Error while setting Focus Areas configuration 0x%x", eError);
            ret = -EINVAL;
            }

    EXIT:
        if (NULL != focusAreas)
            {
            memMgr.freeBuffer((void*) focusAreas);
            focusAreas = NULL;
            }
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}