Example No. 1
int
InvokeLoadAndRunGucefPlatformApp( const char* appName ,
                                  const char* rootDir ,
                                  int platformArgc    ,
                                  char** platformArgv ,
                                  int appArgc         ,
                                  char** appArgv      )
{
    char* modulePath = GetLibPath( rootDir, "libgucefLOADER.so" );
    void* modulePtr = (void*) dlopen( modulePath, RTLD_NOW );
    if ( NULL == modulePtr )
    {
        free( modulePath );
        LOGF( "Unable to link gucefLOADER module" );
        return 0;
    }
    FLOGI( "Loading loader module from: %s", modulePath );
    free( modulePath );
    modulePath = NULL;

    TGUCEFCORECINTERFACE_LoadAndRunGucefPlatformApp loadAndRunGucefPlatformApp =
        (TGUCEFCORECINTERFACE_LoadAndRunGucefPlatformApp) dlsym( modulePtr, "LoadAndRunGucefPlatformApp" );

    if ( NULL == loadAndRunGucefPlatformApp )
    {
        LOGF( "Unable to link gucefLOADER function: LoadAndRunGucefPlatformApp" );
        dlclose( modulePtr );
        return 0;
    }

    char* libRootDir = GetAssetLibRoot( rootDir );
    char* assetRootDir = GetAssetPath( rootDir, "" );

    int returnValue = loadAndRunGucefPlatformApp( appName      ,
                                                  rootDir      ,
                                                  assetRootDir ,
                                                  libRootDir   ,
                                                  platformArgc ,
                                                  platformArgv ,
                                                  appArgc      ,
                                                  appArgv      );

    free( libRootDir );
    libRootDir = NULL;
    free( assetRootDir );
    assetRootDir = NULL;
    dlclose( modulePtr );
    modulePtr = NULL;

    FLOGI( "LoadAndRunGucefPlatformApp returned with code %i", returnValue );
    return returnValue;
}
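The TGUCEFCORECINTERFACE_LoadAndRunGucefPlatformApp type used in the dlsym() cast above is not part of this example. A plausible declaration, inferred purely from the call site (the real typedef lives in the gucefLOADER headers and may differ), would be:

/* Hypothetical declaration inferred from the call above; parameter
   names are guesses, only the arity and types follow from the call. */
typedef int ( *TGUCEFCORECINTERFACE_LoadAndRunGucefPlatformApp )( const char* appName    ,
                                                                  const char* rootDir    ,
                                                                  const char* resRootDir ,
                                                                  const char* libRootDir ,
                                                                  int platformArgc       ,
                                                                  char** platformArgv    ,
                                                                  int appArgc            ,
                                                                  char** appArgv         );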
Example No. 2
int
ExtractAsset( AAssetManager* assetManager ,
              const char* assetPath       ,
              const char* destPath        )
{
    AAsset* asset = AAssetManager_open( assetManager, assetPath, AASSET_MODE_BUFFER );
    if ( NULL == asset )
    {
        FLOGE( "Unable to open asset for extraction: %s", assetPath );
        return 0;
    }
    FLOGI( "Extracting asset: %s", assetPath );

    const void* fileBuffer = AAsset_getBuffer( asset );
    if ( NULL == fileBuffer )
    {
        AAsset_close( asset );
        FLOGE( "Unable to get buffer to asset for extraction: %s", assetPath );
        return 0;
    }
    off_t bufferSize = AAsset_getLength( asset );

    // Make sure the directories exist to put the file in
    if ( 0 == MakeFileDir( destPath, 00777 ) )
    {
        AAsset_close( asset );
        FLOGE( "Unable to make directories for asset extraction: %s", destPath );
        return 0;
    }

    FILE* destFile = fopen( destPath, "wb" );
    if ( NULL == destFile )
    {
        AAsset_close( asset );
        FLOGE( "Unable to open destination file for asset: %s", destPath );
        return 0;
    }

    // fwrite() would return 0 for a zero-length asset, so only treat a short write as an error
    if ( bufferSize > 0 && 1 != fwrite( fileBuffer, bufferSize, 1, destFile ) )
    {
        FLOGE( "Error extracting asset from %s to %s", assetPath, destPath );
        fclose( destFile );
        AAsset_close( asset );
        return 0;
    }
    fclose( destFile );
    AAsset_close( asset );
    FLOGI( "Extracted asset from %s to %s", assetPath, destPath );
    return 1;
}
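A minimal sketch of how ExtractAsset() could be driven for a whole APK directory. AAssetManager_openDir(), AAssetDir_getNextFileName() and AAssetDir_close() are standard NDK calls; JoinPath() is a hypothetical helper that joins a directory and a file name into a malloc'ed path:

#include <stdlib.h>
#include <android/asset_manager.h>

int
ExtractAssetDir( AAssetManager* assetManager ,
                 const char* assetDir        ,
                 const char* destDir         )
{
    AAssetDir* dir = AAssetManager_openDir( assetManager, assetDir );
    if ( NULL == dir )
        return 0;

    /* AAssetDir_getNextFileName() yields bare file names, so rebuild
       the full asset path before handing it to ExtractAsset() */
    const char* fileName = NULL;
    int success = 1;
    while ( NULL != ( fileName = AAssetDir_getNextFileName( dir ) ) )
    {
        char* assetPath = JoinPath( assetDir, fileName );  /* hypothetical helper */
        char* destPath = JoinPath( destDir, fileName );    /* hypothetical helper */
        if ( 0 == ExtractAsset( assetManager, assetPath, destPath ) )
            success = 0;
        free( assetPath );
        free( destPath );
    }
    AAssetDir_close( dir );
    return success;
}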
Example No. 3
int convertPixelFormatToV4L2Format(PixelFormat format)
{
    int nFormat = 0;

    switch (format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            nFormat = v4l2_fourcc('N', 'V', '1', '2');
            break;

        case HAL_PIXEL_FORMAT_YCbCr_420_P:
            nFormat = v4l2_fourcc('Y', 'U', '1', '2');
            break;

        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            nFormat = v4l2_fourcc('Y', 'U', 'Y', 'V');
            break;

        default:
            FLOGE("Error: format:0x%x not supported!", format);
            break;
    }
    FLOGI("pixel format: 0x%x", nFormat);
    return nFormat;
}
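For reference, v4l2_fourcc() is the macro from linux/videodev2.h that packs four characters into a little-endian 32-bit code:

/* As defined in linux/videodev2.h: */
#define v4l2_fourcc(a, b, c, d) \
    ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))

/* So v4l2_fourcc('N', 'V', '1', '2') == 0x3231564E, which is what the
   FLOGI("pixel format: 0x%x", ...) line above prints for NV12. */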
Example No. 4
void CameraFrame::initialize(buffer_handle_t  buf_h,
                             int              index)
{
    fAssert(buf_h != NULL);
    private_handle_t *handle = (private_handle_t *)buf_h;
    mBufHandle = buf_h;
    mVirtAddr  = (void *)handle->base;
    mPhyAddr   = handle->phys;
    mSize      = handle->size;
    mWidth     = handle->width;
    mHeight    = handle->height;
    mFormat    = handle->format;

    mObserver  = NULL;
    atomic_init(&mRefCount, 0);
    mBufState  = BUFS_CREATE;
    mFrameType = INVALID_FRAME;
    mIndex     = index;

    //for uvc jpeg stream
    mpFrameBuf  = NULL;
    mBindUVCBufIdx = -1;

    FLOGI("CameraFrame::initialize, i %d, phyAddr 0x%x, mBufHandle %p", index, mPhyAddr, mBufHandle);
}
Example No. 5
int PreviewStream::registerBuffers(int num_buffers, buffer_handle_t *buffers)
{
    if (buffers == NULL || num_buffers > MAX_PREVIEW_BUFFER) {
        FLOGE("%s invalid buffers or buffer num %d too large", __FUNCTION__, num_buffers);
        return BAD_VALUE;
    }

    mTotalBuffers = num_buffers;
    FLOGI("%s total %d buffer", __FUNCTION__, num_buffers);
    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
    Rect bounds;
    memset(mCameraBuffer, 0, sizeof(mCameraBuffer));

    bounds.left   = 0;
    bounds.top    = 0;
    bounds.right  = mWidth;
    bounds.bottom = mHeight;
    void *pVaddr = NULL;

    for (int i=0; i < num_buffers; i++) {
        mapper.lock(buffers[i], mUsage, bounds, &pVaddr);
        mCameraBuffer[i].initialize(buffers[i], i);
        mCameraBuffer[i].mWidth  = mWidth;
        mCameraBuffer[i].mHeight = mHeight;
        mCameraBuffer[i].mFormat = mFormat;
        mCameraBuffer[i].setState(CameraFrame::BUFS_IN_SERVICE);
    }

    return 0;
}
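registerBuffers() locks each gralloc buffer for CPU access, but the matching teardown is not part of this example. A sketch of a counterpart, assuming the same mCameraBuffer/mTotalBuffers members (not from the original source; GraphicBufferMapper::unlock() releases the mapping taken by mapper.lock()):

int PreviewStream::unregisterBuffers()
{
    GraphicBufferMapper& mapper = GraphicBufferMapper::get();
    for (int i = 0; i < mTotalBuffers; i++) {
        // release the CPU mapping taken in registerBuffers()
        if (mCameraBuffer[i].mBufHandle != NULL) {
            mapper.unlock(mCameraBuffer[i].mBufHandle);
        }
    }
    mTotalBuffers = 0;
    return 0;
}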
Example No. 6
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue.  It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void
android_main( struct android_app* state )
{
    // Make sure glue isn't stripped.
    app_dummy();

    char packageDir[ 512 ];
    GetPackageDir( state, packageDir, 512 );

    // Check if we need to perform first time initialization
    int firstRun = IsFirstRun( packageDir );
    if ( 0 == firstRun )
    {
        LOGI( "Performing first run initialization" );

        // Extract to our private storage as desired
        if ( 0 == ExtractAssets( state, packageDir ) )
        {
            return;
        }

        LOGI( "Completed first run initialization" );
    }
    else
    {
        LOGI( "Detected previous run, skipping first run initialization" );
    }

    // Create the platform specific params
    char** platformArgv = NULLPTR;
    int platformArgc = 0;
    CreatePlatformParams( &platformArgv, &platformArgc, state );

    // Start the process of invoking the launch of the platform using the loader
    int appStatus = InvokeLoadAndRunGucefPlatformApp( "gucefPRODMAN", packageDir, platformArgc, platformArgv, 0, NULLPTR );

    // clean up our platform param data
    FreeStringMatrix( platformArgv, platformArgc );

    // Check if we had a successful run
    if ( 0 != firstRun )
    {
        if ( 0 == appStatus )
        {
            LOGI( "Successfull completed first run, setting first run flag to false" );

            // Set the flag that we completed the first run
            SetFirstRunCompleted( packageDir );
        }
        else
        {
            // If the flag is already set, unset it
            UnSetFirstRunCompleted( packageDir );
        }
    }
    FLOGI( "exit status code: %i", appStatus );

}
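IsFirstRun() is not among these examples. A hedged sketch, assuming the firstrun.completed convention used by SetFirstRunCompleted() (Example No. 10 below) and the 0-means-first-run contract implied by the 0 == firstRun check above:

/* Sketch only: the real helper is not shown in these examples. */
int
IsFirstRun( const char* packageDir )
{
    char* firstrunFile = GetAssetPath( packageDir, "firstrun.completed" );
    int completedBefore = FileExists( firstrunFile );
    free( firstrunFile );
    return completedBefore;  /* 0 when the flag file is absent, i.e. a first run */
}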
Example No. 7
int
MakeDir( const char* path, int permissions )
{
    int retValue = recursive_mkdir( path, permissions );

    if ( retValue == 0 )
    {
        FLOGI( "created dir: %s", path );
    }
    else
    {
        if ( EEXIST == errno )
        {
            FLOGI( "found existing dir: %s", path );
            return 1;
        }
        else
        {
            FLOGI( "error %i creating dir: %s", errno, path );
        }
    }
    return retValue == 0 ? 1 : 0;
}
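recursive_mkdir() is also not shown. A minimal POSIX sketch with the contract MakeDir() relies on (0 on success, -1 with errno set otherwise, EEXIST when the directory is already there):

#include <errno.h>
#include <limits.h>
#include <string.h>
#include <sys/stat.h>

/* Sketch only: create each path component in turn, tolerating
   components that already exist; the real helper may differ. */
static int
recursive_mkdir( const char* path, int permissions )
{
    char buffer[ PATH_MAX ];
    if ( strlen( path ) >= sizeof( buffer ) )
    {
        errno = ENAMETOOLONG;
        return -1;
    }
    strcpy( buffer, path );

    char* p = buffer + 1;  /* skip a leading '/' */
    for ( ; *p != '\0'; ++p )
    {
        if ( *p == '/' )
        {
            *p = '\0';
            if ( 0 != mkdir( buffer, permissions ) && EEXIST != errno )
                return -1;
            *p = '/';
        }
    }
    return mkdir( buffer, permissions );
}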
Example No. 8
int PhysMemAdapter::freeBuffer()
{
    if (mIonFd <= 0) {
        FLOGE("try to free buffer from ion in preview or ion invalid");
        return BAD_VALUE;
    }

    FLOGI("freeBufferToIon buffer num:%d", mBufferCount);
    for (int i = 0; i < mBufferCount; i++) {
        struct ion_handle *ionHandle =
            (struct ion_handle *)mCameraBuffer[i].mBufHandle;
        ion_free(mIonFd, ionHandle);
        munmap(mCameraBuffer[i].mVirtAddr, mCameraBuffer[i].mSize);
    }

    memset(mCameraBuffer, 0, sizeof(mCameraBuffer));
    dispatchBuffers(NULL, 0, BUFFER_DESTROY);
    return NO_ERROR;
}
Example No. 9
PixelFormat convertV4L2FormatToPixelFormat(unsigned int format)
{
    PixelFormat nFormat = 0;

    switch (format) {
        case v4l2_fourcc('N', 'V', '1', '2'):
            nFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
            break;

        case v4l2_fourcc('Y', 'U', '1', '2'):
            nFormat = HAL_PIXEL_FORMAT_YCbCr_420_P;
            break;

        case v4l2_fourcc('Y', 'U', 'Y', 'V'):
            nFormat = HAL_PIXEL_FORMAT_YCbCr_422_I;
            break;

        default:
            FLOGE("Error: format:0x%x not supported!", format);
            break;
    }
    FLOGI("pixel format: 0x%x", nFormat);
    return nFormat;
}
Example No. 10
void
SetFirstRunCompleted( const char* packageDir )
{
    // We know the gucefLOADER relies on a text file named firstrun.completed;
    // we will use the same convention here to keep things consistent
    char* firstrunFile = GetAssetPath( packageDir, "firstrun.completed" );
    int existsBool = FileExists( firstrunFile );
    if ( 0 == existsBool )
    {
        if ( 0 != MakeFileDir( firstrunFile, 00777 ) )
        {
            FILE* fptr = fopen( firstrunFile, "wb" );
            if ( NULL != fptr )
            {
                FLOGI( "Wrote flag to %s", firstrunFile );
                fclose( fptr );
                free( firstrunFile );
                return;
            }
        }
        FLOGE( "Unable to create flag at %s", firstrunFile );
    }
    free( firstrunFile );
}
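FileExists() is likewise an unshown helper; given how SetFirstRunCompleted() uses it, a one-line access(2) wrapper would fit:

#include <unistd.h>

/* Sketch only: returns 1 when the file exists, 0 otherwise. */
int
FileExists( const char* path )
{
    return ( 0 == access( path, F_OK ) ) ? 1 : 0;
}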
Example No. 11
status_t TVINDevice::initSensorInfo(const CameraInfo& /*info*/)
{
    if (mCameraHandle < 0) {
        FLOGE("TVINDevice: initParameters sensor has not been opened");
        return BAD_VALUE;
    }

    int res = 0;
    int maxWait = 6;
    // Get the PAL/NTSC STD
    do {
        res = ioctl(mCameraHandle, VIDIOC_G_STD, &mSTD);
        if (res < 0) {
            FLOGE("VIDIOC_G_STD failed, retries left: %d\n", maxWait - 1);
            sleep(1);
        }
        maxWait--;
    } while ((res != 0) && (maxWait > 0));

    if (mSTD == V4L2_STD_PAL)
        FLOGI("Get current mode: PAL");
    else if (mSTD == V4L2_STD_NTSC)
        FLOGI("Get current mode: NTSC");
    else {
        FLOGE("Error! Got invalid mode: %llu", mSTD);
        return BAD_VALUE;
    }

    if (ioctl(mCameraHandle, VIDIOC_S_STD, &mSTD) < 0) {
        FLOGE("VIDIOC_S_STD failed\n");
        return BAD_VALUE;
    }

    // first read sensor format.
    int ret = 0, index = 0;
    int sensorFormats[MAX_SENSOR_FORMAT];
    memset(mAvailableFormats, 0, sizeof(mAvailableFormats));
    memset(sensorFormats, 0, sizeof(sensorFormats));
#if 0
    struct v4l2_fmtdesc vid_fmtdesc;
    while (ret == 0) {
        vid_fmtdesc.index = index;
        vid_fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ret               = ioctl(mCameraHandle, VIDIOC_ENUM_FMT, &vid_fmtdesc);
        FLOG_RUNTIME("index:%d,ret:%d, format:%c%c%c%c", index, ret,
                     vid_fmtdesc.pixelformat & 0xFF,
                     (vid_fmtdesc.pixelformat >> 8) & 0xFF,
                     (vid_fmtdesc.pixelformat >> 16) & 0xFF,
                     (vid_fmtdesc.pixelformat >> 24) & 0xFF);
        if (ret == 0) {
            sensorFormats[index++] = vid_fmtdesc.pixelformat;
        }
    }
    sensorFormats[index++] = v4l2_fourcc('B', 'L', 'O', 'B');
    sensorFormats[index++] = v4l2_fourcc('R', 'A', 'W', 'S');
#endif

    // The v4l2 driver does not support format enumeration, so formats are hard-coded here.
    sensorFormats[index++] = v4l2_fourcc('N', 'V', '1', '2');
    sensorFormats[index++] = v4l2_fourcc('Y', 'V', '1', '2');
    sensorFormats[index++] = v4l2_fourcc('B', 'L', 'O', 'B');
    sensorFormats[index++] = v4l2_fourcc('R', 'A', 'W', 'S');
    //mAvailableFormats[2] = v4l2_fourcc('Y', 'U', 'Y', 'V');
    mAvailableFormatCount = index;
    changeSensorFormats(sensorFormats, index);

    index = 0;
    char TmpStr[20];
    int  previewCnt = 0, pictureCnt = 0;
    struct v4l2_frmsizeenum vid_frmsize;
    struct v4l2_frmivalenum vid_frmval;
    while (ret == 0) {
        memset(TmpStr, 0, 20);
        memset(&vid_frmsize, 0, sizeof(struct v4l2_frmsizeenum));
        vid_frmsize.index        = index++;
        vid_frmsize.pixel_format = v4l2_fourcc('N', 'V', '1', '2');
        ret = ioctl(mCameraHandle,
                    VIDIOC_ENUM_FRAMESIZES, &vid_frmsize);
        if (ret == 0) {
            FLOG_RUNTIME("enum frame size w:%d, h:%d",
                         vid_frmsize.discrete.width, vid_frmsize.discrete.height);
            memset(&vid_frmval, 0, sizeof(struct v4l2_frmivalenum));
            vid_frmval.index        = 0;
            vid_frmval.pixel_format = vid_frmsize.pixel_format;
            vid_frmval.width        = vid_frmsize.discrete.width;
            vid_frmval.height       = vid_frmsize.discrete.height;

            // ret = ioctl(mCameraHandle, VIDIOC_ENUM_FRAMEINTERVALS,
            // &vid_frmval);
            // The v4l2 driver does not support frame-interval enumeration, so values are hard-coded here.
            if (ret == 0) {
                FLOG_RUNTIME("vid_frmval denominator:%d, numeraton:%d",
                             vid_frmval.discrete.denominator,
                             vid_frmval.discrete.numerator);
                if ((vid_frmsize.discrete.width > 1920) ||
                    (vid_frmsize.discrete.height > 1080)) {
                    vid_frmval.discrete.denominator = 15;
                    vid_frmval.discrete.numerator   = 1;
                }
                else {
                    vid_frmval.discrete.denominator = 30;
                    vid_frmval.discrete.numerator   = 1;
                }

                mPictureResolutions[pictureCnt++] = vid_frmsize.discrete.width;
                mPictureResolutions[pictureCnt++] = vid_frmsize.discrete.height;

                if (vid_frmval.discrete.denominator /
                    vid_frmval.discrete.numerator > 15) {
                    mPreviewResolutions[previewCnt++] = vid_frmsize.discrete.width;
                    mPreviewResolutions[previewCnt++] = vid_frmsize.discrete.height;
                }
            }
        }
    } // end while

    mPreviewResolutionCount = previewCnt;
    mPictureResolutionCount = pictureCnt;

    mMinFrameDuration = 33331760L;
    mMaxFrameDuration = 30000000000L;
    int i;
    for (i=0; i<MAX_RESOLUTION_SIZE && i<pictureCnt; i+=2) {
        FLOGI("SupportedPictureSizes: %d x %d", mPictureResolutions[i], mPictureResolutions[i+1]);
    }

    adjustPreviewResolutions();
    for (i=0; i<MAX_RESOLUTION_SIZE && i<previewCnt; i+=2) {
        FLOGI("SupportedPreviewSizes: %d x %d", mPreviewResolutions[i], mPreviewResolutions[i+1]);
    }
    FLOGI("FrameDuration is %lld, %lld", mMinFrameDuration, mMaxFrameDuration);

    i = 0;
    mTargetFpsRange[i++] = 10;
    mTargetFpsRange[i++] = 15;
    mTargetFpsRange[i++] = 25;
    mTargetFpsRange[i++] = 30;

    setMaxPictureResolutions();
    FLOGI("mMaxWidth:%d, mMaxHeight:%d", mMaxWidth, mMaxHeight);
    mFocalLength = 10.001;

    return NO_ERROR;
}
Example No. 12
int PhysMemAdapter::allocatePictureBuffer(int width,
                                          int height,
                                          int format,
                                          int numBufs)
{
    if (mIonFd <= 0) {
        FLOGE("try to allocate buffer from ion in preview or ion invalid");
        return BAD_VALUE;
    }

    int size = 0;
    if ((width == 0) || (height == 0)) {
        FLOGE("allocateBufferFromIon: width or height = 0");
        return BAD_VALUE;
    }
    switch (format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            size = width * ((height + 16) & (~15)) * 3 / 2;
            break;

        case HAL_PIXEL_FORMAT_YCbCr_420_P:
            size = width * height * 3 / 2;
            break;

        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            size = width * height * 2;
            break;

        default:
            FLOGE("Error: format not supported int ion alloc");
            return BAD_VALUE;
    }

    unsigned char *ptr = NULL;
    int sharedFd;
    int phyAddr;
    struct ion_handle *ionHandle;
    size = (size + PAGE_SIZE - 1) & (~(PAGE_SIZE - 1));

    FLOGI("allocateBufferFromIon buffer num:%d", numBufs);
    for (int i = 0; i < numBufs; i++) {
        ionHandle = NULL;
        int err = ion_alloc(mIonFd, size, 8, 1, &ionHandle);
        if (err) {
            FLOGE("ion_alloc failed.");
            return BAD_VALUE;
        }

        err = ion_map(mIonFd,
                      ionHandle,
                      size,
                      PROT_READ | PROT_WRITE,
                      MAP_SHARED,
                      0,
                      &ptr,
                      &sharedFd);
        if (err) {
            FLOGE("ion_map failed.");
            ion_free(mIonFd, ionHandle); // don't leak the handle on error
            return BAD_VALUE;
        }
        phyAddr = ion_phys(mIonFd, ionHandle);
        if (phyAddr == 0) {
            FLOGE("ion_phys failed.");
            munmap(ptr, size);           // undo the mapping made above
            close(sharedFd);
            ion_free(mIonFd, ionHandle);
            return BAD_VALUE;
        }
        FLOG_RUNTIME("phyalloc ptr:0x%x, phy:0x%x, size:%d",
                     (int)ptr,
                     phyAddr,
                     size);
        mCameraBuffer[i].reset();
        mCameraBuffer[i].mIndex     = i;
        mCameraBuffer[i].mWidth     = width;
        mCameraBuffer[i].mHeight    = height;
        mCameraBuffer[i].mFormat    = format;
        mCameraBuffer[i].mVirtAddr  = ptr;
        mCameraBuffer[i].mPhyAddr   = phyAddr;
        mCameraBuffer[i].mSize      =  size;
        mCameraBuffer[i].mBufHandle = (buffer_handle_t *)ionHandle;
        close(sharedFd);
    }

    mBufferCount    = numBufs;
    mQueueableCount = numBufs;
    mFormat         = format;
    mBufferSize     = mCameraBuffer[0].mSize;
    mFrameWidth     = width;
    mFrameHeight    = height;

    dispatchBuffers(&mCameraBuffer[0], numBufs, BUFFER_CREATE);

    return NO_ERROR;
}
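The size = (size + PAGE_SIZE - 1) & (~(PAGE_SIZE - 1)) line above is the usual round-up-to-page-size idiom. A self-contained check of the arithmetic (assuming 4096-byte pages):

#include <stdio.h>

int main(void)
{
    const unsigned pageSize = 4096;
    unsigned size = 640 * 480 * 3 / 2;              /* 460800 bytes, YCbCr_420_P 640x480 */
    size = (size + pageSize - 1) & ~(pageSize - 1); /* round up to a whole page */
    printf("%u\n", size);                           /* prints 462848, i.e. 113 pages */
    return 0;
}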
Example No. 13
int PreviewStream::allocateBuffers(int /*width*/, int /*height*/,
                        int /*format*/, int /*numBufs*/ )
{
    int index = -1;
    int ret = NO_ERROR;

    // In DeviceAdapter::handleFrameRelease, if mPreviewing is false,
    // mRefCount is not decremented. This can happen when performance is low,
    // so the reference counts must be zeroed here.
    for (int i = 0; i < mTotalBuffers; i++) {
       // FLOGI("==== PreviewStream::allocateBuffers, i %d, state %d, ref %d",
         //   i, mCameraBuffer[i].getState(), mCameraBuffer[i].getRefCount());

        mCameraBuffer[i].ZeroRefCount();
    }

    for (int i = 0; i < mMaxProducerBuffers; i++) {
        buffer_handle_t *buf_h = NULL;
        ret = mNativeWindow->dequeue_buffer(mNativeWindow, &buf_h);
        if (ret != 0) {
            FLOGE("dequeueBuffer failed: %s (%d)", strerror(-ret), -ret);
            if (ENODEV == -ret) { // dequeue_buffer reports negative errno values
                FLOGE("Preview surface abandoned!");
                mNativeWindow = NULL;
            }
            return ret;
        }

        index = getBufferIdx(buf_h);
        if (index < 0 || index >= mTotalBuffers) {
            FLOGE("%s dequeue invalid buffer", __FUNCTION__);
            return BAD_VALUE;
        }
        mCameraBuffer[index].setState(CameraFrame::BUFS_FREE);
        if (mDeviceAdapter.get() && mDeviceAdapter->UseMJPG()) {
            mDeviceAdapter.get()->mVPUPhyAddr[i] = (unsigned char*)mCameraBuffer[index].mPhyAddr;
            mDeviceAdapter.get()->mVPUVirtAddr[i] = (unsigned char*)mCameraBuffer[index].mVirtAddr;
            FLOGI("allocateBuffers, index %d, phyAddr 0x%x", index, mCameraBuffer[index].mPhyAddr);
        }
    }

    for (int i = 0; i < mTotalBuffers; i++) {
        int state = mCameraBuffer[i].getState();
        if (state != CameraFrame::BUFS_FREE) {
            mCameraBuffer[i].setState(CameraFrame::BUFS_IN_SERVICE);

            // The frame is held in service.
            // Make sure we don't add one more
            // reference count for it
            if(!mCameraBuffer[i].getRefCount())
                mCameraBuffer[i].addReference();
        }

        if(mDeviceAdapter.get() && mDeviceAdapter->UseMJPG()) {
            mCameraBuffer[i].mBindUVCBufIdx = -1;
            mCameraBuffer[i].mpFrameBuf = NULL;
        }
    }

    dispatchBuffers(&mCameraBuffer[0], mTotalBuffers, BUFFER_CREATE);

    return ret;
}
Example No. 14
status_t TVINDevice::setParameters(CameraParameters& params)
{
    int  w, h;
    int  framerate, local_framerate;
    int  max_zoom, zoom, max_fps, min_fps;
    char tmp[128];

    Mutex::Autolock lock(mLock);

    max_zoom = params.getInt(CameraParameters::KEY_MAX_ZOOM);
    zoom     = params.getInt(CameraParameters::KEY_ZOOM);
    if (zoom > max_zoom) {
        FLOGE("Invalid zoom setting, zoom %d, max zoom %d", zoom, max_zoom);
        return BAD_VALUE;
    }
    if (!((strcmp(params.getPreviewFormat(), "yuv420sp") == 0) ||
          (strcmp(params.getPreviewFormat(), "yuv420p") == 0) ||
          (strcmp(params.getPreviewFormat(), "yuv422i-yuyv") == 0))) {
        FLOGE("Only yuv420sp or yuv420pis supported, but input format is %s",
              params.getPreviewFormat());
        return BAD_VALUE;
    }

    if (strcmp(params.getPictureFormat(), "jpeg") != 0) {
        FLOGE("Only jpeg still pictures are supported");
        return BAD_VALUE;
    }

    params.getPreviewSize(&w, &h);
    sprintf(tmp, "%dx%d", w, h);
    FLOGI("Set preview size: %s", tmp);
    if (strstr(mSupportedPreviewSizes, tmp) == NULL) {
        FLOGE("The preview size w %d, h %d is not corrected", w, h);
        return BAD_VALUE;
    }

    params.getPictureSize(&w, &h);
    sprintf(tmp, "%dx%d", w, h);
    FLOGI("Set picture size: %s", tmp);
    if (strstr(mSupportedPictureSizes, tmp) == NULL) {
        FLOGE("The picture size w %d, h %d is not corrected", w, h);
        return BAD_VALUE;
    }

    local_framerate = mParams.getPreviewFrameRate();
    FLOGI("get local frame rate:%d FPS", local_framerate);
    if ((local_framerate > 30) || (local_framerate < 0)) {
        FLOGE("The framerate is not corrected");
        local_framerate = 15;
    }

    framerate = params.getPreviewFrameRate();
    FLOGI("Set frame rate:%d FPS", framerate);
    if ((framerate > 30) || (framerate < 0)) {
        FLOGE("The framerate is not corrected");
        return BAD_VALUE;
    }
    else if (local_framerate != framerate) {
        if (framerate == 15) {
            params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "12000,17000");
        }
        else if (framerate == 30) {
            params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "25000,33000");
        }
    }

    int actual_fps = 15;
    params.getPreviewFpsRange(&min_fps, &max_fps);
    FLOGI("FPS range: %d - %d", min_fps, max_fps);
    if ((max_fps < 1000) || (min_fps < 1000) || (max_fps > 33000) ||
        (min_fps > 33000)) {
        FLOGE("The fps range from %d to %d is error", min_fps, max_fps);
        return BAD_VALUE;
    }
    actual_fps = min_fps > 15000 ? 30 : 15;
    FLOGI("setParameters: actual_fps=%d", actual_fps);
    params.setPreviewFrameRate(actual_fps);

    mParams = params;
    return NO_ERROR;
}
Example No. 15
status_t TVINDevice::initParameters(CameraParameters& params,
                                  int              *supportRecordingFormat,
                                  int               rfmtLen,
                                  int              *supportPictureFormat,
                                  int               pfmtLen)
{
    int ret = 0, index = 0;
    int maxWait = 6;
    int sensorFormat[MAX_SENSOR_FORMAT];

    if (mCameraHandle < 0) {
        FLOGE("TVINDevice: initParameters sensor has not been opened");
        return BAD_VALUE;
    }
    if ((supportRecordingFormat == NULL) || (rfmtLen == 0) ||
        (supportPictureFormat == NULL) || (pfmtLen == 0)) {
        FLOGE("TVINDevice: initParameters invalid parameters");
        return BAD_VALUE;
    }

    // Get the PAL/NTSC STD
    do {
        ret = ioctl(mCameraHandle, VIDIOC_G_STD, &mSTD);
        if (ret < 0)
        {
            FLOGE("VIDIOC_G_STD failed, retries left: %d\n",
                  maxWait - 1);
            sleep(1);
        }
        maxWait--;
    } while ((ret != 0) && (maxWait > 0));

    if (mSTD == V4L2_STD_PAL)
        FLOGI("Get current mode: PAL");
    else if (mSTD == V4L2_STD_NTSC)
        FLOGI("Get current mode: NTSC");
    else {
        FLOGE("Error! Got invalid mode: %llu", mSTD);
        return BAD_VALUE;
    }

    if (ioctl(mCameraHandle, VIDIOC_S_STD, &mSTD) < 0)
    {
        FLOGE("VIDIOC_S_STD failed\n");
        return BAD_VALUE;
    }

    // first read sensor format.
#if 0
    struct v4l2_fmtdesc vid_fmtdesc;
    while (ret == 0) {
        vid_fmtdesc.index = index;
        vid_fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ret               = ioctl(mCameraHandle, VIDIOC_ENUM_FMT, &vid_fmtdesc);
        FLOG_RUNTIME("index:%d,ret:%d, format:%c%c%c%c", index, ret,
                     vid_fmtdesc.pixelformat & 0xFF,
                     (vid_fmtdesc.pixelformat >> 8) & 0xFF,
                     (vid_fmtdesc.pixelformat >> 16) & 0xFF,
                     (vid_fmtdesc.pixelformat >> 24) & 0xFF);
        if (ret == 0) {
            sensorFormat[index++] = vid_fmtdesc.pixelformat;
        }
    }
#endif // if 0

    // The v4l2 driver does not support format enumeration, so formats are hard-coded here.
    sensorFormat[0] = v4l2_fourcc('N', 'V', '1', '2');
    sensorFormat[1] = v4l2_fourcc('Y', 'U', '1', '2');
    sensorFormat[2] = v4l2_fourcc('Y', 'U', 'Y', 'V');
    index           = 3;

    // second check match sensor format with vpu support format and picture
    // format.
    mPreviewPixelFormat = getMatchFormat(supportRecordingFormat,
                                         rfmtLen,
                                         sensorFormat,
                                         index);
    mPicturePixelFormat = getMatchFormat(supportPictureFormat,
                                         pfmtLen,
                                         sensorFormat,
                                         index);
    setPreviewStringFormat(mPreviewPixelFormat);
    ret = setSupportedPreviewFormats(supportRecordingFormat,
                                     rfmtLen,
                                     sensorFormat,
                                     index);
    if (ret) {
        FLOGE("setSupportedPreviewFormats failed");
        return ret;
    }

    index = 0;
    char TmpStr[20];
    int  previewCnt = 0, pictureCnt = 0;
    struct v4l2_frmsizeenum vid_frmsize;
    struct v4l2_frmivalenum vid_frmval;
    while (ret == 0) {
        memset(TmpStr, 0, 20);
        memset(&vid_frmsize, 0, sizeof(struct v4l2_frmsizeenum));
        vid_frmsize.index        = index++;
        vid_frmsize.pixel_format = v4l2_fourcc('N', 'V', '1', '2');
        ret                      = ioctl(mCameraHandle,
                                         VIDIOC_ENUM_FRAMESIZES,
                                         &vid_frmsize);
        if (ret == 0) {
            FLOG_RUNTIME("enum frame size w:%d, h:%d",
                         vid_frmsize.discrete.width, vid_frmsize.discrete.height);
            memset(&vid_frmval, 0, sizeof(struct v4l2_frmivalenum));
            vid_frmval.index        = 0;
            vid_frmval.pixel_format = vid_frmsize.pixel_format;
            vid_frmval.width        = vid_frmsize.discrete.width;
            vid_frmval.height       = vid_frmsize.discrete.height;

            // ret = ioctl(mCameraHandle, VIDIOC_ENUM_FRAMEINTERVALS,
            // &vid_frmval);
            // The v4l2 driver does not support frame-interval enumeration, so values are hard-coded here.
            if (ret == 0) {
                FLOG_RUNTIME("vid_frmval denominator:%d, numeraton:%d",
                             vid_frmval.discrete.denominator,
                             vid_frmval.discrete.numerator);
                if ((vid_frmsize.discrete.width > 1280) ||
                    (vid_frmsize.discrete.height > 720)) {
                    vid_frmval.discrete.denominator = 15;
                    vid_frmval.discrete.numerator   = 1;
                }
                else {
                    vid_frmval.discrete.denominator = 30;
                    vid_frmval.discrete.numerator   = 1;
                }

                sprintf(TmpStr,
                        "%dx%d",
                        vid_frmsize.discrete.width,
                        vid_frmsize.discrete.height);

                // Set default to be first enum w/h, since tvin may only
                // have one set
                if (pictureCnt == 0){
                    mParams.setPreviewSize(vid_frmsize.discrete.width,
                            vid_frmsize.discrete.height);
                    mParams.setPictureSize(vid_frmsize.discrete.width,
                            vid_frmsize.discrete.height);
                }

                if (pictureCnt == 0)
                    strncpy((char *)mSupportedPictureSizes,
                            TmpStr,
                            CAMER_PARAM_BUFFER_SIZE);
                else {
                    // strncat's size argument is the number of chars that may
                    // be appended, not the destination size, so pass the
                    // remaining space to avoid overflowing the buffer
                    strncat(mSupportedPictureSizes,
                            PARAMS_DELIMITER,
                            CAMER_PARAM_BUFFER_SIZE - strlen(mSupportedPictureSizes) - 1);
                    strncat(mSupportedPictureSizes,
                            TmpStr,
                            CAMER_PARAM_BUFFER_SIZE - strlen(mSupportedPictureSizes) - 1);
                }
                pictureCnt++;

                if (vid_frmval.discrete.denominator /
                    vid_frmval.discrete.numerator >= 15) {
                    if (previewCnt == 0)
                        strncpy((char *)mSupportedPreviewSizes,
                                TmpStr,
                                CAMER_PARAM_BUFFER_SIZE);
                    else {
                        strncat(mSupportedPreviewSizes,
                                PARAMS_DELIMITER,
                                CAMER_PARAM_BUFFER_SIZE - strlen(mSupportedPreviewSizes) - 1);
                        strncat(mSupportedPreviewSizes,
                                TmpStr,
                                CAMER_PARAM_BUFFER_SIZE - strlen(mSupportedPreviewSizes) - 1);
                    }
                    previewCnt++;
                }
            }
        } // end if (ret == 0)
        else {
            FLOGI("enum frame size error %d", ret);
        }
    } // end while

    strcpy(mSupportedFPS, "15,30");
    FLOGI("SupportedPictureSizes is %s", mSupportedPictureSizes);
    FLOGI("SupportedPreviewSizes is %s", mSupportedPreviewSizes);
    FLOGI("SupportedFPS is %s", mSupportedFPS);

    mParams.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
                mSupportedPictureSizes);
    mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
                mSupportedPreviewSizes);
    mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
                mSupportedFPS);
    mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
                "(12000,17000),(25000,33000)");
    // Align the default FPS RANGE to the DEFAULT_PREVIEW_FPS
    mParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "12000,17000");
    mParams.setPreviewFrameRate(DEFAULT_PREVIEW_FPS);

    params = mParams;
    return NO_ERROR;
}
Example No. 16
status_t TVINDevice::setDeviceConfig(int         width,
                                        int         height,
                                        PixelFormat format,
                                        int         fps)
{
    if (mCameraHandle <= 0) {
        FLOGE("setDeviceConfig: DeviceAdapter uninitialized");
        return BAD_VALUE;
    }
    if ((width == 0) || (height == 0)) {
        FLOGE("setDeviceConfig: invalid parameters");
        return BAD_VALUE;
    }

    status_t ret = NO_ERROR;
    int input    = 1;
    ret = ioctl(mCameraHandle, VIDIOC_S_INPUT, &input);
    if (ret < 0) {
        FLOGE("Open: VIDIOC_S_INPUT Failed: %s", strerror(errno));
        return ret;
    }

    int vformat;
    vformat = convertPixelFormatToV4L2Format(format);

    FLOGI("Width * Height %d x %d format %d, fps: %d",
          width,
          height,
          vformat,
          fps);

    mVideoInfo->width       = width;
    mVideoInfo->height      = height;
    mVideoInfo->framesizeIn = (width * height << 1);
    mVideoInfo->formatIn    = vformat;

    mVideoInfo->param.type =
        V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->param.parm.capture.timeperframe.numerator   = 1;
    mVideoInfo->param.parm.capture.timeperframe.denominator = 0;
    mVideoInfo->param.parm.capture.capturemode = 0;
    ret = ioctl(mCameraHandle, VIDIOC_S_PARM, &mVideoInfo->param);
    if (ret < 0) {
        FLOGE("Open: VIDIOC_S_PARM Failed: %s", strerror(errno));
        return ret;
    }

    mVideoInfo->format.type                 = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mVideoInfo->format.fmt.pix.width        = width & 0xFFFFFFF8;
    mVideoInfo->format.fmt.pix.height       = height & 0xFFFFFFF8;
    mVideoInfo->format.fmt.pix.pixelformat  = vformat;
    mVideoInfo->format.fmt.pix.field        = V4L2_FIELD_INTERLACED;
    mVideoInfo->format.fmt.pix.priv         = 0;
    mVideoInfo->format.fmt.pix.sizeimage    = 0;
    mVideoInfo->format.fmt.pix.bytesperline = 0;

    // Special stride alignment for YU12
    if (vformat == v4l2_fourcc('Y', 'U', '1', '2')){
        // Google defines the stride and c_stride for the YUV420 format as:
        // y_size = stride * height
        // c_stride = ALIGN(stride/2, 16)
        // c_size = c_stride * height/2
        // size = y_size + c_size * 2
        // cr_offset = y_size
        // cb_offset = y_size + c_size
        // int stride = (width+15)/16*16;
        // int c_stride = (stride/2+16)/16*16;

        // GPU and IPU use the stride calculation below:
        // the GPU wants the Y stride 32-aligned and the UV stride 16-aligned,
        // while the IPU wants the Y stride to be 2x the UV stride alignment.
        int stride = (width+31)/32*32;
        int c_stride = (stride/2+15)/16*16;
        mVideoInfo->format.fmt.pix.bytesperline = stride;
        mVideoInfo->format.fmt.pix.sizeimage    = stride*height + c_stride*height;
        FLOGI("Special handling for YU12 on stride %d, size %d",
            mVideoInfo->format.fmt.pix.bytesperline,
            mVideoInfo->format.fmt.pix.sizeimage);
    }

    ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
    if (ret < 0) {
        FLOGE("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
        return ret;
    }

    return ret;
}
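Plugging a concrete resolution into the YU12 stride rules above gives a quick sanity check of the arithmetic (a sketch, not part of the original source):

#include <stdio.h>

int main(void)
{
    int width = 1280, height = 720;
    int stride   = (width + 31) / 32 * 32;       /* 1280: Y stride, 32-aligned     */
    int c_stride = (stride / 2 + 15) / 16 * 16;  /* 640: chroma stride, 16-aligned */
    int size     = stride * height + c_stride * height;
    printf("bytesperline=%d sizeimage=%d\n", stride, size); /* 1280, 1382400 */
    return 0;
}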