void CameraHardwareStub::initDefaultParameters()
{
    CameraParameters p;

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, "320x240");
    p.setPreviewSize(320, 240);
    p.setPreviewFrameRate(15);
    p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420SP);

    p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, "320x240");
    p.setPictureSize(320, 240);
    p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);

    if (setParameters(p) != NO_ERROR) {
        LOGE("Failed to set default parameters?!");
    }
}
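As a quick sanity check of what initDefaultParameters() set up, a caller could read the values back through the standard CameraParameters getters. A minimal sketch, assuming the legacy LOGD macro is available in this file (dumpDefaults is a hypothetical helper):

void dumpDefaults(const CameraParameters &p)
{
    int w, h;

    p.getPreviewSize(&w, &h);
    LOGD("preview: %dx%d @ %d fps, format %s",
         w, h, p.getPreviewFrameRate(), p.getPreviewFormat());

    p.getPictureSize(&w, &h);
    LOGD("picture: %dx%d, format %s", w, h, p.getPictureFormat());

    // Supported-value lists come back as comma-separated strings.
    LOGD("supported preview sizes: %s",
         p.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
}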
CameraParameters
JordanCameraWrapper::getParameters() const
{
    CameraParameters ret = mMotoInterface->getParameters();

    if (mCameraType == CAM_SOC) {
        /* The original zoom ratio string is '100,200,300,400,500,600',
           but 500 and 600 are broken for the SOC camera, so we limit
           the range here. */
        ret.set(CameraParameters::KEY_MAX_ZOOM, "3");
        ret.set(CameraParameters::KEY_ZOOM_RATIOS, "100,200,300,400");
    }

    /* Motorola uses mot-exposure-offset instead of exposure-compensation
       for whatever reason, so adapt the values here.
       The limits below are taken from the lib; we could parse them from
       there as well, but it's likely not worth the hassle. */
    float exposure = ret.getFloat("mot-exposure-offset");
    int exposureParam = (int) round(exposure * 3);

    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, exposureParam);
    ret.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "9");
    ret.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "-9");
    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, "0.3333333333333");

    ret.set("cam-mode", mVideoMode ? "1" : "0");

    return ret;
}
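On the setParameters() side, a wrapper like this has to translate the framework's exposure-compensation index (1/3 EV steps in the -9..9 range advertised above) back into Motorola's mot-exposure-offset before forwarding the parameters. A minimal sketch of that inverse mapping; the helper name and the "%.1f" string format are assumptions, only the divide-by-3 follows from the code above:

// Hypothetical helper: convert the framework index back to Motorola's key.
static void mapExposureToMoto(CameraParameters &params)
{
    int comp = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
    // Index 3 -> +1.0 EV, index -3 -> -1.0 EV, mirroring the round(x * 3) above.
    float offset = comp / 3.0f;

    char buf[16];
    snprintf(buf, sizeof(buf), "%.1f", offset);
    params.set("mot-exposure-offset", buf);
}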
CameraParameters
LibCameraWrapper::getParameters() const
{
    CameraParameters ret = mLibInterface->getParameters();

    // We support facedetect as well
    ret.set("focus-mode-values", "auto");

    // This is for detecting whether we're in camcorder mode or not
    ret.set("cam-mode", mVideoMode ? "1" : "0");

    ret.set("flash-mode-values", "off,on");

    // FFC: we need more preview and picture sizes to support GTalk
    ret.set("preview-size-values", "320x240,640x480,1280x720");

    ret.set(CameraParameters::KEY_MAX_ZOOM, "0");

    return ret;
}
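An application picking one of the sizes advertised in preview-size-values would typically split the comma-separated list and parse each WxH token. A small sketch of such a check (listContainsSize is a hypothetical helper, not part of the wrapper):

// Parse a "WxH,WxH,..." list such as "320x240,640x480,1280x720".
static bool listContainsSize(const char *sizes, int width, int height)
{
    if (sizes == NULL)
        return false;

    while (*sizes != '\0') {
        int w = 0, h = 0;
        if (sscanf(sizes, "%dx%d", &w, &h) == 2 && w == width && h == height)
            return true;

        const char *next = strchr(sizes, ',');
        if (next == NULL)
            break;
        sizes = next + 1;
    }
    return false;
}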
Example #4
/**
 * Set a camera parameter
 */
int CaptureCommand::capture_setParameter(Value& name, Value& value) {
  LOG_ERROR((name.isNull()), "name not specified");
  LOG_ERROR((value.isNull()), "value not specified");
  LOG_ERROR((mCamera.get() == NULL), "camera not initialized");

  CameraParameters params = mCamera->getParameters();
  params.set(name.asCString(), value.asCString());
  status_t err = mCamera->setParameters(params.flatten());
  if (err != ::OK) {
    ALOGW("Error %d: Failed to set '%s' to '%s'", err,
      name.asCString(), value.asCString());
  }
  return 0;
}
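The matching read path is not shown in this example. A hedged sketch of what a capture_getParameter-style handler might look like, reusing the same getParameters() round trip (the handler itself is hypothetical; Value is assumed to be the same JSON value type used by the setter):

// Hypothetical counterpart to capture_setParameter: read one parameter back.
int CaptureCommand::capture_getParameter(Value& name, Value& result) {
  LOG_ERROR((name.isNull()), "name not specified");
  LOG_ERROR((mCamera.get() == NULL), "camera not initialized");

  CameraParameters params = mCamera->getParameters();
  const char* value = params.get(name.asCString());
  result = Value(value != NULL ? value : "");
  return 0;
}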
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
                                             OMX_U32 &optimal,
                                             OMX_U32 &far,
                                             CameraParameters& params)
{
    status_t ret = NO_ERROR;

    LOG_FUNCTION_NAME;

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(near, mFocusDistNear, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(optimal, mFocusDistOptimal, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        ret = encodeFocusDistance(far, mFocusDistFar, FOCUS_DIST_SIZE);
        if ( NO_ERROR != ret )
            {
            CAMHAL_LOGEB("Error encoding near focus distance 0x%x", ret);
            }
        }

    if ( NO_ERROR == ret )
        {
        snprintf(mFocusDistBuffer, ( FOCUS_DIST_BUFFER_SIZE - 1) ,"%s,%s,%s", mFocusDistNear,
                                                                              mFocusDistOptimal,
                                                                              mFocusDistFar);

        params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
        }

    LOG_FUNCTION_NAME_EXIT;

    return ret;
}
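encodeFocusDistance() itself is not part of this excerpt. As a rough sketch of what such a helper could do, assuming the distance arrives in centimetres and that 0 denotes infinity (both assumptions, not taken from the code above):

// Hypothetical sketch: turn a distance value into the string form expected
// by KEY_FOCUS_DISTANCES ("Infinity", or metres as a decimal number).
static status_t encodeFocusDistanceSketch(OMX_U32 dist, char *buffer, size_t length)
{
    if ( ( NULL == buffer ) || ( 0 == length ) )
        {
        return BAD_VALUE;
        }

    if ( 0 == dist )
        {
        strncpy(buffer, "Infinity", length - 1);
        buffer[length - 1] = '\0';
        }
    else
        {
        // Assumed unit: centimetres -> metres.
        snprintf(buffer, length, "%.2f", dist / 100.0f);
        }

    return NO_ERROR;
}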
Example #6
void CameraHandler::setProperty(int propIdx, double value)
{
    LOGD("CameraHandler::setProperty(%d, %f)", propIdx, value);

    switch (propIdx)
    {
    case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
    {
        int w,h;
        params.getPreviewSize(&w, &h);
        w = (int)value;
        params.setPreviewSize(w, h);
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
    {
        int w,h;
        params.getPreviewSize(&w, &h);
        h = (int)value;
        params.setPreviewSize(w, h);
    }
    break;
    case ANDROID_CAMERA_PROPERTY_EXPOSURE:
    {
        int max_exposure = params.getInt("max-exposure-compensation");
        int min_exposure = params.getInt("min-exposure-compensation");
        if(max_exposure && min_exposure){
            int exposure = (int)value;
            if(exposure >= min_exposure && exposure <= max_exposure){
                params.set("exposure-compensation", exposure);
            } else {
                LOGE("Exposure compensation not in valid range (%i,%i).", min_exposure, max_exposure);
            }
        } else {
            LOGE("Exposure compensation adjust is not supported.");
        }
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM){
            const char* mode_name = flashModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mode_name))
                params.set(CameraParameters::KEY_FLASH_MODE, mode_name);
            else
                LOGE("Flash mode %s is not supported.", mode_name);
        } else {
            LOGE("Flash mode value not in valid range.");
        }
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM){
            const char* mode_name = focusModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mode_name))
                params.set(CameraParameters::KEY_FOCUS_MODE, mode_name);
            else
                LOGE("Focus mode %s is not supported.", mode_name);
        } else {
            LOGE("Focus mode value not in valid range.");
        }
    }
    break;
    case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM){
            const char* mode_name = whiteBalanceModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mode_name))
                params.set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
            else
                LOGE("White balance mode %s is not supported.", mode_name);
        } else {
            LOGE("White balance mode value not in valid range.");
        }
    }
    break;
    case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM){
            const char* mode_name = antibandingModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mode_name))
                params.set(CameraParameters::KEY_ANTIBANDING, mode_name);
            else
                LOGE("Antibanding mode %s is not supported.", mode_name);
        } else {
            LOGE("Antibanding mode value not in valid range.");
        }
    }
    break;
    default:
        LOGW("CameraHandler::setProperty - Unsupported property.");
    };
}
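The is_supported() helper used throughout this switch is not shown here; conceptually it just checks whether a mode name occurs in the comma-separated supported-values string. A minimal sketch of that idea (modeIsSupported is a hypothetical stand-in, and strstr() is a deliberately loose substring match):

// Sketch: does the comma-separated list behind supp_key contain mode_name?
static bool modeIsSupported(const CameraParameters& params,
                            const char* supp_key, const char* mode_name)
{
    const char* supported = params.get(supp_key);
    return supported != NULL && mode_name != NULL
        && strstr(supported, mode_name) != NULL;
}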
CameraParameters MotoCameraWrapper::getParameters() const
{
    CameraParameters ret = mMotoInterface->getParameters();

    /* cut down supported effects to values supported by framework */
    ret.set(CameraParameters::KEY_SUPPORTED_EFFECTS,
            "none,mono,sepia,negative,solarize,red-tint,green-tint,blue-tint");

    /* Motorola uses mot-exposure-offset instead of exposure-compensation
       for whatever reason, so adapt the values here.
       The limits below are taken from the lib; we could parse them from
       there as well, but it's likely not worth the hassle. */
    float exposure = ret.getFloat("mot-exposure-offset");
    int exposureParam = (int) round(exposure * 3);

    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, exposureParam);
    ret.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "9");
    ret.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "-9");
    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, "0.3333333333333");
    ret.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV422I);

    /* Device specific options */
    switch (mCameraType) {
        case DEFY_GREEN:
            /* The original zoom ratio string is '100,200,300,400,500,600',
             * but 500 and 600 are broken for the SOC camera, so we limit
             * the range here
             */
            ret.set(CameraParameters::KEY_MAX_ZOOM, "3");
            ret.set(CameraParameters::KEY_ZOOM_RATIOS, "100,200,300,400");
            ret.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
                    "(1000,30000),(1000,25000),(1000,20000),(1000,24000),(1000,15000),(1000,10000)");
            ret.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "1000, 30000");
            break;
        case DEFY_RED:
            ret.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
                    "(1000,30000),(1000,25000),(1000,20000),(1000,24000),(1000,15000),(1000,10000)");
            ret.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "1000, 30000");
            break;
        default:
            break;
    }
    ret.set("cam-mode", mVideoMode ? "1" : "0");

    return ret;
}
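On the setParameters() path a wrapper like this typically also has to keep the zoom index the app requests inside the range advertised above via KEY_MAX_ZOOM. A small hedged sketch of that clamp (clampZoom is a hypothetical helper):

// Sketch: keep the requested zoom index inside the advertised range.
static void clampZoom(CameraParameters &params)
{
    int zoom = params.getInt(CameraParameters::KEY_ZOOM);
    int maxZoom = params.getInt(CameraParameters::KEY_MAX_ZOOM);

    if (zoom < 0)
        params.set(CameraParameters::KEY_ZOOM, 0);
    else if (zoom > maxZoom)
        params.set(CameraParameters::KEY_ZOOM, maxZoom);
}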
Example #8
/**
 * Thread function that initializes the camera
 */
status_t CaptureCommand::initThreadCamera() {
  // Setup the camera
  int cameraId = 0;
  mCamera = Camera::connect(cameraId, String16(CAMERA_NAME),
      Camera::USE_CALLING_UID);
  if (mCamera == NULL) {
    ALOGE("Unable to connect to camera");
    return -1;
  }
  mRemote = mCamera->remote();

  FaceDetection faces(&mChannel);
  mCamera->setListener(&faces);

  {
    char previewSize[80];
    snprintf(previewSize, sizeof(previewSize), "%dx%d", sVideoSize.width, sVideoSize.height);
    CameraParameters params = mCamera->getParameters();
    params.set(CameraParameters::KEY_PREVIEW_SIZE, previewSize);
    params.set(CameraParameters::KEY_PREVIEW_FORMAT, "yuv420sp");
    status_t err = mCamera->setParameters(params.flatten());
    CHECK(err == 0);
    params = mCamera->getParameters();
    params.dump();
  }

  sOpenCVCameraCapture->setPreviewProducerListener(this);
  CHECK(setPreviewTarget() == 0);

  //CHECK(mCamera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, CAMERA_FACE_DETECTION_SW, 0) == 0);
  CHECK(mCamera->sendCommand(CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG, 1, 0) == 0);

  mCameraSource = CameraSource::CreateFromCamera(mRemote, mCamera->getRecordingProxy(), cameraId,
      String16(CAMERA_NAME, strlen(CAMERA_NAME)), Camera::USE_CALLING_UID,
      sVideoSize, sFPS,
      NULL, sUseMetaDataMode);
  CHECK_EQ(mCameraSource->initCheck(), ::OK);

  if (sInitCameraVideo) {
    mLooper = new ALooper;
    mLooper->setName("capture-looper");
    mLooper->start();

    sp<MediaSource> videoEncoder = prepareVideoEncoder(mLooper, mCameraSource);

    sp<MediaSource> audioSource(
      new AudioSource(
        AUDIO_SOURCE_MIC,
#ifdef TARGET_GE_MARSHMALLOW
        String16("silk-capture"),
#endif
        sAudioSampleRate,
        sAudioChannels
      )
    );
    sp<MediaSource> audioSourceEmitter =
      new AudioSourceEmitter(&mChannel, audioSource);
    sp<MediaSource> audioMutter =
      new AudioMutter(audioSourceEmitter);
    sp<MediaSource> audioEncoder =
      prepareAudioEncoder(mLooper, audioMutter);

    mSegmenter = new MPEG4SegmenterDASH(videoEncoder, audioEncoder, &mChannel);
    mSegmenter->run();

    mHardwareActive = true;
    notifyCameraEvent("initialized");

    // Block this thread while camera is running
    mSegmenter->join();
  } else {
    pthread_create(&mAudioThread, NULL, initThreadAudioOnlyWrapper, this);

    CHECK_EQ(mCameraSource->start(), ::OK);
    MediaSourceNullPuller cameraPuller(mCameraSource, "camera");
    if (!cameraPuller.loop()) {
      notifyCameraEvent("error");
    }
  }

  sOpenCVCameraCapture->setPreviewProducerListener(NULL);
  return 0;
}
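Since setParameters() may silently adjust values, reading the parameters back (as the params.dump() call above already does) and comparing against the requested preview size is a cheap sanity check. A hedged sketch of such a check (previewSizeApplied is a hypothetical helper):

// Sketch: verify the preview size the camera actually accepted.
static bool previewSizeApplied(const sp<Camera>& camera, int wantW, int wantH) {
  CameraParameters params(camera->getParameters());
  int w = 0, h = 0;
  params.getPreviewSize(&w, &h);
  if (w != wantW || h != wantH) {
    ALOGW("Preview size is %dx%d, wanted %dx%d", w, h, wantW, wantH);
    return false;
  }
  return true;
}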
Example #9
/*! 
 \return 0 on success, otherwise error code
*/
int camera_open(void)
{
	LOGE("%s",__FUNCTION__);

	CameraParameters	params;
	status_t		rtn;

	/* Open video device */
#ifdef CAMERA_FOR_SCANNER
	//pCamera = CameraScanner::connect(0);
	fd = open("/sys/devices/platform/image_sensor/currDrvIndex", O_RDWR);
	if (fd < 0)
	{
		LOGE("Cannot open currDrvIndex\n");
		return -1;
	}
	write(fd, "20002", 5);

	fd_scanner1 = open(SE955_CHANNEL_NAME, O_RDONLY);

	pCamera = Camera::connect(1);
#else
	pCamera = CameraScanner::connect(ANDROID_CAMERA_ID);
	//pCamera = CameraScanner::connect(0);
#endif
	if( pCamera == 0)
	{
		LOGE("Cannot connect to camera %d of: %i, %s\n", ANDROID_CAMERA_ID, errno, strerror(errno));
		return -errno;
	}

	LOGE("Camera %d \n", ANDROID_CAMERA_ID);
#ifdef CAMERA_FOR_SCANNER
	pCamera->setListener(&scannerListener);		// enable data callback
#else
	pCamera->setScannerListener(&scannerListener);	// enable data callback
#endif

#ifdef VIDEO_SUBDEV 
	/* Open subdevice */
	video_subdev_handle = ::open(VIDEO_SUBDEV, O_RDWR);
	if (video_subdev_handle == -1) {
		LOGE("ERROR opening %s: %d\n", VIDEO_SUBDEV, errno);
		return -errno;
	}

	LOGE("Subdevice: %s, handle = %d\n", VIDEO_SUBDEV, video_subdev_handle);
#endif

	ImagerProps.width = 256; ImagerProps.height = 64;
#ifdef CAMERA_FOR_SCANNER
	//ImagerProps.width = 624;
	//ImagerProps.height = 474;
	ImagerProps.width = 640;
	ImagerProps.height = 480;
	ImagerProps.mount = 2;
	ImagerProps.i2c_addr_sensor = 0x48;
	ImagerProps.i2c_addr_psoc = 0x40;
	ImagerProps.i2c_addr_clock = 0x69;
#else
	if( 0 != camera_ioctl(HSM_GET_PROPERTIES, &ImagerProps) )
	{
		pCamera->disconnect();
		LOGE("HSM_GET_PROPERTIES error %d\n", errno);
		return -1;
	}

	LOGE("Image size = %dx%d\n", ImagerProps.width, ImagerProps.height);
#endif

	cbBufLength = ImagerProps.height * ImagerProps.width;

	params.unflatten(pCamera->getParameters());
	params.set("mtk-cam-mode", 1);
#if 0 //def CAMERA_FOR_SCANNER
#else
	params.set("scanner-mode", "on");
#endif
	params.setPreviewSize(ImagerProps.width, ImagerProps.height);
	//params.setPreviewFormat("yuv422i-yuyv"); // FIXME: "raw"
	//params.setPreviewFormat("yuv420sp"); // FIXME: "raw" 
	rtn = pCamera->setParameters(params.flatten());
	if( rtn != OK )
	{
		LOGE("setParameters error %d\n", rtn);
	}
#ifdef CAMERA_FOR_SCANNER
	if (fd >= 0)
		write(fd, "20001", 5);
#endif

#if 0
	rtn = pCamera->setPreviewTexture(dummy_texture); // FIXME: Is there a dummy texture?
	if( rtn != OK )
	{
		KIL_ERR("setPreviewDisplay error %d\n", rtn);
	}
#endif

	return 0;
}
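The matching teardown is not part of this example. A hypothetical camera_close() that releases what camera_open() acquired (the globals and file descriptors are the ones used above; the function itself is an assumption) might look roughly like this:

/* Hypothetical counterpart to camera_open(): release everything we opened. */
int camera_close(void)
{
	if (pCamera != 0)
	{
		pCamera->disconnect();
		pCamera = 0;
	}

#ifdef CAMERA_FOR_SCANNER
	if (fd >= 0)
	{
		close(fd);
		fd = -1;
	}
	if (fd_scanner1 >= 0)
	{
		close(fd_scanner1);
		fd_scanner1 = -1;
	}
#endif

#ifdef VIDEO_SUBDEV
	if (video_subdev_handle != -1)
	{
		::close(video_subdev_handle);
		video_subdev_handle = -1;
	}
#endif

	return 0;
}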
Example #10
void CameraHandler::setProperty(int propIdx, double value)
{
    LOGD("CameraHandler::setProperty(%d, %f)", propIdx, value);

    android::String8 params_str;
    params_str = camera->getParameters();
    LOGI("Params before set: [%s]", params_str.string());

    switch (propIdx)
    {
    case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
    {
        int w,h;
        params->getPreviewSize(&w, &h);
        width = (int)value;
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
    {
        int w,h;
        params->getPreviewSize(&w, &h);
        height = (int)value;
    }
    break;
    case ANDROID_CAMERA_PROPERTY_EXPOSURE:
    {
        int max_exposure = params->getInt("max-exposure-compensation");
        int min_exposure = params->getInt("min-exposure-compensation");
        if(max_exposure && min_exposure)
        {
            int exposure = (int)value;
            if(exposure >= min_exposure && exposure <= max_exposure)
                params->set("exposure-compensation", exposure);
            else
                LOGE("Exposure compensation not in valid range (%i,%i).", min_exposure, max_exposure);
        } else
            LOGE("Exposure compensation adjust is not supported.");

        camera->setParameters(params->flatten());
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM)
        {
            const char* mode_name = flashModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mode_name))
                params->set(CameraParameters::KEY_FLASH_MODE, mode_name);
            else
                LOGE("Flash mode %s is not supported.", mode_name);
        }
        else
            LOGE("Flash mode value not in valid range.");

        camera->setParameters(params->flatten());
    }
    break;
    case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM)
        {
            const char* mode_name = focusModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mode_name))
                params->set(CameraParameters::KEY_FOCUS_MODE, mode_name);
            else
                LOGE("Focus mode %s is not supported.", mode_name);
        }
        else
            LOGE("Focus mode value not in valid range.");

        camera->setParameters(params->flatten());
    }
    break;
    case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM)
        {
            const char* mode_name = whiteBalanceModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mode_name))
                params->set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
            else
                LOGE("White balance mode %s is not supported.", mode_name);
        }
        else
            LOGE("White balance mode value not in valid range.");

        camera->setParameters(params->flatten());
    }
    break;
    case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
    {
        int new_val = (int)value;
        if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM)
        {
            const char* mode_name = antibandingModesNames[new_val];
            if(is_supported(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mode_name))
                params->set(CameraParameters::KEY_ANTIBANDING, mode_name);
            else
                LOGE("Antibanding mode %s is not supported.", mode_name);
        }
        else
            LOGE("Antibanding mode value not in valid range.");

        camera->setParameters(params->flatten());
    }
    break;
#if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
    case ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK:
    {
        if (is_supported(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true"))
        {
            if (value != 0)
                params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::TRUE);
            else
                params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::FALSE);
            LOGE("Expose lock is set");
        }
        else
            LOGE("Expose lock is not supported");

        camera->setParameters(params->flatten());
    }
    break;
    case ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK:
    {
        if (is_supported(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true"))
        {
            if (value != 0)
                params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::TRUE);
            else
                params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::FALSE);
            LOGE("White balance lock is set");
        }
        else
            LOGE("White balance lock is not supported");

        camera->setParameters(params->flatten());
    }
    break;
#endif
    default:
        LOGW("CameraHandler::setProperty - Unsupported property.");
    };

    params_str = camera->getParameters();
    LOGI("Params after set: [%s]", params_str.string());
}
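Because this version keeps params as a separate object from the camera's own state, the copy can drift once the HAL adjusts values. One hedged way to re-sync it after a set is to unflatten the string the camera reports back (refreshParams is a hypothetical helper; camera is assumed to expose the same getParameters() used above):

// Sketch: refresh the local CameraParameters copy from the camera itself.
static void refreshParams(const android::sp<android::Camera>& camera,
                          CameraParameters* params)
{
    android::String8 params_str = camera->getParameters();
    params->unflatten(params_str);
}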
Example #11
status_t TVINDevice::setParameters(CameraParameters& params)
{
    int  w, h;
    int  framerate, local_framerate;
    int  max_zoom, zoom, max_fps, min_fps;
    char tmp[128];

    Mutex::Autolock lock(mLock);

    max_zoom = params.getInt(CameraParameters::KEY_MAX_ZOOM);
    zoom     = params.getInt(CameraParameters::KEY_ZOOM);
    if (zoom > max_zoom) {
        FLOGE("Invalid zoom setting, zoom %d, max zoom %d", zoom, max_zoom);
        return BAD_VALUE;
    }
    if (!((strcmp(params.getPreviewFormat(), "yuv420sp") == 0) ||
          (strcmp(params.getPreviewFormat(), "yuv420p") == 0) ||
          (strcmp(params.getPreviewFormat(), "yuv422i-yuyv") == 0))) {
        FLOGE("Only yuv420sp or yuv420pis supported, but input format is %s",
              params.getPreviewFormat());
        return BAD_VALUE;
    }

    if (strcmp(params.getPictureFormat(), "jpeg") != 0) {
        FLOGE("Only jpeg still pictures are supported");
        return BAD_VALUE;
    }

    params.getPreviewSize(&w, &h);
    sprintf(tmp, "%dx%d", w, h);
    FLOGI("Set preview size: %s", tmp);
    if (strstr(mSupportedPreviewSizes, tmp) == NULL) {
        FLOGE("The preview size w %d, h %d is not corrected", w, h);
        return BAD_VALUE;
    }

    params.getPictureSize(&w, &h);
    sprintf(tmp, "%dx%d", w, h);
    FLOGI("Set picture size: %s", tmp);
    if (strstr(mSupportedPictureSizes, tmp) == NULL) {
        FLOGE("The picture size w %d, h %d is not corrected", w, h);
        return BAD_VALUE;
    }

    local_framerate = mParams.getPreviewFrameRate();
    FLOGI("get local frame rate:%d FPS", local_framerate);
    if ((local_framerate > 30) || (local_framerate < 0)) {
        FLOGE("The framerate is not corrected");
        local_framerate = 15;
    }

    framerate = params.getPreviewFrameRate();
    FLOGI("Set frame rate:%d FPS", framerate);
    if ((framerate > 30) || (framerate < 0)) {
        FLOGE("The framerate is not corrected");
        return BAD_VALUE;
    }
    else if (local_framerate != framerate) {
        if (framerate == 15) {
            params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "12000,17000");
        }
        else if (framerate == 30) {
            params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "25000,33000");
        }
    }

    int actual_fps = 15;
    params.getPreviewFpsRange(&min_fps, &max_fps);
    FLOGI("FPS range: %d - %d", min_fps, max_fps);
    if ((max_fps < 1000) || (min_fps < 1000) || (max_fps > 33000) ||
        (min_fps > 33000)) {
        FLOGE("The fps range from %d to %d is error", min_fps, max_fps);
        return BAD_VALUE;
    }
    actual_fps = min_fps > 15000 ? 30 : 15;
    FLOGI("setParameters: actual_fps=%d", actual_fps);
    params.setPreviewFrameRate(actual_fps);

    mParams = params;
    return NO_ERROR;
}
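The strstr() checks above assume mSupportedPreviewSizes and mSupportedPictureSizes hold comma-separated "WxH" lists. A hedged sketch of how such a string might be assembled (SizeEntry and buildSizeList are hypothetical; only the list format follows from the code above):

struct SizeEntry { int width; int height; };

// Sketch: build the "WxH,WxH,..." string that the strstr() checks match against.
static void buildSizeList(const SizeEntry *entries, int count,
                          char *buf, size_t bufLen)
{
    size_t used = 0;

    buf[0] = '\0';
    for (int i = 0; i < count && used < bufLen; i++) {
        used += snprintf(buf + used, bufLen - used, "%s%dx%d",
                         (i == 0) ? "" : ",",
                         entries[i].width, entries[i].height);
    }
}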
Example #12
void mergepara( camera_device_t  *pdev, String8 &argstr)
{
	char * tmp = pdev->ops->get_parameters(pdev);
	CameraParameters def = CameraParameters(String8(tmp));
	char *a;
	char *b;

	pdev->ops->put_parameters(pdev,tmp);
#if PRINTF_CONFIG
	CMR_LOGI("old config =>\n");
	a = const_cast<char *>(def.flatten().string());
	for (;;) {
		b = strchr(a, ';');
		if (b == 0) {
			// If there's no semicolon, this is the last item.
			String8 v(a);
			CMR_LOGI("%s", v.string());
			break;
		} else {
			String8 v(a, (size_t)(b - a));
			CMR_LOGI("%s", v.string());
			a = b + 1;
		}
	}
	CMR_LOGI("old config end\n");
#endif

	//a = const_cast<char *>(add.flatten().string());
	a = (char *) argstr.string();
	CMR_LOGI("new str: %s\n", a);

	for (;;) {
		// Find the bounds of the key name.
		b = strchr(a, '=');
		if (b == 0)
			break;

		// Create the key string.
		String8 k(a, (size_t)(b - a));

		// Find the value.
		a = b + 1;
		b = strchr(a, ',');
		if (b == 0) {
			// If there's no comma, this is the last item.
			//String8 v(a, strlen(a));
			CMR_LOGI("k=%s,v=%s", k.string(), a);
			def.set(k.string(), a);
			break;
		} else {
			String8 v(a, (size_t)(b - a));
			CMR_LOGI("k=%s,v=%s", k.string(), v.string());
			def.set(k.string(), v.string());
			a = b + 1;
		}
	}

#if PRINTF_CONFIG
	CMR_LOGI("new config =>\n");
	a = const_cast<char *>(def.flatten().string());
	for (;;) {
		b = strchr(a, ';');
		if (b == 0) {
			// If there's no semicolon, this is the last item.
			String8 v(a);
			CMR_LOGI("%s", v.string());
			break;
		} else {
			String8 v(a, (size_t)(b - a));
			CMR_LOGI("%s", v.string());
			a = b + 1;
		}
	}
	CMR_LOGI("new config end\n");
#endif
	pdev->ops->set_parameters(pdev, def.flatten().string());

	if (opts.workmode == WK_TAKEPIC) {
		opts.pic_cnt = def.getInt("capture-mode");
		CMR_LOGW("WK_TAKEPIC: opts.pic_cnt = %d\n", opts.pic_cnt);
	}
}
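Usage-wise, the parser above expects argstr to be a comma-separated list of key=value pairs, not the semicolon-separated flatten() format. A small hedged example of driving it (dev is assumed to be a previously opened camera_device_t*; the keys and values are illustrative):

	// Override a few parameters before starting the test run.
	String8 args("preview-size=640x480,picture-size=1600x1200,capture-mode=1");
	mergepara(dev, args);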