status_t CameraHardwareStub::setParameters(const CameraParameters& params)
{
    Mutex::Autolock lock(mLock);
    // XXX verify params

    if (strcmp(params.getPreviewFormat(),
        CameraParameters::PIXEL_FORMAT_YUV420SP) != 0) {
        LOGE("Only yuv420sp preview is supported");
        return -1;
    }

    if (strcmp(params.getPictureFormat(),
        CameraParameters::PIXEL_FORMAT_JPEG) != 0) {
        LOGE("Only jpeg still pictures are supported");
        return -1;
    }

    int w, h;
    params.getPictureSize(&w, &h);
    // Reject the request unless both dimensions match the canned JPEG.
    if (w != kCannedJpegWidth || h != kCannedJpegHeight) {
        LOGE("Still picture size must be size of canned JPEG (%dx%d)",
             kCannedJpegWidth, kCannedJpegHeight);
        return -1;
    }

    mParameters = params;
    initHeapLocked();

    return NO_ERROR;
}
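A minimal caller-side sketch (not part of the original example), assuming a CameraHardwareStub instance named hw; it builds the only configuration the checks above accept.

// Hypothetical usage sketch: hw and the visibility of kCannedJpegWidth /
// kCannedJpegHeight at the call site are assumptions made for illustration.
CameraParameters p = hw->getParameters();
p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420SP);
p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
p.setPictureSize(kCannedJpegWidth, kCannedJpegHeight);
if (hw->setParameters(p) != NO_ERROR) {
    LOGE("setParameters rejected the requested configuration");
}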
Example #2
bool ProtoCameraSettings::CommitColorFX()
{
	MMAL_PARAMETER_COLOURFX_T colfx = { { MMAL_PARAMETER_COLOUR_EFFECT, sizeof(colfx) },
		m_parameters.color_effects().enable(),
		m_parameters.color_effects().u(),
		m_parameters.color_effects().v() };

	return !mmal_status_to_int( mmal_port_parameter_set( m_component->m_mmal_component.GetControlPort(), &colfx.hdr ) );

}
static bool getCameraParam(const CameraParameters& parameters,
                           const char* parameterKey,
                           float* outValue) {
    const char* value = parameters.get(parameterKey);
    if (value) {
        *outValue = parameters.getFloat(parameterKey);
        return true;
    }
    return false;
}
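A short usage sketch for the helper above; the call site and logging are illustrative, while both keys are standard CameraParameters constants.

// Illustrative call site: read optional float parameters without caring
// whether the HAL actually reports them.
float focalLength = -1.0f;
float horizontalViewAngle = -1.0f;
if (getCameraParam(parameters, CameraParameters::KEY_FOCAL_LENGTH, &focalLength)) {
    ALOGV("focal length: %f mm", focalLength);
}
if (getCameraParam(parameters, CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, &horizontalViewAngle)) {
    ALOGV("horizontal view angle: %f degrees", horizontalViewAngle);
}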
/** Return the camera parameters. */
char *HAL_camera_device_get_parameters(struct camera_device *dev)
{
    ExynosCameraAutoTimer autoTimer(__func__);

    ALOGV("DEBUG(%s):", __func__);
    String8 str;
    CameraParameters parms = obj(dev)->getParameters();
    str = parms.flatten();
    return strdup(str.string());
}
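Because the flattened string is returned via strdup(), ownership moves to the caller; a hedged sketch of the expected call pattern, assuming dev is an opened camera_device.

// Caller-side sketch: parse the flattened parameters, then release the
// strdup()'d buffer with free() once it is no longer needed.
char *flat = HAL_camera_device_get_parameters(dev);
CameraParameters parsed;
parsed.unflatten(String8(flat));
free(flat);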
status_t CameraHardware::setParameters(const CameraParameters& params)
{
    Mutex::Autolock lock(mLock);
    int width  = 0;
    int height = 0;
    int framerate = 0;

    LOGD("Set Parameter...!!");

    LOGD("PreviewFormat %s", params.getPreviewFormat());
    if (params.getPreviewFormat() != NULL) {
        if (strcmp(params.getPreviewFormat(), (const char *) CameraParameters::PIXEL_FORMAT_YUV422SP) != 0) {
            LOGE("Only yuv422sp preview is supported");
            return -EINVAL;
        }
    }

    LOGD("PictureFormat %s", params.getPictureFormat());
    if (params.getPictureFormat() != NULL) {
        if (strcmp(params.getPictureFormat(), (const char *) CameraParameters::PIXEL_FORMAT_JPEG) != 0) {
            LOGE("Only jpeg still pictures are supported");
            return -EINVAL;
        }
    }

    /* validate preview size */
    params.getPreviewSize(&width, &height);
    LOGD("preview width:%d, height:%d", width, height);
    if (validateSize(width, height, supportedPreviewRes, ARRAY_SIZE(supportedPreviewRes)) == false) {
        LOGE("Preview size not supported");
        return -EINVAL;
    }

    /* validate picture size */
    params.getPictureSize(&width, &height);
    LOGD("picture width:%d, height:%d", width, height);
    if (validateSize(width, height, supportedPictureRes, ARRAY_SIZE(supportedPictureRes)) == false) {
        LOGE("Picture size not supported");
        return -EINVAL;
    }

    framerate = params.getPreviewFrameRate();
    LOGD("FRAMERATE %d", framerate);

    mParameters = params;

    mParameters.getPictureSize(&width, &height);
    LOGD("Picture Size by CamHAL %d x %d", width, height);

    mParameters.getPreviewSize(&width, &height);
    LOGD("Preview Resolution by CamHAL %d x %d", width, height);

    return NO_ERROR;
}
CameraParameters
JordanCameraWrapper::getParameters() const
{
    CameraParameters ret = mMotoInterface->getParameters();

    if (mCameraType == CAM_SOC) {
        /* the original zoom ratio string is '100,200,300,400,500,600',
           but 500 and 600 are broken for the SOC camera, so limiting
           it here */
        ret.set(CameraParameters::KEY_MAX_ZOOM, "3");
        ret.set(CameraParameters::KEY_ZOOM_RATIOS, "100,200,300,400");
    }

    /* Motorola uses mot-exposure-offset instead of exposure-compensation
       for whatever reason -> adapt the values.
       The limits used here are taken from the lib, we surely also
       could parse it, but it's likely not worth the hassle */
    float exposure = ret.getFloat("mot-exposure-offset");
    int exposureParam = (int) round(exposure * 3);

    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, exposureParam);
    ret.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "9");
    ret.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "-9");
    ret.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, "0.3333333333333");

    ret.set("cam-mode", mVideoMode ? "1" : "0");

    return ret;
}
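The exposure handling above is a plain x3 scale: the Motorola library reports EV in roughly the -3...+3 range, which the wrapper exposes as an index in -9...+9 with a 1/3 EV step (and setParameters() further down divides by 3 again). A small sketch of that mapping, with illustrative helper names.

// Illustrative helpers (not part of the wrapper): mot-exposure-offset 1.0 EV
// maps to exposure-compensation index 3, and index -9 maps back to -3.0 EV.
static inline int evToCompensationIndex(float ev) { return (int) round(ev * 3); }
static inline float compensationIndexToEv(int index) { return index / 3.0f; }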
Example #7
StereoFrame::StereoFrame(
  const size_t id,
  const CameraPose& cameraPose, const CameraParameters& calibrationLeft,
  const double stereo_baseline, const CameraParameters& calibrationRight,
  const ImageFeatures& imageFeaturesLeft, const ImageFeatures& imageFeaturesRight,
  bool bFixed
)
  : frameLeft_(Camera(cameraPose, calibrationLeft), imageFeaturesLeft)
  , frameRight_(Camera(ComputeRightCameraPose(cameraPose, stereo_baseline), calibrationRight), imageFeaturesRight)
  , rectified_camera_parameters_({stereo_baseline, calibrationLeft.focalLengths(), calibrationLeft.principalPoint()})
  , bFixed_( bFixed ), id_( id )
{}
int main(int argc, char** argv) {
    try {
        if (readArguments(argc, argv) == false)
            return 0;

        // read board configuration
        TheBoardConfig.readFromFile(TheBoardConfigFile);

        // Open video input source
        if (TheInputVideo == "") // read from camera
            TheVideoCapturer.open(0);
        else
            TheVideoCapturer.open(TheInputVideo);
        if (!TheVideoCapturer.isOpened()) {
            cerr << "Could not open video" << endl;
            return -1;
        }

        // read first image
        TheVideoCapturer >> TheInputImage;
        // read camera parameters if passed
        TheCameraParams.readFromXMLFile(TheIntrinsicFile);
        TheCameraParams.resize(TheInputImage.size());

        TheBoardDetector.getMarkerDetector().setThresholdParams(25, 7);

        glutInit(&argc, argv);
        glutInitWindowPosition(0, 0);
        glutInitWindowSize(TheInputImage.size().width, TheInputImage.size().height);
        glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE);
        glutCreateWindow("ArUco");
        glutDisplayFunc(vDrawScene);
        glutIdleFunc(vIdle);
        glutReshapeFunc(vResize);
        glutMouseFunc(vMouse);
        glClearColor(0.0, 0.0, 0.0, 1.0);
        glClearDepth(1.0);

        // these two are necessary for the mask effect
        glEnable(GL_ALPHA_TEST);
        glAlphaFunc(GL_GREATER, 0.5);

        TheGlWindowSize = TheInputImage.size();
        vResize(TheGlWindowSize.width, TheGlWindowSize.height);
        glutMainLoop();

    } catch (std::exception& ex)

    {
        cout << "Exception :" << ex.what() << endl;
    }
}
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether the method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}
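An illustrative call site for the helper above (variable names are assumptions):

// Usage sketch: the helper transparently falls back to the preview sizes
// when the HAL does not advertise separate video sizes.
bool setVideoSizeSupported = false;
Vector<Size> frameSizes;
getSupportedVideoSizes(params, &setVideoSizeSupported, frameSizes);
for (size_t i = 0; i < frameSizes.size(); ++i) {
    ALOGV("supported %s size: %dx%d",
          setVideoSizeSupported ? "video" : "preview",
          frameSizes[i].width, frameSizes[i].height);
}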
double CameraHandler::getProperty(int propIdx)
{
    LOGD("CameraHandler::getProperty(%d)", propIdx);

    switch (propIdx)
    {
    case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
    {
        int w,h;
        params.getPreviewSize(&w, &h);
        return w;
    }
    case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
    {
        int w,h;
        params.getPreviewSize(&w, &h);
        return h;
    }
    case ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING:
    {
        cameraPropertySupportedPreviewSizesString = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
        union {const char* str;double res;} u;
        memset(&u.res, 0, sizeof(u.res));
        u.str = cameraPropertySupportedPreviewSizesString.c_str();
        return u.res;
    }

    case ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING:
    {
        const char* fmt = params.get(CameraParameters::KEY_PREVIEW_FORMAT);
        // Use strcmp(): params.get() returns a pointer into the parameter
        // string, so pointer equality against the format constants is not
        // reliable. Also guard against a missing key.
        if (fmt == NULL)
            fmt = "";
        else if (strcmp(fmt, CameraParameters::PIXEL_FORMAT_YUV422SP) == 0)
            fmt = "yuv422sp";
        else if (strcmp(fmt, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
            fmt = "yuv420sp";
        else if (strcmp(fmt, CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
            fmt = "yuv422i";
        else if (strcmp(fmt, CameraParameters::PIXEL_FORMAT_RGB565) == 0)
            fmt = "rgb565";
        else if (strcmp(fmt, CameraParameters::PIXEL_FORMAT_JPEG) == 0)
            fmt = "jpeg";
        cameraPropertyPreviewFormatString = fmt;

        union {const char* str;double res;} u;
        memset(&u.res, 0, sizeof(u.res));
        u.str = cameraPropertyPreviewFormatString.c_str();
        return u.res;
    }

    }
    return -1;
}
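The union above smuggles a const char* through getProperty()'s double return type; the caller is presumably expected to reinterpret the bits the same way. A hedged sketch of that decode step (the handler variable and calling context are assumptions):

// Assumed caller-side decode: recover the pointer that getProperty() packed
// into the double. The string itself stays owned by the CameraHandler.
double packed = handler->getProperty(ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING);
union { const char* str; double res; } u;
memset(&u, 0, sizeof(u));
u.res = packed;
const char* previewFormat = u.str;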
Example #11
/**
 * Set a camera parameter
 */
int CaptureCommand::capture_setParameter(Value& name, Value& value) {
  LOG_ERROR((name.isNull()), "name not specified");
  LOG_ERROR((value.isNull()), "value not specified");
  LOG_ERROR((mCamera.get() == NULL), "camera not initialized");

  CameraParameters params = mCamera->getParameters();
  params.set(name.asCString(), value.asCString());
  status_t err = mCamera->setParameters(params.flatten());
  if (err != ::OK) {
    ALOGW("Error %d: Failed to set '%s' to '%s'", err,
      name.asCString(), value.asCString());
  }
  return 0;
}
Example #12
bool ProtoCameraSettings::CommitROI()
{
	bool ret = false;
	MMAL_PARAMETER_INPUT_CROP_T crop = { { MMAL_PARAMETER_INPUT_CROP, sizeof(MMAL_PARAMETER_INPUT_CROP_T) } };

	crop.rect.x = (65536 * m_parameters.region_of_interest().left() );
	crop.rect.y = (65536 * m_parameters.region_of_interest().top() );
	crop.rect.width = (65536 * m_parameters.region_of_interest().width() );
	crop.rect.height = (65536 * m_parameters.region_of_interest().height() );

	ret = !mmal_port_parameter_set( m_component->m_mmal_component.GetControlPort(),&crop.hdr);

	return ret;
}
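The 65536 factor converts the normalized region-of-interest values into the Q16 fixed-point units MMAL expects for MMAL_PARAMETER_INPUT_CROP; a worked example with illustrative numbers:

// Worked example (illustrative values): a centered crop of half the frame,
// region_of_interest = { left 0.25, top 0.25, width 0.5, height 0.5 }, becomes
//   crop.rect = { 0.25 * 65536, 0.25 * 65536, 0.5 * 65536, 0.5 * 65536 }
//             = { 16384, 16384, 32768, 32768 }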
Example #13
/**
 * Get string camera parameter
 */
int CaptureCommand::capture_getParameterStr(Value& name) {
  LOG_ERROR((name.isNull()), "name not specified");
  LOG_ERROR((mCamera.get() == NULL), "camera not initialized");

  CameraParameters params = mCamera->getParameters();
  const char* value = params.get(name.asCString());

  Value jsonMsg;
  jsonMsg["eventName"] = "getParameter";
  jsonMsg["data"] = (value != NULL) ? value : "";  // guard against a missing key
  mCaptureListener->sendEvent(jsonMsg);

  return 0;
}
int main(int argc,char **argv)
{
    try
    {
        if (readArguments (argc,argv)==false) return 0;

        //read board configuration
        TheBoardConfig.readFromFile(TheBoardConfigFile);

        //Open video input source
        if (TheInputVideo=="live")  //read from camera
            TheVideoCapturer.open(0);
        else TheVideoCapturer.open(TheInputVideo);
        if (!TheVideoCapturer.isOpened())
        {
            cerr<<"Could not open video"<<endl;
            return -1;

        }

        //read first image
        TheVideoCapturer>>TheInputImage;
        //read camera parameters if passed
        TheCameraParams.readFromXMLFile(TheIntrinsicFile);
        TheCameraParams.resize( TheInputImage.size());

        //init GLUT and create the rendering window
        glutInit(&argc, argv);
        glutInitWindowPosition( 0, 0);
        glutInitWindowSize(TheInputImage.size().width,TheInputImage.size().height);
        glutInitDisplayMode( GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE );
        glutCreateWindow( "AruCo" );
        glutDisplayFunc( vDrawScene );
        glutIdleFunc( vIdle );
        glutReshapeFunc( vResize );
        glutMouseFunc(vMouse);
        glClearColor( 0.0, 0.0, 0.0, 1.0 );
        glClearDepth( 1.0 );
        TheGlWindowSize=TheInputImage.size();
        vResize(TheGlWindowSize.width,TheGlWindowSize.height);
        glutMainLoop();

    } catch (std::exception &ex)

    {
        cout<<"Exception :"<<ex.what()<<endl;
    }

}
Example #15
int main(int argc,char **argv)
{
	try
	{
		if(argc==1) usage();
		readArguments (argc,argv);
		if (TheIntrinsicFile==""){cerr<<"-f option required"<<endl;return -1;}
		if (TheMarkerSize==-1){cerr<<"-s option required"<<endl;return -1;}
								 //read from camera
		if (TheInputVideo=="") TheVideoCapturer.open(0);
		else TheVideoCapturer.open(TheInputVideo);
		if (!TheVideoCapturer.isOpened())
		{
			cerr<<"Could not open video"<<endl;
			return -1;

		}

		//read first image
		TheVideoCapturer>>TheInputImage;
		//read camera parameters if passed
		if (isIntrinsicFileYAML)
		  TheCameraParams.readFromXMLFile(TheIntrinsicFile);
		else
		  TheCameraParams.readFromFile(TheIntrinsicFile);
		TheCameraParams.resize(TheInputImage.size());

		glutInit(&argc, argv);
		glutInitWindowPosition( 0, 0);
		glutInitWindowSize(TheInputImage.size().width,TheInputImage.size().height);
		glutInitDisplayMode( GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE );
		glutCreateWindow( "AruCo" );
		glutDisplayFunc( vDrawScene );
		glutIdleFunc( vIdle );
		glutReshapeFunc( vResize );
		glutMouseFunc(vMouse);
		glClearColor( 0.0, 0.0, 0.0, 1.0 );
		glClearDepth( 1.0 );
		TheGlWindowSize=TheInputImage.size();
		vResize(TheGlWindowSize.width,TheGlWindowSize.height);
		glutMainLoop();

	}catch(std::exception &ex)

	{
		cout<<"Exception :"<<ex.what()<<endl;
	}

}
/*!
 *  
 */
int main(int argc,char **argv)
{
  readArguments (argc,argv);

  //read board configuration
  boardConfig.readFromFile(bcon->sval[0]);

  //Open video input source
  if (inp->count==0 || strcmp(inp->sval[0], "live")==0)
  {
    //read from camera
    vCapturer.open(0);

    vCapturer.set(CV_CAP_PROP_FRAME_WIDTH, wid->ival[0]);
    vCapturer.set(CV_CAP_PROP_FRAME_HEIGHT, hei->ival[0]);
    int val = CV_FOURCC('M', 'P', 'E', 'G');
    vCapturer.set(CV_CAP_PROP_FOURCC, val);
  }
  else
    vCapturer.open(inp->sval[0]);

  if (!vCapturer.isOpened())
  {
    std::cerr<<"Could not open video"<<std::endl;
    return -1;
  }

  //read first image
  vCapturer>>inImg;
  //read camera parameters if passed
  camParams.readFromXMLFile(ints->sval[0]);
  camParams.resize( inImg.size());

  glutInit(&argc, argv);
  glutInitWindowPosition( 0, 0);
  glutInitWindowSize(inImg.size().width,inImg.size().height);
  glutInitDisplayMode( GLUT_RGB | GLUT_DEPTH | GLUT_DOUBLE );
  glutCreateWindow( "AruCo" );
  glutDisplayFunc( vDrawScene );
  glutIdleFunc( vIdle );
  glutReshapeFunc( vResize );
  glutMouseFunc(vMouse);
  glClearColor( 0.0, 0.0, 0.0, 1.0 );
  glClearDepth( 1.0 );
  glSize=inImg.size();
  vResize(glSize.width,glSize.height);
  glutMainLoop();
}
/*
 * Check whether the requested frame rate has been successfully configured.
 * If the target frameRate is -1, the check is performed against the current
 * frame rate setting.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    ALOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
#ifndef HTC_3D_SUPPORT  // HTC uses invalid frame rates intentionally on the 3D camera
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        ALOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }
#endif

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}
void cvTackBarEvents(int pos,void*)
{
    if (iThresParam1<3) iThresParam1=3;
    if (iThresParam1%2!=1) iThresParam1++;
    if (ThresParam2<1) ThresParam2=1;
    ThresParam1=iThresParam1;
    ThresParam2=iThresParam2;
    MDetector.setThresholdParams(ThresParam1,ThresParam2);
//recompute
    MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters);
    TheInputImage.copyTo(TheInputImageCopy);
    for (unsigned int i=0;i<TheMarkers.size();i++)	TheMarkers[i].draw(TheInputImageCopy,Scalar(0,0,255),1);
    //print other rectangles that contain no valid markers
    /*for (unsigned int i=0;i<MDetector.getCandidates().size();i++) {
        aruco::Marker m( MDetector.getCandidates()[i],999);
        m.draw(TheInputImageCopy,cv::Scalar(255,0,0));
    }*/

//draw a 3d cube in each marker if there is 3d info
    if (TheCameraParameters.isValid())
        for (unsigned int i=0;i<TheMarkers.size();i++)
            CvDrawingUtils::draw3dCube(TheInputImageCopy,TheMarkers[i],TheCameraParameters);

    cv::imshow("in",TheInputImageCopy);
    cv::imshow("thres",MDetector.getThresholdedImage());
}
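A hedged sketch of how this callback is usually wired up with OpenCV trackbars; the window names, trackbar labels, and slider ranges are assumptions.

// Assumed setup: bind the two threshold values to sliders on the "in" window
// so that moving either one re-runs the detection in cvTackBarEvents().
cv::namedWindow("in", cv::WINDOW_AUTOSIZE);
cv::namedWindow("thres", cv::WINDOW_AUTOSIZE);
cv::createTrackbar("ThresParam1", "in", &iThresParam1, 25, cvTackBarEvents);
cv::createTrackbar("ThresParam2", "in", &iThresParam2, 13, cvTackBarEvents);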
Example #19
static void PrintParamDiff(const CameraParameters& current,
                           const char* new_par)
{
    char tmp[2048];
    const char* wrk = new_par;

    /* Divided with ';' */
    const char* next = strchr(wrk, ';');
    while (next != NULL) {
        snprintf(tmp, sizeof(tmp), "%.*s", (int)(intptr_t)(next-wrk), wrk);
        /* in the form key=value */
        char* val = strchr(tmp, '=');
        if (val != NULL) {
            *val = '\0';
            val++;
            const char* in_current = current.get(tmp);
            if (in_current != NULL) {
                if (strcmp(in_current, val)) {
                    ALOGD("=== Value changed: %s: %s -> %s", tmp, in_current, val);
                }
            } else {
                ALOGD("+++ New parameter: %s=%s", tmp, val);
            }
        } else {
            ALOGW("No value separator in %s", tmp);
        }
        wrk = next + 1;
        next = strchr(wrk, ';');
    }
}
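An illustrative call showing the flattened key=value;key=value form the function walks through (keys and values are made up). Note that a pair is only inspected when a ';' follows it, so the final unterminated pair of a flattened string is skipped.

// Hypothetical input: if current holds flash-mode=off and no "zoom" key,
// this call would log
//   === Value changed: flash-mode: off -> torch
//   +++ New parameter: zoom=2
PrintParamDiff(current, "preview-size=640x480;flash-mode=torch;zoom=2;");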
Example #20
bool ProtoCameraSettings::CommitAwbMode()
{
	MMAL_PARAMETER_AWBMODE_T param = { { MMAL_PARAMETER_AWB_MODE,sizeof(param)}, 
									  ( MMAL_PARAM_AWBMODE_T ) m_parameters.awb_mode() };

	return !mmal_status_to_int(mmal_port_parameter_set(m_component->m_mmal_component.GetControlPort(), &param.hdr ) );
}
void vDrawScene()
{
    if (TheResizedImage.rows==0) //prevent from going on until the image is initialized
        return;
    ///clear
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    ///draw image in the buffer
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, TheGlWindowSize.width, 0, TheGlWindowSize.height, -1.0, 1.0);
    glViewport(0, 0, TheGlWindowSize.width , TheGlWindowSize.height);
    glDisable(GL_TEXTURE_2D);
    glPixelZoom( 1, -1);
    glRasterPos3f( 0, TheGlWindowSize.height  - 0.5, -1.0 );
    glDrawPixels ( TheGlWindowSize.width , TheGlWindowSize.height , GL_RGB , GL_UNSIGNED_BYTE , TheResizedImage.ptr(0) );
    ///Set the appropriate projection matrix so that rendering is done in an
    //environment like the real camera (without distortion)
    glMatrixMode(GL_PROJECTION);
    double proj_matrix[16];
    TheCameraParams.glGetProjectionMatrix(TheInputImage.size(),TheGlWindowSize,proj_matrix,0.05,10);
    glLoadIdentity();
    glLoadMatrixd(proj_matrix);
    glLineWidth(2);
    //now, for each marker,
    double modelview_matrix[16];

    /*    for (unsigned int m=0;m<TheMarkers.size();m++)
        {
            TheMarkers[m].glGetModelViewMatrix(modelview_matrix);
            glMatrixMode(GL_MODELVIEW);
            glLoadIdentity();
            glLoadMatrixd(modelview_matrix);
    // 		axis(TheMarkerSize);
            glColor3f(1,0.4,0.4);
            glTranslatef(0, TheMarkerSize/2,0);
            glPushMatrix();
            glutWireCube( TheMarkerSize );

            glPopMatrix();
        }*/
    //If the board is detected with enough probability
    if (TheBoardDetected.second>0.3) {
        TheBoardDetected.first.glGetModelViewMatrix(modelview_matrix);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glLoadMatrixd(modelview_matrix);
        glColor3f(0,1,0);
        glTranslatef(0, TheMarkerSize/2,0);
        glPushMatrix();
        glutWireCube( TheMarkerSize );
        axis(TheMarkerSize);
        glPopMatrix();
    }

    glutSwapBuffers();

}
Example #22
void
GonkCameraHardware::PullParameters(CameraParameters& aParams)
{
  if (!NS_WARN_IF(mClosing)) {
    const String8 s = mCamera->getParameters();
    aParams.unflatten(s);
  }
}
Example #23
bool ProtoCameraSettings::CommitImageFX()
{
	MMAL_PARAMETER_IMAGEFX_T imgFX = { { MMAL_PARAMETER_IMAGE_EFFECT,sizeof(imgFX) }, 
									   ( MMAL_PARAM_IMAGEFX_T )m_parameters.image_effect() };

	return !mmal_status_to_int(mmal_port_parameter_set(m_component->m_mmal_component.GetControlPort(), &imgFX.hdr));

}
Example #24
bool ProtoCameraSettings::CommitExposureMode()
{
	MMAL_PARAMETER_EXPOSUREMODE_T exp_mode = { { MMAL_PARAMETER_EXPOSURE_MODE,sizeof(exp_mode)}, 
											   ( MMAL_PARAM_EXPOSUREMODE_T ) m_parameters.exposure_mode() };

	return !mmal_status_to_int(mmal_port_parameter_set(m_component->m_mmal_component.GetControlPort(), &exp_mode.hdr));

}
Example #25
void Marker::calculateExtrinsics(float markerSize, const CameraParameters& CP,
                                 bool setYPerpendicular) throw(cv::Exception) {
    if (!CP.isValid())
        throw cv::Exception(
            9004, "!CP.isValid(): invalid camera parameters. It is not possible to calculate extrinsics",
            "calculateExtrinsics", __FILE__, __LINE__);
    calculateExtrinsics(markerSize, CP.CameraMatrix, CP.Distorsion, setYPerpendicular);
}
Example #26
int
GonkCameraHardware::PushParameters(const CameraParameters& aParams)
{
  if (NS_WARN_IF(mClosing)) {
    return DEAD_OBJECT;
  }
  String8 s = aParams.flatten();
  return mCamera->setParameters(s);
}
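Together with PullParameters() above, this gives a flatten/unflatten round trip through String8; a hedged sketch of a read-modify-write cycle inside GonkCameraHardware (the chosen key is illustrative):

// Assumed calling context: pull the current parameters, change one key, push.
CameraParameters params;
PullParameters(params);                      // unflattens mCamera->getParameters()
params.set(CameraParameters::KEY_PICTURE_FORMAT,
           CameraParameters::PIXEL_FORMAT_JPEG);
if (PushParameters(params) != OK) {
    // DEAD_OBJECT is returned when the camera is already closing.
}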
/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}
status_t
JordanCameraWrapper::setParameters(const CameraParameters& params)
{
    CameraParameters pars(params.flatten());
    int width, height;
    char buf[10];
    bool isWide;

    /*
     * getInt returns -1 if the value isn't present and 0 on parse failure,
     * so if it's larger than 0, we can be sure the value was parsed properly
     */
    mVideoMode = pars.getInt("cam-mode") > 0;
    pars.remove("cam-mode");

    pars.getPreviewSize(&width, &height);
    isWide = width == 848 && height == 480;

    if (isWide && !mVideoMode) {
        pars.setPreviewFrameRate(24);
    }
    if (mCameraType == CAM_BAYER && mVideoMode) {
        pars.setPreviewFrameRate(24);
    }

    if (mCameraType == CAM_SOC) {
        /*
         * libsoccamera fails to turn flash on if 16:9 recording is enabled (no matter
         * whether it's photo or video recording), thus we do it ourselves in that case.
         * Luckily libsoccamera handles the automatic flash properly also in the 16:9 case.
         */
        const char *flashMode = pars.get(CameraParameters::KEY_FLASH_MODE);
        if (flashMode != NULL) {
            if (isWide && mLastFlashMode != flashMode) {
                bool shouldBeOn = strcmp(flashMode, CameraParameters::FLASH_MODE_TORCH) == 0 ||
                                  strcmp(flashMode, CameraParameters::FLASH_MODE_ON) == 0;
                setSocTorchMode(shouldBeOn);
            }
            mLastFlashMode = flashMode;
        }
    }

    float exposure = pars.getFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION);
    /* exposure-compensation comes multiplied in the -9...9 range, while
       we need it in the -3...3 range -> adjust for that */
    exposure /= 3;

    /* format the setting in a way the lib understands */
    bool even = fabs(exposure - round(exposure)) < 0.05;
    snprintf(buf, sizeof(buf), even ? "%.0f" : "%.2f", exposure);
    pars.set("mot-exposure-offset", buf);

    /* kill off the original setting */
    pars.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, "0");

    return mMotoInterface->setParameters(pars);
}
Example #29
bool ProtoCameraSettings::CommitFlips()
{
	bool ret = true;
	MMAL_PARAMETER_MIRROR_T mirror = { { MMAL_PARAMETER_MIRROR, sizeof(MMAL_PARAMETER_MIRROR_T) }, 
									   MMAL_PARAM_MIRROR_NONE };

	if ( m_parameters.horizontal_flip() && m_parameters.vertical_flip() )
	   mirror.value = MMAL_PARAM_MIRROR_BOTH;
	else if ( m_parameters.horizontal_flip() )
	   mirror.value = MMAL_PARAM_MIRROR_HORIZONTAL;
	else if ( m_parameters.vertical_flip() )
	   mirror.value = MMAL_PARAM_MIRROR_VERTICAL;

	/* Apply the mirror setting to every port and report success only if all three calls succeed. */
	ret &= !mmal_status_to_int( mmal_port_parameter_set( m_component->m_preview_port->MmalPort(), &mirror.hdr) );
	ret &= !mmal_status_to_int( mmal_port_parameter_set( m_component->m_video_port->MmalPort(), &mirror.hdr) );
	ret &= !mmal_status_to_int( mmal_port_parameter_set( m_component->m_still_port->MmalPort(), &mirror.hdr) );

	return ret;
}
/*
 * Check whether the requested video frame size has been successfully
 * configured. If both width and height are -1, the check is performed
 * against the current width and height settings.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}