Example no. 1
void LHSVision::UpdateVision() //by Michael
{
	if(mXbox->GetRawButton(XBOX::XBOX_A_BUTTON) == true)	//Toggle on A button press
	{
		while(mXbox->GetRawButton(XBOX::XBOX_A_BUTTON) == true)	//Prevents multi-press
		{}
		if(send == 1)
		{
			StopCamera(1);	//Stop Cam 1
			StartCamera(2);	//Start Cam 2
			printf("\nActivating Camera 2 - Raw Image Display\n\n");
			send = 2;
		}
		else
		{
			StopCamera(2);	//Stop Cam 2
			StartCamera(1);	//Start Cam 1
			printf("\nActivating Camera 1 - Raw Image Display\n\n");
			send = 1;
		}
	}

	if(send == 1)
	{
		IMAQdxGrab(session, frame, true, NULL);
		SendToDashboard(frame);		//Send Cam 1
		Wait(.1);
	}
	else
	{
		IMAQdxGrab(session2, frame2, true, NULL);
		SendToDashboard(frame2);	//Send Cam 2
		Wait(.1);
	}
}
Example no. 2
Gfx_Camera::Gfx_Camera(Graphic_Util &g):
	graphic_util(g)
{
	std::cout << "CAMERA:" << g_config.camera_width << "," << g_config.camera_height << std::endl;
	// Initialize the camera: 15 fps, 1 detail level, no RGB conversion
	
	cam = StartCamera(g_config.camera_width, g_config.camera_height, 15, 1, false); 
	
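	// Grey-scale textures hold the camera's Y, U and V planes; the RGBA textures are render targets with framebuffers attached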
	ytexture.CreateGreyScale(g_config.camera_width, g_config.camera_height);
	utexture.CreateGreyScale(g_config.camera_width / 2, g_config.camera_height / 2);
	vtexture.CreateGreyScale(g_config.camera_width / 2, g_config.camera_height / 2);

	yreadtexture.CreateRGBA(g_config.camera_width, g_config.camera_height);
	yreadtexture.GenerateFrameBuffer();
	
	ureadtexture.CreateRGBA(g_config.camera_width / 2, g_config.camera_height / 2);
	ureadtexture.GenerateFrameBuffer();
	
	vreadtexture.CreateRGBA(g_config.camera_width / 2, g_config.camera_height / 2);
	vreadtexture.GenerateFrameBuffer();
	
	rgbtextures.CreateRGBA(g_config.camera_width, g_config.camera_height);
	rgbtextures.GenerateFrameBuffer();
	
	resulttexture.CreateRGBA(g_config.camera_width, g_config.camera_height);
	resulttexture.GenerateFrameBuffer();
}
Example no. 3
void QmitkToFRecorderWidget::OnPlay()
{
  m_Controls->m_PlayButton->setChecked(true);
  m_Controls->m_PlayButton->setEnabled(false);
  m_Controls->m_RecorderGroupBox->setEnabled(true);
  this->repaint();

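  // Start acquisition, then notify listeners via the ToFCameraStarted() signal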
  StartCamera();

  emit ToFCameraStarted();
}
Example no. 4
VisionNode::VisionNode() {
    cv::FileStorage fs("/home/roboy/workspace/mocap/src/intrinsics.xml", cv::FileStorage::READ);
    if (!fs.isOpened()) {
        ROS_ERROR("could not open intrinsics.xml");
        return;
    }
    fs["camera_matrix"] >> cameraMatrix;
    fs["distortion_coefficients"] >> distCoeffs;
    fs.release();

    ID = 0;

    // calculate undistortion mapping
    initUndistortRectifyMap(cameraMatrix, distCoeffs, Mat(),
                            getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, cv::Size(WIDTH, HEIGHT), 1,
                                                      cv::Size(WIDTH, HEIGHT), 0),
                            cv::Size(WIDTH, HEIGHT), CV_16SC2, map1, map2);

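    // Advertise the marker position, video and camera ID topics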
    marker_position_pub = new ros::Publisher;
    *marker_position_pub = nh.advertise<communication::MarkerPosition>("/mocap/marker_position", 100);

    video_pub = new ros::Publisher;
    *video_pub = nh.advertise<sensor_msgs::Image>("/mocap/video", 1);

    camera_control_sub = nh.subscribe("/mocap/camera_control", 100, &VisionNode::camera_control, this);

    cameraID_pub = new ros::Publisher;
    *cameraID_pub = nh.advertise<std_msgs::Int32>("/mocap/cameraID", 100);

    // Wait for the mocap plugin to subscribe before publishing the camera ID
    while (cameraID_pub->getNumSubscribers() < 1) {
        ros::Duration d(1.0);
        if (!ros::ok()) {
            return;
        }
        ROS_WARN_ONCE("Waiting for mocap plugin to subscribe to /mocap/cameraID");
        d.sleep();
    }
    ROS_INFO_ONCE("Found subscriber");

    spinner = new ros::AsyncSpinner(1);
    spinner->start();

    std_msgs::Int32 msg;
    msg.data = ID;
    cameraID_pub->publish(msg);

    img = cv::Mat(HEIGHT, WIDTH, CV_8UC4, img_data);
    img_rectified = cv::Mat(HEIGHT, WIDTH, CV_8UC4, img_rectified_data);

    t1 = std::chrono::high_resolution_clock::now();

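    // Start grabbing; frames arrive asynchronously through CameraCallback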
    StartCamera(WIDTH, HEIGHT, 90, CameraCallback);
}
Example no. 5
File: picam.cpp Project: Roboy/legs
int main(int argc, const char **argv)
{
	printf("PI Cam api tester\n");
	dest = new unsigned char[HEIGHT*WIDTH*4];
	fps = 0;

	startMilliSecondTimer();
	StartCamera(WIDTH,HEIGHT,90,CameraCallback);
	sleep(30);
	StopCamera();
	delete[] dest;

	return 0;
}
Example no. 6
//=============================================================================
// Game
//=============================================================================
void CCamera::SceneGame(void)
{
	int phase = CGame::GetPhase();
	switch (phase)
	{
	case START_PHASE:	StartCamera(); break;
	case SCENARIO_PHASE:ShotCamera(); break;
	case ANGLE_PHASE:	ShotCamera(); break;
	case POWER_PHASE:	PowerCamera(); break;
	case MOVE_PHASE:	MoveCamera(); break;
	case JUDGE_PHASE:	JudgeCamera(); break;
	case END_PHASE:		JudgeCamera(); break;
	case CHANGE_PHASE:	ChangeCamera(); break;
	}
}
Example no. 7
int main()
{
	int ret = 0;
	void * mV4l2_ctx = NULL;
	int mCaptureFormat;
	int width = 1280;
	int height = 720;

	int number = 0;

	struct v4l2_buffer buf;
	memset(&buf, 0, sizeof(buf));
	
	mV4l2_ctx = CreateV4l2Context();

	setV4L2DeviceName(mV4l2_ctx, "/dev/video1");

	// open v4l2 camera device
	ret = OpenCamera(mV4l2_ctx);

	if (ret != 0)
	{
		LOGE("openCameraDevice failed\n");
		return ret;
	}

	StartCamera(mV4l2_ctx, &width, &height);

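	// Grab and immediately return 300 frames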
	while(number < 300) {

		CameraGetOneframe(mV4l2_ctx, &buf);
		CameraReturnOneframe(mV4l2_ctx, buf.index);
		LOGD("test, number: %d", number);

		number++;
	}
	
	StopCamera(mV4l2_ctx);

	CloseCamera(mV4l2_ctx);
	DestroyV4l2Context(mV4l2_ctx);
	mV4l2_ctx = NULL;

	return 0;
}
Example no. 8
void CameraAppTask(void *param)
{
    UINT8 Opcode = 0x02;
    struct HttpBlob Write;

    InitCameraComponents(640, 480);

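    // Capture a frame and send it over the WebSocket while the connection is open (g_close == 0)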
    while(1)
    {
        if(g_close == 0)
        {
            Write.uLength = StartCamera((char **)&Write.pData);

            if(!sl_WebSocketSend(g_uConnection, Write, Opcode))
            {
                while(1);   // trap here if the WebSocket send fails
            }
        }
    }

}
Example no. 9
int init_hardware()
{

    if (gpioInitialise() < 0)
    {
      printf("GPIO initialization failed \n");
      return -1;
    }
    //gpioSetMode(24,PI_OUTPUT);

    // set motor pins
    gpioSetMode(12,PI_OUTPUT);
    gpioSetMode(16,PI_OUTPUT);
    gpioSetMode(20,PI_OUTPUT);
    gpioSetMode(21,PI_OUTPUT);

    // how many detail levels (1 = just the capture res, > 1 goes down by half each level, 4 max)
    //int num_levels = 1;
    cam = StartCamera(CAMERA_WIDTH, CAMERA_HEIGHT, 30, 1, true);
    if (cam == NULL)
    {
        printf(" Camera initialization failure\n");
        return -2;
    }
    // screen parameters


    spi_h = spiOpen(0,5000,0);
    if (spi_h<0)
    {
        printf("SPI failure\n");
        return -3;
    }
    // success
    // give camera time to settle
    sleep(1);
    return 0;

}
Example no. 10
void *camThread(void* tid) {
    const void* buffer;
    int buffer_len;
    int sval;
    int i = 0;
    
    // Initialize and configure the camera
    CCamera* cam = StartCamera(PIXELS_X, PIXELS_Y, FPS, 1, false);
    TRACE(DEBUG, "Camera opened\n");
    
    for(;;i++) {
        //TRACE(DEBUG, "\n(CAMERA PHOTOGRAM %d, milliseconds: %ld)\n", i, getMillisSinceStart());
        // Get image data
        int ret;
        int delaySteps = 0;
        //STARTCHRONO
        do {
            ret = cam->BeginReadFrame(0, buffer, buffer_len);
            if (!ret) {
                usleep(100);
                delaySteps++;
            }
        } while (!ret);
        
        pthread_mutex_lock(&mutexImageData);
            memcpy(imageData, buffer, PIXELS_X * PIXELS_Y);
        pthread_mutex_unlock(&mutexImageData);
        
        cam->EndReadFrame(0);
        
        // Let the main process continue
        sem_getvalue(&semCam, &sval);
        if (sval < 1)
            sem_post(&semCam);
        //TRACE(DEBUG, "(delaySteps: %d, time: %d, sval: %d)\n", delaySteps, GETCLICK, sval);
    }
}
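
The Raspberry Pi examples above (nos. 2, 9 and 10) drive the same camera.h-style interface: StartCamera(width, height, fps, num_levels, do_argb_conversion) returns a CCamera*, frames are read with BeginReadFrame()/EndReadFrame(), and the pipeline is torn down with StopCamera(). The snippet below is a minimal capture-loop sketch assuming that interface; it is not taken from any of the projects listed here.

#include <unistd.h>
#include <cstdio>
#include "camera.h"	// assumed: the same picam-style header used in examples 2, 9 and 10

int main()
{
	const int width = 640, height = 480;

	// StartCamera(width, height, fps, num_levels, do_argb_conversion), cf. examples 2 and 9
	CCamera *cam = StartCamera(width, height, 30, 1, false);
	if (cam == NULL)
	{
		printf("Camera initialization failure\n");
		return -1;
	}

	for (int frame = 0; frame < 100; frame++)
	{
		const void *buffer;
		int buffer_len;

		// Poll until a frame is ready, same retry pattern as example 10
		while (!cam->BeginReadFrame(0, buffer, buffer_len))
			usleep(100);

		// ... process buffer here; with do_argb_conversion == false the first
		// width * height bytes are the luma plane (cf. the memcpy in example 10) ...

		cam->EndReadFrame(0);
	}

	StopCamera();	// assumed no-argument teardown, as in example 5
	return 0;
}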
Example no. 11
static int startcamera(AWCameraDevice *p)
{	
	AWCameraDevice* camera_dev = (AWCameraDevice *)(p);
	AWCameraContext* CameraCtx = NULL;
	int err = 0;
	
	if(!camera_dev)
		return -1;

	CameraCtx = (AWCameraContext*)(camera_dev->context);

	if(CameraCtx->callback.cookie == NULL) {
		LOGE("should set callback before start Camera");
		return -1;
	}
	camera_dev->isYUYV = 0;
	setV4L2DeviceName(CameraCtx->v4l2ctx, camera_dev->deviceName);
	OpenCamera(CameraCtx->v4l2ctx);

	StartCamera(CameraCtx->v4l2ctx, &CameraCtx->width, &CameraCtx->height);

	camera_dev->isYUYV = (V4L2_PIX_FMT_YUYV == v4l2GetCaptureFmt(CameraCtx->v4l2ctx));

	err = getFrameRate(CameraCtx->v4l2ctx);
	LOGD("Camera FPS=%d", err );
	
	set_state(CameraCtx, 1);
	
	// Create the camera thread
	err = pthread_create(&CameraCtx->thread_id, NULL, CameraThread, camera_dev);
	if (err || !CameraCtx->thread_id) {
		LOGE("Create thread error");
		return -1;
	}
	
	return 0;
}
Example no. 12
int RenderControl::Execute() {
    int numCamera = Cameras.Size();
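    // Start rendering for each camera; the resulting tasks are joined below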
    while (numCamera--) {
        RenderingCamera * cam = Cameras[numCamera];
        StartCamera(cam);
    }

    // wait for tasks finish
    int Count = Events.Size();
    OsEvent::Join(Count, &Events[0], 1);
    Events.Empty();
    // end stages
    RenderingPath * path = RenderPath[LIGHT_PRE];
    int stages = path->Stages.Size();
    for (int i = 0; i < stages; i++) {
        RenderStage * Stage = path->Stages[i];
        Stage->End();
    }
    RenderQueue_->Execute(RenderProcesser_);
    return 0;
}
Example no. 13
File: main.c Project: dlugaz/All
void CameraService(void *pvParameters)
{
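    // One-shot task: start the camera, then delete this task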
    StartCamera();  
    osi_TaskDelete(&g_CameraTaskHandle);  
}