Example #1
void DrawGLScene()
{
    short *depth = 0;
    char *rgb = 0;
    uint32_t ts;
    if (freenect_sync_get_depth((void**)&depth, &ts, 0, FREENECT_DEPTH_11BIT) < 0)
        no_kinect_quit();
    if (freenect_sync_get_video((void**)&rgb, &ts, 0, FREENECT_VIDEO_RGB) < 0)
        no_kinect_quit();

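    // build one vertex per depth pixel: x = column, y = row, z = raw 11-bit depth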
    static unsigned int indices[480][640];
    static short xyz[480][640][3];
    int i,j;
    for (i = 0; i < 480; i++) {
        for (j = 0; j < 640; j++) {
            xyz[i][j][0] = j;
            xyz[i][j][1] = i;
            xyz[i][j][2] = depth[i*640+j];
            indices[i][j] = i*640+j;
        }
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();

    glPushMatrix();
    glScalef(zoom,zoom,1);
    glTranslatef(0,0,-3.5);
    glRotatef(rotangles[0], 1,0,0);
    glRotatef(rotangles[1], 0,1,0);
    glTranslatef(0,0,1.5);

    LoadVertexMatrix();

    // Set the projection from the XYZ to the texture image
    glMatrixMode(GL_TEXTURE);
    glLoadIdentity();
    glScalef(1/640.0f,1/480.0f,1);
    LoadRGBMatrix();
    LoadVertexMatrix();
    glMatrixMode(GL_MODELVIEW);

    glPointSize(1);

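    // the same xyz array feeds both vertex positions and texture coordinates;
    // the texture matrix set up above projects them onto the RGB image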
    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(3, GL_SHORT, 0, xyz);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glTexCoordPointer(3, GL_SHORT, 0, xyz);

    if (color)
        glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, gl_rgb_tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, rgb);

    glPointSize(2.0f);
    glDrawElements(GL_POINTS, 640*480, GL_UNSIGNED_INT, indices);
    glPopMatrix();
    glDisable(GL_TEXTURE_2D);
    glutSwapBuffers();
}
Example #2
/*******************
 grab the rgb frame
*******************/
static int libkinect_(grab_rgb) (lua_State *L) {
  // Get Tensor's Info
  THTensor * tensor = luaT_checkudata(L, 1, torch_(Tensor_id));
  THTensor *contigTensor = THTensor_(newContiguous)(tensor);
  // Get device ID
  int index = 0;
  if (lua_isnumber(L, 2)) index = lua_tonumber(L, 2);

  THArgCheck(tensor->nDimension == 3 , 1, "RGB buffer: 3x480x640 Tensor expected");
  THArgCheck(tensor->size[0] == 3 , 1, "RGB buffer: 3x480x640 Tensor expected");
  THArgCheck(tensor->size[1] == 480 , 1, "RGB buffer: 3x480x640 Tensor expected");
  THArgCheck(tensor->size[2] == 640 , 1, "RGB buffer: 3x480x640 Tensor expected");

  unsigned int timestamp;
  unsigned char *data = 0;
  if (freenect_sync_get_video((void**)&data, &timestamp, index, FREENECT_VIDEO_RGB))
    luaL_error(L, "<libkinect.grabRGB> Error Kinect not connected?");

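  // de-interleave the packed RGB bytes into the tensor's three channel planes,
  // normalizing each byte to [0,1]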
  int z;
  for (z=0;z<3;z++){
    unsigned char *sourcep = data+z;
    THTensor *tslice = THTensor_(newSelect)(contigTensor,0,z);
    // copy
    TH_TENSOR_APPLY(real, tslice,
                    *tslice_data = ((real)(*sourcep)) / 255;
                    sourcep = sourcep + 3;
                    );
    THTensor_(free)(tslice);
  }
  // copy into the original tensor if it was not contiguous, then release
  THTensor_(freeCopyTo)(contigTensor, tensor);
  return 0;
}
Example #3
void *rgb_out(void *arg){
	while (rgb_connected)
	{
		uint32_t ts,i;
		void *buf_rgb_temp;
		freenect_sync_get_video(&buf_rgb_temp, &ts, 0, FREENECT_VIDEO_RGB);
		uint8_t *rgb = (uint8_t*)buf_rgb_temp;
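		// repack 24-bit RGB into 32-bit pixels: swap R and B and append a zero alpha byte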
		for (i=0; i<FREENECT_FRAME_PIX; i++) {
			buf_rgb[4 * i + 0] = rgb[3 * i + 2];
			buf_rgb[4 * i + 1] = rgb[3 * i + 1];
			buf_rgb[4 * i + 2] = rgb[3 * i + 0];
			buf_rgb[4 * i + 3] = 0x00;
		}
		#ifdef WIN32
		int n = send(rgb_client_socket, (char*)buf_rgb, AS3_BITMAPDATA_LEN, 0);
		#else
		int n = write(rgb_child, buf_rgb, AS3_BITMAPDATA_LEN);
		#endif
		
		if (n != AS3_BITMAPDATA_LEN)
		{
			//fprintf(stderr, "Error on write() for rgb (%d instead of %d)\n",n, AS3_BITMAPDATA_LEN);
			rgb_connected = 0;
		}
	}
	return NULL;
}
Example #4
pcl::PointCloud<pcl::RGB> KinectCloud::GetRGB()
{
    unsigned char *video;
    
    pcl::PointCloud<pcl::RGB> cam;
    cam.width = width;
    cam.height = height;
    cam.points.resize(cam.width * cam.height);
    //SetSevoAngle(servoAngle);
    uint32_t ts;
    if(freenect_sync_get_video((void**) &video, &ts, 0, FREENECT_VIDEO_RGB) < 0)
        printf("Hey, Kinect is not connected\n");
    else // copy the RGB frame, applying a fixed crop/offset calibration
    {
        for(int j = 0; j < height - 20; j++)
            for(int i = 0; i < width; i++)
            {
                int ind = j * width + i;
                int calibration = 3 * ((j + 20) * 640 + (i + 15) * 29 / 32);
                cam.points[ind].r = video[calibration];
                cam.points[ind].g = video[calibration + 1];
                cam.points[ind].b = video[calibration + 2];
            }
    }
    return cam;
}
Example #5
//send video ARGB to client
void sendVideo(){
	uint32_t ts,x, y, i, j;
	freenect_sync_get_video(&buf_rgb_temp, &ts, 0, FREENECT_VIDEO_RGB);
	uint8_t *rgb = (uint8_t*)buf_rgb_temp;

	//MIRROR RGB DATA AND ADD ALPHA
	for(x = 0; x < FREENECT_FRAME_W; x++){
		for(y = 0; y < FREENECT_FRAME_H; y++){
			i = x + (y  * FREENECT_FRAME_W);
			if(!_video_mirrored)
				j = i;
			else
				j = (FREENECT_FRAME_W - x - 1) + (y  * FREENECT_FRAME_W);
			buf_rgb[4 * i + 0] = rgb[3 * j + 2];
			buf_rgb[4 * i + 1] = rgb[3 * j + 1];
			buf_rgb[4 * i + 2] = rgb[3 * j + 0];
			buf_rgb[4 * i + 3] = 0x00;
		}
	}
	int n = sendMessage(0, 1, buf_rgb, AS3_BITMAPDATA_LEN);
	if ( n < 0)
	{
		printf("Error sending Video\n");
		client_connected = 0;
	}
}
Example #6
IplImage *freenect_sync_get_rgb_cv(int index)
{
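	// the header and data pointer are static: the header is allocated once and
	// re-pointed at libfreenect's internal frame buffer on every call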
	static IplImage *image = 0;
	static char *data = 0;
	if (!image) image = cvCreateImageHeader(cvSize(640,480), 8, 3);
	unsigned int timestamp;
	if (freenect_sync_get_video((void**)&data, &timestamp, index, FREENECT_VIDEO_RGB))
		return NULL;
	cvSetData(image, data, 640*3);
	return image;
}
Example #7
 int main()
 {   
     IplImage *image = cvCreateImageHeader(cvSize(640,480), 8, 3);
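     // poll RGB frames and display them until any key is pressed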
     while (cvWaitKey(10) < 0) 
     {
       char *data;
       unsigned int timestamp;
       if (freenect_sync_get_video((void**)(&data), &timestamp, 0, FREENECT_VIDEO_RGB) < 0)
           break;
       cvSetData(image, data, 640*3);
       cvCvtColor(image, image, CV_RGB2BGR);
       cvShowImage("RGB", image);
     }
     freenect_sync_stop();       
     cvReleaseImageHeader(&image);
     return EXIT_SUCCESS;
 }
Example #8
//send video ARGB to client
void sendVideo(){
	int n;
	uint32_t ts,x, y, i, j;
	freenect_sync_get_video(&buf_rgb_temp, &ts, 0, FREENECT_VIDEO_RGB);
	uint8_t *rgb = (uint8_t*)buf_rgb_temp;
	freenect_frame_mode video_mode = freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_VIDEO_RGB);

	//MIRROR RGB DATA AND ADD ALPHA
	for(x = 0; x < video_mode.width; x++){
		for(y = 0; y < video_mode.height; y++){
			i = x + (y  * video_mode.width);
			if(!_video_mirrored)
				j = i;
			else
				j = (video_mode.width - x - 1) + (y  * video_mode.width);
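			// the JPEG path keeps packed 24-bit RGB; the raw path expands to 32-bit with a zero alpha byte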
			if(_video_compression != 0) {
				buf_rgb[3 * i + 2] = rgb[3 * j + 2];
				buf_rgb[3 * i + 1] = rgb[3 * j + 1];
				buf_rgb[3 * i + 0] = rgb[3 * j + 0];
			} else {
				buf_rgb[4 * i + 0] = rgb[3 * j + 2];
				buf_rgb[4 * i + 1] = rgb[3 * j + 1];
				buf_rgb[4 * i + 2] = rgb[3 * j + 0];
				buf_rgb[4 * i + 3] = 0x00;	
			}
		}
	}
	if(_video_compression != 0) {
		unsigned char *compressed_buff = (unsigned char *)malloc(AS3_BITMAPDATA_LEN);
		unsigned long len = 0;
		RGB_2_JPEG(buf_rgb, &compressed_buff, &len, _video_compression);
		n = freenect_network_sendMessage(0, 2, compressed_buff, (int)len);
		free(compressed_buff);
	} else {
		n = freenect_network_sendMessage(0, 2, buf_rgb, AS3_BITMAPDATA_LEN);
	}
	if ( n < 0)
	{
		printf("Error sending Video\n");
		client_connected = 0;
	}
}
Example #9
void CameraNUI::onNextImage() {
	short *depth = 0;
	char *rgb = 0;
	uint32_t ts;
	int ret;

	// retrieve color image
	ret = freenect_sync_get_video((void**)&rgb, &ts, 0, FREENECT_VIDEO_RGB);
	if (ret < 0)
		return;
	cv::Mat tmp_rgb(480, 640, CV_8UC3, rgb);
	cv::cvtColor(tmp_rgb, cameraFrame, CV_RGB2BGR);

	// retrieve depth image
	ret = freenect_sync_get_depth((void**)&depth, &ts, 0, FREENECT_DEPTH_REGISTERED);
	if (ret < 0)
		return;
	cv::Mat tmp_depth(480, 640, CV_16SC1, depth);
	tmp_depth.copyTo(depthFrame);

	// write data to output streams
	outImg.write(cameraFrame);
	outDepthMap.write(depthFrame);

	camera_info.write(m_camera_info);
}
Example #10
 void display()
 {
     void* image = 0;
     void* depth = 0;
     float aux[16];
     uint32_t timestamp;
     if (freenect_sync_get_video(&image, &timestamp, 0, FREENECT_VIDEO_RGB) < 0)
         return;
     if (freenect_sync_get_depth(&depth, &timestamp, 0, FREENECT_DEPTH_11BIT) < 0)
         return;
     
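     // feed the new depth frame to the fusion pipeline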
     mFfusion->update(depth);
     
     glDisable(GL_TEXTURE_2D);
     glPointSize(1);
     
     glMatrixMode(GL_MODELVIEW);
     
     glEnableClientState(GL_VERTEX_ARRAY);
     glEnableClientState(GL_COLOR_ARRAY);
     
     glRotated(180, 0, 0, 1);
     
     if(mDrawFlags[0])
     {
         glPushMatrix();
         const double* t = getCamera()->getTransform();
         float t2[16];
         std::copy(t, t+16, t2);
         mRenderer->measure(*mFfusion->getVolume(), t2);
         
         glBindBuffer(GL_ARRAY_BUFFER, mRenderer->getGLVertexBuffer());
         glVertexPointer(3, GL_FLOAT, 3*sizeof(float), 0);
         glBindBuffer(GL_ARRAY_BUFFER, mRenderer->getGLNormalBuffer());
         glColorPointer(3, GL_FLOAT, 3*sizeof(float), 0);
         glDrawArrays(GL_POINTS, 0, mRenderer->getNumVertices());
         glPopMatrix();
     }
     
     if(mDrawFlags[1])
     {
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getVolumeMeasurement()->getGLVertexBuffer());
         glVertexPointer(3, GL_FLOAT, 12, 0);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getVolumeMeasurement()->getGLNormalBuffer());
         glColorPointer(3, GL_FLOAT, 12, 0);
         glDrawArrays(GL_POINTS, 0, 640*480);
     }
     
     if(mDrawFlags[2])
     {
         glPushMatrix();
         transposeTransform(aux, mFfusion->getLocation());
         glMultMatrixf(aux);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getMeasurement()->getGLVertexBuffer(1));
         glVertexPointer(3, GL_FLOAT, 12, 0);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getMeasurement()->getGLNormalBuffer(1));
         glColorPointer(3, GL_FLOAT, 12, 0);
         glDrawArrays(GL_POINTS, 0, 640*480);
         glPopMatrix();
     }
     
     drawBoundingBox();
     drawSensor();
     
     /*if(mDrawFlags[0])
     {
         MarchingCubes* mc = mFfusion->getMarchingCubes();
         glBindBuffer(GL_ARRAY_BUFFER, mc->getGLVertexBuffer());
         glVertexPointer(4, GL_FLOAT, 4*sizeof(float), 0);
         glBindBuffer(GL_ARRAY_BUFFER, mc->getGLNormalBuffer());
         glColorPointer(4, GL_FLOAT, 4*sizeof(float), 0);
         glDrawArrays(GL_TRIANGLES, 0, mc->getActiveVertices());
     }*/
 }
Example #11
int main()
{

  printf("Start");
  
  int exit_code = 0;
  
  init();
  WINDOW* win;

  if ((win = initscr()) == NULL) {
    printf("can't init ncurses\n");
    return 1;
  }
  
  mvaddstr(3,3,"Start\n");
  refresh();
  
  do 
    {
      
      uint8_t* data;
      
      uint64_t* regions = calloc( (W / REGION_RES) * (H / REGION_RES),
				  sizeof(uint64_t) );
      
      uint32_t timestamp;
      
      int32_t err = freenect_sync_get_video( (void**)(&data), 
					     &timestamp, 
					     0, 
					     FREENECT_VIDEO_RGB );
      
      if (err) {
	delwin(win);
	endwin();
	refresh();
	
	printf("can't access kinect\n");
	exit(1);
      }
      
      uint32_t offset;
      uint32_t max = W * H * 3; /* RGB frame is 3 bytes per pixel */
      
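      /* count strongly red pixels (by HSV thresholds) in each screen region */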
      for(offset = 0; offset < max; offset += 3 )
	{
	  
	  uint8_t r = data[offset];
	  uint8_t g = data[offset+1];
	  uint8_t b = data[offset+2];
	  
	  struct rgb_color rgb = {r,g,b};
	  struct hsv_color hsv = rgb_to_hsv(rgb);
	  
	  /*
	   * euclidean distance from 'perfect red'
	   */
	  /*
	    uint16_t distance = sqrt( pow((255 - r),2) +
	    pow(g,2) +
	    pow(b,2) );
	  */
	  
	  if ( (hsv.val > 32) &&
	       (hsv.hue < 24 || hsv.hue > 232) &&
	       (hsv.sat > 128)
	       )
	    {
	      regions[get_region(offset)] ++;
	    }
	  
	  /*
	  double cosT = 
	    r /
	    sqrt( pow(r,2) + pow(g,2) + pow(b,2) );
	  
	  regions[get_region(offset)] += cosT;
	  */
	}
      
      
      //int obstacle = 0;
      int red_regions = 0;
      int i,j;
      int base_row = 7;
      int base_col = 5;
      for (i = 0; i < (H / REGION_RES); i++) {
	
	mvaddstr(base_row + 3 * i, base_col, "|");
	
	for (j = 0; j < (W / REGION_RES); j++) {
	  
	  uint64_t region_count =
	    regions[(i * (W / REGION_RES)) + j];
	  
	  char buf[16];
	  
	  if (region_count > R_THRESH) 
	    {
	      //obstacle = 1;
	      red_regions ++;
	      sprintf(buf, "(%"PRIu64")    ", region_count);
	    }
	  else
	    {
	      sprintf(buf, "%"PRIu64"    ", region_count);
	    }
	  
	  
	  mvaddstr( base_row + 3 * i,
		    base_col + 6 * (j+1),
		    buf );
	  
	  
	  
	}
	
	mvaddstr( base_row + 3 * i, 
		  base_col + 6 * ((W/REGION_RES)+1), 
		  "|");
	
      }
      
      char buf[8];

      sprintf(buf, "%d    ", red_regions);
      mvaddstr(5,3,buf);
      /*
      if(obstacle) {
	mvaddstr(5,3,"WARNING!\n");
      } else {
	mvaddstr(5,3,"Safe....\n");
      }
      */
      
      refresh();
      
      free(regions);
      
    } while (LOOP);
  
  delwin(win);
  endwin();
  refresh();
  return exit_code;
  
}
Example #12
void* freenect_thread(void* v) {
  freenect_thread_params* params = (freenect_thread_params*) v;
  int retval = 0;

  sensor_msgs::CameraInfo rgb_info;
  sensor_msgs::Image rgb_image;
  if (params->video_mode.video_format != FREENECT_VIDEO_DUMMY) {
    rgb_image.header.frame_id=params->frame_id+"_rgb";
    rgb_image.is_bigendian=1;
    rgb_image.height=params->video_mode.height;
    rgb_image.width=params->video_mode.width;
    rgb_image.data.resize(params->video_mode.bytes);
    rgb_image.step=params->video_mode.bytes/params->video_mode.height;
    switch(params->video_mode.video_format){
    case FREENECT_VIDEO_RGB: rgb_image.encoding = "rgb8"; break;
    case FREENECT_VIDEO_BAYER: rgb_image.encoding = "bayer_gbrg8"; break;
    case FREENECT_VIDEO_IR_8BIT: rgb_image.encoding = "mono8"; break;
    case FREENECT_VIDEO_IR_10BIT: rgb_image.encoding = "mono16"; break;
    case FREENECT_VIDEO_IR_10BIT_PACKED: rgb_image.encoding = "mono10"; break;
    case FREENECT_VIDEO_YUV_RGB: rgb_image.encoding = "yuv_rgb"; break;
    case FREENECT_VIDEO_YUV_RAW: rgb_image.encoding = "yuv_raw"; break;
    }


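    // nominal Kinect RGB field of view, from which an ideal pinhole K matrix is derived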
    float hfov=M_PI/180.0*58.5;
    float vfov=M_PI/180.0*46.6;

    rgb_info.header.frame_id=rgb_image.header.frame_id;
    rgb_info.width=rgb_image.width;
    rgb_info.height=rgb_image.height;
    rgb_info.K.fill(0);
    rgb_info.K[0]=rgb_info.width/(2*tan(hfov/2)); //fx
    rgb_info.K[4]=rgb_info.height/(2*tan(vfov/2)); //fy
    rgb_info.K[2]=rgb_info.width/2; //cx
    rgb_info.K[5]=rgb_info.height/2; //cy
    rgb_info.K[8]=1;
  }
  printf("rgb image encoding: %s\n", rgb_image.encoding.c_str());

 
  sensor_msgs::CameraInfo depth_info;
  sensor_msgs::Image depth_image;
  if (params->depth_mode.depth_format != FREENECT_DEPTH_DUMMY) {
    if (params->depth_mode.depth_format == FREENECT_DEPTH_REGISTERED) {
      depth_image.header.frame_id=params->frame_id+"_rgb";
    } else 
      depth_image.header.frame_id=params->frame_id+"_depth";
    depth_image.is_bigendian=1;
    depth_image.height=params->depth_mode.height;
    depth_image.width=params->depth_mode.width;
    depth_image.data.resize(params->depth_mode.bytes);
    depth_image.step=params->depth_mode.bytes/params->depth_mode.height;
    switch(params->depth_mode.depth_format){
    case FREENECT_DEPTH_11BIT: depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_10BIT: depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_11BIT_PACKED: depth_image.encoding  = "mono11"; break;
    case FREENECT_DEPTH_10BIT_PACKED: depth_image.encoding = "mono10"; break;
    case FREENECT_DEPTH_REGISTERED:  depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_MM: depth_image.encoding = "mono16"; break;
    }
 

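    // the same nominal field of view is assumed for the depth camera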
    float hfov=M_PI/180.0*58.5;
    float vfov=M_PI/180.0*46.6;

    depth_info.header.frame_id=depth_image.header.frame_id;
    depth_info.width=depth_image.width;
    depth_info.height=depth_image.height;
    depth_info.K.fill(0);
    depth_info.K[0]=depth_info.width/(2*tan(hfov/2)); //fx
    depth_info.K[4]=depth_info.height/(2*tan(vfov/2)); //fy
    depth_info.K[2]=depth_info.width/2; //cx
    depth_info.K[5]=depth_info.height/2; //cy
    depth_info.K[8]=1;
  }

  printf("depth image encoding: %s\n", depth_image.encoding.c_str());
  int count = 0;
  int num_frames_stat=64;
  struct timeval previous_time;
  gettimeofday(&previous_time, 0);
  while (params->run && ! retval) {
    void* video_buffer=0, *depth_buffer=0;
    uint32_t video_ts;
    uint32_t depth_ts;

    if (params->depth_mode.depth_format != FREENECT_DEPTH_DUMMY) {
      retval = freenect_sync_get_depth(&depth_buffer, 
				       &depth_ts, 
				       params->device_num, 
				       params->depth_mode.depth_format);
      if (retval < 0) {
	printf("error in getting depth: %d\n", retval);
	break;
      } else if (! (count % params->frame_skip)) {
	depth_image.header.stamp=ros::Time::now();
	depth_image.header.seq=count;
	memcpy(&depth_image.data[0], depth_buffer, depth_image.data.size());
	params->pub_depth.publish(depth_image);
	depth_info.header.seq = count;
	depth_info.header.stamp=depth_image.header.stamp;
	params->pub_camera_info_depth.publish(depth_info);
      }
    }
    if (params->video_mode.video_format != FREENECT_VIDEO_DUMMY) {
      retval = freenect_sync_get_video(&video_buffer, 
				       &video_ts, 
				       params->device_num, 
				       params->video_mode.video_format);
      if (retval < 0) {
	printf("error in getting rgb: %d\n", retval);
	break;
      } else if (! (count % params->frame_skip)){
	rgb_image.header.stamp=ros::Time::now();
	rgb_image.header.seq=count;
	memcpy(&rgb_image.data[0], video_buffer, rgb_image.data.size());
	params->pub_rgb.publish(rgb_image);
	rgb_info.header.seq = count;
	rgb_info.header.stamp=rgb_image.header.stamp;
	params->pub_camera_info_rgb.publish(rgb_info);
      }
    }
    if (!(count%num_frames_stat)){
      struct timeval current_time, interval;
      gettimeofday(&current_time, 0);
      timersub(&current_time, &previous_time, &interval);
      previous_time = current_time;
      double fps = num_frames_stat/(interval.tv_sec +1e-6*interval.tv_usec);
      printf("running at %lf fps\n", fps);
      fflush(stdout);
     
    }
    count++;
  }
  freenect_sync_stop();
  return NULL;
}
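
All twelve examples share the same calling pattern: pass the address of a pointer, let libfreenect point it at its internal frame buffer, check the return code, and call freenect_sync_stop() once at shutdown. Below is a minimal, error-checked skeleton of that pattern, offered as a sketch (main() and the hard-coded device index 0 are illustrative, not taken from any example above):

#include <stdio.h>
#include <stdint.h>
#include "libfreenect_sync.h"

int main(void)
{
    void *rgb = 0;
    uint32_t ts;
    // the returned pointer references libfreenect's internal 640x480 RGB buffer;
    // copy the pixels out before the next call if they must be kept
    if (freenect_sync_get_video(&rgb, &ts, 0, FREENECT_VIDEO_RGB) < 0) {
        fprintf(stderr, "Kinect not connected?\n");
        return 1;
    }
    freenect_sync_stop();
    return 0;
}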