Example #1
// send a raw 16-bit depth frame to the client
void sendRawDepth(){
	uint32_t ts, x, y, i, j;
	freenect_sync_get_depth(&buf_depth_temp, &ts, 0, FREENECT_DEPTH_11BIT);
	uint16_t *depth = (uint16_t*) buf_depth_temp;
	freenect_frame_mode depth_mode = freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_11BIT);
	uint16_t *tmp_depth = (uint16_t*) malloc(depth_mode.bytes);
	unsigned char *char_depth = (unsigned char *) malloc(depth_mode.bytes);
	
	if(_depth_mirrored){	//MIRROR DEPTH DATA
		for(x = 0; x < depth_mode.width; x++){
			for(y = 0; y < depth_mode.height; y++){
				i = x + (y  * depth_mode.width);
				j = (depth_mode.width - x - 1) + (y  * depth_mode.width);
				tmp_depth[i] = depth[j];
			}
		}
		depth = tmp_depth;
	}
	
	// pack the 16-bit depth values into the send buffer in one copy
	memcpy(char_depth, depth, depth_mode.bytes);
	
	int n = freenect_network_sendMessage(0, 1, char_depth, depth_mode.bytes);
	if (n < 0)
	{
		printf("Error sending raw depth\n");
		client_connected = 0;
	}
	free(char_depth);
	free(tmp_depth);
}
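Example #1 leans on several globals declared elsewhere in the file; a plausible set of declarations, inferred from usage here (the initial values are assumptions):

static void *buf_depth_temp;       // frame pointer filled in by freenect_sync_get_depth()
static int _depth_mirrored = 0;    // flip frames horizontally when nonzero
static int client_connected = 0;   // cleared when a send fails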
Example #2
void DrawGLScene()
{
    short *depth = 0;
    char *rgb = 0;
    uint32_t ts;
    if (freenect_sync_get_depth((void**)&depth, &ts, 0, FREENECT_DEPTH_11BIT) < 0)
        no_kinect_quit();
    if (freenect_sync_get_video((void**)&rgb, &ts, 0, FREENECT_VIDEO_RGB) < 0)
        no_kinect_quit();

    // build a 640x480 lattice of (x, y, rawDepth) vertices plus point indices
    static unsigned int indices[480][640];
    static short xyz[480][640][3];
    int i,j;
    for (i = 0; i < 480; i++) {
        for (j = 0; j < 640; j++) {
            xyz[i][j][0] = j;
            xyz[i][j][1] = i;
            xyz[i][j][2] = depth[i*640+j];
            indices[i][j] = i*640+j;
        }
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();

    glPushMatrix();
    glScalef(zoom,zoom,1);
    glTranslatef(0,0,-3.5);
    glRotatef(rotangles[0], 1,0,0);
    glRotatef(rotangles[1], 0,1,0);
    glTranslatef(0,0,1.5);

    LoadVertexMatrix();

    // Set the projection from the XYZ to the texture image
    glMatrixMode(GL_TEXTURE);
    glLoadIdentity();
    glScalef(1/640.0f,1/480.0f,1);
    LoadRGBMatrix();
    LoadVertexMatrix();
    glMatrixMode(GL_MODELVIEW);

    glPointSize(1);

    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(3, GL_SHORT, 0, xyz);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glTexCoordPointer(3, GL_SHORT, 0, xyz);

    if (color)
        glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, gl_rgb_tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, rgb);

    glPointSize(2.0f);
    glDrawElements(GL_POINTS, 640*480, GL_UNSIGNED_INT, indices);
    glPopMatrix();
    glDisable(GL_TEXTURE_2D);
    glutSwapBuffers();
}
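no_kinect_quit() is referenced above but not shown; in libfreenect's glpclview demo it simply reports the failure and exits, so a minimal stand-in looks like:

void no_kinect_quit(void)
{
    printf("Error: Kinect not connected?\n");
    exit(1);
}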
Example #3
void *depth_out(void *arg){
	while ( depth_connected )
	{
		uint32_t ts,i;
		void *buf_depth_temp;
		freenect_sync_get_depth(&buf_depth_temp, &ts, 0, FREENECT_DEPTH_11BIT);
		uint16_t *depth = (uint16_t*)buf_depth_temp;

		// paint pixels inside the 600..800 raw-depth band white, all others black (ARGB)
		for (i=0; i<FREENECT_FRAME_PIX; i++) {
			buf_depth[4 * i + 0] = 0x00;
			buf_depth[4 * i + 1] = 0x00;
			buf_depth[4 * i + 2] = 0x00;
			buf_depth[4 * i + 3] = 0xFF;
			if(depth[i] < 800 && depth[i] > 600){
				buf_depth[4 * i + 0] = 0xFF;
				buf_depth[4 * i + 1] = 0xFF;
				buf_depth[4 * i + 2] = 0xFF;
				buf_depth[4 * i + 3] = 0xFF;
			}
		}
		#ifdef WIN32
		int n = send(depth_client_socket, (char*)buf_depth, AS3_BITMAPDATA_LEN, 0);
		#else
		int n = write(depth_child, buf_depth, AS3_BITMAPDATA_LEN);
		#endif
		if ( n < 0 || n != AS3_BITMAPDATA_LEN)
		{
			//fprintf(stderr, "Error on write() for depth (%d instead of %d)\n",n, AS3_BITMAPDATA_LEN);
			depth_connected = 0;
		}
	}
	return 0;
}
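The thread writes into a shared buf_depth buffer declared elsewhere; a sketch of the assumed declarations, taking FREENECT_FRAME_PIX as 640*480:

#define AS3_BITMAPDATA_LEN (FREENECT_FRAME_PIX * 4)   // one 32-bit ARGB frame
static unsigned char buf_depth[AS3_BITMAPDATA_LEN];   // shared output frame
static volatile int depth_connected = 1;              // cleared when the client drops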
Example #4
//send depth ARGB to client
void sendDepth(){
	int n;
	uint32_t ts, x, y, i, j;
	freenect_sync_get_depth(&buf_depth_temp, &ts, 0, FREENECT_DEPTH_11BIT);
	uint16_t *depth = (uint16_t*) buf_depth_temp;
	freenect_frame_mode depth_mode = freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_11BIT);
	//uint16_t *tmp_depth = (uint16_t*) malloc(FREENECT_DEPTH_11BIT_SIZE);
	uint16_t *tmp_depth = (uint16_t*) malloc(depth_mode.bytes);
	if(_depth_mirrored){	//MIRROR DEPTH DATA
		for(x = 0; x < depth_mode.width; x++){
			for(y = 0; y < depth_mode.height; y++){
				i = x + (y  * depth_mode.width);
				j = (depth_mode.width - x - 1) + (y  * depth_mode.width);
				tmp_depth[i] = depth[j];
			}
		}
		depth = tmp_depth;
	}
	
	for (i=0; i < depth_mode.width * depth_mode.height; i++) {
		if(_depth_compression != 0) {
			buf_depth[3 * i + 0] = 0x00;
			buf_depth[3 * i + 1] = 0x00;
			buf_depth[3 * i + 2] = 0x00;
		} else {
			buf_depth[4 * i + 0] = 0x00;
			buf_depth[4 * i + 1] = 0x00;
			buf_depth[4 * i + 2] = 0x00;
			buf_depth[4 * i + 3] = 0xFF;
		}
		if(depth[i] < _max_depth && depth[i] > _min_depth){
			// invert and wrap (depth - min) into a 0..255 gray level; bands repeat every 256 raw units
			unsigned char l =  0xFF - ((depth[i] - _min_depth) & 0xFF);
			if(_depth_compression != 0) {
				buf_depth[3 * i + 0] = l;
				buf_depth[3 * i + 1] = l;
				buf_depth[3 * i + 2] = l;
			} else {
				buf_depth[4 * i + 0] = l;
				buf_depth[4 * i + 1] = l;
				buf_depth[4 * i + 2] = l;
				buf_depth[4 * i + 3] = 0xFF;
			}
		}
	}
	if(_depth_compression != 0) {
		unsigned char *compressed_buff = (unsigned char *)malloc(AS3_BITMAPDATA_LEN);
		unsigned long len = 0;
		RGB_2_JPEG(buf_depth, &compressed_buff, &len, _depth_compression);
		n = freenect_network_sendMessage(0, 0, compressed_buff, (int)len);
		free(compressed_buff);
	} else {
		n = freenect_network_sendMessage(0, 0, buf_depth, AS3_BITMAPDATA_LEN);
	}
	if (n < 0)
	{
		printf("Error sending depth\n");
		client_connected = 0;
	}
	free(tmp_depth);
}
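The gray mapping above inverts (depth[i] - _min_depth) and masks it to one byte, so the rendered bands repeat every 256 raw units. A standalone check of the formula (the bounds are illustrative, not from the original):

#include <stdio.h>

int main(void) {
	unsigned min_depth = 600;   /* illustrative lower bound */
	unsigned d;
	for (d = 650; d <= 1400; d += 250) {
		unsigned char l = 0xFF - ((d - min_depth) & 0xFF);
		printf("raw %4u -> gray 0x%02X\n", d, l);
	}
	return 0;
}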
Example #5
pcl::PointCloud<pcl::PointXYZ> KinectCloud::GetDepth()
{

    short *result = 0;
    pcl::PointCloud<pcl::PointXYZ> depth;
    depth.width = width;
    depth.height = height;
    depth.points.resize(depth.width * depth.height);
    uint32_t ts;
    if(freenect_sync_get_depth((void**) &result, &ts, 0, FREENECT_DEPTH_11BIT) < 0)
	printf("Hey, Kinect is not connected\n");

    else // convert raw Kinect readings to metric (meter) coordinates
    {
	for(int j = 0;j < height - 20;j++)
	    for(int i = 0;i < width;i++)
	    {
		int ind = j * width + i;
		// raw 11-bit reading -> meters (empirical tangent fit)
		depth.points[ind].x = (0.1236 * tan((result[ind] / 2842.5) + 1.1863));
		// 0.001602 = 1.025/640, from: http://www.cs.ubc.ca/labs/imager/PROCAMS2011/0008.pdf
		depth.points[ind].y = (i - width / 2) * 0.001602 * depth.points[ind].x;
		depth.points[ind].z = -(j - height / 2) * 0.001602 * depth.points[ind].x;
	    }
    }
    return depth;
}
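The x component comes from a widely circulated empirical fit of 11-bit raw readings to meters; isolated, the conversion is just (a sketch, not part of the original listing):

#include <cmath>

// Empirical raw-to-meters fit for 11-bit Kinect depth; only meaningful for
// raw values below roughly 1090, where the tangent diverges.
static inline double rawDepthToMeters(int raw)
{
    return 0.1236 * std::tan(raw / 2842.5 + 1.1863);
}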
Example #6
IplImage *freenect_sync_get_depth_cv(int index)
{
	static IplImage *image = 0;
	static char *data = 0;
	if (!image) image = cvCreateImageHeader(cvSize(640,480), IPL_DEPTH_16U, 1);
	unsigned int timestamp;
	if (freenect_sync_get_depth((void**)&data, &timestamp, index, FREENECT_DEPTH_11BIT))
	    return NULL;
	cvSetData(image, data, 640*2);
	return image;
}
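A minimal display loop built on this wrapper (a sketch using the legacy OpenCV C API; raw 11-bit values look dark in cvShowImage without scaling):

cvNamedWindow("Depth", CV_WINDOW_AUTOSIZE);
while (cvWaitKey(10) < 0) {
    IplImage *depth = freenect_sync_get_depth_cv(0);
    if (!depth) break;            // device not connected
    cvShowImage("Depth", depth);  // shown unscaled, for inspection only
}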
Example #7
void CameraNUI::onNextImage() {
	short *depth = 0;
	char *rgb = 0;
	uint32_t ts;
	int ret;

	if (triggered && trigger.empty()) {
		return;
	} else {
		// clear all triggers
		while(!trigger.empty()) trigger.read();
	}

	// retrieve color image

	cv::Size size;
	if (resolution == FREENECT_RESOLUTION_MEDIUM)
		size = cv::Size(640, 480);
	else if (resolution == FREENECT_RESOLUTION_HIGH)
		size = cv::Size(1280, 1024);
	else // FREENECT_RESOLUTION_LOW: size would otherwise stay empty
		size = cv::Size(320, 240);

	cv::Mat image_out;
	ret = freenect_sync_get_video_with_res((void**)&rgb, &ts, index, (int)resolution, imageType);

	if(imageType == FREENECT_VIDEO_RGB) {
		cv::Mat tmp_rgb(size, CV_8UC3, rgb);
		cv::cvtColor(tmp_rgb, image_out, CV_RGB2BGR);
	} else if(imageType == FREENECT_VIDEO_IR_8BIT || imageType == FREENECT_VIDEO_BAYER) {
		// both formats arrive as one byte per pixel; pass through as a single-channel image
		cv::Mat tmp_gray(size, CV_8UC1, rgb);
		image_out = tmp_gray;
	}

	// retrieve depth image
	ret = freenect_sync_get_depth((void**)&depth, &ts, index, depthType);
	cv::Mat tmp_depth(480, 640, CV_16UC1, depth);
	tmp_depth.copyTo(depthFrame);

	// write data to output streams
	//outImg.write(cameraFrame);
	outImg.write(image_out);
	outDepthMap.write(depthFrame);

	camera_info.write(m_camera_info);
}
Example #8
#include <iostream>
#include <unistd.h>             // for sleep()
#include "libfreenect_sync.h"

int main (int argc, char * const argv[]) {
	
	short *depth;
	uint32_t timestamp;	

	while (1) {
		// get the depth data
		freenect_sync_get_depth((void**)&depth, &timestamp, 0, FREENECT_DEPTH_11BIT);
		
		// convert the raw value to centimeters (center pixel of the 640x480 frame)
		int centerDepth = rawToCentimeters(depth[640*240 + 320]);
		
		std::cout << "Center Depth:" << centerDepth << "cm" << std::endl;
		
		sleep(1);
	}
	return 0;
}
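rawToCentimeters() is not shown in the listing; a plausible stand-in (an assumption that reuses the tangent fit from Example #5, scaled to centimeters):

#include <cmath>

// Hypothetical helper: empirical raw-to-meters fit, returned in centimeters.
int rawToCentimeters(short raw)
{
	return (int)(100.0 * 0.1236 * std::tan(raw / 2842.5 + 1.1863));
}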
Example #9
void CameraNUI::onNextImage() {
	short *depth = 0;
	char *rgb = 0;
	uint32_t ts;
	int ret;

	// retrieve color image
	ret = freenect_sync_get_video((void**)&rgb, &ts, 0, FREENECT_VIDEO_RGB);
	cv::Mat tmp_rgb(480, 640, CV_8UC3, rgb);
	cv::cvtColor(tmp_rgb, cameraFrame, CV_RGB2BGR);

	// retrieve depth image (FREENECT_DEPTH_REGISTERED: millimeters, aligned to the RGB frame)
	ret = freenect_sync_get_depth((void**)&depth, &ts, 0, FREENECT_DEPTH_REGISTERED);
	cv::Mat tmp_depth(480, 640, CV_16UC1, depth);
	tmp_depth.copyTo(depthFrame);

	// write data to output streams
	outImg.write(cameraFrame);
	outDepthMap.write(depthFrame);

	camera_info.write(m_camera_info);
}
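FREENECT_DEPTH_REGISTERED delivers depth in millimeters already registered to the RGB image, so the two frames above are pixel-aligned. If a metric float image is needed downstream, the conversion is a one-liner (sketch):

cv::Mat depth_m;
depthFrame.convertTo(depth_m, CV_32FC1, 0.001); // millimeters -> meters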
Example #10
//send depth ARGB to client
void sendDepth(){
	uint32_t ts, x, y, i, j;
	freenect_sync_get_depth(&buf_depth_temp, &ts, 0, FREENECT_DEPTH_11BIT);
	uint16_t *depth = (uint16_t*) buf_depth_temp;
	uint16_t *tmp_depth = (uint16_t*) malloc(FREENECT_DEPTH_11BIT_SIZE);
	if(_depth_mirrored){	//MIRROR DEPTH DATA
		for(x = 0; x < FREENECT_FRAME_W; x++){
			for(y = 0; y < FREENECT_FRAME_H; y++){
				i = x + (y  * FREENECT_FRAME_W);
				j = (FREENECT_FRAME_W - x - 1) + (y  * FREENECT_FRAME_W);
				tmp_depth[i] = depth[j];
			}
		}
		depth = tmp_depth;
	}
	
	// build an ARGB frame: inverted gray ramp inside (_min_depth, _max_depth), black elsewhere
	for (i=0; i<FREENECT_FRAME_PIX; i++) {
		buf_depth[4 * i + 0] = 0x00;
		buf_depth[4 * i + 1] = 0x00;
		buf_depth[4 * i + 2] = 0x00;
		buf_depth[4 * i + 3] = 0xFF;
		if(depth[i] < _max_depth && depth[i] > _min_depth){
			unsigned char l =  0xFF - ((depth[i] - _min_depth) & 0xFF);
			buf_depth[4 * i + 0] = l;
			buf_depth[4 * i + 1] = l;
			buf_depth[4 * i + 2] = l;
			buf_depth[4 * i + 3] = 0xFF;
		}
	}
	int n = sendMessage(0, 0, buf_depth, AS3_BITMAPDATA_LEN);
	if (n < 0)
	{
		printf("Error sending depth\n");
		client_connected = 0;
	}
	free(tmp_depth);
}
Example #11
/* baseX, Y, Z are in meters.
 * hangle and vangle are in degrees.
 */
void
poll_one_device(KDevice *dev) {
  int i,j;
  uint16_t *fdepths;
  uint32_t timestamp;
  FP_TYPE d;
  FP_TYPE landX, landY, landZ;
  FP_TYPE ha, va;
  if (!freenect_sync_get_depth((void **) &fdepths,
                               &timestamp,
                               dev->device,
                               FREENECT_DEPTH_11BIT)) {
    // printf("\n\n\n");
    for (j = SENSOR_HEIGHT - 1; j >= 0; j--) {
    // for (j = SENSOR_HEIGHT/2 ; j ; j = 0) {
      va = dev->vangle[j];
      for (i = 0; i < SENSOR_WIDTH; i++) {
      // for (i = SENSOR_WIDTH / 2 ; i ; i = 0) {
        d = depthDistance[fdepths[j*SENSOR_WIDTH+i] & 0x7FF];
        d *= horizDepthMultiplier[i];
        d *= vertDepthMultiplier[j];
        /* It's not worth plotting if it's too far away or too close. */
        if (d > 0.1 && d < 30.0) {
          ha = dev->hangle[i];

          /* We have spherical coordinates, and now we need to convert them to
           * cartesian coordinates. */
          /* Trig approach. Needs tweaking? */
          FP_TYPE dh = d * dev->dvh[j];
          FP_TYPE dx = dev->dx[i];
          FP_TYPE dy = dev->dy[i];
          FP_TYPE dz = dev->dvv[j];
          landY = dh * dy;
          landX = dh * dx;
          landZ = d * dz;

          /* Approximation approach: Broken. */
          /*
          y = (i - (SENSOR_WIDTH/2)) * (d - 10.0) * (SENSOR_WIDTH/SENSOR_HEIGHT) * 0.0021;
          z = (j - 240) * (d - 10.0) * 0.0021;
          x = d;
          */

          /* Measurements are relative to the camera's position. Now adjust
           * for the base position. */
          landX += dev->baseX;
          landY += dev->baseY;
          landZ += dev->baseZ;

          /*
          if (!(i&0x5f) && !(j & 0x5f)) {
            printf("%d,%d:%f meters away. %f meters horizontally, %f vert",
                i, j, d, dh, z);
            printf("%d,%d:%d/%f %f meters ahead, %f meters to the side, %f meters tall.\n",
                i, j, fdepths[j*SENSOR_WIDTH+i] & 0x7FF, d,
                x, y, z);
          }
          */
          /*
          if (((i == 9) || (i == 160) || (i == 320) || (i == 480) || (i == 631)) &&
              ((j == 9) || (j == 120) || (j == 240) || (j == 360) || (j == 470))) {
            //  printf("%d,%d point is %f meters in front of camera, %f meters "
            //         "to the side, and is %f meters above the camera. "
            //         "ha: %f, va: %f\n",
            //         j, i, y, x, z, ha, va);
            printf("%d,%d: Z is %f, zindex is %d\n", j, i, z, zindex);
          }
          */

          /* Current x/y/z. */
          FP_TYPE cx = dev->baseX;
          FP_TYPE cy = dev->baseY;
          FP_TYPE cz = dev->baseZ;

          // Plot everything onto the cube scale.
          landX *= CUBE_XSCALE;
          landY *= CUBE_YSCALE;
          landZ *= CUBE_ZSCALE;

          cx *= CUBE_XSCALE;
          cy *= CUBE_YSCALE;
          cz *= CUBE_ZSCALE;

          int xt = cx <= landX;
          int yt = cy <= landY;

          // ix, iy and iz are cube coordinates of cx,cy,cz
#define ix ((int) cx)
#define iy ((int) cy)
#define iz ((cz < 0) ? 0 : (cz > CUBE_HEIGHT) ? (CUBE_HEIGHT-1) : (int) cz)

#define INBOUNDS(x,y,z) ( \
                 (x < CUBE_WIDTH) && \
                 (x >= 0.0) && \
                 (y < CUBE_LENGTH) && \
                 (y >= 0.0) && \
                 (z < CUBE_HEIGHT) && \
                 (z >= 0.0))
          // printf("LandX,y,z: %d,%d,%d\n", (int) landX, (int) landY, (int) landZ);
          /* Step 1: Mark all items between basex/y/z and landing x/y/z. */
          while ((((cx <= landX) == xt) &&
                 ((cy <= landY) == yt)) &&
                 INBOUNDS(cx,cy,cz)) {
            // printf("cx <= landX: %d (xt: %d)\n", cx <= landX, xt);
            // printf("cy <= landY: %d (yt: %d)\n", cy <= landY, yt);
            // printf("Clearing x,y,z: %d,%d,%d\n", ix, iy, iz);
            CUBE(ix, iy, iz).cleared_by = dev->id;
            /* Advance to next cube. */
            cx += dx; cy += dy; cz += dz;
          }

          if (INBOUNDS(cx, cy, cz)) {
            /* Step 2: Mark the cube this lands in as known, as well as its
             * column status. */
            // printf("Knowing x,y,z: %d,%d,%d\n", ix, iy, iz);
            cx = landX;
            cy = landY;
            cz = landZ;
            CUBE(ix, iy, iz).known_by = dev->id;
            COLSTAT(ix, iy) = COL_KNOWN;
            CUBE(ix, iy, iz).known_count++;
            CUBE(ix, iy, iz).known_by = dev->id;
          // } else {
            // printf("Landed out of bounds?\n");
          }

          /* Advance to next cube. */
          cx += dx; cy += dy; cz += dz;

          /* Step 3: Mark every cube in a line _after_ known cube as 'guessed'
           */
          while (INBOUNDS(cx, cy, cz)) {
            // printf("Guessing x,y,z: %d,%d,%d\n", ix, iy, iz);
            CUBE(ix, iy, iz).guess_count++;
            /* Advance to next cube. */
            if (COLSTAT(ix, iy) == COL_CLEAR) {
              COLSTAT(ix, iy) = COL_GUESSED;
            }
            cx += dx; cy += dy; cz += dz;
          }
        }
      }
    }
  }
}
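depthDistance[] converts an 11-bit raw reading to meters before the per-column and per-row multipliers are applied; the table itself is built elsewhere. A plausible initialization, assuming the same empirical fit as Example #5:

#include <math.h>

static FP_TYPE depthDistance[2048];

void init_depth_table(void) {
  int r;
  // Assumption: precompute meters for every possible 11-bit raw value.
  for (r = 0; r < 2048; r++)
    depthDistance[r] = 0.1236 * tan(r / 2842.5 + 1.1863);
}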
Example #12
 void display()
 {
     void* image = 0;
     void* depth = 0;
     float aux[16];
     uint32_t timestamp;
     freenect_sync_get_video(&image, &timestamp, 0, FREENECT_VIDEO_RGB);
     freenect_sync_get_depth(&depth, &timestamp, 0, FREENECT_DEPTH_11BIT);
     
     mFfusion->update(depth);
     
     glDisable(GL_TEXTURE_2D);
     glPointSize(1);
     
     glMatrixMode(GL_MODELVIEW);
     
     glEnableClientState(GL_VERTEX_ARRAY);
     glEnableClientState(GL_COLOR_ARRAY);
     
     glRotated(180, 0, 0, 1);
     
     if(mDrawFlags[0])
     {
         glPushMatrix();
         const double* t = getCamera()->getTransform();
         float t2[16];
         std::copy(t, t+16, t2);
         mRenderer->measure(*mFfusion->getVolume(), t2);
         
         glBindBuffer(GL_ARRAY_BUFFER, mRenderer->getGLVertexBuffer());
         glVertexPointer(3, GL_FLOAT, 3*sizeof(float), 0);
         glBindBuffer(GL_ARRAY_BUFFER, mRenderer->getGLNormalBuffer());
         glColorPointer(3, GL_FLOAT, 3*sizeof(float), 0);
         glDrawArrays(GL_POINTS, 0, mRenderer->getNumVertices());
         glPopMatrix();
     }
     
     if(mDrawFlags[1])
     {
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getVolumeMeasurement()->getGLVertexBuffer());
         glVertexPointer(3, GL_FLOAT, 12, 0);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getVolumeMeasurement()->getGLNormalBuffer());
         glColorPointer(3, GL_FLOAT, 12, 0);
         glDrawArrays(GL_POINTS, 0, 640*480);
     }
     
     if(mDrawFlags[2])
     {
         glPushMatrix();
         transposeTransform(aux, mFfusion->getLocation());
         glMultMatrixf(aux);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getMeasurement()->getGLVertexBuffer(1));
         glVertexPointer(3, GL_FLOAT, 12, 0);
         glBindBuffer(GL_ARRAY_BUFFER, mFfusion->getMeasurement()->getGLNormalBuffer(1));
         glColorPointer(3, GL_FLOAT, 12, 0);
         glDrawArrays(GL_POINTS, 0, 640*480);
         glPopMatrix();
     }
     
     drawBoundingBox();
     drawSensor();
     
     /*if(mDrawFlags[0])
     {
         MarchingCubes* mc = mFfusion->getMarchingCubes();
         glBindBuffer(GL_ARRAY_BUFFER, mc->getGLVertexBuffer());
         glVertexPointer(4, GL_FLOAT, 4*sizeof(float), 0);
         glBindBuffer(GL_ARRAY_BUFFER, mc->getGLNormalBuffer());
         glColorPointer(4, GL_FLOAT, 4*sizeof(float), 0);
         glDrawArrays(GL_TRIANGLES, 0, mc->getActiveVertices());
     }*/
 }
void* freenect_thread(void* v) {
  freenect_thread_params* params = (freenect_thread_params*) v;
  int retval = 0;

  sensor_msgs::CameraInfo rgb_info;
  sensor_msgs::Image rgb_image;
  if (params->video_mode.video_format != FREENECT_VIDEO_DUMMY) {
    rgb_image.header.frame_id=params->frame_id+"_rgb";
    rgb_image.is_bigendian=1;
    rgb_image.height=params->video_mode.height;
    rgb_image.width=params->video_mode.width;
    rgb_image.data.resize(params->video_mode.bytes);
    rgb_image.step=params->video_mode.bytes/params->video_mode.height;
    switch(params->video_mode.video_format){
    case FREENECT_VIDEO_RGB: rgb_image.encoding = "rgb8"; break;
    case FREENECT_VIDEO_BAYER: rgb_image.encoding = "bayer_gbrg8"; break;
    case FREENECT_VIDEO_IR_8BIT: rgb_image.encoding = "mono8"; break;
    case FREENECT_VIDEO_IR_10BIT: rgb_image.encoding = "mono16"; break;
    case FREENECT_VIDEO_IR_10BIT_PACKED: rgb_image.encoding = "mono10"; break;
    case FREENECT_VIDEO_YUV_RGB: rgb_image.encoding = "yuv_rgb"; break;
    case FREENECT_VIDEO_YUV_RAW: rgb_image.encoding = "yuv_raw"; break;
    }


    float hfov=M_PI/180.0*58.5;
    float vfov=M_PI/180.0*46.6;

    rgb_info.header.frame_id=rgb_image.header.frame_id;
    rgb_info.width=rgb_image.width;
    rgb_info.height=rgb_image.height;
    rgb_info.K.fill(0);
    rgb_info.K[0]=rgb_info.width/(2*tan(hfov/2)); //fx
    rgb_info.K[4]=rgb_info.height/(2*tan(vfov/2)); //fy
    rgb_info.K[2]=rgb_info.width/2; //cx
    rgb_info.K[5]=rgb_info.height/2; //cy
    rgb_info.K[8]=1;
  }
  printf("rgb image encoding: %s\n", rgb_image.encoding.c_str());

 
  sensor_msgs::CameraInfo depth_info;
  sensor_msgs::Image depth_image;
  if (params->depth_mode.depth_format != FREENECT_DEPTH_DUMMY) {
    if (params->depth_mode.depth_format == FREENECT_DEPTH_REGISTERED) {
      depth_image.header.frame_id=params->frame_id+"_rgb";
    } else 
      depth_image.header.frame_id=params->frame_id+"_depth";
    depth_image.is_bigendian=1;
    depth_image.height=params->depth_mode.height;
    depth_image.width=params->depth_mode.width;
    depth_image.data.resize(params->depth_mode.bytes);
    depth_image.step=params->depth_mode.bytes/params->depth_mode.height;
    switch(params->depth_mode.depth_format){
    case FREENECT_DEPTH_11BIT: depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_10BIT: depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_11BIT_PACKED: depth_image.encoding = "mono11"; break;
    case FREENECT_DEPTH_10BIT_PACKED: depth_image.encoding = "mono10"; break;
    case FREENECT_DEPTH_REGISTERED:  depth_image.encoding = "mono16"; break;
    case FREENECT_DEPTH_MM: depth_image.encoding = "mono16"; break;
    }
 

    float hfov=M_PI/180.0*58.5;
    float vfov=M_PI/180.0*46.6;

    depth_info.header.frame_id=depth_image.header.frame_id;
    depth_info.width=depth_image.width;
    depth_info.height=depth_image.height;
    depth_info.K.fill(0);
    depth_info.K[0]=depth_info.width/(2*tan(hfov/2)); //fx
    depth_info.K[4]=depth_info.height/(2*tan(vfov/2)); //fy
    depth_info.K[2]=depth_info.width/2; //cx
    depth_info.K[5]=depth_info.height/2; //cy
    depth_info.K[8]=1;
  }

  printf("depth image encoding: %s\n", depth_image.encoding.c_str());
  int count = 0;
  int num_frames_stat=64;
  struct timeval previous_time;
  gettimeofday(&previous_time, 0);
  while (params->run && ! retval) {
    void* video_buffer=0, *depth_buffer=0;
    uint32_t video_ts;
    uint32_t depth_ts;

    if (params->depth_mode.depth_format != FREENECT_DEPTH_DUMMY) {
      retval = freenect_sync_get_depth(&depth_buffer, 
				       &depth_ts, 
				       params->device_num, 
				       params->depth_mode.depth_format);
      if (retval < 0) {
	printf("error in getting depth: %d\n", retval);
	break;
      } else if (! (count % params->frame_skip)) {
	depth_image.header.stamp=ros::Time::now();
	depth_image.header.seq=count;
	memcpy(&depth_image.data[0], depth_buffer, depth_image.data.size());
	params->pub_depth.publish(depth_image);
	depth_info.header.seq = count;
	depth_info.header.stamp=depth_image.header.stamp;
	params->pub_camera_info_depth.publish(depth_info);
      }
    }
    if (params->video_mode.video_format != FREENECT_VIDEO_DUMMY) {
      retval = freenect_sync_get_video(&video_buffer, 
				       &video_ts, 
				       params->device_num, 
				       params->video_mode.video_format);
      if (retval < 0) {
	printf("error in getting rgb: %d\n", retval);
	break;
      } else if (! (count % params->frame_skip)){
	rgb_image.header.stamp=ros::Time::now();
	rgb_image.header.seq=count;
	memcpy(&rgb_image.data[0], video_buffer, rgb_image.data.size());
	params->pub_rgb.publish(rgb_image);
	rgb_info.header.seq = count;
	rgb_info.header.stamp=rgb_image.header.stamp;
	params->pub_camera_info_rgb.publish(rgb_info);
      }
    }
    if (!(count%num_frames_stat)){
      struct timeval current_time, interval;
      gettimeofday(&current_time, 0);
      timersub(&current_time, &previous_time, &interval);
      previous_time = current_time;
      double fps = num_frames_stat/(interval.tv_sec +1e-6*interval.tv_usec);
      printf("running at %lf fps\n", fps);
      fflush(stdout);
     
    }
    count++;
  }
  freenect_sync_stop();
  return 0;
}
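freenect_thread() runs until params->run is cleared; a minimal launch and shutdown sketch (assuming a freenect_thread_params with a boolean-like run field, other configuration omitted):

freenect_thread_params params;   // remaining fields assumed to be filled in by the caller
params.run = true;
pthread_t runner;
pthread_create(&runner, NULL, freenect_thread, &params);
ros::spin();                     // service ROS callbacks while the thread publishes
params.run = false;              // ask the acquisition loop to exit
pthread_join(runner, NULL);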