/**
	\brief Starts the Kinect-dedicated reading thread.
	Does the following:
	1. Notifies the start of the initialization and initializes
	2. Notifies the initialization outcome (error, or running if the initialization succeeded)
	3. If successful, runs a continuous loop:
	3.1. Waits for / updates the Kinect data
	3.2. Protected by a mutex, generates the images and data structures made available to the user
	3.3. Checks whether a stop has been requested
	4. Stops the Kinect reading, releases the resources and notifies.

	The variables accessed outside of this thread are protected by a mutex. This includes:
	- The status
	- The QImages for the depth and camera streams
	- The bodies
	- etc.
	A brief usage sketch for consumers of these signals and data follows the function body.
**/
void QKinectWrapper::run()
{
	mutex.lock();
	status=QKinect::Initializing;
	emit statusNotification(status);
	mutex.unlock();

	bool ok = initialize();

	if(!ok)
	{
		mutex.lock();
		status = QKinect::ErrorStop;
		emit statusNotification(status);
		mutex.unlock();
		return;
	}

	mutex.lock();
	status = QKinect::OkRun;
	emit statusNotification(status);
	mutex.unlock();

	frameid=0;
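	// Acquisition loop: runs until a stop is requested from another thread via t_requeststop.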
	while(!t_requeststop)
	{
		//double t1,t2;
		//t1 = PreciseTimer::QueryTimer();
		// Block until new data is available; use a local name that does not shadow the 'status' member.
		XnStatus rc = g_Context.WaitAndUpdateAll();
		if(rc != XN_STATUS_OK)
			continue;			// update failed, skip this frame
		//msleep(100+(rand()%100));				// simulate a random delay
		//if( (frameid%100) > 50)
		//	msleep(((frameid%100)-50)*10);		// simulate a progressive slowdown
		//t2 = PreciseTimer::QueryTimer();
		//printf("WaitAndUpdateAll: %lf. %s\n",(t2-t1)*1000.0,xnGetStatusString(rc));

		// Prepare the data to export outside of the thread
		mutex.lock();
		xn::DepthMetaData depthMD;
		g_DepthGenerator.GetMetaData(depthMD);
		//frameid = depthMD.FrameID();
		frameid++;
		//printf("frame id: %d %d\n",frameid,depthMD.FrameID());
		timestamp = (double)depthMD.Timestamp()/1000000.0;
		// Must create the bodies first
		bodies = createBodies();
		// Then can create the images, which may overlay the bodies
		imageCamera = createCameraImage();
		imageDepth = createDepthImage();
		emit dataNotification();
		mutex.unlock();
	}
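	// The loop exited because a stop was requested: release the OpenNI context and report Idle.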
	g_Context.Shutdown();

	mutex.lock();
	status = QKinect::Idle;
	emit statusNotification(status);
	mutex.unlock();
}
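/*
	Usage sketch (an assumption, not the wrapper's documented API): a consumer in the GUI
	thread would typically connect to the two signals emitted above and read the
	mutex-protected data from its slots. The accessor names below are hypothetical
	placeholders; whichever getters the wrapper actually exposes must lock the same mutex
	that run() locks before touching imageCamera, imageDepth or bodies.

		QKinectWrapper kinect;
		// Signals emitted by run(): statusNotification(<status enum>) and dataNotification();
		// the exact enum type name comes from the wrapper's header.
		QObject::connect(&kinect, SIGNAL(dataNotification()), &widget, SLOT(onKinectData()));
		kinect.start();                     // start() (from QThread) eventually calls run()

		// In MyWidget::onKinectData(): read the images/bodies through the wrapper's
		// mutex-protected getters, e.g. QImage depth = kinect.getDepthImage(); (hypothetical)
*/

// Opens the V4L2 depth device and prepares it for streaming: requests NUM_BUFS
// user-pointer buffers, backs each one with a preallocated depth image message so the
// driver writes straight into the message data, queues the buffers and starts capture.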
bool XtionGrabber::setupDepth(const std::string& device)
{
	ros::NodeHandle depth_nh(getPrivateNodeHandle(), "depth");

	m_depth_it.reset(new image_transport::ImageTransport(depth_nh));

	m_pub_depth = m_depth_it->advertiseCamera("image_raw", 1);

	m_depth_fd = open(device.c_str(), O_RDONLY);
	if(m_depth_fd < 0)
	{
		perror("Could not open depth device");
		return false;
	}

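	// Ask V4L2 for NUM_BUFS user-pointer buffers: the driver will write frames
	// directly into memory that we provide (the image messages allocated below).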
	struct v4l2_requestbuffers reqbuf;
	memset(&reqbuf, 0, sizeof(reqbuf));
	reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	reqbuf.memory = V4L2_MEMORY_USERPTR;
	reqbuf.count = NUM_BUFS;

	if(ioctl(m_depth_fd, VIDIOC_REQBUFS, &reqbuf) != 0)
	{
		perror("Could not request buffers");
		return false;
	}

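	// Back every buffer with a preallocated depth image and queue it for capture.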
	for(size_t i = 0; i < NUM_BUFS; ++i)
	{
		DepthBuffer* buffer = &m_depth_buffers[i];

		buffer->image = createDepthImage();

		memset(&buffer->buf, 0, sizeof(buffer->buf));
		buffer->buf.index = i;
		buffer->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buffer->buf.memory = V4L2_MEMORY_USERPTR;
		buffer->buf.m.userptr = (long unsigned int)buffer->image->data.data();
		buffer->buf.length = buffer->image->data.size();

		if(ioctl(m_depth_fd, VIDIOC_QBUF, &buffer->buf) != 0)
		{
			perror("Could not queue buffer");
			return false;
		}
	}

	if(ioctl(m_depth_fd, VIDIOC_STREAMON, &reqbuf.type) != 0)
	{
		perror("Could not start streaming");
		return false;
	}

	return true;
}
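// Capture loop: multiplexes the depth and color device nodes with select(), dequeues
// filled buffers, converts/publishes them, and immediately requeues a buffer so the
// driver never starves.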
void XtionGrabber::read_thread()
{
	fd_set fds;

	while(!m_shouldExit)
	{
		FD_ZERO(&fds);
		FD_SET(m_depth_fd, &fds);
		FD_SET(m_color_fd, &fds);

		int ret = select(std::max(m_depth_fd, m_color_fd)+1, &fds, 0, 0, 0);

		if(ret < 0)
		{
			perror("Could not select()");
			return;
		}

		if(FD_ISSET(m_color_fd, &fds))
		{
			v4l2_buffer buf;
			memset(&buf, 0, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_USERPTR;

			if(ioctl(m_color_fd, VIDIOC_DQBUF, &buf) != 0)
			{
				if(errno == EAGAIN)
					continue;
				perror("Could not dequeue buffer");
				return;
			}

			ColorBuffer* buffer = &m_color_buffers[buf.index];

			sensor_msgs::ImagePtr img = m_color_pool->create();
			img->width = m_colorWidth;
			img->height = m_colorHeight;
			img->step = img->width * 4;
			img->data.resize(img->step * img->height);
			img->header.stamp = timeFromTimeval(buf.timestamp);
			img->header.frame_id = "/camera_optical";

			img->encoding = sensor_msgs::image_encodings::BGRA8;

#if HAVE_LIBYUV
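			// libyuv converts the packed UYVY frame to 32-bit output in one call
			// (libyuv's "ARGB" is B,G,R,A in memory order, matching BGRA8).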
			libyuv::ConvertToARGB(
				buffer->data.data(), buffer->data.size(),
				img->data.data(),
				m_colorWidth*4, 0, 0, m_colorWidth, m_colorHeight, m_colorWidth, m_colorHeight,
				libyuv::kRotate0, libyuv::FOURCC_UYVY
			);
#else
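			// Fallback: software UYVY -> 32-bit conversion. Each 4-byte UYVY
			// macro-pixel yields two output pixels sharing one U/V pair.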
			uint32_t* dptr = (uint32_t*)img->data.data();

			for(size_t y = 0; y < img->height; ++y)
			{
				for(size_t x = 0; x < img->width-1; x += 2)
				{
					unsigned char* base = &buffer->data[y*m_colorWidth*2+x*2];
					float y1 = base[1];
					float u  = base[0];
					float y2 = base[3];
					float v  = base[2];

					uint32_t rgb1 = yuv(y1, u, v);
					uint32_t rgb2 = yuv(y2, u, v);

					dptr[y*img->width + x + 0] = rgb1;
					dptr[y*img->width + x + 1] = rgb2;
				}
			}
#endif

			m_lastColorImage = img;
			m_lastColorSeq = buf.sequence;

			m_color_info.header.stamp = img->header.stamp;
			m_pub_color.publish(img, boost::make_shared<sensor_msgs::CameraInfo>(m_color_info));

			if(ioctl(m_color_fd, VIDIOC_QBUF, &buffer->buf) != 0)
			{
				perror("Could not queue buffer");
				return;
			}
		}

		if(FD_ISSET(m_depth_fd, &fds))
		{
			v4l2_buffer buf;
			memset(&buf, 0, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_USERPTR;

			if(ioctl(m_depth_fd, VIDIOC_DQBUF, &buf) != 0)
			{
				if(errno == EAGAIN)
					continue;
				perror("Could not dequeue buffer");
				return;
			}

			DepthBuffer* buffer = &m_depth_buffers[buf.index];

			buffer->image->header.stamp = timeFromTimeval(buf.timestamp);

			m_lastDepthImage = buffer->image;
			m_lastDepthSeq = buf.sequence;

			m_depth_info.header.stamp = buffer->image->header.stamp;
			m_pub_depth.publish(buffer->image, boost::make_shared<sensor_msgs::CameraInfo>(m_depth_info));

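			// The published message still references this buffer's memory, so allocate
			// a fresh image before handing the buffer back to the driver.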
			buffer->image.reset();

			buffer->image = createDepthImage();
			buffer->buf.m.userptr = (long unsigned int)buffer->image->data.data();

			if(ioctl(m_depth_fd, VIDIOC_QBUF, &buffer->buf) != 0)
			{
				perror("Could not queue buffer");
				return;
			}
		}

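		// Only build point clouds when the latest color and depth frames carry the same
		// sequence number, i.e. they belong to the same capture instant.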
		if(m_lastColorSeq == m_lastDepthSeq)
		{
			if(m_pub_cloud.getNumSubscribers() != 0)
				publishPointCloud(m_lastDepthImage, &m_cloudGenerator, &m_pub_cloud);

			if(m_pub_filledCloud.getNumSubscribers() != 0)
			{
				sensor_msgs::ImagePtr filledDepth = m_depthFiller.fillDepth(
					m_lastDepthImage, m_lastColorImage
				);
				publishPointCloud(filledDepth, &m_filledCloudGenerator, &m_pub_filledCloud);
			}
		}
	}

	fprintf(stderr, "read_thread: exiting\n");
}