BayesianClass::BayesianClass(string name,string path):className(name),fileCount(0)
{
	DIR *dir;
	struct dirent *ent;
	cout<<"Calculating "<<className<<endl;
	if ((dir = opendir (path.c_str())) != NULL)
	{
		/* iterate over all regular files within the directory */
		while ((ent = readdir (dir)) != NULL) 
		{
			struct _stat buf;
			string entry_name(ent->d_name);
			if((entry_name==".")||(entry_name==".."))
				continue;
			string class_path = path + "\\" + entry_name;
			int status = _stat(class_path.c_str(),&buf);
			if(status!=0)
				continue;
			if (!(buf.st_mode&S_IFREG))
				continue;
			calculateHistogram(class_path);
		}
		closedir (dir);
	}
	else
	{
		/* could not open directory */
		perror ("opendir");
		exit(errno);
	}

}
void RobotIdentifier::loadShirtImages()
{
    std::string path;
    ros::param::get("/vision/tracker/team", team_color_);
    path = ros::package::getPath("unball");
    path += "/data/camisas/";

    // Loads the shirt images
    if (team_color_ == "Blue")
    {
        shirt_images_[0] = cv::imread(path + "red/blue.png", CV_LOAD_IMAGE_COLOR);
        shirt_images_[1] = cv::imread(path + "green/blue.png", CV_LOAD_IMAGE_COLOR);
        shirt_images_[2] = cv::imread(path + "purple/blue.png", CV_LOAD_IMAGE_COLOR);
    }
    else if (team_color_ == "Yellow")
    {
        shirt_images_[0] = cv::imread(path + "red/yellow.png", CV_LOAD_IMAGE_COLOR);
        shirt_images_[1] = cv::imread(path + "green/yellow.png", CV_LOAD_IMAGE_COLOR);
        shirt_images_[2] = cv::imread(path + "purple/yellow.png", CV_LOAD_IMAGE_COLOR);
    }
    else
    {
        ROS_ERROR("Unknown team identification");
    }

    // Calculates shirt histograms
    for (int i = 0; i < 3; ++i)
        shirt_histograms_[i] = calculateHistogram(shirt_images_[i]);
}
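
The `calculateHistogram` helper called above is not shown in these examples. Purely as a hedged sketch of what such a helper might look like with OpenCV, assuming a normalized hue/saturation histogram in HSV space (the bin counts, normalization, and the name `calculateHistogramSketch` are assumptions, not the original implementation):

#include <opencv2/opencv.hpp>

// Hypothetical sketch: compute a normalized 2-D hue/saturation histogram of a BGR image.
// Bin counts and ranges are illustrative choices only.
static cv::Mat calculateHistogramSketch(const cv::Mat& bgr_image)
{
    cv::Mat hsv;
    cv::cvtColor(bgr_image, hsv, cv::COLOR_BGR2HSV);

    int channels[] = {0, 1};        // hue and saturation channels
    int hist_size[] = {30, 32};     // 30 hue bins, 32 saturation bins
    float hue_range[] = {0, 180};
    float sat_range[] = {0, 256};
    const float* ranges[] = {hue_range, sat_range};

    cv::Mat hist;
    cv::calcHist(&hsv, 1, channels, cv::Mat(), hist, 2, hist_size, ranges);
    cv::normalize(hist, hist, 0, 1, cv::NORM_MINMAX);   // scale bins to [0, 1]
    return hist;
}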
void ColorMatcher::addTrainingInstance(const ed::Entity& e, const std::string& property, const std::string& value)
{
    if (property != "type")
        return;

    ColorHistogram color_histogram;
    calculateHistogram(e, color_histogram);

    std::map<std::string, ColorModel>::iterator it = models_.find(value);
    if (it == models_.end())
    {
        // No other training instances for this type
        ColorModel& model = models_[value];
        model.min = color_histogram;
        model.max = color_histogram;
    }
    else
    {
        ColorModel& model = it->second;

        // Track the minimum and maximum for each histogram bin (i.e., for each color)
        for(unsigned int i = 0; i < ColorNameTable::NUM_COLORS; ++i)
        {
            model.min[i] = std::min(model.min[i], color_histogram[i]);
            model.max[i] = std::max(model.max[i], color_histogram[i]);
        }
    }
}
// Display the count BGR images and compute their HSV histograms.
// Then compare each histogram to the first one and report the results.
//
static void showHistogramComparisons(int count,
                                     const char *name[],
                                     const cv::Mat bgr[])
{
    std::vector<cv::Mat> hsv(count);
    std::vector<cv::Mat> histogram(count);
    for (int i = 0; i < count; ++i) {
        makeWindow(name[i], bgr[i]);
        cv::cvtColor(bgr[i], hsv[i], cv::COLOR_BGR2HSV);
    }
    for (int i = 0; i < count; ++i) {
        histogram[i] = calculateHistogram(hsv[i]);
    }
    compareHistograms(histogram, name);
    std::cout << "Press a key to quit." << std::endl;
    cv::waitKey(0);
}
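
`compareHistograms` is likewise left out of the snippet. A possible sketch, assuming the convention described in the comment above (compare each histogram against the first) and OpenCV's `cv::compareHist` with the correlation metric; the function name and the choice of metric are assumptions:

#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>

// Hypothetical sketch: report how each histogram compares to the first one.
static void compareHistogramsSketch(const std::vector<cv::Mat>& histogram,
                                    const char* name[])
{
    for (std::size_t i = 0; i < histogram.size(); ++i) {
        // HISTCMP_CORREL (OpenCV 3 name; CV_COMP_CORREL in 2.x): 1.0 means identical.
        double score = cv::compareHist(histogram[0], histogram[i], cv::HISTCMP_CORREL);
        std::cout << name[0] << " vs " << name[i] << ": " << score << std::endl;
    }
}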
void ColorMatcher::classify(const ed::Entity& e, const std::string& property,
                            const ed::perception::CategoricalDistribution& prior,
                            ed::perception::ClassificationOutput& output) const
{
    tue::Configuration& result = output.data;

    ColorHistogram color_histogram;
    calculateHistogram(e, color_histogram);

    if (property == "color")
    {
        for(unsigned int i = 0; i < ColorNameTable::NUM_COLORS; ++i)
            output.likelihood.setScore(color_table_.intToColorName(i), color_histogram[i]);
        output.likelihood.setUnknownScore(0);
        return;
    }

    for(std::map<std::string, ColorModel>::const_iterator it = models_.begin(); it != models_.end(); ++it)
    {
        const std::string& model_name = it->first;
        const ColorModel& model = it->second;

        output.likelihood.setScore(model_name, 1);
        for(unsigned int i = 0; i < ColorNameTable::NUM_COLORS; ++i)
        {
            float v = color_histogram[i];
            if (v < model.min[i] - color_margin_ || v > model.max[i] + color_margin_)
                output.likelihood.setScore(model_name, 0);
        }
    }

    // Represent data (for debugging)
    result.writeArray("colors");
    for(unsigned int i = 0; i < ColorNameTable::NUM_COLORS; ++i)
    {
        result.addArrayItem();
        result.setValue("color", ColorNameTable::intToColorName(i));
        result.setValue("value", color_histogram[i]);
        result.endArrayItem();
    }
    result.endArray();
}
Example 6
void drawFrame()
{
	// calculate locations
	g_DrawConfig.DepthLocation.uBottom = 0;
	g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.DepthLocation.uLeft = 0;
	g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X - 1;

	g_DrawConfig.ColorLocation.uBottom = 0;
	g_DrawConfig.ColorLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.ColorLocation.uLeft = 0;
	g_DrawConfig.ColorLocation.uRight = WIN_SIZE_X - 1;

	if (g_DrawConfig.Streams.ScreenArrangement == SIDE_BY_SIDE)
	{
		g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X / 2 - 1;
		g_DrawConfig.ColorLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.ColorLocation.uLeft = WIN_SIZE_X / 2;
	}

	// Texture map init
	openni::VideoFrameRef* pDepthMD = &getDepthFrame();
	if (isDepthOn() && pDepthMD->isValid())
	{
		int maxDepth = getDepthStream().getMaxPixelValue();
		g_fMaxDepth = maxDepth;
		
		TextureMapInit(&g_texDepth, pDepthMD->getVideoMode().getResolutionX(), pDepthMD->getVideoMode().getResolutionY(), 4, pDepthMD->getWidth(), pDepthMD->getHeight());
		fixLocation(&g_DrawConfig.DepthLocation, pDepthMD->getVideoMode().getResolutionX(), pDepthMD->getVideoMode().getResolutionY());
	}

	openni::VideoFrameRef* pImageMD = NULL;

	if (isColorOn())
	{
		pImageMD = &getColorFrame();
	}
 	else if (isIROn())
 	{
 		pImageMD = &getIRFrame();
 	}

	if (pImageMD != NULL && pImageMD->isValid())
	{
		TextureMapInit(&g_texColor, pImageMD->getVideoMode().getResolutionX(), pImageMD->getVideoMode().getResolutionY(), 4, pImageMD->getWidth(), pImageMD->getHeight());
		fixLocation(&g_DrawConfig.ColorLocation, pImageMD->getVideoMode().getResolutionX(), pImageMD->getVideoMode().getResolutionY());
	}

	// check if pointer is over a map
	bool bOverDepth = (pDepthMD != NULL && pDepthMD->isValid()) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.DepthLocation);
	bool bOverImage = (pImageMD != NULL && pImageMD->isValid()) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.ColorLocation);
	bool bDrawDepthPointer = false;
	bool bDrawImagePointer = false;
	int imagePointerRed = 255;
	int imagePointerGreen = 0;
	int imagePointerBlue = 0;

	IntPair pointerInDepth = {0,0};
	IntPair pointerInColor = {0,0};

	if (bOverImage)
	{
		pointerInColor.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.ColorLocation.uLeft) / (g_DrawConfig.ColorLocation.uRight - g_DrawConfig.ColorLocation.uLeft + 1) * pImageMD->getVideoMode().getResolutionX();
		pointerInColor.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.ColorLocation.uBottom) / (g_DrawConfig.ColorLocation.uTop - g_DrawConfig.ColorLocation.uBottom + 1) * pImageMD->getVideoMode().getResolutionY();
		bDrawImagePointer = true;
	}

	if (bOverDepth)
	{
		pointerInDepth.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.DepthLocation.uLeft) / (g_DrawConfig.DepthLocation.uRight - g_DrawConfig.DepthLocation.uLeft + 1) * pDepthMD->getVideoMode().getResolutionX();
		pointerInDepth.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.DepthLocation.uBottom) / (g_DrawConfig.DepthLocation.uTop - g_DrawConfig.DepthLocation.uBottom + 1) * pDepthMD->getVideoMode().getResolutionY();
		bDrawDepthPointer = true;

		if (!bOverImage && g_DrawConfig.bShowPointer &&
			pointerInDepth.X >= pDepthMD->getCropOriginX() && pointerInDepth.X < (pDepthMD->getCropOriginX() + pDepthMD->getWidth()) &&
			pointerInDepth.Y >= pDepthMD->getCropOriginY() && pointerInDepth.Y < (pDepthMD->getCropOriginY() + pDepthMD->getHeight()))
		{

			// try to translate depth pixel to image
			openni::DepthPixel* pDepthPixels = (openni::DepthPixel*)pDepthMD->getData();
			openni::DepthPixel pointerDepth = pDepthPixels[(pointerInDepth.Y - pDepthMD->getCropOriginY()) * pDepthMD->getWidth() + (pointerInDepth.X - pDepthMD->getCropOriginX())];
			if (convertDepthPointToColor(pointerInDepth.X, pointerInDepth.Y, pointerDepth, &pointerInColor.X, &pointerInColor.Y))
			{
				bDrawImagePointer = true;
				imagePointerRed = 0;
				imagePointerGreen = 0;
				imagePointerBlue = 255;
			}
		}
	}

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

	// Setup the opengl env for fixed location view
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0,WIN_SIZE_X,WIN_SIZE_Y,0,-1.0,1.0);
	glDisable(GL_DEPTH_TEST); 

	if (g_DrawConfig.Streams.Depth.Coloring == CYCLIC_RAINBOW_HISTOGRAM || g_DrawConfig.Streams.Depth.Coloring == LINEAR_HISTOGRAM || g_DrawConfig.bShowPointer)
		calculateHistogram();

	drawColor(&g_DrawConfig.ColorLocation, bDrawImagePointer ? &pointerInColor : NULL, imagePointerRed, imagePointerGreen, imagePointerBlue);

	drawDepth(&g_DrawConfig.DepthLocation, bDrawDepthPointer ? &pointerInDepth : NULL);

	printRecordingInfo();

	if (g_DrawConfig.bShowPointer)
		drawPointerMode(bOverDepth ? &pointerInDepth : NULL);

	drawUserInput(!bOverDepth && !bOverImage);

	drawUserMessage();
	drawPlaybackSpeed();

	if (g_DrawConfig.strErrorState[0] != '\0')
		drawErrorState();

	if (g_DrawConfig.bHelp)
		drawHelpScreen();

	glutSwapBuffers();
}
Example 7
void drawFrame()
{
	// calculate locations
	g_DrawConfig.DepthLocation.uBottom = 0;
	g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.DepthLocation.uLeft = 0;
	g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X - 1;

	g_DrawConfig.ImageLocation.uBottom = 0;
	g_DrawConfig.ImageLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.ImageLocation.uLeft = 0;
	g_DrawConfig.ImageLocation.uRight = WIN_SIZE_X - 1;

	if (g_DrawConfig.Streams.ScreenArrangement == SIDE_BY_SIDE)
	{
		g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X / 2 - 1;
		g_DrawConfig.ImageLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.ImageLocation.uLeft = WIN_SIZE_X / 2;
	}

	// Texture map init
	const DepthMetaData* pDepthMD = getDepthMetaData();
	if (isDepthOn())
	{
		g_nMaxDepth = getDepthGenerator()->GetDeviceMaxDepth();
		TextureMapInit(&g_texDepth, pDepthMD->FullXRes(), pDepthMD->FullYRes(), 4, pDepthMD->XRes(), pDepthMD->YRes());
		fixLocation(&g_DrawConfig.DepthLocation, pDepthMD->FullXRes(), pDepthMD->FullYRes());
	}

	const MapMetaData* pImageMD = NULL;

	if (isImageOn())
	{
		pImageMD = getImageMetaData();
	}
	else if (isIROn())
	{
		pImageMD = getIRMetaData();
	}

	if (pImageMD != NULL)
	{
		TextureMapInit(&g_texImage, pImageMD->FullXRes(), pImageMD->FullYRes(), 4, pImageMD->XRes(), pImageMD->YRes());
		fixLocation(&g_DrawConfig.ImageLocation, pImageMD->FullXRes(), pImageMD->FullYRes());
	}

	// check if pointer is over a map
	bool bOverDepth = (pDepthMD != NULL) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.DepthLocation);
	bool bOverImage = (pImageMD != NULL) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.ImageLocation);
	bool bDrawDepthPointer = false;
	bool bDrawImagePointer = false;
	int imagePointerRed = 255;
	int imagePointerGreen = 0;
	int imagePointerBlue = 0;

	IntPair pointerInDepth;
	IntPair pointerInImage;

	if (bOverImage)
	{
		pointerInImage.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.ImageLocation.uLeft) / (g_DrawConfig.ImageLocation.uRight - g_DrawConfig.ImageLocation.uLeft + 1) * pImageMD->FullXRes();
		pointerInImage.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.ImageLocation.uBottom) / (g_DrawConfig.ImageLocation.uTop - g_DrawConfig.ImageLocation.uBottom + 1) * pImageMD->FullYRes();
		bDrawImagePointer = true;
	}

	if (bOverDepth)
	{
		pointerInDepth.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.DepthLocation.uLeft) / (g_DrawConfig.DepthLocation.uRight - g_DrawConfig.DepthLocation.uLeft + 1) * pDepthMD->FullXRes();
		pointerInDepth.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.DepthLocation.uBottom) / (g_DrawConfig.DepthLocation.uTop - g_DrawConfig.DepthLocation.uBottom + 1) * pDepthMD->FullYRes();

		// make sure we're in cropped area
		if (pointerInDepth.X >= pDepthMD->XOffset() && pointerInDepth.X < (pDepthMD->XOffset() + pDepthMD->XRes()) &&
			pointerInDepth.Y >= pDepthMD->YOffset() && pointerInDepth.Y < (pDepthMD->YOffset() + pDepthMD->YRes()))
		{
			bDrawDepthPointer = true;
			if (!bOverImage && g_DrawConfig.bShowPointer)
			{
				// try to translate depth pixel to image
				if (getImageCoordinatesForDepthPixel(pointerInDepth.X, pointerInDepth.Y, pointerInImage.X, pointerInImage.Y))
				{
					bDrawImagePointer = true;
					imagePointerRed = 0;
					imagePointerGreen = 0;
					imagePointerBlue = 255;
				}
			}
		}
	}

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

	// Setup the opengl env for fixed location view
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0,WIN_SIZE_X,WIN_SIZE_Y,0,-1.0,1.0);
	glDisable(GL_DEPTH_TEST); 

	if (g_DrawConfig.Streams.Depth.Coloring == CYCLIC_RAINBOW_HISTOGRAM || g_DrawConfig.Streams.Depth.Coloring == LINEAR_HISTOGRAM || g_DrawConfig.bShowPointer)
		calculateHistogram();

	drawColorImage(&g_DrawConfig.ImageLocation, bDrawImagePointer ? &pointerInImage : NULL, imagePointerRed, imagePointerGreen, imagePointerBlue);

	drawDepth(&g_DrawConfig.DepthLocation, bDrawDepthPointer ? &pointerInDepth : NULL);

	printStatisticsInfo();
	printRecordingInfo();

	if (g_DrawConfig.bShowPointer)
		drawPointerMode(bDrawDepthPointer ? &pointerInDepth : NULL);

	drawUserInput(!bOverDepth && !bOverImage);

	drawUserMessage();
	drawPlaybackSpeed();

	if (g_DrawConfig.strErrorState != NULL)
		drawErrorState();

	if (g_DrawConfig.bHelp)
		drawHelpScreen();

	glutSwapBuffers();
}
Example 8
void SampleViewer::display()
{
	if (!m_pClosestPointListener->isAvailable())
	{
		return;
	}

	openni::VideoFrameRef depthFrame = m_pClosestPointListener->getFrame();
	const closest_point::IntPoint3D& closest = m_pClosestPointListener->getClosestPoint();
	m_pClosestPointListener->setUnavailable();

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getWidth(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getHeight(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}



	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrame.isValid())
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrame.isValid())
	{
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);
		int width = depthFrame.getWidth();
		int height = depthFrame.getHeight();

		for (int y = 0; y < height; ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < width; ++x, ++pDepth, ++pTex)
			{
				if (*pDepth != 0)
				{
					if (*pDepth == closest.Z)
					{
						factor[0] = factor[1] = 0;
					}
//					// Add debug lines - every 10cm
// 					else if ((*pDepth / 10) % 10 == 0)
// 					{
// 						factor[0] = factor[2] = 0;
// 					}

					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	int nXRes = depthFrame.getWidth();
	int nYRes = depthFrame.getHeight();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	float closestCoordinates[3] = {closest.X*GL_WIN_SIZE_X/float(depthFrame.getWidth()), closest.Y*GL_WIN_SIZE_Y/float(depthFrame.getHeight()), 0};

	glVertexPointer(3, GL_FLOAT, 0, closestCoordinates);
	glColor3f(1.f, 0.f, 0.f);
	glPointSize(10);
	glDrawArrays(GL_POINTS, 0, 1);
	glFlush();

	// Swap the OpenGL display buffers
	glutSwapBuffers();

}
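
None of these snippets show the depth-histogram helper itself. As a hedged sketch only, assuming the cumulative-histogram convention the OpenNI samples commonly use (which matches how `m_pDepthHist[*pDepth]` is consumed as a 0..255 brightness above), it might look roughly like this; the name `calculateHistogramSketch` is mine:

#include <string.h>
#include <OpenNI.h>

// Hypothetical sketch of the depth-histogram helper these samples call:
// build a cumulative histogram of depth values and rescale it so each entry
// becomes a 0..255 brightness, with more frequent (nearer) depths brighter.
static void calculateHistogramSketch(float* pHistogram, int histogramSize,
                                     const openni::VideoFrameRef& frame)
{
	const openni::DepthPixel* pDepth = (const openni::DepthPixel*)frame.getData();
	memset(pHistogram, 0, histogramSize * sizeof(float));

	int restOfRow = frame.getStrideInBytes() / sizeof(openni::DepthPixel) - frame.getWidth();
	unsigned int nPoints = 0;

	// Count how often each depth value occurs (depth 0 means "no reading").
	for (int y = 0; y < frame.getHeight(); ++y)
	{
		for (int x = 0; x < frame.getWidth(); ++x, ++pDepth)
		{
			if (*pDepth != 0 && *pDepth < histogramSize)
			{
				pHistogram[*pDepth]++;
				nPoints++;
			}
		}
		pDepth += restOfRow;
	}

	// Turn the counts into a cumulative distribution ...
	for (int i = 1; i < histogramSize; ++i)
		pHistogram[i] += pHistogram[i - 1];

	// ... and map it to a 0..255 brightness per depth value.
	if (nPoints != 0)
	{
		for (int i = 1; i < histogramSize; ++i)
			pHistogram[i] = 256.0f * (1.0f - (pHistogram[i] / nPoints));
	}
}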
Example 9
bool CHistogram2D::compute() throw()
{
	typedef boost::shared_ptr<ImageType> ImagePtr;
	typedef typename ImageType::TDataType ValueType;
  ImagePtr imagePtr = static_pointer_cast<ImageType>( getInput() );
  ImagePtr classPtr = static_pointer_cast<ImageType>( getInput(1) );
  ImagePtr gradientPtr = static_pointer_cast<ImageType>( getInput(2) );
  if ( !checkInput<ImageType>( imagePtr, 2, 3 )
  	|| !checkInput<ImageType>( classPtr, 2, 3 )
  	|| !checkInput<ImageType>( gradientPtr, 2, 3 ) )
  {
    return false;
  }
	bModuleReady = true;
  deleteOldOutput();

  double dClassOneMean = 0.0;
  double dClassOneVariance = 0.0;
  vector<ValueType> classOneValuesVec;
  double dClassTwoMean = 0.0;
  double dClassTwoVariance = 0.0;
  vector<ValueType> classTwoValuesVec;

  uint uiClassOne = parameters.getUnsignedLong( "Class1" );
  uint uiClassTwo = parameters.getUnsignedLong( "Class2" );
  cerr << uiClassOne << " " << uiClassTwo << endl;
	// Calculate intensity mean and variance of the selected classes
	typename ImageType::iterator classIt = classPtr->begin();
	typename ImageType::iterator imageIt = imagePtr->begin();
	while( classIt != classPtr->end() )
	{
		if ( *classIt == static_cast<ValueType>( uiClassOne ) )
			classOneValuesVec.push_back( *imageIt );
		else if ( *classIt == static_cast<ValueType>( uiClassTwo ) )
			classTwoValuesVec.push_back( *imageIt );
		++classIt;
		++imageIt;
	}

  for_each( classOneValuesVec.begin(), classOneValuesVec.end(), dClassOneMean += _1 );
  dClassOneMean /= static_cast<double>( classOneValuesVec.size() );
  for_each( classOneValuesVec.begin(), classOneValuesVec.end(),
  	dClassOneVariance += ( _1 - dClassOneMean ) * ( _1 - dClassOneMean ) );
  dClassOneVariance /= static_cast<double>( classOneValuesVec.size() );
  dClassOneVariance = sqrt( dClassOneVariance );

	for_each( classTwoValuesVec.begin(), classTwoValuesVec.end(), dClassTwoMean += _1 );
  dClassTwoMean /= static_cast<double>( classTwoValuesVec.size() );
  for_each( classTwoValuesVec.begin(), classTwoValuesVec.end(),
  	dClassTwoVariance += ( _1 - dClassTwoMean ) * ( _1 - dClassTwoMean ) );
  dClassTwoVariance /= static_cast<double>( classTwoValuesVec.size() );
  dClassTwoVariance = sqrt( dClassTwoVariance );

cerr << " C1 (" << uiClassOne << ") : Mean " << dClassOneMean << " SD " << dClassOneVariance << endl
<< " C2 (" << uiClassTwo << ") : Mean " << dClassTwoMean << " SD " << dClassTwoVariance << endl;

	// Keep intensities within one standard deviation of either class mean
	int rangeMin, rangeMax;
	rangeMin = static_cast<int>(
		std::min( dClassOneMean - dClassOneVariance, dClassTwoMean - dClassTwoVariance ) );
	rangeMax = static_cast<int>(
		std::max( dClassOneMean + dClassOneVariance, dClassTwoMean + dClassTwoVariance ) );

	ImagePtr tmp( new ImageType( *gradientPtr ) );
	classIt = imagePtr->begin();
	imageIt = tmp->begin();
	while( classIt != imagePtr->end() )
	{
		if ( *classIt < rangeMin || *classIt > rangeMax )
			*imageIt = 0;
		++classIt;
		++imageIt;
	}
	calculateHistogram( tmp );
	// Do a gaussian smoothing on the histogram
	// Compute gaussian mask
	double dSigma = parameters.getDouble( "HistoSmooth" );
cerr << "HistoSmooth " << dSigma << endl;
	int iMaskSize = static_cast<int>( floor( 6.0 * dSigma + 0.5 ) );
	vector<double> dGaussMask;
	for( int i = -( iMaskSize / 2 ); i <= ( iMaskSize / 2 ); ++i )
	{
		double dExponential = ( 1.0 / sqrt( dSigma * dSigma * 2.0 * M_PI ) )
			* exp( static_cast<double>( -i * i ) / ( 2.0 * ( dSigma * dSigma ) ) );
		dGaussMask.push_back( dExponential );
	}

cerr << "Mask size " << iMaskSize << endl;

	// Zero-pad the histogram so the smoothing kernel can be applied at the borders
	vector<double> shiftedHisto;
	for( int i = 0; i < iMaskSize/2; ++i )
		shiftedHisto.push_back(0.0);
	for( uint i = 0; i < theHistogramVec[0].size(); ++i )
	{
		shiftedHisto.push_back( theHistogramVec[0][i] );
	}
	for( int i = 0; i <= iMaskSize/2; ++i )
		shiftedHisto.push_back(0.0);
   const uint uiShift = iMaskSize/2;
	vector<double> smoothedHisto( shiftedHisto.size() + uiShift);

	for( uint i = uiShift+1; i < shiftedHisto.size() - uiShift; ++i )
	{
		double smoothed = 0.0;
		for( int j = -static_cast<int>(uiShift); j <= static_cast<int>(uiShift); ++j )
		{
			smoothed += shiftedHisto[i+j] * dGaussMask[j+uiShift];
		}
		smoothedHisto[i] = smoothed;
	}
	// Note: the assignment below discards the smoothed values and keeps the
	// zero-padded histogram, so the smoothing above is effectively bypassed.
	smoothedHisto = shiftedHisto;
	// Smooth
	// Reshift histogram
	// Determine the first peak g1 (scan until the histogram starts to decrease)
	uint g1 = 0;
	double gv1 = 0.0;
	for( uint i = uiShift+1; i < smoothedHisto.size(); ++i )
	{
		if ( smoothedHisto[i] >= smoothedHisto[i-1] && smoothedHisto[i] >= gv1 )
		{
			gv1 = smoothedHisto[i]; g1 = i;
		}
		else if ( smoothedHisto[i] < smoothedHisto[i-1] )
			break;
	}
	// Determine the next peak g2, at least "Shift" bins further on
/*	if ( g1 == 0 ) g1 = parameters.getUnsignedLong("Shift");*/
	uint t = g1 + parameters.getUnsignedLong("Shift");
	while( smoothedHisto[t] <= smoothedHisto[t-1] )
		++t;
	uint g2 = t+1;
	double gv2 = 0;
	for( uint i = t+1; i < smoothedHisto.size(); ++i )
	{
		if ( smoothedHisto[i] > smoothedHisto[i-1] && smoothedHisto[i] > gv2 )
		{
			gv2 = smoothedHisto[i]; g2 = i;
		}
		else if ( smoothedHisto[i] < smoothedHisto[i-1] )
			break;
	}
	if ( g1 == 0 )
	{
		g1 = g2;
		t = g1+parameters.getUnsignedLong("Shift");
		while( smoothedHisto[t] <= smoothedHisto[t-1] )
			++t;
		g2 = t+1;
		gv2 = 0;
		for( uint i = t+1; i < smoothedHisto.size(); ++i )
		{
			if ( smoothedHisto[i] > smoothedHisto[i-1] && smoothedHisto[i] > gv2 )
			{
				gv2 = smoothedHisto[i]; g2 = i;
			}
			else if ( smoothedHisto[i] < smoothedHisto[i-1] )
				break;
		}
	}
	if ( g2-g1 > (2 * parameters.getUnsignedLong("Shift")) )
		g2 = g1 + 2 * parameters.getUnsignedLong("Shift");
   g1--;
   g2--;
 	cerr << "Threshold values are " << g1 << " and " << g2 << endl;
	parameters.setUnsignedLong("T1", static_cast<ulong>( g1 * 0.33)-1 );
	parameters.setUnsignedLong("T2",
		static_cast<ulong>( std::max( g1 * 0.66, g1 + parameters.getDouble("Shift") + 1) ) - 1 );
ofstream file( "/home/hendrik/orig_histogram.data" );
	for( uint i = 0; i < theHistogramVec[0].size(); ++i )
	{
		file << i << " " << theHistogramVec[0][i] << endl;
	}
	file.close();
ofstream file2( "/home/hendrik/shifted_histogram.data" );
	for( uint i = 0; i < shiftedHisto.size(); ++i )
	{
		file2 << i << " " << shiftedHisto[i] << endl;
	}
	file2.close();
ofstream file3( "/home/hendrik/smoothed_histogram.data" );
	for( uint i = 0; i < smoothedHisto.size(); ++i )
	{
		file3 << i << " " << smoothedHisto[i] << endl;
	}
	file3.close();
	return true;
}
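
The zero-padding and convolution bookkeeping above is fragile (note the signed/unsigned index arithmetic and that the smoothed result is ultimately overwritten by the padded histogram). For comparison, a small self-contained sketch of plain 1-D Gaussian smoothing of a histogram with zero padding at the borders; it illustrates the intended operation and is not the module's actual code:

#include <cmath>
#include <vector>

// Hypothetical sketch: smooth a 1-D histogram with a normalized Gaussian kernel.
// Bins outside the histogram are treated as zero.
static std::vector<double> smoothHistogramSketch(const std::vector<double>& histo, double sigma)
{
	const int radius = static_cast<int>(std::floor(3.0 * sigma + 0.5));
	std::vector<double> kernel(2 * radius + 1);
	double sum = 0.0;
	for (int i = -radius; i <= radius; ++i)
	{
		kernel[i + radius] = std::exp(-(i * i) / (2.0 * sigma * sigma));
		sum += kernel[i + radius];
	}
	for (std::size_t k = 0; k < kernel.size(); ++k)
		kernel[k] /= sum;   // normalize so the histogram mass is preserved

	std::vector<double> smoothed(histo.size(), 0.0);
	for (int i = 0; i < static_cast<int>(histo.size()); ++i)
	{
		double value = 0.0;
		for (int j = -radius; j <= radius; ++j)
		{
			const int idx = i + j;
			if (idx >= 0 && idx < static_cast<int>(histo.size()))
				value += histo[idx] * kernel[j + radius];
		}
		smoothed[i] = value;
	}
	return smoothed;
}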
Example 10
void SampleViewer::display()
{
	int changedIndex;
	openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);

	if (rc != openni::STATUS_OK)
	{
		printf("Wait failed\n");
		return;
	}

	switch (changedIndex)
	{
	case 0:
		m_depth1.readFrame(&m_depth1Frame); break;
	case 1:
		m_depth2.readFrame(&m_depth2Frame); break;
	default:
		printf("Error in wait\n");
	}

	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (m_depth1Frame.isValid() && m_eViewState != DISPLAY_MODE_DEPTH2)
		calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depth1Frame);
	else
		calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depth2Frame);

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	// check if we need to draw image frame to texture

	switch (m_eViewState)
	{
	case DISPLAY_MODE_DEPTH1:
		displayFrame(m_depth1Frame);
		break;
	case DISPLAY_MODE_DEPTH2:
		displayFrame(m_depth2Frame);
		break;
	default:
		displayBothFrames();
	}

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glBegin(GL_QUADS);

	int nXRes = m_width;
	int nYRes = m_height;

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();

	// Swap the OpenGL display buffers
	glutSwapBuffers();

}
void SampleViewer::Display()
{
	// namespace bg = boost::geometry;	
	nite::UserTrackerFrameRef userTrackerFrame;
	openni::VideoFrameRef depthFrame;
	nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	depthFrame = userTrackerFrame.getDepthFrame();

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrame.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrame.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();

		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;

						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
// 					else if ((*pDepth / 10) % 10 == 0)
// 					{
// 						factor[0] = factor[2] = 0;
// 					}

					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	g_nXRes = depthFrame.getVideoMode().getResolutionX();
	g_nYRes = depthFrame.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
	// 2015-11-16: removed the for loop below; only the first detected user is tracked
	// for (int i = 0; i < users.getSize(); ++i)
	// {
		//const nite::UserData& user = users[i];
	if (users.getSize() > 0) {
		const nite::UserData& user = users[0];

		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			if (g_drawStatusLabel)
			{
				DrawStatusLabel(m_pUserTracker, user);
			}
			if (g_drawCenterOfMass)
			{
				DrawCenterOfMass(m_pUserTracker, user);
			}
			if (g_drawBoundingBox)
			{
				DrawBoundingBox(user);
			}

			if (user.getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
			{
				DrawSkeleton(m_pUserTracker, user);
			}
		}

		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second(s) to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		} //user

		char buffer[80] = "";
		// ---------------- calculate right shoulder 2 DOF ----------------------
		int rightShoulderX, rightShoulderY, rightShoulderZ;

		rightShoulderX = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().x - 
				user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().x;
		rightShoulderY = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().y - 
				user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().y;
		rightShoulderZ = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().z - 
				user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().z;

		Spherical rightShoulderSpherical;
		rightShoulderSpherical = Cartesian2Spherical(rightShoulderX, rightShoulderY, rightShoulderZ);
		int rightShoulderPhi, rightShoulderTheta;
		rightShoulderTheta = radian2Degree(rightShoulderSpherical.radianTheta, rightShoulderThetaInit);	// about -60 degrees when the arm is raised horizontally
		rightShoulderPhi = radian2Degree(rightShoulderSpherical.radianPhi, rightShoulderPitchInit);			// about 70 degrees when the hand is down
		int rightShoulderYawNow = rightShoulderPhi * sin(rightShoulderTheta * PI / 180);
		int rightShoulderPitchNow = rightShoulderPhi * cos(rightShoulderTheta * PI / 180);

		sprintf(buffer,"(rightShoulderTheta=%d, rightShoulderYaw=%d, rightShoulderPitch=%d)", rightShoulderTheta, rightShoulderYawNow, rightShoulderPitchNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ------------------- calculate right elbow 2 DOF -----------------------
		int rightElbowX, rightElbowY, rightElbowZ;

		rightElbowX = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().x - 
			 user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().x;
		rightElbowY = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().y - 
			 user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().y;
		rightElbowZ = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().z - 
			 user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().z;

		Spherical rightElbowSpherical;
		rightElbowSpherical = Cartesian2Spherical(rightElbowX, rightElbowY, rightElbowZ);
		int rightElbowThetaNow = - radian2Degree(rightElbowSpherical.radianTheta, rightElbowThetaInit);	// sign reversed for this coordinate system
		int rightElbowYawNow = radian2Degree(rightElbowSpherical.radianPhi, rightElbowYawInit);

		sprintf(buffer,"(rightElbowThetaNow=%d, rightElbowYawNow=%d)", rightElbowThetaNow, rightElbowYawNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 60);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ---------------- calculate left shoulder 2 DOF ----------------------
		int leftShoulderX, leftShoulderY, leftShoulderZ;

		leftShoulderX = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().x - 
				user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().x;
		leftShoulderY = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().y - 
				user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().y;
		leftShoulderZ = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().z - 
				user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().z;

		Spherical leftShoulderSpherical;
		leftShoulderSpherical = Cartesian2Spherical(leftShoulderX, leftShoulderY, leftShoulderZ);
		int leftShoulderPhi, leftShoulderTheta;
		leftShoulderTheta = radian2Degree(leftShoulderSpherical.radianTheta, leftShoulderThetaInit);	// about -60 degrees when the arm is raised horizontally
		leftShoulderPhi = radian2Degree(leftShoulderSpherical.radianPhi, leftShoulderPitchInit);			// about 70 degrees when the hand is down
		// the sign is reversed on the left side
		int leftShoulderYawNow = - leftShoulderPhi * sin(leftShoulderTheta * PI / 180);
		int leftShoulderPitchNow = - leftShoulderPhi * cos(leftShoulderTheta * PI / 180);

		sprintf(buffer,"(leftShoulderTheta=%d, leftShoulderYaw=%d, leftShoulderPitch=%d)", leftShoulderTheta, leftShoulderYawNow, leftShoulderPitchNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 100);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ------------------- calculate left elbow 2 DOF -----------------------
		int leftElbowX, leftElbowY, leftElbowZ;

		leftElbowX = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().x - 
			 user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().x;
		leftElbowY = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().y - 
			 user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().y;
		leftElbowZ = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().z - 
			 user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().z;

		Spherical leftElbowSpherical;
		leftElbowSpherical = Cartesian2Spherical(leftElbowX, leftElbowY, leftElbowZ);
		int leftElbowThetaNow = radian2Degree(leftElbowSpherical.radianTheta, leftElbowThetaInit);
		int leftElbowYawNow = radian2Degree(leftElbowSpherical.radianPhi, leftElbowYawInit);

		sprintf(buffer,"(leftElbowTheta=%d, leftElbowYawNow=%d)", leftElbowThetaNow, leftElbowYawNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 140);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ----------------  constraint movement and publish message  ------------
		int rightShoulderYawDiff = abs(rightShoulderYaw - rightShoulderYawNow);
		int rightShoulderPitchDiff = abs(rightShoulderPitch - rightShoulderPitchNow);
		int rightElbowThetaDiff = abs(rightElbowTheta - rightElbowThetaNow);
		int rightElbowYawDiff = abs(rightElbowYaw - rightElbowYawNow);
		int leftShoulderYawDiff = abs(leftShoulderYaw - leftShoulderYawNow);
		int leftShoulderPitchDiff = abs(leftShoulderPitch - leftShoulderPitchNow);
		int leftElbowThetaDiff = abs(leftElbowTheta - leftElbowThetaNow);
		int leftElbowYawDiff = abs(leftElbowYaw - leftElbowYawNow);
		if ((rightShoulderYawDiff < moveLimitDegree) && (rightShoulderPitchDiff < moveLimitDegree) && (rightElbowThetaDiff < moveLimitDegree) && (rightElbowYawDiff < moveLimitDegree) && (rightShoulderTheta >= 0)) {
			// in range then refresh robot angle
			rightShoulderYaw = rightShoulderYawNow;
			rightShoulderPitch = rightShoulderPitchNow;
			rightElbowTheta = rightElbowThetaNow;
			rightElbowYaw = rightElbowYawNow;
			// change the angle to 0 to 360 and publish
			rightShoulderYawPub = angleHandler(rightShoulderYaw);
			rightShoulderPitchPub = angleHandler(rightShoulderPitch);
			rightElbowThetaPub = angleHandler(rightElbowTheta);
			rightElbowYawPub = angleHandler(rightElbowYaw);
			sprintf(buffer,"tracking!");
			glColor3f(0.0f, 0.0f, 1.0f);
			glRasterPos2i(20, 180);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
			actionPublish(rightShoulderYawPub, rightShoulderPitchPub, rightElbowThetaPub, rightElbowYawPub, leftShoulderYawPub, leftShoulderPitchPub, leftElbowThetaPub, leftElbowYawPub);
		}
		else {
			sprintf(buffer,"shoulder yaw: %d, shoulder pitch: %d, elbow yaw: %d, rightShoulderTheta: %d", rightShoulderYaw - rightShoulderYawNow, rightShoulderPitch - rightShoulderPitchNow, rightElbowYaw - rightElbowYawNow, rightShoulderTheta);
			glColor3f(1.0f, 0.0f, 0.5f);
			glRasterPos2i(20, 180);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
		}
		if((leftShoulderYawDiff < moveLimitDegree) && (leftShoulderPitchDiff < moveLimitDegree) && (leftElbowThetaDiff < moveLimitDegree) && (leftElbowYawDiff < moveLimitDegree) && (leftShoulderTheta >= 0)) {
			leftShoulderYaw = leftShoulderYawNow;
			leftShoulderPitch = leftShoulderPitchNow;
			leftElbowTheta = leftElbowThetaNow;
			leftElbowYaw = leftElbowYawNow;
			leftShoulderYawPub = angleHandler(leftShoulderYaw);
			leftShoulderPitchPub = angleHandler(leftShoulderPitch);
			leftElbowThetaPub = angleHandler(leftElbowTheta);
			leftElbowYawPub = angleHandler(leftElbowYaw);
			sprintf(buffer,"tracking!");
			glColor3f(0.0f, 0.3f, 1.0f);
			glRasterPos2i(20, 220);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
			actionPublish(rightShoulderYawPub, rightShoulderPitchPub, rightElbowThetaPub, rightElbowYawPub, leftShoulderYawPub, leftShoulderPitchPub, leftElbowThetaPub, leftElbowYawPub);
		}
		else {
			sprintf(buffer,"shoulder yaw: %d, shoulder pitch: %d, elbow yaw: %d, leftShoulderTheta: %d", leftShoulderYaw - leftShoulderYawNow, leftShoulderPitch - leftShoulderPitchNow, leftElbowYaw - leftElbowYawNow, leftShoulderTheta);
			glColor3f(0.7f, 0.8f, 2.0f);
			glRasterPos2i(20, 220);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
		}


	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}



	// Swap the OpenGL display buffers
	glutSwapBuffers();

}
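
The helpers `Spherical`, `Cartesian2Spherical` and `radian2Degree` used above are defined elsewhere in that project and are not shown here. Purely as an illustration of the idea (the struct layout, angle conventions, and the role of the `*Init` offsets are guesses, not the original code), such helpers could look like:

#include <cmath>

// Hypothetical sketch of the spherical-coordinate helpers used above.
// The angle conventions and the meaning of the "init" offset are assumptions.
struct SphericalSketch
{
	double radius;
	double radianTheta;  // azimuth in the x-z plane
	double radianPhi;    // inclination measured from the y axis
};

static SphericalSketch cartesianToSphericalSketch(double x, double y, double z)
{
	SphericalSketch s;
	s.radius = std::sqrt(x * x + y * y + z * z);
	s.radianTheta = std::atan2(z, x);
	s.radianPhi = (s.radius > 0.0) ? std::acos(y / s.radius) : 0.0;
	return s;
}

// Convert a radian angle to degrees and subtract a per-joint calibration offset.
static int radianToDegreeSketch(double radian, int initOffsetDegrees)
{
	const double kPi = 3.14159265358979323846;
	return static_cast<int>(radian * 180.0 / kPi) - initOffsetDegrees;
}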
Example 12
void SampleViewer::display()
{
	int changedIndex;
	openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
	if (rc != openni::STATUS_OK)
	{
		printf("Wait failed\n");
		return;
	}

	switch (changedIndex)
	{
	case 0:
		m_depthStream.readFrame(&m_depthFrame); break;
	case 1:
		m_colorStream.readFrame(&m_colorFrame); break;
	default:
		printf("Error in wait\n");
	}

	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (m_depthFrame.isValid())
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	// check if we need to draw image frame to texture
	if ((m_eViewState == DISPLAY_MODE_OVERLAY ||
		m_eViewState == DISPLAY_MODE_IMAGE) && m_colorFrame.isValid())
	{
		const openni::RGB888Pixel* pImageRow = (const openni::RGB888Pixel*)m_colorFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + m_colorFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = m_colorFrame.getStrideInBytes() / sizeof(openni::RGB888Pixel);

		for (int y = 0; y < m_colorFrame.getHeight(); ++y)
		{
			const openni::RGB888Pixel* pImage = pImageRow;
			openni::RGB888Pixel* pTex = pTexRow + m_colorFrame.getCropOriginX();

			for (int x = 0; x < m_colorFrame.getWidth(); ++x, ++pImage, ++pTex)
			{
				*pTex = *pImage;
			}

			pImageRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	// check if we need to draw depth frame to texture
	if ((m_eViewState == DISPLAY_MODE_OVERLAY ||
		m_eViewState == DISPLAY_MODE_DEPTH) && m_depthFrame.isValid())
	{
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)m_depthFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + m_depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = m_depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < m_depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + m_depthFrame.getCropOriginX();

			for (int x = 0; x < m_depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
			{
				if (*pDepth != 0)
				{
					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue;
					pTex->g = nHistValue;
					pTex->b = 0;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glBegin(GL_QUADS);

	int nXRes = m_width;
	int nYRes = m_height;

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)nXRes/(float)m_nTexMapX, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();

	// Swap the OpenGL display buffers
	glutSwapBuffers();

	KVector4 vec;
	m_device.getProperty( KINECT_DEVICE_PROPERTY_ACCELEROMETER, &vec );
	printf( "%f, %f, %f\n", vec.x, vec.y, vec.z );

}
Example 13
void SampleViewer::Display()
{
    if (g_pause)
        return;
    
	nite::UserTrackerFrameRef userTrackerFrame;
    nite::Status rc1 = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc1 != nite::STATUS_OK) {
		printf("GetNextData failed\n");
		return;
	}
    
	openni::VideoFrameRef depthFrameSide = userTrackerFrame.getDepthFrame();
    int height = depthFrameSide.getHeight();
    int width = depthFrameSide.getWidth();

    if (!label) {
    	label = (int *)malloc(width*height*sizeof(int));
    }
    
    openni::VideoFrameRef depthFrameTop;
    openni::Status rc2 = depthStreamTop.readFrame(&depthFrameTop);
	if (rc2 != openni::STATUS_OK) {
		printf("GetNextData failed\n");
		return;
	}

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrameSide.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHistSide, MAX_DEPTH, depthFrameSide);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrameSide.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();

		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrameSide.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrameSide.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrameSide.getStrideInBytes() / sizeof(openni::DepthPixel);
        
		for (int y = 0; y < height; ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrameSide.getCropOriginX();

			for (int x = 0; x < width; ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;

						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
// 					else if ((*pDepth / 10) % 10 == 0)
// 					{
// 						factor[0] = factor[2] = 0;
// 					}

					int nHistValue = m_pDepthHistSide[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}
    
    const openni::DepthPixel *imgBufferSide = (const openni::DepthPixel *)depthFrameSide.getData();
    const openni::DepthPixel *imgBufferTop = (const openni::DepthPixel *)depthFrameTop.getData();
    calculateHistogram(m_pDepthHistTop, MAX_DEPTH, depthFrameTop);
    imgTop = Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_8UC3);
    Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_16U, (void *)imgBufferTop).convertTo(depthTop, CV_8U, 1.0/256);
    
    for (int i = 0; i < imgTop.rows; i++) {
        for (int j = 0; j < imgTop.cols; j++) {
            int val = (int)m_pDepthHistTop[imgBufferTop[j + i*imgTop.cols]];
            imgTop.at<Vec3b>(i, j).val[0] = val;
            imgTop.at<Vec3b>(i, j).val[1] = val;
            imgTop.at<Vec3b>(i, j).val[2] = val;
        }
    }
    
    if (g_getBackground)
        bgSubtractor->processImages(depthTop);
    bgSubtractor->getMask(depthTop, mask);
    imshow("Mask", mask);

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

    // 320x240
	g_nXRes = depthFrameSide.getVideoMode().getResolutionX();
	g_nYRes = depthFrameSide.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
    float maxSize = -1;
    int maxIdx = -1;
    
    for (int i = 0; i < users.getSize(); ++i) {
        const nite::UserData &user = users[i];
        
        if (!user.isVisible())
            continue;
        
        if (getSize(user) > maxSize) {
            maxSize = getSize(user);
            maxIdx = i;
        }
        //printf("user %d: size=%f\n, lost=%d, new=%d, visible=%d\n",
        //       i, getSize(user), user.isLost(), user.isNew(), user.isVisible());
    }
    
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData &user = users[i];

		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			if (g_drawStatusLabel) {
				DrawStatusLabel(m_pUserTracker, user);
			}
            
            if (g_drawCenterOfMass) {
				DrawCenterOfMass(m_pUserTracker, user);
			}
            
			if (g_drawBoundingBox) {
				DrawBoundingBox(user);
			}

			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton) {
                if (maxIdx == i) {
                    DrawSkeleton(m_pUserTracker, user);
                    sideSkel.setTo(Scalar(0, 0, 0));
                    drawSkeleton(sideSkel, sideJoints);
                    topSkel.setTo(Scalar(0, 0, 0));
                    drawSkeleton(topSkel, topJoints);
                    drawSkeleton(imgTop, topJoints);
                }
			}
		}

        // exit the program after a few seconds if PoseType == POSE_CROSSED_HANDS
		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second(s) to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

    imshow("Side", sideSkel);
    imshow("Top", topSkel);
    imshow("DepthTop", imgTop);
    
    if (!g_getBackground) {
        knnsearch(topJoints, imgBufferTop, mask, labelTop, label, 320, 240);
        drawSkeleton(labelTop, topJoints);
        cv::resize(labelTop, labelTop, Size(), 2, 2);
        imshow("Label", labelTop);
        
        if (g_capture2) {
            // c style
            string path = outDir + "/depth-top" + to_string(nFrame) + ".txt";
            FILE *f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%u\n", imgBufferTop[i]);
            }
            fclose(f);
            
            path = outDir + "/depth-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%u\n", imgBufferSide[i]);
            }
            fclose(f);
            
            path = outDir + "/joints-top" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < N_JOINTS; i++) {
                fprintf(f, "%f, %f, %f, %f, %f\n", topJoints[i][0], topJoints[i][1],
                        topJoints[i][2], topJoints[i][3], topJoints[i][4]);
            }
            fclose(f);
            
            path = outDir + "/joints-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < N_JOINTS; i++) {
                fprintf(f, "%f, %f, %f, %f, %f\n", sideJoints[i][0], sideJoints[i][1],
                        sideJoints[i][2], sideJoints[i][3], sideJoints[i][4]);
            }
            fclose(f);

            path = outDir + "/label-top" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%d\n", label[i]);
            }
            fclose(f);
            
            path = outDir + "/label-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            const nite::UserId* labelsTop = userLabels.getPixels();
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%d\n", (int)labelsTop[i]);
            }
            fclose(f);
            
            nFrame++;
        }
    }
    
    // Swap the OpenGL display buffers
	glutSwapBuffers();
}
Example 14
void SampleViewer::Display()
{
	nite::UserTrackerFrameRef userTrackerFrame;
	openni::VideoFrameRef depthFrame;
	nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	depthFrame = userTrackerFrame.getDepthFrame();
	m_colorStream.readFrame(&m_colorFrame);

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -2000.0, 2000.0);

	if (depthFrame.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	// check if we need to draw image frame to texture
	if (m_eViewState == DISPLAY_MODE_IMAGE && m_colorFrame.isValid())
	{
		const openni::RGB888Pixel* pImageRow = (const openni::RGB888Pixel*)m_colorFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + m_colorFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = m_colorFrame.getStrideInBytes() / sizeof(openni::RGB888Pixel);

		for (int y = 0; y < m_colorFrame.getHeight(); ++y)
		{
			const openni::RGB888Pixel* pImage = pImageRow;
			openni::RGB888Pixel* pTex = pTexRow + m_colorFrame.getCropOriginX();

			for (int x = 0; x < m_colorFrame.getWidth(); ++x, ++pImage, ++pTex)
			{
				*pTex = *pImage;
			}

			pImageRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrame.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();

		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;

						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
// 					else if ((*pDepth / 10) % 10 == 0)
// 					{
// 						factor[0] = factor[2] = 0;
// 					}

					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	g_nXRes = depthFrame.getVideoMode().getResolutionX();
	g_nYRes = depthFrame.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData& user = users[i];

		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			if (g_drawStatusLabel)
			{
				DrawStatusLabel(m_pUserTracker, user);
			}
			if (g_drawCenterOfMass)
			{
				DrawCenterOfMass(m_pUserTracker, user);
			}
			if (g_drawBoundingBox)
			{
				DrawBoundingBox(user);
			}

			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
			{
				DrawSkeleton(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawHat)
			{
				DrawHat(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawCube)
			{
				DrawCube(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawCubeFront)
			{
				DrawCubeFront(m_pUserTracker, user);
			}
		}

		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second(s) to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

	// Swap the OpenGL display buffers
	glutSwapBuffers();

}