Code Example #1
File: CameraDevice.cpp Project: animecomico/kth-rgbd
// -----------------------------------------------------------------------------------------------------
//  generateFrame
// -----------------------------------------------------------------------------------------------------
bool CameraDevice::generateFrame(IplImage* imgRGB, IplImage* imgDepth)
{
    XnStatus nRetVal = XN_STATUS_OK;
	const XnDepthPixel* pDepthMap = NULL;
	const XnRGB24Pixel* pImageMap = NULL;
	
	xnFPSMarkFrame(&g_xnFPS);
	nRetVal = g_context.WaitAndUpdateAll();
	if (nRetVal==XN_STATUS_OK)
	{
		g_depth.GetMetaData(g_depthMD);
		g_image.GetMetaData(g_imageMD);

		pDepthMap = g_depthMD.Data();
		pImageMap = g_image.GetRGB24ImageMap();

		printf("Frame %02d (%dx%d) Depth at middle point: %u. FPS: %f\r",
				g_depthMD.FrameID(),
				g_depthMD.XRes(),
				g_depthMD.YRes(),
				g_depthMD(g_depthMD.XRes()/2, g_depthMD.YRes()/2),
				xnFPSCalc(&g_xnFPS));

		// convert to OpenCV buffers
		convertImageRGB(pImageMap, imgRGB);
		convertImageDepth(pDepthMap, imgDepth);

		return true;
	}
	return false;
}
Code Example #2
File: CameraDevice.cpp Project: animecomico/kth-rgbd
// -----------------------------------------------------------------------------------------------------
//  savePointCloud
// -----------------------------------------------------------------------------------------------------
int savePointCloud(
		const XnRGB24Pixel* pImageMap,
		const XnDepthPixel* pDepthMap,
		IplImage* pImgDepth,
		int frameID,
		bool savePointCloud)
{
	float focalInv = 0.001 / Config::_FocalLength;
	unsigned int rgb;
	int depth_index = 0;
	int ImageCenterX = g_depthMD.XRes() >> 1;	// divide by 2
	int ImageCenterY = g_depthMD.YRes() >> 1;
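	// back-project each depth sample through the pinhole model:
	// X = (u - cx) * Z / f, Y = (cy - v) * Z / f; focalInv folds the
	// mm-to-metre conversion (0.001) into 1/f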
	for (int ind_y =0; ind_y < g_depthMD.YRes(); ind_y++)
	{
		for (int ind_x=0; ind_x < g_depthMD.XRes(); ind_x++, depth_index++)
		{
			pcl::PointXYZRGB& pt = g_cloudPointSave(ind_x,ind_y);

			if (pDepthMap[depth_index] == g_noSampleValue ||
				pDepthMap[depth_index] == g_shadowValue ||
				pDepthMap[depth_index] == 0 ){

				pt.x = bad_point;
				pt.y = bad_point;
				pt.z = bad_point;
			}
			else
			{
				// locate point in meters
				pt.x = (ind_x - ImageCenterX) * pDepthMap[depth_index] * focalInv;
				pt.y = (ImageCenterY - ind_y) * pDepthMap[depth_index] * focalInv;
				pt.z = pDepthMap[depth_index] * 0.001 ; // depth values are given in mm
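				// pack R, G, B into a single 32-bit word and store its bit pattern
				// in the float rgb field (PCL's packed-colour convention)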
				rgb = (((unsigned int)pImageMap[depth_index].nRed) << 16) |
					  (((unsigned int)pImageMap[depth_index].nGreen) << 8) |
					  ((unsigned int)pImageMap[depth_index].nBlue);
				pt.rgb = *reinterpret_cast<float*>(&rgb);
			}
		}
	}

	char buf[256];
	sprintf(buf, "%s/cloud%d.pcd", Config::_PathDataProd.c_str(), frameID);
	pcl::io::savePCDFile(buf, g_cloudPointSave, true);
	// bug in PCL - the binary file is not created with the good permissions!
	char bufsys[256];
	sprintf(bufsys, "chmod a+rw %s", buf);
	system(bufsys);
	return 0;
}
Code Example #3
int main()
{
	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	nRetVal = context.Init();
	CHECK_RC(nRetVal, "Initialize context");

	DepthGenerator depth;
	nRetVal = depth.Create(context);
	CHECK_RC(nRetVal, "Create depth generator");

	nRetVal = context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGeneratingAll");

	DepthMetaData depthMD;
	while (!xnOSWasKeyboardHit())
	{
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
			continue;
		}

		depth.GetMetaData(depthMD);
		const XnDepthPixel* pDepthMap = depthMD.Data();

		printf("Frame %d Middle point is: %u.\n", depthMD.FrameID(), depthMD(depthMD.XRes() / 2, depthMD.YRes() / 2));
	}

	context.Shutdown();

	return 0;
}
Code Example #4
XnStatus prepare(char useScene, char useDepth, char useHistogram)
{
//TODO handle possible failures!
	if (useDepth)
	{
		mDepthGen.GetMetaData(depthMD);
		nXRes = depthMD.XRes();
		nYRes = depthMD.YRes();

		pDepth = depthMD.Data();

		if (useHistogram)
		{
			calcHist();

			// rewind the pointer
			pDepth = depthMD.Data();
		}
	}
	if (useScene) 
	{
		mUserGen.GetUserPixels(0, sceneMD);
		nXRes = sceneMD.XRes();
		nYRes = sceneMD.YRes();

		pLabels = sceneMD.Data();
	}
	return XN_STATUS_OK;
}
Code Example #5
File: main_bk1.cpp Project: horsewin/ARMicroMachines
void inpaintDepth(DepthMetaData *niDepthMD, bool halfSize) {
	IplImage *depthIm, *depthImFull;
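	// Fill holes (zero-depth pixels): build a mask of missing values, scale the
	// valid range to 8 bits, run cvInpaint (Navier-Stokes), and convert the result
	// back to 16-bit depth. With halfSize the work is done on a downsampled copy
	// and written back into the full-resolution buffer.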
	
	if (halfSize) {
		depthImFull = cvCreateImage(cvSize(niDepthMD->XRes(), niDepthMD->YRes()), IPL_DEPTH_16U, 1);
		depthImFull->imageData = (char*)niDepthMD->WritableData();
		depthIm = cvCreateImage(cvSize(depthImFull->width/4.0, depthImFull->height/4.0), IPL_DEPTH_16U, 1);
		cvResize(depthImFull, depthIm, 0);
	} else {
		depthIm = cvCreateImage(cvSize(niDepthMD->XRes(), niDepthMD->YRes()), IPL_DEPTH_16U, 1);
		depthIm->imageData = (char*)niDepthMD->WritableData();
	}
	
	IplImage *depthImMask = cvCreateImage(cvGetSize(depthIm), IPL_DEPTH_8U, 1);
	for (int y=0; y<depthIm->height; y++) {
		for (int x=0; x<depthIm->width; x++) {
			CV_IMAGE_ELEM(depthImMask, char, y, x)=CV_IMAGE_ELEM(depthIm, unsigned short,y,x)==0?255:0;
		}
	}

	IplImage *depthImMaskInv = cvCreateImage(cvGetSize(depthIm), IPL_DEPTH_8U, 1);
	cvNot(depthImMask, depthImMaskInv);

	double min, max; cvMinMaxLoc(depthIm, &min, &max, 0, 0, depthImMaskInv);
	
	IplImage *depthIm8 = cvCreateImage(cvGetSize(depthIm), IPL_DEPTH_8U, 1);
	float scale = 255.0/(max-min);
	cvConvertScale(depthIm, depthIm8, scale, -(min*scale));

	IplImage *depthPaint = cvCreateImage(cvGetSize(depthIm8), IPL_DEPTH_8U, 1);
	cvInpaint(depthIm8, depthImMask, depthPaint, 3, CV_INPAINT_NS);
	
	IplImage *depthIm16 = cvCreateImage(cvGetSize(depthIm), IPL_DEPTH_16U, 1);
	cvConvertScale(depthPaint, depthIm16, 1/scale, min);

	if (halfSize) {
		IplImage *depthPaintedFull = cvCreateImage(cvGetSize(depthImFull), IPL_DEPTH_16U, 1);
		cvResize(depthIm16, depthPaintedFull,0);
		IplImage *depthImMaskFull = cvCreateImage(cvGetSize(depthImFull), IPL_DEPTH_8U, 1);
		for (int y=0; y<depthImFull->height; y++) for (int x=0; x<depthImFull->width; x++)
			CV_IMAGE_ELEM(depthImMaskFull, char, y, x)=CV_IMAGE_ELEM(depthImFull, unsigned short,y,x)==0?255:0;
		cvCopy(depthPaintedFull, depthImFull, depthImMaskFull);
		cvReleaseImage(&depthPaintedFull); cvReleaseImage(&depthImMaskFull);
		cvReleaseImage(&depthImFull);
	} else {
		// (the original listing is cut off here; minimal completion, assuming the
		// in-place case mirrors the halfSize branch) depthIm wraps the OpenNI
		// buffer directly, so write the inpainted values back over the holes only
		cvCopy(depthIm16, depthIm, depthImMask);
	}

	cvReleaseImage(&depthImMask); cvReleaseImage(&depthImMaskInv);
	cvReleaseImage(&depthIm8); cvReleaseImage(&depthPaint);
	cvReleaseImage(&depthIm16); cvReleaseImage(&depthIm);
}
Code Example #6
File: NiSimpleRead.cpp Project: nixz/OpenNI
int main()
{
    XnStatus nRetVal = XN_STATUS_OK;

    Context context;
    EnumerationErrors errors;

    nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);

    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    DepthGenerator depth;
    nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
    CHECK_RC(nRetVal, "Find depth generator");

    XnFPSData xnFPS;
    nRetVal = xnFPSInit(&xnFPS, 180);
    CHECK_RC(nRetVal, "FPS Init");

    DepthMetaData depthMD;

    while (!xnOSWasKeyboardHit())
    {
        nRetVal = context.WaitOneUpdateAll(depth);
        if (nRetVal != XN_STATUS_OK)
        {
            printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
            continue;
        }

        xnFPSMarkFrame(&xnFPS);

        depth.GetMetaData(depthMD);
        const XnDepthPixel* pDepthMap = depthMD.Data();

        printf("Frame %d Middle point is: %u. FPS: %f\n", depthMD.FrameID(), depthMD(depthMD.XRes() / 2, depthMD.YRes() / 2), xnFPSCalc(&xnFPS));
    }

    context.Shutdown();

    return 0;
}
Code Example #7
File: CameraDevice.cpp Project: animecomico/kth-rgbd
// -----------------------------------------------------------------------------------------------------
//  convertImageDepth
// -----------------------------------------------------------------------------------------------------
void convertImageDepth(const XnDepthPixel* pDepthMap, IplImage* pImgDepth)
{
	// convert from OpenNI buffer to IplImage
	// Save only the Z value per pixel as an image for quick visualization of depth
	for(unsigned int i=0; i<g_depthMD.XRes()*g_depthMD.YRes(); i++)
	{
		// depth pixels on 16 bits (11 effective bits)
		//short depthValue = pDepthMap[i]/16;	// for quick look only
		pImgDepth->imageData[3*i+0]=(unsigned char)(pDepthMap[i]>>8);
		pImgDepth->imageData[3*i+1]=(unsigned char)(pDepthMap[i] & 0xFF);
		pImgDepth->imageData[3*i+2]=0;
		//pImgDepth->imageData[i] = pDepthMap[i];
	}
}
Code Example #8
File: main_bk1.cpp Project: horsewin/ARMicroMachines
void loadKinectTransform(char *filename) {
	CvFileStorage* fs = cvOpenFileStorage( filename, 0, CV_STORAGE_READ );
	if (fs!=0) {
		CvSeq *s = cvGetFileNodeByName(fs, 0, "MarkerSize")->data.seq;
		markerSize.width = cvReadInt((CvFileNode*)cvGetSeqElem(s, 0));
		markerSize.height = cvReadInt((CvFileNode*)cvGetSeqElem(s, 1));

		s = cvGetFileNodeByName(fs, 0, "MarkerOrigin")->data.seq;
		marker_origin.x = cvReadInt((CvFileNode*)cvGetSeqElem(s, 0));
		marker_origin.y = cvReadInt((CvFileNode*)cvGetSeqElem(s, 1));
		setWorldOrigin();
		WORLD_SCALE = cvReadRealByName(fs, 0, "WorldScale", 1);
		WORLD_ANGLE = cvReadRealByName(fs, 0, "WorldAngle", 0);
		MARKER_DEPTH = cvReadRealByName(fs, 0, "MARKER_DEPTH", 0);

		CvFileNode* fileparams = cvGetFileNodeByName( fs, NULL, "KinectTransform" );
		kinectTransform = (CvMat*)cvRead( fs, fileparams );
		cvReleaseFileStorage( &fs );

		if (niContext.WaitAnyUpdateAll() == XN_STATUS_OK) {
			//Load in the marker for registration
			osg_inittracker(MARKER_FILENAME, 400, markerSize.width);

			m_world->setWorldDepth(MARKER_DEPTH);
			m_world->setWorldScale(WORLD_SCALE);
			setOSGTrimeshScale(WORLD_SCALE);

			g_depth.GetMetaData(niDepthMD);
			inpaintDepth(&niDepthMD, true);
			depthIm = cvCreateImage(cvSize(niDepthMD.XRes(), niDepthMD.YRes()), IPL_DEPTH_16U, 1);
			transDepth160 = cvCreateImage(cvSize(MESH_SIZE.width, MESH_SIZE.height), IPL_DEPTH_32F, 1);
			memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);	

			TransformDepth(depthIm, transDepth160, MARKER_DEPTH, MESH_SIZE);
			GenerateTrimeshGroundFromDepth(transDepth160, MARKER_DEPTH);
			m_world->updateTrimesh(ground_grid);
			m_world->setMinHeight(MinHeight);
			m_world->setMaxHeight(MaxHeight);
			m_world->initPhysics();
#ifdef SIM_PARTICLES
			CreateOSGSphereProxy();//osg spheres representation
#endif
#ifdef SIM_MICROMACHINE
			m_world->resetCarScene(0);
			m_world->resetCarScene(1);
#endif /*SIM_MICROMACHINE*/
		}
	}
}
Code Example #9
XnStatus prepare(char useScene, char useDepth, char useImage, char useIr, char useHistogram)
{
//TODO handle possible failures! Gotcha!
	if (useDepth)
	{
		mDepthGen.GetMetaData(depthMD);
		nXRes = depthMD.XRes();
		nYRes = depthMD.YRes();

		pDepth = depthMD.Data();

		if (useHistogram)
		{
			calcHist();

			// rewind the pointer
			pDepth = depthMD.Data();
		}
	}
	if (useScene) 
	{
		mUserGen.GetUserPixels(0, sceneMD);
		nXRes = sceneMD.XRes();
		nYRes = sceneMD.YRes();

		pLabels = sceneMD.Data();
	}
	if (useImage)
	{
		mImageGen.GetMetaData(imageMD);
		nXRes = imageMD.XRes();
		nYRes = imageMD.YRes();

		pRGB = imageMD.RGB24Data();
		// HISTOGRAM?????
	}
	if (useIr)
	{
		mIrGen.GetMetaData(irMD);
		nXRes = irMD.XRes();
		nYRes = irMD.YRes();

		pIR = irMD.Data();
		// HISTOGRAM????
	}
	return XN_STATUS_OK;
}
Code Example #10
File: NiSimpleViewer.cpp Project: sledzias/libcvd-cl
void takePhoto() {
    static int index = 1;
    char fname[256] = {0,};
    sprintf(fname, "kinect%03d.txt", index++);

    g_depth.GetMetaData(g_depthMD);
    g_image.GetMetaData(g_imageMD);

    int const nx = g_depthMD.XRes();
    int const ny = g_depthMD.YRes();
    assert(nx == g_imageMD.XRes());
    assert(ny == g_imageMD.YRes());

    const XnDepthPixel* pDepth = g_depthMD.Data();
    const XnUInt8* pImage = g_imageMD.Data();

    FILE * file = fopen(fname, "wb");
    fprintf(file, "%d\n%d\n\n", nx, ny);
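    // one text line per pixel follows: R, G, B and the 16-bit depth value, row-major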

    for (int y = 0, di = 0, ri = 0, gi = 1, bi = 2; y < ny; y++) {
        for (int x = 0; x < nx; x++, di++, ri += 3, gi += 3, bi += 3) {
            int const r = pImage[ri];
            int const g = pImage[gi];
            int const b = pImage[bi];
            int const d = pDepth[di];

            assert(r >= 0);
            assert(g >= 0);
            assert(b >= 0);
            assert(d >= 0);

            assert(r <= 0xFF);
            assert(g <= 0xFF);
            assert(b <= 0xFF);
            assert(d <= 0xFFFF);

            fprintf(file, "%3d %3d %3d %5d\n", r, g, b, d);
        }

        fprintf(file, "\n");
    }

    fflush(file);
    fclose(file);
}
Code Example #11
WorldRenderer::WorldRenderer(RenderingContext* rctx, DepthGenerator* depthGen, ImageGenerator* imageGen,
							 BallManager* ball_manager)
: AbstractOpenGLRenderer(rctx)
{
	m_depthGen = depthGen;
	m_imageGen = imageGen;
	//m_henshinDetector = henshinDetector;
	m_ball_manager = ball_manager;

	DepthMetaData dmd;
	m_depthGen->GetMetaData(dmd);
	m_width = dmd.XRes();
	m_height = dmd.YRes();

	// allocate working buffers
	XnUInt32 numPoints = getNumPoints();
	m_vertexBuf = new M3DVector3f[numPoints];
	m_colorBuf = new M3DVector4f[numPoints];

	// pre-set values on working buffers
	M3DVector3f* vp = m_vertexBuf;
	M3DVector4f* cp = m_colorBuf;
	for (XnUInt32 iy = 0; iy < m_height; iy++) {
		for (XnUInt32 ix = 0; ix < m_width; ix++) {
			(*vp)[0] = normalizeX(float(ix));
			(*vp)[1] = normalizeY(float(iy));
			(*vp)[2] = 0;
			vp++;
			(*cp)[0] = (*cp)[1] = (*cp)[2] = 0;
			(*cp)[3] = 1; // alpha is always 1.0
			cp++;
		}
	}

	m_batch.init(numPoints);

	m_depthAdjustment = DEFAULT_DEPTH_ADJUSTMENT;
}
Code Example #12
File: glinit.cpp Project: msdark/Kinect-Counter
void glutDisplay (void){
    glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // Setup the OpenGL viewpoint
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    SceneMetaData sceneMD;
    DepthMetaData depthMD;
    ImageMetaData imageMD;
    g_DepthGenerator.GetMetaData(depthMD);
    glOrtho(0, depthMD.XRes(), depthMD.YRes(), 0, -1.0, 1.0);
    glDisable(GL_TEXTURE_2D);
    //XnStatus rc = g_Context.WaitOneUpdateAll(g_DepthGenerator);
    XnStatus rc = g_Context.WaitAnyUpdateAll();
    CHECK_RC("Wait Data",rc);
    g_DepthGenerator.GetMetaData(depthMD);
    if(g_UserGenerator.IsValid())
        g_UserGenerator.GetUserPixels(0, sceneMD);
    g_ImageGenerator.GetMetaData(imageMD);

    DrawDepthMap(depthMD, sceneMD);
    DrawImage(imageMD);
    glutSwapBuffers();
}//glutdisplay
Code Example #13
File: main.cpp Project: horsewin/ARMicroMachines
//////////////////// Entry point //////////////////// 
int main(int argc, char* argv[]) 
{
	depthmask_for_mesh = cvCreateImage(MESH_SIZE, IPL_DEPTH_8U, 1);
	markerSize.width = -1; 
	markerSize.height = -1;

  //init OpenNI
  EnumerationErrors errors;
	switch (XnStatus rc = niContext.InitFromXmlFile(KINECT_CONFIG_FILENAME, &errors)) {
		case XN_STATUS_OK:
			break;
		case XN_STATUS_NO_NODE_PRESENT:
			XnChar strError[1024];	errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return rc; break;
		default:
			printf("Open failed: %s\n", xnGetStatusString(rc));
			return rc;
	}

  //set camera parameter
  capture = new Camera(0, CAPTURE_SIZE, CAMERA_PARAMS_FILENAME);
	RegistrationParams = scaleParams(capture->getParameters(), double(REGISTRATION_SIZE.width)/double(CAPTURE_SIZE.width));

  //init parameter for rendering
  osg_init(calcProjection(RegistrationParams, capture->getDistortion(), REGISTRATION_SIZE));

  //for Kinect view
  loadKinectParams(KINECT_PARAMS_FILENAME, &kinectParams, &kinectDistort);
	kinectDistort =0;
	kinectParams->data.db[2]=320.0; 
	kinectParams->data.db[5]=240.0;

	//setting kinect context
	niContext.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	niContext.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	g_depth.GetMirrorCap().SetMirror(false);
	g_depth.GetAlternativeViewPointCap().SetViewPoint(g_image);

	//registration
	kinectReg = new RegistrationOPIRA(new OCVSurf());
	kinectReg->addResizedMarker(MARKER_FILENAME, 400);

	//physics
	m_world = new bt_ARMM_world();
	ground_grid = new float[GRID_SIZE];
	for (int i =0;i < GRID_SIZE; i++) {
		ground_grid[i] = 0; 
	}
#ifdef SIM_PARTICLES
	voxel_grid = new float[1200];
	for (int i =0;i < 1200; i++) {
		voxel_grid[i] = 0;
	}
#endif

	//controls
	KeyboardController *kc = new KeyboardController(m_world);
	XboxController *xc = new XboxController(m_world);

	loadKinectTransform(KINECT_TRANSFORM_FILENAME);

#ifdef USE_ARMM_VRPN
	//----->Server part
	m_Connection = new vrpn_Connection_IP();
	ARMM_server = new ARMM_Communicator(m_Connection	);

	//Open the imager server and set up channel zero to send our data.
	//if ( (ARMM_img_server = new vrpn_Imager_Server("ARMM_Image", m_Connection, MESH_SIZE.width, MESH_SIZE.height)) == NULL) {
	//	fprintf(stderr, "Could not open imager server\n");
	//	return -1;
	//}
	//if ( (channel_id = ARMM_img_server->add_channel("Grid")) == -1) {
	//	fprintf(stderr, "Could not add channel\n");
	//	return -1;
	//}
	ARMM_server->SetObjectsData(&(m_world->Objects_Body));
	ARMM_server->SetHandsData(&(m_world->HandObjectsArray));

  cout << "Created VRPN server." << endl;
	//<-----
#ifdef USE_ARMM_VRPN_RECEIVER 	//----->Receiver part
	ARMM_sever_receiver = new vrpn_Tracker_Remote (ARMM_CLIENT_IP);
	ARMM_sever_receiver->register_change_handler(NULL, handle_object);
#endif 	//<----- 

#endif

#ifdef USE_SKIN_SEGMENTATION	//Skin color look up
	_HandRegion.LoadSkinColorProbTable();
#endif

#ifdef USE_OPTICAL_FLOW
	prev_gray = cvCreateImage(cvSize(OPFLOW_SIZE.width, OPFLOW_SIZE.height), IPL_DEPTH_8U, 1);
	curr_gray = cvCreateImage(cvSize(OPFLOW_SIZE.width, OPFLOW_SIZE.height), IPL_DEPTH_8U, 1);
	flow_capture = new FlowCapture();
	flow_capture->Init();
#endif

/////////////////////////////////////////////Main Loop////////////////////////////////////////////////
	while (running) {
    //start kinect
		XnStatus rc = niContext.WaitAnyUpdateAll();
		if (rc != XN_STATUS_OK) {
			printf("Read failed: %s\n", xnGetStatusString(rc));
			return rc;
		}

    //get image and depth data from Kinect
		g_depth.GetMetaData(niDepthMD);
		g_image.GetMetaData(niImageMD);

		colourIm = cvCreateImage(cvSize(niImageMD.XRes(), niImageMD.YRes()), IPL_DEPTH_8U, 3);
		memcpy(colourIm->imageData, niImageMD.Data(), colourIm->imageSize); cvCvtColor(colourIm, colourIm, CV_RGB2BGR);
		cvFlip(colourIm, colourIm, 1);

		depthIm = cvCreateImage(cvSize(niDepthMD.XRes(), niDepthMD.YRes()), IPL_DEPTH_16U, 1);
		transDepth160 = cvCreateImage(cvSize(MESH_SIZE.width, MESH_SIZE.height), IPL_DEPTH_32F, 1);
		transDepth320 = cvCreateImage(cvSize(SKIN_SEGM_SIZE.width, SKIN_SEGM_SIZE.height), IPL_DEPTH_32F, 1);
		transColor320 = cvCreateImage(cvSize(SKIN_SEGM_SIZE.width, SKIN_SEGM_SIZE.height), IPL_DEPTH_8U, 3);
		memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);	
		//cvCircle(colourIm, cvPoint(marker_origin.x,marker_origin.y), 5, CV_BLUE, 3);
		cvShowImage("Kinect View", colourIm);
		IplImage *arImage = capture->getFrame();
		cvWaitKey(1); 

		//check input device 
		input_key = kc->check_input(); 
#ifdef USE_ARMM_VRPN_RECEIVER
		if( pass_key != 0){
			kc->check_input(pass_key);
			pass_key = 0;
		}
#endif
		xc->check_input();

		if(kinectTransform) { // kinect transform as cvmat* for use
			if( counter >= SIM_FREQUENCY) {
#ifdef UPDATE_TRIMESH
				inpaintDepth(&niDepthMD, true); 
				memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);				
				TransformImage(depthIm, transDepth160, MARKER_DEPTH, MESH_SIZE, true);
				GenerateTrimeshGroundFromDepth(transDepth160, MARKER_DEPTH); /*Trimesh generation*/
				m_world->updateTrimeshRefitTree(ground_grid);//opencl?
				osg_UpdateHeightfieldTrimesh(ground_grid);//opencl?
#endif

#ifdef SIM_PARTICLES
/*World spheres simulation*/
//				GenerateVoxelFromDepth(depthIm, MARKER_DEPTH);
//				m_world->updateWorldSphereTransform(voxel_grid);
//				osgUpdateWorldSphereTransform(voxel_grid);
#endif
				counter = 0;
			} else {
#ifdef USE_SKIN_SEGMENTATION /*Skin color segmentation*/ // may be reduce resolution first as well as cut off depth make processing faster
				// (2)Sphere representation
				FindHands(depthIm, colourIm);
				UpdateAllHands();
#endif

#ifdef USE_PARTICLES
//XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
#endif
				counter++;
			}
			//do hand pose recognition
			m_world->Update();
			//(B)normal client only rendering
			RenderScene(arImage, capture);
		}
//		TickCountAverageEnd();
#ifdef USE_ARMM_VRPN
	//Send Car position+orientation			
	ARMM_server->mainloop();
#ifdef USE_ARMM_VRPN_RECEIVER
	ARMM_sever_receiver->mainloop();
#endif
	////Copy depth info
	//for (int i = 0; i < GRID_SIZE;i++) {
	//	ARMM_img_buffer[i] = ground_grid[i];
	//}

	//Send depth grid info	
	//ARMM_img_server->send_begin_frame(0, MESH_SIZE.width-1, 0, MESH_SIZE.height-1);
 //   ARMM_img_server->mainloop();
 //   int nRowsPerRegion= ((int) vrpn_IMAGER_MAX_REGIONf32)/ MESH_SIZE.width;
 //   for(int y=0; y<MESH_SIZE.height; y+=nRowsPerRegion) {
 //     ARMM_img_server->send_region_using_base_pointer(channel_id,0,MESH_SIZE.width-1,y,min(MESH_SIZE.width,y+nRowsPerRegion)-1, ARMM_img_buffer, 1, MESH_SIZE.width, MESH_SIZE.height);
 //     ARMM_img_server->mainloop();
 //   }
 //   ARMM_img_server->send_end_frame(0, MESH_SIZE.width-1, 0, MESH_SIZE.height-1);
 //   ARMM_img_server->mainloop();
	//Exec data transmission
	m_Connection->mainloop();
#endif

#ifdef USE_OPTICAL_FLOW
		if(!RunOnce) RunOnce = true;
		cvCopyImage(curr_gray, prev_gray);
#endif

		cvReleaseImage(&arImage);
		cvReleaseImage(&depthIm); 
		cvReleaseImage(&colourIm);
		cvReleaseImage(&transDepth160);
#ifdef USE_SKIN_SEGMENTATION
		cvReleaseImage(&transDepth320);
		cvReleaseImage(&transColor320);
#endif
	}
#ifdef USE_OPTICAL_FLOW
	cvReleaseImage(&prev_gray); cvReleaseImage(&curr_gray);
#endif

	//memory release
	osg_uninit();
	delete m_world;
	delete kinectReg;
	cvReleaseMat(&RegistrationParams);
	delete kc;
	delete xc;

	return 0;
}
Code Example #14
int main()
{
	XnStatus nRetVal = XN_STATUS_OK;

	Context context;
	ScriptNode scriptNode;
	EnumerationErrors errors;

    XnUInt32 min_z1, min_z2, min_z3, maxGrad, distVal;

	const char *fn = NULL;
	if	(fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
	else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
	else {
		printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
		return XN_STATUS_ERROR;
	}
	printf("Reading config from: '%s'\n", fn);
	nRetVal = context.InitFromXmlFile(fn, scriptNode, &errors);

	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	DepthGenerator depth;
	nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
	CHECK_RC(nRetVal, "Find depth generator");

	XnFPSData xnFPS;
	nRetVal = xnFPSInit(&xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init");

	DepthMetaData depthMD;

	//Initialize WiringPi
	if(wiringPiSetup() == -1)
		exit(1);

	//Enable SoftPWM on pin 1,2 and 3
	softPwmCreate(1, 0, RANGE);
	softPwmCreate(2, 0, RANGE);
	softPwmCreate(3, 0, RANGE);

	while (!xnOSWasKeyboardHit())
	{
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
			continue;
		}

		xnFPSMarkFrame(&xnFPS);

		depth.GetMetaData(depthMD);
		const XnDepthPixel* pDepthMap = depthMD.Data();
		int XRes = depthMD.XRes();
		int YRes = depthMD.YRes();

		//To find closest pixel value in Zone 1, Zone 2 and Zone 3
		min_z1    = getClosestPixel(  0        , 0, (XRes / 2)    , YRes, depthMD);
		min_z2    = getClosestPixel( (XRes / 4), 0, (3 * XRes / 4), YRes, depthMD);
		min_z3    = getClosestPixel( (XRes / 2), 0,  XRes         , YRes, depthMD);

		double in_low = 600;
		double in_high = 2000;
		double in_diff = in_high - in_low;
		double out_low = 51;
		double out_high = 973;
		double out_diff = out_high - out_low;
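		// map each zone's closest distance to a duty-cycle value with a cubic curve:
		// pwm = out_diff / in_diff^3 * (in_high - d)^3 + out_low, so nearer objects
		// produce a higher duty cycle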

		distVal = min_z1;
		XnUInt32 pwm_val1 = ( (out_diff) / ((in_diff)*(in_diff)*(in_diff)) ) * ((in_high - distVal) * (in_high - distVal) * (in_high - distVal)) + out_low;
		distVal = min_z2;
		XnUInt32 pwm_val2 = ( (out_diff) / ((in_diff)*(in_diff)*(in_diff)) ) * ((in_high - distVal) * (in_high - distVal) * (in_high - distVal)) + out_low;
		distVal = min_z3;
		XnUInt32 pwm_val3 = ( (out_diff) / ((in_diff)*(in_diff)*(in_diff)) ) * ((in_high - distVal) * (in_high - distVal) * (in_high - distVal)) + out_low;

		// Zone 1 - Left side (pin )

		if (pwm_val1 < out_low)
			pwm_val1 = 0;  		 // if object too far, set DUTY CYCLE to 0
		if (min_z1 == 9000.0)
			pwm_val1 = out_high; //if object too close, set DUTY CYCLE to max (here, 95%)
		if (min_z1 < 600)
			pwm_val1 = out_high;

		// Zone 2 - Center (pin )

		if (pwm_val2 < out_low)
			pwm_val2 = 0;  		 // if object too far, set DUTY CYCLE to 0
		if (min_z2 == 9000.0)
			pwm_val2 = out_high; //if object too close, set DUTY CYCLE to max (here, 95%)
		if (min_z2 < 600)
			pwm_val2 = out_high;

		// Zone 3 - Right side (pin )

		if (pwm_val3 < out_low)
			pwm_val3 = 0;  		 // if object too far, set DUTY CYCLE to 0
		if (min_z3 == 9000.0)
			pwm_val3 = out_high; //if object too close, set DUTY CYCLE to max (here, 95%)
		if (min_z3 < 600)
			pwm_val3 = out_high;

		pwm_val1 = ((pwm_val1 - out_low) / (1.0 * out_diff)) * 100.0;
		pwm_val2 = ((pwm_val2 - out_low) / (1.0 * out_diff)) * 100.0;
		pwm_val3 = ((pwm_val3 - out_low) / (1.0 * out_diff)) * 100.0;

		softPwmWrite(1,(int)pwm_val1);
		softPwmWrite(2,(int)pwm_val2);
		softPwmWrite(3,(int)pwm_val3);

		if ( (depthMD.FrameID() % 30) == 0)
		{
			printf("Frame %d", depthMD.FrameID());
			printf("\n");

			printf("Zone 1 value is %u \t", pwm_val1);
			printf("Zone 2 value is %u \t", pwm_val2);
			printf("Zone 3 value is %u \n", pwm_val3);

			printf("Zone1 min_dis   %u \t", min_z1);
			printf("Zone2 min_dis   %u \t", min_z2);
			printf("Zone3 min_dis   %u \n", min_z3);


		//To find a gradient value for the floor
		//maxGrad = getGradient( 5, 0, (YRes/2) + 1, depthMD.XRes(), depthMD.YRes(), depthMD);
		//printf("Frame %d max gradient for Floor is: %u. FPS: %f\n\n", depthMD.FrameID(), maxGrad, xnFPSCalc(&xnFPS));
		}

	}

	softPwmWrite(1,0);
	softPwmWrite(2,0);
	softPwmWrite(3,0);

	//release the nodes
	depth.Release();
	scriptNode.Release();
	context.Release();



	return 0;
}
Code Example #15
File: mxNiIRImage.cpp Project: lucasb24/4202eipi
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
	mxArray *tmpContext;
	mxArray *tmpIR, *tmpIRMD;
	mxArray *tmpDepth, *tmpDepthMD;
	mxArray *context_initialised_array;
	mxArray *has_ir_node_array;
	mxArray *has_depth_node_array;

	Context* context;

	IRGenerator* ir;
	IRMetaData* irMD;

	DepthGenerator* depth;
	DepthMetaData* depthMD;

	unsigned short* output_ir = 0;
	unsigned short* output_depth = 0;

	mwSize dims2_ir[2];
	mwSize dims2_depth[2];
	
	bool context_initialised = false;
	bool has_ir_node = false;
	bool has_depth_node = false;

    //---------------------------------
    // Read input variables
    //---------------------------------
    // ni_context_obj
	tmpContext = mxGetField(prhs[0], 0, "ni_context_obj");
    memcpy((void*)&context, mxGetPr(tmpContext), sizeof(Context*));

    // ir_obj
	tmpIR = mxGetField(prhs[0], 0, "ir_obj");
    memcpy((void*)&ir, mxGetPr(tmpIR), sizeof(IRGenerator*));

    // irMD_obj
	tmpIRMD = mxGetField(prhs[0], 0, "irMD_obj");
    memcpy((void*)&irMD, mxGetPr(tmpIRMD), sizeof(IRMetaData*));

    // depth_obj
	tmpDepth = mxGetField(prhs[0], 0, "depth_obj");
    memcpy((void*)&depth, mxGetPr(tmpDepth), sizeof(DepthGenerator*));

    // depthMD_obj
	tmpDepthMD = mxGetField(prhs[0], 0, "depthMD_obj");
    memcpy((void*)&depthMD, mxGetPr(tmpDepthMD), sizeof(DepthMetaData*));

	// context_initialised
	context_initialised_array = mxGetField(prhs[0], 0, "context_initialised");
	context_initialised = mxGetScalar(context_initialised_array);

	// has_ir_node
	has_ir_node_array = mxGetField(prhs[0], 0, "has_ir_node");
	has_ir_node = mxGetScalar(has_ir_node_array);

	// has_depth_node
	has_depth_node_array = mxGetField(prhs[0], 0, "has_depth_node");
	has_depth_node = mxGetScalar(has_depth_node_array);

	//-------------------------------
	// Create output variables
	//-------------------------------	
	bool initialised = context_initialised;

	int ir_width = 640;
	int ir_height = 480;

	int depth_width = 640;
	int depth_height = 480;

	if(initialised){
		XnStatus rc;
		
		if(has_ir_node){
			ir->GetMetaData(*irMD);

			ir_width = irMD->XRes();
			ir_height = irMD->YRes();
		}

		if(has_depth_node){
			depth->GetMetaData(*depthMD);

			depth_width = depthMD->XRes();
			depth_height = depthMD->YRes();
		}
	} 

	dims2_ir[0] = ir_height;
	dims2_ir[1] = ir_width;
	plhs[0] = mxCreateNumericArray(2, dims2_ir, mxUINT16_CLASS, mxREAL);
	output_ir = (unsigned short*)mxGetPr(plhs[0]);

	if(nlhs >= 2){
		dims2_depth[0] = depth_height;
		dims2_depth[1] = depth_width;
		plhs[1] = mxCreateNumericArray(2, dims2_depth, mxUINT16_CLASS, mxREAL);
		output_depth = (unsigned short*)mxGetPr(plhs[1]);
	}
	
	if(!initialised) return;

	if(has_ir_node && output_ir != 0){
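		// OpenNI buffers are row-major while MATLAB arrays are column-major,
		// so transpose while copying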
		const XnIRPixel* pIR = irMD->Data();
	
		int ir_image_size = ir_width*ir_height;
		for(int i=0;i<ir_height;i++){
			for(int j = 0;j < ir_width;j++){
				output_ir[j*ir_height+i] = pIR[i*ir_width+j];
			}
		}
	}

	if(has_depth_node && output_depth != 0){
		const XnDepthPixel* pDepth = depthMD->Data();
	
		int depth_image_size = depth_width*depth_height;
		for(int i=0;i<depth_height;i++){
			for(int j = 0;j < depth_width;j++){
				output_depth[j*depth_height+i] = pDepth[i*depth_width+j];
			}
		}
	}

}
Code Example #16
File: CameraDevice.cpp Project: animecomico/kth-rgbd
// -----------------------------------------------------------------------------------------------------
//  saveHistogramImage
// -----------------------------------------------------------------------------------------------------
int saveHistogramImage(
		const XnRGB24Pixel* pImageMap,
		const XnDepthPixel* pDepthMap,
		IplImage* pImgDepth,
		int frameID)
{
	static float depthHistogram[MAX_DEPTH_HISTOGRAM];
	
	// Calculate the accumulative histogram (the yellow display...)
	const XnDepthPixel* pDepth = g_depthMD.Data();    
	xnOSMemSet(depthHistogram, 0, MAX_DEPTH_HISTOGRAM*sizeof(float));
	unsigned int nNumberOfPoints = 0;
	// count depth values
	for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
	{
		for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth)
		{
			if (*pDepth != 0)
			{
				depthHistogram[*pDepth]++;
				nNumberOfPoints++;
			}
		}
	}
	// cumulative sum
	for (int nIndex=1; nIndex<MAX_DEPTH_HISTOGRAM; nIndex++)
	{
		depthHistogram[nIndex] += depthHistogram[nIndex-1];
	}
	// rescale to 0..256
	if (nNumberOfPoints)
	{
		for (int nIndex=1; nIndex<MAX_DEPTH_HISTOGRAM; nIndex++)
		{
			depthHistogram[nIndex] = (unsigned int)(256 * (1.0f - (depthHistogram[nIndex] / nNumberOfPoints)));
		}
	}
	// generate histogram depth image
	int i = 0;
	pDepth = g_depthMD.Data();
	for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
	{
		for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth, ++i)
		{
			unsigned char nHistValue = 0;

			if (*pDepth != 0)
				nHistValue = depthHistogram[*pDepth];

			// yellow pixels
			pImgDepth->imageData[3*i+0] = 0;			//Blue
			pImgDepth->imageData[3*i+1] = nHistValue;	//Green
			pImgDepth->imageData[3*i+2] = nHistValue;	//Red
		}
	}

	if (frameID < 0)
		frameID = g_depthMD.FrameID();	// use ID given by Kinect

	char bufFilename[256];
	sprintf(bufFilename,"%s/frame_%d_histo.bmp", Config::_PathFrameSequence.c_str(), frameID);
	cvSaveImage(bufFilename, pImgDepth);
	return 0;
}
Code Example #17
File: main.cpp Project: Yusuke-Shimizu/depthkey
//----------------------------------------------------
// Set up the texture map
//----------------------------------------------------
void setTexture(void){
	xnOSMemSet(g_pTexMap, 0, g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel));	// zero out all of g_pTexMap

	// display mode 1 or 3
	if (g_nViewState == DISPLAY_MODE_OVERLAY || g_nViewState == DISPLAY_MODE_IMAGE){
		const XnRGB24Pixel* pImageRow = g_imageMD.RGB24Data();	// get the image data pointer from g_imageMD
		XnRGB24Pixel* pTexRow = g_pTexMap + g_imageMD.YOffset() * g_nTexMapX;

		for (XnUInt y = 0; y < KINECT_IMAGE_HEIGHT; ++ y){
			const XnRGB24Pixel* pImage = pImageRow;
			XnRGB24Pixel* pTex = pTexRow + g_imageMD.XOffset();

			for (XnUInt x = 0; x < KINECT_IMAGE_WIDTH; ++ x, ++ pImage, ++ pTex){
				*pTex = *pImage;
			}

			pImageRow += g_imageMD.XRes();
			pTexRow += g_nTexMapX;
		}
	}

	// display mode 1 or 2
	if (g_nViewState == DISPLAY_MODE_OVERLAY || g_nViewState == DISPLAY_MODE_DEPTH){
		const XnDepthPixel* pDepthRow = g_depthMD.Data();
		XnRGB24Pixel* pTexRow = g_pTexMap + g_depthMD.YOffset() * g_nTexMapX;
		const XnLabel* pLabel = g_sceneMD.Data();

		for (XnUInt y = 0; y < KINECT_IMAGE_HEIGHT; ++ y){
			const XnDepthPixel* pDepth = pDepthRow;
			XnRGB24Pixel* pTex = pTexRow + g_depthMD.XOffset();

			for (XnUInt x = 0; x < KINECT_IMAGE_WIDTH; ++ x, ++ pDepth, ++ pTex, ++ pLabel){
				int nHistValue = g_pDepthHist[*pDepth];

				if(*pLabel){		// pixel belongs to a user
					*pTex = userColor[*pLabel];
				}else if (*pDepth != 0){
					if(*pDepth < 1000){
						*pTex = xnRGB24Pixel(nHistValue, 0, 0);		// red
					}else if(*pDepth < 2000){
						*pTex = xnRGB24Pixel(0, nHistValue, 0);		// green
					}else if(*pDepth < 3000){
						*pTex = xnRGB24Pixel(0, 0, nHistValue);		// blue
					}else if(*pDepth < 4000){
						*pTex = xnRGB24Pixel(nHistValue, nHistValue, 0);	// yellow
					}else if(*pDepth < 5000){
						*pTex = xnRGB24Pixel(0, nHistValue, nHistValue);	// cyan
					}else{
						*pTex = xnRGB24Pixel(nHistValue, 0, nHistValue);	// magenta
					}
				}
			}

			pDepthRow += g_depthMD.XRes();
			pTexRow += g_nTexMapX;
		}
	}

	// display mode 4
	//if (g_nViewState == DISPLAY_MODE_CHROMA){
	//	// copy the image data (camera feed)
	//	const XnRGB24Pixel* pImageRow = g_imageMD.RGB24Data();	// get the image data pointer from g_imageMD
	//	XnRGB24Pixel* pTexRow = g_pTexMap + g_imageMD.YOffset() * g_nTexMapX;

	//	for (XnUInt y = 0; y < KINECT_IMAGE_HEIGHT; ++ y){	// 480
	//		const XnRGB24Pixel* pImage = pImageRow;
	//		XnRGB24Pixel* pTex = pTexRow + g_imageMD.XOffset();

	//		for (XnUInt x = 0; x < KINECT_IMAGE_WIDTH; ++ x, ++ pImage, ++ pTex){	// 640
	//			*pTex = *pImage;
	//		}

	//		pImageRow += g_imageMD.XRes();
	//		pTexRow += g_nTexMapX;
	//	}

	//	// extract the user with the depth data and composite a background image
	//	const XnDepthPixel* pDepthRow = g_depthMD.Data();		// get the depth data pointer
	//	pTexRow = g_pTexMap + g_depthMD.YOffset() * g_nTexMapX;
	//	GLuint g_backWidth = g_back.GetWidth();						// width of the background image
	//	GLubyte* pBackData = g_back.GetData() + g_back.GetImageSize() - 3 * g_backWidth;	// background pointer (walked from the last row)

	//	for (XnUInt y = 0; y < KINECT_IMAGE_HEIGHT; ++ y){	// 480
	//		const XnDepthPixel* pDepth = pDepthRow;			// depth data pointer
	//		XnRGB24Pixel* pTex = pTexRow + g_depthMD.XOffset();

	//		for (XnUInt x = 0; x < KINECT_IMAGE_WIDTH; ++ x, ++ pDepth, ++ pTex){	// 640
	//			// if the depth is 0 or above the threshold, draw the background (keep the pixel when below the threshold)
	//			if (*pDepth == 0 || *pDepth >= g_chromaThresh){
	//				pTex->nRed		= *pBackData;
	//				pTex->nGreen	= *(pBackData + 1);
	//				pTex->nBlue		= *(pBackData + 2);
	//			}

	//			pBackData += 3;
	//		}

	//		pDepthRow += g_depthMD.XRes();
	//		pTexRow += g_nTexMapX;
	//		pBackData -= 2 * 3 * g_backWidth;
	//	}
	//}
}
Code Example #18
File: main_bk1.cpp Project: horsewin/ARMicroMachines
int main(int argc, char* argv[]) {

	markerSize.width = -1; markerSize.height = -1;
	EnumerationErrors errors;
	switch (XnStatus rc = niContext.InitFromXmlFile(KINECT_CONFIG_FILENAME, &errors)) {
		case XN_STATUS_OK:
			break;
		case XN_STATUS_NO_NODE_PRESENT:
			XnChar strError[1024];	errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return rc; break;
		default:
			printf("Open failed: %s\n", xnGetStatusString(rc));
			return rc;
	}

	capture = new Camera(CAPTURE_SIZE, CAMERA_PARAMS_FILENAME);

	RegistrationParams = scaleParams(capture->getParameters(), double(REGISTRATION_SIZE.width)/double(CAPTURE_SIZE.width));
	osg_init(calcProjection(RegistrationParams, capture->getDistortion(), REGISTRATION_SIZE));

	loadKinectParams(KINECT_PARAMS_FILENAME, &kinectParams, &kinectDistort);
	kinectDistort =0;
	kinectParams->data.db[2]=320.0; kinectParams->data.db[5]=240.0;

	niContext.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	niContext.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);

	g_depth.GetMirrorCap().SetMirror(false);
	g_depth.GetAlternativeViewPointCap().SetViewPoint(g_image);

	kinectReg = new RegistrationOPIRA(new OCVSurf());
	kinectReg->addResizedMarker(MARKER_FILENAME, 400);

	//physics
	m_world = new KCRPhysicsWorld();
	ground_grid = new float[19200];
	for (int i =0;i < 19200; i++) {
		ground_grid[i] = 0; 
	}
#ifdef SIM_PARTICLES
	voxel_grid = new float[1200];
	for (int i =0;i < 1200; i++) {
		voxel_grid[i] = 0;
	}
#endif

	//controls
	KeyboardController *kc = new KeyboardController(m_world);
	XboxController *xc = new XboxController(m_world);

	loadKinectTransform(KINECT_TRANSFORM_FILENAME);

#ifdef USE_ARMM_VRPN
	m_Connection = new vrpn_Connection_IP();
	ARMM_server = new ARMM_Communicator(m_Connection );
    cout << "Created VRPN server." << endl;
#endif

#ifdef USE_SKIN_SEGMENTATION	//Skin color look up
	_HandRegion.LoadSkinColorProbTable();
#endif

#ifdef USE_OPTICAL_FLOW
	prev_colourIm = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);
#endif
/////////////////////////////////////////////Main Loop////////////////////////////////////////////////
	while (running) {
		XnStatus rc = niContext.WaitAnyUpdateAll();
		if (rc != XN_STATUS_OK) {
			printf("Read failed: %s\n", xnGetStatusString(rc));
			return rc;
		}
		g_depth.GetMetaData(niDepthMD);
		g_image.GetMetaData(niImageMD);

		colourIm = cvCreateImage(cvSize(niImageMD.XRes(), niImageMD.YRes()), IPL_DEPTH_8U, 3);
		memcpy(colourIm->imageData, niImageMD.Data(), colourIm->imageSize); cvCvtColor(colourIm, colourIm, CV_RGB2BGR);
		cvFlip(colourIm, colourIm, 1);

		depthIm = cvCreateImage(cvSize(niDepthMD.XRes(), niDepthMD.YRes()), IPL_DEPTH_16U, 1);
		transDepth160 = cvCreateImage(cvSize(MESH_SIZE.width, MESH_SIZE.height), IPL_DEPTH_32F, 1);
		transDepth320 = cvCreateImage(cvSize(CV_OP_SIZE.width, CV_OP_SIZE.height), IPL_DEPTH_32F, 1);
		memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);	
		cvShowImage("Kinect View", colourIm);

		IplImage *arImage = capture->getFrame();
		cvWaitKey(1); 
		kc->check_input(); xc->check_input();

#ifdef USE_OPTICAL_FLOW
		if(RunOnce) SceneOpticalFlowLK(prev_colourIm, colourIm);
#endif

		if(kinectTransform) { // kinect transform as cvmat* for use
			if( counter >= 4) {
				inpaintDepth(&niDepthMD, true); 
				memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);				
				TransformDepth(depthIm, transDepth160, MARKER_DEPTH, MESH_SIZE);
				GenerateTrimeshGroundFromDepth(transDepth160, MARKER_DEPTH); /*Trimesh generation*/
				m_world->updateTrimeshRefitTree(ground_grid);//opencl?
				osg_UpdateHeightfieldTrimesh(ground_grid);//opencl?
#ifdef SIM_PARTICLES
/*World spheres simulation*/
//				GenerateVoxelFromDepth(depthIm, MARKER_DEPTH);
//				m_world->updateWorldSphereTransform(voxel_grid);
//				osgUpdateWorldSphereTransform(voxel_grid);
#endif
				counter = 0;
			} else {
#ifdef USE_SKIN_SEGMENTATION /*Skin color segmentation*/ // may be reduce resolution first as well as cut off depth make processing faster
				TransformDepth(depthIm, transDepth320, MARKER_DEPTH, CV_OP_SIZE);
				IplImage* depthTmp = cvCreateImage(cvSize(CV_OP_SIZE.width, CV_OP_SIZE.height), IPL_DEPTH_8U, 1);
				IplImage* colourImResized = cvCreateImage(cvSize(CV_OP_SIZE.width, CV_OP_SIZE.height), IPL_DEPTH_8U, 3);
				gray = cvCreateImage(cvSize(colourImResized->width, colourImResized->height),IPL_DEPTH_8U,1);
				hand_region = cvCreateImage(cvSize(colourImResized->width, colourImResized->height),IPL_DEPTH_8U,1);
				IplImage* colourIm640 = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);

//				cvCmpS(transDepth, 0, depthTmp, CV_CMP_LT);//dst must b 8U
				cvThreshold(transDepth320, depthTmp, 1, 255, CV_THRESH_BINARY_INV); //thres at 1cm above marker
				cvResize(colourIm, colourImResized, CV_INTER_NN);//use nearest neighbor interpolation
//				removeNoise( depthTmp, 100 );
//				cvSet(colourImResized, cvScalar(0), depthTmp);
				cvShowImage ("Marker Thresh", colourImResized);
				cvResize(colourImResized, colourIm640,CV_INTER_NN);
				cvShowImage ("Marker Thresh 640", colourIm640);

				cvCopyImage( _HandRegion.GetHandRegion( colourImResized, gray), hand_region );
//				removeNoise( hand_region, 20 );
				cvThreshold(hand_region, depthTmp, 0, 255, CV_THRESH_BINARY_INV);
//				removeNoise( depthTmp, 100 );
//				cvShowImage ("depthTmp", depthTmp);
//				cvShowImage ("hand_region", hand_region);

				cvSet(colourImResized, cvScalar(0), depthTmp);

//				cvShowImage ("Skin Color", colourImResized);

//				cvDilate(colourImResized,colourImResized,CV_SHAPE_RECT,1);
//				cvErode(colourImResized,colourImResized,CV_SHAPE_RECT,1);
//				cvMorphologyEx(colourImResized,colourImResized,NULL,CV_SHAPE_RECT,CV_MOP_OPEN,1);
				cvResize(colourImResized, colourIm640,CV_INTER_NN);
				cvShowImage ("Color Skin Color 640", colourIm640);
				cvReleaseImage(&depthTmp);
				cvReleaseImage(&colourImResized);
				cvReleaseImage(&colourIm640);
#endif

#ifdef USE_PARTICLES
/*
				IplImage* depthTmp1 = cvCreateImage(cvSize(depthIm->width, depthIm->height), IPL_DEPTH_32F, 1);
				IplImage* depthTmp2 = cvCreateImage(cvSize(depthIm->width, depthIm->height), IPL_DEPTH_8U, 1);
				cvConvertScale(depthIm, depthTmp1, 1);
//				cvThreshold(depthTmp1, depthTmp2, MARKER_DEPTH-5, 255, CV_THRESH_TOZERO_INV);//thresh 5mm above marker
				cvThreshold(depthTmp1, depthTmp2, MARKER_DEPTH-10, 255, CV_THRESH_TOZERO);
				//cvCmpS(depthTmp1, MARKER_DEPTH, depthTmp2, CV_CMP_GT);
//				cvShowImage("DEPTH640", depthTmp2);
//				IplImage* colourTmp = cvCreateImage(cvSize(colourIm->width, colourIm->height), IPL_DEPTH_8U, 3);
//				cvCopyImage(colourIm, colourTmp);
//				cvSet(colourTmp, cvScalar(0), depthTmp2);
//				cvShowImage ("Color640", colourTmp);
				cvSet(depthTmp1, cvScalar(0), depthTmp2);
				cvShowImage("DEPTH640_32F", depthTmp1);
*/
				inpaintDepth(&niDepthMD, true); 
//				TransformDepth(depthTmp1, transDepth, MARKER_DEPTH);
				memcpy(depthIm->imageData, niDepthMD.Data(), depthIm->imageSize);
				TransformDepth(depthIm, transDepth, MARKER_DEPTH);
				IplImage* depthTmp3 = cvCreateImage(cvSize(TRACKING_SIZE.width, TRACKING_SIZE.height), IPL_DEPTH_8U, 1);
				cvThreshold(transDepth, depthTmp3, 0.5, 255, CV_THRESH_BINARY_INV);
//				cvThreshold(transDepth, depthTmp3, 0, 255, CV_THRESH_TOZERO);
//				cvConvertScale(transDepth, depthTmp3, 1);
				cvShowImage("DEPTH160", transDepth);
				IplImage* colourImResized = cvCreateImage(cvSize(TRACKING_SIZE.width, TRACKING_SIZE.height), IPL_DEPTH_8U, 3);
				cvResize(colourIm, colourImResized, CV_INTER_NN);//use nearest neighbor interpolation
				cvSet(colourImResized, cvScalar(0), depthTmp3);
				cvShowImage ("Color160", colourImResized);

//				cvReleaseImage(&depthTmp1);
//				cvReleaseImage(&depthTmp2);
//				cvReleaseImage(&colourTmp);
				cvReleaseImage(&depthTmp3);
				cvReleaseImage(&colourImResized);

/*
				IplImage* depthTmp1 = cvCreateImage(cvSize(depthIm->width, depthIm->height), IPL_DEPTH_32F, 1);
				IplImage* depthTmp2 = cvCreateImage(cvSize(depthIm->width, depthIm->height), IPL_DEPTH_8U, 1);
				cvConvertScale(depthIm, depthTmp1, 1);
//				cvThreshold(depthTmp1, depthTmp2, MARKER_DEPTH-5, 255, CV_THRESH_TOZERO_INV);//thresh 5mm above marker
				cvThreshold(depthTmp1, depthTmp2, MARKER_DEPTH-5, 255, CV_THRESH_TOZERO);
				//cvCmpS(depthTmp1, MARKER_DEPTH, depthTmp2, CV_CMP_GT);
				cvShowImage("TMP_DEPTH", depthTmp2);
				IplImage* colourTmp = cvCreateImage(cvSize(colourIm->width, colourIm->height), IPL_DEPTH_8U, 3);
				cvCopyImage(colourIm, colourTmp);
				cvSet(colourTmp, cvScalar(0), depthTmp2);
				cvShowImage ("Basic Thresh", colourTmp);
				cvReleaseImage(&depthTmp1);
				cvReleaseImage(&depthTmp2);
*/
#endif
				counter++;
//			} else {
//				counter++;
			}
			//do hand pose recognition
			m_world->Update();
			RenderScene(arImage, capture);
		}
#ifdef USE_ARMM_VRPN
		ARMM_server->mainloop();
		m_Connection->mainloop();
#endif

#ifdef USE_OPTICAL_FLOW
		if(!RunOnce) RunOnce = true;
		cvCopyImage(colourIm, prev_colourIm);
		memcpy(prev_colourIm->imageData, niImageMD.Data(), prev_colourIm->imageSize);
		cvCvtColor(prev_colourIm, prev_colourIm, CV_RGB2BGR);
#endif

		cvReleaseImage(&arImage);
		cvReleaseImage(&depthIm); cvReleaseImage(&colourIm);
		cvReleaseImage(&transDepth320);cvReleaseImage(&transDepth160);
#ifdef USE_SKIN_SEGMENTATION
		cvReleaseImage(&gray); cvReleaseImage(&hand_region);
#endif
	}

	cvReleaseImage(&prev_colourIm);
	osg_uninit();
	delete m_world;
	delete kinectReg;

	cvReleaseMat(&RegistrationParams);

	delete kc;

	return 0;
}
Code Example #19
File: NiSimpleViewer.cpp Project: 3david/OpenNI
void glutDisplay (void)
{
	XnStatus rc = XN_STATUS_OK;

	// Read a new frame
	rc = g_context.WaitAnyUpdateAll();
	if (rc != XN_STATUS_OK)
	{
		printf("Read failed: %s\n", xnGetStatusString(rc));
		return;
	}

	g_depth.GetMetaData(g_depthMD);
	g_image.GetMetaData(g_imageMD);

	const XnDepthPixel* pDepth = g_depthMD.Data();
	const XnUInt8* pImage = g_imageMD.Data();

	unsigned int nImageScale = GL_WIN_SIZE_X / g_depthMD.FullXRes();

	// Copied from SimpleViewer
	// Clear the OpenGL buffers
	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Setup the OpenGL viewpoint
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	// Calculate the accumulative histogram (the yellow display...)
	xnOSMemSet(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));

	unsigned int nNumberOfPoints = 0;
	for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
	{
		for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth)
		{
			if (*pDepth != 0)
			{
				g_pDepthHist[*pDepth]++;
				nNumberOfPoints++;
			}
		}
	}
	for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}
	if (nNumberOfPoints)
	{
		for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}

	xnOSMemSet(g_pTexMap, 0, g_nTexMapX*g_nTexMapY*sizeof(XnRGB24Pixel));

	// check if we need to draw image frame to texture
	if (g_nViewState == DISPLAY_MODE_OVERLAY ||
		g_nViewState == DISPLAY_MODE_IMAGE)
	{
		const XnRGB24Pixel* pImageRow = g_imageMD.RGB24Data();
		XnRGB24Pixel* pTexRow = g_pTexMap + g_imageMD.YOffset() * g_nTexMapX;

		for (XnUInt y = 0; y < g_imageMD.YRes(); ++y)
		{
			const XnRGB24Pixel* pImage = pImageRow;
			XnRGB24Pixel* pTex = pTexRow + g_imageMD.XOffset();

			for (XnUInt x = 0; x < g_imageMD.XRes(); ++x, ++pImage, ++pTex)
			{
				*pTex = *pImage;
			}

			pImageRow += g_imageMD.XRes();
			pTexRow += g_nTexMapX;
		}
	}

	// check if we need to draw depth frame to texture
	if (g_nViewState == DISPLAY_MODE_OVERLAY ||
		g_nViewState == DISPLAY_MODE_DEPTH)
	{
		const XnDepthPixel* pDepthRow = g_depthMD.Data();
		XnRGB24Pixel* pTexRow = g_pTexMap + g_depthMD.YOffset() * g_nTexMapX;

		for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
		{
			const XnDepthPixel* pDepth = pDepthRow;
			XnRGB24Pixel* pTex = pTexRow + g_depthMD.XOffset();

			for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth, ++pTex)
			{
				if (*pDepth != 0)
				{
					int nHistValue = g_pDepthHist[*pDepth];
					pTex->nRed = nHistValue;
					pTex->nGreen = nHistValue;
					pTex->nBlue = 0;
				}
			}

			pDepthRow += g_depthMD.XRes();
			pTexRow += g_nTexMapX;
		}
	}

	// Create the OpenGL texture map
	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, g_nTexMapX, g_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, g_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glBegin(GL_QUADS);

	int nXRes = g_depthMD.FullXRes();
	int nYRes = g_depthMD.FullYRes();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)nXRes/(float)g_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)nXRes/(float)g_nTexMapX, (float)nYRes/(float)g_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)nYRes/(float)g_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();

	// Swap the OpenGL display buffers
	glutSwapBuffers();
}
Code Example #20
File: main.cpp Project: alfiandosengkey/as3openni
void captureDepthMap(unsigned char* g_ucDepthBuffer)
{
	SceneMetaData smd;
	DepthMetaData dmd;
	_depth.GetMetaData(dmd);

	//printf("AS3OpenNI :: Frame %d Middle point is: %u. FPS: %f\n", dmd.FrameID(), dmd(dmd.XRes() / 2, dmd.YRes() / 2), xnFPSCalc(&xnFPS));

	_depth.GetMetaData(dmd);
	_userGenerator.GetUserPixels(0, smd);
	unsigned int nValue = 0;
	unsigned int nHistValue = 0;
	unsigned int nIndex = 0;
	unsigned int nX = 0;
	unsigned int nY = 0;
	unsigned int nNumberOfPoints = 0;
	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	const XnDepthPixel* pDepth = dmd.Data();
	const XnLabel* pLabels = smd.Data();

	// Calculate the accumulative histogram
	memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
	for (nY=0; nY<g_nYRes; nY++)
	{
		for (nX=0; nX<g_nXRes; nX++)
		{
			nValue = *pDepth;
			if (nValue != 0)
			{
				g_pDepthHist[nValue]++;
				nNumberOfPoints++;
			}
			pDepth++;
		}
	}

	for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}

	if (nNumberOfPoints)
	{
		for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}
	
	pDepth = dmd.Data();
	if (_drawPixels)
	{
		XnUInt32 nIndex = 0;
		for (nY=0; nY<g_nYRes; nY++)
		{
			for (nX=0; nX < g_nXRes; nX++, nIndex++)
			{
				g_ucDepthBuffer[0] = 0;
				g_ucDepthBuffer[1] = 0;
				g_ucDepthBuffer[2] = 0;
				g_ucDepthBuffer[3] = 0x00;
				if (_depthMapBackground || *pLabels != 0)
				{
					nValue = *pDepth;
					XnLabel label = *pLabels;
					XnUInt32 nColorID = label % nColors;
					if (label == 0)
					{
						nColorID = nColors;
					}

					if (nValue != 0)
					{
						nHistValue = g_pDepthHist[nValue];
						if(_depthMapDetect)
						{
							g_ucDepthBuffer[0] = nHistValue * Colors[nColorID][0]; 
							g_ucDepthBuffer[1] = nHistValue * Colors[nColorID][1];
							g_ucDepthBuffer[2] = nHistValue * Colors[nColorID][2];
						}
						else
						{
							g_ucDepthBuffer[0] = nHistValue; 
							g_ucDepthBuffer[1] = nHistValue;
							g_ucDepthBuffer[2] = nHistValue;
						}
						
						g_ucDepthBuffer[3] = 0xFF;
					}
				}
				pDepth++;
				pLabels++;
				g_ucDepthBuffer+=4;
			}
		}
	}
}
Code Example #21
File: NiSimpleRead.cpp Project: ABMNYZ/OpenNI
int main()
{
	XnStatus nRetVal = XN_STATUS_OK;

	Context context;
	ScriptNode scriptNode;
	EnumerationErrors errors;

	const char *fn = NULL;
	if	(fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
	else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
	else {
		printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
		return XN_STATUS_ERROR;
	}
	printf("Reading config from: '%s'\n", fn);
	nRetVal = context.InitFromXmlFile(fn, scriptNode, &errors);

	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	DepthGenerator depth;
	nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
	CHECK_RC(nRetVal, "Find depth generator");

	XnFPSData xnFPS;
	nRetVal = xnFPSInit(&xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init");

	DepthMetaData depthMD;

	while (!xnOSWasKeyboardHit())
	{
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("UpdateData failed: %s\n", xnGetStatusString(nRetVal));
			continue;
		}

		xnFPSMarkFrame(&xnFPS);

		depth.GetMetaData(depthMD);

		printf("Frame %d Middle point is: %u. FPS: %f\n", depthMD.FrameID(), depthMD(depthMD.XRes() / 2, depthMD.YRes() / 2), xnFPSCalc(&xnFPS));
	}

	depth.Release();
	scriptNode.Release();
	context.Release();

	return 0;
}
Code Example #22
//--------------------------------------------------------------
void testApp::update(){
	XnStatus rc = XN_STATUS_OK;
	
	// Read a new frame
	rc = g_context.WaitAnyUpdateAll();
	if (rc != XN_STATUS_OK)
	{
		printf("Read failed: %s\n", xnGetStatusString(rc));
		return;
	}
	
	g_depth.GetMetaData(g_depthMD);
	//g_image.GetMetaData(g_imageMD);
	
	const XnDepthPixel* pDepth = g_depthMD.Data();
	
	// Calculate the accumulative histogram (the yellow display...)
	xnOSMemSet(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
	
	unsigned int nNumberOfPoints = 0;
	for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
	{
		for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth)
		{
			if (*pDepth != 0)
			{
				g_pDepthHist[*pDepth]++;
				nNumberOfPoints++;
			}
		}
	}
	for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}
	if (nNumberOfPoints)
	{
		for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}
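	// g_pDepthHist[d] now holds 256 * (1 - CDF(d)), so nearer depths map to brighter values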
	
	xnOSMemSet(g_pTexMap, 0, g_nTexMapX*g_nTexMapY*sizeof(XnRGB24Pixel));
	
	// check if we need to draw depth frame to texture
	const XnDepthPixel* pDepthRow = g_depthMD.Data();
	XnRGB24Pixel* pTexRow = g_pTexMap + g_depthMD.YOffset() * g_nTexMapX;
	
	for (XnUInt y = 0; y < g_depthMD.YRes(); ++y)
	{
		const XnDepthPixel* pDepth = pDepthRow;
		XnRGB24Pixel* pTex = pTexRow + g_depthMD.XOffset();
		
		for (XnUInt x = 0; x < g_depthMD.XRes(); ++x, ++pDepth, ++pTex)
		{
			int idx = (x + y * g_depthMD.XRes()) * 3;
			if (*pDepth != 0)
			{
				int nHistValue = g_pDepthHist[*pDepth];
				
				pixels[idx] = nHistValue;
				pixels[idx+1] = nHistValue;
				pixels[idx+2] = nHistValue;
			}
			else
			{
				pixels[idx] = 0;
				pixels[idx+1] = 0;
				pixels[idx+2] = 0;
			}
		}
		
		pDepthRow += g_depthMD.XRes();
		pTexRow += g_nTexMapX;
	}
	
	tex.loadData(pixels, 640, 480, GL_RGB);
}