int _tmain(int argc, _TCHAR* argv[])
{
	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	nRetVal = context.Init();
	DepthGenerator depth;
	nRetVal = depth.Create(context);

	XnMapOutputMode mapMode;

	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES; 
	mapMode.nFPS = 30; 

	nRetVal = depth.SetMapOutputMode(mapMode);

	nRetVal = context.StartGeneratingAll();

	XnUInt32 nMiddleIndex = XN_VGA_X_RES * XN_VGA_Y_RES/2 + XN_VGA_X_RES/2;

	int count = 0;
	const XnDepthPixel* pDepthMap;

	while (count<50) { // Update to next frame 
		nRetVal = context.WaitOneUpdateAll(depth);
		pDepthMap = depth.GetDepthMap(); 
		printf("Middle pixel is %u millimeters away\n", pDepthMap[nMiddleIndex]);
		count++;
	} 

	DepthMetaData g_depthMD;
	depth.GetMetaData(g_depthMD);

	cout << g_depthMD.FullXRes() << " x " << g_depthMD.FullYRes() << endl;

	
	const double maxDepth = 3000;

	const int xScale = 1;
	const int yScale = 1;

	const int xActualRes = XN_VGA_X_RES;
	const int yActualRes = XN_VGA_Y_RES;

	const int xRes = xActualRes/xScale;
	const int yRes = yActualRes/yScale;

	const int sizeOfMap = xRes * yRes + 2*xRes + 2*(yRes-2) + 2; // front-face grid plus a ring of wall vertices around its border

	vertex* vertices = new vertex[sizeOfMap];

	double xRealScale = 1.0;
	double yRealScale = 1.0;

	for (int j = 0; j < yRes; j++) {
		for (int i = 0; i < xRes; i++) {
		
			double x = xScale * xRealScale * i;
			double y = yScale * yRealScale * j;

			double z = -(double) g_depthMD.DepthMap()[xActualRes * j * yScale + i * xScale];

			// Push unknown (0) and far (beyond 1800 mm) readings back to the clipping plane.
			if (z == 0)
				z = -maxDepth;

			if (z < -1800)
				z = -maxDepth;

			vertices[xRes*j + i] = vertex(x,y,z);
		}
	}

	int offset = xRes*yRes;
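
	// The front face (the depth map itself) occupies vertices 0 .. xRes*yRes-1. The loops
	// below append a ring of "wall" vertices at z = -maxDepth - 100 along the bottom, right,
	// top and left edges, then stitch them to the border of the front face so the exported
	// STL describes a closed surface.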

	vector<int> indices;

	//bottom facets
	for (int i = 0; i< xRes; i++) {
		double x = xRealScale * xScale* i;
		double y = 0;
		double z = -maxDepth - 100;  //placeholder

		vertices[offset + i] = vertex(x,y,z);
	}

	//bottom facet winding
	for (int i = 0; i< xRes-1; i++) {
		indices.push_back(i);				
		indices.push_back(i+1);				
		indices.push_back(xRes*yRes + 1 + i);	

		indices.push_back(i);				
		indices.push_back(xRes*yRes + 1 + i);	
		indices.push_back(xRes*yRes + i);	
	}

	offset = offset + xRes;

	//right side facets 
	for (int i = 1; i< yRes; i++) {
		double x = xRealScale * xScale * (xRes-1);
		double y = yRealScale * yScale * i;
		double z = -maxDepth - 100;  //placeholder

		vertices[offset + i -1] = vertex(x,y,z);
	}

	offset = offset + yRes -1;

	//right facet winding
	for (int i = 0; i< yRes-1; i++) {

		indices.push_back(xRes-1 + i * xRes);			//2	 good		
		indices.push_back(xRes-1 + xRes * (i+1));		//5	 			
		indices.push_back(xRes*yRes + xRes + i);		//12

		indices.push_back(xRes-1 + i * xRes);			//2	 good		
		indices.push_back(xRes*yRes + xRes + i -1);		//11	
		indices.push_back(xRes*yRes + xRes + i);		//12
	}


	//top side facets 
	for (int i = 1; i< xRes; i++) {
		double x = xRealScale * xScale * (xRes-1 - i);
		double y = yRealScale * yScale * (yRes-1);
		double z = -maxDepth - 100;  //placeholder

		vertices[offset + i -1] = vertex(x,y,z);
	}

	//top facet winding
	for (int i = 0; i< xRes-1; i++) {

		indices.push_back(xRes*yRes-1 - i);					//8	 good		
		indices.push_back(xRes*yRes -1 + xRes + yRes + i);	//14	 			
		indices.push_back(xRes*yRes -2 - i);				//7

		indices.push_back(xRes*yRes-1 - i);					//8	 good	
		indices.push_back(xRes*yRes -1 + xRes + yRes + i -1);	//13	
		indices.push_back(xRes*yRes -1 + xRes + yRes + i);		//14
	}

	offset = offset + xRes - 1; //check

	//left side facet 
	for (int i = 1; i< yRes+1; i++) {
		double x = 0;
		double y = yRealScale * yScale * (yRes - i); //check
		double z = -maxDepth - 100;  //placeholder

		vertices[offset + i -1] = vertex(x,y,z); //check
	}

	//left side winding
	for (int i = 0; i < yRes-1; i++) {
		indices.push_back(xRes*yRes - (xRes*(i+2)));	 //3
		indices.push_back(xRes*yRes - (xRes*(i+1)));	//6
		indices.push_back(xRes*yRes + 2*xRes + yRes -3 +i);  //15

		indices.push_back(xRes*yRes + 2*xRes + yRes -3 + i + 1);  //16
		indices.push_back(xRes*yRes - (xRes*(i+2)));
		indices.push_back(xRes*yRes + 2*xRes + yRes -3 +i); //15

		//indices.push_back(xRes*yRes - xRes * (i+1));			//6	
		//indices.push_back(xRes*yRes + 2*xRes + yRes - 3 + i);	//15
		//indices.push_back(xRes*yRes - xRes * (i+2));	 		//3	

		//indices.push_back(xRes*yRes - xRes * (i+2));	 			//3	
		//indices.push_back(xRes*yRes + 2*xRes + yRes - 3 + i);		//15
		//indices.push_back(xRes*yRes + 2*xRes + yRes - 3 + i +1);	//16

	}

	
	// Close the remaining seam at the bottom of the left wall.
	indices.push_back(xRes*yRes);
	indices.push_back(0);
	indices.push_back(xRes*yRes + 2*xRes + 2*yRes - 4);

	// Back face: two triangles spanning the four corner vertices of the wall ring.
	indices.push_back(xRes*yRes);
	indices.push_back(xRes*yRes + xRes - 1);
	indices.push_back(xRes*yRes + xRes + yRes - 2);

	indices.push_back(xRes*yRes);
	indices.push_back(xRes*yRes + xRes + yRes - 2);
	indices.push_back(xRes*yRes + 2*xRes + yRes - 3);


	printf("Created %u vertices\n",sizeOfMap);

	//front face - this shows the actual depth map
	for (int i = 0; i < xRes-1; i++) {
		for (int j = 0; j < yRes-1; j++) {
			indices.push_back(j * xRes + i);
			indices.push_back((j+1) * xRes + i);
			indices.push_back(j * xRes + i + 1);
			
			indices.push_back(j * xRes + i + 1);
			indices.push_back((j+1) * xRes + i);
			indices.push_back((j+1) * xRes + i + 1);
		}
	}

	int numFacets = (int) indices.size() / 3;

	printf("Created %d facets\n", numFacets);

	//int offset = sizeOfMap-1; // number of vertices on front face

	////back face
	////for (int i = 0; i < xRes-1; i++) {
	////	for (int j = 0; j < yRes-1; j++) {
	////		indices.push_back(j * xRes + i + 1 + offset);
	////		indices.push_back((j+1) * xRes + i + offset);
	////		indices.push_back(j * xRes + i + offset);
	////		
	////		indices.push_back((j+1) * xRes + i + 1 + offset);
	////		indices.push_back((j+1) * xRes + i + offset);
	////		indices.push_back(j * xRes + i + 1 + offset);
	////	}
	////}

	//int n = indices.size()/3;

	ofstream stlOut;
	stlOut.open ("stlOut.stl");
	printf("Writing to STL...\n");
	stlOut << "solid kinectout\n";

	//  printf("Face Count: %u Vertex Count: %u", numFacets, xRes*yRes);

    for (int ii = 0; ii < numFacets; ++ii)
    {

        int v1 = indices[3*ii];
        int v2 = indices[3*ii + 1];
        int v3 = indices[3*ii + 2];

		vertex p1 = vertices[v1];
        vertex p2 = vertices[v2];
        vertex p3 = vertices[v3];
 
        vertex dir1 = p1.subtract(p2);
        vertex dir2  = p3.subtract(p2);

        vertex n = dir1.cross(dir2).normalize();
 
		stlOut << "  facet normal "<< n.x << " " << n.y << " " << n.z << "\n";
		stlOut << "    outer loop\n";
		stlOut << "      vertex " << p1.x << " " << p1.y << " " << p1.z << "\n";
		stlOut << "      vertex " << p2.x << " " << p2.y << " " << p2.z << "\n";
		stlOut << "      vertex " << p3.x << " " << p3.y << " " << p3.z << "\n";
		stlOut << "    endloop\n";
		stlOut << "  endfacet\n";

    }
	stlOut << "endsolid kinectout";

	printf("Complete!\n");
	stlOut.close();

}
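
// The listing above relies on a small `vertex` helper type that is not included in the
// snippet. A minimal sketch consistent with the calls it makes (public x/y/z members, an
// (x, y, z) constructor, subtract, cross and normalize) could look like the following;
// this is an assumption, not the original author's definition.
#include <cmath>  // std::sqrt

struct vertex
{
	double x, y, z;

	vertex() : x(0), y(0), z(0) {}
	vertex(double x_, double y_, double z_) : x(x_), y(y_), z(z_) {}

	// Component-wise difference: this minus other.
	vertex subtract(const vertex& other) const
	{
		return vertex(x - other.x, y - other.y, z - other.z);
	}

	// Cross product of this vector with other.
	vertex cross(const vertex& other) const
	{
		return vertex(y * other.z - z * other.y,
		              z * other.x - x * other.z,
		              x * other.y - y * other.x);
	}

	// Unit-length copy; a zero vector is returned unchanged to avoid dividing by zero.
	vertex normalize() const
	{
		double len = std::sqrt(x * x + y * y + z * z);
		if (len == 0.0)
			return *this;
		return vertex(x / len, y / len, z / len);
	}
};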
Example #2
void mixRGB_Depth()
{
	bool bShouldRun = true;
	int c;

	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	// Initialize context object
	nRetVal = context.Init();

	// Check error code
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	context.SetGlobalMirror(true);

	//Create Depth generator node
	DepthGenerator depth;
	nRetVal = depth.Create(context);
	// Check error code
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Create an ImageGenerator node
	ImageGenerator image;
	nRetVal = image.Create(context);
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Sync the DepthGenerator with the ImageGenerator
	nRetVal = depth.GetAlternativeViewPointCap().SetViewPoint(image);
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	//Set it to VGA maps at 30 fps
	XnMapOutputMode mapMode;
	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES;
	mapMode.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(mapMode);

	// Make it start generating data
	nRetVal = context.StartGeneratingAll();
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Create the OpenCV images: a 16-bit single-channel matrix and image for the depth map,
	// and an 8-bit, three-channel header that will wrap the RGB buffer.
	CvMat* depthMetersMat = cvCreateMat(480, 640, CV_16UC1);
	IplImage* kinectDepthImage = cvCreateImage(cvSize(640, 480), IPL_DEPTH_16U, 1);

	IplImage* rgbimg = cvCreateImageHeader(cvSize(640, 480), IPL_DEPTH_8U, 3);

	// Main loop
	while (bShouldRun)
	{
		//wait for new data to be available
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal)
		{
			printf("Error: %s", xnGetStatusString(nRetVal));
			continue;
		}
		//Take current depth map
		const XnDepthPixel* pDepthMap = depth.GetDepthMap();
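		// data.s views the CvMat as 16-bit values; the x10 scaling below only brightens the
		// millimetre depths so they are visible when shown as a 16-bit image.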

		for (int y=0; y<XN_VGA_Y_RES; y++)
		{
			for (int x=0; x<XN_VGA_X_RES; x++)
			{
				depthMetersMat->data.s[y*XN_VGA_X_RES+x]=10*pDepthMap[y*XN_VGA_X_RES+x];
			}
		}

		cvGetImage(depthMetersMat, kinectDepthImage);
		cvShowImage("Depth", kinectDepthImage);

		// Take the current image and wrap it in the IplImage header.
		// (OpenNI delivers RGB while OpenCV expects BGR, so the channels will
		// appear swapped unless they are reordered first.)
		const XnRGB24Pixel* pImage = image.GetRGB24ImageMap();
		XnRGB24Pixel* ucpImage = const_cast<XnRGB24Pixel*>(pImage);
		cvSetData(rgbimg, ucpImage, 640*3);
		cvShowImage("RGB", rgbimg);

		c = cvWaitKey(1);
		if (c == 27)
			bShouldRun = false;
	}

	cvReleaseImage(&kinectDepthImage);
	cvReleaseImageHeader(&rgbimg);
	cvReleaseMat(&depthMetersMat);
	context.Shutdown();
}
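
// The 16-bit depth window above can look very dark because raw millimetre values occupy only
// a small part of the displayable range. A helper along these lines rescales the depth into
// an 8-bit image before display; this is a sketch added for illustration (it is not part of
// the original example), and the 10000 mm ceiling is an assumed working range.
static IplImage* depthTo8Bit(const IplImage* depth16, double maxRangeMm = 10000.0)
{
	IplImage* depth8 = cvCreateImage(cvGetSize(depth16), IPL_DEPTH_8U, 1);
	// Map 0..maxRangeMm onto 0..255.
	cvConvertScale(depth16, depth8, 255.0 / maxRangeMm);
	return depth8;  // the caller releases it with cvReleaseImage()
}
// Example use inside the loop:
//     IplImage* depth8 = depthTo8Bit(kinectDepthImage);
//     cvShowImage("Depth 8-bit", depth8);
//     cvReleaseImage(&depth8);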
Example #3
int main(int argc, char *argv[])
{	
	//--------------------------------------------------------------------//
	//------------------------- SETUP REQUIRED NODES ---------------------//
	//--------------------------------------------------------------------//
	
	// Setup the command line parameters.
	setupParams(argc, argv);
	
	// Setup all the sockets.
	setupSockets();
    
	// Setup the capture socket server for Mac.
	#if (XN_PLATFORM == XN_PLATFORM_MACOSX)
		if(_featureDepthMapCapture || _featureRGBCapture)
		{
			if(_useSockets)
			{
				g_AS3Network = network();
				g_AS3Network.init(setupServer);
			}
		}
	#endif
	
	// Setup the status.
    XnStatus _status = XN_STATUS_OK;
    EnumerationErrors _errors;
    
    // Context Init and Add license.
	_status = _context.Init();
	CHECK_RC(_status, "AS3OpenNI :: Initialize context");
	_context.SetGlobalMirror(_mirror);
	
	// Add the PrimeSense NITE license.
	strcpy(_license.strVendor, "PrimeSense");
	strcpy(_license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
		
	_status = _context.AddLicense(_license);
   	CHECK_RC(_status, "AS3OpenNI :: Added license");
   	
   	// Set it to VGA maps at 30 FPS
	_depthMode.nXRes = 640;
	_depthMode.nYRes = 480;
	_depthMode.nFPS = 30;
	
	// Create the depth generator.
	_status = _depth.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create depth generator");
	_status = _depth.SetMapOutputMode(_depthMode);
	
	// Create the image generator.
	_status = _image.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create image generator");
	_status = _image.SetMapOutputMode(_depthMode);
	_status = _image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
	
	// Create the hands generator.
	_status = _hands.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create hands generator");
	_hands.SetSmoothing(0.1);

	// Create the gesture generator.
	_status = _gesture.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create gesture generator");
	
	// Create user generator.
	_status = _userGenerator.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Find user generator");
	
	// Create and initialize point tracker
	_sessionManager = new XnVSessionManager();
	_status = _sessionManager->Initialize(&_context, "Wave", "RaiseHand");
	
	if (_status != XN_STATUS_OK)
	{
		printf("AS3OpenNI :: Couldn't initialize the Session Manager: %s\n", xnGetStatusString(_status));
		CleanupExit();
	}
	_sessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress);
	
	// Start catching signals for quit indications
	CatchSignals(&_quit);
	
	//---------------------------------------------------------------//
	//------------------------- SETUP FEATURES ---------------------//
	//--------------------------------------------------------------//
	
	// Define the Wave and SinglePoint detectors.
	_waveDetector = new XnVWaveDetector();
	
	// SinglePoint detector.
	if(_featureSinglePoint) _waveDetector->RegisterPointUpdate(NULL, &OnPointUpdate);
	
	// Feature Gesture.
	if(_featureGesture)
	{
		// Wave detector.
		_waveDetector->RegisterWave(NULL, &OnWave);
		
		// Push detector.
		_pushDetector = new XnVPushDetector();
		_pushDetector->RegisterPush(NULL, &onPush);
	
		// Swipe detector.
		_swipeDetector = new XnVSwipeDetector();
		_swipeDetector->RegisterSwipeUp(NULL, &Swipe_SwipeUp);
		_swipeDetector->RegisterSwipeDown(NULL, &Swipe_SwipeDown);
		_swipeDetector->RegisterSwipeLeft(NULL, &Swipe_SwipeLeft);
		_swipeDetector->RegisterSwipeRight(NULL, &Swipe_SwipeRight);
	
		// Steady detector.
		_steadyDetector = new XnVSteadyDetector();
		_steadyDetector->RegisterSteady(NULL, &Steady_OnSteady);
	}
	
	// Feature Circle.
	if(_featureCircle)
	{
		// Circle detector.
		_circleDetector = new XnVCircleDetector();
		_circleDetector->RegisterCircle(NULL, &CircleCB);
		_circleDetector->RegisterNoCircle(NULL, &NoCircleCB);
		_circleDetector->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate);
		_circleDetector->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy);
	}
	
	// Feature Slider.
	if(_featureSlider)
	{
		// Left/Right slider.
		_leftRightSlider = new XnVSelectableSlider1D(3, 0, AXIS_X);
		_leftRightSlider->RegisterActivate(NULL, &LeftRightSlider_OnActivate);
		_leftRightSlider->RegisterDeactivate(NULL, &LeftRightSlider_OnDeactivate);
		_leftRightSlider->RegisterPrimaryPointCreate(NULL, &LeftRightSlider_OnPrimaryCreate);
		_leftRightSlider->RegisterPrimaryPointDestroy(NULL, &LeftRightSlider_OnPrimaryDestroy);
		_leftRightSlider->RegisterValueChange(NULL, &LeftRightSlider_OnValueChange);
		_leftRightSlider->SetValueChangeOnOffAxis(false);
		
		// Up/Down slider.
		_upDownSlider = new XnVSelectableSlider1D(3, 0, AXIS_Y);
		_upDownSlider->RegisterActivate(NULL, &UpDownSlider_OnActivate);
		_upDownSlider->RegisterDeactivate(NULL, &UpDownSlider_OnDeactivate);
		_upDownSlider->RegisterPrimaryPointCreate(NULL, &UpDownSlider_OnPrimaryCreate);
		_upDownSlider->RegisterPrimaryPointDestroy(NULL, &UpDownSlider_OnPrimaryDestroy);
		_upDownSlider->RegisterValueChange(NULL, &UpDownSlider_OnValueChange);
		_upDownSlider->SetValueChangeOnOffAxis(false);
		
		// In/Out slider.
		_inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z);
		_inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate);
		_inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate);
		_inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate);
		_inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy);
		_inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange);
		_inOutSlider->SetValueChangeOnOffAxis(false);
	}
	
	// Feature TrackPad.
	if(_featureTrackPad)
	{
		// Track Pad.
		if(trackpad_columns > 0 && trackpad_rows > 0)
		{
			_trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows);
		}
		else
		{
			_trackPad = new XnVSelectableSlider2D(4, 9);
		}
		
		_trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover);
		_trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect);
	    _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate);
	  	_trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy);
	}
	
	// Feature User Tracking.
	if(_featureUserTracking)
	{
		// Setup user generator callbacks.
		XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
		if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
		{
			printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n");
			return 1;
		}
		_userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
		
		// Setup Skeleton detection.
		_userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);
		if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration())
		{
			_needPose = true;
			if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
			{
				printf("AS3OpenNI :: Pose required, but not supported\n");
				return 1;
			}
			_userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
			_userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose);
		}
		_userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
	}
	
	// Create the broadcaster manager.
	_broadcaster = new XnVBroadcaster();
	
	// Start generating all.
	_context.StartGeneratingAll();
	
	// Initialize the FPS counter.
	_status = xnFPSInit(&xnFPS, 180);
	CHECK_RC(_status, "AS3OpenNI :: FPS Init");
	
	//----------------------------------------------------------------------//
	//------------------------- SETUP DISPLAY SUPPORT ---------------------//
	//--------------------------------------------------------------------//
	
	// Setup depth and image data.
	_depth.GetMetaData(_depthData);
	_image.GetMetaData(_imageData);
	
	// Hybrid mode isn't supported in this sample
	if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes())
	{
		printf ("AS3OpenNI :: The device depth and image resolution must be equal!\n");
		return 1;
	}

	// RGB is the only image format supported.
	if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		printf("AS3OpenNI :: The device image format must be RGB24\n");
		return 1;
	}
	
	// Setup the view points to match between the depth and image maps.
	if(_snapPixels) _depth.GetAlternativeViewPointCap().SetViewPoint(_image);
	
	//-------------------------------------------------------------//
	//------------------------- MAIN LOOP ------------------------//
	//-----------------------------------------------------------//
	
	// Setup the capture socket server for PC.
	#if (XN_PLATFORM == XN_PLATFORM_WIN32)
		if(_featureDepthMapCapture || _featureRGBCapture || _featureUserTracking)
		{
			if(_useSockets)
			{
				g_AS3Network = network();
				g_AS3Network.init(setupServer);
			}
		}
	#endif
	
	// Main loop
	while ((!_kbhit()) && (!_quit))
	{
		xnFPSMarkFrame(&xnFPS);
		_context.WaitAndUpdateAll();
		_sessionManager->Update(&_context);
		if(_featureDepthMapCapture) captureDepthMap(g_ucDepthBuffer);
		if(_featureRGBCapture) captureRGB(g_ucImageBuffer);
		#if (XN_PLATFORM == XN_PLATFORM_WIN32)
			if(_featureUserTracking) getPlayers();
		#else
			if(_featureUserTracking) renderSkeleton();
		#endif
	}
	
	CleanupExit();
}
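
// CHECK_RC is used throughout the listing above but its definition is not part of the
// snippet. The OpenNI samples usually define it roughly as follows (shown here as an
// assumption; the early return propagates the XnStatus code to the caller):
#define CHECK_RC(rc, what)											\
	if (rc != XN_STATUS_OK)											\
	{																\
		printf("%s failed: %s\n", what, xnGetStatusString(rc));		\
		return rc;													\
	}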
int main()
{
	//--------------------------------------------------- 
	bool bShouldRun = true;
	int c;

	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	// Initialize context object
	nRetVal = context.Init();

	// Check error code
	if (nRetVal)
		printf("Error: %s\n", xnGetStatusString(nRetVal));

	context.SetGlobalMirror(true); //mirror image

	// Create a DepthGenerator node
	DepthGenerator depth;
	nRetVal = depth.Create(context);
	// check error code
	if(nRetVal)
		printf("Failed to create depth generator: %s\n", xnGetStatusString(nRetVal));

	// Create an ImageGenerator node
	ImageGenerator image;
	nRetVal = image.Create(context);
	if (nRetVal)
		printf("Failed to create image generator: %s\n", xnGetStatusString(nRetVal));

	// Sync the DepthGenerator with the ImageGenerator
	nRetVal = depth.GetAlternativeViewPointCap().SetViewPoint(image);
	if (nRetVal)
		printf("Failed to match Depth and RGB points of view: %s\n", xnGetStatusString(nRetVal));

	// Set it to VGA maps at 30 FPS
	XnMapOutputMode mapMode;
	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES;
	mapMode.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(mapMode);

	// Make it start generating data
	nRetVal = context.StartGeneratingAll();

	// Check error code
	if (nRetVal)
		printf("Error: %s\n", xnGetStatusString(nRetVal));

	//create a OpenCv matrix
	CvMat* depthMetersMat = cvCreateMat(480, 640, CV_16UC1);
	IplImage *kinectDepthImage;
	kinectDepthImage = cvCreateImage( cvSize(640,480),16,1);
	IplImage *kinectDepthImage_raw= cvCreateImage( cvSize(640,480),16,1);

	IplImage rgbimg;

	XnPoint3D* pDepthPointSet = new XnPoint3D[ 640*480 ];


	// Main loop

	while (bShouldRun)
	{
		// Wait for new data to be available
		nRetVal = context.WaitOneUpdateAll(depth);

		if (nRetVal != XN_STATUS_OK)
		{
			printf("Failed updating data: %s\n", xnGetStatusString(nRetVal));
			continue;
		}

		// Take current depth map
		const XnDepthPixel* pDepthMap = depth.GetDepthMap();

		//Copy the depth values
		for (int y=0; y<XN_VGA_Y_RES; y++)
			for(int x=0;x<XN_VGA_X_RES;x++)
			{
				depthMetersMat->data.s[y * XN_VGA_X_RES + x ] = 20*pDepthMap[y * XN_VGA_X_RES + x];

				// Record the projective (pixel) coordinates; converting them to real-world
				// coordinates is shown in the sketch after this example.
				pDepthPointSet[y * XN_VGA_X_RES + x].X = (XnFloat) x;
				pDepthPointSet[y * XN_VGA_X_RES + x].Y = (XnFloat) y;
				pDepthPointSet[y * XN_VGA_X_RES + x].Z = pDepthMap[y * XN_VGA_X_RES + x];

			}
		
		cvGetImage(depthMetersMat, kinectDepthImage_raw);
		cvShowImage("Depth stream", kinectDepthImage_raw);

		unsigned char* picture_RGB = new unsigned char[XN_VGA_X_RES * XN_VGA_Y_RES * 3];
		// Initialize it with the retrieved RGB data
		memcpy(picture_RGB, (unsigned char*)image.GetRGB24ImageMap(), XN_VGA_Y_RES * XN_VGA_X_RES * 3);

		// Swap from RGB (OpenNI) to BGR (OpenCV) channel order
		for (int i = 0; i < XN_VGA_X_RES * XN_VGA_Y_RES; i++)
		{
			unsigned char temp = picture_RGB[i*3];
			picture_RGB[i*3] = picture_RGB[i*3+2];
			picture_RGB[i*3+2] = temp;
		}
		cv::Mat colorMatRes(XN_VGA_Y_RES, XN_VGA_X_RES, CV_8UC3, picture_RGB);
		rgbimg = colorMatRes; // Conversion from cv::Mat to IplImage format
		cvShowImage("Color stream", &rgbimg); // Display the RGB stream

		// Free memory
		delete[] picture_RGB;
		c = cvWaitKey(1);
		if (c == 27)
			bShouldRun = false; // Exit the main loop

	}

	// Clean-up
	cvDestroyWindow("Color stream");
	cvDestroyWindow("Depth stream");
	cvReleaseImage(&kinectDepthImage);
	cvReleaseImage(&kinectDepthImage_raw);
	cvReleaseMat(&depthMetersMat);
	delete[] pDepthPointSet;
	context.Shutdown();

	return 0;
}
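
// The capture loop above only stores projective (pixel) coordinates in pDepthPointSet.
// OpenNI can convert such a buffer into real-world millimetre coordinates in a single call;
// the helper below is a sketch added for illustration (it is not part of the original
// example), and the output buffer it fills is supplied by the caller.
static void projectiveToRealWorld(xn::DepthGenerator& depth,
                                  const XnPoint3D* projective,
                                  XnPoint3D* realWorld,
                                  XnUInt32 count)
{
	// Converts (x, y, depth) pixel triples into (X, Y, Z) in millimetres relative to the sensor.
	depth.ConvertProjectiveToRealWorld(count, projective, realWorld);
}
// Example use inside the loop:
//     XnPoint3D* pRealWorld = new XnPoint3D[640 * 480];
//     projectiveToRealWorld(depth, pDepthPointSet, pRealWorld, 640 * 480);
//     ...
//     delete[] pRealWorld;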