Example No. 1
void mixRGB_Depth()
{
	bool bShouldRun = true;
	int c;

	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	// Initialize context object
	nRetVal = context.Init();

	// Check error code
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	context.SetGlobalMirror(true);

	//Create Depth generator node
	DepthGenerator depth;
	nRetVal = depth.Create(context);
	// Check error code
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Create an ImageGenerator node
	ImageGenerator image;
	nRetVal = image.Create(context);
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Sync the DepthGenerator with the ImageGenerator
	nRetVal = depth.GetAlternativeViewPointCap().SetViewPoint(image);
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	//Set it to VGA maps at 30 fps
	XnMapOutputMode mapMode;
	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES;
	mapMode.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(mapMode);
	// Check error code
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Make it start generating data
	nRetVal = context.StartGeneratingAll();
	if (nRetVal)
		printf("Error: %s", xnGetStatusString(nRetVal));

	// Create an OpenCv matrix
	CvMat* depthMetersMat = cvCreateMat(480, 640, CV_16UC1);
	IplImage *kinectDepthImage;
	kinectDepthImage = cvCreateImage(cvSize(640,480), 16, 1);

	IplImage *rgbimg = cvCreateImageHeader(cvSize(640,480), 8,3);

	// Main loop
	while (bShouldRun)
	{
		//wait for new data to be available
		nRetVal = context.WaitOneUpdateAll(depth);
		if (nRetVal)
		{
			printf("Error: %s", xnGetStatusString(nRetVal));
			continue;
		}
		//Take current depth map
		const XnDepthPixel* pDepthMap = depth.GetDepthMap();

		// Scale the raw depth values (millimetres) so they are visible in the 16-bit display image
		for (int y=0; y<XN_VGA_Y_RES; y++)
		{
			for (int x=0; x<XN_VGA_X_RES; x++)
			{
				depthMetersMat->data.s[y*XN_VGA_X_RES+x] = 10*pDepthMap[y*XN_VGA_X_RES+x];
			}
		}

		cvGetImage(depthMetersMat, kinectDepthImage);

		//take current image
		const XnRGB24Pixel* pImage = image.GetRGB24ImageMap();
		// Process image data: wrap the OpenNI RGB buffer in the IplImage header (no copy).
		// Note: OpenNI delivers RGB while OpenCV expects BGR, so the channels appear swapped
		// unless they are converted first.
		XnRGB24Pixel* ucpImage = const_cast<XnRGB24Pixel*>(pImage);
		cvSetData(rgbimg, ucpImage, 640*3);
		cvShowImage("RGB", rgbimg);
		cvShowImage("Depth", kinectDepthImage);

		c = cvWaitKey(1);
		if (c == 27)
			bShouldRun = false;
	}

	// Clean-up
	cvReleaseMat(&depthMetersMat);
	cvReleaseImage(&kinectDepthImage);
	cvReleaseImageHeader(&rgbimg);
	context.Shutdown();
}
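A minimal caller sketch for the example above (an assumption, not part of the original snippet): it creates the highgui windows whose names match the cvShowImage calls and then runs the capture loop.

int main()
{
	// Window names are assumptions that match the cvShowImage calls in mixRGB_Depth().
	cvNamedWindow("RGB");
	cvNamedWindow("Depth");
	mixRGB_Depth();
	cvDestroyAllWindows();
	return 0;
}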
Example No. 2
int main(int argc, char *argv[])
{	
	//--------------------------------------------------------------------//
	//------------------------- SETUP REQUIRED NODES ---------------------//
	//--------------------------------------------------------------------//
	
	// Setup the command line parameters.
	setupParams(argc, argv);
	
	// Setup all the sockets.
	setupSockets();
    
	// Setup the capture socket server for Mac.
	#if (XN_PLATFORM == XN_PLATFORM_MACOSX)
		if(_featureDepthMapCapture || _featureRGBCapture)
		{
			if(_useSockets)
			{
				g_AS3Network = network();
				g_AS3Network.init(setupServer);
			}
		}
	#endif
	
	// Setup the status.
    XnStatus _status = XN_STATUS_OK;
    EnumerationErrors _errors;
    
    // Context Init and Add license.
	_status = _context.Init();
	CHECK_RC(_status, "AS3OpenNI :: Initialize context");
	_context.SetGlobalMirror(_mirror);
	
	// Fill in the PrimeSense vendor/key pair and register the license.
	strcpy(_license.strVendor, "PrimeSense");
	strcpy(_license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");

	_status = _context.AddLicense(_license);
	CHECK_RC(_status, "AS3OpenNI :: Added license");

	// Set it to VGA maps at 30 FPS
	_depthMode.nXRes = 640;
	_depthMode.nYRes = 480;
	_depthMode.nFPS = 30;
	
	// Depth map create.
	_status = _depth.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create depth generator");
	_status = _depth.SetMapOutputMode(_depthMode);
	
	// Image generator create.
	_status = _image.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create image generator");
	_status = _image.SetMapOutputMode(_depthMode);
	_status = _image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
	
	// Create the hands generator.
	_status = _hands.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create hands generator");
	_hands.SetSmoothing(0.1);

	// Create the gesture generator.
	_status = _gesture.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Create gesture generator");
	
	// Create user generator.
	_status = _userGenerator.Create(_context);
	CHECK_RC(_status, "AS3OpenNI :: Find user generator");
	
	// Create and initialize point tracker
	_sessionManager = new XnVSessionManager();
	_status = _sessionManager->Initialize(&_context, "Wave", "RaiseHand");
	
	if (_status != XN_STATUS_OK)
	{
		printf("AS3OpenNI :: Couldn't initialize the Session Manager: %s\n", xnGetStatusString(_status));
		CleanupExit();
	}
	_sessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress);
	
	// Start catching signals for quit indications
	CatchSignals(&_quit);
	
	//---------------------------------------------------------------//
	//------------------------- SETUP FEATURES ---------------------//
	//--------------------------------------------------------------//
	
	// Define the Wave and SinglePoint detectors.
	_waveDetector = new XnVWaveDetector();
	
	// SinglePoint detector.
	if(_featureSinglePoint) _waveDetector->RegisterPointUpdate(NULL, &OnPointUpdate);
	
	// Feature Gesture.
	if(_featureGesture)
	{
		// Wave detector.
		_waveDetector->RegisterWave(NULL, &OnWave);
		
		// Push detector.
		_pushDetector = new XnVPushDetector();
		_pushDetector->RegisterPush(NULL, &onPush);
	
		// Swipe detector.
		_swipeDetector = new XnVSwipeDetector();
		_swipeDetector->RegisterSwipeUp(NULL, &Swipe_SwipeUp);
		_swipeDetector->RegisterSwipeDown(NULL, &Swipe_SwipeDown);
		_swipeDetector->RegisterSwipeLeft(NULL, &Swipe_SwipeLeft);
		_swipeDetector->RegisterSwipeRight(NULL, &Swipe_SwipeRight);
	
		// Steady detector.
		_steadyDetector = new XnVSteadyDetector();
		_steadyDetector->RegisterSteady(NULL, &Steady_OnSteady);
	}
	
	// Feature Circle.
	if(_featureCircle)
	{
		// Circle detector.
		_circleDetector = new XnVCircleDetector();
		_circleDetector->RegisterCircle(NULL, &CircleCB);
		_circleDetector->RegisterNoCircle(NULL, &NoCircleCB);
		_circleDetector->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate);
		_circleDetector->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy);
	}
	
	// Feature Slider.
	if(_featureSlider)
	{
		// Left/Right slider.
		_leftRightSlider = new XnVSelectableSlider1D(3, 0, AXIS_X);
		_leftRightSlider->RegisterActivate(NULL, &LeftRightSlider_OnActivate);
		_leftRightSlider->RegisterDeactivate(NULL, &LeftRightSlider_OnDeactivate);
		_leftRightSlider->RegisterPrimaryPointCreate(NULL, &LeftRightSlider_OnPrimaryCreate);
		_leftRightSlider->RegisterPrimaryPointDestroy(NULL, &LeftRightSlider_OnPrimaryDestroy);
		_leftRightSlider->RegisterValueChange(NULL, &LeftRightSlider_OnValueChange);
		_leftRightSlider->SetValueChangeOnOffAxis(false);
		
		// Up/Down slider.
		_upDownSlider = new XnVSelectableSlider1D(3, 0, AXIS_Y);
		_upDownSlider->RegisterActivate(NULL, &UpDownSlider_OnActivate);
		_upDownSlider->RegisterDeactivate(NULL, &UpDownSlider_OnDeactivate);
		_upDownSlider->RegisterPrimaryPointCreate(NULL, &UpDownSlider_OnPrimaryCreate);
		_upDownSlider->RegisterPrimaryPointDestroy(NULL, &UpDownSlider_OnPrimaryDestroy);
		_upDownSlider->RegisterValueChange(NULL, &UpDownSlider_OnValueChange);
		_upDownSlider->SetValueChangeOnOffAxis(false);
		
		// In/Out slider.
		_inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z);
		_inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate);
		_inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate);
		_inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate);
		_inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy);
		_inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange);
		_inOutSlider->SetValueChangeOnOffAxis(false);
	}
	
	// Feature TrackPad.
	if(_featureTrackPad)
	{
		// Track Pad.
		if(trackpad_columns > 0 && trackpad_rows > 0)
		{
			_trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows);
		}
		else
		{
			_trackPad = new XnVSelectableSlider2D(4, 9);
		}
		
		_trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover);
		_trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect);
		_trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate);
		_trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy);
	}
	
	// Feature User Tracking.
	if(_featureUserTracking)
	{
		// Setup user generator callbacks.
		XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
		if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
		{
			printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n");
			return 1;
		}
		_userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
		
		// Setup Skeleton detection.
		_userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);
		if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration())
		{
			_needPose = true;
			if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
			{
				printf("AS3OpenNI :: Pose required, but not supported\n");
				return 1;
			}
			_userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
			_userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose);
		}
		_userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
	}
	
	// Create the broadcaster manager.
	_broadcaster = new XnVBroadcaster();
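	// Assumed wiring (not shown in this snippet): in NITE the detectors created above
	// only receive hand points once they are attached to the session, typically by
	// adding them to the broadcaster and attaching the broadcaster to the session manager.
	_broadcaster->AddListener(_waveDetector);
	if(_featureGesture)
	{
		_broadcaster->AddListener(_pushDetector);
		_broadcaster->AddListener(_swipeDetector);
		_broadcaster->AddListener(_steadyDetector);
	}
	if(_featureCircle) _broadcaster->AddListener(_circleDetector);
	_sessionManager->AddListener(_broadcaster);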
	
	// Start generating all.
	_status = _context.StartGeneratingAll();
	CHECK_RC(_status, "AS3OpenNI :: Start generating all");
	
	// Initialize the FPS calculator.
	_status = xnFPSInit(&xnFPS, 180);
	CHECK_RC(_status, "AS3OpenNI :: FPS Init");
	
	//----------------------------------------------------------------------//
	//------------------------- SETUP DISPLAY SUPPORT ---------------------//
	//--------------------------------------------------------------------//
	
	// Setup depth and image data.
	_depth.GetMetaData(_depthData);
	_image.GetMetaData(_imageData);
	
	// Hybrid mode isn't supported in this sample
	if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes())
	{
		printf ("AS3OpenNI :: The device depth and image resolution must be equal!\n");
		return 1;
	}

	// RGB is the only image format supported.
	if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		printf("AS3OpenNI :: The device image format must be RGB24\n");
		return 1;
	}
	
	// Setup the view points to match between the depth and image maps.
	if(_snapPixels) _depth.GetAlternativeViewPointCap().SetViewPoint(_image);
	
	//-------------------------------------------------------------//
	//------------------------- MAIN LOOP ------------------------//
	//-----------------------------------------------------------//
	
	// Setup the capture socket server for PC.
	#if (XN_PLATFORM == XN_PLATFORM_WIN32)
		if(_featureDepthMapCapture || _featureRGBCapture || _featureUserTracking)
		{
			if(_useSockets)
			{
				g_AS3Network = network();
				g_AS3Network.init(setupServer);
			}
		}
	#endif
	
	// Main loop
	while ((!_kbhit()) && (!_quit))
	{
		xnFPSMarkFrame(&xnFPS);
		_context.WaitAndUpdateAll();
		_sessionManager->Update(&_context);
		if(_featureDepthMapCapture) captureDepthMap(g_ucDepthBuffer);
		if(_featureRGBCapture) captureRGB(g_ucImageBuffer);
		#if (XN_PLATFORM == XN_PLATFORM_WIN32)
			if(_featureUserTracking) getPlayers();
		#else
			if(_featureUserTracking) renderSkeleton();
		#endif
	}
	
	CleanupExit();
}
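The example above relies on a CHECK_RC helper macro and on user-tracking callbacks (User_NewUser, User_LostUser, and friends) that are defined elsewhere in the project. Minimal sketches in the usual OpenNI-sample style follow; the bodies are assumptions, not the project's actual code.

// Assumed helper: print the OpenNI status string and bail out on failure.
#define CHECK_RC(rc, what)										\
	if ((rc) != XN_STATUS_OK)									\
	{															\
		printf("%s failed: %s\n", what, xnGetStatusString(rc));	\
		return rc;												\
	}

// Assumed callbacks; the signatures follow the xn::UserGenerator API.
void XN_CALLBACK_TYPE User_NewUser(xn::UserGenerator& generator, XnUserId nId, void* pCookie)
{
	printf("AS3OpenNI :: New user %d\n", nId);
	if (_needPose)
		generator.GetPoseDetectionCap().StartPoseDetection(_strPose, nId);
	else
		generator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}

void XN_CALLBACK_TYPE User_LostUser(xn::UserGenerator& generator, XnUserId nId, void* pCookie)
{
	printf("AS3OpenNI :: Lost user %d\n", nId);
}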
int main()
{
	//--------------------------------------------------- 
	bool bShouldRun = true;
	int c;

	XnStatus nRetVal = XN_STATUS_OK;
	Context context;

	// Initialize context object
	nRetVal = context.Init();

	// check error code
	if(nRetVal)
		printf("Error : %s", xnGetStatusString(nRetVal));

	context.SetGlobalMirror(true); //mirror image

	// Create a DepthGenerator node
	DepthGenerator depth;
	nRetVal = depth.Create(context);
	// check error code
	if(nRetVal)
		printf("Failed to create depth generator: %s\n", xnGetStatusString(nRetVal));

	// Create an ImageGenerator node
	ImageGenerator image;
	nRetVal = image.Create(context);
	if(nRetVal)
		printf("Failed to create image generator: %s\n", xnGetStatusString(nRetVal));

	// Match the depth map to the RGB camera's point of view (this is the call the check below refers to)
	nRetVal = depth.GetAlternativeViewPointCap().SetViewPoint(image);
	if(nRetVal)
		printf("Failed to match Depth and RGB points of view: %s\n", xnGetStatusString(nRetVal));

	// Set it to VGA maps at 30 FPS
	XnMapOutputMode mapMode;
	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES;
	mapMode.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(mapMode);

	// Make it start generating data
	nRetVal = context.StartGeneratingAll();

	//  check error code
	if(nRetVal)
		printf("Error : %s", xnGetStatusString(nRetVal));

	// Create an OpenCV matrix for the depth values and an image header for display
	CvMat* depthMetersMat = cvCreateMat(480, 640, CV_16UC1);
	IplImage *kinectDepthImage_raw = cvCreateImage(cvSize(640,480), 16, 1);

	IplImage rgbimg;

	XnPoint3D* pDepthPointSet = new XnPoint3D[ 640*480 ];


	// Main loop

	while (bShouldRun)
	{
		// Wait for new data to be available
		nRetVal = context.WaitOneUpdateAll(depth);

		if (nRetVal != XN_STATUS_OK)
		{
			printf("Failed updating data: %s\n", xnGetStatusString(nRetVal));
			continue;
		}

		// Take current depth map
		const XnDepthPixel* pDepthMap = depth.GetDepthMap();

		// Copy the depth values (scaled so they are visible in the 16-bit display image)
		for (int y=0; y<XN_VGA_Y_RES; y++)
		{
			for (int x=0; x<XN_VGA_X_RES; x++)
			{
				depthMetersMat->data.s[y * XN_VGA_X_RES + x] = 20*pDepthMap[y * XN_VGA_X_RES + x];

				// Store the projective coordinates (pixel x, pixel y, depth in mm)
				// so they can later be converted to camera (real-world) coordinates
				pDepthPointSet[y * XN_VGA_X_RES + x].X = (XnFloat) x;
				pDepthPointSet[y * XN_VGA_X_RES + x].Y = (XnFloat) y;
				pDepthPointSet[y * XN_VGA_X_RES + x].Z = pDepthMap[y * XN_VGA_X_RES + x];
			}
		}
		
		// Display the depth stream
		cvGetImage(depthMetersMat, kinectDepthImage_raw);
		cvShowImage("Depth stream", kinectDepthImage_raw);

		// Copy the current RGB frame out of the OpenNI buffer
		unsigned char* picture_RGB = new unsigned char[XN_VGA_X_RES * XN_VGA_Y_RES * 3];
		memcpy(picture_RGB, (unsigned char*)image.GetRGB24ImageMap(), XN_VGA_Y_RES * XN_VGA_X_RES * 3);

		// Swap channels: OpenNI delivers RGB, but OpenCV expects BGR
		for (int i = 0; i < XN_VGA_X_RES * XN_VGA_Y_RES; i++)
		{
			unsigned char temp = picture_RGB[i*3];
			picture_RGB[i*3] = picture_RGB[i*3+2];
			picture_RGB[i*3+2] = temp;
		}
		cv::Mat colorMatRes(XN_VGA_Y_RES, XN_VGA_X_RES, CV_8UC3, picture_RGB);
		rgbimg = colorMatRes; // Conversion from cv::Mat to IplImage format
		cvShowImage("Color stream", &rgbimg); // Display the RGB stream

		// Free memory
		delete[] picture_RGB;

		c = cvWaitKey(1);
		if (c == 27)
			bShouldRun = false; // Exit main loop
	}

	// Clean-up
	cvDestroyWindow("Color stream");
	cvDestroyWindow("Depth stream");
	cvReleaseImage(&kinectDepthImage_raw);
	cvReleaseMat(&depthMetersMat);
	delete[] pDepthPointSet;
	context.Shutdown();

	return 0;
}
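The loop above only stores projective coordinates (pixel x, pixel y, raw depth) in pDepthPointSet; the actual conversion to camera coordinates is not part of the snippet. A minimal sketch of that step, using OpenNI's ConvertProjectiveToRealWorld and assumed to run inside the main loop right after the copy loop:

		// Convert the collected projective points into real-world (camera) coordinates in mm.
		XnPoint3D* pRealWorldPointSet = new XnPoint3D[XN_VGA_X_RES * XN_VGA_Y_RES];
		nRetVal = depth.ConvertProjectiveToRealWorld(XN_VGA_X_RES * XN_VGA_Y_RES,
		                                             pDepthPointSet, pRealWorldPointSet);
		if (nRetVal != XN_STATUS_OK)
			printf("Failed to convert points: %s\n", xnGetStatusString(nRetVal));
		// ... use pRealWorldPointSet (e.g. to build a point cloud) ...
		delete[] pRealWorldPointSet;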