Example #1
// Detects the hand-wave gesture and prints the primary hand's world position.
// Assumes the PCSDK sample utilities (util_pipeline.h) are on the include path.
#include "util_pipeline.h"
#include <iostream>
using namespace std;

int main(int argc, wchar_t* argv[]) {
	
	UtilPipeline pipeline;
	PXCGesture * gesture;
	
	pipeline.EnableGesture();
	pipeline.Init();


	while(1)
	{
		if(pipeline.AcquireFrame(false))
		{
			gesture = pipeline.QueryGesture();

			// look for the wave gesture for the primary hand
			PXCGesture::Gesture handGesture;
			int i=0;
			while(gesture->QueryGestureData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, i++, &handGesture) != PXC_STATUS_ITEM_UNAVAILABLE)
				if(handGesture.label == PXCGesture::Gesture::LABEL_HAND_WAVE)
					cout << "\nwave";
			
			// get the node data for the whole primary hand (the one that shows up in the view first)
			PXCGesture::GeoNode handNode;
			if(gesture->QueryNodeData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, &handNode) != PXC_STATUS_ITEM_UNAVAILABLE)
				cout << "\nPrimary hand at x = " << handNode.positionWorld.x << " y = " << handNode.positionWorld.y << " z = " << handNode.positionWorld.z;

			// we must release the frame
			pipeline.ReleaseFrame();
		}
	}
}
Example #2
// Prints the location of the first detected face and its six landmark points
// (eye and mouth corners).
// Assumes the PCSDK sample utilities (util_pipeline.h) are on the include path.
#include "util_pipeline.h"
#include <iostream>
#include <map>
#include <string>
using namespace std;

int main(int argc, wchar_t* argv[]) {

	map<PXCFaceAnalysis::Landmark::Label, string> faceLandmarkNames;
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_LEFT_EYE_INNER_CORNER] = "left eye inner";
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_LEFT_EYE_OUTER_CORNER] = "left eye outer";
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_RIGHT_EYE_INNER_CORNER] = "right eye inner";
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_RIGHT_EYE_OUTER_CORNER] = "right eye outer";
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_MOUTH_LEFT_CORNER] = "mouth left";
	faceLandmarkNames[PXCFaceAnalysis::Landmark::LABEL_MOUTH_RIGHT_CORNER] = "mouth right";


	UtilPipeline pipeline;
	PXCFaceAnalysis *faceAnalysis;

	pipeline.EnableFaceLocation();
	pipeline.EnableFaceLandmark();
	pipeline.Init();


	while(1)
	{
		if(pipeline.AcquireFrame(false))
		{
			faceAnalysis = pipeline.QueryFace();

			// get the face id for the first face
			pxcUID faceId;
			if(faceAnalysis->QueryFace(0, &faceId) != PXC_STATUS_ITEM_UNAVAILABLE)
			{
				// get the face location
				PXCFaceAnalysis::Detection *detector = faceAnalysis->DynamicCast<PXCFaceAnalysis::Detection>();
				PXCFaceAnalysis::Detection::Data faceData;
				if(detector->QueryData(faceId, &faceData) != PXC_STATUS_ITEM_UNAVAILABLE)
					cout << "\nFace " << faceId << " at location " << faceData.rectangle.x << " " << faceData.rectangle.y;

				PXCFaceAnalysis::Landmark *landmarkDetector = faceAnalysis->DynamicCast<PXCFaceAnalysis::Landmark>();
				PXCFaceAnalysis::Landmark::LandmarkData landmarkData;
				int i=0;
				while(landmarkDetector->QueryLandmarkData(faceId, PXCFaceAnalysis::Landmark::LABEL_6POINTS, i++, &landmarkData) != PXC_STATUS_ITEM_UNAVAILABLE)
					cout << "\nFace landmark " << faceLandmarkNames[landmarkData.label] << " at x = " << landmarkData.position.x << " y = " << landmarkData.position.y;
			}

			pipeline.ReleaseFrame();
		}
	}
}
Example #3
// Renders the synchronized color and depth streams in two UtilRender windows.
// Assumes the PCSDK sample utilities (util_pipeline.h, util_render.h) are on the include path.
#include "util_pipeline.h"
#include "util_render.h"
#include <tchar.h>

int _tmain(int argc, _TCHAR* argv[])
{
	UtilPipeline pp;
	pp.EnableImage(PXCImage::COLOR_FORMAT_RGB32, 640, 480);
	pp.EnableImage(PXCImage::COLOR_FORMAT_DEPTH, 320, 240);
	if(!pp.Init()){
		return -1;
	}
	UtilRender colorRender(L"Color Render");
	UtilRender depthRender(L"Depth Render");
	while(true){
		if (!pp.AcquireFrame(true)) break;
		PXCImage* rgbImage = pp.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
		PXCImage* depthImage = pp.QueryImage(PXCImage::IMAGE_TYPE_DEPTH);
		if (!colorRender.RenderFrame(rgbImage)) break;
		if (!depthRender.RenderFrame(depthImage)) break;
		pp.ReleaseFrame();
	}
	pp.Close();
	return 0;
}
Example #4
// Runs face analysis asynchronously through UtilCapture while a separate
// UtilPipeline feeds the color stream into OpenCV, then crops a face region
// between the first two reported landmarks and shows it in a second window.
// Assumes the PCSDK sample utilities (util_pipeline.h, util_capture.h,
// util_render.h) and OpenCV 2.x are on the include path.
#include "util_pipeline.h"
#include "util_capture.h"
#include "util_render.h"
#include <opencv2/opencv.hpp>
#include <tchar.h>
#include <iostream>
using namespace cv;
using namespace std;

int _tmain(int argc, _TCHAR* argv[])
{
	//Initializing objects for analysis
	//Creating the session
	PXCSmartPtr<PXCSession> session;
	PXCSession_Create(&session);
	//Creating face analysis object
	PXCFaceAnalysis *face=0; 
	session->CreateImpl(PXCFaceAnalysis::CUID,(void**)&face);
	//Initializing profile info
	PXCFaceAnalysis::ProfileInfo pinfo; 
	face->QueryProfile(0,&pinfo); 
	//Starting capture
	UtilCapture capture(session); 
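	//LocateStreams picks a capture device and stream configuration that satisfies the face module's input requirements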
	capture.LocateStreams(&pinfo.inputs); 
	face->SetProfile(&pinfo); 
	//Creating face detection module
	PXCFaceAnalysis::Detection *det=face->DynamicCast<PXCFaceAnalysis::Detection>(); 
	PXCFaceAnalysis::Detection::ProfileInfo dinfo = {0}; 
	det->QueryProfile(0,&dinfo); 
	det->SetProfile(&dinfo); 
	//Creating landmark module 
	PXCFaceAnalysis::Landmark *landmark=face->DynamicCast<PXCFaceAnalysis::Landmark>();
	PXCFaceAnalysis::Landmark::ProfileInfo linfo = {0};
	landmark->QueryProfile(1,&linfo); 
	landmark->SetProfile(&linfo);
	PXCFaceAnalysis::Landmark::LandmarkData ldata[7];
	//Declaring Detection and Landmark data objects for analysis
	PXCFaceAnalysis::Detection::Data data; 
	PXCFaceAnalysis::Landmark::PoseData pdata;
	//Storage containers for images
	PXCImage* mcImage;
	PXCImage::ImageData mCImageData; //Color Image data
	PXCSmartArray<PXCImage> images; 
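	//Two sync points: slot 0 is signalled by ReadStreamAsync, slot 1 by ProcessImageAsync (see the capture loop below)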
	PXCSmartSPArray sps(2);



	UtilPipeline pipeline;
	pipeline.EnableImage(PXCImage::COLOR_FORMAT_RGB24,640,480);
	pipeline.EnableImage(PXCImage::COLOR_FORMAT_DEPTH,320,240); //depth resolution 320,240
	pipeline.Init();
	UtilRender color_render(L"Color Stream");
	UtilRender depth_render(L"Depth Stream");

	///////////// OPENCV
	IplImage *image=0;
	CvSize gab_size;
	gab_size.height=480;
	gab_size.width=640;
	image=cvCreateImage(gab_size,8,3);

	PXCImage::ImageData idata;

	unsigned char *rgb_data;
	PXCImage::ImageInfo rgb_info;

	Mat colorMat;
	namedWindow("Test");
	namedWindow("Test2");
	///////
	for (;;) {
		capture.ReadStreamAsync(images.ReleaseRefs(),sps.ReleaseRef(0)); 
		face->ProcessImageAsync(images,sps.ReleaseRef(1)); 
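		//SynchronizeEx blocks until both the asynchronous capture and the face analysis have completed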
		sps.SynchronizeEx(); 

		if (!pipeline.AcquireFrame(true)) break;
		PXCImage *color_image=pipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);

		color_image->AcquireAccess(PXCImage::ACCESS_READ_WRITE,PXCImage::COLOR_FORMAT_RGB24,&idata); 

		rgb_data=idata.planes[0];


		// copy pixel by pixel, honouring the source pitch and the IplImage row stride
		for(int y=0; y<480; y++)
		{
			for(int x=0; x<640; x++)
			{
				for(int k=0; k<3 ; k++)
				{
					image->imageData[y*image->widthStep + x*3 + k] = rgb_data[y*idata.pitches[0] + x*3 + k];
				}
			}
		}

		color_image->ReleaseAccess(&idata);
		/*
		cvShowImage("rgb_cv", image);
		cvShowImage("depth_cv2", depth);
		*/
		colorMat = image;	// wrap the IplImage in a cv::Mat header (no data copy)



		// Tracking or recognition results are ready 
		pxcUID fid; pxcU64 ts; 
		//cout << "QUERY FACE" << endl;
		if (face->QueryFace(0,&fid,&ts)<PXC_STATUS_NO_ERROR) { pipeline.ReleaseFrame(); continue; }	// release the frame before skipping, or the next AcquireFrame stalls
		landmark->QueryLandmarkData(fid, linfo.labels, &ldata[0]);
		landmark->QueryPoseData(fid, &pdata);
		det->QueryData(fid,&data); 
		//cout << "DATA   " << data.confidence << " " << data.viewAngle << " " << data.rectangle.x << " " << data.rectangle.y << endl;
		//cout << "PDATA  " << pdata.pitch << " " << pdata.roll << " " << pdata.yaw << endl;
						
		//cout << "LDATA ";
		for (int j = 0; j < 7; j++){
			//cout << ldata[j].position.x << " " << ldata[j].position.y << " " << ldata[j].position.z << endl;
		}

		//Point center( ldata[6].position.x, ldata[6].position.y);
		//ellipse(colorMat, center, Size( 10, 10), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);


		cout << ldata[0].position.x << " " << ldata[0].position.y << " | " << ldata[1].position.x << " " << ldata[1].position.y << endl;
		//Point2f start1(ldata[0].position.x, ldata[0].position.y);
		//Point2f end1(ldata[1].position.x, ldata[1].position.y);

		//Point2f start2(ldata[2].position.x, ldata[2].position.y);
		//Point2f end2(ldata[3].position.x, ldata[3].position.y);

		//int rad = abs((ldata[0].position.x+ldata[1].position.x)/2 - ldata[0].position.x);

		//Point2f mid1((ldata[0].position.x+ldata[1].position.x)/2, (ldata[0].position.y+ldata[1].position.y)/2);
		//ellipse(colorMat, mid1, Size( 50, 50), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);

		Point2f corner1(ldata[0].position.x, ldata[0].position.y+10);
		Point2f corner2(ldata[1].position.x, ldata[1].position.y-10);

		//rectangle(colorMat, corner1, corner2, Scalar(0, 255, 255), 2, 8);

		Rect temp(corner1, corner2);
		//colorMat(temp);


		//line(colorMat, start1, end1, Scalar(255,0,255), 2, 8);
		//line(colorMat, start2, end2, Scalar(255,0,255), 2, 8);

		imshow("Test", colorMat);
		Mat frame_gray;
		cvtColor(colorMat, frame_gray, CV_BGR2GRAY);
		equalizeHist(frame_gray, frame_gray);
		Rect roi = temp & Rect(0, 0, frame_gray.cols, frame_gray.rows);	// clamp the crop to the image bounds
		Mat faceROI = frame_gray(roi);
		if (!faceROI.empty()) imshow("Test2", faceROI);
				
		if( cvWaitKey(10) >= 0 )
			break;
		// ReleaseFrame must run on every iteration, otherwise AcquireFrame stalls on the next pass
		pipeline.ReleaseFrame();

	}
	
	cvReleaseImage(&image);
	pipeline.Close();
	return 0;
}
Example #5
// Captures color and depth frames and, every 150th frame, dumps the raw color
// buffer and a depth map (remapped into color-image space when MAP_TO_COLOR is
// set) to .bin files. dump2file() and knn() are helpers defined elsewhere in
// the project.
// Assumes the PCSDK sample utilities (util_pipeline.h, util_render.h) are on
// the include path.
#include "util_pipeline.h"
#include "util_render.h"
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <cassert>
#include <algorithm>
using namespace std;

int main(int argc, char* argv[]) {
	// use pipeline for aligned image
	UtilPipeline pflt;
	pflt.QueryCapture()->SetFilter(PXCCapture::Device::PROPERTY_DEPTH_SMOOTHING,true);	// use smoothed depth
	pflt.EnableImage(PXCImage::COLOR_FORMAT_RGB24);	// RGB24 (3 bytes/pixel): the raw-buffer copy and the assert below assume this format
	pflt.EnableImage(PXCImage::COLOR_FORMAT_DEPTH);
	pflt.Init();


	PXCSession* session = pflt.QuerySession();
	PXCCapture::Device *dev = pflt.QueryCapture()->QueryDevice();

	UtilRender color_render(L"Color Stream");
	UtilRender depth_render(L"Depth Stream");

	for (int i=0;;i++) {
		if (!pflt.AcquireFrame(true)) break;
		PXCImage *color_image=pflt.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
		PXCImage *depth_image=pflt.QueryImage(PXCImage::IMAGE_TYPE_DEPTH);
		
		// save the captured images
		if( i % 150 == 0 ) {
			stringstream ss;
			ss << (i / 150);
			string idxstr = ss.str();

			cout << "Capturing image #" << idxstr << endl;

			// use this to get raw buffer
			PXCImage::ImageData rgbData;
			PXCImage::ImageInfo rgbinfo;
			color_image->QueryInfo(&rgbinfo);
			assert( rgbinfo.format == PXCImage::COLOR_FORMAT_RGB24 );
			while( 1 ) {
				if( color_image->AcquireAccess(PXCImage::ACCESS_READ, &rgbData) == PXC_STATUS_NO_ERROR ) break;
			}


			// obtain the bgr image
#if 0
			vector<unsigned char> rgbimg(rgbinfo.width*rgbinfo.height*3);
			for(int i=0;i<rgbinfo.height;i++)
				memcpy((&rgbimg[0])+i*rgbinfo.width*3, rgbData.planes[0]+i*rgbData.pitches[0], rgbinfo.width*3*sizeof(unsigned char));
#else
			vector<unsigned char> rgbimg(rgbData.planes[0], rgbData.planes[0]+rgbinfo.width*rgbinfo.height*3);
#endif

			// save it to file
			dump2file<unsigned char>("color_" + idxstr + ".bin", rgbimg, rgbinfo.width, rgbinfo.height, 3);

			color_image->ReleaseAccess(&rgbData);

			PXCImage::ImageData depthData;
			PXCImage::ImageInfo depthinfo;
			depth_image->QueryInfo(&depthinfo);
			assert( depthinfo.format == PXCImage::COLOR_FORMAT_DEPTH );
			while( 1 ) {
				if( depth_image->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_DEPTH, &depthData) == PXC_STATUS_NO_ERROR ) break;
			}

			// obtain the depth image
			short* src  = (short*) depthData.planes[0];		
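			// each depth sample is a 16-bit value in millimetres (hence the 150-2000 mm range below)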

			float minDistance = 150;
			float maxDistance = 2000;
			float scaleZ = 1.0f;

#define MAP_TO_COLOR 1
#if MAP_TO_COLOR
			vector<float> depthimg(rgbinfo.width*rgbinfo.height, -1.0);

			// mapping from depth to color image, the mapped depth scatter around the color image space
			float *uvmap = (float*)depthData.planes[2];
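			// planes[2] stores one normalized (u,v) pair per depth pixel; scaling by the
			// color resolution converts it to pixel coordinates in the color image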
			// process the image
			for (int i=0, idx=0; i<depthinfo.height; i++) {
				for (int j=0; j<depthinfo.width; j++, idx++) {
					float depth = (float)src[0];

					int x = (int)(uvmap[idx*2] * rgbinfo.width);
					int y = (int)(uvmap[idx*2+1] * rgbinfo.height);

					// guard against UV coordinates that fall outside the color image
					// (invalid or saturated depth pixels can map out of range)
					if ( x < 0 || x >= (int)rgbinfo.width || y < 0 || y >= (int)rgbinfo.height ) {
						src++;
						continue;
					}

					int cidx = y * rgbinfo.width + x;

					if ( depth < minDistance || depth > maxDistance ) { 
						// mark as bad point
						depthimg[cidx] = -1.0;
					}
					else {

						// normalize depth, not necessary
						//depth = (depth-minDistance) / (maxDistance-minDistance);
						if( depthimg[cidx] < 0 ) depthimg[cidx] = depth;
						else depthimg[cidx] = min(depth, depthimg[cidx]);
					}
					src ++;
				}
			}

			// apply knn filters to the sparse depth map, with decreasing window size
			depthimg = knn(depthimg, rgbinfo.width, rgbinfo.height, 3);
			depthimg = knn(depthimg, rgbinfo.width, rgbinfo.height, 2);
			depthimg = knn(depthimg, rgbinfo.width, rgbinfo.height, 1);
			// save it to file
			dump2file<float>("depth_" + idxstr + ".bin", depthimg, rgbinfo.width, rgbinfo.height, 1);
#else
			vector<float> depthimg(depthinfo.width*depthinfo.height, -1);

			// keep the depth map in its native depth-image resolution (no remapping to the color image in this branch)
			// process the image
			for (int i=0, idx=0; i<depthinfo.height; i++) {
				for (int j=0; j<depthinfo.width; j++, idx++) {
					float depth = (float)src[0];

					if ( depth < minDistance || depth > maxDistance ) { 
						// mark as bad point
						depth = -1.0;
					}

					// normalize depth, not necessary
					//depth = (depth-minDistance) / (maxDistance-minDistance);
					depthimg[idx] = depth;
					src ++;
				}
			}
			// save it to file
			dump2file<float>("depth_" + idxstr + ".bin", depthimg, depthinfo.width, depthinfo.height, 1);
#endif

			depth_image->ReleaseAccess(&depthData);
		}

		if (!color_render.RenderFrame(color_image)) break;
		if (!depth_render.RenderFrame(depth_image)) break;

		pflt.ReleaseFrame();
	}