示例#1
0
void Camera::getFrame(vctDynamicVector<vct3> & points) {
	// Acquire a single depth frame from the RealSense camera and convert it
	// to a 3-D point cloud.  On return `points` holds exactly the vertices
	// whose depth is non-zero (invalid pixels report z == 0).
	// NOTE(review): a new PXCSenseManager is created on every call and the
	// previous one in `sm` is never released — presumably getFrame is meant
	// to be called once; confirm and move setup to an init method otherwise.

	sm = PXCSenseManager::CreateInstance();

	sm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30);

	pxcStatus sts = sm->Init();
	if (sts < PXC_STATUS_NO_ERROR) {
		std::cout << "DIDN\'T WORK" << std::endl;
		return;  // fix: previously fell through and dereferenced a dead pipeline
	}

	PXCCapture::Device *device = sm->QueryCaptureManager()->QueryDevice();
	device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);

	// 1000 — depth unit value; presumably micrometers per LSB (i.e. 1 mm).
	// The returned status code is printed for debugging.
	std::cout << device->SetDepthUnit(1000) << std::endl;

	PXCProjection *projection = device->CreateProjection();

	pxcStatus sts2 = sm->AcquireFrame(false);
	if (sts2 >= PXC_STATUS_NO_ERROR) {
		PXCCapture::Sample *sample = sm->QuerySample();
		PXCImage* image = (*sample)[streamType];
		PXCImage::ImageData data;
		image->AcquireAccess(PXCImage::ACCESS_READ, &data);
		PXCImage::ImageInfo dinfo = sample->depth->QueryInfo();

		const size_t numPixels = static_cast<size_t>(dinfo.width) * dinfo.height;
		points.SetSize(numPixels);

		// fix: was a raw `new PXCPoint3DF32[...]` that was never delete[]'d
		std::vector<PXCPoint3DF32> vertices(numPixels);
		projection->QueryVertices(image, vertices.data());

		// Compact valid vertices to the front of `points`.
		size_t c = 0;
		for (size_t i = 0; i < numPixels; i++) {
			const PXCPoint3DF32 & point = vertices[i];
			if (point.z != 0) {
				points[c++] = vct3(point.x, point.y, point.z);
			}
		}
		points.resize(c);
		image->ReleaseAccess(&data);
		sm->ReleaseFrame();  // fix: frame was never released; blocks the next AcquireFrame
	}
	projection->Release();
	std::cout << sts2 << std::endl;
}
void
CameraCalib::translatePoints(Feed type)
{
	// Translate the stored 2-D calibration points (pixel coords, with depth
	// sampled from the current depth image) into 3-D camera-space points and
	// hand them to DeviceSettings::SolvePose.  The translated points are
	// printed at the end for debugging.
	PXCProjection *projection = device->CreateProjection();

	std::vector<PXCPoint3DF32> translatedPoints(points.size());
	std::vector<PXCPoint3DF32> pointsDepth(points.size());
	if (type == DEPTH1 || type == DEPTH2) {

		PXCImage::ImageData depthData;
		if (depthImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &depthData) >= PXC_STATUS_NO_ERROR) {
			unsigned short *depthBuffer = (unsigned short*)depthData.planes[0];

			// NOTE(review): indexing assumes the depth row stride equals
			// IMG_WIDTH pixels; if depthData.pitches[0] / 2 != IMG_WIDTH this
			// samples the wrong row — confirm against the actual pitch.
			for (size_t i = 0; i < points.size(); i++) {
				pointsDepth[i].x = points[i].x;
				pointsDepth[i].y = points[i].y;

				int zloc = points[i].y * IMG_WIDTH + points[i].x;
				pointsDepth[i].z = depthBuffer[zloc];
				std::cout << "D: " << depthBuffer[zloc] << std::endl;
			}

			depthImage->ReleaseAccess(&depthData);
			// Presumably `points` are color-image coordinates, hence
			// ProjectColorToCamera rather than the commented-out
			// MapDepthToColor — confirm which image space the points live in.
			//pxcStatus status = projection->MapDepthToColor(points.size(), &pointsDepth[0], &translatedPoints[0]);
			pxcStatus status = projection->ProjectColorToCamera(points.size(), &pointsDepth[0], &translatedPoints[0]);
			if (status >= PXC_STATUS_NO_ERROR) {
				DeviceSettings::SolvePose(translatedPoints, points);
			}
			else {
				std::cout << "unsuccessful translation" << std::endl;
			}
		}
		else {
			std::cout << "incorrect mode - switch to depth" << std::endl;
		}
	}

	// Debug dump; prints zero-initialized values when no translation ran.
	for (size_t i = 0; i < points.size(); i++) {
		//std::cout << pointsDepth[i].x << ", " << pointsDepth[i].y << ", " << pointsDepth[i].z << std::endl;
		std::cout << translatedPoints[i].x << ", " << translatedPoints[i].y << ", " << translatedPoints[i].z<< std::endl;
	}

	projection->Release();  // fix: the projection instance was leaked on every call
}
示例#3
0
void ITA_ForcesApp::cleanup()
{
	// Tear down the RealSense session: close the device stream, then
	// best-effort release the coordinate mapper.  No-op when not running.
	if (!mIsRunning)
		return;

	mRS->Close();
	try {
		mMapper->Release();
	}
	catch (...) {
		// Deliberately swallow teardown failures — we are shutting down anyway.
	}
}
示例#4
0
void ITA_ForcesApp::getRSCursorPos()
{
	// Poll the RealSense hand module for cursor positions and record each
	// detected cursor as an input point in mMousePos (unless mouse input is
	// active).  Also updates mHasHand, mNumInputs, mIdle and mForceMode.
	mHasHand = false;
	mMousePos.clear();
	auto numHands = mHandData->QueryNumberOfHands();
	pxcUID handId;

	mNumInputs = 0.0f;
	for (int i = 0; i < numHands; ++i)
	{
		if (mHandData->QueryHandId(PXCHandData::ACCESS_ORDER_BY_ID, i, handId) >= PXC_STATUS_NO_ERROR)
		{
			PXCHandData::IHand *hand;
			if (mHandData->QueryHandDataById(handId, hand) >= PXC_STATUS_NO_ERROR)
			{
				if (hand->HasCursor())
				{
					PXCHandData::ICursor *cursor;
					if (hand->QueryCursor(cursor) >= PXC_STATUS_NO_ERROR)
					{
						auto cursorPos = cursor->QueryPointImage();

						// Map the depth-space cursor into color-image space.
						// Presumably z arrives in meters and the mapper wants
						// millimeters — hence *1000; confirm against SDK docs.
						PXCPoint3DF32 uvz[1]{ cursorPos };
						uvz[0].z *= 1000.0f;
						PXCPointF32 ijj[1];
						auto stat = mMapper->MapDepthToColor(1, uvz, ijj);
						// fix: `stat` was ignored and `ijj` (uninitialized)
						// was consumed even when the mapping failed.
						if (stat >= PXC_STATUS_NO_ERROR)
						{
							if (!mMouseInput)
								mMousePos.push_back( vec3(remapPos(vec2(ijj[0].x, ijj[0].y)), 0.0));

							mHasHand = true;
							mNumInputs += 1.0f;
						}
					}
				}
			}
		}
	}

	// Idle when no hands are visible this frame.
	mIdle = (numHands < 1);

	mForceMode = mRepulsing ? FORCE_R : FORCE_A;
}
示例#5
0
// Construct the RealSense image source: open the camera pipeline, query the
// factory calibration (intrinsics of both streams plus the color->depth
// extrinsic transform) and store it into ImageSourceEngine::calib.
// Two mutually exclusive build paths: legacy Intel RSSDK (PXC*) when SDK20
// is not defined, librealsense2 (rs2::) otherwise.
RealsenseEngine::RealsenseEngine(const char *calibFilename)//, Vector2i requested_imageSize_rgb, Vector2i requested_imageSize_d)
	: ImageSourceEngine(calibFilename)
{

	// NOTE(review): raw `new` — presumably released in the destructor; confirm.
	data = new PrivateData();

	
#ifndef SDK20	
	// ---------- Legacy RSSDK (PXC*) path ----------
	data->pp = PXCSenseManager::CreateInstance();

	/* Sets file recording or playback */
	data->cm = data->pp->QueryCaptureManager();


#ifndef SDK2016R3
	// Blob tracking module (only available on SDKs before 2016 R3).
	data->pp->EnableBlob(0);	
	PXCBlobModule *blobModule = data->pp->QueryBlob();
	data-> blobData = blobModule->CreateOutput();
	data->blobConfiguration = blobModule->CreateActiveConfiguration();

	data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_COLOR, 640, 480, 30);
	// Enable blob color mapping
	data->blobConfiguration->EnableColorMapping(true);
	data->blobConfiguration->EnableContourExtraction(true);
	data->blobConfiguration->EnableSegmentationImage(true);

	data->blobConfiguration->ApplyChanges();
#endif


	data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH, 640, 480, 30);

	/* Initializes the pipeline */
	data->sts = data->pp->Init();
	if (data->sts<PXC_STATUS_NO_ERROR) {
		wprintf_s(L"Failed to locate any video stream(s)\n");
		data->pp->Release();
		// NOTE(review): `data` is not freed here and the object is left
		// half-constructed with no way for callers to detect the failure.
		return ;
		//return sts;
	}

#ifndef SDK2016R3
	data->blobConfiguration->Update();
#endif

	/* Reset all properties */
	PXCCapture::Device *device = data->pp->QueryCaptureManager()->QueryDevice();
	device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);

	PXCProjection *projection = device->CreateProjection();
	/* Get a calibration instance */


	PXCCalibration *calib = projection-> QueryInstance<PXCCalibration>();


	// (sic) "clibration": per-stream focal length / principal point;
	// `transformation` is the stream's rotation + translation.
	PXCCalibration::StreamCalibration clibration;
	PXCCalibration::StreamTransform transformation;
	pxcStatus ret= calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_COLOR, &clibration, &transformation);

	PXCSizeI32 s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_COLOR);

	// Color intrinsics: fx, fy, cx, cy plus the negotiated stream resolution.
	ImageSourceEngine::calib.intrinsics_rgb.SetFrom(clibration.focalLength.x, clibration.focalLength.y, clibration.principalPoint.x, clibration.principalPoint.y,s.width,s.height);
	// Build a 4x4 rigid transform from the 3x3 rotation and the translation.
	// NOTE(review): assumes Matrix4f.mRC addresses row R, column C (m03/m13/m23
	// receive the translation) — confirm the Matrix4f storage convention.
	Matrix4f trans;
	trans.setIdentity();
	trans.m00 = transformation.rotation[0][0];
	trans.m01 = transformation.rotation[0][1];
	trans.m02 = transformation.rotation[0][2];	
	trans.m10 = transformation.rotation[1][0];
	trans.m11 = transformation.rotation[1][1];
	trans.m12 = transformation.rotation[1][2];
	trans.m20 = transformation.rotation[2][0];
	trans.m21 = transformation.rotation[2][1];
	trans.m22 = transformation.rotation[2][2];
	trans.m03 = transformation.translation[0];
	trans.m13 = transformation.translation[1];
	trans.m23 = transformation.translation[2];



	ImageSourceEngine::calib.trafo_rgb_to_depth.SetFrom(trans);

	s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_DEPTH);

	// Depth intrinsics (reuses `clibration`; `transformation` is ignored here).
	 ret = calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_DEPTH, &clibration, &transformation);
	 ImageSourceEngine::calib.intrinsics_d.SetFrom(clibration.focalLength.x, clibration.focalLength.y, clibration.principalPoint.x, clibration.principalPoint.y, s.width, s.height);

	 /* Release the interface */
		projection->Release();



	/* Set mirror mode */
	//if (cmdl.m_bMirror) {
	//	device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_HORIZONTAL);
	//}
	//else {
	//	device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_DISABLED);
	//}
#else

	// ---------- librealsense2 (SDK 2.0) path ----------
	// Start a pipeline with fixed 640x480 @ 30fps color (BGR8) + depth (Z16).
	rs2::config cfg;
	cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
	cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
	data->profile =  data->pipe.start(cfg);
	
	//rs2::video_stream_profile color_stream= data->profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();
	//rs2::video_stream_profile depth_stream = data->profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();


	//rs2_extrinsics extri=   depth_stream.get_extrinsics_to(color_stream);

	//rs2_intrinsics intri = depth_stream.get_intrinsics();

	// Sample calibration values captured from a debugger session, kept for
	// reference:
	//  depth
	//	ppx	321.966583	float
	//	ppy	246.107162	float
	//	fx	475.193237	float
	//	fy	475.193146	float
	//
	//  color
	//	ppx	304.000061	float
	//	ppy	223.181778	float
	//	fx	615.904358	float
	//	fy	615.904419	float

	//-		rotation	0x000000846513f198 {0.999991775, -0.00406141346, -0.000183502605, 0.00406062370, 0.999983311, -0.00411631726, ...}	float[9]
	 //   [0]	0.999991775	float
		//[1] - 0.00406141346	float
		//[2] - 0.000183502605	float
		//[3]	0.00406062370	float
		//[4]	0.999983311	float
		//[5] - 0.00411631726	float
		//[6]	0.000200217590	float
		//[7]	0.00411553821	float
		//[8]	0.999991536	float
		//- translation	0x000000846513f1bc {0.0247000027, 8.39376517e-05, 0.00402066438}	float[3]
		//[0]	0.0247000027	float
		//[1]	8.39376517e-05	float
		//[2]	0.00402066438	float




	//data->depth_scale = get_depth_scale(data->profile.get_device());

	//Pipeline could choose a device that does not have a color stream
	//If there is no color stream, choose to align depth to another stream


	// Pick the stream that depth frames will later be aligned to.
	data->align_to = find_stream_to_align(data->profile.get_streams());

	// Create a rs2::align object.
	// rs2::align allows us to perform alignment of depth frames to others frames
	//The "align_to" is the stream type to which we plan to align depth frames.

//	data->align= &rs2::align(data->align_to);



	// Define a variable for controlling the distance to clip

	




#endif


}