Пример #1
0
void main() {
	// Create the session manager that owns the camera pipeline.
	PXCSenseManager *sense_mgr = PXCSenseManager::CreateInstance();
	if (!sense_mgr) {
		// CreateInstance() returns NULL when the RealSense runtime is absent.
		printf_s("Unable to create the SenseManager\n");
		return;
	}

	// Select the streams. Note that if the format of the stream is not supported, the Init() will fail
	sense_mgr->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30);
	sense_mgr->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480, 30);

	// Start the camera. If the stream size is wrong, the following will fail
	pxcStatus status = sense_mgr->Init();
	if (PXC_STATUS_NO_ERROR != status)
	{
		printf_s("Status is %d\n", status);
		sense_mgr->Release();	// BUGFIX: the manager was leaked on this error path
		return;
	}

	// Processing the image
	for (;;) {
		// AcquireFrame(true) blocks until samples for all enabled streams are ready
		if (sense_mgr->AcquireFrame(true) < PXC_STATUS_NO_ERROR) break;

		// Retrieve the sample; its images are valid until ReleaseFrame()
		PXCCapture::Sample *sample = sense_mgr->QuerySample();

		// Work on the depth image (the color stream is in sample->color)
		PXCImage *image = sample->depth;
		PXCImage::ImageInfo imageInfo = image->QueryInfo();

		// Release the frame so the pipeline can go fetch the next sample
		sense_mgr->ReleaseFrame();
	}

	// Close down and free the pipeline
	sense_mgr->Release();
}
Пример #2
0
// Converts an OpenCV matrix into a new SDK-owned PXCImage.
// Supported Mat types: CV_8UC1 (Y8), CV_8UC3 (RGB24), CV_32FC1 (DEPTH_F32).
// Returns NULL on an unsupported type or SDK failure; caller must Release()
// the returned image.
PXCImage * CVMat2PXCImage(cv::Mat cvImage)
{
	PXCImage::ImageInfo iinfo;
	memset(&iinfo, 0, sizeof(iinfo));
	iinfo.width = cvImage.cols;
	iinfo.height = cvImage.rows;

	// Map the OpenCV element type onto the matching PXC pixel format.
	PXCImage::PixelFormat format;
	int type = cvImage.type();
	if (type == CV_8UC1)
		format = PXCImage::PIXEL_FORMAT_Y8;
	else if (type == CV_8UC3)
		format = PXCImage::PIXEL_FORMAT_RGB24;
	else if (type == CV_32FC1)
		format = PXCImage::PIXEL_FORMAT_DEPTH_F32;
	else
		return NULL;	// BUGFIX: `format` was previously left uninitialized (UB) here

	iinfo.format = format;

	PXCImage *pxcImage = pxcSenseManager->QuerySession()->CreateImage(&iinfo);
	if (!pxcImage)
		return NULL;

	PXCImage::ImageData data;
	if (pxcImage->AcquireAccess(PXCImage::ACCESS_WRITE, format, &data) < PXC_STATUS_NO_ERROR)
	{
		pxcImage->Release();
		return NULL;
	}

	// BUGFIX: copy the pixels into the SDK buffer. Re-pointing
	// data.planes[0] at cvImage.data (as the old code did) transfers no
	// pixel data into the image. Copy row by row and honor data.pitches[0],
	// since SDK rows may be padded.
	const size_t rowBytes = (size_t)cvImage.cols * cvImage.elemSize();
	for (int y = 0; y < cvImage.rows; ++y)
		memcpy(data.planes[0] + y * data.pitches[0], cvImage.ptr(y), rowBytes);

	pxcImage->ReleaseAccess(&data);
	return pxcImage;
}
Пример #3
0
// Grabs one depth frame and fills `points` with the valid 3D vertices
// (camera coordinates) projected from the depth image.
void Camera::getFrame(vctDynamicVector<vct3> & points) {

	// Create a pipeline with a single 640x480@30 depth stream.
	sm = PXCSenseManager::CreateInstance();

	sm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30);

	pxcStatus sts = sm->Init();
	if (sts < PXC_STATUS_NO_ERROR) {
		std::cout << "DIDN\'T WORK" << std::endl;
	}

	PXCCapture::Device *device = sm->QueryCaptureManager()->QueryDevice();
	device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);

	// 1000 micrometers per depth unit, i.e. depth values in millimeters
	// (SetDepthUnit takes micrometers — TODO confirm against the SDK docs).
	std::cout << device->SetDepthUnit(1000) << std::endl;

	PXCProjection *projection = device->CreateProjection();

	pxcStatus sts2 = sm->AcquireFrame(false);
	if (sts2 >= PXC_STATUS_NO_ERROR) {
		PXCCapture::Sample *sample = sm->QuerySample();
		PXCImage* image = (*sample)[streamType];
		PXCImage::ImageData data;
		image->AcquireAccess(PXCImage::ACCESS_READ, &data);
		PXCImage::ImageInfo dinfo = sample->depth->QueryInfo();
		points.SetSize(dinfo.width * dinfo.height);

		// Project every depth pixel to a 3D vertex.
		PXCPoint3DF32 * vertices = new PXCPoint3DF32[dinfo.width * dinfo.height];
		projection->QueryVertices(image, vertices);

		// Keep only valid vertices; z == 0 marks "no depth data".
		int c = 0;
		for (size_t i = 0; i < points.size(); i++) {
			PXCPoint3DF32 point = vertices[i];
			if (point.z != 0) {
				vct3 newPoint(point.x, point.y, point.z);
				points[c++] = newPoint;
			}
		}
		points.resize(c);

		delete[] vertices;		// BUGFIX: was leaked on every call
		image->ReleaseAccess(&data);
		sm->ReleaseFrame();		// BUGFIX: the frame was never released
	}
	projection->Release();
	std::cout << sts2 << std::endl;
}
Пример #4
0
// Per-frame update: uploads the blob (label) image as a texture, translates
// the primary hand's gesture into a rate-limited message, and broadcasts the
// thumb position as a screen-space point.
void Tracker::update() {
	// BUGFIX: check labelMap BEFORE acquiring — the old code returned
	// between AcquireFrame() and ReleaseFrame(), leaking the frame.
	if (!labelMap) return;
	if (!AcquireFrame(false)) return;

	// Blob (label) image of the whole scene -> luminance texture.
	PXCImage *image;
	if (QueryGesture()->QueryBlobImage(PXCGesture::Blob::LABEL_SCENE, 0, &image) >= PXC_STATUS_NO_ERROR) {
		PXCImage::ImageData data;
		if (image->AcquireAccess(PXCImage::ACCESS_READ, &data) >= PXC_STATUS_NO_ERROR) {
			tex.loadData(data.planes[0], tex.getWidth(), tex.getHeight(), GL_LUMINANCE);
			image->ReleaseAccess(&data);
		}
	}

	// Gesture of the primary hand. BUGFIX: a stack object replaces the old
	// `new PXCGesture::Gesture()` that leaked on every update.
	PXCGesture::Gesture gesture;
	if (QueryGesture()->QueryGestureData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, 0, &gesture) >= PXC_STATUS_NO_ERROR) {
		string name = "";
		switch (gesture.label) {
			case PXCGesture::Gesture::LABEL_HAND_CIRCLE: name = "CIRCLE"; break;
			case PXCGesture::Gesture::LABEL_HAND_WAVE: name = "WAVE"; break;
			case PXCGesture::Gesture::LABEL_POSE_BIG5: name = "HIGH5"; break;
			case PXCGesture::Gesture::LABEL_POSE_THUMB_UP: name = "THUMBUP"; break;
			case PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN: name = "THUMBDOWN"; break;
			case PXCGesture::Gesture::LABEL_NAV_SWIPE_DOWN: name = "SWIPEDOWN"; break;
			case PXCGesture::Gesture::LABEL_NAV_SWIPE_LEFT: name = "SWIPELEFT"; break;
			case PXCGesture::Gesture::LABEL_NAV_SWIPE_RIGHT: name = "SWIPERIGHT"; break;
			case PXCGesture::Gesture::LABEL_NAV_SWIPE_UP: name = "SWIPEUP"; break;
		}

		// Rate-limit: the tween acts as a one-second cooldown timer.
		if (name != "") {
			if (!timeout.isRunning()) {
				timeout.setParameters(easing, ofxTween::easeIn, 0, 1, 1000, 0); // ONLY SEND ONCE PER SECOND
				ofSendMessage("GESTURE_" + name);
			} 
		}
	}

	if (timeout.isRunning()) timeout.update();

	// Thumb of the primary hand -> screen point (x mirrored to the view).
	PXCGesture::GeoNode indexData;
	QueryGesture()->QueryNodeData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY|PXCGesture::GeoNode::LABEL_FINGER_THUMB, &indexData);
	ofPoint p;
	p.x = ofMap(indexData.positionImage.x, 0, 320, ofGetWidth(), 0, true);
	p.y = ofMap(indexData.positionImage.y, 0, 240, 0, ofGetHeight(), true);
	p.z = indexData.radiusImage;
	ofNotifyEvent(GEONODE_POSITION_CHANGE, p);

	ReleaseFrame();
}
Пример #5
0
    // Refreshes the hand tracker and copies each detected hand's
    // segmentation mask (up to two hands) into handImage1 / handImage2.
    void updateHandFrame()
    {
        handData->Update();

        // Reset the per-hand mask images (sized to the depth stream).
        handImage1 = cv::Mat::zeros( DEPTH_HEIGHT, DEPTH_WIDTH, CV_8UC1 );
        handImage2 = cv::Mat::zeros( DEPTH_HEIGHT, DEPTH_WIDTH, CV_8UC1 );

        // Iterate over the detected hands.
        auto numOfHands = handData->QueryNumberOfHands();
        for ( int i = 0; i < numOfHands; i++ ) {
            // Fetch the hand handle in ID order.
            PXCHandData::IHand* hand;
            auto sts = handData->QueryHandData(
                PXCHandData::AccessOrderType::ACCESS_ORDER_BY_ID, i, hand );
            if ( sts < PXC_STATUS_NO_ERROR ) {
                continue;
            }

            // Get this hand's segmentation (mask) image.
            PXCImage* image = 0;
            sts = hand->QuerySegmentationImage( image );
            if ( sts < PXC_STATUS_NO_ERROR ) {
                continue;
            }

            // Lock the mask pixels for CPU read access as 8-bit luminance.
            PXCImage::ImageData data;
            sts = image->AcquireAccess( 
                PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_Y8, &data );
            if ( sts < PXC_STATUS_NO_ERROR ){
                continue;
            }

            // The mask resolution follows the depth stream; only two hands
            // are kept. BUGFIX: copy row by row honoring the SDK row pitch —
            // the old flat width*height memcpy shears the image whenever
            // data.pitches[0] != info.width (rows may be padded).
            PXCImage::ImageInfo info = image->QueryInfo();
            auto& handImage = (i == 0) ? handImage1 : handImage2;
            for ( int y = 0; y < info.height; ++y ) {
                memcpy( handImage.data + y * info.width,
                        data.planes[0] + y * data.pitches[0],
                        info.width );
            }

            image->ReleaseAccess( &data );
        }
    }
Пример #6
0
//global
// Per-frame camera update: always refreshes the RGB texture, then performs
// the stage-specific work (segmentation texture, BW channel, or depth copy).
void HodginDidItApp::updateCamera()
{
	// Color stream -> RGB texture.
	mImgRgb = mPXC.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
	// BUGFIX: null-check before AcquireAccess — QueryImage can fail.
	if(mImgRgb && mImgRgb->AcquireAccess(PXCImage::ACCESS_READ, &mDataRgb)>=PXC_STATUS_NO_ERROR)
	{
		mTexRgb = gl::Texture(mDataRgb.planes[0],GL_BGR,mRgbW,mRgbH);
		mImgRgb->ReleaseAccess(&mDataRgb);
	}

	// NOTE(review): stages 4 and 5 each match the FIRST branch that tests
	// them, so the `else if` bodies for 4 and 5 are only reached via 5 and 6
	// respectively — confirm the intended stage ranges before changing.
	if(mStage==3||mStage==4)	//BL_2_SEG
	{
		PXCImage *cImgSeg = mPXC.QuerySegmentationImage();
		PXCImage::ImageData cDataSeg;
		if(cImgSeg && cImgSeg->AcquireAccess(PXCImage::ACCESS_READ, &cDataSeg)>=PXC_STATUS_NO_ERROR)
		{
			mTexSeg = gl::Texture(cDataSeg.planes[0],GL_LUMINANCE,mDepthW,mDepthH);
			cImgSeg->ReleaseAccess(&cDataSeg);
		}
		mChanBW = Channel(mTexRgb);
	}
	else if(mStage==4||mStage==5)	//SEG_2_BW
	{
		mChanBW = Channel(mTexRgb);
	}
	else if(mStage==5||mStage==6)	//BW_2_RGB
	{
		PXCImage *cImgDepth = mPXC.QueryImage(PXCImage::IMAGE_TYPE_DEPTH);
		PXCImage::ImageData cDataDepth;
		if(cImgDepth && cImgDepth->AcquireAccess(PXCImage::ACCESS_READ, &cDataDepth)>=PXC_STATUS_NO_ERROR)
		{
			memcpy(mDepthBuffer, cDataDepth.planes[0], (size_t)(mDepthW*mDepthH*sizeof(pxcU16)));
			cImgDepth->ReleaseAccess(&cDataDepth);
		}
	}
}
Пример #7
0
//--------------------------------------------------------------
//--------------------------------------------------------------
// Copies an RGB32 preview frame into m_charBuffer (packed BGRA rows),
// wraps it in an ofImage, and ships it to the scanning channel.
void IntelFaceScanner::updateBitmap(PXCImage *newimage) 
{
	if(!newimage) return;

	PXCImage* image = newimage;
	PXCImage::ImageInfo info=image->QueryInfo();
    PXCImage::ImageData data;

	if(info.format == PXCImage::PIXEL_FORMAT_RGB32)
	{
		if (image->AcquireAccess(PXCImage::ACCESS_READ,PXCImage::PIXEL_FORMAT_RGB32, &data)>=PXC_STATUS_NO_ERROR) 
		{
			// BUGFIX: setFromPixels() expects packed rows of width*4 bytes,
			// so the destination stride must be width*4 — the old code used
			// the SOURCE pitch for the destination offset, leaving gaps (and
			// potentially overrunning m_charBuffer) when pitch != width*4.
			const size_t rowBytes = (size_t)info.width * 4;
			for(int i=0; i < info.height;i++) {
				memcpy_s((char*)(m_charBuffer + i * rowBytes), rowBytes,
				         data.planes[0] + i * data.pitches[0], rowBytes);
			}
			frame.setFromPixels(m_charBuffer, info.width, info.height, OF_IMAGE_COLOR_ALPHA, true);

			// Hand the frame to the scanning pipeline.
			ScanningData fdata(frame);
			scanningData.send(fdata);

			image->ReleaseAccess(&data);
		}
	}
}
Пример #8
0
// MATLAB MEX gateway: reads the current DEPTH frame from a UtilPipeline whose
// raw pointer is passed in from MATLAB as a uint64 (prhs[0]).
//   plhs[0] : width x height uint16 depth matrix (plane 0)
//   plhs[1] : (optional) width x height uint16 matrix from plane 1
//   plhs[2] : (optional) 2 x width x height single matrix from plane 2
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) 
{
    UInt64 *MXadress;
    if(nrhs==0)
    {
        printf("Close failed: Give Pointer to intel camera as input\n");
        mexErrMsgTxt("Intel camera error"); 
    }
    // prhs[0] carries the UtilPipeline pointer as a uint64 scalar.
    MXadress = (UInt64*)mxGetData(prhs[0]);
    
    if(MXadress[0]==0)
    { 
        return;
    }
    
    UtilPipeline* utilPipelineP = (UtilPipeline*)MXadress[0];
    UtilPipeline& utilPipeline = utilPipelineP[0];

    PXCImage *depthImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_DEPTH);
    
    // NOTE(review): the AcquireAccess status is not checked; if it fails the
    // memcpy calls below read through invalid plane pointers.
    PXCImage::ImageData depthImageData;
    depthImage->AcquireAccess(PXCImage::ACCESS_READ,&depthImageData);
    
    //if(depthImageData.format != PXCImage::COLOR_FORMAT_DEPTH)
    //{
    //    mexErrMsgTxt("COLOR_FORMAT_DEPTH error"); 
    //}

    // Only system-memory surfaces can be copied with plain memcpy below.
    if(depthImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY)
    {
        mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error"); 
    }

    PXCImage::ImageInfo depthInfo;
    depthImage->QueryInfo(&depthInfo);
    printf("Depth Image :  Width %d, Height %d \r\n",depthInfo.width,depthInfo.height); 

    // MATLAB arrays are column-major, so [width height] dims yield a
    // transposed image on the MATLAB side (callers are expected to handle it).
    mwSize dimsDepth[2];
    dimsDepth[0]=depthInfo.width;
    dimsDepth[1]=depthInfo.height;
    unsigned short *Iout;
    plhs[0] = mxCreateNumericArray(2, dimsDepth, mxUINT16_CLASS, mxREAL);
    Iout = (unsigned short*)mxGetData(plhs[0]);
    // NOTE(review): assumes plane 0 rows are packed (pitch == width*2).
    memcpy (Iout,depthImageData.planes[0],dimsDepth[0]*dimsDepth[1]*sizeof(unsigned short));  

    // Second output: plane 1 of the depth surface (presumably the
    // confidence/IR map in this SDK version — verify against the SDK docs).
    if(nlhs>1)
    {
        unsigned short *Cout;
        plhs[1] = mxCreateNumericArray(2, dimsDepth, mxUINT16_CLASS, mxREAL);
        Cout = (unsigned short*)mxGetData(plhs[1]);
        memcpy (Cout,depthImageData.planes[1],dimsDepth[0]*dimsDepth[1]*sizeof(unsigned short));  
    }
    	
    // Third output: plane 2 as 2 x width x height float (presumably the
    // depth-to-color UV map — verify against the SDK docs).
    if(nlhs>2)
    {
		mwSize dimsF[3];
		dimsF[0]=2;
		dimsF[1]=depthInfo.width;
		dimsF[2]=depthInfo.height;
	
        float *Dout;
        plhs[2] = mxCreateNumericArray(3, dimsF, mxSINGLE_CLASS, mxREAL);
        Dout = (float*)mxGetData(plhs[2]);
        memcpy (Dout,depthImageData.planes[2],dimsF[0]*dimsF[1]*dimsF[2]*sizeof(float));  
    }
	
    depthImage->ReleaseAccess(&depthImageData);
}
Пример #9
0
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) 
{
    UInt64 *MXadress;
    if(nrhs==0)
    {
        printf("Close failed: Give Pointer to intel camera as input\n");
        mexErrMsgTxt("Intel camera error"); 
    }
    MXadress = (UInt64*)mxGetData(prhs[0]);
    
    if(MXadress[0]==0)
    { 
        return;
    }
    
    UtilPipeline* utilPipelineP = (UtilPipeline*)MXadress[0];
    UtilPipeline& utilPipeline = utilPipelineP[0];
    
    PXCImage *rgbImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
 
    PXCImage::ImageData rgbImageData;
    rgbImage->AcquireAccess(PXCImage::ACCESS_READ,&rgbImageData);

	  
    //if(depthImageData.format != PXCImage::COLOR_FORMAT_DEPTH)
    //{
    //    mexErrMsgTxt("COLOR_FORMAT_DEPTH error"); 
    //}

	if(rgbImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY)
    {
        mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error"); 
    }

	
    PXCImage::ImageInfo rgbInfo;
    rgbImage->QueryInfo(&rgbInfo);
    printf("RGB Image :  Width %d, Height %d \r\n",rgbInfo.width,rgbInfo.height); 

    mwSize dimsRGB[3];
    dimsRGB[0]=3;
    dimsRGB[1]=rgbInfo.width;
    dimsRGB[2]=rgbInfo.height;

    unsigned char *Iout;
    plhs[0] = mxCreateNumericArray(3, dimsRGB, mxUINT8_CLASS, mxREAL);
    Iout = (unsigned char*)mxGetData(plhs[0]);
    memcpy (Iout,rgbImageData.planes[0],dimsRGB[0]*dimsRGB[1]*dimsRGB[2]);  
  
    rgbImage->ReleaseAccess(&rgbImageData);


	
    if(nlhs>1)
    {
		UtilCapture *capture = utilPipeline.QueryCapture();
		if(!capture)
		{
			 printf("No valid capture object\n");
			 return;
		}
		
		PXCCapture::Device *device = capture->QueryDevice();
		if(!device)
		{
			 printf("No valid device object\n");
			 return;
		}

		// Get Camera Projection Data
		PXCSession *session = utilPipeline.QuerySession();
		pxcUID pid;
		device->QueryPropertyAsUID(PXCCapture::Device::PROPERTY_PROJECTION_SERIALIZABLE,&pid);
		PXCSmartPtr<PXCProjection> projection;
		PXCMetadata *metadata = session->DynamicCast<PXCMetadata>();
		metadata->CreateSerializable<PXCProjection>( pid, &projection );
		if(!projection)
		{
			 printf("No valid projection data\n");
			 return;
		}

		pxcU32 npoints = (pxcU32) dimsRGB[1]*(pxcU32)dimsRGB[2];
		
		PXCPointF32 *posc = new PXCPointF32[npoints];
		PXCPointF32 *posd = new PXCPointF32[npoints];
				
		int i=0;
		for (int y=0;y< dimsRGB[2];y++)
		{
			for (int x=0;x< dimsRGB[1];x++)
			{
				posc[i].x=(pxcF32)x, 
				posc[i].y=(pxcF32)y;
				i++;
			}
		}

		projection->MapColorCoordinatesToDepth(npoints,posc,posd);

		//projection->Release();
	
		mwSize dimsM[3];
		dimsM[0]=2;
		dimsM[1]=dimsRGB[1];
		dimsM[2]=dimsRGB[2];
        plhs[1] = mxCreateNumericArray(3, dimsM, mxSINGLE_CLASS, mxREAL);
        float* Mout = (float*)mxGetData(plhs[1]);
        memcpy (Mout,posd,dimsM[0]*dimsM[1]*dimsM[2]*sizeof(float));  
        
        delete(posc);
        delete(posd);
    }
}
Пример #10
0
// Camera Processing Thread
// Initialize the RealSense SenseManager and initiate camera processing loop:
// Step 1: Acquire new camera frame
// Step 2: Load shared settings
// Step 3: Perform Core SDK and middleware processing and store results
//         in background RealSenseDataFrame
// Step 4: Swap the background and mid RealSenseDataFrames
void RealSenseImpl::CameraThread()
{
	uint64 currentFrame = 0;

	// Reset the frame counters of the fg/mid/bg triple-buffer scheme.
	fgFrame->number = 0;
	midFrame->number = 0;
	bgFrame->number = 0;

	pxcStatus status = senseManager->Init();
	RS_LOG_STATUS(status, "SenseManager Initialized")

	// NOTE(review): assert() compiles out in release builds, so Init() and
	// AcquireFrame() failures are effectively unhandled there.
	assert(status == PXC_STATUS_NO_ERROR);

	// Face middleware output buffer: created once, refreshed every frame.
	if (bFaceEnabled) {
		faceData = pFace->CreateOutput();
	}

	while (bCameraThreadRunning == true) {
		// Acquires new camera frame (blocking until all streams are ready)
		status = senseManager->AcquireFrame(true);
		assert(status == PXC_STATUS_NO_ERROR);

		bgFrame->number = ++currentFrame;

		// Performs Core SDK and middleware processing and store results 
		// in background RealSenseDataFrame
		if (bCameraStreamingEnabled) {
			PXCCapture::Sample* sample = senseManager->QuerySample();

			// NOTE(review): sample->color / sample->depth are not
			// null-checked here (compare the later CameraThread variant).
			CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
			CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
		}

		if (bScan3DEnabled) {
			// Latch the one-shot "start scan" request into the SDK config.
			if (bScanStarted) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = true;
				p3DScan->SetConfiguration(config);
				bScanStarted = false;
			}

			// Latch the one-shot "stop scan" request.
			if (bScanStopped) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = false;
				p3DScan->SetConfiguration(config);
				bScanStopped = false;
			}

			// Preview image of the scan in progress; released after copying.
			PXCImage* scanImage = p3DScan->AcquirePreviewImage();
			if (scanImage) {
				UpdateScan3DImageSize(scanImage->QueryInfo());
				CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
				scanImage->Release();
			}
			
			// Run mesh reconstruction once when requested; flag completion.
			if (bReconstructEnabled) {
				status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
				bReconstructEnabled = false;
				bScanCompleted = true;
			}
		}

		// Face tracking: store head count plus position/rotation of face 0.
		if (bFaceEnabled) {
			faceData->Update();
			bgFrame->headCount = faceData->QueryNumberOfDetectedFaces();
			if (bgFrame->headCount > 0) {
				PXCFaceData::Face* face = faceData->QueryFaceByIndex(0);
				PXCFaceData::PoseData* poseData = face->QueryPose();

				if (poseData) {
					PXCFaceData::HeadPosition headPosition = {};
					poseData->QueryHeadPosition(&headPosition);
					bgFrame->headPosition = FVector(headPosition.headCenter.x, headPosition.headCenter.y, headPosition.headCenter.z);

					PXCFaceData::PoseEulerAngles headRotation = {};
					poseData->QueryPoseAngles(&headRotation);
					bgFrame->headRotation = FRotator(headRotation.pitch, headRotation.yaw, headRotation.roll);
				}
			}
		}
		
		senseManager->ReleaseFrame();

		// Swaps background and mid RealSenseDataFrames
		// (midFrameMutex guards the handoff to the consumer thread).
		std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
		bgFrame.swap(midFrame);
	}
}
// Camera Processing Thread
// Initialize the RealSense SenseManager and initiate camera processing loop:
// Step 1: Acquire new camera frame
// Step 2: Load shared settings
// Step 3: Perform Core SDK and middleware processing and store results
//         in background RealSenseDataFrame
// Step 4: Swap the background and mid RealSenseDataFrames
void RealSenseImpl::CameraThread()
{
	uint64 currentFrame = 0;

	// Reset the frame counters of the fg/mid/bg triple-buffer scheme.
	fgFrame->number = 0;
	midFrame->number = 0;
	bgFrame->number = 0;

	pxcStatus status = senseManager->Init();
	RS_LOG_STATUS(status, "SenseManager Initialized")

	// NOTE(review): assert() compiles out in release builds, so Init() and
	// AcquireFrame() failures are effectively unhandled there.
	assert(status == PXC_STATUS_NO_ERROR);

	while (bCameraThreadRunning == true) {
		// Acquires new camera frame (blocking until all streams are ready)
		status = senseManager->AcquireFrame(true);
		assert(status == PXC_STATUS_NO_ERROR);

		bgFrame->number = ++currentFrame;

		PXCCapture::Sample* sample = senseManager->QuerySample();

		// Performs Core SDK and middleware processing and store results 
		// in background RealSenseDataFrame. Unlike the earlier variant,
		// color and depth are gated individually and null-checked.
		if (bColorStreamingEnabled) {
			if (sample->color) {
				CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
			}
		}

		if (bDepthStreamingEnabled) {
			if (sample->depth) {
				CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
			}
		}

		if (bScan3DEnabled) {
			// Latch the one-shot "start scan" request into the SDK config.
			if (bScanStarted) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = true;
				p3DScan->SetConfiguration(config);
				bScanStarted = false;
			}

			// Latch the one-shot "stop scan" request.
			if (bScanStopped) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = false;
				p3DScan->SetConfiguration(config);
				bScanStopped = false;
			}

			// Preview image of the scan in progress; released after copying.
			PXCImage* scanImage = p3DScan->AcquirePreviewImage();
			if (scanImage) {
				UpdateScan3DImageSize(scanImage->QueryInfo());
				CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
				scanImage->Release();
			}
			
			// Run mesh reconstruction once when requested; flag completion.
			if (bReconstructEnabled) {
				status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
				bReconstructEnabled = false;
				bScanCompleted = true;
			}
		}
		
		senseManager->ReleaseFrame();

		// Swaps background and mid RealSenseDataFrames
		// (midFrameMutex guards the handoff to the consumer thread).
		std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
		bgFrame.swap(midFrame);
	}
}
Пример #12
0
// Renders the depth frame's 3D vertices into a 1024x1024 BGRA canvas and
// converts the result into a GDI bitmap (m_bitmap) for display.
void Graphics::DrawBitmap(PXCCapture::Sample* sample)
{
	PXCImage *imageDepth = sample->depth;
	assert(imageDepth);
	PXCImage::ImageInfo imageDepthInfo = imageDepth->QueryInfo();

	// Output canvas: fixed 1024x1024 BGRA image.
	m_outputImageInfo.width = 1024;
	m_outputImageInfo.height = 1024;
	m_outputImageInfo.format = PXCImage::PIXEL_FORMAT_RGB32;
	m_outputImageInfo.reserved = 0;

	m_outputImage = m_session->CreateImage(&m_outputImageInfo);
	assert(m_outputImage);

	PXCImage::ImageData imageDepthData;
	if(imageDepth->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &imageDepthData) < PXC_STATUS_NO_ERROR)
	{
		// BUGFIX: the old code leaked m_outputImage on this path.
		m_outputImage->Release();
		return;
	}

	memset(&m_outputImageData, 0, sizeof(m_outputImageData));
	pxcStatus status = m_outputImage->AcquireAccess(PXCImage::ACCESS_WRITE, PXCImage::PIXEL_FORMAT_RGB32, &m_outputImageData);
	if(status < PXC_STATUS_NO_ERROR)
	{
		// BUGFIX: the old code returned while still holding the depth
		// access and the output image (both leaked).
		imageDepth->ReleaseAccess(&imageDepthData);
		m_outputImage->Release();
		return;
	}

	// Clear the canvas.
	int stridePixels = m_outputImageData.pitches[0];
	pxcBYTE *pixels = reinterpret_cast<pxcBYTE*> (m_outputImageData.planes[0]);
	memset(pixels, 0, stridePixels * m_outputImageInfo.height);

	// Project each depth pixel to a 3D vertex.
	PXCPoint3DF32* vertices = new PXCPoint3DF32[imageDepthInfo.width * imageDepthInfo.height];
	PXCProjection* projection(m_senseManager->QueryCaptureManager()->QueryDevice()->CreateProjection());
	if (!projection)
	{
		delete[] vertices;
		// BUGFIX: release everything acquired above before bailing out
		// (the old code leaked both accesses and the output image here).
		m_outputImage->ReleaseAccess(&m_outputImageData);
		imageDepth->ReleaseAccess(&imageDepthData);
		m_outputImage->Release();
		return;
	}

	projection->QueryVertices(imageDepth, vertices);
	projection->Release();
	int strideVertices = imageDepthInfo.width;

	// Render each valid vertex as a gray pixel into the canvas.
	for (int y = 0; y < imageDepthInfo.height; y++)
	{
		const PXCPoint3DF32 *verticesRow = vertices + y * strideVertices;
		for (int x = 0; x < imageDepthInfo.width; x++)
		{
			const PXCPoint3DF32 &v = verticesRow[x];
			if (v.z <= 0.0f)
			{
				continue;	// non-positive z marks "no depth data"
			}

			int ix = 0, iy = 0;
			if(ProjectVertex(v, ix, iy))
			{
				// 50% gray, opaque BGRA pixel at (ix, iy).
				pxcBYTE *ptr = m_outputImageData.planes[0];
				ptr += iy * m_outputImageData.pitches[0];
				ptr += ix * 4;
				ptr[0] = pxcBYTE(255.0f * 0.5f);
				ptr[1] = pxcBYTE(255.0f * 0.5f);
				ptr[2] = pxcBYTE(255.0f * 0.5f);
				ptr[3] = pxcBYTE(255.0f);
			}
		}
	}
	delete[] vertices;

	// Re-create the GDI bitmap from the rendered canvas.
	if (m_bitmap)
	{
		DeleteObject(m_bitmap);
		m_bitmap = 0;
	}

	HWND hwndPanel = GetDlgItem(m_window, IDC_PANEL);
	HDC dc = GetDC(hwndPanel);
	BITMAPINFO binfo;
	memset(&binfo, 0, sizeof(binfo));
	binfo.bmiHeader.biWidth = m_outputImageData.pitches[0] / 4;	// pitch in pixels
	binfo.bmiHeader.biHeight = -(int)m_outputImageInfo.height;	// negative => top-down DIB
	binfo.bmiHeader.biBitCount = 32;
	binfo.bmiHeader.biPlanes = 1;
	binfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	binfo.bmiHeader.biCompression = BI_RGB;
	Sleep(1);	// NOTE(review): looks like a leftover throttle — confirm before removing
	m_bitmap = CreateDIBitmap(dc, &binfo.bmiHeader, CBM_INIT, m_outputImageData.planes[0], &binfo, DIB_RGB_COLORS);

	ReleaseDC(hwndPanel, dc);

	m_outputImage->ReleaseAccess(&m_outputImageData);
	imageDepth->ReleaseAccess(&imageDepthData);
	m_outputImage->Release();
}
/*
 *_stream() reads a single frame
 */
void GestureHandler::_stream()
{
    if (!_pipeline.AcquireFrame(true))    // true - blocking
        return;

    // Retrieve QueryImage() as PXCSmartPtr<PXCImage> will crash later on
    PXCImage* image = _pipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);

    PXCImage::ImageData image_data;
    pxcStatus sts = image->AcquireAccess(PXCImage::ACCESS_READ,
                                         PXCImage::COLOR_FORMAT_RGB24,
                                         &image_data);
    if (sts < PXC_STATUS_NO_ERROR)
    {
        std::cout << "!!! Failed AcquireAccess." << std::endl;
        _pipeline.ReleaseFrame();
        return;
    }

    PXCImage::ImageInfo image_info;
    image->QueryInfo(&image_info);
    if (sts < PXC_STATUS_NO_ERROR)
    {
        std::cout << "!!! Failed QueryInfo." << std::endl;
        _pipeline.ReleaseFrame();
        return;
    }

    if (image_info.format == PXCImage::COLOR_FORMAT_RGB24)
    {   // BGR layout on little-endian machine
        unsigned char* bgr_img_data = (unsigned char*)image_data.planes[0];

        QImage temp(bgr_img_data,
                    image_info.width,
                    image_info.height,
                    QImage::Format_RGB888);

        // Convert data from BGR (PXCImage format) to RGB (QPixmap)
        QPixmap pixmap = QPixmap::fromImage(temp.rgbSwapped());
        emit notify_image(pixmap);
    }
    else
    {
        std::cout << "!!! TODO: support other image formats besides RGB24." << std::endl;
        _pipeline.ReleaseFrame();
        return;
    }

    image->ReleaseAccess(&image_data);
    if (sts < PXC_STATUS_NO_ERROR)
    {
        std::cout << "!!! Failed ReleaseAccess." << std::endl;
        _pipeline.ReleaseFrame();
        return;
    }

    /* Detect gestures */

    PXCGesture* gesture = _pipeline.QueryGesture();
    if (gesture)
    {
        // get the node data for the whole primary hand
        // (the one that shows up in the view first)
        PXCGesture::GeoNode node;
        sts = gesture->QueryNodeData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, &node);
        if (sts == PXC_STATUS_ITEM_UNAVAILABLE)
        {
            //std::cout << "!!! QueryNodeData: PXC_STATUS_ITEM_UNAVAILABLE" << std::endl;
            _pipeline.ReleaseFrame();
            return;
        }

        //std::cout << "* Hand at " << node.positionWorld.x << "x" << node.positionWorld.y << " z = " << node.positionWorld.z << std::endl;

        PXCGesture::Gesture gest;
        sts = gesture->QueryGestureData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, 0, &gest);
        if (sts == PXC_STATUS_ITEM_UNAVAILABLE)
        {
            //std::cout << "!!! QueryGestureData: PXC_STATUS_ITEM_UNAVAILABLE" << std::endl;
            _pipeline.ReleaseFrame();
            return;
        }

        if (gest.active)
        {
            // how many ms has passed since last gesture detection
            qint64 elapsed = _elapsed_timer.elapsed();

            switch (gest.label)
            {
                case PXCGesture::Gesture::LABEL_HAND_WAVE:
                case PXCGesture::Gesture::LABEL_NAV_SWIPE_LEFT:
                case PXCGesture::Gesture::LABEL_NAV_SWIPE_RIGHT:
                case PXCGesture::Gesture::LABEL_NAV_SWIPE_UP:
                case PXCGesture::Gesture::LABEL_NAV_SWIPE_DOWN:
                case PXCGesture::Gesture::LABEL_HAND_CIRCLE:
                default:
                    emit notify_gesture(gest);
                break;

                case PXCGesture::Gesture::LABEL_POSE_THUMB_UP:
                    if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_THUMB_UP && elapsed > 1000) ||
                          (_last_gesture != PXCGesture::Gesture::LABEL_POSE_THUMB_UP) )
                        emit notify_gesture(gest);
                break;

                case PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN:
                    if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN && elapsed > 1000) ||
                        (_last_gesture != PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN) )
                        emit notify_gesture(gest);
                break;

                case PXCGesture::Gesture::LABEL_POSE_PEACE:
                    if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_PEACE && elapsed > 1000) ||
                         (_last_gesture != PXCGesture::Gesture::LABEL_POSE_PEACE) )
                        emit notify_gesture(gest);
                break;

                case PXCGesture::Gesture::LABEL_POSE_BIG5:
                    if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_BIG5 && elapsed > 1000) ||
                        (_last_gesture != PXCGesture::Gesture::LABEL_POSE_BIG5) )
                        emit notify_gesture(gest);
                break;
            }
            _last_gesture = gest.label;
            if (elapsed > 1000)
                _elapsed_timer.restart();
        }
    }

    _pipeline.ReleaseFrame();
}
Пример #14
0
//----------------------------------------------------------
// Per-frame update: non-blocking frame grab, copies color and depth into the
// pixel buffers, then uploads the (possibly unchanged) pixels as textures.
void ofxRSSDK::update()
{
	// Nothing to do until setup succeeded.
	if (!bGrabberInited)
	{
		return;
	}


	// Non-blocking acquire: only process when a complete sample is ready.
	if (mRealSenseDevice->AcquireFrame(false, 0) >= PXC_STATUS_NO_ERROR)
	{
		PXCCapture::Sample *cSample = mRealSenseDevice->QuerySample();
		// NOTE(review): this early return skips ReleaseFrame(), leaving the
		// frame locked if QuerySample() ever returns null after a successful
		// AcquireFrame() — confirm whether that can happen.
		if (!cSample)
			return;

		if (bGrabVideo)
		{
			// Color stream as 32-bit RGB into videoPixels.
			PXCImage *cRgbImage = cSample->color;
			if (cRgbImage)
			{
				PXCImage::ImageData cRgbData;
				if (cRgbImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &cRgbData) >= PXC_STATUS_NO_ERROR)
				{
					// NOTE(review): assumes packed rows (pitch == width*4).
					uint8_t *cBuffer = cRgbData.planes[0];
					videoPixels.setFromPixels(cBuffer, width, height, 4);

					cRgbImage->ReleaseAccess(&cRgbData);
				}
			}
		}

		// Depth stream is read three times with different SDK conversions:
		PXCImage *cDepthImage = cSample->depth;
		if (cDepthImage)
		{
			// 1) raw 16-bit depth -> depthPixelsRaw and mDepthBuffer copy
			PXCImage::ImageData cDepthData;
			if (cDepthImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &cDepthData) >= PXC_STATUS_NO_ERROR)
			{
				depthPixelsRaw.setFromPixels(reinterpret_cast<uint16_t *>(cDepthData.planes[0]), width, height, 1);
				memcpy(mDepthBuffer, reinterpret_cast<uint16_t *>(cDepthData.planes[0]), (size_t)(width*height*sizeof(uint16_t)));
				cDepthImage->ReleaseAccess(&cDepthData);
			}

			// 2) 32-bit float depth -> distancePixels
			if (cDepthImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH_F32, &cDepthData) >= PXC_STATUS_NO_ERROR)
			{
				distancePixels.setFromPixels(reinterpret_cast<float *>(cDepthData.planes[0]), width, height, 1);
				cDepthImage->ReleaseAccess(&cDepthData);
			}

			// 3) RGB32 rendering of the depth map -> depthPixels
			if (cDepthImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &cDepthData) >= PXC_STATUS_NO_ERROR)
			{
				depthPixels.setFromPixels(cDepthData.planes[0], width, height, 4);
				cDepthImage->ReleaseAccess(&cDepthData);
			}
		}

		mRealSenseDevice->ReleaseFrame();
	}

	//updateDepthPixels();
	// Textures are uploaded every update, even when no new frame arrived
	// (the previous pixel contents are simply re-uploaded).
	depthTex.loadData(depthPixels.getPixels(), width, height, GL_RGBA);
	videoTex.loadData(videoPixels.getPixels(), width, height, GL_BGRA);
	if (bCalcCameraPoints)
		updateCameraPoints();
}