Example #1
void foo1()
{
	PXCSession *session;
	PXCSession_Create(&session);

	// session is a PXCSession instance
	PXCSession::ImplDesc desc1;
	memset(&desc1,0,sizeof(desc1));
	desc1.group=PXCSession::IMPL_GROUP_SENSOR;
	desc1.subgroup=PXCSession::IMPL_SUBGROUP_AUDIO_CAPTURE;

	vector<std::wstring> deviceNames;
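	// Enumerate every audio-capture module in the sensor group and collect the names of the devices it exposes.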
	for (int m=0;;m++) {
		PXCSession::ImplDesc desc2;
		if (session->QueryImpl(&desc1,m,&desc2)<PXC_STATUS_NO_ERROR) break;

		PXCSmartPtr<PXCCapture> capture;
		if (session->CreateImpl<PXCCapture>(&desc2,&capture)<PXC_STATUS_NO_ERROR) continue;
		for (int d=0;;d++) {
			PXCCapture::DeviceInfo dinfo;
			if (capture->QueryDevice(d,&dinfo)<PXC_STATUS_NO_ERROR) break;
			std::wstring dname(dinfo.name);
			deviceNames.push_back(dname);
		}
	}

	// display devices and require a selection
	int deviceNum = deviceNames.size();
	int selectedDeviceId = 0;
	wprintf_s(L"Device list:\n");
	for (int i = 0; i < deviceNum; i++)
	{
		wprintf_s(L"Device[%d]: %s\n",i,deviceNames[i].c_str());
	}
	if (deviceNum > 0)
	{
		while (true)
		{
			wprintf_s(L"Please select a device from [0]~[%d]\n", deviceNum-1);
			cin >> selectedDeviceId;
			if (selectedDeviceId > -1 && selectedDeviceId < deviceNum)
			{
				wprintf_s(L"You selected device: [%d]%s\n", selectedDeviceId, deviceNames[selectedDeviceId].c_str());
				break;
			}
		}
	}
	else
	{
		wprintf_s(L"No audio capture device found.\n");
	}

	session->Release();
}
Example #2
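// Create the underlying capture device and wrap it in a DeviceRecording proxy before returning it to the caller.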
pxcStatus CaptureRecording::CreateDevice(pxcU32 didx, Device **device) {
    PXCCapture::Device *device2=0;
    pxcStatus sts=capture->CreateDevice(didx,&device2);
    if (sts>=PXC_STATUS_NO_ERROR) {
        *device=new CaptureRecording::DeviceRecording(device2,scheduler,types);
        if (!(*device)) sts=PXC_STATUS_ALLOC_FAILED;
    }
    return sts;
}
Example #3
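// Enumerate sensor-group video-capture modules; return true with m_device set as soon as a device can be created.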
bool IntelPerCStreamBase::initDevice(PXCSession *session)
{
    if (NULL == session)
        return false;

    pxcStatus sts = PXC_STATUS_NO_ERROR;
    PXCSession::ImplDesc templat;
    memset(&templat,0,sizeof(templat));
    templat.group   = PXCSession::IMPL_GROUP_SENSOR;
    templat.subgroup= PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE;

    for (int modidx = 0; PXC_STATUS_NO_ERROR <= sts; modidx++)
    {
        PXCSession::ImplDesc desc;
        sts = session->QueryImpl(&templat, modidx, &desc);
        if (PXC_STATUS_NO_ERROR > sts)
            break;

        PXCSmartPtr<PXCCapture> capture;
        sts = session->CreateImpl<PXCCapture>(&desc, &capture);
        if (!capture.IsValid())
        {
            sts = PXC_STATUS_NO_ERROR; // keep enumerating the remaining modules
            continue;
        }

        /* enumerate devices */
        for (int devidx = 0; PXC_STATUS_NO_ERROR <= sts; devidx++)
        {
            PXCSmartPtr<PXCCapture::Device> device;
            sts = capture->CreateDevice(devidx, &device);
            if (PXC_STATUS_NO_ERROR <= sts)
            {
                m_device = device.ReleasePtr();
                return true;
            }
        }
    }
    return false;
}
Example #4
int _tmain(int argc, _TCHAR* argv[])
{
	//Initializing objects for analysis
	//Creating the session
	PXCSmartPtr<PXCSession> session;
	PXCSession_Create(&session);
	//Creating face analysis object
	PXCFaceAnalysis *face=0; 
	session->CreateImpl(PXCFaceAnalysis::CUID,(void**)&face);
	//Initializing profile info
	PXCFaceAnalysis::ProfileInfo pinfo; 
	face->QueryProfile(0,&pinfo); 
	//Starting capture
	UtilCapture capture(session); 
	capture.LocateStreams(&pinfo.inputs); 
	face->SetProfile(&pinfo); 
	//Creating face detection module
	PXCFaceAnalysis::Detection *det=face->DynamicCast<PXCFaceAnalysis::Detection>(); 
	PXCFaceAnalysis::Detection::ProfileInfo dinfo = {0}; 
	det->QueryProfile(0,&dinfo); 
	det->SetProfile(&dinfo); 
	//Creating landmark module 
	PXCFaceAnalysis::Landmark *landmark=face->DynamicCast<PXCFaceAnalysis::Landmark>();
	PXCFaceAnalysis::Landmark::ProfileInfo linfo = {0};
	landmark->QueryProfile(1,&linfo); 
	landmark->SetProfile(&linfo);
	PXCFaceAnalysis::Landmark::LandmarkData ldata[7];
	//Declaring Detection and Landmark data objects for analysis
	PXCFaceAnalysis::Detection::Data data; 
	PXCFaceAnalysis::Landmark::PoseData pdata;
	//Storage containers for images
	PXCImage* mcImage;
	PXCImage::ImageData mCImageData; //Color Image data
	PXCSmartArray<PXCImage> images; 
	PXCSmartSPArray sps(2);



	UtilPipeline pipeline;
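	// NOTE: UtilPipeline opens a second capture path (used for rendering) in addition to the UtilCapture above that feeds the face module.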
	pipeline.EnableImage(PXCImage::COLOR_FORMAT_RGB24,640,480);
	pipeline.EnableImage(PXCImage::COLOR_FORMAT_DEPTH,320,240); //depth resolution 320,240
	pipeline.Init();
	UtilRender color_render(L"Color Stream");
	UtilRender depth_render(L"Depth Stream");

	///////////// OPENCV
	IplImage *image=0;
	CvSize gab_size;
	gab_size.height=480;
	gab_size.width=640;
	image=cvCreateImage(gab_size,8,3);

	PXCImage::ImageData idata;

	unsigned char *rgb_data;//=new unsigned char[];
	PXCImage::ImageInfo rgb_info;

	Mat colorMat;
	namedWindow("Test");
	namedWindow("Test2");
	///////
	for (;;) {
		capture.ReadStreamAsync(images.ReleaseRefs(),sps.ReleaseRef(0)); 
		face->ProcessImageAsync(images,sps.ReleaseRef(1)); 
		sps.SynchronizeEx(); 

		if (!pipeline.AcquireFrame(true)) break;
		PXCImage *color_image=pipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);

		color_image->AcquireAccess(PXCImage::ACCESS_READ_WRITE,PXCImage::COLOR_FORMAT_RGB24,&idata); 

		rgb_data=idata.planes[0];
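		// rgb_data now points at the RGB24 plane; the loop below copies it pixel by pixel into the IplImage for OpenCV.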


		for(int y=0; y<480; y++)
		{
			for(int x=0; x<640; x++)
			{ 
				for(int k=0; k<3 ; k++)
				{
					image->imageData[y*640*3+x*3+k]=rgb_data[y*640*3+x*3+k];
				}
			}
		}

		color_image->ReleaseAccess(&idata);
		/*
		cvShowImage("rgb_cv", image);
		cvShowImage("depth_cv2", depth);
		*/
		colorMat = image;

		//imshow("ShaurinSucks", colorMat);



		// Tracking or recognition results are ready 
		pxcUID fid; pxcU64 ts; 
		//cout << "QUERY FACE" << endl;
		if (face->QueryFace(0,&fid,&ts)<PXC_STATUS_NO_ERROR) { pipeline.ReleaseFrame(); continue; } // no face this frame: release and keep streaming
		landmark->QueryLandmarkData(fid, linfo.labels, &ldata[0]);
		landmark->QueryPoseData(fid, &pdata);
		det->QueryData(fid,&data); 
		//cout << "DATA   " << data.confidence << " " << data.viewAngle << " " << data.rectangle.x << " " << data.rectangle.y << endl;
		//cout << "PDATA  " << pdata.pitch << " " << pdata.roll << " " << pdata.yaw << endl;
						
		//cout << "LDATA ";
		for (int j = 0; j < 7; j++){
			//cout << ldata[j].position.x << " " << ldata[j].position.y << " " << ldata[j].position.z << endl;
		}

		//Point center( ldata[6].position.x, ldata[6].position.y);
		//ellipse(colorMat, center, Size( 10, 10), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);


		cout << ldata[0].position.x << " " << ldata[0].position.y << " | " << ldata[1].position.x << " " << ldata[1].position.y << endl;
		//Point2f start1(ldata[0].position.x, ldata[0].position.y);
		//Point2f end1(ldata[1].position.x, ldata[1].position.y);

		//Point2f start2(ldata[2].position.x, ldata[2].position.y);
		//Point2f end2(ldata[3].position.x, ldata[3].position.y);

		//int rad = abs((ldata[0].position.x+ldata[1].position.x)/2 - ldata[0].position.x);

		//Point2f mid1((ldata[0].position.x+ldata[1].position.x)/2, (ldata[0].position.y+ldata[1].position.y)/2);
		//ellipse(colorMat, mid1, Size( 50, 50), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);

		Point2f corner1(ldata[0].position.x, ldata[0].position.y+10);
		Point2f corner2(ldata[1].position.x, ldata[1].position.y-10);

		//rectangle(colorMat, corner1, corner2, Scalar(0, 255, 255), 2, 8);

		Rect temp(corner1, corner2);
		//colorMat(temp);


		//line(colorMat, start1, end1, Scalar(255,0,255), 2, 8);
		//line(colorMat, start2, end2, Scalar(255,0,255), 2, 8);

		imshow("Test", colorMat);
		Mat frame_gray;
		cvtColor(colorMat, frame_gray, CV_BGR2GRAY);
		equalizeHist(frame_gray, frame_gray);
		Mat faceROI = frame_gray(temp);
		imshow("Test2", faceROI);
				
		if( cvWaitKey(10) >= 0 )
		break;
		// Release the frame at the end of every iteration (the no-face path above also releases it; otherwise the stream stalls)
		pipeline.ReleaseFrame();

	}
	
	cvReleaseImage(&image);
	pipeline.Close();
	return 0;
}
Example #5
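// Find a capture module/device whose streams can satisfy the requested video formats and the consolidated audio request.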
pxcStatus UtilCapture::LocateStreams(std::vector<PXCCapture::VideoStream::DataDesc*> &vinputs,std::vector<PXCCapture::AudioStream::DataDesc*> &ainputs) {
    UtilTrace trace(L"UtilCapture::LocateStreams(video)", m_session_service);
	if (vinputs.empty() && ainputs.empty()) return PXC_STATUS_ITEM_UNAVAILABLE;

	PXCCapture::AudioStream::DataDesc ainput;
	if (!ConsolidateAudioRequests(ainputs,&ainput)) return PXC_STATUS_ITEM_UNAVAILABLE;

    int n1=CalculateNumFormats(vinputs);

    pxcStatus sts;
    m_desc_filter.group=PXCSession::IMPL_GROUP_SENSOR;
    m_desc_filter.subgroup=(vinputs.size()>0?PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE:0)|(ainputs.size()>0?PXCSession::IMPL_SUBGROUP_AUDIO_CAPTURE:0);
    for (int module_idx=0;;module_idx++) {
        sts = CreateCapture(module_idx, m_capture.ReleaseRef());
        if (sts == PXC_STATUS_ITEM_UNAVAILABLE) break;
        if (sts < PXC_STATUS_NO_ERROR) continue;

        PXCCapture::DeviceInfo dinfo;
        for (int device_idx=0;;device_idx++) {
            sts=m_capture->QueryDevice(device_idx,&dinfo);
            if (sts<PXC_STATUS_NO_ERROR) break;
            if (m_session_service) m_session_service->TraceParam(L"Locating stream(s) on device ", dinfo.name);
            if (m_name_filter) if (!wcsstr(dinfo.name,m_name_filter)) continue;

            sts=m_capture->CreateDevice(device_idx,m_device.ReleaseRef());
            if (sts<PXC_STATUS_NO_ERROR) continue;

            /* Match image formats */
            ClearMaps(vinputs);

            int n2=0;
            bool am = ainputs.empty();   // audio is already "matched" when no audio stream was requested
            for (int stream_idx=0;;stream_idx++) {
                PXCCapture::Device::StreamInfo sinfo;
                sts=m_device->QueryStream(stream_idx, &sinfo);
                if (sts<PXC_STATUS_NO_ERROR) break;
                if (sinfo.cuid==PXCCapture::VideoStream::CUID && n2<n1) {
					PXCSmartPtr<PXCCapture::VideoStream> vstream;
					sts=m_device->CreateStream(stream_idx,PXCCapture::VideoStream::CUID,(void**)&vstream);
					if (sts<PXC_STATUS_NO_ERROR) break;

					std::list<PXCCapture::VideoStream::ProfileInfo> profiles;
					ScanProfiles(profiles,sinfo.imageType,vstream);

					int n3=MatchProfiles(vinputs,sinfo,profiles,(int)m_vstreams.size());
					if (n3==0) continue;

					FindBestProfile(vinputs,profiles,(int)m_vstreams.size());
					sts=vstream->SetProfile(&*profiles.begin());
					if (sts<PXC_STATUS_NO_ERROR) break;

					m_vstreams.push_back(vstream.ReleasePtr());
					n2+=n3;
				}
				if (sinfo.cuid==PXCCapture::AudioStream::CUID && !am) {
					sts=m_device->CreateStream(stream_idx, PXCCapture::AudioStream::CUID, (void**)m_astream.ReleaseRef());
					if (sts<PXC_STATUS_NO_ERROR) continue;

					for (int profile_idx=0;;profile_idx++) {
						PXCCapture::AudioStream::ProfileInfo pinfo;
						sts=m_astream->QueryProfile(profile_idx,&pinfo);
						if (sts<PXC_STATUS_NO_ERROR) break;

						if (ainput.info.nchannels>0  && ainput.info.nchannels!=pinfo.audioInfo.nchannels) continue;
						if (ainput.info.sampleRate>0 && ainput.info.sampleRate!=pinfo.audioInfo.sampleRate) continue;
						if (ainput.info.bufferSize>0 && ainput.info.bufferSize!=pinfo.audioInfo.bufferSize) continue;
						if (ainput.info.channelMask>0 && ainput.info.channelMask!=pinfo.audioInfo.channelMask) continue;

						sts=m_astream->SetProfile(&pinfo);
						if (sts<PXC_STATUS_NO_ERROR) break;

						for (int i=0;i<(int)ainputs.size();i++) {
							memcpy_s(&ainputs[i]->info,sizeof(ainputs[i]->info),&pinfo.audioInfo,sizeof(pinfo.audioInfo));
							ainputs[i]->options=pinfo.audioOptions;
						}
                        am=true;
						break;
					}
					if (sts<PXC_STATUS_NO_ERROR) m_astream.ReleaseRef();
				}
				if (sts>=PXC_STATUS_NO_ERROR && n2>=n1 && am) break;
            }
            if (sts>=PXC_STATUS_NO_ERROR && n2>=n1 && am) 
                if (RecordProperties(vinputs)) break;

            DeleteStreams();
            m_device.ReleaseRef();
        }
        if (sts>=PXC_STATUS_NO_ERROR)
        {
            if (m_session_service) m_session_service->TraceParam(L"Successfully located streams on device ", dinfo.name);
            // update actual image size
            for (int i = 0; i < (int)vinputs.size() && i < (int)m_maps.size(); i++)
            {
                for (int c = 0; c < PXCCapture::VideoStream::STREAM_LIMIT; c++)
                {
                    PXCCapture::VideoStream::ProfileInfo info;
                    if (m_maps[i][c]<0) break;
                    PXCCapture::VideoStream* vstream = m_vstreams[m_maps[i][c]];
                    if (!vstream) break;
                    if (vstream->QueryProfile(&info) >= PXC_STATUS_NO_ERROR)
                    {
                        vinputs[i]->streams[c].sizeMin.width = info.imageInfo.width;
                        vinputs[i]->streams[c].sizeMin.height = info.imageInfo.height;
                        vinputs[i]->streams[c].sizeMax.width = info.imageInfo.width;
                        vinputs[i]->streams[c].sizeMax.height = info.imageInfo.height;
                    }
                }
            }
            break;
        }
        m_capture.ReleaseRef();
    }
    return sts;
}
Example #6
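// MEX entry point: returns the current RGB frame from the UtilPipeline passed in prhs[0]; a second output, if requested, receives the color-to-depth coordinate map.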
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) 
{
    UInt64 *MXadress;
    if(nrhs==0)
    {
        printf("Close failed: Give Pointer to intel camera as input\n");
        mexErrMsgTxt("Intel camera error"); 
    }
    MXadress = (UInt64*)mxGetData(prhs[0]);
    
    if(MXadress[0]==0)
    { 
        return;
    }
    
    UtilPipeline* utilPipelineP = (UtilPipeline*)MXadress[0];
    UtilPipeline& utilPipeline = utilPipelineP[0];
    
    PXCImage *rgbImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
 
    PXCImage::ImageData rgbImageData;
    rgbImage->AcquireAccess(PXCImage::ACCESS_READ,&rgbImageData);

	  
    //if(depthImageData.format != PXCImage::COLOR_FORMAT_DEPTH)
    //{
    //    mexErrMsgTxt("COLOR_FORMAT_DEPTH error"); 
    //}

	if(rgbImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY)
    {
        mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error"); 
    }

	
    PXCImage::ImageInfo rgbInfo;
    rgbImage->QueryInfo(&rgbInfo);
    printf("RGB Image :  Width %d, Height %d \r\n",rgbInfo.width,rgbInfo.height); 

    mwSize dimsRGB[3];
    dimsRGB[0]=3;
    dimsRGB[1]=rgbInfo.width;
    dimsRGB[2]=rgbInfo.height;

    unsigned char *Iout;
    plhs[0] = mxCreateNumericArray(3, dimsRGB, mxUINT8_CLASS, mxREAL);
    Iout = (unsigned char*)mxGetData(plhs[0]);
    memcpy (Iout,rgbImageData.planes[0],dimsRGB[0]*dimsRGB[1]*dimsRGB[2]);  
  
    rgbImage->ReleaseAccess(&rgbImageData);


	
    if(nlhs>1)
    {
		UtilCapture *capture = utilPipeline.QueryCapture();
		if(!capture)
		{
			 printf("No valid capture object\n");
			 return;
		}
		
		PXCCapture::Device *device = capture->QueryDevice();
		if(!device)
		{
			 printf("No valid device object\n");
			 return;
		}

		// Get Camera Projection Data
		PXCSession *session = utilPipeline.QuerySession();
		pxcUID pid;
		device->QueryPropertyAsUID(PXCCapture::Device::PROPERTY_PROJECTION_SERIALIZABLE,&pid);
		PXCSmartPtr<PXCProjection> projection;
		PXCMetadata *metadata = session->DynamicCast<PXCMetadata>();
		metadata->CreateSerializable<PXCProjection>( pid, &projection );
		if(!projection)
		{
			 printf("No valid projection data\n");
			 return;
		}

		pxcU32 npoints = (pxcU32) dimsRGB[1]*(pxcU32)dimsRGB[2];
		
		PXCPointF32 *posc = new PXCPointF32[npoints];
		PXCPointF32 *posd = new PXCPointF32[npoints];
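		// Fill posc with every color-pixel coordinate so the projection can map it into depth-image space.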
				
		int i=0;
		for (int y=0;y< dimsRGB[2];y++)
		{
			for (int x=0;x< dimsRGB[1];x++)
			{
				posc[i].x=(pxcF32)x;
				posc[i].y=(pxcF32)y;
				i++;
			}
		}

		projection->MapColorCoordinatesToDepth(npoints,posc,posd);

		//projection->Release();
	
		mwSize dimsM[3];
		dimsM[0]=2;
		dimsM[1]=dimsRGB[1];
		dimsM[2]=dimsRGB[2];
        plhs[1] = mxCreateNumericArray(3, dimsM, mxSINGLE_CLASS, mxREAL);
        float* Mout = (float*)mxGetData(plhs[1]);
        memcpy (Mout,posd,dimsM[0]*dimsM[1]*dimsM[2]*sizeof(float));  
        
        delete [] posc;
        delete [] posd;
    }
}
Example #7
////////////////////////////////////////////////////////////////////////////////
// protected member functions
////////////////////////////////////////////////////////////////////////////////
int IntelCamera::InitCamera(int device_num)
{
	// create a session
	pxcStatus sts = PXCSession_Create(&session);
	if (sts<PXC_STATUS_NO_ERROR) {
		fprintf(stderr,"Failed to create a session\n");
		return -1;
	}

	// create a video capture
	PXCSession::ImplDesc desc;
	memset(&desc, 0, sizeof(desc));
	desc.subgroup = PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE;

	PXCSmartPtr<PXCCapture> capture;
	sts = session->CreateImpl(&desc, PXCCapture::CUID, (void**)&capture);
	if (sts<PXC_STATUS_NO_ERROR) {
		fprintf(stderr,"Failed to create a capture\n");
		return -1;
	}

	// create a device
	PXCSmartPtr<PXCCapture::Device> device;
	sts = capture->CreateDevice(device_num, &device);
	if (sts<PXC_STATUS_NO_ERROR) {
		fprintf(stderr,"Failed to create a device\n");
		return -1;
	}

	// create color stream: 640x480
	sts = device->CreateStream(0, PXCCapture::VideoStream::CUID, (void**)&pColorStream);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to create color stream\n");
		return -1;
	}
	PXCCapture::VideoStream::ProfileInfo pinfoColor;
	sts = pColorStream->QueryProfile(0, &pinfoColor);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to get color stream profile\n");
		return -1;
	}

	// set for less error on depth image : because of sync color/depth
	pinfoColor.frameRateMax.denominator =  0;
	pinfoColor.frameRateMax.numerator   = 60;
	pinfoColor.frameRateMin.denominator =  0;
	pinfoColor.frameRateMin.numerator   = 60;

	sts = pColorStream->SetProfile(&pinfoColor);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to set color stream profile\n");
		return -1;
	}

	// create depth streams: 320x240
	sts = device->CreateStream(1, PXCCapture::VideoStream::CUID, (void**)&pDepthStream);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to create depth stream\n");
		return -1;
	}
	PXCCapture::VideoStream::ProfileInfo pinfoDepth;
	sts = pDepthStream->QueryProfile(0, &pinfoDepth);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to get depth stream profile\n");
		return -1;
	}

	// set for less error on depth image
	pinfoDepth.frameRateMax.denominator =  1;
	pinfoDepth.frameRateMax.numerator   = 60;
	pinfoDepth.frameRateMin.denominator =  1;
	pinfoDepth.frameRateMin.numerator   = 60;

	sts = pDepthStream->SetProfile(&pinfoDepth);
	if(sts<PXC_STATUS_NO_ERROR){
		fprintf(stderr,"failed to set depth stream profile\n");
		return -1;
	}

	// read streams
	pColorStream->ReadStreamAsync(&images[0], &sps[0]);
	pDepthStream->ReadStreamAsync(&images[1], &sps[1]);

	// get constant values
	pxcUID prj_value;
	device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_SMOOTHING, 1); // depth smoothing
	device->QueryPropertyAsUID(PXCCapture::Device::PROPERTY_PROJECTION_SERIALIZABLE, &prj_value);
	device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, &no_confidence);
	device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE    , &depth_saturation);

	// get projection instance
	sts = session->DynamicCast<PXCMetadata>()->CreateSerializable<PXCProjection>(prj_value, &projection);
	if (sts<PXC_STATUS_NO_ERROR) {
		fprintf(stderr,"Failed to create a projection\n");
		return -1;
	}

	return 1;
}