Code Example #1
File: kinectCapture.cpp Project: caomw/opencv-rgbd
void KinectCapture::GetColorFrame(IMultiSourceFrame* pMultiFrame)
{
	IColorFrameReference* pColorFrameReference = NULL;
	IColorFrame* pColorFrame = NULL;
	HRESULT hr = pMultiFrame->get_ColorFrameReference(&pColorFrameReference);
	if (SUCCEEDED(hr))
	{
		hr = pColorFrameReference->AcquireFrame(&pColorFrame);
	}

	if (SUCCEEDED(hr))
	{
		if (pColorRGBX == NULL)
		{
			IFrameDescription* pFrameDescription = NULL;
			hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			hr = pFrameDescription->get_Width(&nColorFrameWidth);
			hr = pFrameDescription->get_Height(&nColorFrameHeight);
			pColorRGBX = new RGB[nColorFrameWidth * nColorFrameHeight];
			SafeRelease(pFrameDescription);
		}

		UINT nBufferSize = nColorFrameWidth * nColorFrameHeight * sizeof(RGB);
		hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pColorRGBX), ColorImageFormat_Bgra);
	}

	SafeRelease(pColorFrame);
	SafeRelease(pColorFrameReference);
}
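Every example in this listing calls a SafeRelease helper that the snippets themselves do not define. A minimal sketch of the usual definition from the Kinect for Windows SDK samples (an assumption; each project may ship its own variant):

// Release a COM interface pointer and null it so later calls are safe no-ops
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
	if (pInterfaceToRelease != nullptr)
	{
		pInterfaceToRelease->Release();
		pInterfaceToRelease = nullptr;
	}
}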
Code Example #2
File: capture.cpp Project: hitsjt/Kinect-project
void idle()
{
	// Read color data
	IColorFrame* pCFrame = nullptr;
	if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK)
	{
		pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, pColorBuffer, ColorImageFormat_Rgba);

		pCFrame->Release();
		pCFrame = nullptr;
	}

	// Read depth data
	IDepthFrame* pDFrame = nullptr;
	if (pDepthFrameReader->AcquireLatestFrame(&pDFrame) == S_OK)
	{
		pDFrame->CopyFrameDataToArray(uDepthPointNum, pDepthBuffer);

		pDFrame->Release();
		pDFrame = nullptr;

		// map to camera space
		pCoordinateMapper->MapColorFrameToCameraSpace(uDepthPointNum, pDepthBuffer, uColorPointNum, pCSPoints);
	}
}
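The globals this snippet relies on (pDepthBuffer, pCSPoints, and the point counts) are declared elsewhere in capture.cpp. A sketch of plausible definitions, assuming the Kinect v2 native resolutions of 512x424 (depth) and 1920x1080 (color); the names come from the example, the values are assumptions:

// Buffer sizes assumed by the example above (hypothetical declarations)
const UINT uDepthPointNum = 512 * 424;
const UINT uColorPointNum = 1920 * 1080;
UINT16* pDepthBuffer = new UINT16[uDepthPointNum];
CameraSpacePoint* pCSPoints = new CameraSpacePoint[uColorPointNum];
// For CopyConvertedFrameDataToArray: 4 bytes per RGBA pixel
const UINT uColorBufferSize = uColorPointNum * 4;
BYTE* pColorBuffer = new BYTE[uColorBufferSize];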
Code Example #3
File: glut.cpp Project: A2ARevolution/Tutorials
void getKinectData(GLubyte* dest) {
    IColorFrame* frame = NULL;
    if (SUCCEEDED(reader->AcquireLatestFrame(&frame))) {
        frame->CopyConvertedFrameDataToArray(width*height*4, data, ColorImageFormat_Bgra);
    }
    if (frame) frame->Release();
}
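CopyConvertedFrameDataToArray needs the destination to hold width*height*4 bytes for a 32-bit format such as ColorImageFormat_Bgra, which is exactly the size passed above. A sketch of the setup this getKinectData() assumes (names taken from the example; the values assume the Kinect v2 color stream):

// One-time allocation of the BGRA destination buffer (1920x1080, 4 bytes/pixel)
const int width  = 1920;
const int height = 1080;
GLubyte* data = new GLubyte[width * height * 4];   // reused across frames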
Code Example #4
//--------------------------------------------------------------
void ofApp::update(){
	// color
	IColorFrame* pColorFrame = nullptr;
	HRESULT hResult = pColorReader->AcquireLatestFrame(&pColorFrame);

	if (SUCCEEDED(hResult)) {
		hResult = pColorFrame->CopyConvertedFrameDataToArray(colorHeight * colorWidth * colorBytesPerPixels, colorImage.getPixels(), ColorImageFormat_Rgba);
		colorImage.update();
	}

	//body
	IBodyFrame *pBodyFrame = nullptr;
	hResult = pBodyReader->AcquireLatestFrame(&pBodyFrame);

	if (SUCCEEDED(hResult)) {
		IBody *pBody[BODY_COUNT] = {0};
		hResult = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);

		if (SUCCEEDED(hResult)) {

			jointList.clear();

			for (int count = 0; count < BODY_COUNT; count++) {

				JointState state;
				state.userNum = count;
				jointList.push_back(state);

				BOOLEAN bTracked = false;
				hResult = pBody[count]->get_IsTracked(&bTracked);

				if (SUCCEEDED(hResult) && bTracked) {

					Joint joint[JointType::JointType_Count];
					hResult = pBody[count]->GetJoints(JointType::JointType_Count, joint);

					for (int type = 0; type < JointType_Count; type++) {
						jointList.back().joint[type] = joint[type];
					}

				}
			}
		}

		for (int count = 0; count < BODY_COUNT; count++){
			SafeRelease(pBody[count]);
		}


	}

	//release
	SafeRelease(pColorFrame);
	SafeRelease(pBodyFrame);
}
Code Example #5
bool KinectInterface::getFrameData(IMultiSourceFrame* frame, cv::Mat& intensity_mat, cv::Mat& depth_mat, cv::Mat& pos_mat) {
	//Obtain depth frame
	IDepthFrame* depthframe = nullptr;
	if (FAILED(depthFrameReader->AcquireLatestFrame(&depthframe))) return false;
	if (!depthframe) return false;
	// Get data from frame
	unsigned int sz;
	unsigned short* buf;
	if (FAILED(depthframe->AccessUnderlyingBuffer(&sz, &buf))) return false;
	//get depth -> xyz mapping
	if (FAILED(mapper->MapDepthFrameToCameraSpace(width*height, buf, width*height, depth2xyz))) return false;
	//get depth -> rgb image mapping 
	if (FAILED(mapper->MapDepthFrameToColorSpace(width*height, buf, width*height, depth2rgb))) return false;
	//save depth
	if (FAILED(depthframe->CopyFrameDataToArray(height * width, depth_data))) {
		depthframe->Release();
		return false;
	}

	if (depthframe) depthframe->Release();


	//Obtain RGB frame
	IColorFrame* colorframe = nullptr;
	if (FAILED(colorFrameReader->AcquireLatestFrame(&colorframe))) return false;
	if (!colorframe) return false;

	// Get data from frame
	if (FAILED(colorframe->CopyConvertedFrameDataToArray(colorwidth*colorheight * 4, rgbimage, ColorImageFormat_Rgba))) {
		colorframe->Release();
		return false;
	}


	cv::Mat tmp_depth = cv::Mat::zeros(colorheight, colorwidth, CV_16UC1);
	cv::Mat tmp_pos = cv::Mat::zeros(colorheight, colorwidth, CV_32FC3);
	cv::Mat depth_org(height, width, CV_16UC1, depth_data);
	cv::Mat tmp_rgb(colorheight, colorwidth, CV_8UC4, rgbimage);

	// Write color array for vertices
	for (int i = 0; i < width*height; i++) {
		ColorSpacePoint p = depth2rgb[i];
		int iY = (int)(p.Y + 0.5);
		int iX = (int)(p.X + 0.5);
		if (iX >= 0 && iY >= 0 && iX < colorwidth && iY < colorheight) {
			// Check if color pixel coordinates are in bounds
			tmp_depth.at<unsigned short>(iY, iX) = depth_data[i];
			//tmp_pos.at<float>(iY, iX, 0) = depth2xyz[i].X;
			//tmp_pos.at<float>(iY, iX, 1) = depth2xyz[i].Y;
			//tmp_pos.at<float>(iY, iX, 2) = depth2xyz[i].Z;
		}
	}

	if (colorframe) colorframe->Release();

	cv::resize(tmp_rgb(cv::Rect(240, 0, 1440, 1080)), intensity_mat, cv::Size(640, 480));
	cv::resize(tmp_depth(cv::Rect(240, 0, 1440, 1080)), depth_mat, cv::Size(640, 480));
	cv::resize(tmp_pos(cv::Rect(240, 0, 1440, 1080)), pos_mat, cv::Size(640, 480));
	cv::cvtColor(intensity_mat, intensity_mat, CV_RGBA2GRAY);
	return true;
}
Code Example #6
File: module_main.cpp Project: dalbeenet/OpenNUI_G3
bool ms_kinect2::acquire_color_frame(const _OPENNUI byte* dst)
{
    bool result = false;
    IColorFrame* pColorFrame = nullptr;
    static unsigned int bufferSize = 1920 * 1080 * 4 * sizeof(unsigned char);

    HRESULT hResult = S_OK;
    hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
    if (SUCCEEDED(hResult))
    {
        hResult = pColorFrame->CopyConvertedFrameDataToArray(bufferSize, (BYTE*)dst, ColorImageFormat::ColorImageFormat_Rgba);
        if (SUCCEEDED(hResult))
            result = true;
    }
    SafeRelease(pColorFrame);

#ifdef ENVIRONMENT64
    BYTE* dest = (BYTE*)dst;
    for (int i = 0; i < 1920 * 1080; ++i)
    {
        BYTE r = *(dest + (i * 4));
        BYTE b = *(dest + (i * 4) + 2);
        *(dest + (i * 4)) = b;
        *(dest + (i * 4) + 2) = r;
    }
#else
    void* source = (BYTE*)dst;
    __asm
    {
        mov ecx, 1920 * 1080    // set up a counter (number of pixels)
        mov ebx, source         // point ebx at the pixel data
    label:
        mov al, [ebx + 0]       // load the red byte
        mov ah, [ebx + 2]       // load the blue byte
        mov [ebx + 2], al       // swap them in place
        mov [ebx + 0], ah
        add ebx, 4              // advance to the next RGBA pixel
        dec ecx                 // decrement the loop counter
        jnz label               // if not zero, jump back to label
    }
#endif

    return result;
}
Code Example #7
void pcl::Kinect2Grabber::threadFunction()
{
	while (!quit){
		boost::unique_lock<boost::mutex> lock(mutex);

		// Acquire Latest Color Frame
		IColorFrame* colorFrame = nullptr;
		result = colorReader->AcquireLatestFrame(&colorFrame);
		if (SUCCEEDED(result)){
			// Retrieve the color data
			result = colorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
			if (FAILED(result)){
				throw std::exception("Exception : IColorFrame::CopyConvertedFrameDataToArray()");
			}
		}
		SafeRelease(colorFrame);

		// Acquire Latest Depth Frame
		IDepthFrame* depthFrame = nullptr;
		result = depthReader->AcquireLatestFrame(&depthFrame);
		if (SUCCEEDED(result)){
			// Retrieve the depth data
			result = depthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
			if (FAILED(result)){
				throw std::exception("Exception : IDepthFrame::CopyFrameDataToArray()");
			}
		}
		SafeRelease(depthFrame);

		lock.unlock();

		if (signal_PointXYZ->num_slots() > 0) {
			signal_PointXYZ->operator()(convertDepthToPointXYZ(&depthBuffer[0]));
		}

		if (signal_PointXYZRGB->num_slots() > 0) {
			signal_PointXYZRGB->operator()(convertRGBDepthToPointXYZRGB(&colorBuffer[0], &depthBuffer[0]));
		}
		if (signal_PointXYZI->num_slots() > 0) {
			signal_PointXYZI->operator()(convertRGBDepthToPointXYZI(&colorBuffer[0], &depthBuffer[0]));
		}
	}
}
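Consumers do not call threadFunction() directly; they attach to the grabber through the standard pcl::Grabber signal interface. A usage sketch, assuming this Kinect2Grabber registers its cloud signals the way stock PCL grabbers do:

// Connect a callback for XYZRGB clouds, then start the capture thread
boost::mutex cloudMutex;
pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr latestCloud;

boost::function<void(const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr&)> callback =
	[&](const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud) {
		boost::mutex::scoped_lock lock(cloudMutex);
		latestCloud = cloud;	// hand the newest cloud to the render loop
	};

pcl::Kinect2Grabber grabber;
grabber.registerCallback(callback);
grabber.start();
// ... render latestCloud until done ...
grabber.stop();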
Code Example #8
void MultiCursorAppCpp::getRgbImageV2()
{
	int width = 1920;
	int height = 1080;
	unsigned int bufferSize = width * height * 4 * sizeof(unsigned char);
	cv::Mat bufferMat(height, width, CV_8UC4);
	rgbImage = Mat(height / 2, width / 2, CV_8UC4);

	// Frame
	IColorFrame* pColorFrame = nullptr;
	HRESULT hResult = S_OK;
	hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
	if (SUCCEEDED(hResult)){
		hResult = pColorFrame->CopyConvertedFrameDataToArray(bufferSize, reinterpret_cast<BYTE*>(bufferMat.data), ColorImageFormat_Bgra);
		if (SUCCEEDED(hResult)){
			cv::resize(bufferMat, rgbImage, cv::Size(), 0.5, 0.5);
		}
	}
	SafeRelease(pColorFrame);
}
Code Example #9
void KinectHDFaceGrabber::update()
{
	if (!m_pColorFrameReader || !m_pDepthFrameReader){
		return;
	}

    IColorFrame* pColorFrame = nullptr;
    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

	IDepthFrame* depthFrame = nullptr;
	if (SUCCEEDED(hr)){
		hr = m_pDepthFrameReader->AcquireLatestFrame(&depthFrame);
	}
	
    if (SUCCEEDED(hr)){
        ColorImageFormat imageFormat = ColorImageFormat_None;
		
        if (SUCCEEDED(hr)){
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }
	
        if (SUCCEEDED(hr)){
			UINT nBufferSize = m_colorWidth * m_colorHeight * sizeof(RGBQUAD);
			hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(m_colorBuffer.data()), ColorImageFormat_Bgra);
        }
		
		
		if (SUCCEEDED(hr)){
			hr = depthFrame->CopyFrameDataToArray(m_depthBuffer.size(), &m_depthBuffer[0]);
		}
		if (SUCCEEDED(hr)){
			renderColorFrameAndProcessFaces();
		}
				
	}
	
	SafeRelease(depthFrame);
	SafeRelease(pColorFrame);  
}
Code Example #10
File: KinectV2.cpp Project: tokoik/projection
// Retrieve the color data
GLuint KinectV2::getColor() const
{
  // Bind the color texture
  glBindTexture(GL_TEXTURE_2D, colorTexture);

  // If the next color frame has arrived
  IColorFrame *colorFrame;
  if (colorReader->AcquireLatestFrame(&colorFrame) == S_OK)
  {
    // Retrieve the color data, converted to BGRA format
    colorFrame->CopyConvertedFrameDataToArray(colorCount * 4,
      static_cast<BYTE *>(color), ColorImageFormat::ColorImageFormat_Bgra);

    // Release the color frame
    colorFrame->Release();

    // Upload the color data to the texture
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, colorWidth, colorHeight, GL_BGRA, GL_UNSIGNED_BYTE, color);
  }

  return colorTexture;
}
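getColor() only updates the texture contents with glTexSubImage2D, so the texture storage must already exist. A sketch of the one-time initialization this method assumes (hypothetical setup code; colorTexture, colorWidth, and colorHeight come from the example):

// Allocate the color texture once at the sensor's resolution
glGenTextures(1, &colorTexture);
glBindTexture(GL_TEXTURE_2D, colorTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, colorWidth, colorHeight,
  0, GL_BGRA, GL_UNSIGNED_BYTE, nullptr);   // reserve storage, no data yet
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);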
Code Example #11
File: MKinect.cpp Project: martincrb/Face_tracking
void MKinect::getColorData(IMultiSourceFrame* frame, QImage& dest) {
	IColorFrame* colorframe = nullptr;
	IColorFrameReference* frameref = NULL;
	frame->get_ColorFrameReference(&frameref);
	frameref->AcquireFrame(&colorframe);
	if (frameref) frameref->Release();
	if (!colorframe) return;

	// Process color frame data...
	colorframe->CopyConvertedFrameDataToArray(KinectColorWidth*KinectColorHeight * 4, data, ColorImageFormat_Bgra);
	QImage colorImage(data, KinectColorWidth, KinectColorHeight, QImage::Format_RGB32);
	//QImage depthImage(depthData.planes[0], width2, height2, QImage::Format_RGB32);
	dest = colorImage;
	//QDir dir("../tests/k2/last_test");
	//if (!dir.exists()) {
	//	dir.mkpath(".");
	//	colorImage.save("../tests/k2/last_test/image_" + QString::number(_actual_frame) + ".png", 0);
	//}
	//else {
	//	colorImage.save("../tests/k2/last_test/image_" + QString::number(_actual_frame) + ".png", 0);
	//}
	if (colorframe) colorframe->Release();
}
Code Example #12
File: KinectColor.cpp Project: Kousuke-N/Mai-Kagami
void KinectColor::Update() {
	IColorFrame *pColorFrame = NULL;
	HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);
	if (SUCCEEDED(hr))
	{
		hr = pColorFrame->CopyConvertedFrameDataToArray(*bufferSize, reinterpret_cast<BYTE*>(bufferMat->data), ColorImageFormat::ColorImageFormat_Bgra);
		if (SUCCEEDED(hr))
		{
			cv::resize(*bufferMat, *colorMat, cv::Size(), (double)WIDTH / *width, (double)HEIGHT / *height);
			baseImage.GraphData = colorMat->data;
			baseImage.Width = colorMat->cols;
			baseImage.Height = colorMat->rows;
			baseImage.Pitch = colorMat->step;

			if (handle == -1) {
				handle = CreateGraphFromBaseImage(&baseImage);
			}
			else {
				ReCreateGraphFromBaseImage(&baseImage, handle);
			}
		}
	}
	SafeRelease(pColorFrame);
}
Code Example #13
File: cvBody.cpp Project: hitsjt/Kinect-project
int main(int argc, char** argv) {
	// 1a. Get default Sensor
	std::cout << "Try to get default sensor" << std::endl;
	IKinectSensor* pSensor = nullptr;
	if (GetDefaultKinectSensor(&pSensor) != S_OK) {
		cerr << "Get Sensor failed" << std::endl;
		return -1;
	}

	// 1b. Open sensor
	std::cout << "Try to open sensor" << std::endl;
	if (pSensor->Open() != S_OK) {
		cerr << "Can't open sensor" << std::endl;
		return -1;
	}

	// 2. Color Related code
	IColorFrameReader* pColorFrameReader = nullptr;
	cv::Mat	mColorImg;
	UINT uBufferSize = 0;
	{
		// 2a. Get color frame source
		std::cout << "Try to get color source" << std::endl;
		IColorFrameSource* pFrameSource = nullptr;
		if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) {
			cerr << "Can't get color frame source" << std::endl;
			return -1;
		}

		// 2b. Get frame description
		std::cout << "get color frame description" << std::endl;
		int		iWidth = 0;
		int		iHeight = 0;
		IFrameDescription* pFrameDescription = nullptr;
		if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)	{
			pFrameDescription->get_Width(&iWidth);
			pFrameDescription->get_Height(&iHeight);
		}
		pFrameDescription->Release();
		pFrameDescription = nullptr;

		// 2c. get frame reader
		std::cout << "Try to get color frame reader" << std::endl;
		if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) {
			cerr << "Can't get color frame reader" << std::endl;
			return -1;
		}

		// 2d. release Frame source
		std::cout << "Release frame source" << std::endl;
		pFrameSource->Release();
		pFrameSource = nullptr;

		// Prepare OpenCV data
		mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
		uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
	}

	// 3. Body related code
	IBodyFrameReader* pBodyFrameReader = nullptr;
	IBody** aBodyData = nullptr;
	INT32 iBodyCount = 0;
	{
		// 3a. Get frame source
		std::cout << "Try to get body source" << std::endl;
		IBodyFrameSource* pFrameSource = nullptr;
		if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) {
			cerr << "Can't get body frame source" << std::endl;
			return -1;
		}

		// 3b. Get the number of body
		if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) {
			cerr << "Can't get body count" << std::endl;
			return -1;
		}
		std::cout << " > Can trace " << iBodyCount << " bodies" << std::endl;
		aBodyData = new IBody*[iBodyCount];
		for (int i = 0; i < iBodyCount; ++i)
			aBodyData[i] = nullptr;

		// 3c. get frame reader
		std::cout << "Try to get body frame reader" << std::endl;
		if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) {
			cerr << "Can't get body frame reader" << std::endl;
			return -1;
		}

		// 3d. release Frame source
		std::cout << "Release frame source" << std::endl;
		pFrameSource->Release();
		pFrameSource = nullptr;
	}

	// 4. get CoordinateMapper
	ICoordinateMapper* pCoordinateMapper = nullptr;
	if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) {
		std::cout << "Can't get coordinate mapper" << std::endl;
		return -1;
	}

	// Enter main loop
	cv::namedWindow("Body Image");

	// Debug:output the velocity of joints
	ofstream current_average_velocityTXT("current_average_velocity.txt");
	ofstream average_velocityTXT("average_velocity.txt");
	int frame_count = 0;
	int frame_count_for_standby = 0;
	float positionX0[25] = {0};
	float positionX1[25] = {0};
	float positionY0[25] = { 0 };
	float positionY1[25] = { 0 };
	float positionZ0[25] = { 0 };
	float positionZ1[25] = { 0 };

	float velocityX[25] = { 0 };
	float velocityY[25] = { 0 };
	float velocityZ[25] = { 0 };
	float current_velocity[25] = { 0 };
	float velocityee[8] = { 0 };
	float current_total_velocity = 0;
	float current_average_velocity = 0;
	float total_velocity = 0;
	float average_velocity = 0;

	while (true)
	{
		// 4a. Get last frame
		IColorFrame* pColorFrame = nullptr;
		if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)	 {
			// 4c. Copy to OpenCV image
			if (pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra) != S_OK)	{
				cerr << "Data copy error" << endl;
			}
			// 4e. release frame
			pColorFrame->Release();
		}
		cv::Mat mImg = mColorImg.clone();
		// 4b. Get body data
		IBodyFrame* pBodyFrame = nullptr;	
		if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK) {
			// 4b. get Body data
			if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK) {
				// 4c. for each body
				for (int i = 0; i < iBodyCount; ++i) {
					IBody* pBody = aBodyData[i];
					// check if is tracked
					BOOLEAN bTracked = false;
					if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked) {
						// get joint position
						Joint aJoints[JointType::JointType_Count];
						if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK) {
							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper);
						}
						// Debug:print out the number of frame					
						std::cout << "frame " << ++frame_count << std::endl;
					
						// shift the 8-slot velocity history back by one frame
						// (slot 0 receives the newest value further below)
						for (int j = 7; j >= 1; j--) {
							velocityee[j] = velocityee[j - 1];
							total_velocity += velocityee[j];
						}
						total_velocity += velocityee[0];
						average_velocity = total_velocity / 8.0;
						
						if (average_velocity <= 0.0015) {	
							// determine if the person is still 
							if (frame_count_for_standby == 0) {
								PlaySound(TEXT("Alarm02.wav"), NULL, SND_FILENAME);
								std::cout << "Start capturing points!" << std::endl;
							}
							// count the number of frame whose velocity is below the threshold
							frame_count_for_standby++;
							if (frame_count_for_standby >= 5) {
								frame_count_for_standby = 0;
							}
						} 
						// Debug:output the average velocity 
						average_velocityTXT << frame_count << " " << average_velocity << std::endl;
						total_velocity = 0;
						// Update the average velocity
						int available_joints = 0;
						for (int i = 0; i < 25; i++) {
							// X 
							positionX1[i] = positionX0[i];
							positionX0[i] = aJoints[i].Position.X;
							velocityX[i] = (positionX1[i] - positionX0[i]) * (positionX1[i] - positionX0[i]);
							// Y
							positionY1[i] = positionY0[i];
							positionY0[i] = aJoints[i].Position.Y;
							velocityY[i] = (positionY1[i] - positionY0[i]) * (positionY1[i] - positionY0[i]);
							// Z
							positionZ1[i] = positionZ0[i];
							positionZ0[i] = aJoints[i].Position.Z;
							velocityZ[i] = (positionZ1[i] - positionZ0[i]) * (positionZ1[i] - positionZ0[i]);
							current_velocity[i] = sqrtf(velocityX[i] + velocityY[i] + velocityZ[i]);
							// exclude the discrete velocity
							if (current_velocity[i] < 0.01) {
								current_total_velocity += current_velocity[i];
								available_joints++;
							}
						}
						// If no joint is available, save the velocity of last frame
						if (available_joints != 0) {
							current_average_velocity = current_total_velocity / available_joints;
						}
						velocityee[0] = current_average_velocity;
						// Debug:output the current average velocity 
						current_average_velocityTXT << frame_count << " " << current_average_velocity << std::endl;
											
						current_total_velocity = 0;					
					}
				}
			} else {
				cerr << "Can't read body data" << endl;
			}
			// 4e. release frame
			pBodyFrame->Release();
		}
		// show image
		cv::imshow("Body Image",mImg);
		// 4c. check keyboard input
		if (cv::waitKey(30) == VK_ESCAPE) {
			break;
		}
	}
	// 3. delete body data array
	delete[] aBodyData;
	// 3. release frame reader
	std::cout << "Release body frame reader" << std::endl;
	pBodyFrameReader->Release();
	pBodyFrameReader = nullptr;
	// 2. release color frame reader
	std::cout << "Release color frame reader" << std::endl;
	pColorFrameReader->Release();
	pColorFrameReader = nullptr;
	// 1c. Close Sensor
	std::cout << "close sensor" << std::endl;
	pSensor->Close();
	// 1d. Release Sensor
	std::cout << "Release sensor" << std::endl;
	pSensor->Release();
	pSensor = nullptr;

	return 0;
}
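The DrawLine helper called throughout the skeleton-drawing loop is not included in this listing. A minimal sketch of what such a helper typically does (project two camera-space joints into color space with ICoordinateMapper, then draw a segment), assuming a 1920x1080 color image; this is a hypothetical reconstruction, not the project's actual code:

// Hypothetical reconstruction of the DrawLine helper used above
void DrawLine(cv::Mat& img, const Joint& j1, const Joint& j2, ICoordinateMapper* pMapper)
{
	// Skip joints the sensor could not track at all
	if (j1.TrackingState == TrackingState_NotTracked ||
		j2.TrackingState == TrackingState_NotTracked)
		return;

	// Project the 3D camera-space positions into 2D color-space pixels
	ColorSpacePoint p1, p2;
	pMapper->MapCameraPointToColorSpace(j1.Position, &p1);
	pMapper->MapCameraPointToColorSpace(j2.Position, &p2);

	cv::line(img, cv::Point((int)p1.X, (int)p1.Y),
		cv::Point((int)p2.X, (int)p2.Y), cv::Scalar(0, 0, 255), 2);
}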
Code Example #14
File: capture.cpp Project: hitsjt/Kinect-project
void capture_point()    // capture the human body
{
	int count_number = 0;
	
	#ifdef HUMANCOLORIMAGE
	// Read color data
	IColorFrame* pCFrame = nullptr;
	if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK) {
		// Capture the Kinect frame with OpenCV
		cv::Mat	mImg(iColorHeight, iColorWidth, CV_8UC4);
		cv::Mat gray_image(iColorHeight, iColorWidth, CV_8UC4);
		cv::namedWindow("Color Image", 0);						// Create a window for the color image
		if (pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, mImg.data, ColorImageFormat_Bgra) == S_OK)	{
			if ((int)pColorBuffer[0] != 0) {
				raw_color = mImg.clone();
				cv::cvtColor(mImg, gray_image, CV_BGRA2GRAY);		// Convert the BGRA image to grayscale
				cv::imshow("Color Image", raw_color);				// Show the color image

				# ifdef BACKGROUND
				cv::imwrite("background_color.bmp", mImg);		// Save the background color image
				# endif

				cout << "Press any key on the image window to continue!" << endl;
				cv::waitKey();
			}
		}

		# ifdef CAPTURE
		absdiff();
		watershed();
		human_mask();
		# endif

		cv::destroyWindow("Color Image");
		pCFrame->Release();
		pCFrame = nullptr;
	}
	#endif

	for (int y = 0; y < iColorHeight; ++y) {
		for (int x = 0; x < iColorWidth; ++x) {
			int idx = x + y * iColorWidth;
			CameraSpacePoint& rPt = pCSPoints[idx];

			if (rPt.Z <= 0)
				rPt.X = rPt.Y = rPt.Z = 0;

			if (rPt.Z < 2.35 && rPt.Z != 0 && rPt.Z > 1.1 && rPt.X < 0.69 && rPt.X > -0.69 && rPt.Y > -0.88) {
				//raw_point << rPt.X * 1000 << " " << rPt.Z * 1000 << " " << rPt.Y * 1000 << endl;

				if (rPt.Y > -0.74 && idx % 75 == 0) 	{
					//for_icp << rPt.X * 1000 << " " << rPt.Z * 1000 << " " << rPt.Y * 1000 << endl;
				}
			}
			count_number++;
		}
	}
	
	//for_icp.close();
	//raw_point.close();

	//if (count_number>0)
		//system("pause");
}
Code Example #15
int main()
{

	// name and position windows
	cvNamedWindow("Color Probabilistic Tracking - Samples", 1);
	cvMoveWindow("Color Probabilistic Tracking - Samples", 0, 0);
	cvNamedWindow("Color Probabilistic Tracking - Result", 1);
	cvMoveWindow("Color Probabilistic Tracking - Result", 1000, 0);

	//control mouse
	setMouseCallback("Color Probabilistic Tracking - Samples", onMouse, 0);

	cv::setUseOptimized(true);

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor(&pSensor);
	if (FAILED(hResult)) {
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource(&pColorSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource(&pDepthSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	/*IBodyIndexFrameSource* pBodyIndexSource;
	hResult = pSensor->get_BodyIndexFrameSource(&pBodyIndexSource);*/

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader(&pColorReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader(&pDepthReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	//IBodyIndexFrameReader* pBodyIndexReader;//saferealease
	//hResult = pBodyIndexSource->OpenReader(&pBodyIndexReader);

	// Description
	IFrameDescription* pColorDescription;
	hResult = pColorSource->get_FrameDescription(&pColorDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width(&colorWidth); // 1920
	pColorDescription->get_Height(&colorHeight); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof(unsigned char);

	cv::Mat colorBufferMat(colorHeight, colorWidth, CV_8UC4);
	cv::Mat colorMat(colorHeight / 2, colorWidth / 2, CV_8UC4);
	cv::namedWindow("Color");

	RGBQUAD* m_pDepthRGBX;
	m_pDepthRGBX = new RGBQUAD[512 * 424];// create heap storage for color pixel data in RGBX format

	IFrameDescription* pDepthDescription;
	hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int depthWidth = 0;
	int depthHeight = 0;
	pDepthDescription->get_Width(&depthWidth); // 512
	pDepthDescription->get_Height(&depthHeight); // 424
	unsigned int depthBufferSize = depthWidth * depthHeight * sizeof(unsigned short);

	cv::Mat depthBufferMat(depthHeight, depthWidth, CV_16UC1);
	UINT16* pDepthBuffer = nullptr;
	cv::Mat depthMat(depthHeight, depthWidth, CV_8UC1);
	cv::namedWindow("Depth");

	//UINT32 nBodyIndexSize = 0;
	//BYTE* pIndexBuffer = nullptr;//This hasn't been safe realease yet

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	cv::Mat coordinateMapperMat(depthHeight, depthWidth, CV_8UC4);
	cv::namedWindow("CoordinateMapper");

	unsigned short minDepth, maxDepth;
	pDepthSource->get_DepthMinReliableDistance(&minDepth);
	pDepthSource->get_DepthMaxReliableDistance(&maxDepth);




	while (1) {

		double t = (double)getTickCount();


		// Color Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
		if (SUCCEEDED(hResult)) {
			hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBufferSize, reinterpret_cast<BYTE*>(colorBufferMat.data), ColorImageFormat::ColorImageFormat_Bgra);
			if (SUCCEEDED(hResult)) {
				cv::resize(colorBufferMat, colorMat, cv::Size(), 0.5, 0.5);
			}
		}
		//SafeRelease( pColorFrame );

		// Depth Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);

		if (SUCCEEDED(hResult)) {
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, reinterpret_cast<UINT16**>(&depthBufferMat.data));

		}

		if (SUCCEEDED(hResult)) {
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, &pDepthBuffer);

			if (SUCCEEDED(hResult))
			{
				RGBQUAD* pRGBX = m_pDepthRGBX;

				// end pixel is start + width*height - 1
				const UINT16* pBufferEnd = pDepthBuffer + (512 * 424);
				int index = 0;

				while (pDepthBuffer < pBufferEnd)
				{
					USHORT depth = *pDepthBuffer;

					BYTE intensity = static_cast<BYTE>((depth >= 50) && (depth <= 5000) ? (depth % 256) : 0);

					pRGBX->rgbRed = intensity;
					pRGBX->rgbGreen = intensity;
					pRGBX->rgbBlue = intensity;

					depthData[index] = depth;

					++index;
					++pRGBX;
					++pDepthBuffer;
				}
			}
		}

		Mat DepthImage(424, 512, CV_8UC4, m_pDepthRGBX);
		//SafeRelease( pDepthFrame );

		// Mapping (Depth to Color)
		if (SUCCEEDED(hResult)) {
			std::vector<ColorSpacePoint> colorSpacePoints(depthWidth * depthHeight);
			hResult = pCoordinateMapper->MapDepthFrameToColorSpace(depthWidth * depthHeight, reinterpret_cast<UINT16*>(depthBufferMat.data), depthWidth * depthHeight, &colorSpacePoints[0]);
			if (SUCCEEDED(hResult)) {
				coordinateMapperMat = cv::Scalar(0, 0, 0, 0);
				for (int y = 0; y < depthHeight; y++) {
					for (int x = 0; x < depthWidth; x++) {
						unsigned int index = y * depthWidth + x;
						ColorSpacePoint point = colorSpacePoints[index];
						int colorX = static_cast<int>(std::floor(point.X + 0.5));
						int colorY = static_cast<int>(std::floor(point.Y + 0.5));
						unsigned short depth = depthBufferMat.at<unsigned short>(y, x);
						if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight)/* && ( depth >= minDepth ) && ( depth <= maxDepth )*/) {
							coordinateMapperMat.at<cv::Vec4b>(y, x) = colorBufferMat.at<cv::Vec4b>(colorY, colorX);
						}
					}
				}
			}
		}

		if (SUCCEEDED(hResult))
		{

			// The particle filter starts here
			frame = &(IplImage)coordinateMapperMat; // convert the cv::Mat into an IplImage header

			if (image == 0)
			{
				// initialize variables and allocate buffers 
				image = cvCreateImage(cvGetSize(frame), 8, 4);//every pixel has 4 channels
				image->origin = frame->origin;

				result = cvCreateImage(cvGetSize(frame), 8, 4);
				result->origin = frame->origin;

				hsv = cvCreateImage(cvGetSize(frame), 8, 3);

				hue = cvCreateImage(cvGetSize(frame), 8, 1);

				sat = cvCreateImage(cvGetSize(frame), 8, 1);

				histimg_ref = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg_ref->origin = frame->origin;
				cvZero(histimg_ref);

				histimg = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg->origin = frame->origin;
				cvZero(histimg);

				bin_w = histimg_ref->width / BIN;
				bin_h = histimg_ref->height / BIN;


				data1.sample_t = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_t_1 = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));
				data1.accum_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));

			}

			cvCopy(frame, image);
			cvCopy(frame, result);
			cvCvtColor(image, hsv, CV_BGR2HSV);//image ~ hsv


			if (tracking)
			{
				//v_max = 0.0;

				cvSplit(hsv, hue, 0, 0, 0);//hsv->hue
				cvSplit(hsv, 0, 0, sat, 0);//hsv-saturation

				if (selecting)
				{

					// get the selected target area
					//ref_v_max = 0.0;
					area.width = abs(P_org.x - P_end.x);
					area.height = abs(P_org.y - P_end.y);
					area.x = MIN(P_org.x, P_end.x);
					area.y = MIN(P_org.y, P_end.y);

					cvZero(histimg_ref);

					// build reference histogram
					cvSetImageROI(hue, area);
					cvSetImageROI(sat, area);

					// zero reference histogram
					for (i = 0; i < BIN; i++)
						for (j = 0; j < BIN; j++)
							hist_ref[i][j] = 0.0;

					// calculate reference histogram
					for (i = 0; i < area.height; i++)
					{
						for (j = 0; j < area.width; j++)
						{
							im_hue = cvGet2D(hue, i, j);
							im_sat = cvGet2D(sat, i, j);
							k = int(im_hue.val[0] / STEP_HUE);
							h = int(im_sat.val[0] / STEP_SAT);
							hist_ref[k][h] = hist_ref[k][h] + 1.0;
						}
					}


					// rescale the value of each bin in the reference histogram 
					// and show it as an image
					for (i = 0; i < BIN; i++)
					{
						for (j = 0; j < BIN; j++)
						{
							hist_ref[i][j] = hist_ref[i][j] / (area.height*area.width);
						}
					}

					cvResetImageROI(hue);
					cvResetImageROI(sat);

					// initialize tracking and samples
					track_win = area;
					Initdata(track_win);
					track_win_last = track_win;

					// set up flag of tracking
					selecting = 0;

				}

				// sample propagation and weighting
				track_win = ImProcess(hue, sat, hist_ref, track_win_last);
				FrameNumber++;

				track_win_last = track_win;
				cvZero(histimg);

				// draw the one RED bounding box
				cvRectangle(image, cvPoint(track_win.x, track_win.y), cvPoint(track_win.x + track_win.width, track_win.y + track_win.height), CV_RGB(255, 0, 0), 2);
				printf("width = %d, height = %d\n", track_win.width, track_win.height);

				// save certain images
				if (FrameNumber % 10 == 0)
				{
					if (FrameNumber / 10 == 1) cvSaveImage("./imageout1.jpg", image);

					if (FrameNumber / 10 == 2) cvSaveImage("./imageout2.jpg", image);

					if (FrameNumber / 10 == 3) cvSaveImage("./imageout3.jpg", image);

					if (FrameNumber / 10 == 4) cvSaveImage("./imageout4.jpg", image);

					if (FrameNumber / 10 == 5) cvSaveImage("./imageout5.jpg", image);

					if (FrameNumber / 10 == 6) cvSaveImage("./imageout6.jpg", image);

					if (FrameNumber / 10 == 7) cvSaveImage("./imageout7.jpg", image);

					if (FrameNumber / 10 == 8) cvSaveImage("./imageout8.jpg", image);
				}

				// save certain images
				if (FrameNumber % 10 == 0)
				{
					if (FrameNumber / 10 == 1) cvSaveImage("./resultout1.jpg", result);

					if (FrameNumber / 10 == 2) cvSaveImage("./resultout2.jpg", result);

					if (FrameNumber / 10 == 3) cvSaveImage("./resultout3.jpg", result);

					if (FrameNumber / 10 == 4) cvSaveImage("./resultout4.jpg", result);

					if (FrameNumber / 10 == 5) cvSaveImage("./resultout5.jpg", result);

					if (FrameNumber / 10 == 6) cvSaveImage("./resultout6.jpg", result);

					if (FrameNumber / 10 == 7) cvSaveImage("./resultout7.jpg", result);

					if (FrameNumber / 10 == 8) cvSaveImage("./resultout8.jpg", result);
				}

				//draw a same bounding box in DepthImage
				rectangle(DepthImage, track_win, CV_RGB(255, 0, 0), 2);

				//******************************************************Geodesic Distance***************************************************************************************
					//Point propagation and weight
					if (PointTrack == 1)
					{

						if (PointSelect == 1)//only visit once
						{

							// initialize tracking and samples
							for (int i = 0; i < SAMPLE; i++)
							{
								point[i].x_1 = P_track.x;
								point[i].y_1 = P_track.y;
								point[i].z_1 = depthData[P_track.x + P_track.y * 512];
								point[i].x_1_prime = 0.0;
								point[i].y_1_prime = 0.0;
							}
							
							refeFlag = 1;

							p_win = P_track;

							//p_transtart is the start point of the surface mesh
							P_transtart.x = track_win.x;
							P_transtart.y = track_win.y;

							PointSelect = 0;

						}

						//construct the graph(mesh)
						ConstructMesh(depthData, adjlist, P_transtart,track_win.width,track_win.height);

						//calculate shortest path
						vector<int> vertexDist;
						vertexDist.resize(track_win.width*track_win.height);

						ShortestPath(P_extre, adjlist,  vertexDist);

						cvCircle(image, P_extre, 3, CV_RGB(0, 255, 0),1);

						// generate the reference distance for comparison
						if (refeFlag > 0)
						{
							cvCircle(image, p_win, 3, CV_RGB(0, 0, 255), 1);

							int track = abs(P_transtart.x - P_track.x) + track_win.width * abs(P_transtart.y - P_track.y);

							referDistance = vertexDist[track];

							refeFlag = 0;
						}
						
						//samples propagation
						PredictPoint(p_win);

						// get the geodesic distance for each sample and
						// find the sample whose distance is most similar to the reference distance
						float Dist, AbsDist, WinDist, minAbsDist = 10000;
						int number,sum=0,count=0;

						for (int i = 0; i < SAMPLE; i++)
						{
							int t = abs(P_transtart.x - point[i].x) + track_win.width * abs(P_transtart.y - point[i].y);
							if (adjlist[t].v == false) { count++; continue; }

							int refer = abs(point[i].x - P_transtart.x) + track_win.width * abs(point[i].y - P_transtart.y);
							Dist = vertexDist[refer];

							AbsDist = fabs(referDistance - Dist);

							//point[i].SampleWeight = AbsDist;
							//point[i].AccumWeight = sum;
							//sum = sum + AbsDist;

							if (AbsDist < minAbsDist) { minAbsDist = AbsDist; number = i; WinDist = Dist; }

						}

						//for (int i = 0; i < SAMPLE; i++)
						//{
						//	point[i].SampleWeight = point[i].SampleWeight / sum;
						//	point[i].AccumWeight = point[i].AccumWeight / sum;
						//}

						printf("referDist = %f, winDist = %f, discardPoints = %d\n", referDistance, WinDist,count);
						
						p_win_last = p_win;
						p_win.x = point[number].x;
						p_win.y = point[number].y;

						//samples re-location
						float deltaX = p_win.x - p_win_last.x;
						float deltaY = p_win.y - p_win_last.y;

						UpdatePoint(number, deltaX, deltaY);

						cvCircle(image, p_win, 5, CV_RGB(0, 0, 0));
					}



				//	//****************************************************************************************************************************************
			}

			// if still selecting a target, show the RED selected area
			else cvRectangle(image, P_org, P_end, CV_RGB(255, 0, 0), 1);

		}

		imshow("Depth", DepthImage);
		cvShowImage("Color Probabilistic Tracking - Samples", image);
		cvShowImage("Color Probabilistic Tracking - Result", result);

		SafeRelease(pColorFrame);
		SafeRelease(pDepthFrame);
		//SafeRelease(pBodyIndexFrame);

		cv::imshow("Color", colorMat);
		cv::imshow("Depth", DepthImage);
		cv::imshow("CoordinateMapper", coordinateMapperMat);

		//END OF THE TIME POINT
		t = ((double)getTickCount() - t) / getTickFrequency();
		t = 1 / t;

		//cout << "FPS:" << t << "FrameNumber\n" << FrameNumebr<< endl;
		printf("FPS:%f Frame:%d \n\n", t, FrameNumber);


		if (cv::waitKey(30) == VK_ESCAPE) {
			break;
		}
	}

	SafeRelease(pColorSource);
	SafeRelease(pDepthSource);
	//SafeRelease(pBodyIndexSource);

	SafeRelease(pColorReader);
	SafeRelease(pDepthReader);
	//SafeRelease(pBodyIndexReader);

	SafeRelease(pColorDescription);
	SafeRelease(pDepthDescription);
	SafeRelease(pCoordinateMapper);
	if (pSensor) {
		pSensor->Close();
	}
	SafeRelease(pSensor);
	cv::destroyAllWindows();


	cvReleaseImage(&image);
	cvReleaseImage(&result);
	cvReleaseImage(&histimg_ref);
	cvReleaseImage(&histimg);
	cvReleaseImage(&hsv);
	cvReleaseImage(&hue);
	cvReleaseImage(&sat);
	cvDestroyWindow("Color Probabilistic Tracking - Samples");
	cvDestroyWindow("Color Probabilistic Tracking - Result");

	return 0;
}
Code Example #16
File: CameraKinect.cpp Project: marekkopicki/Golem
	void capture(Image::Ptr& pImage)
	{
		HRESULT hr;
		
		if (m_pMultiSourceFrameReader==nullptr)
		{
			camera->getContext().error("CameraKinectDevice::capture: m_pMultiSourceFrameReader is nullptr\n");
			// this is bad news - perhaps throw?
			return; // @@@
		}

		IMultiSourceFrame* pMultiSourceFrame = nullptr;
		IDepthFrame* pDepthFrame = nullptr;
		IColorFrame* pColorFrame = nullptr;

		const golem::MSecTmU32 waitStep = 1;
		golem::MSecTmU32 timeWaited = 0;
		golem::Sleep timer;
		while (FAILED(hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame)))
		{
			// this is in CameraOpenNI, but suspect may be causing problem here
			// if (camera->isTerminating())	return;

			timer.msleep(waitStep);
			timeWaited += waitStep;
			if (timeWaited >= timeout)
			{
				camera->getContext().error("CameraKinectDevice::capture: failed to acquire frame within %d ms\n", timeout);
				// keep going - don't return with nothing; reset stopwatch @@@
				timeWaited = 0;
			}
		}
		
		const golem::SecTmReal systemTime1 = camera->getContext().getTimer().elapsed();

		if (SUCCEEDED(hr))
		{
			IDepthFrameReference* pDepthFrameReference = nullptr;

			hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
			if (SUCCEEDED(hr))
			{
				hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
			}
			RELEASE_PTR(pDepthFrameReference);
		}

		if (SUCCEEDED(hr))
		{
			IColorFrameReference* pColorFrameReference = nullptr;

			hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
			if (SUCCEEDED(hr))
			{
				hr = pColorFrameReference->AcquireFrame(&pColorFrame);
			}
			RELEASE_PTR(pColorFrameReference);
		}

		if (SUCCEEDED(hr))
		{
			INT64 nDepthTime = 0;
			IFrameDescription* pDepthFrameDescription = nullptr;
			int nDepthWidth = 0;
			int nDepthHeight = 0;
			UINT nDepthBufferSize = 0;
			UINT16 *pDepthBuffer = nullptr;

			IFrameDescription* pColorFrameDescription = nullptr;
			int nColorWidth = 0;
			int nColorHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nColorBufferSize = 0;
			RGBQUAD *pColorBuffer = nullptr;

			// get depth frame data

			hr = pDepthFrame->get_RelativeTime(&nDepthTime);

			if (SUCCEEDED(hr))
				hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);

			if (SUCCEEDED(hr))
				hr = pDepthFrameDescription->get_Width(&nDepthWidth);

			if (SUCCEEDED(hr))
				hr = pDepthFrameDescription->get_Height(&nDepthHeight);

			if (SUCCEEDED(hr))
				hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);

			// get color frame data

			if (SUCCEEDED(hr))
				hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);

			if (SUCCEEDED(hr))
				hr = pColorFrameDescription->get_Width(&nColorWidth);

			if (SUCCEEDED(hr))
				hr = pColorFrameDescription->get_Height(&nColorHeight);

			if (SUCCEEDED(hr))
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);

			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Bgra)
				{
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
				}
				else if (m_pColorRGBX)
				{
					pColorBuffer = m_pColorRGBX;
					nColorBufferSize = nColorWidth * nColorHeight * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
				}
				else
				{
					hr = E_FAIL;
				}
			}
			
			cv::Mat colorFrame;
			colorFrame.create(cColorHeight, cColorWidth, CV_8UC4);
			colorFrame.setTo(cv::Scalar(0, 0, 0, 0));
			// copy the color buffer into the cv::Mat
			memcpy(colorFrame.data, pColorBuffer, (nColorWidth * nColorHeight) * 4);
			// the Kinect image is mirrored, so flip it back
			cv::flip(colorFrame, colorFrame, 1);
			// conversion to pImage, which is BGR
			cv::Mat colorFrameT;
			cv::cvtColor(colorFrame, colorFrameT, CV_BGRA2BGR);
			
			if (mode & CameraKinect::MODE_COLOUR)
			{
				// initialize buffer
				if (pImage == nullptr)
					pImage.reset(new Image());
				pImage->reserve((int)1920, (int)1080);
				//printf("Before clonning frame\n");
				IplImage * img = cvCloneImage(&(IplImage)colorFrameT);
				cvCopy(img, pImage->image);
				cvReleaseImage(&img);
				//printf("After clonning frame\n");
				const golem::SecTmReal systemTime2 = camera->getContext().getTimer().elapsed();
				// setting the timestamp
				pImage->timeStamp = REAL_HALF*(systemTime1 + systemTime2);
			}
			//camera->getContext().debug("After colour\n");
			// keep a pointer to the beginning of the buffer
			const UINT16* pBufferStart = pDepthBuffer;
			const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
			//copying depth buffer into cv::Mat
			long depthSizeP = (pBufferEnd - pBufferStart) * 2;

			cv::Mat depthFrame;
			depthFrame.create(cDepthHeight, cDepthWidth, CV_16UC1);
			depthFrame.setTo(cv::Scalar(0));
			memcpy(depthFrame.data, pBufferStart, depthSizeP);
			// the Kinect image is mirrored, so flip it back
			cv::flip(depthFrame, depthFrame, 1);
			//camera->getContext().debug("After getting depth data\n");
			//depth mode color data mapped into camera space (remember high resolution 1920x1080)
			if (mode & CameraKinect::MODE_DEPTH && (std::stoi(this->property.format.c_str()) == 101))
			{
				//camera->getContext().debug("In depth mode\n");
				if (pImage == nullptr)
					pImage.reset(new Image());
				pImage->reserve((int)1920, (int)1080);

				pImage->cloud.reset(new PointSeq((uint32_t)property.width, (uint32_t)property.height));
			
				//Obtaining the pointcloud
				hasImageStream = false;
				float missingPoint = std::numeric_limits<float>::quiet_NaN();

				hr = m_pCoordinateMapper->MapColorFrameToCameraSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, cColorHeight * cColorWidth, m_pCameraCoordinatesColor);

				golem::Point* pOutWidth = &pImage->cloud->front();
				CameraSpacePoint* pCamCWidth = m_pCameraCoordinatesColor;

				for (int y = 0; y < cColorHeight; ++y)
				{
					golem::Point* pOut = pOutWidth;
					CameraSpacePoint* pCamC = pCamCWidth;

					for (int x = 0; x < cColorWidth; ++x, pCamC++, ++pOut) {
						cv::Vec4b color;
						// compare the float depth directly; truncating it to int
						// would discard valid points closer than one metre
						if (pCamC->Z != 0) {

							pOut->x = pCamC->X;
							pOut->y = pCamC->Y;
							pOut->z = pCamC->Z;
						}
						else {
							pOut->x = pOut->y = pOut->z = missingPoint;
							//printf("Getting to this point4\n");
						}
						pOut->normal_x = pOut->normal_y = pOut->normal_z = golem::numeric_const<float>::ZERO;

						color = colorFrame.at<cv::Vec4b>(y, x);
						if (hasImageStream) {
							pOut->b = color[0];
							pOut->g = color[1];
							pOut->r = color[2];
							pOut->a = colour._rgba.a;
						}
						else {
							pOut->r = colour._rgba.r;
							pOut->g = colour._rgba.g;
							pOut->b = colour._rgba.b;
							pOut->a = colour._rgba.a;
						}
					}
					// advance the row pointers; colorFrame is indexed absolutely via at<>,
					// so it must not be offset (cv::Mat += adds a scalar to every pixel)
					pCamCWidth += cColorWidth;
					pOutWidth += cColorWidth;
				}
			}

			//depth mode depth data mapped into camera space
			if (mode & CameraKinect::MODE_DEPTH && (std::stoi(this->property.format.c_str()) == 102))
			{
				if (pImage == nullptr)
					pImage.reset(new Image());
				//camera->getContext().debug("In depth mode\n");
				pImage->cloud.reset(new PointSeq((uint32_t)property.width, (uint32_t)property.height));
				//Obtaining the pointcloud
				hasImageStream = false;
				float missingPoint = std::numeric_limits<float>::quiet_NaN();

				hr = m_pCoordinateMapper->MapDepthFrameToCameraSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, cDepthHeight * cDepthWidth, m_pCameraCoordinatesDepth);

				golem::Point* pOutWidth = &pImage->cloud->front();
				CameraSpacePoint* pCamDWidth = m_pCameraCoordinatesDepth;

				for (int y = 0; y < cDepthHeight; ++y)
				{
					golem::Point* pOut = pOutWidth;
					CameraSpacePoint* pCamC = pCamDWidth;

					for (int x = 0; x < cDepthWidth; ++x, pCamC++, ++pOut) {
						cv::Vec4b color;
						//int abc = pCamC->Z;
						if (pCamC->Z != 0) {
							pOut->x = pCamC->X;
							pOut->y = pCamC->Y;
							pOut->z = pCamC->Z;
						}
						else {
							pOut->x = missingPoint;
							pOut->y = missingPoint;
							pOut->z = missingPoint;
							//	//printf("Getting to this point4\n");
						}
						pOut->normal_x = pOut->normal_y = pOut->normal_z = golem::numeric_const<float>::ZERO;

						/*color = colorframe.at<cv::vec4b>(y, x);
						if (hasimagestream) {
						pout->b = color[0];
						pout->g = color[1];
						pout->r = color[2];
						pout->a = colour._rgba.a;
						}*/
						//else {
						pOut->r = colour._rgba.r;
						pOut->g = colour._rgba.g;
						pOut->b = colour._rgba.b;
						pOut->a = colour._rgba.a;
						//}
					}
					//colorFrame += cDepthWidth;
					pCamDWidth += cDepthWidth;
					pOutWidth += cDepthWidth;
				}
				golem::Mat33 aMatrix;
				aMatrix.m11 = golem::Real(-1);
				aMatrix.m12 = golem::Real(0);
				aMatrix.m13 = golem::Real(0);
				aMatrix.m21 = golem::Real(0);
				aMatrix.m22 = golem::Real(-1);
				aMatrix.m23 = golem::Real(0);
				aMatrix.m31 = golem::Real(0);
				aMatrix.m32 = golem::Real(0);
				aMatrix.m33 = golem::Real(1);
				golem::Vec3 aVector;
				aVector.v1 = 0;
				aVector.v2 = 0;
				aVector.v3 = 0;
				golem::Mat34 aMatrix34;
				aMatrix34.R = aMatrix;
				aMatrix34.p = aVector;

				Cloud::PointSeqPtr pCloud = pImage->cloud;
				const Mat34 frame = Cloud::getSensorFrame(*pCloud);
				Cloud::transform(aMatrix34, *pCloud, *pCloud);
				Cloud::setSensorFrame(frame, *pCloud); // don't change the camera frame !!!!!!!!!!!!!!!!!!!!!!!!!!!!

				const golem::SecTmReal systemTime2 = camera->getContext().getTimer().elapsed();
				// setting the timestamp
				pImage->timeStamp = REAL_HALF*(systemTime1 + systemTime2);
			}
			RELEASE_PTR(pDepthFrameDescription);
			RELEASE_PTR(pColorFrameDescription);
		}
		RELEASE_PTR(pDepthFrame);
		RELEASE_PTR(pColorFrame);
		RELEASE_PTR(pMultiSourceFrame);
	}
Code Example #17
File: Event.cpp Project: AMBITIONBIN/Kinect2Sample
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat colorMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Color" );

	// Subscribe Handle
	WAITABLE_HANDLE hColorWaitable;
	hResult = pColorReader->SubscribeFrameArrived( &hColorWaitable );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameReader::SubscribeFrameArrived()" << std::endl;
		return -1;
	}

	while( 1 ){
		// Waitable Events
		HANDLE hEvents[ ] = { reinterpret_cast<HANDLE>( hColorWaitable ) };
		WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE );

		// Arrived Data
		IColorFrameArrivedEventArgs* pColorArgs = nullptr;
		hResult = pColorReader->GetFrameArrivedEventData( hColorWaitable, &pColorArgs );
		if( SUCCEEDED( hResult ) ){
			// Reference
			IColorFrameReference* pColorReference = nullptr;
			hResult = pColorArgs->get_FrameReference( &pColorReference );
			if( SUCCEEDED( hResult ) ){
				// Frame
				IColorFrame* pColorFrame = nullptr;
				hResult = pColorReference->AcquireFrame( &pColorFrame );
				if( SUCCEEDED( hResult ) ){
					hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
					if( SUCCEEDED( hResult ) ){
						cv::resize( bufferMat, colorMat, cv::Size(), 0.5, 0.5 );
					}
				}
				SafeRelease( pColorFrame );
			}
			SafeRelease( pColorReference );
		}
		SafeRelease( pColorArgs );

		cv::imshow( "Color", colorMat );

		if( cv::waitKey( 30 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pColorSource );
	SafeRelease( pColorReader, hColorWaitable );
	SafeRelease( pDescription );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
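The two-argument SafeRelease(pColorReader, hColorWaitable) above is presumably a project-specific overload that unsubscribes the waitable handle before releasing the reader. A sketch of the equivalent explicit cleanup, using the documented IColorFrameReader method:

// Unsubscribe the frame-arrived event before releasing the reader
if (pColorReader != nullptr) {
	pColorReader->UnsubscribeFrameArrived(hColorWaitable);
	pColorReader->Release();
	pColorReader = nullptr;
}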
Code Example #18
File: KinectManager.cpp Project: NPatch/3DCameraDX11
	void KinectManager::Update(void)
	{
		if (!m_pColorFrameReader)
		{
			return;
		}

		IColorFrame* pColorFrame = NULL;

		HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

		if (SUCCEEDED(hr))
		{
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;

			hr = pColorFrame->get_RelativeTime(&nTime);

			if (SUCCEEDED(hr))
			{
				hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Width(&nWidth);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Height(&nHeight);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_BytesPerPixel(&color_frame_bytes_per_pixel);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_LengthInPixels(&color_frame_length_in_pixels);
			}

			if (SUCCEEDED(hr))
			{
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
			}


			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Rgba)
				{
					UINT nBufferSize = KINECT_STREAM_COLOR_WIDTH * KINECT_STREAM_COLOR_HEIGHT * color_frame_bytes_per_pixel;

					hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&m_pColorRGBX));
				}
				else if (m_pColorRGBX)
				{
					IFrameDescription* pRGBFrameDescription = NULL;
					pColorFrame->CreateFrameDescription(ColorImageFormat_Rgba, &pRGBFrameDescription);
					unsigned int clpp = 0;
					pRGBFrameDescription->get_BytesPerPixel(&clpp);
					unsigned int clip = 0;
					pRGBFrameDescription->get_LengthInPixels(&clip);
					color_frame_bytes_per_pixel = clpp;
					color_frame_length_in_pixels = clip;
					UINT nBufferSize = clip * clpp;
					// copy into the buffer m_pColorRGBX points to, not over the pointer itself
					hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(m_pColorRGBX), ColorImageFormat_Rgba);
					SafeRelease(pRGBFrameDescription);
				}
				else
				{
					hr = E_FAIL;
				}
			}

			SafeRelease(pFrameDescription);
		}

		SafeRelease(pColorFrame);
	}
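When the raw format already matches, AccessRawUnderlyingBuffer avoids a conversion copy, but the returned pointer is only valid while the frame object is alive; holding it past SafeRelease(pColorFrame), as the member assignment above appears to do, is unsafe. A sketch of the safer pattern (myPersistentBuffer is a hypothetical name):

// Copy the raw pixels out before the frame is released
BYTE* pRaw = nullptr;
UINT rawCapacity = 0;
if (SUCCEEDED(pColorFrame->AccessRawUnderlyingBuffer(&rawCapacity, &pRaw)))
{
	memcpy(myPersistentBuffer, pRaw, rawCapacity);	// the copy outlives the frame
}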
Code Example #19
File: KinectDevice.cpp Project: AliShug/Kinecting
void KinectDevice::listen() {
    if (_listening) throw std::exception("Already listening for new frames");

    _listening = true;
    while (_listening) {
        int idx = WaitForSingleObject((HANDLE) _frameEvent, 100);
        switch (idx) {
        case WAIT_TIMEOUT:
            std::cout << ".";
            continue;
        case WAIT_OBJECT_0:
            IMultiSourceFrameArrivedEventArgs *frameArgs = nullptr;
            IMultiSourceFrameReference *frameRef = nullptr;
            HRESULT hr = _reader->GetMultiSourceFrameArrivedEventData(_frameEvent, &frameArgs);

            if (hr == S_OK) {
                hr = frameArgs->get_FrameReference(&frameRef);
                frameArgs->Release();
            }

            if (hr == S_OK) {
                //if (_lastFrame) _lastFrame->Release();
                hr = frameRef->AcquireFrame(&_lastFrame);
                frameRef->Release();
            }

            if (hr == S_OK) {
                // Store frame data
                //std::cout << "Got a frame YEAH" << std::endl;

                IDepthFrameReference                *depthRef   = nullptr;
                IColorFrameReference                *colorRef   = nullptr;
                IInfraredFrameReference             *irRef      = nullptr;
                ILongExposureInfraredFrameReference *hdirRef    = nullptr;
                IBodyIndexFrameReference            *indexRef   = nullptr;

                IDepthFrame                         *depth      = nullptr;
                IColorFrame                         *color      = nullptr;
                IInfraredFrame                      *ir         = nullptr;
                ILongExposureInfraredFrame          *hdir       = nullptr;
                IBodyIndexFrame                     *index      = nullptr;

                UINT size;
                uint16_t *buff;
                BYTE *cbuff;

                frameLock.lock();
                if (_streams & Streams::DEPTH_STREAM) {
                    _lastFrame->get_DepthFrameReference(&depthRef);
                    depthRef->AcquireFrame(&depth);

                    if (depth) {
                        depthSwap();
                        depth->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(depthData.get(), buff, size * sizeof(uint16_t));
                        depth->Release();
                    }

                    depthRef->Release();
                }
                if (_streams & Streams::COLOR_STREAM) {
                    _lastFrame->get_ColorFrameReference(&colorRef);
                    colorRef->AcquireFrame(&color);

                    if (color) {
                        //color->AccessUnderlyingBuffer(&size, &buff);
                        //memcpy(_colorData.get(), buff, size);
                        color->Release();
                    }

                    colorRef->Release();
                }
                if (_streams & Streams::IR_STREAM) {
                    _lastFrame->get_InfraredFrameReference(&irRef);
                    irRef->AcquireFrame(&ir);

                    if (ir) {
                        ir->AccessUnderlyingBuffer(&size, &buff);
                        // size is a count of UINT16 elements, not bytes
                        memcpy(irData.get(), buff, size * sizeof(uint16_t));
                        ir->Release();
                    }

                    irRef->Release();
                }
                if (_streams & Streams::HDIR_STREAM) {
                    _lastFrame->get_LongExposureInfraredFrameReference(&hdirRef);
                    hdirRef->AcquireFrame(&hdir);

                    if (hdir) {
                        hdir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(hdirData.get(), buff, size * sizeof(uint16_t));
                        hdir->Release();
                    }

                    hdirRef->Release();
                }
                if (_streams & Streams::INDEX_STREAM) {
                    _lastFrame->get_BodyIndexFrameReference(&indexRef);
                    indexRef->AcquireFrame(&index);

                    if (index) {
                        index->AccessUnderlyingBuffer(&size, &cbuff);
                        memcpy(indexData.get(), cbuff, size);
                        index->Release();
                    }

                    indexRef->Release();
                }

                frameLock.unlock();

                _lastFrame->Release();
            }
        }
    }
}
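
The listen() loop above blocks on _frameEvent, whose setup is not shown in this excerpt. A plausible sketch of how the waitable handle would be obtained; _reader and _frameEvent are the members used in listen(), while _sensor is a hypothetical IKinectSensor* member (the two API calls themselves are standard Kinect v2):

    HRESULT hr = _sensor->OpenMultiSourceFrameReader(
        FrameSourceTypes_Depth | FrameSourceTypes_Color | FrameSourceTypes_Infrared,
        &_reader);
    if (hr == S_OK) {
        // yields a WAITABLE_HANDLE that WaitForSingleObject can block on
        hr = _reader->SubscribeMultiSourceFrameArrived(&_frameEvent);
    }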
コード例 #20
0
  bool KinectV2Controller::UpdateCamera()
  {
    if(InitializeMultiFrameReader())
    {

      IMultiSourceFrame* pMultiSourceFrame = NULL;
      IDepthFrame* pDepthFrame = NULL;
      IColorFrame* pColorFrame = NULL;
      IInfraredFrame* pInfraRedFrame = NULL;

      HRESULT hr = -1;

      static DWORD lastTime = 0;

      DWORD currentTime = GetTickCount();

      //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS.
      if( unsigned int(currentTime - lastTime) > 33 )
      {
        hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
        lastTime = currentTime;
      }

      if (SUCCEEDED(hr))
      {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        IInfraredFrameReference* pInfraredFrameReference = NULL;

        hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame);
        }

        SafeRelease(pInfraredFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;
        UINT16 *pInfraRedBuffer = NULL;

        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

        if (SUCCEEDED(hr))
        {
          hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
        }
        if (SUCCEEDED(hr))
        {
          // depth and infrared share the same 512x424 resolution, so the size variable is reused
          hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer);
        }
        if (SUCCEEDED(hr))
        {
          for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i)
          {
            d->m_Distances[i] = static_cast<float>(*pDepthBuffer);
            d->m_Amplitudes[i] = static_cast<float>(*pInfraRedBuffer);
            ++pDepthBuffer;
            ++pInfraRedBuffer;
          }
        }
        }
        else
        {
          MITK_ERROR << "AccessUnderlyingBuffer";
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
          hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
          if (imageFormat == ColorImageFormat_Bgra)
          {
            hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
          }
          else if (d->m_pColorRGBX)
          {
            pColorBuffer = d->m_pColorRGBX;
            nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD);
            hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
          }
          else
          {
            hr = E_FAIL;
          }
          if (SUCCEEDED(hr))
          {
            for(int i = 0; i < d->m_RGBBufferSize; i+=3)
            {
              //convert from BGR to RGB
              d->m_Colors[i+0] = pColorBuffer->rgbRed;
              d->m_Colors[i+1] = pColorBuffer->rgbGreen;
              d->m_Colors[i+2] = pColorBuffer->rgbBlue;
              ++pColorBuffer;
            }
          }
        }
      }

      SafeRelease(pDepthFrame);
      SafeRelease(pColorFrame);
      SafeRelease(pInfraRedFrame);
      SafeRelease(pMultiSourceFrame);

      if( hr != -1 && !SUCCEEDED(hr) )
      {
        //The thread gets here, if the data is requested faster than the device can deliver it.
        //This may happen from time to time.
        return false;
      }

      return true;
    }
    MITK_ERROR << "Unable to initialize MultiFrameReader";
    return false;
  }
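
The BGR-to-RGB copy at the end of UpdateCamera reads the rgbRed/rgbGreen/rgbBlue fields of RGBQUAD; this works because of the Win32 layout (from wingdi.h), where the blue byte comes first:

typedef struct tagRGBQUAD {
	BYTE rgbBlue;
	BYTE rgbGreen;
	BYTE rgbRed;
	BYTE rgbReserved;
} RGBQUAD;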
コード例 #21
0
ファイル: Body.cpp プロジェクト: AMBITIONBIN/Kinect2Sample
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open( );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource( &pBodySource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader( &pBodyReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Body" );

	// Color Table
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b( 255,   0,   0 );
	color[1] = cv::Vec3b(   0, 255,   0 );
	color[2] = cv::Vec3b(   0,   0, 255 );
	color[3] = cv::Vec3b( 255, 255,   0 );
	color[4] = cv::Vec3b( 255,   0, 255 );
	color[5] = cv::Vec3b(   0, 255, 255 );

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	while( 1 ){
		// Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
		if( SUCCEEDED( hResult ) ){
			hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
			if( SUCCEEDED( hResult ) ){
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
		}

		IBodyFrame* pBodyFrame = nullptr;
		hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
		if( SUCCEEDED( hResult ) ){
			IBody* pBody[BODY_COUNT] = { 0 };
			hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
			if( SUCCEEDED( hResult ) ){
				for( int count = 0; count < BODY_COUNT; count++ ){
					BOOLEAN bTracked = false;
					hResult = pBody[count]->get_IsTracked( &bTracked );
					if( SUCCEEDED( hResult ) && bTracked ){
						Joint joint[JointType::JointType_Count];
						hResult = pBody[ count ]->GetJoints( JointType::JointType_Count, joint );
						if( SUCCEEDED( hResult ) ){
							// Left Hand State
							HandState leftHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandLeftState( &leftHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( leftHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Right Hand State
							HandState rightHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandRightState( &rightHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( rightHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Joint
							for( int type = 0; type < JointType::JointType_Count; type++ ){
								ColorSpacePoint colorSpacePoint = { 0 };
								pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
								int x = static_cast<int>( colorSpacePoint.X );
								int y = static_cast<int>( colorSpacePoint.Y );
								if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
									cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast< cv::Scalar >( color[count] ), -1, CV_AA );
								}
							}
						}

						/*// Activity
						UINT capacity = 0;
						DetectionResult detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetActivityDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Activity::Activity_EyeLeftClosed:
										std::cout << "Activity_EyeLeftClosed" << std::endl;
										break;
									case Activity::Activity_EyeRightClosed:
										std::cout << "Activity_EyeRightClosed" << std::endl;
										break;
									case Activity::Activity_MouthOpen:
										std::cout << "Activity_MouthOpen" << std::endl;
										break;
									case Activity::Activity_MouthMoved:
										std::cout << "Activity_MouthMoved" << std::endl;
										break;
									case Activity::Activity_LookingAway:
										std::cout << "Activity_LookingAway" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetActivityDetectionResults()" << std::endl;
						}*/

						/*// Appearance
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetAppearanceDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Appearance::Appearance_WearingGlasses:
										std::cout << "Appearance_WearingGlasses" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetAppearanceDetectionResults()" << std::endl;
						}*/

						/*// Expression
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetExpressionDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Expression::Expression_Happy:
										std::cout << "Expression_Happy" << std::endl;
										break;
									case Expression::Expression_Neutral:
										std::cout << "Expression_Neutral" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetExpressionDetectionResults()" << std::endl;
						}*/

						// Lean
						PointF amount;
						hResult = pBody[count]->get_Lean( &amount );
						if( SUCCEEDED( hResult ) ){
							std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
						}
					}
				}
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
			for( int count = 0; count < BODY_COUNT; count++ ){
				SafeRelease( pBody[count] );
			}
		}

		SafeRelease( pColorFrame );
		SafeRelease( pBodyFrame );

		cv::imshow( "Body", bodyMat );

		if( cv::waitKey( 10 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pColorSource );
	SafeRelease( pBodySource );
	SafeRelease( pColorReader );
	SafeRelease( pBodyReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
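
This sample draws a circle per joint; connecting joints into bones is a small extension using the same mapping call. A sketch, where drawBone is a hypothetical helper (not part of the sample above):

// hypothetical helper: draw a bone between two joints on the color image
void drawBone( cv::Mat& image, ICoordinateMapper* pMapper,
	const Joint& a, const Joint& b, const cv::Scalar& col )
{
	if( ( a.TrackingState == TrackingState_NotTracked ) || ( b.TrackingState == TrackingState_NotTracked ) ){
		return;
	}
	ColorSpacePoint pa = { 0 };
	ColorSpacePoint pb = { 0 };
	pMapper->MapCameraPointToColorSpace( a.Position, &pa );
	pMapper->MapCameraPointToColorSpace( b.Position, &pb );
	cv::line( image, cv::Point( static_cast<int>( pa.X ), static_cast<int>( pa.Y ) ),
		cv::Point( static_cast<int>( pb.X ), static_cast<int>( pb.Y ) ), col, 3, CV_AA );
}

// usage inside the joint loop, e.g.:
// drawBone( bufferMat, pCoordinateMapper, joint[JointType_ShoulderLeft], joint[JointType_ElbowLeft], color[count] );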
コード例 #22
0
ファイル: Kinect2.cpp プロジェクト: naychrist/Cinder-Kinect2
void Device::update()
{
	if ( mSensor != 0 ) {
		mSensor->get_Status( &mStatus );
	}

	if ( mFrameReader == 0 ) {
		return;
	}

	IAudioBeamFrame* audioFrame								= 0;
	IBodyFrame* bodyFrame									= 0;
	IBodyIndexFrame* bodyIndexFrame							= 0;
	IColorFrame* colorFrame									= 0;
	IDepthFrame* depthFrame									= 0;
	IMultiSourceFrame* frame								= 0;
	IInfraredFrame* infraredFrame							= 0;
	ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;
	
	HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

	// TODO audio
	if ( SUCCEEDED( hr ) ) {
		console() << "SUCCEEDED " << getElapsedFrames() << endl;
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
		IBodyFrameReference* frameRef = 0;
		hr = frame->get_BodyFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &bodyFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
		IBodyIndexFrameReference* frameRef = 0;
		hr = frame->get_BodyIndexFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &bodyIndexFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
		IColorFrameReference* frameRef = 0;
		hr = frame->get_ColorFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &colorFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
		IDepthFrameReference* frameRef = 0;
		hr = frame->get_DepthFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &depthFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
		IInfraredFrameReference* frameRef = 0;
		hr = frame->get_InfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &infraredFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
		ILongExposureInfraredFrameReference* frameRef = 0;
		hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) ) {
		long long time											= 0L;

		// TODO audio

		IFrameDescription* bodyFrameDescription					= 0;
		int32_t bodyWidth										= 0;
		int32_t bodyHeight										= 0;
		uint32_t bodyBufferSize									= 0;
		uint8_t* bodyBuffer										= 0;

		IFrameDescription* bodyIndexFrameDescription			= 0;
		int32_t bodyIndexWidth									= 0;
		int32_t bodyIndexHeight									= 0;
		uint32_t bodyIndexBufferSize							= 0;
		uint8_t* bodyIndexBuffer								= 0;
		
		IFrameDescription* colorFrameDescription				= 0;
		int32_t colorWidth										= 0;
		int32_t colorHeight										= 0;
		ColorImageFormat imageFormat							= ColorImageFormat_None;
		uint32_t colorBufferSize								= 0;
		uint8_t* colorBuffer									= 0;

		IFrameDescription* depthFrameDescription				= 0;
		int32_t depthWidth										= 0;
		int32_t depthHeight										= 0;
		uint16_t depthMinReliableDistance						= 0;
		uint16_t depthMaxReliableDistance						= 0;
		uint32_t depthBufferSize								= 0;
		uint16_t* depthBuffer									= 0;

		IFrameDescription* infraredFrameDescription				= 0;
		int32_t infraredWidth									= 0;
		int32_t infraredHeight									= 0;
		uint32_t infraredBufferSize								= 0;
		uint16_t* infraredBuffer								= 0;

		IFrameDescription* infraredLongExposureFrameDescription	= 0;
		int32_t infraredLongExposureWidth						= 0;
		int32_t infraredLongExposureHeight						= 0;
		uint32_t infraredLongExposureBufferSize					= 0;
		uint16_t* infraredLongExposureBuffer					= 0;

		if ( depthFrame != 0 ) {
			hr = depthFrame->get_RelativeTime( &time );
		}

		// TODO audio
		if ( mDeviceOptions.isAudioEnabled() ) {

		}

		// TODO body
		if ( mDeviceOptions.isBodyEnabled() ) {

		}

		if ( mDeviceOptions.isBodyIndexEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
			}
			if ( SUCCEEDED( hr ) ) {
 				//hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
			}
		}

		if ( mDeviceOptions.isColorEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrame->get_FrameDescription( &colorFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrameDescription->get_Width( &colorWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrameDescription->get_Height( &colorHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrame->get_RawColorImageFormat( &imageFormat );
			}
			if ( SUCCEEDED( hr ) ) {
				bool isAllocated	= false;
				SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
				if ( imageFormat == ColorImageFormat_Bgra ) {
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::BGRA;
				} else if ( imageFormat == ColorImageFormat_Rgba ) {
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::RGBA;
				} else {
					isAllocated = true;
					colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
					colorBuffer = new uint8_t[ colorBufferSize ];
					hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
					channelOrder = SurfaceChannelOrder::RGBA;
				}

				if ( SUCCEEDED( hr ) ) {
					colorFrame->get_RelativeTime( &time );
					Surface8u colorSurface = Surface8u( colorBuffer, colorWidth, colorHeight, colorWidth * sizeof( uint8_t ) * 4, channelOrder );
					mFrame.mSurfaceColor = Surface8u( colorWidth, colorHeight, false, channelOrder );
					mFrame.mSurfaceColor.copyFrom( colorSurface, colorSurface.getBounds() );

					console() << "Color\n\twidth: " << colorWidth << "\n\theight: " << colorHeight 
						<< "\n\tbuffer size: " << colorBufferSize << "\n\ttime: " << time << endl;
				}

				if ( isAllocated && colorBuffer != 0 ) {
					delete[] colorBuffer;
					colorBuffer = 0;
				}
			}
		}

		if ( mDeviceOptions.isDepthEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_FrameDescription( &depthFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrameDescription->get_Width( &depthWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrameDescription->get_Height( &depthHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u depthChannel = Channel16u( depthWidth, depthHeight, depthWidth * sizeof( uint16_t ), 1, depthBuffer );
				mFrame.mChannelDepth = Channel16u( depthWidth, depthHeight );
				mFrame.mChannelDepth.copyFrom( depthChannel, depthChannel.getBounds() );

				console( ) << "Depth\n\twidth: " << depthWidth << "\n\theight: " << depthHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrameDescription->get_Width( &infraredWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrameDescription->get_Height( &infraredHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredChannel = Channel16u( infraredWidth, infraredHeight, infraredWidth * sizeof( uint16_t ), 1, infraredBuffer );
				mFrame.mChannelInfrared = Channel16u( infraredWidth, infraredHeight );
				mFrame.mChannelInfrared.copyFrom( infraredChannel, infraredChannel.getBounds() );

				console( ) << "Infrared\n\twidth: " << infraredWidth << "\n\theight: " << infraredHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight, infraredLongExposureWidth * sizeof( uint16_t ), 1, infraredLongExposureBuffer );
				mFrame.mChannelInfraredLongExposure = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
				mFrame.mChannelInfraredLongExposure.copyFrom( infraredLongExposureChannel, infraredLongExposureChannel.getBounds() );

				int64_t irLongExpTime = 0;
				hr = infraredLongExposureFrame->get_RelativeTime( &irLongExpTime );

				console( ) << "Infrared Long Exposure\n\twidth: " << infraredLongExposureWidth << "\n\theight: " << infraredLongExposureHeight;
				if ( SUCCEEDED( hr ) ) {
					console() << "\n\ttimestamp: " << irLongExpTime;
				}
				console() << endl;
			}
		}

		if ( SUCCEEDED( hr ) ) {
			// TODO build Kinect2::Frame from buffers, data
			mFrame.mTimeStamp = time;
		}

		if ( bodyFrameDescription != 0 ) {
			bodyFrameDescription->Release();
			bodyFrameDescription = 0;
		}
		if ( bodyIndexFrameDescription != 0 ) {
			bodyIndexFrameDescription->Release();
			bodyIndexFrameDescription = 0;
		}
		if ( colorFrameDescription != 0 ) {
			colorFrameDescription->Release();
			colorFrameDescription = 0;
		}
		if ( depthFrameDescription != 0 ) {
			depthFrameDescription->Release();
			depthFrameDescription = 0;
		}
		if ( infraredFrameDescription != 0 ) {
			infraredFrameDescription->Release();
			infraredFrameDescription = 0;
		}
		if ( infraredLongExposureFrameDescription != 0 ) {
			infraredLongExposureFrameDescription->Release();
			infraredLongExposureFrameDescription = 0;
		}
	}

	if ( audioFrame != 0 ) {
		audioFrame->Release();
		audioFrame = 0;
	}
	if ( bodyFrame != 0 ) {
		bodyFrame->Release();
		bodyFrame = 0;
	}
	if ( bodyIndexFrame != 0 ) {
		bodyIndexFrame->Release();
		bodyIndexFrame = 0;
	}
	if ( colorFrame != 0 ) {
		colorFrame->Release();
		colorFrame = 0;
	}
	if ( depthFrame != 0 ) {
		depthFrame->Release();
		depthFrame = 0;
	}
	if ( frame != 0 ) {
		frame->Release();
		frame = 0;
	}
	if ( infraredFrame != 0 ) {
		infraredFrame->Release();
		infraredFrame = 0;
	}
	if ( infraredLongExposureFrame != 0 ) {
		infraredLongExposureFrame->Release();
		infraredLongExposureFrame = 0;
	}
}
コード例 #23
0
int main(int argc, char* argv[])
{


	int similarity = 0;

	string line;
	ifstream myfile ("source_config.txt");
	if (myfile.is_open())
	{
		getline (myfile,line);
		hauteurCamera = stoi(line);
		getline (myfile,line);
		fountainXPosition = stoi(line);
		getline (myfile,line);
		fountainYPosition = stoi(line);
		getline (myfile,line);
		fountainWidth = stoi(line);

		getline (myfile,line);
		blasterWidth = stoi(line);
		getline (myfile,line);
		int numberOfBlaster = stoi(line);

		for(int i = 0;i<numberOfBlaster;i++){
			getline (myfile,line);
			blasterXPosition.push_back(stoi(line));
			getline (myfile,line);
			blasterYPosition.push_back(stoi(line));
		}

		myfile.close();
	}

	else
	{
		cout << "Unable to open file"; 
		exit(-1);
	}

	IKinectSensor* m_pKinectSensor;
	IDepthFrameReader* pDepthReader;
	IDepthFrameSource* pDepthFrameSource = NULL; // Depth image

	IColorFrameReader* pColorReader;
	IColorFrameSource* pColorFrameSource = NULL;


	HRESULT hr;

	hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : GetDefaultKinectSensor failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->Open();
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : Open failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->get_DepthFrameSource( &pDepthFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_DepthFrameSource failed." << endl;
		return false;
	}
	hr = pDepthFrameSource->OpenReader( &pDepthReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}


	IFrameDescription* pDepthDescription;
	hr = pDepthFrameSource->get_FrameDescription( &pDepthDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	w = 0,h = 0;
	pDepthDescription->get_Width( &w ); // 512
	pDepthDescription->get_Height( &h ); // 424
	//unsigned int irBufferSize = w * h * sizeof( unsigned short );


	hr = m_pKinectSensor->get_ColorFrameSource( &pColorFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_ColorFrameSource failed." << endl;
		return false;
	}
	hr = pColorFrameSource->OpenReader( &pColorReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}

	// Description
	IFrameDescription* pColorDescription;
	hr = pColorFrameSource->get_FrameDescription( &pColorDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width( &colorWidth ); // 1920
	pColorDescription->get_Height( &colorHeight ); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof( unsigned char );

	Mat colorBufferMat = Mat::zeros( cvSize(colorWidth,colorHeight), CV_8UC4   );
	//colorMat = Mat::zeros( cvSize(colorWidth/2,colorHeight/2), CV_8UC4   );



	ICoordinateMapper* pCoordinateMapper;
	hr = m_pKinectSensor->get_CoordinateMapper( &pCoordinateMapper );



	//  open a opencv window
	cv::namedWindow("source_config", CV_WINDOW_AUTOSIZE );
	setMouseCallback("source_config", MouseCallBackFunc, NULL);

	Mat frame(h,w, CV_8UC3, Scalar(255,255,255));
	Mat display;
	//Mat img;

	char k = 0;
	while(k!=27){

		HRESULT hResult = S_OK;

		if(displayColor){
			IColorFrame* pColorFrame = nullptr;
			hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(50);
				hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
			}

			if( SUCCEEDED( hResult ) ){
				hResult = pColorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<BYTE*>( colorBufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
				if( !SUCCEEDED( hResult ) ){
					return false;
				}

				resize(colorBufferMat,display,Size(displaySize*w,displaySize*h));

				flip(display,display,1);

				cv::line(display,Point(displaySize*w/2,0),Point(displaySize*w/2,displaySize*h),Scalar(0,0,255),2);
				cv::line(display,Point(0,displaySize*h/2),Point(displaySize*w,displaySize*h/2),Scalar(0,0,255),2);


				if (pColorFrame )
				{
					pColorFrame ->Release();
					pColorFrame  = NULL;
				}



			}
			else 
				return false;



		}
		else
		{

			IDepthFrame* pDepthFrame = nullptr;
			hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(10);
				hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			}

			if( SUCCEEDED( hResult ) ){
				unsigned int bufferSize = 0;
				unsigned short* buffer = nullptr;
				hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, &buffer );

				if( SUCCEEDED( hResult ) ){
					for( int y = 0; y < h; y++ ){
						for( int x = 0; x < w; x++ ){
							Vec3b intensity = frame.at<Vec3b>(y, x);
							if(buffer[ y * w + (w - x - 1) ]  < hauteurCamera){
								int d = buffer[ y * w + (w - x - 1) ];
								intensity.val[0] = 2.55*(d % 100);
								intensity.val[1] = 1.22*(d % 200);
								intensity.val[2] = 256.0*d/hauteurCamera;
							}
							else
							{
								intensity.val[0] = 255;
								intensity.val[1] = 255;
								intensity.val[2] = 255;
							}
							/*intensity.val[0] = buffer[ y * w + x ] >> 8;
							intensity.val[1] = buffer[ y * w + x ] >> 8;
							intensity.val[2] = buffer[ y * w + x ] >> 8;*/
							frame.at<Vec3b>(y, x) = intensity;
						}
					}

					// change the rectangle color depending on whether the corner heights are similar or not (within less than 4 cm)

					float d1 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d2 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
					float d3 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d4 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];

					if((d1 < 0)||(d1>3500)||(d2 < 0)||(d2>3500)||(d3 < 0)||(d3>3500)||(d4 < 0)||(d4>3500)){
						similarity = 0;
					}
					else
					{
						int mn = 100;
						if((abs(d1-d2) < mn)
							&&(abs(d1-d3) < mn)
							&&(abs(d1-d4) < mn)
							&&(abs(d2-d3) < mn)
							&&(abs(d2-d4) < mn)
							&&(abs(d3-d4) < mn))
							similarity = 255;
						else{
							int md = abs(d1-d2);
							md = MAX(md,abs(d1-d3));
							md = MAX(md,abs(d1-d4));
							md = MAX(md,abs(d2-d3));
							md = MAX(md,abs(d2-d4));
							md = MAX(md,abs(d3-d4));
							if(md-mn>128)
								similarity = 0;
							else
								similarity = 128 - (md - mn);
						}
					}


					if(k=='s'){

						// get the camera height

						// from the depth frame, get the 3D positions of the 1.20 m jets and save them to another file, to be loaded later by an Observer


						CameraSpacePoint cameraPoint = { 0 };
						DepthSpacePoint depthPoint = { 0 };
						UINT16 depth;

						// Compute the camera height by averaging four points
						float h = 0;
						int cptH = 0;
						float d;

						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						if(cptH>0){
							//hauteurCamera = h/cptH;
							cout << "H = " << hauteurCamera << endl;
						}

						// Compute real size of blaster
						/*depthPoint.X = static_cast<float>(blasterXPosition[0] - blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] - blasterWidth); 
						depth = hauteurCamera - 1000.0f; // TODO change
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner1X = static_cast<float>(cameraPoint.X);
						float corner1Y = static_cast<float>(cameraPoint.Y);


						depthPoint.X = static_cast<float>(blasterXPosition[0] + blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] + blasterWidth); 
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner2X = static_cast<float>(cameraPoint.X);
						float corner2Y = static_cast<float>(cameraPoint.Y);*/

						//float realBlasterWidth = 1000.0*(abs(corner2X-corner1X)+abs(corner2Y-corner1Y))/2.0;


						ofstream myfile;
						myfile.open ("source_config.txt");
						myfile << hauteurCamera << "\n";
						myfile << fountainXPosition << "\n";
						myfile << fountainYPosition << "\n";
						myfile << fountainWidth << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							myfile << blasterXPosition[i] << "\n";
							myfile << blasterYPosition[i] << "\n";
						}
						myfile.close();

						//  save real positions to file

						myfile.open ("source3D.txt");
						myfile << hauteurCamera << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							depthPoint.X = static_cast<float>(blasterXPosition[i]); 
							depthPoint.Y = static_cast<float>(blasterYPosition[i]); 
							depth = hauteurCamera;
							pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							pCoordinateMapper->MapCameraPointToDepthSpace(cameraPoint, &depthPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							myfile << 1000.0*static_cast<float>(cameraPoint.X) << "\n";
							myfile << 1000.0*static_cast<float>(cameraPoint.Y) << "\n";
						}
						myfile.close();


					}


				}
				else{
					return false;
				}

			}
			else 
				return false;

			if( pDepthFrame != NULL ){
				pDepthFrame->Release();
				pDepthFrame = NULL;
			}

			rectangle(frame,
				Rect(fountainXPosition-fountainWidth/2.0, 
				fountainYPosition-fountainWidth/2.0, 
				fountainWidth	, 
				fountainWidth),
				Scalar(0,similarity,255-similarity),
				3);
			for(int i = -2; i <= 2; i++)
			{
				rectangle(frame,
					Rect(fountainXPosition - (i*blasterWidth) - (blasterWidth/2.0), 
					fountainYPosition-fountainWidth/2.0, 
					blasterWidth, 
					fountainWidth),
					Scalar(0,255,0),
					1);
				rectangle(frame,
					Rect(fountainXPosition-fountainWidth/2.0,
					fountainYPosition - (i*blasterWidth) - (blasterWidth/2.0),
					fountainWidth, 
					blasterWidth),
					Scalar(0,255,0),
					1);
			}

			char textbuffer [33];
			for(int i = 0;i<blasterXPosition.size();i++){
				sprintf(textbuffer,"%i",i+1);
				putText(frame,textbuffer, Point2f(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]), FONT_HERSHEY_PLAIN, 1,  Scalar(0,0,255,255), 2);
				//rectangle(frame,Rect(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]-blasterWidth/2.0,blasterWidth,blasterWidth),Scalar(255,0,0));
				circle(frame,Point(blasterXPosition[i],blasterYPosition[i]),blasterWidth/2.0,Scalar(255,0,0));
			}
			resize(frame,display,Size(displaySize*w,displaySize*h));
		}

		cv::imshow("source_config", display);


		k=cv::waitKey(1);	

		if(k == 32) // Space
			displayColor = ! displayColor;
		
		if(k=='a')
			alignBuseFromLayout();
		if(k=='r')
			resetFountainPosition();

	}


	if (pDepthReader)
	{
		pDepthReader->Release();
		pDepthReader = NULL;
	}


	if (pCoordinateMapper)
	{
		pCoordinateMapper->Release();
		pCoordinateMapper = NULL;
	}

	m_pKinectSensor->Close();
	if (m_pKinectSensor)
	{
		m_pKinectSensor->Release();
		m_pKinectSensor = NULL;
	}


	//system("pause");

	return 0;
}
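
When a raw buffer is only needed for display, it can also be wrapped in a cv::Mat without copying; the wrapper aliases the frame's memory, so it must be used before the frame is released. A minimal sketch for the depth path above, reusing the w/h globals (512x424):

	unsigned int bufferSize = 0;
	unsigned short* buffer = nullptr;
	if( SUCCEEDED( pDepthFrame->AccessUnderlyingBuffer( &bufferSize, &buffer ) ) ){
		// no copy: depthRaw shares memory with the frame, use before pDepthFrame->Release()
		cv::Mat depthRaw( h, w, CV_16UC1, buffer );
		cv::Mat depth8;
		depthRaw.convertTo( depth8, CV_8U, 255.0 / 4500.0 ); // map ~4.5 m of range to 8 bit
		cv::imshow( "depth", depth8 );
	}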
コード例 #24
0
	void UpdateColor()
	{
		if (!m_pColorFrameReader)
		{
			return;
		}

		IColorFrame* pColorFrame = NULL;

		HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

		if (SUCCEEDED(hr))
		{
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nBufferSize = 0;
			RGBQUAD *pBuffer = NULL;

			hr = pColorFrame->get_RelativeTime(&nTime);

			if (SUCCEEDED(hr))
			{
				hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Width(&nWidth);
			}

			if (SUCCEEDED(hr))
			{
				m_nColorWidth = nWidth;
				hr = pFrameDescription->get_Height(&nHeight);
			}

			if (SUCCEEDED(hr))
			{
				m_nColorHeight = nHeight;
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
			}

			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Bgra)
				{
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&pBuffer));
				}
				else if (m_pColorRGBX)
				{
					pBuffer = m_pColorRGBX;
					nBufferSize = nWidth * nHeight * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pBuffer), ColorImageFormat_Bgra);            
				}
				else
				{
					hr = E_FAIL;
				}
			}

			if (SUCCEEDED(hr))
			{
				ProcessColor(nTime, pBuffer, nWidth, nHeight);
			}

			SafeRelease(pFrameDescription);
		}
		else{
			DumpHR(hr);
		}

		SafeRelease(pColorFrame);
	}
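
UpdateColor() falls back to m_pColorRGBX when the raw format is not BGRA; that buffer is typically allocated once for the full color resolution and freed on teardown. A sketch of the usual pattern from the SDK samples, assuming cColorWidth/cColorHeight constants (1920/1080):

	// constructor: one RGBQUAD per color pixel
	m_pColorRGBX = new RGBQUAD[cColorWidth * cColorHeight];

	// destructor
	if (m_pColorRGBX)
	{
		delete [] m_pColorRGBX;
		m_pColorRGBX = NULL;
	}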
コード例 #25
0
void* Kinect2StreamImpl::populateFrameBuffer(int& buffWidth, int& buffHeight)
{
  buffWidth = 0;
  buffHeight = 0;

  if (m_sensorType == ONI_SENSOR_COLOR) {
    if (m_pFrameReader.color && m_pFrameBuffer.color) {
      buffWidth = 1920;
      buffHeight = 1080;

      IColorFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.color->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        ColorImageFormat imageFormat = ColorImageFormat_None;
        hr = frame->get_RawColorImageFormat(&imageFormat);
        if (SUCCEEDED(hr)) {
          if (imageFormat == ColorImageFormat_Bgra) {
            RGBQUAD* data;
            UINT bufferSize;
            frame->AccessRawUnderlyingBuffer(&bufferSize, reinterpret_cast<BYTE**>(&data));
            memcpy(m_pFrameBuffer.color, data, 1920*1080*sizeof(RGBQUAD));
          }
          else {
            frame->CopyConvertedFrameDataToArray(1920*1080*sizeof(RGBQUAD), reinterpret_cast<BYTE*>(m_pFrameBuffer.color), ColorImageFormat_Bgra);
          }
        }
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.color);
    }
  }
  else if (m_sensorType == ONI_SENSOR_DEPTH) {
    if (m_pFrameReader.depth && m_pFrameBuffer.depth) {
      buffWidth = 512;
      buffHeight = 424;

      IDepthFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.depth->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        UINT16* data;
        UINT bufferSize;
        frame->AccessUnderlyingBuffer(&bufferSize, &data);
        memcpy(m_pFrameBuffer.depth, data, 512*424*sizeof(UINT16));
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.depth);
    }
  }
  else { // ONI_SENSOR_IR
    if (m_pFrameReader.infrared && m_pFrameBuffer.infrared) {
      buffWidth = 512;
      buffHeight = 424;

      IInfraredFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.infrared->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        UINT16* data;
        UINT bufferSize;
        frame->AccessUnderlyingBuffer(&bufferSize, &data);
        memcpy(m_pFrameBuffer.infrared, data, 512*424*sizeof(UINT16));
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.infrared);
    }
  }

  return NULL;
}
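
The 1920x1080 and 512x424 sizes are hard-coded here; they can instead be queried once at startup. A sketch, where pColorSource is a hypothetical IColorFrameSource* kept from initialization (the same applies to the depth and infrared sources):

  IFrameDescription* desc = NULL;
  int w = 0, h = 0;
  if (SUCCEEDED(pColorSource->get_FrameDescription(&desc))) {
    desc->get_Width(&w);   // 1920 on Kinect v2
    desc->get_Height(&h);  // 1080 on Kinect v2
    desc->Release();
  }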
コード例 #26
0
ファイル: ofxKinect2.cpp プロジェクト: Furkanzmc/ofxKinect2
bool ColorStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool readed = false;
    if (!m_StreamHandle.colorFrameReader) {
        ofLogWarning("ofxKinect2::ColorStream") << "Stream is not open.";
        return readed;
    }
    Stream::readFrame(multiFrame);
    IColorFrame *colorFrame = nullptr;

    HRESULT hr = E_FAIL;
    if (!multiFrame) {
        hr = m_StreamHandle.colorFrameReader->AcquireLatestFrame(&colorFrame);
    }
    else {
        IColorFrameReference *colorFrameReference = nullptr;
        hr = multiFrame->get_ColorFrameReference(&colorFrameReference);
        if (SUCCEEDED(hr)) {
            hr = colorFrameReference->AcquireFrame(&colorFrame);
        }
        safeRelease(colorFrameReference);
    }

    if (SUCCEEDED(hr)) {
        IFrameDescription *colorFrameDescription = nullptr;
        ColorImageFormat imageFormat = ColorImageFormat_None;

        hr = colorFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);

        if (SUCCEEDED(hr)) {
            hr = colorFrame->get_FrameDescription(&colorFrameDescription);
        }

        if (SUCCEEDED(hr)) {
            hr = colorFrameDescription->get_Width(&m_Frame.width);
        }

        if (SUCCEEDED(hr)) {
            hr = colorFrameDescription->get_Height(&m_Frame.height);
        }

        if (SUCCEEDED(hr)) {
            hr = colorFrameDescription->get_HorizontalFieldOfView(&m_Frame.horizontalFieldOfView);
        }
        if (SUCCEEDED(hr)) {
            hr = colorFrameDescription->get_VerticalFieldOfView(&m_Frame.verticalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = colorFrameDescription->get_DiagonalFieldOfView(&m_Frame.diagonalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = colorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr)) {
            if (m_Buffer == nullptr) {
                m_Buffer = new unsigned char[m_Frame.width * m_Frame.height * 4];
            }
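            // note: in practice the Kinect v2 color camera reports Yuy2 as its raw
            // format, so the conversion branch below is the path that normally runs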
            if (imageFormat == ColorImageFormat_Rgba) {
                hr = colorFrame->AccessRawUnderlyingBuffer((UINT *)&m_Frame.dataSize, reinterpret_cast<BYTE **>(&m_Frame.data));
            }
            else {
                m_Frame.data = m_Buffer;
                m_Frame.dataSize = m_Frame.width * m_Frame.height * 4 * sizeof(unsigned char);
                hr = colorFrame->CopyConvertedFrameDataToArray((UINT)m_Frame.dataSize, reinterpret_cast<BYTE *>(m_Frame.data),  ColorImageFormat_Rgba);
            }
        }

        if (SUCCEEDED(hr)) {
            readed = true;
            setPixels(m_Frame);
        }
        safeRelease(colorFrameDescription);
    }

    safeRelease(colorFrame);

    return readed;
}
コード例 #27
0
	void processColor() {
		if (!device) return;
		if (!m_pColorFrameReader) return;
		
		IColorFrame* pColorFrame = NULL;
		HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);
		if (SUCCEEDED(hr)) {
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nBufferSize = 0;
			RGBQUAD *src = NULL;

			hr = pColorFrame->get_RelativeTime(&nTime);
			if (SUCCEEDED(hr)) {
				hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			}
			if (SUCCEEDED(hr)) {
				hr = pFrameDescription->get_Width(&nWidth);
			}
			if (SUCCEEDED(hr)) {
				hr = pFrameDescription->get_Height(&nHeight);
			}
			if (SUCCEEDED(hr)) {
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
			}

			if (imageFormat == ColorImageFormat_Bgra)
			{
				// zero-copy access: the pointer stays valid until the frame is released
				hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&src));
			}
			else
			{
				if (!rgb_buffer) {
					rgb_buffer = new RGBQUAD[nWidth * nHeight];
				}

				//post("image format %d", imageFormat);
				// convert to BGRA so the RGBQUAD field reads below stay correct
				nBufferSize = nWidth * nHeight * sizeof(RGBQUAD);
				hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(rgb_buffer), ColorImageFormat_Bgra);
				src = rgb_buffer;
			}
			if (FAILED(hr)) {
				error("failed to read color frame");
				if (pFrameDescription) pFrameDescription->Release();
				pColorFrame->Release();
				return;
			}
			ARGB * dst = (ARGB *)rgb_mat.back;
			int cells = nWidth * nHeight;

			//if (align_depth_to_color) {
				for (int i = 0; i < cells; ++i) {
					dst[i].r = src[i].rgbRed;
					dst[i].g = src[i].rgbGreen;
					dst[i].b = src[i].rgbBlue;
				}
			/*}
			else {
				// align color to depth:
				//std::fill(dst, dst + cells, RGB(0, 0, 0));
				for (int i = 0; i < cells; ++i) {
					int c = colorCoordinates[i * 2];
					int r = colorCoordinates[i * 2 + 1];
					if (c >= 0 && c < KINECT_DEPTH_WIDTH
						&& r >= 0 && r < KINECT_DEPTH_HEIGHT) {
						// valid location: depth value:
						int idx = r*KINECT_DEPTH_WIDTH + c;
						dst[i].r = src[idx].r;
						dst[i].g = src[idx].g;
						dst[i].b = src[idx].b;
					}
				}
			}*/

			new_rgb_data = 1;

			if (pFrameDescription) {
				pFrameDescription->Release();
			}
		}

		// release the frame so the reader can deliver the next one
		if (pColorFrame) {
			pColorFrame->Release();
		}
	}
コード例 #28
0
ファイル: mykinect2.cpp プロジェクト: Kaanade/Boiterie
void MyKinect2::Update()
{
    // grab the 2D color image
    if (!m_pColorFrameReader)
    {
        return;
    }

    IColorFrame* pColorFrame = NULL;

    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        IFrameDescription* pFrameDescription = NULL;
        int nWidth = 0;
        int nHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nBufferSize = 0;


        hr = pColorFrame->get_RelativeTime(&nTime);

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Width(&nWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Height(&nHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }


        if (SUCCEEDED(hr) && (nWidth == cColorWidth) && (nHeight == cColorHeight))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&webcam.data));
            }
            else if (m_pColorRGBX)
            {
                nBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(webcam.data), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }
        SafeRelease(pFrameDescription);
    }

    // grab the skeleton
    if (!m_pBodyFrameReader)
    {
        SafeRelease(pColorFrame);
        return;
    }

    IBodyFrame* pBodyFrame = NULL;

    hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;

        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessBody(BODY_COUNT, ppBodies); // BODY_COUNT is a #define from Kinect.h equal to 6... maybe use 1 later to avoid problems when retrieving joint positions
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }

    SafeRelease(pBodyFrame);
    SafeRelease(pColorFrame);
}
コード例 #29
0
/// <summary>
/// Main processing function
/// </summary>
void CCoordinateMappingBasics::Update()
{
    if (!m_pMultiSourceFrameReader)
    {
        return;
    }

    IMultiSourceFrame* pMultiSourceFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IColorFrame* pColorFrame = NULL;
    IBodyIndexFrame* pBodyIndexFrame = NULL;

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

    if (SUCCEEDED(hr))
    {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
        }

        SafeRelease(pBodyIndexFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyFrameReference* pBodyFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyFrameReference(&pBodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);
        }

		SafeRelease(pBodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
		// Depth
        INT64 nDepthTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;

		// Color
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

		// BodyIndex
        IFrameDescription* pBodyIndexFrameDescription = NULL;
        int nBodyIndexWidth = 0;
        int nBodyIndexHeight = 0;
        UINT nBodyIndexBufferSize = 0;
        BYTE *pBodyIndexBuffer = NULL;

		// Body
		IBody* ppBodies[BODY_COUNT] = { 0 };

        // get depth frame data
        hr = pDepthFrame->get_RelativeTime(&nDepthTime);

		// Depth
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);            
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Width(&nColorWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Height(&nColorHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        // get body index frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
        }

		// get body frame data

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

        if (SUCCEEDED(hr))
        {
            ProcessFrame(nDepthTime, pDepthBuffer, nDepthWidth, nDepthHeight, 
                pColorBuffer, nColorWidth, nColorHeight,
				pBodyIndexBuffer, nBodyIndexWidth, nBodyIndexHeight, BODY_COUNT, ppBodies);
        }

        SafeRelease(pDepthFrameDescription);
        SafeRelease(pColorFrameDescription);
        SafeRelease(pBodyIndexFrameDescription);

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

    SafeRelease(pDepthFrame);
    SafeRelease(pColorFrame);
    SafeRelease(pBodyIndexFrame);
    SafeRelease(pBodyFrame);
    SafeRelease(pMultiSourceFrame);
}
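
ProcessFrame() receives matching depth, color, and body-index buffers; the usual next step for this kind of sample is a per-pixel mapping through the coordinate mapper. A sketch of the standard call, assuming a mapper member obtained from IKinectSensor::get_CoordinateMapper and a hypothetical m_pColorCoordinates array (one ColorSpacePoint per depth pixel, allocated up front):

    HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(
        nDepthWidth * nDepthHeight, pDepthBuffer,
        nDepthWidth * nDepthHeight, m_pColorCoordinates);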
コード例 #30
0
ファイル: main.cpp プロジェクト: irvs/send_skeleton
int main(int argc, char **argv)
{
  std::cout << "Hello World." << std::endl;
  	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat colorMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Color" );

	while( 1 ){
		// Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
		if( SUCCEEDED( hResult ) ){
			hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
			if( SUCCEEDED( hResult ) ){
				cv::resize( bufferMat, colorMat, cv::Size(), 0.5, 0.5 );
			}
		}
		SafeRelease( &pColorFrame );

		cv::imshow( "Color", colorMat );

		if( cv::waitKey( 30 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( &pColorSource );
	SafeRelease( &pColorReader );
	SafeRelease( &pDescription );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( &pSensor );
	cv::destroyAllWindows();

	return 0;
}
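
Unlike the earlier samples, this one passes the address of each pointer to SafeRelease, which implies a pointer-to-pointer overload. A minimal sketch of that variant:

template<class Interface>
inline void SafeRelease( Interface** ppInterfaceToRelease )
{
	if( *ppInterfaceToRelease != NULL ){
		( *ppInterfaceToRelease )->Release();
		*ppInterfaceToRelease = NULL;
	}
}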