Example #1
// Get the depth data
GLuint KinectV2::getDepth() const
{
  // Bind the depth texture
  glBindTexture(GL_TEXTURE_2D, depthTexture);

  // If the next depth frame has arrived
  IDepthFrame *depthFrame;
  if (depthReader->AcquireLatestFrame(&depthFrame) == S_OK)
  {
    // Get the size and location of the depth data
    UINT depthSize;
    UINT16 *depthBuffer;
    depthFrame->AccessUnderlyingBuffer(&depthSize, &depthBuffer);

    // Compute the color texture coordinates and transfer them to the buffer object
    glBindBuffer(GL_ARRAY_BUFFER, coordBuffer);
    ColorSpacePoint *const texcoord(static_cast<ColorSpacePoint *>(glMapBuffer(GL_ARRAY_BUFFER, GL_WRITE_ONLY)));
    coordinateMapper->MapDepthFrameToColorSpace(depthCount, depthBuffer, depthCount, texcoord);
    glUnmapBuffer(GL_ARRAY_BUFFER);

    // Transfer the depth data to the texture
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, depthWidth, depthHeight, GL_RED, GL_UNSIGNED_SHORT, depthBuffer);

    // Release the depth frame
    depthFrame->Release();
  }

  return depthTexture;
}
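For getDepth() to work, depthTexture must already exist with a format compatible with the GL_RED / GL_UNSIGNED_SHORT upload above. A minimal one-time setup sketch; the GL_R16 internal format and the filtering choices are assumptions, not taken from the original source:

// Hypothetical one-time allocation for depthTexture: a single-channel
// 16-bit normalized texture at the depth resolution.
glGenTextures(1, &depthTexture);
glBindTexture(GL_TEXTURE_2D, depthTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R16, depthWidth, depthHeight, 0,
             GL_RED, GL_UNSIGNED_SHORT, nullptr);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);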
Example #2
void KinectCapture::GetDepthFrame(IMultiSourceFrame* pMultiFrame)
{
	IDepthFrameReference* pDepthFrameReference = NULL;
	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr = pMultiFrame->get_DepthFrameReference(&pDepthFrameReference);
	if (SUCCEEDED(hr))
	{
		hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
	}

	if (SUCCEEDED(hr))
	{
		if (pDepth == NULL)
		{
			IFrameDescription* pFrameDescription = NULL;
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
			pFrameDescription->get_Width(&nDepthFrameWidth);
			pFrameDescription->get_Height(&nDepthFrameHeight);
			pDepth = new UINT16[nDepthFrameHeight * nDepthFrameWidth];
			SafeRelease(pFrameDescription);
		}

		UINT nBufferSize = nDepthFrameHeight * nDepthFrameWidth;
		hr = pDepthFrame->CopyFrameDataToArray(nBufferSize, pDepth);
	}

	SafeRelease(pDepthFrame);
	SafeRelease(pDepthFrameReference);
}
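Several of these examples rely on a SafeRelease helper that none of them defines; a minimal sketch, assuming the usual definition from the Kinect SDK samples:

// Release a COM interface pointer and null it, so that releasing an
// already-released (or never-acquired) pointer is harmless.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != NULL)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}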
Example #3
void idle()
{
	// Read color data
	IColorFrame* pCFrame = nullptr;
	if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK)
	{
		pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, pColorBuffer, ColorImageFormat_Rgba);

		pCFrame->Release();
		pCFrame = nullptr;
	}

	// Read depth data
	IDepthFrame* pDFrame = nullptr;
	if (pDepthFrameReader->AcquireLatestFrame(&pDFrame) == S_OK)
	{
		pDFrame->CopyFrameDataToArray(uDepthPointNum, pDepthBuffer);

		pDFrame->Release();
		pDFrame = nullptr;

		// map to camera space
		pCoordinateMapper->MapColorFrameToCameraSpace(uDepthPointNum, pDepthBuffer, uColorPointNum, pCSPoints);
	}
}
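This idle() assumes externally allocated buffers; a plausible setup sketch, with sizes following the Kinect v2 stream resolutions (1920x1080 color, 512x424 depth). The initializations below are assumptions for illustration, not the original declarations:

// Hypothetical buffer setup for idle(). MapColorFrameToCameraSpace
// produces one CameraSpacePoint per color pixel, hence uColorPointNum.
const UINT uColorPointNum = 1920 * 1080;
const UINT uDepthPointNum = 512 * 424;
const UINT uColorBufferSize = uColorPointNum * 4 * sizeof(BYTE); // RGBA
BYTE* pColorBuffer = new BYTE[uColorBufferSize];
UINT16* pDepthBuffer = new UINT16[uDepthPointNum];
CameraSpacePoint* pCSPoints = new CameraSpacePoint[uColorPointNum];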
Example #4
// Get the camera-space coordinates
GLuint KinectV2::getPoint() const
{
  // Bind the camera-coordinate texture
  glBindTexture(GL_TEXTURE_2D, pointTexture);

  // If the next depth frame has arrived
  IDepthFrame *depthFrame;
  if (depthReader->AcquireLatestFrame(&depthFrame) == S_OK)
  {
    // Get the size and location of the depth data
    UINT depthSize;
    UINT16 *depthBuffer;
    depthFrame->AccessUnderlyingBuffer(&depthSize, &depthBuffer);

    // Compute the color texture coordinates and transfer them to the buffer object
    glBindBuffer(GL_ARRAY_BUFFER, coordBuffer);
    ColorSpacePoint *const texcoord(static_cast<ColorSpacePoint *>(glMapBuffer(GL_ARRAY_BUFFER, GL_WRITE_ONLY)));
    coordinateMapper->MapDepthFrameToColorSpace(depthCount, depthBuffer, depthCount, texcoord);
    glUnmapBuffer(GL_ARRAY_BUFFER);

    // Get the depth-to-camera-space conversion table
    UINT32 entry;
    PointF *table;
    coordinateMapper->GetDepthFrameToCameraSpaceTable(&entry, &table);

    // For every point
    for (unsigned int i = 0; i < entry; ++i)
    {
      // Scale factor converting depth values to meters
      static const GLfloat zScale(-0.001f);

      // Get the depth value of this point
      const unsigned short d(depthBuffer[i]);

      // Convert the depth value to meters (unmeasurable points become maxDepth)
      const GLfloat z(d == 0 ? -maxDepth : GLfloat(d) * zScale);

      // Get the position of this point on the screen
      const GLfloat x(table[i].X);
      const GLfloat y(-table[i].Y);

      // Compute the camera-space coordinates of this point
      position[i][0] = x * z;
      position[i][1] = y * z;
      position[i][2] = z;
    }

    // Transfer the camera-space coordinates
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, depthWidth, depthHeight, GL_RGB, GL_FLOAT, position);

    // Release the table
    CoTaskMemFree(table);

    // Release the depth frame
    depthFrame->Release();
  }

  return pointTexture;
}
Example #5
cv::Mat capKinect::update(cv::Mat& depth_show)
{
	if (!m_pDepthReader) return cv::Mat();
	IDepthFrame* pDepthFrame = NULL;

	HRESULT hr = m_pDepthReader->AcquireLatestFrame(&pDepthFrame);
	cv::Mat re;

	if (SUCCEEDED(hr))
	{
		IFrameDescription* pFrameDescription = NULL;
		int nWidth = 0;
		int nHeight = 0;
		USHORT nDepthMinReliableDistance = 0;
		USHORT nDepthMaxDistance = 0;
		UINT nBufferSize = 0;
		UINT16 *pBuffer = NULL;
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Width(&nWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Height(&nHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			// In order to see the full range of depth (including the less reliable far-field depth)
			// we are setting nDepthMaxDistance to the extreme potential depth threshold
			nDepthMaxDistance = USHRT_MAX; // lower this (e.g. to 1000 mm) to simply cut off the far background

			// Note:  If you wish to filter by reliable depth distance, uncomment the following line.  
			//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);  
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
		}

		if (SUCCEEDED(hr))
		{
			re=capture(pBuffer, nWidth, nHeight, depth_show, nDepthMinReliableDistance, nDepthMaxDistance);
		}

		if(pFrameDescription)SafeRelease(pFrameDescription);
	}

	if(pDepthFrame)SafeRelease(pDepthFrame);
	return re;
}
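The capture() member called above is not shown; a minimal sketch, assuming it copies the raw frame buffer (which dies with the frame) and produces an 8-bit visualization clipped to the reliable range. The masking and scaling choices here are assumptions:

// Hypothetical sketch of capKinect::capture.
cv::Mat capKinect::capture(UINT16* pBuffer, int nWidth, int nHeight,
	cv::Mat& depth_show, USHORT nMinDepth, USHORT nMaxDepth)
{
	cv::Mat depth16(nHeight, nWidth, CV_16UC1, pBuffer);
	cv::Mat depth = depth16.clone(); // own the data before the frame is released

	// Scale to 8 bit for display and zero out values below the reliable minimum.
	depth.convertTo(depth_show, CV_8U, 255.0 / nMaxDepth);
	depth_show.setTo(0, depth < nMinDepth);
	return depth;
}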
Example #6
bool KinectInterface::getFrameData(IMultiSourceFrame* frame, cv::Mat& intensity_mat, cv::Mat& depth_mat, cv::Mat& pos_mat) {
	//Obtain depth frame
	IDepthFrame* depthframe = nullptr;
	if (FAILED(depthFrameReader->AcquireLatestFrame(&depthframe))) return false;
	if (!depthframe) return false;
	// Get data from frame
	unsigned int sz;
	unsigned short* buf;
	if (FAILED(depthframe->AccessUnderlyingBuffer(&sz, &buf))) { depthframe->Release(); return false; }
	//get depth -> xyz mapping
	if (FAILED(mapper->MapDepthFrameToCameraSpace(width*height, buf, width*height, depth2xyz))) { depthframe->Release(); return false; }
	//get depth -> rgb image mapping
	if (FAILED(mapper->MapDepthFrameToColorSpace(width*height, buf, width*height, depth2rgb))) { depthframe->Release(); return false; }
	//save depth
	if (FAILED(depthframe->CopyFrameDataToArray(height * width, depth_data))) { depthframe->Release(); return false; }

	depthframe->Release();


	//Obtain RGB frame
	IColorFrame* colorframe;
	if (FAILED(colorFrameReader->AcquireLatestFrame(&colorframe))) return false;
	if (!colorframe) return false;

	// Get data from frame
	if (FAILED(colorframe->CopyConvertedFrameDataToArray(colorwidth*colorheight * 4, rgbimage, ColorImageFormat_Rgba))) { colorframe->Release(); return false; }


	cv::Mat tmp_depth = cv::Mat::zeros(colorheight, colorwidth, CV_16UC1);
	cv::Mat tmp_pos = cv::Mat::zeros(colorheight, colorwidth, CV_32FC3);
	cv::Mat depth_org(height, width, CV_16UC1, depth_data);
	cv::Mat tmp_rgb(colorheight, colorwidth, CV_8UC4, rgbimage);

	// Write color array for vertices
	for (int i = 0; i < width*height; i++) {
		ColorSpacePoint p = depth2rgb[i];
		int iY = (int)(p.Y + 0.5);
		int iX = (int)(p.X + 0.5);
		if (iX >= 0 && iY >= 0 && iX < colorwidth && iY < colorheight) {
			// Check if color pixel coordinates are in bounds
			tmp_depth.at<unsigned short>(iY, iX) = depth_data[i];
			//tmp_pos.at<float>(iY, iX, 0) = depth2xyz[i].X;
			//tmp_pos.at<float>(iY, iX, 1) = depth2xyz[i].Y;
			//tmp_pos.at<float>(iY, iX, 2) = depth2xyz[i].Z;
		}
	}

	if (colorframe) colorframe->Release();

	cv::resize(tmp_rgb(cv::Rect(240, 0, 1440, 1080)), intensity_mat, cv::Size(640, 480));
	cv::resize(tmp_depth(cv::Rect(240, 0, 1440, 1080)), depth_mat, cv::Size(640, 480));
	cv::resize(tmp_pos(cv::Rect(240, 0, 1440, 1080)), pos_mat, cv::Size(640, 480));
	cv::cvtColor(intensity_mat, intensity_mat, CV_RGBA2GRAY);
	return true;
}
Example #7
bool ms_kinect2::acquire_depth_frame(const _OPENNUI byte* dst)
{
    bool result = false;
    IDepthFrame* pDepthFrame = NULL;
    static unsigned int bufferSize = 512 * 424;
    HRESULT hResult = S_OK;

    hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);
    if (SUCCEEDED(hResult))
    {
        hResult = pDepthFrame->CopyFrameDataToArray(bufferSize, (UINT16*)dst);
        if (SUCCEEDED(hResult))
            result = true;
    }
    SafeRelease(pDepthFrame);

    return result;
}
Example #8
	void Microsoft2Grabber::DepthFrameArrived(IDepthFrameReference* pDepthFrameReference) {
		IDepthFrame* pDepthFrame = NULL;
		HRESULT hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
		//HRESULT hr = pDepthFrameReference->AcquireLatestFrame(&pDepthFrame);
		if(FAILED(hr))
			return;
		//cout << "got a depth frame" << endl;
		INT64 nDepthTime = 0;
		IFrameDescription* pDepthFrameDescription = NULL;
		int nDepthWidth = 0;
		int nDepthHeight = 0;
		UINT nDepthBufferSize = 0;

		// get depth frame data
		hr = pDepthFrame->get_RelativeTime(&nDepthTime);
		if (SUCCEEDED(hr)) {
			hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrameDescription->get_Width(&nDepthWidth);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrameDescription->get_Height(&nDepthHeight);
		}
		if (SUCCEEDED(hr)) {
			hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
			//WaitForSingleObject(hDepthMutex,INFINITE);
			Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
			Mat cloned = tmp.clone(); // own the data before the frame is released
			MatDepth depth_img = *reinterpret_cast<MatDepth*>(&cloned);
			m_depthTime = nDepthTime;
			if (depth_image_signal_->num_slots () > 0) {
				depth_image_signal_->operator()(depth_img);
			}
			if (num_slots<sig_cb_microsoft_point_cloud_rgba>() > 0 || all_data_signal_->num_slots() > 0 || image_depth_image_signal_->num_slots() > 0) {
				//rgb_sync_.add1 (depth_img, m_depthTime);
				imageDepthOnlyImageCallback(depth_img);
			}
			
			//ReleaseMutex(hDepthMutex);
		}
		SafeRelease(pDepthFrameDescription);
		SafeRelease(pDepthFrame);
	}
Example #9
void pcl::Kinect2Grabber::threadFunction()
{
	while (!quit){
		boost::unique_lock<boost::mutex> lock(mutex);

		// Acquire Latest Color Frame
		IColorFrame* colorFrame = nullptr;
		result = colorReader->AcquireLatestFrame(&colorFrame);
		if (SUCCEEDED(result)){
			// Retrieved Color Data
			result = colorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
			if (FAILED(result)){
				throw std::exception("Exception : IColorFrame::CopyConvertedFrameDataToArray()");
			}
		}
		SafeRelease(colorFrame);

		// Acquire Latest Depth Frame
		IDepthFrame* depthFrame = nullptr;
		result = depthReader->AcquireLatestFrame(&depthFrame);
		if (SUCCEEDED(result)){
			// Retrieved Depth Data
			result = depthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
			if (FAILED(result)){
				throw std::exception("Exception : IDepthFrame::CopyFrameDataToArray()");
			}
		}
		SafeRelease(depthFrame);

		lock.unlock();

		if (signal_PointXYZ->num_slots() > 0) {
			signal_PointXYZ->operator()(convertDepthToPointXYZ(&depthBuffer[0]));
		}

		if (signal_PointXYZRGB->num_slots() > 0) {
			signal_PointXYZRGB->operator()(convertRGBDepthToPointXYZRGB(&colorBuffer[0], &depthBuffer[0]));
		}
		if (signal_PointXYZI->num_slots() > 0) {
			signal_PointXYZI->operator()(convertRGBDepthToPointXYZI(&colorBuffer[0], &depthBuffer[0]));
		}
	}
}
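The conversion helpers used above (convertDepthToPointXYZ and its RGB/intensity variants) are not part of this listing; a minimal sketch of the depth-only one, assuming a 512x424 depth stream and an ICoordinateMapper* member named mapper. The body below is an assumption, not the original implementation:

// Hypothetical sketch: map the whole depth frame to camera space,
// then copy the points into a PCL cloud (coordinates in meters).
pcl::PointCloud<pcl::PointXYZ>::Ptr pcl::Kinect2Grabber::convertDepthToPointXYZ(UINT16* depthBuffer)
{
	pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
	cloud->width = 512;
	cloud->height = 424;
	cloud->is_dense = false;
	cloud->points.resize(cloud->width * cloud->height);

	std::vector<CameraSpacePoint> points(cloud->points.size());
	mapper->MapDepthFrameToCameraSpace(
		static_cast<UINT>(points.size()), depthBuffer,
		static_cast<UINT>(points.size()), points.data());

	for (size_t i = 0; i < points.size(); ++i) {
		cloud->points[i].x = points[i].X;
		cloud->points[i].y = points[i].Y;
		cloud->points[i].z = points[i].Z;
	}
	return cloud;
}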
Example #10
void MKinect::getDepthData(IMultiSourceFrame* frame, float* dest) {
	IDepthFrame* depthframe;
	IDepthFrameReference* frameref = NULL;
	frame->get_DepthFrameReference(&frameref);
	frameref->AcquireFrame(&depthframe);
	if (frameref) frameref->Release();
	if (!depthframe) return;
	// Get data from frame
	unsigned int sz;
	UINT16 * buf;
	if (FAILED(depthframe->AccessUnderlyingBuffer(&sz, &buf))) {
		depthframe->Release();
		return;
	}
	HRESULT res = mapper->MapDepthFrameToCameraSpace(
		KinectColorWidth*KinectColorHeight, buf,        // Depth frame data and size of depth frame
		KinectColorWidth*KinectColorHeight, depth2xyz); // Output CameraSpacePoint array and size
	// Process depth frame data...
	
	if (depthframe) depthframe->Release();
}
Example #11
void KinectHDFaceGrabber::update()
{
	if (!m_pColorFrameReader || !m_pDepthFrameReader || !m_pBodyFrameReader){
		return;
	}

    IColorFrame* pColorFrame = nullptr;
    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

	IDepthFrame* depthFrame = nullptr;
	if (SUCCEEDED(hr)){
		hr = m_pDepthFrameReader->AcquireLatestFrame(&depthFrame);
	}
	
    if (SUCCEEDED(hr)){
        ColorImageFormat imageFormat = ColorImageFormat_None;
		
        if (SUCCEEDED(hr)){
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }
	
        if (SUCCEEDED(hr)){
			UINT nBufferSize = m_colorWidth * m_colorHeight * sizeof(RGBQUAD);
			hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(m_colorBuffer.data()), ColorImageFormat_Bgra);
        }
		
		
		if (SUCCEEDED(hr)){
			hr = depthFrame->CopyFrameDataToArray(m_depthBuffer.size(), &m_depthBuffer[0]);
		}
		if (SUCCEEDED(hr)){
			renderColorFrameAndProcessFaces();
		}
				
	}
	
	SafeRelease(depthFrame);
	SafeRelease(pColorFrame);  
}
Example #12
HRESULT KinectHandler::GetDepthImageData(RGBQUAD* &dest)
{
	if (!m_pMultiFrameReader)
	{
		cout << "No frame reader!" << endl;
		return E_FAIL;
	}

	IDepthFrame* pDepthFrame = NULL;
	IMultiSourceFrame* pMultiSourceFrame = NULL;
	HRESULT hr = m_pMultiFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

	if (SUCCEEDED(hr))
	{
		IDepthFrameReference* pDepthFrameReference = NULL;
		hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
		}

		SafeRelease(pDepthFrameReference);
	}

	if (SUCCEEDED(hr))
	{
		INT64 nTime = 0;
		IFrameDescription* pDepthFrameDescription = NULL;
		int nDepthWidth = 0;
		int nDepthHeight = 0;
		USHORT nDepthMinReliableDistance = 0;
		USHORT nDepthMaxDistance = 0;
		UINT nDepthBufferSize = 0;
		UINT16 *pDepthBuffer = NULL;

		hr = pDepthFrame->get_RelativeTime(&nTime);

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Width(&nDepthWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Height(&nDepthHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			// In order to see the full range of depth (including the less reliable far field depth)
			// we are setting nDepthMaxDistance to the extreme potential depth threshold
			nDepthMaxDistance = USHRT_MAX;

			// Note:  If you wish to filter by reliable depth distance, uncomment the following line.
			//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
		}
		if (SUCCEEDED(hr))
		{
			//RGBQUAD* pRGBX = new RGBQUAD[cDepthWidth * cDepthHeight];


			// end pixel is start + width*height - 1
			cout << "w:" << nDepthWidth << " h:" << nDepthHeight << endl;
			cout << "buffersize:" << nDepthBufferSize << endl;

			const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
			RGBQUAD* auxiliar = m_pDepthRGBX;
			//const UINT16* pBufferEnd = pDepthBuffer + (640 * 480);

			cout << "bufferLocation:" << pDepthBuffer << endl;
			cout << "bufferend:" << pBufferEnd << endl;
			int counter = 0;

			while (pDepthBuffer < pBufferEnd)
			{
				//cout << "now:" << pDepthBuffer << " end:" << pBufferEnd << endl;
				USHORT depth = *pDepthBuffer;
				//cout << "now:" << pDepthBuffer << " end:" << pBufferEnd << endl;

				// To convert to a byte, we're discarding the most-significant
				// rather than least-significant bits.
				// We're preserving detail, although the intensity will "wrap."
				// Values outside the reliable depth range are mapped to 0 (black).

				// Note: Using conditionals in this loop could degrade performance.
				// Consider using a lookup table instead when writing production code.
				BYTE intensity = static_cast<BYTE>((depth >= nDepthMinReliableDistance) && (depth <= nDepthMaxDistance) ? (depth % 256) : 0);
				auxiliar->rgbBlue = intensity;
				auxiliar->rgbGreen = intensity;
				auxiliar->rgbRed = intensity;
				auxiliar->rgbReserved = (BYTE)255;

				counter++;

				++auxiliar;
				++pDepthBuffer;
			}

			dest = m_pDepthRGBX;
			cout << "total struct:" << counter << endl;
		}

		SafeRelease(pDepthFrameDescription);
	}
	else
	{
		cout << "Acquire latest frame FAILED" << endl;
		SafeRelease(pDepthFrame);
		SafeRelease(pMultiSourceFrame);
		return E_FAIL;
	}

	SafeRelease(pDepthFrame);
	SafeRelease(pMultiSourceFrame);
	return hr;
}
Example #13
int main(int argc, char** argv)
{
	// 1a. Get default Sensor
	cout << "Try to get default sensor" << endl;
	IKinectSensor* pSensor = nullptr;
	if (GetDefaultKinectSensor(&pSensor) != S_OK)
	{
		cerr << "Get Sensor failed" << endl;
	}
	else
	{
		// 1b. Open sensor
		cout << "Try to open sensor" << endl;
		if (pSensor->Open() != S_OK)
		{
			cerr << "Can't open sensor" << endl;
		}
		else
		{
			// 2a. Get frame source
			cout << "Try to get source" << endl;
			IDepthFrameSource* pFrameSource = nullptr;
			if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
			{
				cerr << "Can't get frame source" << endl;
			}
			else
			{
				// 2b. Get frame description
				int		iWidth = 0;
				int		iHeight = 0;
				IFrameDescription* pFrameDescription = nullptr;
				if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
				{
					pFrameDescription->get_Width(&iWidth);
					pFrameDescription->get_Height(&iHeight);
					pFrameDescription->Release();
					pFrameDescription = nullptr;
				}

				// 2c. get some depth-only metadata
				UINT16 uDepthMin = 0, uDepthMax = 0;
				pFrameSource->get_DepthMinReliableDistance(&uDepthMin);
				pFrameSource->get_DepthMaxReliableDistance(&uDepthMax);
				cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;

				// prepare OpenCV
				cv::Mat mDepthImg(iHeight, iWidth, CV_16UC1);
				cv::Mat mImg8bit(iHeight, iWidth, CV_8UC1);
				cv::namedWindow( "Depth Map" );

				// 3a. get frame reader
				cout << "Try to get frame reader" << endl;
				IDepthFrameReader* pFrameReader = nullptr;
				if (pFrameSource->OpenReader(&pFrameReader) != S_OK)
				{
					cerr << "Can't get frame reader" << endl;
				}
				else
				{
					// Enter main loop
					cout << "Enter main loop" << endl;
					while (true)
					{
						// 4a. Get last frame
						IDepthFrame* pFrame = nullptr;
						if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
						{
							// 4c. copy the depth map to image
							if (pFrame->CopyFrameDataToArray(iWidth * iHeight, reinterpret_cast<UINT16*>(mDepthImg.data)) == S_OK)
							{
								// 4d. convert from 16bit to 8bit
								mDepthImg.convertTo(mImg8bit, CV_8U, 255.0f / uDepthMax);
								cv::imshow("Depth Map", mImg8bit);
							}
							else
							{
								cerr << "Data copy error" << endl;
							}

							// 4e. release frame
							pFrame->Release();
						}

						// 4f. check keyboard input
						if (cv::waitKey(30) == VK_ESCAPE){
							break;
						}
					}

					// 3b. release frame reader
					cout << "Release frame reader" << endl;
					pFrameReader->Release();
					pFrameReader = nullptr;
				}

				// 2d. release Frame source
				cout << "Release frame source" << endl;
				pFrameSource->Release();
				pFrameSource = nullptr;
			}

			// 1c. Close Sensor
			cout << "close sensor" << endl;
			pSensor->Close();
		}

		// 1d. Release Sensor
		cout << "Release sensor" << endl;
		pSensor->Release();
		pSensor = nullptr;
	}
	return 0;
}
Example #14
	void capture(Image::Ptr& pImage)
	{
		HRESULT hr;
		
		if (m_pMultiSourceFrameReader==nullptr)
		{
			camera->getContext().error("CameraKinectDevice::capture: m_pMultiSourceFrameReader is nullptr\n");
			// this is bad news - perhaps throw?
			return; // @@@
		}

		IMultiSourceFrame* pMultiSourceFrame = nullptr;
		IDepthFrame* pDepthFrame = nullptr;
		IColorFrame* pColorFrame = nullptr;

		const golem::MSecTmU32 waitStep = 1;
		golem::MSecTmU32 timeWaited = 0;
		golem::Sleep timer;
		while (FAILED(hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame)))
		{
			// this is in CameraOpenNI, but suspect may be causing problem here
			// if (camera->isTerminating())	return;

			timer.msleep(waitStep);
			timeWaited += waitStep;
			if (timeWaited >= timeout)
			{
				camera->getContext().error("CameraKinectDevice::capture: failed to acquire frame within %d ms\n", timeout);
				// keep going - don't return with nothing; reset stopwatch @@@
				timeWaited = 0;
			}
		}
		
		const golem::SecTmReal systemTime1 = camera->getContext().getTimer().elapsed();

		if (SUCCEEDED(hr))
		{
			IDepthFrameReference* pDepthFrameReference = nullptr;

			hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
			if (SUCCEEDED(hr))
			{
				hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
			}
			RELEASE_PTR(pDepthFrameReference);
		}

		if (SUCCEEDED(hr))
		{
			IColorFrameReference* pColorFrameReference = nullptr;

			hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
			if (SUCCEEDED(hr))
			{
				hr = pColorFrameReference->AcquireFrame(&pColorFrame);
			}
			RELEASE_PTR(pColorFrameReference);
		}

		if (SUCCEEDED(hr))
		{
			INT64 nDepthTime = 0;
			IFrameDescription* pDepthFrameDescription = nullptr;
			int nDepthWidth = 0;
			int nDepthHeight = 0;
			UINT nDepthBufferSize = 0;
			UINT16 *pDepthBuffer = nullptr;

			IFrameDescription* pColorFrameDescription = nullptr;
			int nColorWidth = 0;
			int nColorHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nColorBufferSize = 0;
			RGBQUAD *pColorBuffer = nullptr;

			// get depth frame data

			hr = pDepthFrame->get_RelativeTime(&nDepthTime);

			if (SUCCEEDED(hr))
				hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);

			if (SUCCEEDED(hr))
				hr = pDepthFrameDescription->get_Width(&nDepthWidth);

			if (SUCCEEDED(hr))
				hr = pDepthFrameDescription->get_Height(&nDepthHeight);

			if (SUCCEEDED(hr))
				hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);

			// get color frame data

			if (SUCCEEDED(hr))
				hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);

			if (SUCCEEDED(hr))
				hr = pColorFrameDescription->get_Width(&nColorWidth);

			if (SUCCEEDED(hr))
				hr = pColorFrameDescription->get_Height(&nColorHeight);

			if (SUCCEEDED(hr))
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);

			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Bgra)
				{
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
				}
				else if (m_pColorRGBX)
				{
					pColorBuffer = m_pColorRGBX;
					nColorBufferSize = nColorWidth * nColorHeight * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
				}
				else
				{
					hr = E_FAIL;
				}
			}
			
			cv::Mat colorFrame;
			colorFrame.create(cColorHeight, cColorWidth, CV_8UC4);
			colorFrame.setTo(cv::Scalar(0, 0, 0, 0));
			//copy the color buffer into a cv::Mat
			memcpy(colorFrame.data, pColorBuffer, (nColorWidth * nColorHeight) * 4);
			//the Kinect image is mirrored, so flip it back
			cv::flip(colorFrame, colorFrame, 1);
			//conversion to pImage, which is BGR
			cv::Mat colorFrameT;
			cv::cvtColor(colorFrame, colorFrameT, CV_BGRA2BGR);
			
			if (mode & CameraKinect::MODE_COLOUR)
			{
				// initialize buffer
				if (pImage == nullptr)
					pImage.reset(new Image());
				pImage->reserve((int)1920, (int)1080);
				//printf("Before clonning frame\n");
				IplImage * img = cvCloneImage(&(IplImage)colorFrameT);
				cvCopy(img, pImage->image);
				cvReleaseImage(&img);
				//printf("After clonning frame\n");
				const golem::SecTmReal systemTime2 = camera->getContext().getTimer().elapsed();
				//setting the timestamp
				pImage->timeStamp = REAL_HALF*(systemTime1 + systemTime2);
			}
			//camera->getContext().debug("After colour\n");
			//needed to retain the beginning of the buffer
			const UINT16* pBufferStart = pDepthBuffer;
			const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
			//copying depth buffer into cv::Mat
			long depthSizeP = (pBufferEnd - pBufferStart) * 2;

			cv::Mat depthFrame;
			depthFrame.create(cDepthHeight, cDepthWidth, CV_16UC1);
			depthFrame.setTo(cv::Scalar(0));
			memcpy(depthFrame.data, pBufferStart, depthSizeP);
			//the Kinect image is mirrored, so flip it back
			cv::flip(depthFrame, depthFrame, 1);
			//camera->getContext().debug("After getting depth data\n");
			//depth mode color data mapped into camera space (remember high resolution 1920x1080)
			if (mode & CameraKinect::MODE_DEPTH && (std::stoi(this->property.format.c_str()) == 101))
			{
				//camera->getContext().debug("In depth mode\n");
				if (pImage == nullptr)
					pImage.reset(new Image());
				pImage->reserve((int)1920, (int)1080);

				pImage->cloud.reset(new PointSeq((uint32_t)property.width, (uint32_t)property.height));
			
				//Obtaining the pointcloud
				hasImageStream = false;
				float missingPoint = std::numeric_limits<float>::quiet_NaN();

				hr = m_pCoordinateMapper->MapColorFrameToCameraSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, cColorHeight * cColorWidth, m_pCameraCoordinatesColor);

				golem::Point* pOutWidth = &pImage->cloud->front();
				CameraSpacePoint* pCamCWidth = m_pCameraCoordinatesColor;

				for (int y = 0; y < cColorHeight; ++y)
				{
					golem::Point* pOut = pOutWidth;
					CameraSpacePoint* pCamC = pCamCWidth;

					for (int x = 0; x < cColorWidth; ++x, pCamC++, ++pOut) {
						cv::Vec4b color;
						// compare the float depth directly; truncating it to int
						// would discard valid points closer than one meter
						if (pCamC->Z != 0) {
							pOut->x = pCamC->X;
							pOut->y = pCamC->Y;
							pOut->z = pCamC->Z;
						}
						else {
							pOut->x = pOut->y = pOut->z = missingPoint;
							//printf("Getting to this point4\n");
						}
						pOut->normal_x = pOut->normal_y = pOut->normal_z = golem::numeric_const<float>::ZERO;

						color = colorFrame.at<cv::Vec4b>(y, x);
						if (hasImageStream) {
							pOut->b = color[0];
							pOut->g = color[1];
							pOut->r = color[2];
							pOut->a = colour._rgba.a;
						}
						else {
							pOut->r = colour._rgba.r;
							pOut->g = colour._rgba.g;
							pOut->b = colour._rgba.b;
							pOut->a = colour._rgba.a;
						}
					}
					pCamCWidth += cColorWidth;
					pOutWidth += cColorWidth;
				}
			}

			//depth mode depth data mapped into camera space
			if (mode & CameraKinect::MODE_DEPTH && (std::stoi(this->property.format.c_str()) == 102))
			{
				if (pImage == nullptr)
					pImage.reset(new Image());
				//camera->getContext().debug("In depth mode\n");
				pImage->cloud.reset(new PointSeq((uint32_t)property.width, (uint32_t)property.height));
				//Obtaining the pointcloud
				hasImageStream = false;
				float missingPoint = std::numeric_limits<float>::quiet_NaN();

				hr = m_pCoordinateMapper->MapDepthFrameToCameraSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, cDepthHeight * cDepthWidth, m_pCameraCoordinatesDepth);

				golem::Point* pOutWidth = &pImage->cloud->front();
				CameraSpacePoint* pCamDWidth = m_pCameraCoordinatesDepth;

				for (int y = 0; y < cDepthHeight; ++y)
				{
					golem::Point* pOut = pOutWidth;
					CameraSpacePoint* pCamC = pCamDWidth;

					for (int x = 0; x < cDepthWidth; ++x, pCamC++, ++pOut) {
						cv::Vec4b color;
						//int abc = pCamC->Z;
						if (pCamC->Z != 0) {
							pOut->x = pCamC->X;
							pOut->y = pCamC->Y;
							pOut->z = pCamC->Z;
						}
						else {
							pOut->x = missingPoint;
							pOut->y = missingPoint;
							pOut->z = missingPoint;
							//	//printf("Getting to this point4\n");
						}
						pOut->normal_x = pOut->normal_y = pOut->normal_z = golem::numeric_const<float>::ZERO;

						/*color = colorframe.at<cv::vec4b>(y, x);
						if (hasimagestream) {
						pout->b = color[0];
						pout->g = color[1];
						pout->r = color[2];
						pout->a = colour._rgba.a;
						}*/
						//else {
						pOut->r = colour._rgba.r;
						pOut->g = colour._rgba.g;
						pOut->b = colour._rgba.b;
						pOut->a = colour._rgba.a;
						//}
					}
					//colorFrame += cDepthWidth;
					pCamDWidth += cDepthWidth;
					pOutWidth += cDepthWidth;
				}
				golem::Mat33 aMatrix;
				aMatrix.m11 = golem::Real(-1);
				aMatrix.m12 = golem::Real(0);
				aMatrix.m13 = golem::Real(0);
				aMatrix.m21 = golem::Real(0);
				aMatrix.m22 = golem::Real(-1);
				aMatrix.m23 = golem::Real(0);
				aMatrix.m31 = golem::Real(0);
				aMatrix.m32 = golem::Real(0);
				aMatrix.m33 = golem::Real(1);
				golem::Vec3 aVector;
				aVector.v1 = 0;
				aVector.v2 = 0;
				aVector.v3 = 0;
				golem::Mat34 aMatrix34;
				aMatrix34.R = aMatrix;
				aMatrix34.p = aVector;

				Cloud::PointSeqPtr pCloud = pImage->cloud;
				const Mat34 frame = Cloud::getSensorFrame(*pCloud);
				Cloud::transform(aMatrix34, *pCloud, *pCloud);
				Cloud::setSensorFrame(frame, *pCloud); // don't change the camera frame !!!!!!!!!!!!!!!!!!!!!!!!!!!!

				const golem::SecTmReal systemTime2 = camera->getContext().getTimer().elapsed();
				//setting the timestamp
				pImage->timeStamp = REAL_HALF*(systemTime1 + systemTime2);
			}
			RELEASE_PTR(pDepthFrameDescription);
			RELEASE_PTR(pColorFrameDescription);
		}
		RELEASE_PTR(pDepthFrame);
		RELEASE_PTR(pColorFrame);
		RELEASE_PTR(pMultiSourceFrame);
	}
Example #15
int main()
{

	// name and position windows
	cvNamedWindow("Color Probabilistic Tracking - Samples", 1);
	cvMoveWindow("Color Probabilistic Tracking - Samples", 0, 0);
	cvNamedWindow("Color Probabilistic Tracking - Result", 1);
	cvMoveWindow("Color Probabilistic Tracking - Result", 1000, 0);

	//control mouse
	setMouseCallback("Color Probabilistic Tracking - Samples", onMouse, 0);

	cv::setUseOptimized(true);

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor(&pSensor);
	if (FAILED(hResult)) {
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource(&pColorSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource(&pDepthSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	/*IBodyIndexFrameSource* pBodyIndexSource;
	hResult = pSensor->get_BodyIndexFrameSource(&pBodyIndexSource);*/

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader(&pColorReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader(&pDepthReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	//IBodyIndexFrameReader* pBodyIndexReader; // needs SafeRelease
	//hResult = pBodyIndexSource->OpenReader(&pBodyIndexReader);

	// Description
	IFrameDescription* pColorDescription;
	hResult = pColorSource->get_FrameDescription(&pColorDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width(&colorWidth); // 1920
	pColorDescription->get_Height(&colorHeight); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof(unsigned char);

	cv::Mat colorBufferMat(colorHeight, colorWidth, CV_8UC4);
	cv::Mat colorMat(colorHeight / 2, colorWidth / 2, CV_8UC4);
	cv::namedWindow("Color");

	RGBQUAD* m_pDepthRGBX;
	m_pDepthRGBX = new RGBQUAD[512 * 424];// create heap storage for color pixel data in RGBX format

	IFrameDescription* pDepthDescription;
	hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int depthWidth = 0;
	int depthHeight = 0;
	pDepthDescription->get_Width(&depthWidth); // 512
	pDepthDescription->get_Height(&depthHeight); // 424
	unsigned int depthBufferSize = depthWidth * depthHeight * sizeof(unsigned short);

	cv::Mat depthBufferMat(depthHeight, depthWidth, CV_16UC1);
	UINT16* pDepthBuffer = nullptr;
	cv::Mat depthMat(depthHeight, depthWidth, CV_8UC1);
	cv::namedWindow("Depth");

	//UINT32 nBodyIndexSize = 0;
	//BYTE* pIndexBuffer = nullptr; // this has not been SafeReleased yet

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	cv::Mat coordinateMapperMat(depthHeight, depthWidth, CV_8UC4);
	cv::namedWindow("CoordinateMapper");

	unsigned short minDepth, maxDepth;
	pDepthSource->get_DepthMinReliableDistance(&minDepth);
	pDepthSource->get_DepthMaxReliableDistance(&maxDepth);




	while (1) {

		double t = (double)getTickCount();


		// Color Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
		if (SUCCEEDED(hResult)) {
			hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBufferSize, reinterpret_cast<BYTE*>(colorBufferMat.data), ColorImageFormat::ColorImageFormat_Bgra);
			if (SUCCEEDED(hResult)) {
				cv::resize(colorBufferMat, colorMat, cv::Size(), 0.5, 0.5);
			}
		}
		//SafeRelease( pColorFrame );

		// Depth Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);

		if (SUCCEEDED(hResult)) {
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, reinterpret_cast<UINT16**>(&depthBufferMat.data));

		}

		if (SUCCEEDED(hResult)) {
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, &pDepthBuffer);

			if (SUCCEEDED(hResult))
			{
				RGBQUAD* pRGBX = m_pDepthRGBX;

				// end pixel is start + width*height - 1
				const UINT16* pBufferEnd = pDepthBuffer + (512 * 424);
				int index = 0;

				while (pDepthBuffer < pBufferEnd)
				{
					USHORT depth = *pDepthBuffer;

					BYTE intensity = static_cast<BYTE>((depth >= 50) && (depth <= 5000) ? (depth % 256) : 0);

					pRGBX->rgbRed = intensity;
					pRGBX->rgbGreen = intensity;
					pRGBX->rgbBlue = intensity;

					depthData[index] = depth;

					++index;
					++pRGBX;
					++pDepthBuffer;
				}
			}
		}

		Mat DepthImage(424, 512, CV_8UC4, m_pDepthRGBX);
		//SafeRelease( pDepthFrame );

		// Mapping (Depth to Color)
		if (SUCCEEDED(hResult)) {
			std::vector<ColorSpacePoint> colorSpacePoints(depthWidth * depthHeight);
			hResult = pCoordinateMapper->MapDepthFrameToColorSpace(depthWidth * depthHeight, reinterpret_cast<UINT16*>(depthBufferMat.data), depthWidth * depthHeight, &colorSpacePoints[0]);
			if (SUCCEEDED(hResult)) {
				coordinateMapperMat = cv::Scalar(0, 0, 0, 0);
				for (int y = 0; y < depthHeight; y++) {
					for (int x = 0; x < depthWidth; x++) {
						unsigned int index = y * depthWidth + x;
						ColorSpacePoint point = colorSpacePoints[index];
						int colorX = static_cast<int>(std::floor(point.X + 0.5));
						int colorY = static_cast<int>(std::floor(point.Y + 0.5));
						unsigned short depth = depthBufferMat.at<unsigned short>(y, x);
						if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight)/* && ( depth >= minDepth ) && ( depth <= maxDepth )*/) {
							coordinateMapperMat.at<cv::Vec4b>(y, x) = colorBufferMat.at<cv::Vec4b>(colorY, colorX);
						}
					}
				}
			}
		}

		if (SUCCEEDED(hResult))
		{

			//Particle Filter starts from here
			IplImage iplFrame = coordinateMapperMat; // convert the cv::Mat header to an IplImage (shares the pixel data)
			frame = &iplFrame;

			if (image == 0)
			{
				// initialize variables and allocate buffers 
				image = cvCreateImage(cvGetSize(frame), 8, 4);//every pixel has 4 channels
				image->origin = frame->origin;

				result = cvCreateImage(cvGetSize(frame), 8, 4);
				result->origin = frame->origin;

				hsv = cvCreateImage(cvGetSize(frame), 8, 3);

				hue = cvCreateImage(cvGetSize(frame), 8, 1);

				sat = cvCreateImage(cvGetSize(frame), 8, 1);

				histimg_ref = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg_ref->origin = frame->origin;
				cvZero(histimg_ref);

				histimg = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg->origin = frame->origin;
				cvZero(histimg);

				bin_w = histimg_ref->width / BIN;
				bin_h = histimg_ref->height / BIN;


				data1.sample_t = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_t_1 = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));
				data1.accum_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));

			}

			cvCopy(frame, image);
			cvCopy(frame, result);
			cvCvtColor(image, hsv, CV_BGR2HSV);//image ~ hsv


			if (tracking)
			{
				//v_max = 0.0;

				cvSplit(hsv, hue, 0, 0, 0);//hsv->hue
				cvSplit(hsv, 0, 0, sat, 0);//hsv-saturation

				if (selecting)
				{

					// get the selected target area
					//ref_v_max = 0.0;
					area.width = abs(P_org.x - P_end.x);
					area.height = abs(P_org.y - P_end.y);
					area.x = MIN(P_org.x, P_end.x);
					area.y = MIN(P_org.y, P_end.y);

					cvZero(histimg_ref);

					// build reference histogram
					cvSetImageROI(hue, area);
					cvSetImageROI(sat, area);

					// zero reference histogram
					for (i = 0; i < BIN; i++)
						for (j = 0; j < BIN; j++)
							hist_ref[i][j] = 0.0;

					// calculate reference histogram
					for (i = 0; i < area.height; i++)
					{
						for (j = 0; j < area.width; j++)
						{
							im_hue = cvGet2D(hue, i, j);
							im_sat = cvGet2D(sat, i, j);
							k = int(im_hue.val[0] / STEP_HUE);
							h = int(im_sat.val[0] / STEP_SAT);
							hist_ref[k][h] = hist_ref[k][h] + 1.0;
						}
					}


					// rescale the value of each bin in the reference histogram 
					// and show it as an image
					for (i = 0; i < BIN; i++)
					{
						for (j = 0; j < BIN; j++)
						{
							hist_ref[i][j] = hist_ref[i][j] / (area.height*area.width);
						}
					}

					cvResetImageROI(hue);
					cvResetImageROI(sat);

					// initialize tracking and samples
					track_win = area;
					Initdata(track_win);
					track_win_last = track_win;

					// set up flag of tracking
					selecting = 0;

				}

				// sample propagation and weighting
				track_win = ImProcess(hue, sat, hist_ref, track_win_last);
				FrameNumber++;

				track_win_last = track_win;
				cvZero(histimg);

				// draw the one RED bounding box
				cvRectangle(image, cvPoint(track_win.x, track_win.y), cvPoint(track_win.x + track_win.width, track_win.y + track_win.height), CV_RGB(255, 0, 0), 2);
				printf("width = %d, height = %d\n", track_win.width, track_win.height);

				//save certain images
				if (FrameNumber % 10 == 0)
				{
					if (FrameNumber / 10 == 1) cvSaveImage("./imageout1.jpg", image);

					if (FrameNumber / 10 == 2) cvSaveImage("./imageout2.jpg", image);

					if (FrameNumber / 10 == 3) cvSaveImage("./imageout3.jpg", image);

					if (FrameNumber / 10 == 4) cvSaveImage("./imageout4.jpg", image);

					if (FrameNumber / 10 == 5) cvSaveImage("./imageout5.jpg", image);

					if (FrameNumber / 10 == 6) cvSaveImage("./imageout6.jpg", image);

					if (FrameNumber / 10 == 7) cvSaveImage("./imageout7.jpg", image);

					if (FrameNumber / 10 == 8) cvSaveImage("./imageout8.jpg", image);
				}

				//save certain images
				if (FrameNumber % 10 == 0)
				{
					if (FrameNumber / 10 == 1) cvSaveImage("./resultout1.jpg", result);

					if (FrameNumber / 10 == 2) cvSaveImage("./resultout2.jpg", result);

					if (FrameNumber / 10 == 3) cvSaveImage("./resultout3.jpg", result);

					if (FrameNumber / 10 == 4) cvSaveImage("./resultout4.jpg", result);

					if (FrameNumber / 10 == 5) cvSaveImage("./resultout5.jpg", result);

					if (FrameNumber / 10 == 6) cvSaveImage("./resultout6.jpg", result);

					if (FrameNumber / 10 == 7) cvSaveImage("./resultout7.jpg", result);

					if (FrameNumber / 10 == 8) cvSaveImage("./resultout8.jpg", result);
				}

				//draw a same bounding box in DepthImage
				rectangle(DepthImage, track_win, CV_RGB(255, 0, 0), 2);

				//******************************************************Geodesic Distance***************************************************************************************
					//Point propagation and weight
					if (PointTrack == 1)
					{

						if (PointSelect == 1)//only visit once
						{

							// initialize tracking and samples
							for (int i = 0; i < SAMPLE; i++)
							{
								point[i].x_1 = P_track.x;
								point[i].y_1 = P_track.y;
								point[i].z_1 = depthData[P_track.x + P_track.y * 512];
								point[i].x_1_prime = 0.0;
								point[i].y_1_prime = 0.0;
							}
							
							refeFlag = 1;

							p_win = P_track;

							//p_transtart is the start point of the surface mesh
							P_transtart.x = track_win.x;
							P_transtart.y = track_win.y;

							PointSelect = 0;

						}

						//construct the graph(mesh)
						ConstructMesh(depthData, adjlist, P_transtart,track_win.width,track_win.height);

						//calculate shortest path
						vector<int> vertexDist;
						vertexDist.resize(track_win.width*track_win.height);

						ShortestPath(P_extre, adjlist,  vertexDist);

						cvCircle(image, P_extre, 3, CV_RGB(0, 255, 0),1);

						//generate the reference distance for comparison
						if (refeFlag > 0)
						{
							cvCircle(image, p_win, 3, CV_RGB(0, 0, 255), 1);

							int track = abs(P_transtart.x - P_track.x) + track_win.width * abs(P_transtart.y - P_track.y);

							referDistance = vertexDist[track];

							refeFlag = 0;
						}
						
						//samples propagation
						PredictPoint(p_win);

						//get the geodesic distance for each sample and
						//find the sample whose distance is most similar to the reference distance
						float Dist, AbsDist, WinDist, minAbsDist = 10000;
						int number,sum=0,count=0;

						for (int i = 0; i < SAMPLE; i++)
						{
							int t = abs(P_transtart.x - point[i].x) + track_win.width * abs(P_transtart.y - point[i].y);
							if (adjlist[t].v == false) { count++; continue; }

							int refer = abs(point[i].x - P_transtart.x) + track_win.width * abs(point[i].y - P_transtart.y);
							Dist = vertexDist[refer];

							AbsDist = fabs(referDistance - Dist);

							//point[i].SampleWeight = AbsDist;
							//point[i].AccumWeight = sum;
							//sum = sum + AbsDist;

							if (AbsDist < minAbsDist) { minAbsDist = AbsDist; number = i; WinDist = Dist; }

						}

						//for (int i = 0; i < SAMPLE; i++)
						//{
						//	point[i].SampleWeight = point[i].SampleWeight / sum;
						//	point[i].AccumWeight = point[i].AccumWeight / sum;
						//}

						printf("referDist = %f, winDist = %f, discardPoints = %d\n", referDistance, WinDist,count);
						
						p_win_last = p_win;
						p_win.x = point[number].x;
						p_win.y = point[number].y;

						//samples re-location
						float deltaX = p_win.x - p_win_last.x;
						float deltaY = p_win.y - p_win_last.y;

						UpdatePoint(number, deltaX, deltaY);

						cvCircle(image, p_win, 5, CV_RGB(0, 0, 0));
					}



				//	//****************************************************************************************************************************************
			}

			// if still selecting a target, show the RED selected area
			else cvRectangle(image, P_org, P_end, CV_RGB(255, 0, 0), 1);

		}

		imshow("Depth", DepthImage);
		cvShowImage("Color Probabilistic Tracking - Samples", image);
		cvShowImage("Color Probabilistic Tracking - Result", result);

		SafeRelease(pColorFrame);
		SafeRelease(pDepthFrame);
		//SafeRelease(pBodyIndexFrame);

		cv::imshow("Color", colorMat);
		cv::imshow("Depth", DepthImage);
		cv::imshow("CoordinateMapper", coordinateMapperMat);

		//END OF THE TIME POINT
		t = ((double)getTickCount() - t) / getTickFrequency();
		t = 1 / t;

		//cout << "FPS:" << t << "FrameNumber\n" << FrameNumebr<< endl;
		printf("FPS:%f Frame:%d \n\n", t, FrameNumber);


		if (cv::waitKey(30) == VK_ESCAPE) {
			break;
		}
	}

	SafeRelease(pColorSource);
	SafeRelease(pDepthSource);
	//SafeRelease(pBodyIndexSource);

	SafeRelease(pColorReader);
	SafeRelease(pDepthReader);
	//SafeRelease(pBodyIndexReader);

	SafeRelease(pColorDescription);
	SafeRelease(pDepthDescription);
	SafeRelease(pCoordinateMapper);
	if (pSensor) {
		pSensor->Close();
	}
	SafeRelease(pSensor);
	cv::destroyAllWindows();


	cvReleaseImage(&image);
	cvReleaseImage(&result);
	cvReleaseImage(&histimg_ref);
	cvReleaseImage(&histimg);
	cvReleaseImage(&hsv);
	cvReleaseImage(&hue);
	cvReleaseImage(&sat);
	cvDestroyWindow("Color Probabilistic Tracking - Samples");
	cvDestroyWindow("Color Probabilistic Tracking - Result");

	return 0;
}
Example #16
/// <summary>
/// Main processing function
/// </summary>
void CCoordinateMappingBasics::Update()
{
    if (!m_pMultiSourceFrameReader)
    {
        return;
    }

    IMultiSourceFrame* pMultiSourceFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IColorFrame* pColorFrame = NULL;
    IBodyIndexFrame* pBodyIndexFrame = NULL;

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

    if (SUCCEEDED(hr))
    {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
        }

        SafeRelease(pBodyIndexFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyFrameReference* pBodyFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyFrameReference(&pBodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);
        }

		SafeRelease(pBodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
		// Depth
        INT64 nDepthTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;

		// Color
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

		// BodyIndex
        IFrameDescription* pBodyIndexFrameDescription = NULL;
        int nBodyIndexWidth = 0;
        int nBodyIndexHeight = 0;
        UINT nBodyIndexBufferSize = 0;
        BYTE *pBodyIndexBuffer = NULL;

		// Body
		IBody* ppBodies[BODY_COUNT] = { 0 };

        // get depth frame data
        hr = pDepthFrame->get_RelativeTime(&nDepthTime);

		// Depth
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);            
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Width(&nColorWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Height(&nColorHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        // get body index frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
        }

		// get body frame data

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

        if (SUCCEEDED(hr))
        {
            ProcessFrame(nDepthTime, pDepthBuffer, nDepthWidth, nDepthHeight, 
                pColorBuffer, nColorWidth, nColorHeight,
				pBodyIndexBuffer, nBodyIndexWidth, nBodyIndexHeight, BODY_COUNT, ppBodies);
        }

        SafeRelease(pDepthFrameDescription);
        SafeRelease(pColorFrameDescription);
        SafeRelease(pBodyIndexFrameDescription);

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

    SafeRelease(pDepthFrame);
    SafeRelease(pColorFrame);
    SafeRelease(pBodyIndexFrame);
    SafeRelease(pBodyFrame);
    SafeRelease(pMultiSourceFrame);
}
Example #17
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource( &pDepthSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader( &pDepthReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pDepthSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 512
	pDescription->get_Height( &height ); // 424
	unsigned int bufferSize = width * height * sizeof( unsigned short );

	cv::Mat bufferMat( height, width, CV_16UC1 );
	cv::Mat depthMat( height, width, CV_8UC1 );
	cv::namedWindow( "Depth" );

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	// Create Reconstruction
	NUI_FUSION_RECONSTRUCTION_PARAMETERS reconstructionParameter;
	reconstructionParameter.voxelsPerMeter = 256;
	reconstructionParameter.voxelCountX = 512;
	reconstructionParameter.voxelCountY = 384;
	reconstructionParameter.voxelCountZ = 512;

	Matrix4 worldToCameraTransform;
	SetIdentityMatrix( worldToCameraTransform );
	INuiFusionReconstruction* pReconstruction;
	hResult = NuiFusionCreateReconstruction( &reconstructionParameter, NUI_FUSION_RECONSTRUCTION_PROCESSOR_TYPE_AMP, -1, &worldToCameraTransform, &pReconstruction );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateReconstruction()" << std::endl;
		return -1;
	}

	// Create Image Frame
	// Depth
	NUI_FUSION_IMAGE_FRAME* pDepthFloatImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_FLOAT, width, height, nullptr, &pDepthFloatImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( FLOAT )" << std::endl;
		return -1;
	}

	// SmoothDepth
	NUI_FUSION_IMAGE_FRAME* pSmoothDepthFloatImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_FLOAT, width, height, nullptr, &pSmoothDepthFloatImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( FLOAT )" << std::endl;
		return -1;
	}

	// Point Cloud
	NUI_FUSION_IMAGE_FRAME* pPointCloudImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_POINT_CLOUD, width, height, nullptr, &pPointCloudImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( POINT_CLOUD )" << std::endl;
		return -1;
	}

	// Surface
	NUI_FUSION_IMAGE_FRAME* pSurfaceImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_COLOR, width, height, nullptr, &pSurfaceImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( COLOR )" << std::endl;
		return -1;
	}

	// Normal
	NUI_FUSION_IMAGE_FRAME* pNormalImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_COLOR, width, height, nullptr, &pNormalImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( COLOR )" << std::endl;
		return -1;
	}

	cv::namedWindow( "Surface" );
	cv::namedWindow( "Normal" );

	while( 1 ){
		// Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );

		if( SUCCEEDED( hResult ) ){
			hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, reinterpret_cast<UINT16**>( &bufferMat.data ) );
			if( SUCCEEDED( hResult ) ){
				bufferMat.convertTo( depthMat, CV_8U, -255.0f / 8000.0f, 255.0f );
				hResult = pReconstruction->DepthToDepthFloatFrame( reinterpret_cast<UINT16*>( bufferMat.data ), width * height * sizeof( UINT16 ), pDepthFloatImageFrame, NUI_FUSION_DEFAULT_MINIMUM_DEPTH/* 0.5[m] */, NUI_FUSION_DEFAULT_MAXIMUM_DEPTH/* 8.0[m] */, true );
				if( FAILED( hResult ) ){
					std::cerr << "Error :INuiFusionReconstruction::DepthToDepthFloatFrame()" << std::endl;
					return -1;
				}
			}
		}
		SafeRelease( pDepthFrame );

		// Smoothing the Depth Image Frame
		hResult = pReconstruction->SmoothDepthFloatFrame( pDepthFloatImageFrame, pSmoothDepthFloatImageFrame, 1, 0.04f );
		if( FAILED( hResult ) ){
			std::cerr << "Error :INuiFusionReconstruction::SmoothDepthFloatFrame" << std::endl;
			return -1;
		}

		// Reconstruction Process
		pReconstruction->GetCurrentWorldToCameraTransform( &worldToCameraTransform );
		hResult = pReconstruction->ProcessFrame( pSmoothDepthFloatImageFrame, NUI_FUSION_DEFAULT_ALIGN_ITERATION_COUNT, NUI_FUSION_DEFAULT_INTEGRATION_WEIGHT, nullptr, &worldToCameraTransform );
		if( FAILED( hResult ) ){
			static int errorCount = 0;
			errorCount++;
			if( errorCount >= 100 ) {
				errorCount = 0;
				ResetReconstruction( pReconstruction, &worldToCameraTransform );
			}
		}

		// Calculate Point Cloud
		hResult = pReconstruction->CalculatePointCloud( pPointCloudImageFrame, &worldToCameraTransform );
		if( FAILED( hResult ) ){
			std::cerr << "Error : CalculatePointCloud" << std::endl;
			return -1;
		}

		// Shading Point Cloud
		// The diagonal terms rescale world coordinates by the volume extent so that
		// positions inside the reconstruction volume land in [0,1], which the shading
		// step interprets as BGR; the 0.5 offsets recenter the volume on the origin.
		Matrix4 worldToBGRTransform = { 0.0f };
		worldToBGRTransform.M11 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountX;
		worldToBGRTransform.M22 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountY;
		worldToBGRTransform.M33 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountZ;
		worldToBGRTransform.M41 = 0.5f;
		worldToBGRTransform.M42 = 0.5f;
		worldToBGRTransform.M43 = 0.0f;
		worldToBGRTransform.M44 = 1.0f;

		hResult = NuiFusionShadePointCloud( pPointCloudImageFrame, &worldToCameraTransform, &worldToBGRTransform, pSurfaceImageFrame, pNormalImageFrame );
		if( FAILED( hResult ) ){
			std::cerr << "Error : NuiFusionShadePointCloud" << std::endl;
			return -1;
		}

		cv::Mat surfaceMat( height, width, CV_8UC4, pSurfaceImageFrame->pFrameBuffer->pBits );
		cv::Mat normalMat( height, width, CV_8UC4, pNormalImageFrame->pFrameBuffer->pBits );

		cv::imshow( "Depth", depthMat );
		cv::imshow( "Surface", surfaceMat );
		cv::imshow( "Normal", normalMat );

		int key = cv::waitKey( 30 );
		if( key == VK_ESCAPE ){
			break;
		}
		else if( key == 'r' ){
			ResetReconstruction( pReconstruction, &worldToCameraTransform );
		}
	}

	SafeRelease( pDepthSource );
	SafeRelease( pDepthReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	SafeRelease( pReconstruction );
	NuiFusionReleaseImageFrame( pDepthFloatImageFrame );
	NuiFusionReleaseImageFrame( pSmoothDepthFloatImageFrame );
	NuiFusionReleaseImageFrame( pPointCloudImageFrame );
	NuiFusionReleaseImageFrame( pSurfaceImageFrame );
	NuiFusionReleaseImageFrame( pNormalImageFrame );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
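// The loop above calls a ResetReconstruction() helper that is not part of this
// excerpt. A minimal sketch of what such a helper typically does, assuming the
// standard INuiFusionReconstruction::ResetReconstruction() API; the original
// author's version may differ:
void ResetReconstruction( INuiFusionReconstruction* pReconstruction, Matrix4* pWorldToCameraTransform )
{
	if( pReconstruction == nullptr || pWorldToCameraTransform == nullptr ){
		return;
	}

	// Restart tracking from the identity camera pose
	Matrix4 identity = { 0.0f };
	identity.M11 = identity.M22 = identity.M33 = identity.M44 = 1.0f;
	*pWorldToCameraTransform = identity;

	// Passing nullptr keeps the current world-to-volume transform
	HRESULT hResult = pReconstruction->ResetReconstruction( pWorldToCameraTransform, nullptr );
	if( FAILED( hResult ) ){
		std::cerr << "Error : INuiFusionReconstruction::ResetReconstruction()" << std::endl;
	}
}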
Ejemplo n.º 18
/// Main processing function
void CBodyBasics::Update()
{
	clear = true;
	distance = -1;
	for ( int i = 0; i < BODY_COUNT; i ++ )
	{
		bodyXY[i][0] = bodyXY[i][1] = -1;
		position[i][0] = position[i][1] = -1;
		angle[i] = -1;
	}
	// Clear skeletonImg at the start of every update
	skeletonImg.setTo(0);

	// If the Kinect has been lost, do not continue
	if (!m_pBodyFrameReader)
	{
		return;
	}

	IBodyFrame* pBodyFrame = NULL; // skeleton data
	IDepthFrame* pDepthFrame = NULL; // depth data
	IBodyIndexFrame* pBodyIndexFrame = NULL; // body index (foreground mask) image

	// Records whether each step succeeded
	HRESULT hr = S_OK;

	//--------------------------------------- Get and display the body index image ---------------------------------
	if (SUCCEEDED(hr)){
		hr = m_pBodyIndexFrameReader->AcquireLatestFrame(&pBodyIndexFrame); // acquire the body index frame
	}
	if (SUCCEEDED(hr)){
		BYTE *bodyIndexArray = new BYTE[cDepthHeight * cDepthWidth]; // body index pixels are 8-bit: person pixels are dark (body index values), background is white (255)
		pBodyIndexFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, bodyIndexArray);

		// Draw the body index image into the Mat
		uchar* skeletonData = (uchar*)skeletonImg.data;
		for (int j = 0; j < cDepthHeight * cDepthWidth; ++j){
			*skeletonData = bodyIndexArray[j]; ++skeletonData;
			*skeletonData = bodyIndexArray[j]; ++skeletonData;
			*skeletonData = bodyIndexArray[j]; ++skeletonData;
		}
		delete[] bodyIndexArray;
	}
	SafeRelease(pBodyIndexFrame); // must release, or no new frames can be acquired
	//----------------------------- Get and display the skeleton ----------------------------
	if (SUCCEEDED(hr)){
		hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame); // acquire the body frame
	}
	if (SUCCEEDED(hr))
	{
		IBody* ppBodies[BODY_COUNT] = { 0 }; // each IBody tracks one person; up to six can be tracked in total

		if (SUCCEEDED(hr))
		{
			// Store the data of each person the Kinect tracks into its own IBody
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

		if (SUCCEEDED(hr))
		{
			// For each IBody, extract its skeleton data and draw it
			ProcessBody(BODY_COUNT, ppBodies);
		}

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]); // release every body
		}
	}
	SafeRelease(pBodyFrame); // must release, or no new frames can be acquired
	
	//----------------------- Get and display the depth data --------------------------
	if (SUCCEEDED(hr)){
		hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame); // acquire the depth frame
	}
	if (SUCCEEDED(hr)){
		UINT16 *depthArray = new UINT16[cDepthHeight * cDepthWidth]; // depth samples are 16-bit unsigned
		pDepthFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, depthArray);

		// Draw the depth data into the Mat (keeping only the low byte of each sample)
		uchar* depthData = (uchar*)depthImg.data;
		for (int j = 0; j < cDepthHeight * cDepthWidth; ++j){
			*depthData = depthArray[j];
			++depthData;
		}
		distance = depthArray[cDepthHeight*cDepthWidth/2 + cDepthWidth/2]; // depth at the image center pixel
		for ( int j = 0; j < BODY_COUNT; j ++ )
		{
			if ( -1 == (bodyXY[j][0] | bodyXY[j][1]) ) // skip bodies whose coordinates are still -1 (untracked)
			{
				continue;
			}
			double r = depthArray[cDepthWidth*bodyXY[j][1] + bodyXY[j][0]];
			position[j][0] = r * cos(angle[j]) / 1000.0;
			position[j][1] = r * sin(angle[j]) / 1000.0;
		}
		delete[] depthArray;
	}
	SafeRelease(pDepthFrame); // must release, or no new frames can be acquired
	imshow("depthImg", depthImg);
	cv::waitKey(5);

}
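// Note: the depth loop above keeps only the low byte of each 16-bit sample, so
// the gray level wraps every 256 mm. A minimal alternative sketch that scales
// the range linearly instead (4500 mm is an assumed upper bound, the Kinect v2
// default reliable maximum; the helper name is hypothetical):
void depthToGray(const UINT16* depthArray, cv::Mat& depthImg, int width, int height, UINT16 maxDepth = 4500)
{
	uchar* dst = depthImg.data; // assumes a CV_8UC1 Mat of width x height
	for (int i = 0; i < width * height; ++i)
	{
		const UINT16 d = depthArray[i];
		// Clamp, then rescale so 0..maxDepth maps to 0..255
		dst[i] = static_cast<uchar>(d >= maxDepth ? 255 : (d * 255) / maxDepth);
	}
}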
Ejemplo n.º 19
bool DepthStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool didRead = false;
    if (!m_StreamHandle.depthFrameReader) {
        ofLogWarning("ofxKinect2::DepthStream") << "Stream is not open.";
        return didRead;
    }

    Stream::readFrame(multiFrame);
    IDepthFrame *depthFrame = nullptr;

    HRESULT hr = E_FAIL;
    if (!multiFrame) {
        hr = m_StreamHandle.depthFrameReader->AcquireLatestFrame(&depthFrame);
    }
    else {
        IDepthFrameReference *depthFrameReference = nullptr;
        hr = multiFrame->get_DepthFrameReference(&depthFrameReference);

        if (SUCCEEDED(hr)) {
            hr = depthFrameReference->AcquireFrame(&depthFrame);
        }

        safeRelease(depthFrameReference);
    }

    if (SUCCEEDED(hr)) {
        IFrameDescription *depthFrameDescription = nullptr;

        hr = depthFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);

        if (SUCCEEDED(hr)) {
            hr = depthFrame->get_FrameDescription(&depthFrameDescription);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrameDescription->get_Width(&m_Frame.width);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrameDescription->get_Height(&m_Frame.height);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrameDescription->get_HorizontalFieldOfView(&m_Frame.horizontalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrameDescription->get_VerticalFieldOfView(&m_Frame.verticalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrameDescription->get_DiagonalFieldOfView(&m_Frame.diagonalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrame->get_DepthMinReliableDistance((USHORT *)&m_NearValue);
        }

        if (SUCCEEDED(hr)) {
            hr = depthFrame->get_DepthMaxReliableDistance((USHORT *)&m_FarValue);
        }

        if (SUCCEEDED(hr)) {
            if (m_Frame.dataSize == 0) {
                m_Frame.dataSize = m_Frame.width * m_Frame.height;
                m_Frame.data = new UINT16[m_Frame.width * m_Frame.height];
            }
            hr = depthFrame->CopyFrameDataToArray(m_Frame.width * m_Frame.height, reinterpret_cast<UINT16 *>(m_Frame.data));
        }

        if (SUCCEEDED(hr)) {
            didRead = true;
            setPixels(m_Frame);
        }
        safeRelease(depthFrameDescription);
    }

    safeRelease(depthFrame);

    return didRead;
}
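// The examples in this collection call SafeRelease()/safeRelease() without
// showing it. It is the usual COM helper from the Kinect SDK samples; a
// minimal sketch:
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)
    {
        pInterfaceToRelease->Release(); // drop this COM reference
        pInterfaceToRelease = nullptr;  // prevent accidental reuse
    }
}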
//Hao modified it
void Kinect2Engine::getImages(ITMUChar4Image *rgbImage, ITMShortImage *rawDepthImage)
{
	Vector4u *rgb = rgbImage->GetData(MEMORYDEVICE_CPU);
	if (colorAvailable)
	{
		IColorFrame* pColorFrame = NULL;
		ColorImageFormat imageFormat = ColorImageFormat_None;
		UINT nBufferSize = 0;
		RGBQUAD *c_pBuffer = NULL;

		HRESULT hr = data->colorFrameReader->AcquireLatestFrame(&pColorFrame);

		if (SUCCEEDED(hr))
		{
			if (SUCCEEDED(hr))
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);

			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Bgra)
				{
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&c_pBuffer));
				}
				else if (m_pColorRGBX)
				{
					c_pBuffer = m_pColorRGBX;
					nBufferSize = imageSize_rgb.x * imageSize_rgb.y * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(c_pBuffer), ColorImageFormat_Bgra);
				}
				else
				{
					hr = E_FAIL;
				}
			}

			if (SUCCEEDED(hr) && c_pBuffer)
			{
				for (int i = 0; i < imageSize_rgb.x * imageSize_rgb.y; i++)
				{
					Vector4u newPix; 
					RGBQUAD oldPix = c_pBuffer[i];
					newPix.x = oldPix.rgbRed; 
					newPix.y = oldPix.rgbGreen; 
					newPix.z = oldPix.rgbBlue; 
					newPix.w = 255;
					rgb[i] = newPix;
				}
			}
		}
		SafeRelease(pColorFrame);
	}
	else memset(rgb, 0, rgbImage->dataSize * sizeof(Vector4u));

	short *depth = rawDepthImage->GetData(MEMORYDEVICE_CPU);
	if (depthAvailable)
	{
		IDepthFrame* pDepthFrame = NULL;
		UINT16 *d_pBuffer = NULL;
		UINT nBufferSize = 0;

		HRESULT hr = data->depthFrameReader->AcquireLatestFrame(&pDepthFrame);

		if (SUCCEEDED(hr))
		{
			if (SUCCEEDED(hr))
				hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &d_pBuffer);

			if (SUCCEEDED(hr) && d_pBuffer)
			{
				for (int i = 0; i < imageSize_d.x * imageSize_d.y; i++)
				{
					ushort depthPix = d_pBuffer[i];
					depth[i] = (depthPix >= nDepthMinReliableDistance) && (depthPix <= nDepthMaxDistance) ? (short)depthPix : -1;
				}
			}
		}

		SafeRelease(pDepthFrame);
	}
	else memset(depth, 0, rawDepthImage->dataSize * sizeof(short));

	//out->inputImageType = ITMView::InfiniTAM_FLOAT_DEPTH_IMAGE;

	return /*true*/;
}
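// The commented-out line above hints at InfiniTAM's float-depth input path. A
// minimal sketch of the millimetre-to-metre conversion that path would need;
// the helper name is hypothetical, and the -1 invalid marker follows the loop
// above (this is not InfiniTAM API):
void rawDepthToFloatMetres(const short* rawDepth, float* floatDepth, int pixelCount)
{
	for (int i = 0; i < pixelCount; ++i)
	{
		// Keep -1 (pixels outside the reliable range) as an invalid marker
		floatDepth[i] = (rawDepth[i] < 0) ? -1.0f : rawDepth[i] / 1000.0f;
	}
}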
Ejemplo n.º 21
void Device::update()
{
	if ( mSensor != 0 ) {
		mSensor->get_Status( &mStatus );
	}

	if ( mFrameReader == 0 ) {
		return;
	}

	IAudioBeamFrame* audioFrame								= 0;
	IBodyFrame* bodyFrame									= 0;
	IBodyIndexFrame* bodyIndexFrame							= 0;
	IColorFrame* colorFrame									= 0;
	IDepthFrame* depthFrame									= 0;
	IMultiSourceFrame* frame								= 0;
	IInfraredFrame* infraredFrame							= 0;
	ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;
	
	HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

	// TODO audio
	if ( SUCCEEDED( hr ) ) {
		console() << "SUCCEEDED " << getElapsedFrames() << endl;
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
		IBodyFrameReference* frameRef = 0;
		hr = frame->get_BodyFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &bodyFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
		IBodyIndexFrameReference* frameRef = 0;
		hr = frame->get_BodyIndexFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &bodyIndexFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
		IColorFrameReference* frameRef = 0;
		hr = frame->get_ColorFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &colorFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
		IDepthFrameReference* frameRef = 0;
		hr = frame->get_DepthFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &depthFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
		IInfraredFrameReference* frameRef = 0;
		hr = frame->get_InfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &infraredFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
		ILongExposureInfraredFrameReference* frameRef = 0;
		hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) {
			hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
		}
		if ( frameRef != 0 ) {
			frameRef->Release();
			frameRef = 0;
		}
	}

	if ( SUCCEEDED( hr ) ) {
		long long time											= 0L;

		// TODO audio

		IFrameDescription* bodyFrameDescription					= 0;
		int32_t bodyWidth										= 0;
		int32_t bodyHeight										= 0;
		uint32_t bodyBufferSize									= 0;
		uint8_t* bodyBuffer										= 0;

		IFrameDescription* bodyIndexFrameDescription			= 0;
		int32_t bodyIndexWidth									= 0;
		int32_t bodyIndexHeight									= 0;
		uint32_t bodyIndexBufferSize							= 0;
		uint8_t* bodyIndexBuffer								= 0;
		
		IFrameDescription* colorFrameDescription				= 0;
		int32_t colorWidth										= 0;
		int32_t colorHeight										= 0;
		ColorImageFormat imageFormat							= ColorImageFormat_None;
		uint32_t colorBufferSize								= 0;
		uint8_t* colorBuffer									= 0;

		IFrameDescription* depthFrameDescription				= 0;
		int32_t depthWidth										= 0;
		int32_t depthHeight										= 0;
		uint16_t depthMinReliableDistance						= 0;
		uint16_t depthMaxReliableDistance						= 0;
		uint32_t depthBufferSize								= 0;
		uint16_t* depthBuffer									= 0;

		IFrameDescription* infraredFrameDescription				= 0;
		int32_t infraredWidth									= 0;
		int32_t infraredHeight									= 0;
		uint32_t infraredBufferSize								= 0;
		uint16_t* infraredBuffer								= 0;

		IFrameDescription* infraredLongExposureFrameDescription	= 0;
		int32_t infraredLongExposureWidth						= 0;
		int32_t infraredLongExposureHeight						= 0;
		uint32_t infraredLongExposureBufferSize					= 0;
		uint16_t* infraredLongExposureBuffer					= 0;

		if ( depthFrame != 0 ) { // depthFrame may be null when the depth stream is disabled
			hr = depthFrame->get_RelativeTime( &time );
		}

		// TODO audio
		if ( mDeviceOptions.isAudioEnabled() ) {

		}

		// TODO body
		if ( mDeviceOptions.isBodyEnabled() ) {

		}

		if ( mDeviceOptions.isBodyIndexEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
			}
			if ( SUCCEEDED( hr ) ) {
 				//hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
			}
		}

		if ( mDeviceOptions.isColorEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrame->get_FrameDescription( &colorFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrameDescription->get_Width( &colorWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrameDescription->get_Height( &colorHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrame->get_RawColorImageFormat( &imageFormat );
			}
			if ( SUCCEEDED( hr ) ) {
				bool isAllocated	= false;
				SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
				if ( imageFormat == ColorImageFormat_Bgra ) {
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::BGRA;
				} else if ( imageFormat == ColorImageFormat_Rgba ) {
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::RGBA;
				} else {
					isAllocated = true;
					colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
					colorBuffer = new uint8_t[ colorBufferSize ];
					hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
					channelOrder = SurfaceChannelOrder::RGBA;
				}

				if ( SUCCEEDED( hr ) ) {
					colorFrame->get_RelativeTime( &time );
					Surface8u colorSurface = Surface8u( colorBuffer, colorWidth, colorHeight, colorWidth * sizeof( uint8_t ) * 4, channelOrder );
					mFrame.mSurfaceColor = Surface8u( colorWidth, colorHeight, false, channelOrder );
					mFrame.mSurfaceColor.copyFrom( colorSurface, colorSurface.getBounds() );

					console() << "Color\n\twidth: " << colorWidth << "\n\theight: " << colorHeight 
						<< "\n\tbuffer size: " << colorBufferSize << "\n\ttime: " << time << endl;
				}

				if ( isAllocated && colorBuffer != 0 ) {
					delete[] colorBuffer;
					colorBuffer = 0;
				}
			}
		}

		if ( mDeviceOptions.isDepthEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_FrameDescription( &depthFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrameDescription->get_Width( &depthWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrameDescription->get_Height( &depthHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u depthChannel = Channel16u( depthWidth, depthHeight, depthWidth * sizeof( uint16_t ), 1, depthBuffer );
				mFrame.mChannelDepth = Channel16u( depthWidth, depthHeight );
				mFrame.mChannelDepth.copyFrom( depthChannel, depthChannel.getBounds() );

				console( ) << "Depth\n\twidth: " << depthWidth << "\n\theight: " << depthHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrameDescription->get_Width( &infraredWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrameDescription->get_Height( &infraredHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredChannel = Channel16u( infraredWidth, infraredHeight, infraredWidth * sizeof( uint16_t ), 1, infraredBuffer );
				mFrame.mChannelInfrared = Channel16u( infraredWidth, infraredHeight );
				mFrame.mChannelInfrared.copyFrom( infraredChannel, infraredChannel.getBounds() );

				console( ) << "Infrared\n\twidth: " << infraredWidth << "\n\theight: " << infraredHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
			}
			if ( SUCCEEDED( hr ) ) {
				hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
			}
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight, infraredLongExposureWidth * sizeof( uint16_t ), 1, infraredLongExposureBuffer );
				mFrame.mChannelInfraredLongExposure = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
				mFrame.mChannelInfraredLongExposure.copyFrom( infraredLongExposureChannel, infraredLongExposureChannel.getBounds() );

				int64_t irLongExpTime = 0;
				hr = infraredLongExposureFrame->get_RelativeTime( &irLongExpTime );

				console( ) << "Infrared Long Exposure\n\twidth: " << infraredLongExposureWidth << "\n\theight: " << infraredLongExposureHeight;
				if ( SUCCEEDED( hr ) ) {
					console() << "\n\ttimestamp: " << irLongExpTime;
				}
				console() << endl;
			}
		}

		if ( SUCCEEDED( hr ) ) {
			// TODO build Kinect2::Frame from buffers, data
			mFrame.mTimeStamp = time;
		}

		if ( bodyFrameDescription != 0 ) {
			bodyFrameDescription->Release();
			bodyFrameDescription = 0;
		}
		if ( bodyIndexFrameDescription != 0 ) {
			bodyIndexFrameDescription->Release();
			bodyIndexFrameDescription = 0;
		}
		if ( colorFrameDescription != 0 ) {
			colorFrameDescription->Release();
			colorFrameDescription = 0;
		}
		if ( depthFrameDescription != 0 ) {
			depthFrameDescription->Release();
			depthFrameDescription = 0;
		}
		if ( infraredFrameDescription != 0 ) {
			infraredFrameDescription->Release();
			infraredFrameDescription = 0;
		}
		if ( infraredLongExposureFrameDescription != 0 ) {
			infraredLongExposureFrameDescription->Release();
			infraredLongExposureFrameDescription = 0;
		}
	}

	if ( audioFrame != 0 ) {
		audioFrame->Release();
		audioFrame = 0;
	}
	if ( bodyFrame != 0 ) {
		bodyFrame->Release();
		bodyFrame = 0;
	}
	if ( bodyIndexFrame != 0 ) {
		bodyIndexFrame->Release();
		bodyIndexFrame = 0;
	}
	if ( colorFrame != 0 ) {
		colorFrame->Release();
		colorFrame = 0;
	}
	if ( depthFrame != 0 ) {
		depthFrame->Release();
		depthFrame = 0;
	}
	if ( frame != 0 ) {
		frame->Release();
		frame = 0;
	}
	if ( infraredFrame != 0 ) {
		infraredFrame->Release();
		infraredFrame = 0;
	}
	if ( infraredLongExposureFrame != 0 ) {
		infraredLongExposureFrame->Release();
		infraredLongExposureFrame = 0;
	}
}
Ejemplo n.º 22
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource( &pDepthSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader( &pDepthReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pDepthSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 512
	pDescription->get_Height( &height ); // 424
	unsigned int bufferSize = width * height * sizeof( unsigned short );

	cv::Mat bufferMat( height, width, CV_16UC1 ); // depth samples are unsigned 16-bit
	cv::Mat depthMat( height, width, CV_8UC1 );
	cv::namedWindow( "Depth" );

	while( 1 ){
		// Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
		if( SUCCEEDED( hResult ) ){
			// AccessUnderlyingBuffer exposes the frame's internal buffer (no copy) and
			// overwrites bufferSize with the element count (width * height), not bytes
			hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, reinterpret_cast<UINT16**>( &bufferMat.data ) );
			if( SUCCEEDED( hResult ) ){
				// Map 0..4500 mm to 255..0 so near objects appear bright
				bufferMat.convertTo( depthMat, CV_8U, -255.0f / 4500.0f, 255.0f );
			}
		}
	
		SafeRelease( pDepthFrame );

		cv::imshow( "Depth", depthMat );

		if( cv::waitKey( 30 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pDepthSource );
	SafeRelease( pDepthReader );
	SafeRelease( pDescription );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
void* Kinect2StreamImpl::populateFrameBuffer(int& buffWidth, int& buffHeight)
{
  buffWidth = 0;
  buffHeight = 0;

  if (m_sensorType == ONI_SENSOR_COLOR) {
    if (m_pFrameReader.color && m_pFrameBuffer.color) {
      buffWidth = 1920;
      buffHeight = 1080;

      IColorFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.color->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        ColorImageFormat imageFormat = ColorImageFormat_None;
        hr = frame->get_RawColorImageFormat(&imageFormat);
        if (SUCCEEDED(hr)) {
          if (imageFormat == ColorImageFormat_Bgra) {
            RGBQUAD* data;
            UINT bufferSize;
            frame->AccessRawUnderlyingBuffer(&bufferSize, reinterpret_cast<BYTE**>(&data));
            memcpy(m_pFrameBuffer.color, data, 1920*1080*sizeof(RGBQUAD));
          }
          else {
            frame->CopyConvertedFrameDataToArray(1920*1080*sizeof(RGBQUAD), reinterpret_cast<BYTE*>(m_pFrameBuffer.color), ColorImageFormat_Bgra);
          }
        }
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.color);
    }
  }
  else if (m_sensorType == ONI_SENSOR_DEPTH) {
    if (m_pFrameReader.depth && m_pFrameBuffer.depth) {
      buffWidth = 512;
      buffHeight = 424;

      IDepthFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.depth->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        UINT16* data;
        UINT bufferSize;
        frame->AccessUnderlyingBuffer(&bufferSize, &data);
        memcpy(m_pFrameBuffer.depth, data, 512*424*sizeof(UINT16));
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.depth);
    }
  }
  else { // ONI_SENSOR_IR
    if (m_pFrameReader.infrared && m_pFrameBuffer.infrared) {
      buffWidth = 512;
      buffHeight = 424;

      IInfraredFrame* frame = NULL;
      HRESULT hr = m_pFrameReader.infrared->AcquireLatestFrame(&frame);
      if (SUCCEEDED(hr)) {
        UINT16* data;
        UINT bufferSize;
        frame->AccessUnderlyingBuffer(&bufferSize, &data);
        memcpy(m_pFrameBuffer.infrared, data, 512*424*sizeof(UINT16));
      }
      if (frame) {
        frame->Release();
      }

      return reinterpret_cast<void*>(m_pFrameBuffer.infrared);
    }
  }

  return NULL;
}
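// The fixed sizes above match the Kinect v2 streams: color is 1920x1080 BGRA,
// depth and IR are 512x424. A sketch of the buffer bundle the code appears to
// assume (member names mirror the accesses above; the real struct is not shown):
struct FrameBuffers {
  RGBQUAD* color;    // 1920 * 1080 BGRA pixels
  UINT16*  depth;    // 512 * 424 depth samples, in millimetres
  UINT16*  infrared; // 512 * 424 IR intensities
};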
int TextureManager::CreateSensorTexture(char *errorString, const char *name) {
	if (!depth_frame_reader_) {
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"No depth sensor exists for texture creation");
		return -1;
	}

	glGenTextures(1, textureID + numTextures);
	strcpy_s(textureName[numTextures], TM_MAX_FILENAME_LENGTH, name);
	glBindTexture(GL_TEXTURE_2D, textureID[numTextures]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	//gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGBA,
	//	TM_NOISE_TEXTURE_SIZE, TM_NOISE_TEXTURE_SIZE,
	//	GL_BGRA, GL_UNSIGNED_BYTE, noiseIntData);

	IDepthFrame* pDepthFrame = NULL;
	HRESULT hr;

	bool hasSucceeded = false;
	for (int tries = 0; tries < 20 && !hasSucceeded; tries++) {
		Sleep(100);
		hr = depth_frame_reader_->AcquireLatestFrame(&pDepthFrame);
		if (SUCCEEDED(hr)) hasSucceeded = true;
	}
	if (!hasSucceeded) {
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"Could not acquire last depth sensor frame");
		return -1;
	}

	pDepthFrame->get_RelativeTime(&last_frame_time_);

	IFrameDescription* pFrameDescription = NULL;
	int nWidth = 0;
	int nHeight = 0;

	hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
	if (FAILED(hr)) {
		pDepthFrame->Release();
		sprintf_s(errorString, MAX_ERROR_LENGTH,
			"Could not get Depth Frame description");
		return -1;
	}

	pFrameDescription->get_Width(&nWidth);
	pFrameDescription->get_Height(&nHeight);
    depth_sensor_width_ = nWidth;
    depth_sensor_height_ = nHeight;

	if (cpu_depth_sensor_buffer_) delete[] cpu_depth_sensor_buffer_;
	cpu_depth_sensor_buffer_ = new float[nWidth * nHeight];
	memset(cpu_depth_sensor_buffer_, 0, nWidth * nHeight * sizeof(float)); // byte count, not element count
	if (smoothed_depth_sensor_buffer_[0]) {
		delete[] smoothed_depth_sensor_buffer_[0];
		delete[] smoothed_depth_sensor_buffer_[1];
	}
	smoothed_depth_sensor_buffer_[0] = new float[nWidth * nHeight];
	smoothed_depth_sensor_buffer_[1] = new float[nWidth * nHeight];
	memset(smoothed_depth_sensor_buffer_[0], 0,
		   nWidth*nHeight*sizeof(smoothed_depth_sensor_buffer_[0][0]));
	memset(smoothed_depth_sensor_buffer_[1], 0,
		   nWidth*nHeight*sizeof(smoothed_depth_sensor_buffer_[1][0]));

	glTexImage2D(GL_TEXTURE_2D, 0, GL_R32F,
		nWidth, nHeight,
		0, GL_RED, GL_FLOAT, smoothed_depth_sensor_buffer_[0]);

	textureWidth[numTextures] = nWidth;
	textureHeight[numTextures] = nHeight;
	numTextures++;

	pFrameDescription->Release();
	pDepthFrame->Release();

	return 0;
}
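// CreateSensorTexture() allocates two smoothed depth planes, but the per-frame
// update is not part of this excerpt. A plausible sketch of ping-pong
// exponential smoothing between them (the helper name and alpha are assumed):
void smoothDepth(const float* current, const float* previous, float* out, int pixelCount, float alpha = 0.2f)
{
	for (int i = 0; i < pixelCount; ++i)
	{
		// Blend the new frame with the previous smoothed result
		out[i] = alpha * current[i] + (1.0f - alpha) * previous[i];
	}
}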
Ejemplo n.º 25
  bool KinectV2Controller::UpdateCamera()
  {
    if(InitializeMultiFrameReader())
    {

      IMultiSourceFrame* pMultiSourceFrame = NULL;
      IDepthFrame* pDepthFrame = NULL;
      IColorFrame* pColorFrame = NULL;
      IInfraredFrame* pInfraRedFrame = NULL;

      HRESULT hr = -1;

      static DWORD lastTime = 0;

      DWORD currentTime = GetTickCount();

      //Check if we do not request data faster than 30 FPS. Kinect V2 can only deliver 30 FPS.
      if( unsigned int(currentTime - lastTime) > 33 )
      {
        hr = d->m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
        lastTime = currentTime;
      }

      if (SUCCEEDED(hr))
      {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        IInfraredFrameReference* pInfraredFrameReference = NULL;

        hr = pMultiSourceFrame->get_InfraredFrameReference(&pInfraredFrameReference);
        if (SUCCEEDED(hr))
        {
          hr = pInfraredFrameReference->AcquireFrame(&pInfraRedFrame);
        }

        SafeRelease(pInfraredFrameReference);
      }

      if (SUCCEEDED(hr))
      {
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;
        UINT16 *pInfraRedBuffer = NULL;

        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

        if (SUCCEEDED(hr))
        {
          hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
        }
        if (SUCCEEDED(hr))
        {
          hr = pInfraRedFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pInfraRedBuffer);
        }
        if (SUCCEEDED(hr))
        {
          for(int i = 0; i < d->m_DepthCaptureHeight*d->m_DepthCaptureWidth; ++i)
          {
            d->m_Distances[i] = static_cast<float>(*pDepthBuffer);
            d->m_Amplitudes[i] = static_cast<float>(*pInfraRedBuffer);
            ++pDepthBuffer;
            ++pInfraRedBuffer;
          }
        }
        else
        {
          MITK_ERROR << "AccessUnderlyingBuffer";
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
          hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
          if (imageFormat == ColorImageFormat_Bgra)
          {
            hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
          }
          else if (d->m_pColorRGBX)
          {
            pColorBuffer = d->m_pColorRGBX;
            nColorBufferSize = d->m_RGBCaptureWidth * d->m_RGBCaptureHeight * sizeof(RGBQUAD);
            hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
          }
          else
          {
            hr = E_FAIL;
          }
          if (SUCCEEDED(hr))
          {
            for(int i = 0; i < d->m_RGBBufferSize; i+=3)
            {
              //convert from BGR to RGB
              d->m_Colors[i+0] = pColorBuffer->rgbRed;
              d->m_Colors[i+1] = pColorBuffer->rgbGreen;
              d->m_Colors[i+2] = pColorBuffer->rgbBlue;
              ++pColorBuffer;
            }
          }
        }
      }

      SafeRelease(pDepthFrame);
      SafeRelease(pColorFrame);
      SafeRelease(pInfraRedFrame);
      SafeRelease(pMultiSourceFrame);

      if( hr != -1 && !SUCCEEDED(hr) )
      {
        //The thread gets here, if the data is requested faster than the device can deliver it.
        //This may happen from time to time.
        return false;
      }

      return true;
    }
    MITK_ERROR << "Unable to initialize MultiFrameReader";
    return false;
  }
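// The 33 ms gate above caps polling at the sensor's 30 FPS limit. The same idea
// as a reusable helper (a sketch; GetTickCount64 avoids the ~49-day wraparound
// of GetTickCount):
bool shouldPollKinect()
{
  static ULONGLONG lastTime = 0;
  const ULONGLONG now = GetTickCount64();
  if (now - lastTime > 33) // ~30 FPS
  {
    lastTime = now;
    return true;
  }
  return false;
}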
Ejemplo n.º 26
HRESULT KinectHandler::GetColorAndDepth(RGBQUAD* &color, RGBQUAD* &depth, UINT16*& depthBuffer)
{
	if (!m_pMultiFrameReader)
	{
		cout << "No frame reader!" << endl;
		return E_FAIL;
	}

	IColorFrame* pColorFrame = NULL;
	IDepthFrame* pDepthFrame = NULL;
	IMultiSourceFrame* pMultiSourceFrame = NULL;
	HRESULT hr = m_pMultiFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

	if (SUCCEEDED(hr))
	{
		IColorFrameReference* pColorFrameReference = NULL;
		hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);

		if (SUCCEEDED(hr))
		{
			hr = pColorFrameReference->AcquireFrame(&pColorFrame);
		}

		IDepthFrameReference* pDepthFrameReference = NULL;
		hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
		}

		SafeRelease(pColorFrameReference);
		SafeRelease(pDepthFrameReference);
	}

	if (SUCCEEDED(hr) && pColorFrame != NULL && pDepthFrame != NULL)
	{
		INT64 nTime = 0;
		IFrameDescription* pColorFrameDescription = NULL;
		int nColorWidth = 0;
		int nColorHeight = 0;
		ColorImageFormat imageFormat = ColorImageFormat_None;
		UINT nColorBufferSize = 0;
		RGBQUAD *pColorBuffer = NULL;

		hr = pColorFrame->get_RelativeTime(&nTime);

		if (SUCCEEDED(hr))
		{
			hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pColorFrameDescription->get_Width(&nColorWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pColorFrameDescription->get_Height(&nColorHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
		}
		if (SUCCEEDED(hr))
		{
			if (imageFormat == ColorImageFormat_Bgra)
			{
				hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
			}
			else if (m_pColorRGBX)
			{
				pColorBuffer = m_pColorRGBX;
				nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
				hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
			}
			else
			{
				cout << "FAILED" << endl;
				hr = E_FAIL;
			}
		}

		if (SUCCEEDED(hr))
		{
			color = pColorBuffer;
		}


		///===========================================////


		nTime = 0;
		IFrameDescription* pDepthFrameDescription = NULL;
		int nDepthWidth = 0;
		int nDepthHeight = 0;
		USHORT nDepthMinReliableDistance = 0;
		USHORT nDepthMaxDistance = 0;
		UINT nDepthBufferSize = 0;
		UINT16 *pDepthBuffer = NULL;

		hr = pDepthFrame->get_RelativeTime(&nTime);

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Width(&nDepthWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Height(&nDepthHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			// In order to see the full range of depth (including the less reliable far field depth)
			// we are setting nDepthMaxDistance to the extreme potential depth threshold
			nDepthMaxDistance = USHRT_MAX;

			// Note:  If you wish to filter by reliable depth distance, uncomment the following line.
			//// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
		}
		if (SUCCEEDED(hr))
		{
			// pBufferEnd points one past the last pixel (start + width*height)
			const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
			RGBQUAD* auxiliar = m_pDepthRGBX;
			int counter = 0;

			while (pDepthBuffer < pBufferEnd)
			{
				// depthValue avoids shadowing the RGBQUAD*& depth parameter
				USHORT depthValue = *pDepthBuffer;

				// To convert to a byte, we're discarding the most-significant
				// rather than least-significant bits.
				// We're preserving detail, although the intensity will "wrap."
				// Values outside the reliable depth range are mapped to 0 (black).

				// Note: Using conditionals in this loop could degrade performance.
				// Consider using a lookup table instead when writing production code.
				//BYTE intensity = static_cast<BYTE>((depthValue >= nDepthMinReliableDistance) && (depthValue <= nDepthMaxDistance) ? (depthValue % 256) : 0);
				BYTE intensity = static_cast<BYTE>((depthValue >= nDepthMinReliableDistance) && (depthValue <= nDepthMaxDistance) ? ((depthValue - nDepthMinReliableDistance) * (0 - 255) / (nDepthMaxDistance / 50 - nDepthMinReliableDistance) + 255) : 0);
				auxiliar->rgbBlue = intensity;
				auxiliar->rgbGreen = intensity;
				auxiliar->rgbRed = intensity;
				auxiliar->rgbReserved = (BYTE)255;

				counter++;

				++auxiliar;
				++pDepthBuffer;
			}

			depth = m_pDepthRGBX;
		}

		if (m_pDepthRawBuffer)
		{
			hr = pDepthFrame->CopyFrameDataToArray((cDepthWidth * cDepthHeight), m_pDepthRawBuffer);
			if(SUCCEEDED(hr)) depthBuffer = m_pDepthRawBuffer;
		}

		SafeRelease(pDepthFrameDescription);

	}
	else
	{
		cout << "Acquire last frame FAILED " << endl;
		hr = E_FAIL;
		SafeRelease(pColorFrame);
		SafeRelease(pDepthFrame);
		return hr;
	}

	SafeRelease(pColorFrame);
	SafeRelease(pDepthFrame);
	SafeRelease(pMultiSourceFrame);
	return hr;
}
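// The comment inside the loop above suggests a lookup table instead of
// per-pixel conditionals. A sketch, rebuilt whenever the min/max distances
// change, using the commented-out wrapping scheme (depth % 256); the helper
// name is hypothetical:
std::vector<BYTE> buildIntensityLUT(USHORT minDist, USHORT maxDist)
{
	std::vector<BYTE> lut(USHRT_MAX + 1, 0); // out-of-range depths stay 0 (black)
	for (unsigned d = minDist; d <= maxDist; ++d)
	{
		lut[d] = static_cast<BYTE>(d % 256);
	}
	return lut;
}
// Usage inside the pixel loop: BYTE intensity = lut[*pDepthBuffer];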
bool  MyKinect::Process_DepthFrame(IMultiSourceFrame * pMultiSourceFrame, bool *returnbool)
{
	if (!pMultiSourceFrame) {
		*returnbool = false;
		return false;
	}
	 *returnbool = false;
	IDepthFrame * pDepthFrame = NULL;
	IDepthFrameReference *pDepthFrameReference = NULL;
	HRESULT hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);

	if (SUCCEEDED(hr))
	{
		hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
	}
	SafeRelease(pDepthFrameReference);
	/*if (!SUCCEEDED(hr))
	{
		cout << "    depth frame lost " << ++depthLostFrames << endl;
	}*/
	

	if (SUCCEEDED(hr))
	{
		IFrameDescription * pFrameDescription = NULL;
		int nWidth = 0;
		int nHeight = 0;
	//	USHORT nDepthMinReliableDistance = 0;
		//USHORT nDepthMaxDistance = 0;
		UINT nBufferSize = 0;
		UINT16 *pBuffer = NULL;
		
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Width(&nWidth);
		}
		if (SUCCEEDED(hr))
		{
			hr = pFrameDescription->get_Height(&nHeight);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMinReliableDistance(&cDepthMinReliableDistance);
		}
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_DepthMaxReliableDistance(&cDepthMaxDistance);
		}

		if (SUCCEEDED(hr))
		{ // this exposes the frame's internal buffer pointer; nothing is copied
			hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer); // the size is in USHORT elements, while memcpy counts bytes
		}

		if (SUCCEEDED(hr))
		{
			//int tempsize = cDepthHeight*cDepthWidth *sizeof(USHORT);
			memcpy(m_pDepthBuffer, pBuffer, nBufferSize*sizeof(USHORT));

			*returnbool = true;
			//ProcessDepth(pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxDistance);
		}

		SafeRelease(pFrameDescription); // both the description and the frame must be released
	}
/*	else if (colorDepthFramesynchronization) // the depth frame was lost but the color frame was processed, so the previous frame would be reused
	{ // unused: the previous frame's coordinates were already applied, so this is redundant
	}
	*/
	SafeRelease(pDepthFrame);
	
	return *returnbool;

}
Ejemplo n.º 28
void KinectDevice::listen() {
    if (_listening) throw std::exception("Already listening for new frames");

    _listening = true;
    while (_listening) {
        int idx = WaitForSingleObject((HANDLE) _frameEvent, 100);
        switch (idx) {
        case WAIT_TIMEOUT:
            std::cout << ".";
            continue;
        case WAIT_OBJECT_0:
            IMultiSourceFrameArrivedEventArgs *frameArgs = nullptr;
            IMultiSourceFrameReference *frameRef = nullptr;
            HRESULT hr = _reader->GetMultiSourceFrameArrivedEventData(_frameEvent, &frameArgs);

            if (hr == S_OK) {
                hr = frameArgs->get_FrameReference(&frameRef);
                frameArgs->Release();
            }

            if (hr == S_OK) {
                //if (_lastFrame) _lastFrame->Release();
                hr = frameRef->AcquireFrame(&_lastFrame);
                frameRef->Release();
            }

            if (hr == S_OK) {
                // Store frame data
                //std::cout << "Got a frame YEAH" << std::endl;

                IDepthFrameReference                *depthRef   = nullptr;
                IColorFrameReference                *colorRef   = nullptr;
                IInfraredFrameReference             *irRef      = nullptr;
                ILongExposureInfraredFrameReference *hdirRef    = nullptr;
                IBodyIndexFrameReference            *indexRef   = nullptr;

                IDepthFrame                         *depth      = nullptr;
                IColorFrame                         *color      = nullptr;
                IInfraredFrame                      *ir         = nullptr;
                ILongExposureInfraredFrame          *hdir       = nullptr;
                IBodyIndexFrame                     *index      = nullptr;

                UINT size;       // AccessUnderlyingBuffer expects a UINT* and reports an element count
                uint16_t *buff;
                BYTE *cbuff;

                frameLock.lock();
                if (_streams & Streams::DEPTH_STREAM) {
                    _lastFrame->get_DepthFrameReference(&depthRef);
                    depthRef->AcquireFrame(&depth);
                    
                    if (depth) {
                        depthSwap();
                        depth->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(depthData.get(), buff, size * sizeof(uint16_t));
                        depth->Release();
                    }
                    
                    depthRef->Release();
                }
                if (_streams & Streams::COLOR_STREAM) {
                    _lastFrame->get_ColorFrameReference(&colorRef);
                    colorRef->AcquireFrame(&color);
                    if (color) { // guard against a failed acquire
                        //color->AccessUnderlyingBuffer(&size, &buff);
                        //memcpy(_colorData.get(), buff, size);
                        color->Release();
                    }
                    colorRef->Release();
                }
                if (_streams & Streams::IR_STREAM) {
                    _lastFrame->get_InfraredFrameReference(&irRef);
                    irRef->AcquireFrame(&ir);
                    if (ir) { // guard against a failed acquire
                        ir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(irData.get(), buff, size * sizeof(uint16_t)); // size is an element count
                        ir->Release();
                    }
                    irRef->Release();
                }
                if (_streams & Streams::HDIR_STREAM) {
                    _lastFrame->get_LongExposureInfraredFrameReference(&hdirRef);
                    hdirRef->AcquireFrame(&hdir);
                    if (hdir) { // guard against a failed acquire
                        hdir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(hdirData.get(), buff, size * sizeof(uint16_t)); // size is an element count
                        hdir->Release();
                    }
                    hdirRef->Release();
                }
                if (_streams & Streams::INDEX_STREAM) {
                    _lastFrame->get_BodyIndexFrameReference(&indexRef); 
                    indexRef->AcquireFrame(&index);
                    if (index) { // guard against a failed acquire
                        index->AccessUnderlyingBuffer(&size, &cbuff);
                        memcpy(indexData.get(), cbuff, size); // BYTE elements: count equals byte size
                        index->Release();
                    }
                    indexRef->Release();
                }

                frameLock.unlock();

                _lastFrame->Release();
            }
        }
    }
}
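// The event loop above assumes the reader was subscribed for frame-arrived
// notifications before listen() runs. A minimal sketch of that setup (the
// helper name is hypothetical; the subscribe call is the standard Kinect v2 API):
HRESULT subscribeFrameEvents(IMultiSourceFrameReader* reader, WAITABLE_HANDLE* frameEvent)
{
    // After this call, WaitForSingleObject on *frameEvent signals each new multi-source frame
    return reader->SubscribeMultiSourceFrameArrived(frameEvent);
}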
Ejemplo n.º 29
	// After calling this, get the depth frame with GetDepth or GetDepthRGBX
	void UpdateDepth(){

		if (!m_pDepthFrameReader)
		{
			return;
		}

		IDepthFrame* pDepthFrame = NULL;

		HRESULT hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);

		if (SUCCEEDED(hr))
		{
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			USHORT nDepthMinReliableDistance = 0;
			USHORT nDepthMaxReliableDistance = 0;
			UINT nBufferSize = 0;
			UINT16 *pBuffer = NULL;

			hr = pDepthFrame->get_RelativeTime(&nTime);

			if (SUCCEEDED(hr))
			{
				hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Width(&nWidth);
			}

			if (SUCCEEDED(hr))
			{
				m_nDepthWidth = nWidth;
				hr = pFrameDescription->get_Height(&nHeight);
			}

			if (SUCCEEDED(hr))
			{
				m_nDepthHeight = nHeight;
				hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
			}

			if (SUCCEEDED(hr))
			{
				hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxReliableDistance);
			}

			if (SUCCEEDED(hr))
			{
				hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);            
			}

			if (SUCCEEDED(hr))
			{

				if(m_bCalculateDepthRGBX)
					ProcessDepth(nTime, pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxReliableDistance);
				else
					ProcessDepthNoRGBX(nTime, pBuffer, nWidth, nHeight, nDepthMinReliableDistance, nDepthMaxReliableDistance);

				if(!m_bColorDepthMapCalculated){
					CalculateColorDepthMap();
				}

				if(m_bMapDepthToColor && m_nColorWidth > 0 && m_nColorHeight > 0 && SUCCEEDED(hr) && m_bColorDepthMapCalculated){
					ProcessDepthToColor(m_pDepth, m_nDepthWidth, m_nDepthHeight, m_pColorDepthMap, m_nColorWidth, m_nColorHeight);
				}
			}

			SafeRelease(pFrameDescription);
		}
		else{
			DumpHR(hr);
		}

		SafeRelease(pDepthFrame);
	}
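// DumpHR() is not shown in this excerpt; a plausible sketch (an assumption)
// that logs the failed HRESULT in hex for debugging:
void DumpHR(HRESULT hr)
{
	std::cerr << "HRESULT failure: 0x" << std::hex << std::setw(8)
	          << std::setfill('0') << static_cast<unsigned long>(hr)
	          << std::dec << std::endl;
}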
Ejemplo n.º 30
int main(int argc, char* argv[])
{


	int similarity = 0;

	string line;
	ifstream myfile ("source_config.txt");
	if (myfile.is_open())
	{
		getline (myfile,line);
		hauteurCamera = stoi(line);
		getline (myfile,line);
		fountainXPosition = stoi(line);
		getline (myfile,line);
		fountainYPosition = stoi(line);
		getline (myfile,line);
		fountainWidth = stoi(line);

		getline (myfile,line);
		blasterWidth = stoi(line);
		getline (myfile,line);
		int numberOfBlaster = stoi(line);

		for(int i = 0;i<numberOfBlaster;i++){
			getline (myfile,line);
			blasterXPosition.push_back(stoi(line));
			getline (myfile,line);
			blasterYPosition.push_back(stoi(line));
		}

		myfile.close();
	}

	else
	{
		cout << "Unable to open file"; 
		exit(-1);
	}

	IKinectSensor* m_pKinectSensor;
	IDepthFrameReader* pDepthReader;
	IDepthFrameSource* pDepthFrameSource = NULL; // Depth image

	IColorFrameReader* pColorReader;
	IColorFrameSource* pColorFrameSource = NULL;


	HRESULT hr;

	hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : GetDefaultKinectSensor failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->Open();
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : Open failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->get_DepthFrameSource( &pDepthFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_DepthFrameSource failed." << endl;
		return false;
	}
	hr = pDepthFrameSource->OpenReader( &pDepthReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}


	IFrameDescription* pDepthDescription;
	hr = pDepthFrameSource->get_FrameDescription( &pDepthDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	w = 0; h = 0;
	pDepthDescription->get_Width( &w ); // 512
	pDepthDescription->get_Height( &h ); // 424
	//unsigned int irBufferSize = w * h * sizeof( unsigned short );


	hr = m_pKinectSensor->get_ColorFrameSource( &pColorFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_ColorFrameSource failed." << endl;
		return false;
	}
	hr = pColorFrameSource->OpenReader( &pColorReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}

	// Description
	IFrameDescription* pColorDescription;
	hr = pColorFrameSource->get_FrameDescription( &pColorDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width( &colorWidth ); // 1920
	pColorDescription->get_Height( &colorHeight ); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof( unsigned char );

	Mat colorBufferMat = Mat::zeros( cvSize(colorWidth,colorHeight), CV_8UC4   );
	//colorMat = Mat::zeros( cvSize(colorWidth/2,colorHeight/2), CV_8UC4   );



	ICoordinateMapper* pCoordinateMapper;
	hr = m_pKinectSensor->get_CoordinateMapper( &pCoordinateMapper );



	//  open a opencv window
	cv::namedWindow("source_config", CV_WINDOW_AUTOSIZE );
	setMouseCallback("source_config", MouseCallBackFunc, NULL);

	Mat frame(h,w, CV_8UC3, Scalar(255,255,255));
	Mat display;
	//Mat img;

	char k = 0;
	while(k!=27){

		HRESULT hResult = S_OK;

		if(displayColor){
			IColorFrame* pColorFrame = nullptr;
			hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(50);
				hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
			}

			if( SUCCEEDED( hResult ) ){
				hResult = pColorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<BYTE*>( colorBufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
				if( !SUCCEEDED( hResult ) ){
					return false;
				}

				resize(colorBufferMat,display,Size(displaySize*w,displaySize*h));

				flip(display,display,1);

				cv::line(display,Point(displaySize*w/2,0),Point(displaySize*w/2,displaySize*h),Scalar(0,0,255),2);
				cv::line(display,Point(0,displaySize*h/2),Point(displaySize*w,displaySize*h/2),Scalar(0,0,255),2);


				if (pColorFrame )
				{
					pColorFrame ->Release();
					pColorFrame  = NULL;
				}



			}
			else 
				return false;



		}
		else
		{

			IDepthFrame* pDepthFrame = nullptr;
			hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(10);
				hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			}

			if( SUCCEEDED( hResult ) ){
				unsigned int bufferSize = 0;
				unsigned short* buffer = nullptr;
				hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, &buffer );

				if( SUCCEEDED( hResult ) ){
					for( int y = 0; y < h; y++ ){
						for( int x = 0; x < w; x++ ){
							Vec3b intensity = frame.at<Vec3b>(y, x);
							if(buffer[ y * w + (w - x - 1) ]  < hauteurCamera){
								int d = buffer[ y * w + (w - x - 1) ];
								intensity.val[0] = 2.55*(d % 100);
								intensity.val[1] = 1.22*(d % 200);
								intensity.val[2] = 256.0*d/hauteurCamera;
							}
							else
							{
								intensity.val[0] = 255;
								intensity.val[1] = 255;
								intensity.val[2] = 255;
							}
							/*intensity.val[0] = buffer[ y * w + x ] >> 8;
							intensity.val[1] = buffer[ y * w + x ] >> 8;
							intensity.val[2] = buffer[ y * w + x ] >> 8;*/
							frame.at<Vec3b>(y, x) = intensity;
						}
					}

					// Change the rectangle color depending on whether the corner heights are similar (within less than 4 cm)

					float d1 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d2 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
					float d3 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d4 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];

					if((d1 < 0)||(d1>3500)||(d2 < 0)||(d2>3500)||(d3 < 0)||(d3>3500)||(d4 < 0)||(d4>3500)){
						similarity = 0;
					}
					else
					{
						int mn = 100;
						if((abs(d1-d2) < mn)
							&&(abs(d1-d3) < mn)
							&&(abs(d1-d4) < mn)
							&&(abs(d2-d3) < mn)
							&&(abs(d2-d4) < mn)
							&&(abs(d3-d4) < mn))
							similarity = 255;
						else{
							int md = abs(d1-d2);
							md = MAX(md,abs(d1-d3));
							md = MAX(md,abs(d1-d4));
							md = MAX(md,abs(d2-d3));
							md = MAX(md,abs(d2-d4));
							md = MAX(md,abs(d3-d4));
							if(md-mn>128)
								similarity = 0;
							else
								similarity = 128 - (md - mn);
						}
					}


					if(k=='s'){

						// Get the camera height

						// From the depth frame, get the 3D positions of the 1.20 m jets and save them to another file to be loaded by an Observer


						CameraSpacePoint cameraPoint = { 0 };
						DepthSpacePoint depthPoint = { 0 };
						UINT16 depth;

						// Compute the camera height as the average of the four corner points
						// (sumH avoids shadowing the global frame height h)
						float sumH = 0;
						int cptH = 0;
						float d;

						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){sumH+=d; cptH++;}
						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){sumH+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){sumH+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){sumH+=d; cptH++;}
						if(cptH>0){
							//hauteurCamera = sumH/cptH;
							cout << "H = " << hauteurCamera << endl;
						}

						// Compute real size of blaster
						/*depthPoint.X = static_cast<float>(blasterXPosition[0] - blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] - blasterWidth); 
						depth = hauteurCamera - 1000.0f; // TODO change
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner1X = static_cast<float>(cameraPoint.X);
						float corner1Y = static_cast<float>(cameraPoint.Y);


						depthPoint.X = static_cast<float>(blasterXPosition[0] + blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] + blasterWidth); 
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner2X = static_cast<float>(cameraPoint.X);
						float corner2Y = static_cast<float>(cameraPoint.Y);*/

						//float realBlasterWidth = 1000.0*(abs(corner2X-corner1X)+abs(corner2Y-corner1Y))/2.0;


						ofstream myfile;
						myfile.open ("source_config.txt");
						myfile << hauteurCamera << "\n";
						myfile << fountainXPosition << "\n";
						myfile << fountainYPosition << "\n";
						myfile << fountainWidth << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							myfile << blasterXPosition[i] << "\n";
							myfile << blasterYPosition[i] << "\n";
						}
						myfile.close();

						//  save real positions to file

						myfile.open ("source3D.txt");
						myfile << hauteurCamera << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							depthPoint.X = static_cast<float>(blasterXPosition[i]); 
							depthPoint.Y = static_cast<float>(blasterYPosition[i]); 
							depth = hauteurCamera;
							pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							pCoordinateMapper->MapCameraPointToDepthSpace(cameraPoint, &depthPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							myfile << 1000.0*static_cast<float>(cameraPoint.X) << "\n";
							myfile << 1000.0*static_cast<float>(cameraPoint.Y) << "\n";
						}
						myfile.close();


					}


				}
				else{
					return false;
				}

			}
			else 
				return false;

			if( pDepthFrame != NULL ){
				pDepthFrame->Release();
				pDepthFrame = NULL;
			}

			rectangle(frame,
				Rect(fountainXPosition-fountainWidth/2.0, 
				fountainYPosition-fountainWidth/2.0, 
				fountainWidth	, 
				fountainWidth),
				Scalar(0,similarity,255-similarity),
				3);
			for(int i = -2; i <= 2; i++)
			{
				rectangle(frame,
					Rect(fountainXPosition - (i*blasterWidth) - (blasterWidth/2.0), 
					fountainYPosition-fountainWidth/2.0, 
					blasterWidth, 
					fountainWidth),
					Scalar(0,255,0),
					1);
				rectangle(frame,
					Rect(fountainXPosition-fountainWidth/2.0,
					fountainYPosition - (i*blasterWidth) - (blasterWidth/2.0),
					fountainWidth, 
					blasterWidth),
					Scalar(0,255,0),
					1);
			}

			char textbuffer [33];
			for(int i = 0;i<blasterXPosition.size();i++){
				sprintf(textbuffer,"%i",i+1);
				putText(frame,textbuffer, Point2f(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]), FONT_HERSHEY_PLAIN, 1,  Scalar(0,0,255,255), 2);
				//rectangle(frame,Rect(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]-blasterWidth/2.0,blasterWidth,blasterWidth),Scalar(255,0,0));
				circle(frame,Point(blasterXPosition[i],blasterYPosition[i]),blasterWidth/2.0,Scalar(255,0,0));
			}
			resize(frame,display,Size(displaySize*w,displaySize*h));
		}

		cv::imshow("source_config", display);


		k=cv::waitKey(1);	

		if(k == 32) // Space
			displayColor = ! displayColor;
		
		if(k=='a')
			alignBuseFromLayout();
		if(k=='r')
			resetFountainPosition();

	}


	if (pDepthReader)
	{
		pDepthReader->Release();
		pDepthReader = NULL;
	}


	if (pCoordinateMapper)
	{
		pCoordinateMapper->Release();
		pCoordinateMapper = NULL;
	}

	m_pKinectSensor->Close();
	if (m_pKinectSensor)
	{
		m_pKinectSensor->Release();
		m_pKinectSensor = NULL;
	}


	//system("pause");

	return 0;
}