void Microsoft2Grabber::BodyIndexFrameArrived(IBodyIndexFrameReference* pBodyIndexFrameReference) {
		IBodyIndexFrame* pBodyIndexFrame = NULL;
		HRESULT hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
		if(FAILED(hr))
			return;
		//cout << "got a body index frame" << endl;
		IFrameDescription* pBodyIndexFrameDescription = NULL;
		int nBodyIndexWidth = 0;
		int nBodyIndexHeight = 0;
		UINT nBodyIndexBufferSize = 0;
		BYTE *pBodyIndexBuffer = NULL;

		// get body index frame data
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
		}
		if (SUCCEEDED(hr)) {
			hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
		}
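		// pBodyIndexBuffer now points to nBodyIndexWidth * nBodyIndexHeight bytes (0-5 = tracked body index, 0xff = no body); the actual processing is omitted in this snippet.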
		SafeRelease(pBodyIndexFrameDescription);
		SafeRelease(pBodyIndexFrame);
	}
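SafeRelease (and the lowercase safeRelease used in the ofxKinect2 example further down) is not defined in these listings. It is typically the small COM helper shown below; a minimal sketch, assuming the usual template form from the Kinect SDK samples:

// Minimal sketch of the assumed SafeRelease helper (not taken from the original sources).
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
	if (pInterfaceToRelease != nullptr)
	{
		pInterfaceToRelease->Release();
		pInterfaceToRelease = nullptr;
	}
}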
Example 2
	void UpdateBodyFrameIndex(){
		if(!m_pBodyIndexFrameReader){
			return;
		}

		if(m_nDepthWidth == 0 || m_nDepthHeight == 0 || m_nColorWidth == 0 || m_nColorHeight == 0){
			return;
		}

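		// 0xff is the "no body" value in the body index stream; here it presumably means no body is currently selected.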
		if(m_nBodyIndex == 0xff){
			return;
		}

		IBodyIndexFrame *  pBodyIndexFrame = NULL;

		HRESULT hr = m_pBodyIndexFrameReader->AcquireLatestFrame(&pBodyIndexFrame);

		if(SUCCEEDED(hr)){
			unsigned int nBufferSize = 0;
			unsigned char * pBuffer = NULL;

			if(SUCCEEDED(hr)){
				hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
			}
			
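			// Map every color pixel to depth space so the body index buffer (which is at depth resolution) can be looked up per color pixel.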
			if(SUCCEEDED(hr)){
				hr = m_pCoordinateMapper->MapColorFrameToDepthSpace(m_nDepthWidth*m_nDepthHeight, m_pDepth, m_nColorWidth*m_nColorHeight, m_pColorDepthMap);
			}

			if(SUCCEEDED(hr)){
				ProcessBodyFrameIndexColor(pBuffer, m_nDepthWidth, m_nDepthHeight, m_pColorDepthMap, m_nColorWidth, m_nColorHeight);
			}
		}

		SafeRelease(pBodyIndexFrame);
	}
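ProcessBodyFrameIndexColor is not shown in this example. The sketch below illustrates the kind of per-pixel masking such a function typically performs with the inputs passed above: for each color pixel, look up its depth-space coordinate from the color-to-depth map and test the body index buffer (0-5 = tracked body, 0xff = no body). The function name suffix and the playerMaskOut output are illustrative assumptions, not the original implementation.

// Hypothetical sketch, not the original implementation.
void ProcessBodyFrameIndexColorSketch(const BYTE* bodyIndexBuffer, int depthWidth, int depthHeight,
                                      const DepthSpacePoint* colorToDepthMap, int colorWidth, int colorHeight,
                                      BYTE* playerMaskOut /* colorWidth * colorHeight, assumed output */)
{
	for (int i = 0; i < colorWidth * colorHeight; ++i)
	{
		const DepthSpacePoint p = colorToDepthMap[i];
		BYTE mask = 0;
		if (p.X >= 0.0f && p.Y >= 0.0f)	// unmapped color pixels come back as -infinity
		{
			const int dx = static_cast<int>(p.X + 0.5f);
			const int dy = static_cast<int>(p.Y + 0.5f);
			if (dx < depthWidth && dy < depthHeight)
			{
				// 0-5 identifies a tracked body, 0xff means no body at that depth pixel.
				mask = (bodyIndexBuffer[dx + dy * depthWidth] != 0xff) ? 255 : 0;
			}
		}
		playerMaskOut[i] = mask;
	}
}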
int main(int argc, char** argv)
{
	// 1a. Get default Sensor
	cout << "Try to get default sensor" << endl;
	IKinectSensor* pSensor = nullptr;
	if (GetDefaultKinectSensor(&pSensor) != S_OK)
	{
		cerr << "Get Sensor failed" << endl;
		return -1;
	}

	// 1b. Open sensor
	cout << "Try to open sensor" << endl;
	if (pSensor->Open() != S_OK)
	{
		cerr << "Can't open sensor" << endl;
		return -1;
	}

	// 2a. Get frame source
	cout << "Try to get body index source" << endl;
	IBodyIndexFrameSource* pFrameSource = nullptr;
	if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK)
	{
		cerr << "Can't get body index frame source" << endl;
		return -1;
	}

	// 2b. Get frame description
	cout << "get body index frame description" << endl;
	int		iWidth = 0;
	int		iHeight = 0;
	IFrameDescription* pFrameDescription = nullptr;
	if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
	{
		pFrameDescription->get_Width(&iWidth);
		pFrameDescription->get_Height(&iHeight);
		pFrameDescription->Release();
		pFrameDescription = nullptr;
	}
	else
	{
		cerr << "Can't get body index frame description" << endl;
		return -1;
	}

	// 3a. get frame reader
	cout << "Try to get body index frame reader" << endl;
	IBodyIndexFrameReader* pFrameReader = nullptr;
	if (pFrameSource->OpenReader(&pFrameReader) != S_OK)
	{
		cerr << "Can't get body index frame reader" << endl;
		return -1;
	}

	// 2c. release Frame source
	cout << "Release frame source" << endl;
	pFrameSource->Release();
	pFrameSource = nullptr;

	// Prepare OpenCV data
	cv::Mat	mImg(iHeight, iWidth, CV_8UC3);
	cv::namedWindow("Body Index Image");

	// color array
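	// entries 0-5 color the six possible body indices; the last entry (black) is used for background pixels (body index 0xff)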
	cv::Vec3b aColorTable[7] = {
		cv::Vec3b(255,0,0),
		cv::Vec3b(0,255,0),
		cv::Vec3b(0,0,255),
		cv::Vec3b(255,255,0),
		cv::Vec3b(255,0,255),
		cv::Vec3b(0,255,255),
		cv::Vec3b(0,0,0),
	};

	// Enter main loop
	while (true)
	{
		// 4a. Get last frame
		IBodyIndexFrame* pFrame = nullptr;
		if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
		{
			// 4c. Fill OpenCV image
			UINT uSize = 0;
			BYTE* pBuffer = nullptr;
			if (pFrame->AccessUnderlyingBuffer(&uSize,&pBuffer) == S_OK)
			{
				for (int y = 0; y < iHeight; ++y)
				{
					for (int x = 0; x < iWidth; ++x)
					{
						int uBodyIdx = pBuffer[x + y * iWidth];
						if (uBodyIdx < 6)
							mImg.at<cv::Vec3b>(y, x) = aColorTable[uBodyIdx];
						else
							mImg.at<cv::Vec3b>(y, x) = aColorTable[6];
					}
				}
				cv::imshow("Body Index Image", mImg);
			}
			else
			{
				cerr << "Data access error" << endl;
			}

			// 4e. release frame
			pFrame->Release();
		}

		// 4f. check keyboard input
		if (cv::waitKey(30) == VK_ESCAPE){
			break;
		}
	}

	// 3b. release frame reader
	cout << "Release frame reader" << endl;
	pFrameReader->Release();
	pFrameReader = nullptr;

	// 1c. Close Sensor
	cout << "close sensor" << endl;
	pSensor->Close();

	// 1d. Release Sensor
	cout << "Release sensor" << endl;
	pSensor->Release();
	pSensor = nullptr;

	return 0;
}
Example 4
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IBodyIndexFrameSource* pBodyIndexSource;
	hResult = pSensor->get_BodyIndexFrameSource( &pBodyIndexSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_BodyIndexFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IBodyIndexFrameReader* pBodyIndexReader;
	hResult = pBodyIndexSource->OpenReader( &pBodyIndexReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyIndexFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pBodyIndexSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyIndexFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 512
	pDescription->get_Height( &height ); // 424

	cv::Mat bodyIndexMat( height, width, CV_8UC3 );
	cv::namedWindow( "BodyIndex" );

	// Color Table
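	// one color per possible body index (0-5); pixels with 0xff (no body) are drawn black in the loop below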
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b( 255,   0,   0 );
	color[1] = cv::Vec3b(   0, 255,   0 );
	color[2] = cv::Vec3b(   0,   0, 255 );
	color[3] = cv::Vec3b( 255, 255,   0 );
	color[4] = cv::Vec3b( 255,   0, 255 );
	color[5] = cv::Vec3b(   0, 255, 255 );

	while( 1 ){
		// Frame
		IBodyIndexFrame* pBodyIndexFrame = nullptr;
		hResult = pBodyIndexReader->AcquireLatestFrame( &pBodyIndexFrame );
		if( SUCCEEDED( hResult ) ){
			unsigned int bufferSize = 0;
			unsigned char* buffer = nullptr;
			hResult = pBodyIndexFrame->AccessUnderlyingBuffer( &bufferSize, &buffer );
			if( SUCCEEDED( hResult ) ){
				for( int y = 0; y < height; y++ ){
					for( int x = 0; x < width; x++ ){
						unsigned int index = y * width + x;
						if( buffer[index] != 0xff ){
							bodyIndexMat.at<cv::Vec3b>( y, x ) = color[buffer[index]];
						}
						else{
							bodyIndexMat.at<cv::Vec3b>( y, x ) = cv::Vec3b( 0, 0, 0 );
						}
					}
				}
			}
		}
		SafeRelease( pBodyIndexFrame );

		cv::imshow( "BodyIndex", bodyIndexMat );

		if( cv::waitKey( 30 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pBodyIndexSource );
	SafeRelease( pBodyIndexReader );
	SafeRelease( pDescription );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
/// <summary>
/// Main processing function
/// </summary>
void CCoordinateMappingBasics::Update()
{
    if (!m_pMultiSourceFrameReader)
    {
        return;
    }

    IMultiSourceFrame* pMultiSourceFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IColorFrame* pColorFrame = NULL;
    IBodyIndexFrame* pBodyIndexFrame = NULL;

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

    if (SUCCEEDED(hr))
    {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
        }

        SafeRelease(pBodyIndexFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyFrameReference* pBodyFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyFrameReference(&pBodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);
        }

		SafeRelease(pBodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
		// Depth
        INT64 nDepthTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;

		// Color
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

		// BodyIndex
        IFrameDescription* pBodyIndexFrameDescription = NULL;
        int nBodyIndexWidth = 0;
        int nBodyIndexHeight = 0;
        UINT nBodyIndexBufferSize = 0;
        BYTE *pBodyIndexBuffer = NULL;

		// Body
		IBody* ppBodies[BODY_COUNT] = { 0 };

        // get depth frame data
        hr = pDepthFrame->get_RelativeTime(&nDepthTime);

		// Depth
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);            
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Width(&nColorWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Height(&nColorHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        // get body index frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
        }

		// get body frame data

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

        if (SUCCEEDED(hr))
        {
            ProcessFrame(nDepthTime, pDepthBuffer, nDepthWidth, nDepthHeight, 
                pColorBuffer, nColorWidth, nColorHeight,
				pBodyIndexBuffer, nBodyIndexWidth, nBodyIndexHeight, BODY_COUNT, ppBodies);
        }

        SafeRelease(pDepthFrameDescription);
        SafeRelease(pColorFrameDescription);
        SafeRelease(pBodyIndexFrameDescription);

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

    SafeRelease(pDepthFrame);
    SafeRelease(pColorFrame);
    SafeRelease(pBodyIndexFrame);
    SafeRelease(pBodyFrame);
    SafeRelease(pMultiSourceFrame);
}
Example 6
bool BodyIndexStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool isSuccess = false;
    if (!m_StreamHandle.bodyIndexFrameReader) {
        ofLogWarning("ofxKinect2::BodyIndexStream") << "Stream is not open.";
        return isSuccess;
    }

    Stream::readFrame(multiFrame);
    IBodyIndexFrame *bodyIndexFrame = nullptr;

    HRESULT hr = E_FAIL;
    if (!multiFrame) {
        hr = m_StreamHandle.bodyIndexFrameReader->AcquireLatestFrame(&bodyIndexFrame);
    }
    else {
        IBodyIndexFrameReference *frameReference = nullptr;
        hr = multiFrame->get_BodyIndexFrameReference(&frameReference);

        if (SUCCEEDED(hr)) {
            hr = frameReference->AcquireFrame(&bodyIndexFrame);
        }

        safeRelease(frameReference);
    }

    if (SUCCEEDED(hr)) {
        IFrameDescription *frameDescription = nullptr;

        hr = bodyIndexFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);
        if (SUCCEEDED(hr)) {
            hr = bodyIndexFrame->get_FrameDescription(&frameDescription);
        }

        if (SUCCEEDED(hr)) {
            hr = frameDescription->get_Width(&m_Frame.width);
        }

        if (SUCCEEDED(hr)) {
            hr = frameDescription->get_Height(&m_Frame.height);
        }

        if (SUCCEEDED(hr)) {
            hr = frameDescription->get_HorizontalFieldOfView(&m_Frame.horizontalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = frameDescription->get_VerticalFieldOfView(&m_Frame.verticalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = frameDescription->get_DiagonalFieldOfView(&m_Frame.diagonalFieldOfView);
        }

        if (SUCCEEDED(hr)) {
            hr = bodyIndexFrame->AccessUnderlyingBuffer((UINT *)&m_Frame.dataSize, reinterpret_cast<BYTE **>(&m_Frame.data));
        }

        if (SUCCEEDED(hr)) {
            isSuccess = true;
            setPixels(m_Frame);
        }
        safeRelease(frameDescription);
    }

    safeRelease(bodyIndexFrame);

    return isSuccess;
}
Example 7
void KinectDevice::listen() {
    if (_listening) throw std::runtime_error("Already listening for new frames");

    _listening = true;
    while (_listening) {
        int idx = WaitForSingleObject((HANDLE) _frameEvent, 100);
        switch (idx) {
        case WAIT_TIMEOUT:
            std::cout << ".";
            continue;
        case WAIT_OBJECT_0:
            IMultiSourceFrameArrivedEventArgs *frameArgs = nullptr;
            IMultiSourceFrameReference *frameRef = nullptr;
            HRESULT hr = _reader->GetMultiSourceFrameArrivedEventData(_frameEvent, &frameArgs);

            if (hr == S_OK) {
                hr = frameArgs->get_FrameReference(&frameRef);
                frameArgs->Release();
            }

            if (hr == S_OK) {
                //if (_lastFrame) _lastFrame->Release();
                hr = frameRef->AcquireFrame(&_lastFrame);
                frameRef->Release();
            }

            if (hr == S_OK) {
                // Store frame data
                //std::cout << "Got a frame YEAH" << std::endl;

                IDepthFrameReference                *depthRef   = nullptr;
                IColorFrameReference                *colorRef   = nullptr;
                IInfraredFrameReference             *irRef      = nullptr;
                ILongExposureInfraredFrameReference *hdirRef    = nullptr;
                IBodyIndexFrameReference            *indexRef   = nullptr;

                IDepthFrame                         *depth      = nullptr;
                IColorFrame                         *color      = nullptr;
                IInfraredFrame                      *ir         = nullptr;
                ILongExposureInfraredFrame          *hdir       = nullptr;
                IBodyIndexFrame                     *index      = nullptr;

                UINT size = 0;      // AccessUnderlyingBuffer reports the capacity in elements, as a UINT
                uint16_t *buff = nullptr;
                BYTE *cbuff = nullptr;

                frameLock.lock();
                if (_streams & Streams::DEPTH_STREAM) {
                    _lastFrame->get_DepthFrameReference(&depthRef);
                    depthRef->AcquireFrame(&depth);
                    
                    if (depth) {
                        depthSwap();
                        depth->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(depthData.get(), buff, size * sizeof(uint16_t));
                        depth->Release();
                    }
                    
                    depthRef->Release();
                }
                if (_streams & Streams::COLOR_STREAM) {
                    _lastFrame->get_ColorFrameReference(&colorRef);
                    colorRef->AcquireFrame(&color);
                    //color->AccessUnderlyingBuffer(&size, &buff);
                    //memcpy(_colorData.get(), buff, size);
                    if (color) color->Release();
                    colorRef->Release();
                }
                if (_streams & Streams::IR_STREAM) {
                    _lastFrame->get_InfraredFrameReference(&irRef);
                    irRef->AcquireFrame(&ir);
                    if (ir) {
                        ir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(irData.get(), buff, size * sizeof(uint16_t));
                        ir->Release();
                    }
                    irRef->Release();
                }
                if (_streams & Streams::HDIR_STREAM) {
                    _lastFrame->get_LongExposureInfraredFrameReference(&hdirRef);
                    hdirRef->AcquireFrame(&hdir);
                    if (hdir) {
                        hdir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(hdirData.get(), buff, size * sizeof(uint16_t));
                        hdir->Release();
                    }
                    hdirRef->Release();
                }
                if (_streams & Streams::INDEX_STREAM) {
                    _lastFrame->get_BodyIndexFrameReference(&indexRef); 
                    indexRef->AcquireFrame(&index);
                    if (index) {
                        index->AccessUnderlyingBuffer(&size, &cbuff);
                        memcpy(indexData.get(), cbuff, size);   // BYTE buffer: element count equals byte count
                        index->Release();
                    }
                    indexRef->Release();
                }

                frameLock.unlock();

                _lastFrame->Release();
            }
        }
    }
}
Example 8
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

        hr = depthFrame->get_RelativeTime( &timeStamp );

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
void capture()
{
	IMultiSourceFrame *multiFrame = NULL;

	IColorFrame *colorFrame = NULL;
	IColorFrameReference *colorFrameReference = NULL;
	UINT colorBufferSize = 0;
	RGBQUAD *colorBuffer = NULL;

	IDepthFrame *depthFrame = NULL;
	IDepthFrameReference *depthFrameReference = NULL;
	UINT bufferSize = 0;
//	UINT16 *depthBuffer = NULL;

	IBodyIndexFrame *bodyIndexFrame = NULL;
	IBodyIndexFrameReference *bodyIndexFrameReference = NULL;
	UINT bodyIndexBufferSize = 0;
	static int lastTime = 0;
	static int currentTime = 0;
	HRESULT hr = -1;
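	// colorRGBX, colorMap, colorCoordinates, cameraSpacePoints, depthBuffer, bodyIndexBuffer, width, height, etc. appear to be globals defined elsewhere in this sample.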

	//Loop waiting for the frame reader to become readable (only the individual frames are acquired here)
  while(1) {
	  currentTime = GetTickCount();
	  if((currentTime - lastTime) > 33)	// throttle to roughly 30 fps
	  {
	    hr = multiFrameReader->AcquireLatestFrame(&multiFrame);
		lastTime = currentTime;
	  }else continue;

    if(FAILED(hr)) {
       //fprintf(stderr, "AcquireLatestFrame(&multiFrame)\n");
      Sleep(1);
      continue;
    }
    
    hr = multiFrame->get_ColorFrameReference(&colorFrameReference);
    if(FAILED(hr)) {
      Sleep(1);
      fprintf(stderr, "ColorFrameReference(&colorFrameReference)\n");
      SafeRelease(multiFrame);
      continue;
    }

    hr = colorFrameReference->AcquireFrame(&colorFrame);
    if(FAILED(hr)) {
      Sleep(1);
      fprintf(stderr, "AcquireFrame(&colorFrame)\n");		
      SafeRelease(colorFrameReference);
      SafeRelease(multiFrame);
      continue;
    }

	hr = multiFrame->get_DepthFrameReference(&depthFrameReference);
	if (FAILED(hr)) {
		Sleep(1);
		fprintf(stderr, "DepthFrameReference(&depthFrameReference)\n");
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}

	hr = depthFrameReference->AcquireFrame(&depthFrame);
	if (FAILED(hr)) {
		Sleep(1);
		fprintf(stderr, "AcquireFrame(&depthFrame)\n");
		SafeRelease(depthFrameReference);
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}

//	hr = depthFrame->AccessUnderlyingBuffer(&bufferSize, &depthBuffer);
	hr = depthFrame->CopyFrameDataToArray( dPixels, &depthBuffer[0] );
	if (FAILED(hr)) {
		Sleep(1);
		fprintf(stderr, "AccessUnderlyingBuffer(&bufferSize, &depthBuffer\n");
		SafeRelease(depthFrame);
		SafeRelease(depthFrameReference);
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}


	hr = multiFrame->get_BodyIndexFrameReference(&bodyIndexFrameReference);
	if (FAILED(hr)) {
		Sleep(1);
		fprintf(stderr, "BodyIndexReference(&colorFrameReference)\n");
		free(depthBuffer);
		SafeRelease(depthFrame);
		SafeRelease(depthFrameReference);
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}

	hr = bodyIndexFrameReference->AcquireFrame(&bodyIndexFrame);
	if (FAILED(hr)) {
		Sleep(1);
		fprintf(stderr, "AcquireFrame(&bodyIndexFrame)\n");
		SafeRelease(bodyIndexFrameReference);
		free(depthBuffer);
		SafeRelease(depthFrame);
		SafeRelease(depthFrameReference);
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}

	hr = bodyIndexFrame->AccessUnderlyingBuffer(&bodyIndexBufferSize, &bodyIndexBuffer);
	if(FAILED(hr))
	{
		Sleep(1);
		fprintf(stderr, "bodyIndexFrame->AccessUnderlyingBuffer(&bodyIndexBufferSize, &bodyIndexBuffer)");
		SafeRelease(bodyIndexFrame);
		SafeRelease(bodyIndexFrameReference);
		free(depthBuffer);
		SafeRelease(depthFrame);
		SafeRelease(depthFrameReference);
		SafeRelease(colorFrame);
		SafeRelease(colorFrameReference);
		SafeRelease(multiFrame);
		continue;
	}

    SafeRelease(colorFrameReference);
	SafeRelease(bodyIndexFrameReference);
	SafeRelease(depthFrameReference);
    break;
  }

  //Store the depth values into the buffer
 // ERROR_CHECK(depthFrame->AccessUnderlyingBuffer(&bufferSize, &depthBuffer));

	//Read the color frame description data and allocate the colorBuffer memory
	if(colorRGBX == NULL)
	{
		IFrameDescription *colorFrameDescription = NULL;

		ERROR_CHECK2(colorFrame->get_FrameDescription(&colorFrameDescription), "FrameDescription");
		ERROR_CHECK2(colorFrameDescription->get_Width(&colorWidth), "get_Width");
		ERROR_CHECK2(colorFrameDescription->get_Height(&colorHeight), "get_Height");
		colorRGBX = new RGBQUAD[colorWidth * colorHeight];

		glutReshapeWindow(width, height);

		ERROR_CHECK2(colorFrame->get_RawColorImageFormat(&imageFormat), "get_RawColorImageFormat");

		SafeRelease(colorFrameDescription);
	}

	//Copy the color image into colorBuffer
	if(imageFormat == ColorImageFormat_Bgra)
	{
		ERROR_CHECK2(colorFrame->AccessRawUnderlyingBuffer(&colorBufferSize,
			reinterpret_cast<BYTE**>(&colorBuffer)), "AccessRawUnderlyingBuffer");
	}else if(colorRGBX)
	{
		colorBuffer = colorRGBX;
		colorBufferSize = colorWidth * colorHeight * sizeof(RGBQUAD);
		ERROR_CHECK2(colorFrame->CopyConvertedFrameDataToArray(colorBufferSize,
			reinterpret_cast<BYTE*>(colorBuffer),
			ColorImageFormat_Bgra), "CopyConvertedFrameDataToArray");
	}else
	{
		//Error
	}

	//Initialize colorMap (executed only once)
	if(colorMap == NULL)
	{
		colorMap = new float[colorWidth * colorHeight][3];
	}
	if (colorCoordinates == NULL)
	{
		colorCoordinates = new ColorSpacePoint[width * height];
	}
	if (cameraSpacePoints == NULL)
	{
		cameraSpacePoints = new CameraSpacePoint[width * height];
	}
	if (cameraSpacePoints_ave == NULL)
	{
		cameraSpacePoints_ave = new CameraSpacePoint[width * height];
	}

	ERROR_CHECK2(coordinateMapper->MapDepthFrameToColorSpace(
	      width * height, (UINT16*)depthBuffer, 
              width * height, colorCoordinates), "MapDepthFrameToColorSpace");

	ERROR_CHECK2(coordinateMapper->MapDepthFrameToCameraSpace(
	      width * height, (UINT16*)depthBuffer, 
	      width * height, cameraSpacePoints),"MapDepthFrameToCameraSpace");

	//Copy the colorBuffer data into colorMap
	for(int i = 0; i < height; i++){
		for(int j = 0; j < width; j++){
			int index = i * width + j;

			ColorSpacePoint colorPoint = colorCoordinates[index];
			int colorX = (int)(floor(colorPoint.X + 0.5));
			int colorY = (int)(floor(colorPoint.Y + 0.5));

			if(colorX >= 0 && colorX < colorWidth && colorY >= 0 && colorY < colorHeight)
			{
				int colorIndex = colorX + colorY * colorWidth;
				//The destination index undoes mirror mode
				float* colorp = colorMap[index];

				UCHAR* data = (UCHAR*)(colorBuffer + colorIndex);
				colorp[0] = (float)data[2] / 255.0f;
				colorp[1] = (float)data[1] / 255.0f;
				colorp[2] = (float)data[0] / 255.0f;
			}else
			{
				float* colorp = colorMap[index];
				colorp[0] = 0;
				colorp[1] = 0;
				colorp[2] = 0;
			}
		}
	}


	//Release the frame resources
  SafeRelease(colorFrame);
  SafeRelease(multiFrame);
  SafeRelease(bodyIndexFrame);
  SafeRelease(depthFrame);
}
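ERROR_CHECK and ERROR_CHECK2 are not defined in this listing; they appear to be HRESULT-checking helpers. A minimal sketch of what ERROR_CHECK2 might look like, assuming it only logs the failure and bails out of the void capture() function:

// Hypothetical helper, not the original definition.
#define ERROR_CHECK2( call, name )                                      \
	do {                                                                \
		HRESULT _hr = ( call );                                         \
		if ( FAILED( _hr ) ) {                                          \
			fprintf( stderr, "%s failed (hr = 0x%08X)\n", ( name ),     \
			         static_cast<unsigned int>( _hr ) );                \
			return;                                                     \
		}                                                               \
	} while ( 0 )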
void KinectGrabber::GetNextFrame() {
	if (!m_pMultiSourceFrameReader)
	{
		return;
	}

	IMultiSourceFrame* pMultiSourceFrame = NULL;
	IDepthFrame* pDepthFrame = NULL;
	IColorFrame* pColorFrame = NULL;
	IBodyIndexFrame* pBodyIndexFrame = NULL;

	HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

	if (SUCCEEDED(hr))
	{
		IDepthFrameReference* pDepthFrameReference = NULL;

		hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
		}

		SafeRelease(pDepthFrameReference);
	}

	if (SUCCEEDED(hr))
	{
		IColorFrameReference* pColorFrameReference = NULL;

		hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
		if (SUCCEEDED(hr))
		{
			hr = pColorFrameReference->AcquireFrame(&pColorFrame);
		}

		SafeRelease(pColorFrameReference);
	}

	if (SUCCEEDED(hr))
	{
		IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;

		hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
		if (SUCCEEDED(hr))
		{
			hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
		}

		SafeRelease(pBodyIndexFrameReference);
	}

	if (SUCCEEDED(hr))
	{
		INT64 nDepthTime = 0;
		IFrameDescription* pDepthFrameDescription = NULL;
		int nDepthWidth = 0;
		int nDepthHeight = 0;
		UINT nDepthBufferSize = 0;

		IFrameDescription* pColorFrameDescription = NULL;
		int nColorWidth = 0;
		int nColorHeight = 0;
		ColorImageFormat imageFormat = ColorImageFormat_None;
		UINT nColorBufferSize = 0;
		RGBQUAD *pColorBuffer = NULL;

		IFrameDescription* pBodyIndexFrameDescription = NULL;
		int nBodyIndexWidth = 0;
		int nBodyIndexHeight = 0;
		UINT nBodyIndexBufferSize = 0;
		BYTE *pBodyIndexBuffer = NULL;

		// get depth frame data

		hr = pDepthFrame->get_RelativeTime(&nDepthTime);

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Width(&nDepthWidth);
		}

		if (SUCCEEDED(hr))
		{
			hr = pDepthFrameDescription->get_Height(&nDepthHeight);
		}

		if (SUCCEEDED(hr))
		{
			//m_pDepthBuffer = new UINT16[cDepthWidth * cDepthHeight];
			hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &m_pDepthBuffer);
			//pDepthFrame->CopyFrameDataToArray(nDepthBufferSize,m_pDepthBuffer);
			WaitForSingleObject(hDepthMutex,INFINITE);
			m_depthImage.release();
			Mat tmp = Mat(m_depthSize, DEPTH_PIXEL_TYPE, m_pDepthBuffer, Mat::AUTO_STEP);
			m_depthImage = tmp.clone(); //need to deep copy because of the call to SafeRelease(pDepthFrame) to prevent access violation
			ReleaseMutex(hDepthMutex);
		}

		// get color frame data

		if (SUCCEEDED(hr))
		{
			hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
		}

		if (SUCCEEDED(hr))
		{
			hr = pColorFrameDescription->get_Width(&nColorWidth);
		}

		if (SUCCEEDED(hr))
		{
			hr = pColorFrameDescription->get_Height(&nColorHeight);
		}

		if (SUCCEEDED(hr))
		{
			hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
		}

		if (SUCCEEDED(hr))
		{
			if (imageFormat == ColorImageFormat_Bgra)
			{
				hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
			}
			else if (m_pColorRGBX)
			{
				pColorBuffer = m_pColorRGBX;
				nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
				hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
			}
			else
			{
				hr = E_FAIL;
			}
			if(SUCCEEDED(hr)) {
				WaitForSingleObject(hColorMutex,INFINITE);
				m_colorImage.release();
				Mat tmp = Mat(m_colorSize, COLOR_PIXEL_TYPE, pColorBuffer, Mat::AUTO_STEP);
				m_colorImage = tmp.clone();
				ReleaseMutex(hColorMutex);
			}
		}

		// get body index frame data

		if (SUCCEEDED(hr))
		{
			hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
		}

		if (SUCCEEDED(hr))
		{
			hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
		}

		if (SUCCEEDED(hr))
		{
			hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
		}

		if (SUCCEEDED(hr))
		{
			hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
		}

		SafeRelease(pDepthFrameDescription);
		SafeRelease(pColorFrameDescription);
		SafeRelease(pBodyIndexFrameDescription);
	}

	SafeRelease(pDepthFrame);
	SafeRelease(pColorFrame);
	SafeRelease(pBodyIndexFrame);
	SafeRelease(pMultiSourceFrame);
}