Example #1
// Start capturing
bool ciFlyCap::Obj::start(bool callback)
{

	// Free resources first if already capturing
	if (mCapturing)
		stop();

	// Connect, if needed
	if (!isConnected())
		connect();

	// Bail if no camera
	if (mCamera == 0)
		return false;

	// Initialize surface
	mSurface.reset();
	mSurface = Surface8u(mRawImage.GetCols(), mRawImage.GetRows(), false, mSurfaceChannelOrder);

	mCallback = callback;

	// Start capture
	if(mCallback) {
		mErr = mCamera->StartCapture(ciFlyCap::Obj::onImageGrabbed, this);
	} else {
		mErr = mCamera->StartCapture();
	}
	if (mErr != PGRERROR_OK)
	{
		showError();
		stop();
		return false;
	}

	// Default to application frame rate
	mProperty.type = PropertyType::FRAME_RATE;
	mErr = mCamera->GetProperty(&mProperty);
	if (mErr != PGRERROR_OK)
	{
		mProperty.absControl = true;
		mProperty.absValue = app::getFrameRate();
		mErr = mCamera->SetProperty(&mProperty);
	}

	// Run update in separate thread
	mCapturing = true;
	mNewFrame = false;
	if( ! mCallback ) {
		// detach so the temporary std::thread is destroyed safely; a joinable
		// std::thread destroyed without join()/detach() calls std::terminate()
		thread( &ciFlyCap::Obj::update, this ).detach();
	}

	// Return true if no error
	return true;

}
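
A minimal caller-side sketch (not part of the original listing; the wrapper object name is an assumption): try callback-driven capture first and fall back to the polling thread.

// Hypothetical usage: prefer the FlyCapture2 image-event callback,
// fall back to the internal update() polling thread if it fails.
if( ! mFlyCap.start( true ) )
	mFlyCap.start( false );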
Example #2
	void Kinect::enableVideo( bool enable )
	{
		bool toggle = mEnabledVideo != enable;
		mEnabledVideo = enable;
		if ( toggle ) {
			if ( !mEnabledVideo ) {
				mFrameRateVideo = 0.0f;
			}
			mVideoSurface = Surface8u( mVideoWidth, mVideoHeight, false, SurfaceChannelOrder::RGBA );
		}
	}
Example #3
Surface8u RendererImpl2dGdi::copyWindowContents( const Area &area )
{
	// Warning - if you step through this code with a debugger, the image returned
	// will be of the foreground window (presumably your IDE) instead
	::RECT clientRect;
	::GetClientRect( mWnd, &clientRect );
	Area clippedArea = area.getClipBy( Area( clientRect.left, clientRect.top, clientRect.right, clientRect.bottom ) );
	::HDC hdc = ::GetDC( mWnd );
	::HDC memDC = ::CreateCompatibleDC( hdc );

	::HBITMAP copyBitmap = ::CreateCompatibleBitmap( hdc, clippedArea.getWidth(), clippedArea.getHeight() );
	if( ! copyBitmap ) {
		CI_LOG_E( "::CreateCompatibleBitmap() failed on copyWindowContents()" );
		::DeleteDC( memDC );
		::ReleaseDC( mWnd, hdc );
		return Surface8u();
	}

	::GdiFlush();

	// Copy the bits to our bitmap
	::HGDIOBJ oldBitmap = ::SelectObject( memDC, copyBitmap );
	if( ! ::BitBlt( memDC, 0, 0, clippedArea.getWidth(), clippedArea.getHeight(), hdc, clippedArea.x1, clippedArea.y1, SRCCOPY ) ) {
		CI_LOG_E( "BitBlt() failed on copyWindowContents()" );
		::SelectObject( memDC, oldBitmap );
		::DeleteObject( copyBitmap );
		::DeleteDC( memDC );
		::ReleaseDC( mWnd, hdc );
		return Surface8u();
	}
	::SelectObject( memDC, oldBitmap );
	
	auto tempSurface = msw::convertHBitmap( copyBitmap );
	Surface8u result( *tempSurface );

	::DeleteObject( copyBitmap );

	::ReleaseDC( mWnd, hdc ); // must pass the same window GetDC() was given
	::DeleteDC( memDC );

	return result;
}
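
A caller-side sketch for copyWindowContents() (the save path is arbitrary, not from this listing): grab the window contents and write them to disk, checking for the null Surface returned on failure.

// Hypothetical usage: capture the current window and save it as a PNG.
Surface8u shot = copyWindowContents( getWindowBounds() );
if( shot )
	writeImage( getHomeDirectory() / "window.png", shot );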
Example #4
	Surface8u getNewSurface()
	{
		// try to find an available block of pixel data to wrap a surface around	
		for( size_t i = 0; i < mSurfaceData.size(); ++i ) {
			if( ! mSurfaceUsed[i] ) {
				mSurfaceUsed[i] = true;
				Surface8u result( mSurfaceData[i].get(), mWidth, mHeight, mWidth * mSCO.getPixelInc(), mSCO );
				result.setDeallocator( surfaceDeallocator, &mDeallocatorRefcon[i] );
				return result;
			}
		}

		// we couldn't find an available surface, so we'll need to allocate one
		return Surface8u( mWidth, mHeight, mSCO.hasAlpha(), mSCO );
	}
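
The pool above hands out Surfaces wrapped around preallocated pixel blocks, so the deallocator passed to setDeallocator() must mark the slot free again when a Surface is destroyed. A sketch of what that callback could look like (the refcon layout and pool type are assumptions):

// Hypothetical deallocator: the refcon is assumed to carry the owning pool
// and the slot index so the pixel block can be returned to the pool.
struct SlotRefcon { SurfacePool *pool; size_t index; };

void surfaceDeallocator( void *refcon )
{
	SlotRefcon *slot = reinterpret_cast<SlotRefcon*>( refcon );
	slot->pool->mSurfaceUsed[slot->index] = false; // the block is available again
}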
Example #5
void HodginDidItApp::setupGraphics()
{
	mDepthBuffer = new uint16_t[mDepthW*mDepthH];
	mSurfDepth = Surface8u(mDepthW, mDepthH, true, SurfaceChannelOrder::RGBA);
	mBlendAmt = S_BLEND_MAX;

	mFont = gl::TextureFont::create(Font(loadAsset("IntelClear_RG.ttf"), 48));
	mHello = "Hello.";
	mQuestion = "Are You There?";
	mCursor = '_';
	mScreenText.clear();

	mTexBg = gl::Texture(loadImage(loadAsset("bg_test.png")));
	mPixelSize = Vec2f::one()/Vec2f(160,120);
}
Example #6
	bool CinderDSAPI::update()
	{
		bool retVal = mDSAPI->grab();
		if (retVal)
		{
			if (mHasRgb)
				mRgbFrame = Surface8u((uint8_t *)mDSRGB->getThirdImage(), mRgbWidth, mRgbHeight, mRgbWidth * 3, SurfaceChannelOrder::RGB);
			if (mHasDepth)
				mDepthFrame = Channel16u(mLRZWidth, mLRZHeight, int32_t(mLRZWidth*sizeof(uint16_t)), 1, mDSAPI->getZImage());
			if (mHasLeft)
				mLeftFrame = Channel8u(mLRZWidth, mLRZHeight, mLRZWidth, 1, (uint8_t *)mDSAPI->getLImage());
			if (mHasRight)
				mRightFrame = Channel8u(mLRZWidth, mLRZHeight, mLRZWidth, 1, (uint8_t *)mDSAPI->getRImage());
		}
		return retVal;
	}
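
A per-frame usage sketch (object and accessor names are assumptions; the listing only shows the member frames being filled): grab new frames, then upload the RGB surface for drawing.

// Hypothetical caller, once per frame:
if( mDS->update() )
	mRgbTexture = gl::Texture( mDS->getRgbFrame() ); // getRgbFrame() is assumed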
Example #7
Surface8u convertHBitmap( HBITMAP hbitmap )
{
	// create a temporary DC
	HDC hdc = ::CreateCompatibleDC( 0 );
	
	// determine the dimensions first
	BITMAP bm;
	::GetObject( hbitmap, sizeof(BITMAP), &bm );
	
	BITMAPINFO bmi;
	memset( &bmi, 0, sizeof(BITMAPINFO) );
	bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	bmi.bmiHeader.biHeight = -bm.bmHeight;
	bmi.bmiHeader.biWidth = bm.bmWidth;
	bmi.bmiHeader.biPlanes = 1;
	bmi.bmiHeader.biBitCount = 32;
	bmi.bmiHeader.biCompression = BI_RGB;
	bmi.bmiHeader.biSizeImage = 0;
	bmi.bmiHeader.biXPelsPerMeter = 0;
	bmi.bmiHeader.biYPelsPerMeter = 0;
	bmi.bmiHeader.biClrUsed = 0;
	bmi.bmiHeader.biClrImportant = 0;

	int width = bmi.bmiHeader.biWidth;
	int height = -bmi.bmiHeader.biHeight;

	// allocate enough space to hold the result
	// We use GlobalAlloc / GlobalFree to ensure 8-byte alignment
	DWORD dwBmpSize = ((width * 32 + 31) / 32) * 4 * height;
	uint8_t *data = reinterpret_cast<uint8_t*>( ::GlobalAlloc( GMEM_FIXED, dwBmpSize ) );
	Surface8u result = Surface8u( data, width, height, width * 4, SurfaceChannelOrder::BGRX );
	// have the Surface's destructor call this to call ::GlobalFree
	result.setDeallocator( surfaceDeallocatorGlobalAlloc, data );
	
	if( ::GetDIBits( hdc, hbitmap, 0, height, result.getData(), &bmi, DIB_RGB_COLORS ) == 0 ) {
		::DeleteDC( hdc );
		throw std::exception( "Invalid HBITMAP" );
	}
	
	// the DC came from ::CreateCompatibleDC(), so delete it rather than release it
	::DeleteDC( hdc );

	return result;
}
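
A caller-side sketch (hypothetical; the bitmap source is a placeholder): convertHBitmap() copies the pixels via ::GetDIBits(), so the handle can be deleted right after the call.

// Hypothetical usage: wrap a GDI bitmap's pixels in a Cinder Surface.
HBITMAP hbmp = reinterpret_cast<HBITMAP>( ::LoadImageW( NULL, L"image.bmp", IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE ) );
Surface8u surface = convertHBitmap( hbmp );
::DeleteObject( hbmp ); // safe: the pixel data was copied into the Surface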
Example #8
Surface8u colorizeBodyIndex( const Channel8u& bodyIndexChannel )
{
    Surface8u surface;
    if ( bodyIndexChannel ) {
        surface = Surface8u( bodyIndexChannel.getWidth(), bodyIndexChannel.getHeight(), true, SurfaceChannelOrder::RGBA );
        Channel8u::ConstIter iterChannel	= bodyIndexChannel.getIter();
        Surface8u::Iter iterSurface			= surface.getIter();
        while ( iterChannel.line() && iterSurface.line() ) {
            while ( iterChannel.pixel() && iterSurface.pixel() ) {
                size_t index				= (size_t)iterChannel.v();
                ColorA8u color( getBodyColor( index ), 0xFF );
                if ( index == 0 || index > BODY_COUNT ) {
                    color.a					= 0x00;
                }
                iterSurface.r()				= color.r;
                iterSurface.g()				= color.g;
                iterSurface.b()				= color.b;
                iterSurface.a()				= color.a;
            }
        }
    }
    return surface;
}
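
A usage sketch (device and frame accessor names are assumptions): colorize the body-index channel of the current frame and upload it for drawing.

// Hypothetical caller:
Surface8u colored = colorizeBodyIndex( mDevice->getFrame().getChannelBodyIndex() );
if ( colored ) {
    mBodyIndexTexture = gl::Texture( colored );
}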
Example #9
	bool CinderDSAPI::initRgb(const FrameSize &pRes, const int &pFPS)
	{
		ivec2 cSize;
		if(setupStream(pRes, cSize))
		{
			mDSRGB = mDSAPI->accessThird();
			if (mDSRGB)
			{
				if(mDSRGB->enableThird(true))
				{
					mHasRgb = mDSRGB->setThirdResolutionMode(true, cSize.x, cSize.y, pFPS, DS_RGB8);
					if (mHasRgb)
					{
						mRgbWidth = cSize.x;
						mRgbHeight = cSize.y;
						mRgbFrame = Surface8u(mRgbWidth, mRgbHeight, false, SurfaceChannelOrder::RGB);
						mDSRGB->getCalibIntrinsicsRectThird(mRgbIntrinsics);
						mDSRGB->getCalibExtrinsicsZToRectThird(mZToRgb);
					}
				}
			}
		}
		return mHasRgb;
	}
Example #10
Surface	TextLayout::render( bool useAlpha, bool premultiplied )
{
	Surface result;
	
	// determine the extents for all the lines and the result surface
	float totalHeight = 0, maxWidth = 0;
	for( deque<shared_ptr<Line> >::iterator lineIt = mLines.begin(); lineIt != mLines.end(); ++lineIt ) {
		(*lineIt)->calcExtents();
		totalHeight = std::max( totalHeight, totalHeight + (*lineIt)->mHeight + (*lineIt)->mLeadingOffset );
		if( (*lineIt)->mWidth > maxWidth )
			maxWidth = (*lineIt)->mWidth;
	}
	// for the last line, instead of using the font info, we'll use the true height
/*	if( ! mLines.empty() ) {
		totalHeight = currentY - (mLines.back()->mAscent - mLines.back()->mDescent - mLines.back()->mLeadingOffset - mLines.back()->mLeading );
		totalHeight += mLines.back()->mHeight;
	}*/

	// round up from the floating point sizes to get the number of pixels we'll need
	int pixelWidth = (int)math<float>::ceil( maxWidth ) + mHorizontalBorder * 2;
	int pixelHeight = (int)math<float>::ceil( totalHeight ) + mVerticalBorder * 2;

	// Odd failure - return a NULL Surface
	if( ( pixelWidth < 0 ) || ( pixelHeight < 0 ) )
		return Surface();

	// allocate the surface based on our collective extents
#if defined( CINDER_MAC )
	result = Surface( pixelWidth, pixelHeight, useAlpha, (useAlpha)?SurfaceChannelOrder::RGBA:SurfaceChannelOrder::RGBX );
	CGContextRef cgContext = cocoa::createCgBitmapContext( result );
	ip::fill( &result, mBackgroundColor.premultiplied() );

	float currentY = totalHeight + 1.0f + mVerticalBorder;
	for( deque<shared_ptr<Line> >::iterator lineIt = mLines.begin(); lineIt != mLines.end(); ++lineIt ) {
		// these are negated from Cinder's normal pixel coordinate system
		currentY -= (*lineIt)->mAscent + (*lineIt)->mLeadingOffset;
		(*lineIt)->render( cgContext, currentY, (float)mHorizontalBorder, pixelWidth );
		currentY -= (*lineIt)->mDescent + (*lineIt)->mLeading;
	}

	// force all the rendering to finish and release the context
	CGContextFlush( cgContext );
	CGContextRelease( cgContext );

	// since CGContextBitmaps are always premultiplied, if the caller didn't want that we'll have to undo it
	if( ! premultiplied )
		ip::unpremultiply( &result );
#elif defined( CINDER_MSW )
	// I don't have a great explanation for this other than it seems to be necessary
	pixelHeight += 1;
	// prep our GDI and GDI+ resources
	HDC dc = TextManager::instance()->getDc();
	result = Surface8u( pixelWidth, pixelHeight, useAlpha, SurfaceConstraintsGdiPlus() );
	Gdiplus::Bitmap *offscreenBitmap = msw::createGdiplusBitmap( result, premultiplied );
	//Gdiplus::Bitmap *offscreenBitmap = new Gdiplus::Bitmap( pixelWidth, pixelHeight, (premultiplied) ? PixelFormat32bppPARGB : PixelFormat32bppARGB );
	Gdiplus::Graphics *offscreenGraphics = Gdiplus::Graphics::FromImage( offscreenBitmap );
	// high quality text rendering
	offscreenGraphics->SetTextRenderingHint( Gdiplus::TextRenderingHintAntiAlias );
	// fill the surface with the background color
	offscreenGraphics->Clear( Gdiplus::Color( (BYTE)(mBackgroundColor.a * 255), (BYTE)(mBackgroundColor.r * 255), 
			(BYTE)(mBackgroundColor.g * 255), (BYTE)(mBackgroundColor.b * 255) ) );

	// walk the lines and render them, advancing our Y offset along the way
	float currentY = (float)mVerticalBorder;
	for( deque<shared_ptr<Line> >::iterator lineIt = mLines.begin(); lineIt != mLines.end(); ++lineIt ) {
		//currentY += (*lineIt)->mLeadingOffset + (*lineIt)->mAscent;
		currentY += (*lineIt)->mLeadingOffset + (*lineIt)->mLeading;
		(*lineIt)->render( offscreenGraphics, currentY, (float)mHorizontalBorder, (float)pixelWidth );
		//currentY += (*lineIt)->mDescent + (*lineIt)->mLeading;
		currentY += (*lineIt)->mAscent + (*lineIt)->mDescent;
	}

	GdiFlush();

	delete offscreenBitmap;
	delete offscreenGraphics;		
#endif

	return result;
}
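
A caller-side sketch using Cinder's TextLayout API (the font name is arbitrary): build a layout line by line, then render() it into a Surface with the signature shown above.

// Build and render a layout, then upload it as a texture.
TextLayout layout;
layout.setFont( Font( "Arial", 24 ) );
layout.setColor( Color( 1, 1, 1 ) );
layout.addLine( "Hello, Cinder" );
Surface rendered = layout.render( true, false ); // alpha on, not premultiplied
gl::Texture tex( rendered );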
Example #11
void ShapeDetection::onDepth( openni::VideoFrameRef frame, const OpenNI::DeviceOptions& deviceOptions )
{
    // convert frame from the camera to an OpenCV matrix
    mInput = toOcv( OpenNI::toChannel16u(frame) );
    
    cv::Mat thresh;
    cv::Mat eightBit;
    cv::Mat withoutBlack;
    
    // remove black pixels from frame which get detected as noise
    withoutBlack = removeBlack( mInput, mNearLimit, mFarLimit );
    
    // convert matrix from 16 bit to 8 bit with some color compensation
    withoutBlack.convertTo( eightBit, CV_8UC3, 0.1 );
    
    // invert the image
    cv::bitwise_not( eightBit, eightBit );
    
    mContours.clear();
    mApproxContours.clear();
    
    // using a threshold to reduce noise
    cv::threshold( eightBit, thresh, mThresh, mMaxVal, CV_THRESH_BINARY ); // the last argument is the threshold type, not an image depth
    
    // draw lines around shapes
    cv::findContours( thresh, mContours, mHierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
    
    vector<cv::Point> approx;
    // approx number of points per contour
    for ( int i = 0; i < mContours.size(); i++ ) {
        cv::approxPolyDP(mContours[i], approx, 1, true );
        mApproxContours.push_back( approx );
    }
    
    mShapes.clear();
    // get data that we can later compare
    mShapes = getEvaluationSet( mApproxContours, 75, 100000 );
    
    // find the nearest match for each shape
    for ( int i = 0; i < mTrackedShapes.size(); i++ ) {
        Shape* nearestShape = findNearestMatch( mTrackedShapes[i], mShapes, 5000 );
        
        // a tracked shape was found, update that tracked shape with the new shape
        if ( nearestShape != NULL ) {
            nearestShape->matchFound = true;
            mTrackedShapes[i].centroid = nearestShape->centroid;
            // get depth value from center point
            float centerDepth = (float)mInput.at<uint16_t>( mTrackedShapes[i].centroid.y, mTrackedShapes[i].centroid.x );
            // map the depth range [mNearLimit, mFarLimit] to [0, 1]
            mTrackedShapes[i].depth = lmap( centerDepth, (float)mNearLimit, (float)mFarLimit, 0.0f, 1.0f );
            mTrackedShapes[i].lastFrameSeen = ci::app::getElapsedFrames();
            mTrackedShapes[i].hull.clear();
            mTrackedShapes[i].hull = nearestShape->hull;
            mTrackedShapes[i].motion = nearestShape->motion;
            Vec3f centerVec = Vec3f( mTrackedShapes[i].centroid.x, mTrackedShapes[i].centroid.y, 0.0f );
            mTrackedShapes[i].mTrailPoint.arrive(centerVec);
            mTrackedShapes[i].mTrailPoint.updateTrail();
        }
    }
    
    // if shape->matchFound is false, add it as a new shape
    for ( int i = 0; i < mShapes.size(); i++ ) {
        if( mShapes[i].matchFound == false ){
            // assign a unique ID
            mShapes[i].ID = shapeUID;
            mShapes[i].lastFrameSeen = ci::app::getElapsedFrames();
            // starting point of the trail
            mShapes[i].mTrailPoint.mLocation = Vec3f( mShapes[i].centroid.x, mShapes[i].centroid.y, 0.0f );
            // add this new shape to tracked shapes
            mTrackedShapes.push_back( mShapes[i] );
            shapeUID++;
        }
    }
    
    // if we didn't find a match for x frames, delete the tracked shape
    for ( vector<Shape>::iterator it = mTrackedShapes.begin(); it != mTrackedShapes.end(); ) {
        if ( ci::app::getElapsedFrames() - it->lastFrameSeen > 20 ) {
            // remove the tracked shape
            it = mTrackedShapes.erase(it);
        } else {
            ++it;
        }
    }
    mSurfaceDepth = Surface8u( fromOcv( mInput  ) );
    mSurfaceSubtract = Surface8u( fromOcv(eightBit) );
}
Example #12
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

        // depthFrame is only acquired when depth is enabled, so guard the access
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &timeStamp );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
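
An app-side sketch (accessor names are assumptions; the listing shows the mFrame members being populated): poll the device each frame and upload the color surface when one arrived.

// Hypothetical caller, once per frame:
mDevice->update();
Surface8u color = mDevice->getFrame().getSurfaceColor(); // assumed getters
if ( color ) {
    mColorTexture = gl::Texture( color );
}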
Example #13
void CinderOpenNISkeleton::setDepthSurface() {
	int texWidth =  gCinderOpenNISkeleton->mDepthMD.XRes();
	int texHeight = gCinderOpenNISkeleton->mDepthMD.YRes();
	
	const XnDepthPixel* pDepth = gCinderOpenNISkeleton->mDepthMD.Data();
	const XnLabel* pLabels = gCinderOpenNISkeleton->mSceneMD.Data();
	
	// Calculate the cumulative depth histogram (used to normalize depth for display)
	memset(gCinderOpenNISkeleton->pDepthHist, 0, MAX_DEPTH*sizeof(float));
	
	unsigned int nX = 0;
	unsigned int nValue = 0;
	unsigned int nIndex = 0;
	unsigned int nY = 0;
	unsigned int nNumberOfPoints = 0;
	unsigned int nHistValue = 0;
	
	if(!gCinderOpenNISkeleton->bInitialized)
	{
	
		gCinderOpenNISkeleton->pDepthTexBuf = new unsigned char[texWidth*texHeight*3];
		
		app::console() << "Initialised Buffer" <<endl;
		
		gCinderOpenNISkeleton->bInitialized = true;
		
		gCinderOpenNISkeleton->mDepthSurface = Surface8u( texWidth, texHeight, false ); // width, height, alpha?

	}
	
	
	for (nY=0; nY<texHeight; nY++)
	{
		for (nX=0; nX<texWidth; nX++)
		{
			nValue = *pDepth;
			
			if (nValue != 0)
			{
				gCinderOpenNISkeleton->pDepthHist[nValue]++;
				nNumberOfPoints++;
			}
			
			pDepth++;
		}
	}
	
	for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		gCinderOpenNISkeleton->pDepthHist[nIndex] += gCinderOpenNISkeleton->pDepthHist[nIndex-1];
	}
	if (nNumberOfPoints)
	{
		for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			gCinderOpenNISkeleton->pDepthHist[nIndex] = 
			(unsigned int)(256 * (1.0f - (gCinderOpenNISkeleton->pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}
	
	pDepth = gCinderOpenNISkeleton->mDepthMD.Data();

	Area area( 0, 0, texWidth, texHeight );

	Surface::Iter iter = gCinderOpenNISkeleton->mDepthSurface.getIter( area );
	while( iter.line() ) {
		while( iter.pixel() ) {
			iter.r() = 0;
			iter.g() = 0;
			iter.b() = 0;
			
			if ( *pLabels != 0 ) // a user label is present at this pixel
			{
				nValue = *pDepth;
				XnLabel label = *pLabels;
				XnUInt32 nColorID = label % nColors;
				if (label == 0)
				{
					nColorID = nColors;
				}
				
				if (nValue != 0)
				{
					nHistValue = gCinderOpenNISkeleton->pDepthHist[nValue];
					
					iter.r() = nHistValue * Colors[nColorID][0]; 
					iter.g() = nHistValue * Colors[nColorID][1];
					iter.b() = nHistValue * Colors[nColorID][2];
				}
				
				//app::console() << "PLABEL with Colour: " << label % nColors << endl;
			}
			else {
				nValue = *pDepth;
				//app::console() << "nValue: " << nValue << endl;
				if (nValue != 0)
				{
					nHistValue = gCinderOpenNISkeleton->pDepthHist[nValue];
					
					//app::console() << "nHistValue: " << nHistValue << endl;
					
					iter.r() = nHistValue; 
					iter.g() = nHistValue;
					iter.b() = nHistValue;
				}
			}
			
			pDepth++;
			pLabels++;
			
		}
	}

	gl::clear( Color( 0, 0, 0 ) );
	gl::draw(gl::Texture(gCinderOpenNISkeleton->mDepthSurface));
}