void Renderer2dApp::draw()
{
#if defined( CINDER_COCOA )
	// Render using CoreGraphics on the Mac
	CGContextRef context = cocoa::getWindowContext();

	// Draw a vertical gradient background
	CGColorSpaceRef baseSpace = CGColorSpaceCreateDeviceRGB();
	CGFloat colors[8] = { 0, 0, 0, 1, 0.866, 0.866, 0.866, 1 };
	CGGradientRef gradient = CGGradientCreateWithColorComponents( baseSpace, colors, NULL, 2 );
	::CGColorSpaceRelease( baseSpace ), baseSpace = NULL;
	::CGContextDrawLinearGradient( context, gradient, CGPointMake( 0, 0 ), CGPointMake( 0, getWindowHeight() ), 0 );
	::CGGradientRelease( gradient ), gradient = NULL;

	// CoreGraphics is "upside down" by default; set up the CTM to flip and center the image
	ivec2 imgSize( ::CGImageGetWidth( mImage ), ::CGImageGetHeight( mImage ) );
	ivec2 centerMargin( ( getWindowWidth() - imgSize.x ) / 2, ( getWindowHeight() - imgSize.y ) / 2 );
	::CGContextTranslateCTM( context, centerMargin.x, imgSize.y + centerMargin.y );
	::CGContextScaleCTM( context, 1.0, -1.0 );
	::CGContextDrawImage( context, CGRectMake( 0, 0, imgSize.x, imgSize.y ), mImage );
#elif defined( CINDER_MSW )
	// Render using GDI+ on Windows
	Gdiplus::Graphics graphics( getWindow()->getDc() );
	Gdiplus::LinearGradientBrush brush( Gdiplus::Rect( 0, 0, getWindowWidth(), getWindowHeight() ),
		Gdiplus::Color( 0, 0, 0 ), Gdiplus::Color( 220, 220, 220 ), Gdiplus::LinearGradientModeVertical );
	graphics.FillRectangle( &brush, 0, 0, getWindowWidth(), getWindowHeight() );
	graphics.DrawImage( mImage,
		( getWindowWidth() - mImageSurface.getWidth() ) / 2, ( getWindowHeight() - mImageSurface.getHeight() ) / 2,
		mImageSurface.getWidth(), mImageSurface.getHeight() );
#endif
}
void Lidar::writeImage( int size )
{
	if( mPointList.size() ) {
		// RGBA with alpha enabled, so the alpha written below is preserved
		Surface8u surf = Surface8u( size, size, true, SurfaceChannelOrder::RGBA );
		long count = 0;
		list<LidarPoint*>::iterator p = mPointList.begin();
		for( ; p != mPointList.end(); ++p, ++count ) {
			// Map each point's x/z position into pixel coordinates
			Vec3f position = Vec3f( (*p)->x, (*p)->y, (*p)->z );
			int tx = int( (position.x - mMinBounds.x) / (mMaxBounds.x - mMinBounds.x) * size );
			float ty = position.y - mMinBounds.y;
			int tz = int( (position.z - mMinBounds.z) / (mMaxBounds.z - mMinBounds.z) * size );
			// Normalize time to [0,1]; float colors are expected in that range
			float time = ( (*p)->time - mTimeRange[0] ) / ( mTimeRange[1] - mTimeRange[0] );
			// Encode height offset, intensity and normalized time in the color channels
			ColorA c = ColorAT<float>( ty, (*p)->intensity, time, 1.0f );
			surf.setPixel( Vec2i( tx, tz ), c );
			//cout << tx << " " << tz << " : " << ty << endl;
		}
		cout << "BOUNDS " << mMinBounds << " " << mMaxBounds << " " << mTimeRange << " " << mMaxIntensity << endl;
		ci::writeImage( "/Users/mdunkley/Desktop/test_image.png", surf );
		cout << "IMAGE WRITTEN" << endl;
	}
}
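// LidarPoint is not defined in this snippet. From its usage above (x, y, z,
// intensity and time members, accessed through pointers), a minimal assumed
// definition would look like this; the exact field types are a guess:
struct LidarPoint {
	float x, y, z;   // position
	float intensity; // return intensity
	float time;      // acquisition timestamp
};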
// Receives color data
void MeshApp::onColorData( Surface8u surface, const DeviceOptions& deviceOptions )
{
	if ( mTextureColor ) {
		mTextureColor.update( surface, surface.getBounds() );
	} else {
		mTextureColor = gl::Texture( surface );
		mTextureColor.setWrap( GL_REPEAT, GL_REPEAT );
	}
}
void ocvPerspectiveApp::updateImage()
{
	cv::Mat input( toOcv( mInputImage ) ), output;

	// Source quad: the four corners of the input image
	cv::Point2f src[4];
	src[0] = cv::Point2f( 0, 0 );
	src[1] = cv::Point2f( mInputImage.getWidth(), 0 );
	src[2] = cv::Point2f( mInputImage.getWidth(), mInputImage.getHeight() );
	src[3] = cv::Point2f( 0, mInputImage.getHeight() );

	// Destination quad: the four user-positioned points
	cv::Point2f dst[4];
	for( int i = 0; i < 4; ++i )
		dst[i] = toOcv( mPoints[i] );

	cv::Mat warpMatrix = cv::getPerspectiveTransform( src, dst );
	cv::warpPerspective( input, output, warpMatrix, toOcv( getWindowSize() ), cv::INTER_CUBIC );
	mTexture = gl::Texture( fromOcv( output ) );
}
void Kinect::pixelToVideoSurface( Surface8u &surface, uint8_t *buffer )
{
	// Skip if a new frame is already pending
	if ( mNewVideoFrame ) {
		return;
	}

	int32_t height = surface.getHeight();
	int32_t width = surface.getWidth();
	int32_t size = width * height * 4;

	// Swap red/blue channels
	for ( int32_t i = 0; i < size; i += 4 ) {
		uint8_t b = buffer[ i ];
		buffer[ i ] = buffer[ i + 2 ];
		buffer[ i + 2 ] = b;
	}

	memcpy( surface.getData(), buffer, size );
	mNewVideoFrame = true;
}
Surface8u convertHBitmap( HBITMAP hbitmap )
{
	// create a temporary DC
	HDC hdc = ::CreateCompatibleDC( 0 );

	// determine the dimensions first
	BITMAP bm;
	::GetObject( hbitmap, sizeof(BITMAP), &bm );

	BITMAPINFO bmi;
	memset( &bmi, 0, sizeof(BITMAPINFO) );
	bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	bmi.bmiHeader.biHeight = -bm.bmHeight; // negative height requests a top-down DIB
	bmi.bmiHeader.biWidth = bm.bmWidth;
	bmi.bmiHeader.biPlanes = 1;
	bmi.bmiHeader.biBitCount = 32;
	bmi.bmiHeader.biCompression = BI_RGB;
	bmi.bmiHeader.biSizeImage = 0;
	bmi.bmiHeader.biXPelsPerMeter = 0;
	bmi.bmiHeader.biYPelsPerMeter = 0;
	bmi.bmiHeader.biClrUsed = 0;
	bmi.bmiHeader.biClrImportant = 0;

	int width = bmi.bmiHeader.biWidth;
	int height = -bmi.bmiHeader.biHeight;

	// allocate enough space to hold the result;
	// we use GlobalAlloc / GlobalFree to ensure 8-byte alignment
	DWORD dwBmpSize = ((width * 32 + 31) / 32) * 4 * height;
	uint8_t *data = reinterpret_cast<uint8_t*>( ::GlobalAlloc( GMEM_FIXED, dwBmpSize ) );
	Surface8u result = Surface8u( data, width, height, width * 4, SurfaceChannelOrder::BGRX );
	// have the Surface's destructor call ::GlobalFree on the pixel data
	result.setDeallocator( surfaceDeallocatorGlobalAlloc, data );

	if( ::GetDIBits( hdc, hbitmap, 0, height, result.getData(), &bmi, DIB_RGB_COLORS ) == 0 )
		throw std::runtime_error( "Invalid HBITMAP" ); // std::exception has no string constructor in standard C++

	// a DC created with CreateCompatibleDC must be freed with DeleteDC, not ReleaseDC
	::DeleteDC( hdc );

	return result;
}
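// surfaceDeallocatorGlobalAlloc is referenced above but not defined in this
// snippet. A minimal sketch, assuming the refcon handed to setDeallocator()
// is the GlobalAlloc'd pixel pointer (as it is in the call above):
void surfaceDeallocatorGlobalAlloc( void *refcon )
{
	::GlobalFree( reinterpret_cast<HGLOBAL>( refcon ) );
}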
Surface8u colorizeBodyIndex( const Channel8u& bodyIndexChannel )
{
	Surface8u surface;
	if ( bodyIndexChannel ) {
		surface = Surface8u( bodyIndexChannel.getWidth(), bodyIndexChannel.getHeight(), true, SurfaceChannelOrder::RGBA );
		Channel8u::ConstIter iterChannel = bodyIndexChannel.getIter();
		Surface8u::Iter iterSurface = surface.getIter();
		while ( iterChannel.line() && iterSurface.line() ) {
			while ( iterChannel.pixel() && iterSurface.pixel() ) {
				size_t index = (size_t)iterChannel.v();
				ColorA8u color( getBodyColor( index ), 0xFF );
				// Pixels that do not belong to a tracked body become transparent
				if ( index == 0 || index > BODY_COUNT ) {
					color.a = 0x00;
				}
				iterSurface.r() = color.r;
				iterSurface.g() = color.g;
				iterSurface.b() = color.b;
				iterSurface.a() = color.a;
			}
		}
	}
	return surface;
}
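// getBodyColor() is not shown here. A plausible sketch, assuming it just
// cycles a small fixed palette by body index; the specific colors are a guess:
Color8u getBodyColor( size_t index )
{
	static const Color8u palette[ 6 ] = {
		Color8u( 255, 0, 0 ), Color8u( 0, 255, 0 ), Color8u( 0, 0, 255 ),
		Color8u( 255, 255, 0 ), Color8u( 0, 255, 255 ), Color8u( 255, 0, 255 )
	};
	return palette[ index % 6 ];
}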
void cApp::setup()
{
	mPln.setSeed( 345 );
	mPln.setOctaves( 4 );

	openDir();
	fs::path path = dir / ( "f_00000.png" );
	sur = Surface8u( loadImage( path ) );

	int w = sur.getWidth();
	int h = sur.getHeight();
	pcam = CameraPersp( w, h, 50, 1, 10000 );
	camUi.setCamera( &pcam );
	mExp.setup( w, h, 0, 3000-1, GL_RGB, mt::getRenderPath(), 0 );

	setWindowSize( w*0.5, h*0.5 );
	setWindowPos( 0, 0 );

#ifdef RENDER
	mExp.startRender();
#endif
}
Texture::Texture( const Surface8u &surface, Format format )
	: mObj( shared_ptr<Obj>( new Obj( surface.getWidth(), surface.getHeight() ) ) )
{
	if( format.mInternalFormat < 0 )
		format.mInternalFormat = surface.hasAlpha() ? GL_RGBA : GL_RGB;
	mObj->mInternalFormat = format.mInternalFormat;
	mObj->mTarget = format.mTarget;

	GLint dataFormat;
	GLenum type;
	SurfaceChannelOrderToDataFormatAndType( surface.getChannelOrder(), &dataFormat, &type );

	init( surface.getData(), surface.getRowBytes() / surface.getChannelOrder().getPixelInc(), dataFormat, type, format );
}
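// SurfaceChannelOrderToDataFormatAndType() maps a Cinder channel order to a
// matching OpenGL pixel format and type. A reduced sketch of the idea -- the
// real function handles more channel orders than the three shown here:
void SurfaceChannelOrderToDataFormatAndType( const SurfaceChannelOrder &sco, GLint *dataFormat, GLenum *type )
{
	switch( sco.getCode() ) {
		case SurfaceChannelOrder::RGB:  *dataFormat = GL_RGB;  *type = GL_UNSIGNED_BYTE; break;
		case SurfaceChannelOrder::RGBA: *dataFormat = GL_RGBA; *type = GL_UNSIGNED_BYTE; break;
		case SurfaceChannelOrder::BGRA: *dataFormat = GL_BGRA; *type = GL_UNSIGNED_BYTE; break;
		default:
			throw std::runtime_error( "unsupported channel order" );
	}
}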
void SmilesApp::update()
{
	mFps = getAverageFps();

	if( mCapture && mCapture.checkNewFrame() ) {
		mSurface = mCapture.getSurface();
	}
	if( mSurface ) {
		// Convert the region of interest to a grayscale channel
		mGreyChannel = Channel( mSurface.clone( mSmileRect.getInteriorArea() ) );
		int totalDetectionPixels = mGreyChannel.getWidth() * mGreyChannel.getHeight();
		unsigned char *detectionPixels = mGreyChannel.getData();
		// Copy the pixels into the smile detector's image structure
		for( int i = 0; i < totalDetectionPixels; i++ ) {
			mRImage_pixels->array[i] = detectionPixels[i];
		}
		detectSmiles( *mRImage_pixels );
		//console() << smileThreshold << endl;
	}
}
void CubeMap::update()
{
	glBindTexture( GL_TEXTURE_CUBE_MAP_ARB, textureObject );
	int i = 0;
	// The member calls below are Cinder Capture API, so the container holds Capture objects
	for( vector<Capture>::iterator cIt = mCaptures.begin(); cIt != mCaptures.end(); ++cIt ) {
		int GLCubeDir = -1;
		if( cIt->checkNewFrame() ) {
			Surface8u surf = cIt->getSurface();
			switch( i ) {
				case 0:
					GLCubeDir = GL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB;
					glTexImage2D( GLCubeDir, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					glTexImage2D( GL_TEXTURE_CUBE_MAP_POSITIVE_Y_ARB, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					flipSurface( surf );
					glTexImage2D( GL_TEXTURE_CUBE_MAP_POSITIVE_X_ARB, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					break;
				case 1:
					GLCubeDir = GL_TEXTURE_CUBE_MAP_NEGATIVE_X_ARB;
					glTexImage2D( GLCubeDir, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					glTexImage2D( GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_ARB, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					flipSurface( surf );
					glTexImage2D( GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_ARB, 0, GL_RGBA, mWidth, mHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, surf.getData() );
					break;
				default:
					GLCubeDir = GL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB;
					break;
			}
		}
		i++;
	}
}
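// flipSurface() is referenced above but not defined in this snippet. A simple
// sketch, assuming an in-place vertical flip of a packed Surface8u is intended:
void flipSurface( Surface8u &surf )
{
	const int32_t rowBytes = surf.getRowBytes();
	const int32_t height = surf.getHeight();
	std::vector<uint8_t> tmp( rowBytes );
	uint8_t *data = surf.getData();
	for( int32_t y = 0; y < height / 2; ++y ) {
		uint8_t *top = data + y * rowBytes;
		uint8_t *bottom = data + ( height - 1 - y ) * rowBytes;
		// swap the top and bottom rows
		memcpy( tmp.data(), top, rowBytes );
		memcpy( top, bottom, rowBytes );
		memcpy( bottom, tmp.data(), rowBytes );
	}
}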
void Fluid2DCamAppApp::update()
{
	if( mCapture && mCapture.checkNewFrame() ) {
		if( ! mTexCam ) {
			mTexCam = gl::Texture( mCapture.getSurface() );
		}

		// Mirror the image horizontally, allocating the target surface on first use
		Surface8u srcImg = mCapture.getSurface();
		if( ! mFlipped ) {
			mFlipped = Surface8u( srcImg.getWidth(), srcImg.getHeight(), srcImg.hasAlpha(), srcImg.getChannelOrder() );
		}
		for( int y = 0; y < mCapture.getHeight(); ++y ) {
			// walk the source row right-to-left, the destination row left-to-right
			const Color8u* src = (const Color8u*)( srcImg.getData() + ( y + 1 )*srcImg.getRowBytes() - srcImg.getPixelInc() );
			Color8u* dst = (Color8u*)( mFlipped.getData() + y*mFlipped.getRowBytes() );
			for( int x = 0; x < mCapture.getWidth(); ++x ) {
				*dst = *src;
				++dst;
				--src;
			}
		}

		// Create scaled image
		if( ! mCurScaled ) {
			mCurScaled = Surface8u( mFlipped.getWidth()/kFlowScale, mFlipped.getHeight()/kFlowScale, mFlipped.hasAlpha(), mFlipped.getChannelOrder() );
		}
		ip::resize( mFlipped, &mCurScaled );

		// Optical flow
		if( mCurScaled && mPrvScaled ) {
			mPrvCvData = mCurCvData;
			mCurCvData = cv::Mat( toOcv( Channel( mCurScaled ) ) );

			if( mPrvCvData.data && mCurCvData.data ) {
				int pyrLvels = 3;
				int winSize = 3;
				int iters = 5;
				int poly_n = 7;
				double poly_sigma = 1.5;
				cv::calcOpticalFlowFarneback( mPrvCvData, mCurCvData, mFlow, 0.5, pyrLvels, 2*winSize + 1, iters, poly_n, poly_sigma, cv::OPTFLOW_FARNEBACK_GAUSSIAN );

				if( mFlow.data ) {
					if( mFlowVectors.empty() ) {
						mFlowVectors.resize( mCurScaled.getWidth()*mCurScaled.getHeight() );
					}

					// Collect flow vectors whose magnitude exceeds the threshold
					mNumActiveFlowVectors = 0;
					for( int j = 0; j < mCurScaled.getHeight(); ++j ) {
						for( int i = 0; i < mCurScaled.getWidth(); ++i ) {
							const float* fptr = reinterpret_cast<float*>( mFlow.data + j*mFlow.step + i*sizeof(float)*2 );
							Vec2f v = Vec2f( fptr[0], fptr[1] );
							if( v.lengthSquared() >= mVelThreshold ) {
								if( mNumActiveFlowVectors >= (int)mFlowVectors.size() ) {
									mFlowVectors.push_back( std::make_pair( Vec2i( i, j ), v ) );
								}
								else {
									mFlowVectors[mNumActiveFlowVectors] = std::make_pair( Vec2i( i, j ), v );
								}
								++mNumActiveFlowVectors;
							}
						}
					}
				}
			}
		}

		// Update texture
		mTexCam.update( mFlipped );

		// Save previous frame
		if( ! mPrvScaled ) {
			mPrvScaled = Surface8u( mCurScaled.getWidth(), mCurScaled.getHeight(), mCurScaled.hasAlpha(), mCurScaled.getChannelOrder() );
		}
		memcpy( mPrvScaled.getData(), mCurScaled.getData(), mCurScaled.getHeight()*mCurScaled.getRowBytes() );
	}

	// Update fluid: splat density and velocity at each active flow vector
	float dx = ( mFluid2DResX - 2 )/(float)( 640/kFlowScale );
	float dy = ( mFluid2DResY - 2 )/(float)( 480/kFlowScale );
	for( int i = 0; i < mNumActiveFlowVectors; ++i ) {
		Vec2f P = mFlowVectors[i].first;
		const Vec2f& v = mFlowVectors[i].second;
		mFluid2D.splatDensity( P.x*dx + 1, P.y*dy + 1, mDenScale*v.lengthSquared() );
		mFluid2D.splatVelocity( P.x*dx + 1, P.y*dy + 1, v*mVelScale );
	}
	mFluid2D.step();

	// Update velocity
	const Vec2f* srcVel0 = mFluid2D.dbgVel0().data();
	const Vec2f* srcVel1 = mFluid2D.dbgVel1().data();
	Colorf* dstVel0 = (Colorf*)mSurfVel0.getData();
	Colorf* dstVel1 = (Colorf*)mSurfVel1.getData();
	for( int j = 0; j < mFluid2DResY; ++j ) {
		for( int i = 0; i < mFluid2DResX; ++i ) {
			*dstVel0 = Colorf( srcVel0->x, srcVel0->y, 0.0f );
			*dstVel1 = Colorf( srcVel1->x, srcVel1->y, 0.0f );
			++srcVel0;
			++srcVel1;
			++dstVel0;
			++dstVel1;
		}
	}

	// Update density
	mChanDen0 = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgDen0().data() );
	mChanDen1 = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgDen1().data() );
	mTexDen0.update( mChanDen0 );
	mTexDen1.update( mChanDen1 );

	// Update velocity textures
	mTexVel0.update( mSurfVel0 );
	mTexVel1.update( mSurfVel1 );

	// Update divergence
	mChanDiv = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgDivergence().data() );
	mTexDiv.update( mChanDiv );

	// Update pressure
	mChanPrs = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgPressure().data() );
	mTexPrs.update( mChanPrs );

	// Update curl, curl length
	mChanCurl = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgCurl().data() );
	mTexCurl.update( mChanCurl );
	mChanCurlLen = Channel32f( mFluid2DResX, mFluid2DResY, mFluid2DResX*sizeof(float), 1, mFluid2D.dbgCurlLength().data() );
	mTexCurlLen.update( mChanCurlLen );
}
TextureCache::Obj::Obj( const Surface8u &prototypeSurface, const Texture::Format &format )
	: mWidth( prototypeSurface.getWidth() ), mHeight( prototypeSurface.getHeight() ),
	mFormat( format ), mNextId( 0 )
{
}
void Device::update()
{
	if ( mSensor != 0 ) {
		mSensor->get_Status( &mStatus );
	}
	if ( mFrameReader == 0 ) {
		return;
	}

	IAudioBeamFrame* audioFrame = 0;
	IBodyFrame* bodyFrame = 0;
	IBodyIndexFrame* bodyIndexFrame = 0;
	IColorFrame* colorFrame = 0;
	IDepthFrame* depthFrame = 0;
	IMultiSourceFrame* frame = 0;
	IInfraredFrame* infraredFrame = 0;
	ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

	HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

	// TODO audio
	if ( SUCCEEDED( hr ) ) {
		console() << "SUCCEEDED " << getElapsedFrames() << endl;
	}

	// Acquire a frame for each enabled stream
	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
		IBodyFrameReference* frameRef = 0;
		hr = frame->get_BodyFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
		IBodyIndexFrameReference* frameRef = 0;
		hr = frame->get_BodyIndexFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyIndexFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
		IColorFrameReference* frameRef = 0;
		hr = frame->get_ColorFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &colorFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
		IDepthFrameReference* frameRef = 0;
		hr = frame->get_DepthFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &depthFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
		IInfraredFrameReference* frameRef = 0;
		hr = frame->get_InfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
		ILongExposureInfraredFrameReference* frameRef = 0;
		hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredLongExposureFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}

	if ( SUCCEEDED( hr ) ) {
		long long time = 0L;

		// TODO audio

		IFrameDescription* bodyFrameDescription = 0;
		int32_t bodyWidth = 0;
		int32_t bodyHeight = 0;
		uint32_t bodyBufferSize = 0;
		uint8_t* bodyBuffer = 0;

		IFrameDescription* bodyIndexFrameDescription = 0;
		int32_t bodyIndexWidth = 0;
		int32_t bodyIndexHeight = 0;
		uint32_t bodyIndexBufferSize = 0;
		uint8_t* bodyIndexBuffer = 0;

		IFrameDescription* colorFrameDescription = 0;
		int32_t colorWidth = 0;
		int32_t colorHeight = 0;
		ColorImageFormat imageFormat = ColorImageFormat_None;
		uint32_t colorBufferSize = 0;
		uint8_t* colorBuffer = 0;

		IFrameDescription* depthFrameDescription = 0;
		int32_t depthWidth = 0;
		int32_t depthHeight = 0;
		uint16_t depthMinReliableDistance = 0;
		uint16_t depthMaxReliableDistance = 0;
		uint32_t depthBufferSize = 0;
		uint16_t* depthBuffer = 0;

		IFrameDescription* infraredFrameDescription = 0;
		int32_t infraredWidth = 0;
		int32_t infraredHeight = 0;
		uint32_t infraredBufferSize = 0;
		uint16_t* infraredBuffer = 0;

		IFrameDescription* infraredLongExposureFrameDescription = 0;
		int32_t infraredLongExposureWidth = 0;
		int32_t infraredLongExposureHeight = 0;
		uint32_t infraredLongExposureBufferSize = 0;
		uint16_t* infraredLongExposureBuffer = 0;

		// depthFrame is only valid when the depth stream is enabled
		if ( depthFrame != 0 ) {
			hr = depthFrame->get_RelativeTime( &time );
		}

		// TODO audio
		if ( mDeviceOptions.isAudioEnabled() ) {
		}

		// TODO body
		if ( mDeviceOptions.isBodyEnabled() ) {
		}

		if ( mDeviceOptions.isBodyIndexEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight ); }
			if ( SUCCEEDED( hr ) ) {
				//hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
			}
		}

		if ( mDeviceOptions.isColorEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_FrameDescription( &colorFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Width( &colorWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Height( &colorHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_RawColorImageFormat( &imageFormat ); }
			if ( SUCCEEDED( hr ) ) {
				bool isAllocated = false;
				SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
				if ( imageFormat == ColorImageFormat_Bgra ) {
					// Use the raw buffer directly when the native format matches
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::BGRA;
				} else if ( imageFormat == ColorImageFormat_Rgba ) {
					hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
					channelOrder = SurfaceChannelOrder::RGBA;
				} else {
					// Convert any other format to RGBA in a temporary buffer
					isAllocated = true;
					colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
					colorBuffer = new uint8_t[ colorBufferSize ];
					hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
					channelOrder = SurfaceChannelOrder::RGBA;
				}

				if ( SUCCEEDED( hr ) ) {
					colorFrame->get_RelativeTime( &time );
					Surface8u colorSurface = Surface8u( colorBuffer, colorWidth, colorHeight, colorWidth * sizeof( uint8_t ) * 4, channelOrder );
					mFrame.mSurfaceColor = Surface8u( colorWidth, colorHeight, false, channelOrder );
					mFrame.mSurfaceColor.copyFrom( colorSurface, colorSurface.getBounds() );

					console() << "Color\n\twidth: " << colorWidth
						<< "\n\theight: " << colorHeight
						<< "\n\tbuffer size: " << colorBufferSize
						<< "\n\ttime: " << time << endl;
				}

				if ( isAllocated && colorBuffer != 0 ) {
					delete[] colorBuffer;
					colorBuffer = 0;
				}
			}
		}

		if ( mDeviceOptions.isDepthEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_FrameDescription( &depthFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Width( &depthWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Height( &depthHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				Channel16u depthChannel = Channel16u( depthWidth, depthHeight, depthWidth * sizeof( uint16_t ), 1, depthBuffer );
				mFrame.mChannelDepth = Channel16u( depthWidth, depthHeight );
				mFrame.mChannelDepth.copyFrom( depthChannel, depthChannel.getBounds() );

				console() << "Depth\n\twidth: " << depthWidth << "\n\theight: " << depthHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = infraredFrame->get_FrameDescription( &infraredFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Width( &infraredWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Height( &infraredHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredChannel = Channel16u( infraredWidth, infraredHeight, infraredWidth * sizeof( uint16_t ), 1, infraredBuffer );
				mFrame.mChannelInfrared = Channel16u( infraredWidth, infraredHeight );
				mFrame.mChannelInfrared.copyFrom( infraredChannel, infraredChannel.getBounds() );

				console() << "Infrared\n\twidth: " << infraredWidth << "\n\theight: " << infraredHeight << endl;
			}
		}

		if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				Channel16u infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight, infraredLongExposureWidth * sizeof( uint16_t ), 1, infraredLongExposureBuffer );
				mFrame.mChannelInfraredLongExposure = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
				mFrame.mChannelInfraredLongExposure.copyFrom( infraredLongExposureChannel, infraredLongExposureChannel.getBounds() );

				int64_t irLongExpTime = 0;
				hr = infraredLongExposureFrame->get_RelativeTime( &irLongExpTime );

				console() << "Infrared Long Exposure\n\twidth: " << infraredLongExposureWidth << "\n\theight: " << infraredLongExposureHeight;
				if ( SUCCEEDED( hr ) ) {
					console() << "\n\ttimestamp: " << irLongExpTime;
				}
				console() << endl;
			}
		}

		if ( SUCCEEDED( hr ) ) {
			// TODO build Kinect2::Frame from buffers, data
			mFrame.mTimeStamp = time;
		}

		// Release frame descriptions
		if ( bodyFrameDescription != 0 ) { bodyFrameDescription->Release(); bodyFrameDescription = 0; }
		if ( bodyIndexFrameDescription != 0 ) { bodyIndexFrameDescription->Release(); bodyIndexFrameDescription = 0; }
		if ( colorFrameDescription != 0 ) { colorFrameDescription->Release(); colorFrameDescription = 0; }
		if ( depthFrameDescription != 0 ) { depthFrameDescription->Release(); depthFrameDescription = 0; }
		if ( infraredFrameDescription != 0 ) { infraredFrameDescription->Release(); infraredFrameDescription = 0; }
		if ( infraredLongExposureFrameDescription != 0 ) { infraredLongExposureFrameDescription->Release(); infraredLongExposureFrameDescription = 0; }
	}

	// Release frames
	if ( audioFrame != 0 ) { audioFrame->Release(); audioFrame = 0; }
	if ( bodyFrame != 0 ) { bodyFrame->Release(); bodyFrame = 0; }
	if ( bodyIndexFrame != 0 ) { bodyIndexFrame->Release(); bodyIndexFrame = 0; }
	if ( colorFrame != 0 ) { colorFrame->Release(); colorFrame = 0; }
	if ( depthFrame != 0 ) { depthFrame->Release(); depthFrame = 0; }
	if ( frame != 0 ) { frame->Release(); frame = 0; }
	if ( infraredFrame != 0 ) { infraredFrame->Release(); infraredFrame = 0; }
	if ( infraredLongExposureFrame != 0 ) { infraredLongExposureFrame->Release(); infraredLongExposureFrame = 0; }
}
void Device::update()
{
	if ( mFrameReader == 0 ) {
		return;
	}

	IAudioBeamFrame* audioFrame = 0;
	IBodyFrame* bodyFrame = 0;
	IBodyIndexFrame* bodyIndexFrame = 0;
	IColorFrame* colorFrame = 0;
	IDepthFrame* depthFrame = 0;
	IMultiSourceFrame* frame = 0;
	IInfraredFrame* infraredFrame = 0;
	ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

	HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

	if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
		// TODO audio
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
		IBodyFrameReference* frameRef = 0;
		hr = frame->get_BodyFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
		IBodyIndexFrameReference* frameRef = 0;
		hr = frame->get_BodyIndexFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyIndexFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
		IColorFrameReference* frameRef = 0;
		hr = frame->get_ColorFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &colorFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
		IDepthFrameReference* frameRef = 0;
		hr = frame->get_DepthFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &depthFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
		IInfraredFrameReference* frameRef = 0;
		hr = frame->get_InfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}
	if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
		ILongExposureInfraredFrameReference* frameRef = 0;
		hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
		if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredLongExposureFrame ); }
		if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
	}

	if ( SUCCEEDED( hr ) ) {
		long long timeStamp = 0L;

		// TODO audio

		std::vector<Body> bodies;
		int64_t bodyTime = 0L;
		IBody* kinectBodies[ BODY_COUNT ] = { 0 };
		Vec4f floorClipPlane = Vec4f::zero();

		Channel8u bodyIndexChannel;
		IFrameDescription* bodyIndexFrameDescription = 0;
		int32_t bodyIndexWidth = 0;
		int32_t bodyIndexHeight = 0;
		uint32_t bodyIndexBufferSize = 0;
		uint8_t* bodyIndexBuffer = 0;
		int64_t bodyIndexTime = 0L;

		Surface8u colorSurface;
		IFrameDescription* colorFrameDescription = 0;
		int32_t colorWidth = 0;
		int32_t colorHeight = 0;
		ColorImageFormat colorImageFormat = ColorImageFormat_None;
		uint32_t colorBufferSize = 0;
		uint8_t* colorBuffer = 0;

		Channel16u depthChannel;
		IFrameDescription* depthFrameDescription = 0;
		int32_t depthWidth = 0;
		int32_t depthHeight = 0;
		uint16_t depthMinReliableDistance = 0;
		uint16_t depthMaxReliableDistance = 0;
		uint32_t depthBufferSize = 0;
		uint16_t* depthBuffer = 0;

		Channel16u infraredChannel;
		IFrameDescription* infraredFrameDescription = 0;
		int32_t infraredWidth = 0;
		int32_t infraredHeight = 0;
		uint32_t infraredBufferSize = 0;
		uint16_t* infraredBuffer = 0;

		Channel16u infraredLongExposureChannel;
		IFrameDescription* infraredLongExposureFrameDescription = 0;
		int32_t infraredLongExposureWidth = 0;
		int32_t infraredLongExposureHeight = 0;
		uint32_t infraredLongExposureBufferSize = 0;
		uint16_t* infraredLongExposureBuffer = 0;

		// depthFrame is only valid when the depth stream is enabled
		if ( depthFrame != 0 ) {
			hr = depthFrame->get_RelativeTime( &timeStamp );
		}

		// TODO audio
		if ( mDeviceOptions.isAudioEnabled() ) {
		}

		if ( mDeviceOptions.isBodyEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = bodyFrame->get_RelativeTime( &bodyTime ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies ); }
			if ( SUCCEEDED( hr ) ) {
				Vector4 v;
				hr = bodyFrame->get_FloorClipPlane( &v );
				floorClipPlane = toVec4f( v );
			}
			if ( SUCCEEDED( hr ) ) {
				for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
					IBody* kinectBody = kinectBodies[ i ];
					if ( kinectBody != 0 ) {
						uint8_t isTracked = false;
						hr = kinectBody->get_IsTracked( &isTracked );
						if ( SUCCEEDED( hr ) && isTracked ) {
							Joint joints[ JointType_Count ];
							kinectBody->GetJoints( JointType_Count, joints );

							JointOrientation jointOrientations[ JointType_Count ];
							kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

							uint64_t id = 0;
							kinectBody->get_TrackingId( &id );

							// Collect each joint's position, orientation and tracking state
							std::map<JointType, Body::Joint> jointMap;
							for ( int32_t j = 0; j < JointType_Count; ++j ) {
								Body::Joint joint(
									toVec3f( joints[ j ].Position ),
									toQuatf( jointOrientations[ j ].Orientation ),
									joints[ j ].TrackingState );
								jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
							}
							Body body( id, i, jointMap );
							bodies.push_back( body );
						}
					}
				}
			}
		}

		if ( mDeviceOptions.isBodyIndexEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
				memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
			}
		}

		if ( mDeviceOptions.isColorEnabled() ) {
			if ( SUCCEEDED( hr ) ) {
				hr = colorFrame->get_FrameDescription( &colorFrameDescription );
				if ( SUCCEEDED( hr ) ) {
					float vFov = 0.0f;
					float hFov = 0.0f;
					float dFov = 0.0f;
					colorFrameDescription->get_VerticalFieldOfView( &vFov );
					colorFrameDescription->get_HorizontalFieldOfView( &hFov );
					colorFrameDescription->get_DiagonalFieldOfView( &dFov );
				}
			}
			if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Width( &colorWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Height( &colorHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_RawColorImageFormat( &colorImageFormat ); }
			if ( SUCCEEDED( hr ) ) {
				// Convert the native format to RGBA in a temporary buffer
				colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
				colorBuffer = new uint8_t[ colorBufferSize ];
				hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
				if ( SUCCEEDED( hr ) ) {
					colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
					memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
				}
				delete [] colorBuffer;
				colorBuffer = 0;
			}
		}

		if ( mDeviceOptions.isDepthEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_FrameDescription( &depthFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Width( &depthWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Height( &depthHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance ); }
			if ( SUCCEEDED( hr ) ) { hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				depthChannel = Channel16u( depthWidth, depthHeight );
				memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
			}
		}

		if ( mDeviceOptions.isInfraredEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = infraredFrame->get_FrameDescription( &infraredFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Width( &infraredWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Height( &infraredHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				infraredChannel = Channel16u( infraredWidth, infraredHeight );
				memcpy( infraredChannel.getData(), infraredBuffer, infraredWidth * infraredHeight * sizeof( uint16_t ) );
			}
		}

		if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight ); }
			if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer ); }
			if ( SUCCEEDED( hr ) ) {
				infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
				memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
			}
		}

		if ( SUCCEEDED( hr ) ) {
			// Publish the assembled frame
			mFrame.mBodies = bodies;
			mFrame.mChannelBodyIndex = bodyIndexChannel;
			mFrame.mChannelDepth = depthChannel;
			mFrame.mChannelInfrared = infraredChannel;
			mFrame.mChannelInfraredLongExposure = infraredLongExposureChannel;
			mFrame.mDeviceId = mDeviceOptions.getDeviceId();
			mFrame.mSurfaceColor = colorSurface;
			mFrame.mTimeStamp = timeStamp;
			mFrame.mFloorClipPlane = floorClipPlane;
		}

		// Release frame descriptions
		if ( bodyIndexFrameDescription != 0 ) { bodyIndexFrameDescription->Release(); bodyIndexFrameDescription = 0; }
		if ( colorFrameDescription != 0 ) { colorFrameDescription->Release(); colorFrameDescription = 0; }
		if ( depthFrameDescription != 0 ) { depthFrameDescription->Release(); depthFrameDescription = 0; }
		if ( infraredFrameDescription != 0 ) { infraredFrameDescription->Release(); infraredFrameDescription = 0; }
		if ( infraredLongExposureFrameDescription != 0 ) { infraredLongExposureFrameDescription->Release(); infraredLongExposureFrameDescription = 0; }
	}

	// Release frames
	if ( audioFrame != 0 ) { audioFrame->Release(); audioFrame = 0; }
	if ( bodyFrame != 0 ) { bodyFrame->Release(); bodyFrame = 0; }
	if ( bodyIndexFrame != 0 ) { bodyIndexFrame->Release(); bodyIndexFrame = 0; }
	if ( colorFrame != 0 ) { colorFrame->Release(); colorFrame = 0; }
	if ( depthFrame != 0 ) { depthFrame->Release(); depthFrame = 0; }
	if ( frame != 0 ) { frame->Release(); frame = 0; }
	if ( infraredFrame != 0 ) { infraredFrame->Release(); infraredFrame = 0; }
	if ( infraredLongExposureFrame != 0 ) { infraredLongExposureFrame->Release(); infraredLongExposureFrame = 0; }
}
void cApp::makeFeature()
{
	// Load the frame image for the current frame number
	stringstream fst;
	fst << setfill('0') << setw(5) << frame;
	fs::path path = dir / ( "f_" + fst.str() + ".png" );
	sur = Surface8u( loadImage( path ) );

	// Preprocess: grayscale, histogram equalization, Canny edges
	cv::Mat input( toOcv( sur ) );
	//cv::blur( input, input, cv::Size(3,3) );
	cv::cvtColor( input, input, CV_RGB2GRAY );
	cv::equalizeHist( input, input );
	cv::Canny( input, input, 10, 10, 3, false );
	//sur = fromOcv( input );

	// Detect feature points
	//auto detector = cv::BRISK::create( 10, 1, 1.f );
	auto detector = cv::ORB::create( 100, 1.2f, 16, 0, 0, 4 );
	//auto detector = cv::MSER::create( 5, 60, 14400 );
	//auto detector = cv::FastFeatureDetector::create( 30, true, cv::FastFeatureDetector::TYPE_9_16 );
	detector->detect( input, key );

	{
		// One point per keypoint, colored from the source image
		vbo.resetPos();
		vbo.resetCol();
		vbo.resetVbo();
		for( int i = 0; i < key.size(); i++ ) {
			cv::Point2f &p = key[i].pt;
			vbo.addPos( vec3( p.x, p.y, 0 ) );
			const ColorAf &c = sur.getPixel( vec2( p.x, p.y ) );
			vbo.addCol( ColorAf( 0.8f - c.r*1.2f, 0.8f - c.g*1.2f, 0.8f - c.b*1.2f, 0.3f ) );
		}
		vbo.init( GL_POINTS );

		if( 0 ) {
			// Random pairing: connect random feature pairs within a distance band
			nline.resetPos();
			nline.resetCol();
			nline.resetVbo();
			const vector<vec3> &inpos = vbo.getPos();
			const vector<ColorAf> &incol = vbo.getCol();
			for( int i = 0; i < 5000; i++ ) {
				int id1 = randInt( 0, inpos.size() );
				int id2 = randInt( 0, inpos.size() );
				const vec3 &p1 = inpos[id1];
				const vec3 &p2 = inpos[id2];
				float dist = glm::distance( p1, p2 );
				if( 10 < dist && dist < 1300 ) {
					const ColorAf &c1 = incol[id1];
					const ColorAf &c2 = incol[id2];
					nline.addPos( p1 );
					nline.addPos( p2 );
					nline.addCol( c1 );
					nline.addCol( c2 );
				}
			}
			nline.init( GL_LINES );
		}

		if( 1 ) {
			// Nearest-neighbor pairing via TbbNpFinder
			int num_line = 3;
			int num_dupl = 1;
			int size = key.size();
			float nlimit = 10;
			float flimit = 1000/2;
			const vector<vec3> &inpos = vbo.getPos();
			const vector<ColorAf> &incol = vbo.getCol();
			vector<vec3> outpos( size*(num_line*num_dupl)*2 );
			vector<ColorAf> outcol( size*(num_line*num_dupl)*2 );
			TbbNpFinder np;
			np.findNearestPoints( &inpos[0], &outpos[0], &incol[0], &outcol[0], size, num_line, num_dupl, nlimit, flimit );
			nline.resetPos();
			nline.resetCol();
			nline.resetVbo();
			nline.addPos( outpos );
			nline.addCol( outcol );
			nline.init( GL_LINES );
		}
	}
}
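// TbbNpFinder is an external helper not included in this snippet. Ignoring the
// TBB parallelism its name implies, here is a serial sketch of the assumed
// contract: for each input point, emit line-segment endpoint pairs (position
// and color) to its num_line nearest neighbors whose distance lies in
// (nLimit, fLimit), repeated num_dupl times. Requires <vector> and <algorithm>.
class NpFinderSketch {
public:
	void findNearestPoints( const vec3 *inPos, vec3 *outPos, const ColorAf *inCol, ColorAf *outCol,
							int size, int numLine, int numDupl, float nLimit, float fLimit )
	{
		int out = 0;
		for( int d = 0; d < numDupl; ++d ) {
			for( int i = 0; i < size; ++i ) {
				// collect candidate neighbors within the distance band
				std::vector<std::pair<float,int>> cand;
				for( int j = 0; j < size; ++j ) {
					if( j == i ) continue;
					float dist = glm::distance( inPos[i], inPos[j] );
					if( nLimit < dist && dist < fLimit )
						cand.push_back( std::make_pair( dist, j ) );
				}
				// keep the numLine closest candidates
				size_t keep = std::min( (size_t)numLine, cand.size() );
				std::partial_sort( cand.begin(), cand.begin() + keep, cand.end() );
				for( size_t k = 0; k < keep; ++k ) {
					int j = cand[k].second;
					outPos[out] = inPos[i]; outCol[out] = inCol[i]; ++out;
					outPos[out] = inPos[j]; outCol[out] = inCol[j]; ++out;
				}
			}
		}
	}
};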