void FxApp::draw()
{
    static gl::Texture source;

    gl::clear( Color::black() );

    bool isNewFrame = mCapture && mCapture.checkNewFrame();
    if( isNewFrame ) {
        source = gl::Texture( mCapture.getSurface() );
    }

    gl::setMatricesWindow( getWindowSize() );
    gl::setViewport( getWindowBounds() );

    if( isNewFrame )
        source = mEffects[ mCurrentEffect ]->process( source );

    if( source ) {
        gl::draw( source, Area::proportionalFit( source.getBounds(), getWindowBounds(), true, true ) );
    }

    params::InterfaceGl::draw();
}
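FxApp::draw() hands each new camera frame to an effect object before drawing it. A minimal sketch of the interface that usage implies is below; the Effect base class, its name, and the process() signature are assumptions, not taken from the original source.

class Effect {
  public:
    virtual ~Effect() {}

    // Receives the latest camera texture and returns the processed result.
    // In FxApp, mEffects would be something like a std::vector of Effect
    // pointers indexed by mCurrentEffect (also an assumption).
    virtual gl::Texture process( const gl::Texture &input ) = 0;
};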
void PhotoBoothApp::update()
{
    if( mCurrentState == STATE_COUNT_DOWN ) {
        mCountDownNumber     = int( getElapsedSeconds() - mCountDownStartTime );
        mCountDownFractional = ( getElapsedSeconds() - mCountDownStartTime ) - int( getElapsedSeconds() - mCountDownStartTime );

        // check to see if the count-down has hit the end.
        if( mCountDownNumber == mNumberTextures.size() ) {
            mCameraFlash       = 1;
            mDarkBgAlpha       = 1;
            mCurrentState      = STATE_ACCEPT;
            mPreviewTexturePos = Vec2f( 0, 0 );
            mPreviewTexture    = gl::Texture( mCameraTexture );

            timeline().apply( &mCameraFlash, 0.0f, 0.8f, EaseOutCubic() );

            // Show buttons
            timeline().apply( &mDiscardPos, Vec2f( 100, height - 200 ), 1.0f, EaseOutQuint() ).delay( 0.25f );
            timeline().apply( &mSavePos, Vec2f( width - 700, height - 200 ), 1.0f, EaseOutQuint() ).delay( 0.50f );
        }
    }

    // don't update the camera texture after the snapshot has been taken.
    if( mCapture && mCapture.checkNewFrame() && ( mCurrentState != STATE_ACCEPT ) ) {
        mCameraSurface = mCapture.getSurface();
        mCameraTexture = gl::Texture( mCapture.getSurface() );
    }
}
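The count-down math above measures elapsed time against mCountDownStartTime. A hedged sketch of how that state might be entered elsewhere in the app follows; the startCountDown() helper is hypothetical, while the member names come from the snippet above.

void PhotoBoothApp::startCountDown()
{
    // update() derives mCountDownNumber from the time elapsed since this call
    mCountDownStartTime = getElapsedSeconds();
    mCountDownNumber    = 0;
    mCurrentState       = STATE_COUNT_DOWN;
}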
void shaderExternalFileExampleApp::update()
{
    if( mCapture && mCapture.checkNewFrame() ) {
        mTexture = gl::Texture( mCapture.getSurface() );
        mTexture.setWrap( GL_CLAMP, GL_CLAMP );
        mTexture.setMinFilter( GL_NEAREST );
        mTexture.setMagFilter( GL_NEAREST );
    }
}
void ocvFaceDetectApp::update()
{
    if( mCapture.checkNewFrame() ) {
        Surface surface = mCapture.getSurface();
        mCameraTexture = gl::Texture( surface );
        updateFaces( surface );
    }
}
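The updateFaces() call above is not shown in this snippet. A minimal sketch of what it could look like using OpenCV's cascade classifier follows; the mFaceCascade and mFaces members are assumptions, while detectMultiScale is the standard OpenCV call.

void ocvFaceDetectApp::updateFaces( const Surface &cameraImage )
{
    // detect on a grayscale copy of the frame
    cv::Mat grayImage( toOcv( Channel( cameraImage ) ) );

    std::vector<cv::Rect> faces;
    mFaceCascade.detectMultiScale( grayImage, faces );    // assumed cv::CascadeClassifier member

    mFaces.clear();                                       // assumed std::vector<Rectf> member
    for( const cv::Rect &face : faces )
        mFaces.push_back( Rectf( face.x, face.y, face.x + face.width, face.y + face.height ) );
}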
void TellThatToMyCamera_v1_0App::update()
{
    if( mCapture.checkNewFrame() ) {
        Surface surface = mCapture.getSurface();
        mCameraTexture = gl::Texture( surface );
        updateExpressions( surface );
    }
}
void RotatingCubeApp::update()
{
    if( mCapture && mCapture.checkNewFrame() )
        mTexture = gl::Texture2d::create( *mCapture.getSurface(), gl::Texture2d::Format().loadTopDown() );

    // Rotate the cube by .03 radians around an arbitrary axis
    mCubeRotation *= rotate( 0.03f, vec3( 1 ) );
}
void ___PACKAGENAMEASIDENTIFIER___App::update()
{
    if( mCapture && mCapture.checkNewFrame() ) {
        cv::Mat input( toOcv( mCapture.getSurface() ) ), output;
        cv::Sobel( input, output, CV_8U, 1, 0 );
        mTexture = gl::Texture( fromOcv( output ) );
    }
}
void update()
{
    if( mCapture->checkNewFrame() ) {
        // replace the previous frame's texture with one built from the new surface
        delete mTexture;
        mTexture = new gl::Texture( mCapture->getSurface() );
    }

    if( ! mPaused )
        dynamicsWorld->stepSimulation( 1.0f, 10 );
}
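The manual delete/new cycle above works but is easy to leak. A sketch of the same update using Cinder's shared-pointer texture instead, assuming a Cinder version that provides gl::TextureRef and gl::Texture::create() and a Capture whose getSurface() returns a Surface by value; mTextureRef is a hypothetical member.

void update()
{
    if( mCapture->checkNewFrame() ) {
        // gl::TextureRef is reference-counted, so the old texture is released automatically
        mTextureRef = gl::Texture::create( mCapture->getSurface() );
    }

    if( ! mPaused )
        dynamicsWorld->stepSimulation( 1.0f, 10 );
}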
void ICPApp::update()
{
    if( mCapture.checkNewFrame() ) {
        Surface surface = mCapture.getSurface();
        mCameraTexture = gl::Texture( surface );
        updateExpressions( surface );

        // FOR TESTING PURPOSES
        // mTexture = gl::Texture( mSurf );
        // updateExpressions( mSurf );
    }
}
void ocvCaptureApp::update()
{
    if( mCap && mCap.checkNewFrame() ) {
        cv::Mat input( toOcv( mCap.getSurface() ) ), output;

        // cv::threshold( input, output, 128, 255, CV_8U );
        cv::Sobel( input, output, CV_8U, 1, 0 );
        // cv::Laplacian( input, output, CV_8U );
        // cv::circle( output, toOcv( Vec2f( 200, 200 ) ), 300, toOcv( Color( 0, 0.5f, 1 ) ), -1 );
        // cv::line( output, cv::Point( 1, 1 ), cv::Point( 30, 30 ), toOcv( Color( 1, 0.5f, 0 ) ) );

        mTexture = gl::Texture( fromOcv( output ) );
    }
}
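Any of the commented-out filters above can be swapped in for the Sobel call. As one more hedged variation that is not part of the original sample, edge detection with cv::Canny works the same way if the frame is first reduced to a single grayscale channel:

if( mCap && mCap.checkNewFrame() ) {
    // Channel() converts the color surface to grayscale before handing it to OpenCV
    cv::Mat input( toOcv( Channel( mCap.getSurface() ) ) ), output;
    cv::Canny( input, output, 50, 150 );    // thresholds are illustrative
    mTexture = gl::Texture( fromOcv( output ) );
}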
void SmilesApp::update()
{
    mFps = getAverageFps();

    if( mCapture && mCapture.checkNewFrame() ) {
        mSurface = mCapture.getSurface();
    }

    if( mSurface ) {
        mGreyChannel = Channel( mSurface.clone( mSmileRect.getInteriorArea() ) );

        // copy the grayscale pixels into the detector's image buffer
        // (assumes the Channel is tightly packed, i.e. no row padding)
        int totalDetectionPixels = mGreyChannel.getWidth() * mGreyChannel.getHeight();
        unsigned char *detectionPixels = mGreyChannel.getData();
        for( int i = 0; i < totalDetectionPixels; i++ ) {
            mRImage_pixels->array[i] = detectionPixels[i];
        }

        detectSmiles( *mRImage_pixels );
        //console() << smileThreshold << endl;
    }
}
void PaintingBeingsApp::captureCamera()
{
    if( _launchAlgoGen ) {
        _launchAlgoGen = false;
        setStop();
    }

    if( _capture && _capture.checkNewFrame() ) {
        _image.setImage( _capture.getSurface() );
        _algoGen.setup( _image.getMiniatureSurface() );
        _imageBeing.setup( _image.getMiniatureSize() );

        resetCamera();

        _launchAlgoGen  = true;
        _showImageBeing = false;

        updateInterface( true );
        setPlay();
    }
}
void ScreenShadersApp::update()
{
    if( mCapture && mCapture.checkNewFrame() ) {
        mTexture = gl::Texture( mCapture.getSurface() );
    }
}
void LEDCamApp::update()
{
    if( mCapture && mCapture.checkNewFrame() )
        mTexture = gl::Texture( mCapture.getSurface() );
}
void CaptureApp::update()
{
    if( mCapture && mCapture.checkNewFrame() ) {
        mTexture = gl::Texture( mCapture.getSurface() );
    }
}
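All of these update() snippets assume a Capture that was created and started earlier. A minimal sketch of the matching 0.8.x-style setup() follows, with an illustrative 640x480 request; the Capture constructor, start(), and console() are standard Cinder calls, while the error handling shown is an assumption.

void CaptureApp::setup()
{
    try {
        mCapture = Capture( 640, 480 );    // ask the default device for a 640x480 stream
        mCapture.start();
    }
    catch( ... ) {
        console() << "Failed to initialize the capture device" << std::endl;
    }
}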
void HexagonMirrorApp::update()
{
    // update webcam image
    if( mWebcam.checkNewFrame() )
        mWebcamTexture = gl::Texture( mWebcam.getSurface() );
}
void BoidsApp::update()
{
    double deltaT = lastFrameTime - getElapsedSeconds(); // silly variable names, but let's hope it works

    if( isFullScreen() != shouldBeFullscreen ) {
        setFullScreen( shouldBeFullscreen );
    }

    mEye = Vec3f( 0.0f, 0.0f, mCameraDistance );
    mCam.lookAt( mEye, mCenter, mUp );
    gl::setMatrices( mCam );
    gl::rotate( mSceneRotation );

    // image CODE
    //gl::enableAlphaBlending();
    //glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    //gl::enableAdditiveBlending();
    //glEnable( GL_BLEND );
    //glEnable( GL_TEXTURE_2D );
    //gl::enableDepthWrite( true );
    //glBlendFunc( GL_ONE, GL_SRC_ALPHA );

    if( checkTime() ) {
        // get the next boid ruleset.
        int boidRuleToUse = ( currentBoidRuleNumber++ % boidRulesets.size() );
        BoidSysPair thisPair = boidRulesets[boidRuleToUse];

        flock_one.zoneRadius       = thisPair.flockOneProps.zoneRadius;
        flock_one.lowerThresh      = thisPair.flockOneProps.lowerThresh;
        flock_one.higherThresh     = thisPair.flockOneProps.higherThresh;
        flock_one.attractStrength  = thisPair.flockOneProps.attractStrength;
        flock_one.repelStrength    = thisPair.flockOneProps.repelStrength;
        flock_one.orientStrength   = thisPair.flockOneProps.orientStrength;
        flock_one.silThresh        = thisPair.flockOneProps.silThresh;
        flock_one.silRepelStrength = thisPair.flockOneProps.silRepelStrength;
        flock_one.gravity          = thisPair.flockOneProps.gravity;
        flock_one.setColor( thisPair.flockOneProps.baseColor );

        flock_two.zoneRadius       = thisPair.flockTwoProps.zoneRadius;
        flock_two.lowerThresh      = thisPair.flockTwoProps.lowerThresh;
        flock_two.higherThresh     = thisPair.flockTwoProps.higherThresh;
        flock_two.attractStrength  = thisPair.flockTwoProps.attractStrength;
        flock_two.repelStrength    = thisPair.flockTwoProps.repelStrength;
        flock_two.orientStrength   = thisPair.flockTwoProps.orientStrength;
        flock_two.silThresh        = thisPair.flockTwoProps.silThresh;
        flock_two.silRepelStrength = thisPair.flockTwoProps.silRepelStrength;
        flock_two.gravity          = thisPair.flockTwoProps.gravity;
        flock_two.setColor( thisPair.flockTwoProps.baseColor );

        imageColor = thisPair.imageColor;
    }

    // OpenCV IO
    // Only do OpenCV business if capture device is open and a new frame is ready
    if( capture && capture.checkNewFrame() ) {
        polygons->clear();
        ci::Surface captureSurface = capture.getSurface();
        ci::Surface outputSurface = captureSurface;
        silhouetteDetector->processSurface( &captureSurface, polygons, &outputSurface ); // this only works because processSurface doesn't retain either pointer

        texture = outputSurface;

        flock_one.applySilhouetteToBoids( polygons, &imageToScreenMap );
        flock_two.applySilhouetteToBoids( polygons, &imageToScreenMap );
    }

    flock_one.applyForceToBoids();
    if( flock_one.centralGravity )
        flock_one.pullToCenter( mCenter );
    flock_one.update( deltaT, getElapsedSeconds() );

    flock_two.applyForceToBoids();
    if( flock_two.centralGravity )
        flock_two.pullToCenter( mCenter );
    flock_two.update( deltaT, getElapsedSeconds() );
}
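The rule-swapping block in BoidsApp::update() reads a BoidSysPair out of boidRulesets. A hedged sketch of what that struct could look like follows; the field names are taken from the assignments above, but the struct definitions themselves are assumptions.

struct FlockProps {
    float     zoneRadius, lowerThresh, higherThresh;
    float     attractStrength, repelStrength, orientStrength;
    float     silThresh, silRepelStrength, gravity;
    ci::Color baseColor;    // whatever type Flock::setColor() actually expects
};

struct BoidSysPair {
    FlockProps flockOneProps;
    FlockProps flockTwoProps;
    ci::Color  imageColor;
};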
void ___PACKAGENAMEASIDENTIFIER___App::update()
{
    if( mCapture && mCapture.checkNewFrame() )
        mTexture = gl::Texture( mCapture.getSurface() );
}
void motionHistAdvApp::update()
{
    if( !mCapture.checkNewFrame() ) {
        return; // nothing to do
    }

    float secs = getElapsedSeconds(); // always make sure that all the methods have the exact same time.

    mBoundingRects.clear();
    mAngles.clear();

    mCaptureSurface = mCapture.getSurface();
    mCaptureTex = gl::Texture( mCaptureSurface );

    mPrevFrame = mCurrentFrame;
    mCurrentFrame = toOcv( Channel( mCaptureSurface ) );
    if( mPrevFrame.size().width == 0 ) {
        return;
    }

    cv::absdiff( mCurrentFrame, mPrevFrame, mInput );
    cv::threshold( mInput, mInput, 20, 1, cv::THRESH_BINARY );
    cv::updateMotionHistory( mInput, mHistory, secs, MHI_DURATION );
    cv::convertScaleAbs( mHistory, mMask, 255. / MHI_DURATION, ( MHI_DURATION - secs ) * 255.0 / MHI_DURATION );

    // find the motion gradient
    cv::calcMotionGradient( mHistory, mMask, mOrientation, MAX_TIME_DELTA, MIN_TIME_DELTA, 3 );

    // find the motion segments
    vector<cv::Rect> tmpRects;
    cv::segmentMotion( mHistory, mMotionMask, tmpRects, secs, MIN_TIME_DELTA );

    if( tmpRects.size() == 0 ) {
        mAngles.push_back( cv::calcGlobalOrientation( mOrientation, mMask, mHistory, secs, MHI_DURATION ) );
    }
    else {
        for( int i = 0; i < tmpRects.size(); i++ ) {
            if( tmpRects[i].area() > 200 ) {
                cv::Mat mroi( mMask, tmpRects[i] );
                cv::Mat oroi( mOrientation, tmpRects[i] );
                cv::Mat hroi( mHistory, tmpRects[i] );

                double a = cv::calcGlobalOrientation( oroi, mroi, hroi, secs, MAX_TIME_DELTA );
                mAngles.push_back( a );
                mBoundingRects.push_back( tmpRects[i] );

                Rectf r = fromOcv( tmpRects[i] );
                Vec2f pos = r.getCenter();
                float xp = cos( toRadians( mAngles.back() ) );
                float yp = sin( toRadians( mAngles.back() ) );
                Vec2f vel = Vec2f( xp, yp );

                Area area = Area( r );
                area.expand( area.getWidth() / 3, area.getHeight() / 3 );
                Rectf rect = r - pos;

                Surface surf = mCaptureSurface.clone( area );
                mQuads.push_back( Quad( pos, vel, surf, rect ) );
            }
        }
    }

    mHistoryTex.update( Surface( fromOcv( mHistory ) ) );
    mMotionMaskTex.update( Surface( fromOcv( mMotionMask ) ) );
    mDiffTex.update( Surface( fromOcv( mInput ) ) );
    mOrientTex.update( Surface( fromOcv( mOrientation ) ) );

    for( vector<Quad>::iterator it = mQuads.begin(); it != mQuads.end(); ) {
        if( it->isDead() ) {
            it = mQuads.erase( it );
        }
        else {
            it->update();
            ++it;
        }
    }
}
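This update() leans on a few constants and pre-allocated buffers that are not shown. A minimal sketch of what they might be, with illustrative values in the spirit of OpenCV's classic motion-template demo; the exact values, sizes, and setup() contents in this app may differ.

const double MHI_DURATION   = 1.0;   // seconds of motion history to keep
const double MAX_TIME_DELTA = 0.5;
const double MIN_TIME_DELTA = 0.05;

void motionHistAdvApp::setup()
{
    mCapture = Capture( 640, 480 );  // assumed capture size
    mCapture.start();

    // single-channel float buffers for updateMotionHistory / calcMotionGradient
    mHistory     = cv::Mat::zeros( 480, 640, CV_32FC1 );
    mOrientation = cv::Mat::zeros( 480, 640, CV_32FC1 );
    mMask        = cv::Mat::zeros( 480, 640, CV_8UC1 );
    mMotionMask  = cv::Mat::zeros( 480, 640, CV_32FC1 );

    // textures updated in place each frame need to be allocated up front
    mHistoryTex    = gl::Texture( 640, 480 );
    mMotionMaskTex = gl::Texture( 640, 480 );
    mDiffTex       = gl::Texture( 640, 480 );
    mOrientTex     = gl::Texture( 640, 480 );
}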