void MultipleKinectsApp::update()
{
	// Update textures
	if( mDevice0 ) {
		mColorTex0.update( getColorImage( mDevice0 ) );
		mDepthTex0.update( getDepthImage( mDevice0 ) );
	}
	if( mDevice1 ) {
		mColorTex1.update( getColorImage( mDevice1 ) );
		mDepthTex1.update( getDepthImage( mDevice1 ) );
	}
}
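// The getColorImage()/getDepthImage() helpers above are not shown in this snippet.
// A minimal sketch of what they might look like, assuming the Cinder-Kinect block
// (the KinectRef alias and the helper names themselves are assumptions here, not
// part of the sample):
Surface getColorImage( KinectRef &device )
{
	// a Surface can be constructed directly from the ImageSourceRef the device returns
	return Surface( device->getVideoImage() );
}

Surface getDepthImage( KinectRef &device )
{
	return Surface( device->getDepthImage() );
}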
void MilluminApp::setup()
{
	listener.setup( 5001 );
	host = "127.0.0.1";
	port = 5000;
	sender.setup( host, port );

	mTex = gl::Texture( 200, 100 );          // create our texture to publish
	mSurface = Surface8u( 200, 100, false ); // create a surface to manipulate
	randomizeSurface( &mSurface );           // randomize our surface
	mTex.update( mSurface );                 // tell the texture about our changes

	archimedes.set( 100.f, 0.6f );           // set up and calculate our spiral
	archimedes.calc();
	mRot = 0.f;

	mScreenSyphon.setName( "Cinder Screen" ); // set a name for each item to be published
	mTextureSyphon.setName( "Cinder Texture" );

	// in order for this to work, you must run Simple Server from the testapps directory;
	// any other Syphon item you create would work as well, just change the name
	mClientSyphon.setup();
	mClientSyphon.setApplicationName( "Simple Server" );
	mClientSyphon.setServerName( "" );
	mClientSyphon.bind();
}
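// The two servers named in setup() only publish once draw() hands them pixels. A
// sketch of the draw() side that would complete this setup, assuming the
// Cinder-Syphon block's publishScreen()/publishTexture() methods; treat the exact
// calls as assumptions if your version of the block differs:
void MilluminApp::draw()
{
	gl::clear( Color( 0.1f, 0.1f, 0.1f ) );
	gl::draw( mTex );                        // show the texture we are publishing

	mScreenSyphon.publishScreen();           // publish the whole window
	mTextureSyphon.publishTexture( &mTex );  // publish just our texture
}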
void rayMarcherApp::update()
{
	if( mCurrentLine < getWindowHeight() ) {
		// render a single scanline, then upload just that one-pixel-tall row to the texture
		mMarcher.renderScanline( mCurrentLine, mImageSurface.get() );
		mImageTexture.update( *mImageSurface, Area( 0, mCurrentLine, mImageSurface->getWidth(), mCurrentLine + 1 ) );
		mCurrentLine++;
	}
}
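// Because update() touches only one row per frame via the Area overload of
// Texture::update(), the matching draw() can simply blit the texture and the
// rendering appears to wipe down the window as scanlines finish. A minimal
// sketch (gl::draw and getWindowBounds() are standard Cinder; nothing else is assumed):
void rayMarcherApp::draw()
{
	gl::clear( Color::black() );
	gl::draw( mImageTexture, getWindowBounds() );
}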
void MilluminApp::update()
{
	while( listener.hasWaitingMessages() ) {
		osc::Message message;
		listener.getNextMessage( &message );

		console() << "New message received" << std::endl;
		console() << "Address: " << message.getAddress() << std::endl;
		console() << "Num Args: " << message.getNumArgs() << std::endl;
		for( int i = 0; i < message.getNumArgs(); i++ ) {
			console() << "-- Argument " << i << std::endl;
			console() << "---- type: " << message.getArgTypeName( i ) << std::endl;
			if( message.getArgType( i ) == osc::TYPE_INT32 ) {
				try {
					console() << "------ value: " << message.getArgAsInt32( i ) << std::endl;
				}
				catch( int value ) {
					console() << "------ value through exception: " << value << std::endl;
				}
			}
			else if( message.getArgType( i ) == osc::TYPE_FLOAT ) {
				try {
					console() << "------ value: " << message.getArgAsFloat( i ) << std::endl;
				}
				catch( float value ) {
					console() << "------ value through exception: " << value << std::endl;
				}
				mRot += 1.f;
			}
			else if( message.getArgType( i ) == osc::TYPE_STRING ) {
				try {
					console() << "------ value: " << message.getArgAsString( i ).c_str() << std::endl;
				}
				catch( std::string str ) {
					console() << "------ value through exception: " << str << std::endl;
				}
			}
		}
	}

	// animate the opacity of the selected Millumin layer with a cosine wave
	osc::Message message;
	message.addFloatArg( ( cos( getElapsedSeconds() ) / 2.0f + 0.5f ) * 100.f );
	message.setAddress( "/millumin/selectedLayer/opacity" );
	message.setRemoteEndpoint( host, port );
	sender.sendMessage( message );

	if( getElapsedFrames() % 2 == 0 ) // for those of us with slower computers
		randomizeSurface( &mSurface );
	mTex.update( mSurface );
	mRot += 0.2f;
}
bool CameraController::copy_frame_to_texture(gl::Texture &tex)
{
#if !defined(KINSKI_ARM)
    if(m_impl)
    {
        auto buf = m_impl->m_gst_util.new_buffer();

        if(buf)
        {
            int width = m_impl->m_gst_util.video_info().width;
            int height = m_impl->m_gst_util.video_info().height;

            // map the buffer for reading
            gst_buffer_map(buf.get(), &m_impl->m_memory_map_info, GST_MAP_READ);
            uint8_t *buf_data = m_impl->m_memory_map_info.data;
            size_t num_bytes = m_impl->m_memory_map_info.size;

            // resize the pixel buffer object if the frame size changed
            if(m_impl->m_buffer_front.num_bytes() != num_bytes)
            {
                m_impl->m_buffer_front.set_data(nullptr, num_bytes);
            }

            // copy the frame into the front pbo, then release the GStreamer buffer
            uint8_t *ptr = m_impl->m_buffer_front.map();
            memcpy(ptr, buf_data, num_bytes);
            m_impl->m_buffer_front.unmap();
            gst_buffer_unmap(buf.get(), &m_impl->m_memory_map_info);

            // bind pbo and schedule texture upload
            m_impl->m_buffer_front.bind();
            tex.update(nullptr, GL_UNSIGNED_BYTE, GL_RGB, width, height, true);
            m_impl->m_buffer_front.unbind();

            // ping pong our pbos
            std::swap(m_impl->m_buffer_front, m_impl->m_buffer_back);
            return true;
        }
    }
#endif
    return false;
}
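// The pbo ping-pong above hides upload latency: with a pixel-unpack buffer bound,
// the texture update returns immediately and the driver pulls from the buffer
// asynchronously, while the other buffer is already accepting the next frame. A
// generic raw-OpenGL sketch of the same idea (kinski's Buffer wraps roughly these
// calls; the code below is plain GL, not the kinski API):
GLuint pbos[2]; // created elsewhere with glGenBuffers(2, pbos)
int front = 0;

void upload_frame(GLuint tex, const uint8_t *pixels, int w, int h)
{
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbos[front]);

    // orphan the old storage and stream in the new frame
    glBufferData(GL_PIXEL_UNPACK_BUFFER, w * h * 3, pixels, GL_STREAM_DRAW);

    // with a PBO bound, the data pointer is an offset into the buffer (here: 0)
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGB, GL_UNSIGNED_BYTE, nullptr);

    glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
    front = 1 - front; // ping pong
}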
void ARTestApp::update()
{
	ARMarkerInfo *marker_info;  // pointer to array holding the details of detected markers
	int marker_num;             // count of detected markers
	int j, k;

	// Grab a video frame.
#if defined( USE_AR_VIDEO )
	ARUint8 *image;
	if( ( image = arVideoGetImage() ) != NULL ) {
#else
	if( mCapture->checkNewFrame() ) {
#endif

#if defined( USE_AR_VIDEO )
		gARTImage = image; // save the fetched image
		mTexture->enableAndBind();
#else
		const fli::Surface8u &surface( mCapture->getSurface() );
		mTexture->update( surface );
		gARTImage = const_cast<uint8_t*>( surface.getData() );
#endif
		gCallCountMarkerDetect++; // increment ARToolKit FPS counter

		// Detect the markers in the video frame.
		if( arDetectMarker( gARTImage, gARTThreshhold, &marker_info, &marker_num ) < 0 ) {
			exit( -1 );
		}

		// check for known patterns
		for( int i = 0; i < objectnum; i++ ) {
			k = -1;
			for( j = 0; j < marker_num; j++ ) {
				if( object[i].id == marker_info[j].id ) {
					// you've found a pattern
					if( k == -1 )
						k = j;
					else if( marker_info[k].cf < marker_info[j].cf ) // make sure you have the best pattern (highest confidence factor)
						k = j;
				}
			}
			if( k == -1 ) {
				object[i].visible = 0;
				continue;
			}

			// calculate the transform for each marker
			if( object[i].visible == 0 ) {
				arGetTransMat( &marker_info[k], object[i].marker_center, object[i].marker_width, object[i].trans );
			}
			else {
				arGetTransMatCont( &marker_info[k], object[i].trans, object[i].marker_center, object[i].marker_width, object[i].trans );
			}
			object[i].visible = 1;
		}
	}

	if( mLockedMode >= 0 ) {
		for( int i = 0; i < objectnum; i++ ) {
			object[i].visible = 0;
		}
		object[mLockedMode].visible = 1;
	}

	for( int mod = 0; mod < objectnum; ++mod )
		mModules[mod]->update( this, object[mod].visible );
}

void ARTestApp::draw()
{
	GLdouble p[16];
	GLdouble m[16];

	// Clear the buffers for the new frame.
	glClearColor( 0, 0, 0, 1 );
	gl::enableDepthWrite();
	glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	gl::disableDepthRead();
	gl::disableDepthWrite();
	gl::enableAlphaBlending();

	if( object[0].visible || object[1].visible || object[2].visible )
		mCurrentAlpha += ( 0.0f - mCurrentAlpha ) * 0.05f;
	else
		mCurrentAlpha += ( 1.0f - mCurrentAlpha ) * 0.05f;

	gl::setMatricesScreenOrtho( getWindowWidth(), getWindowHeight() );

	// draw the camera image centered
	glColor4f( 1, 1, 1, 1 ); // alpha was: 0.2f + mCurrentAlpha * 0.8f
	float width = getWindowHeight() * ( mTexture->getWidth() / (float)mTexture->getHeight() );
	mTexture->draw( ( getWindowWidth() - width ) / 2.0f, 0, width, getWindowHeight() );
	glDisable( mTexture->getTarget() );

#if defined( USE_AR_VIDEO )
	arVideoCapNext();
	gARTImage = NULL; // image data is no longer valid after calling arVideoCapNext()
#endif

	// Projection transformation.
	arglCameraFrustumRH( &gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p );
	glMatrixMode( GL_PROJECTION );
	glLoadMatrixd( p );

	// Calculate the camera position relative to each visible marker.
	// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
	for( int mod = 0; mod < objectnum; ++mod ) {
		if( object[mod].visible ) {
			arglCameraViewRH( object[mod].trans, m, VIEW_SCALEFACTOR );
			glMatrixMode( GL_MODELVIEW );
			glLoadMatrixd( m );
			fli::Matrix44d mvd( m );
			mModules[mod]->draw( this, mvd * Vec4d( 0, 0, 0, 1 ) );
		}
	}
}
bool RogersGuiApp::takeScreenshotButtonClick( MouseEvent event )
{
	screenshot.update( copyWindowSurface() );
	screenshotTextureControl->refresh(); // !NEW! force the texture control to refresh
	return false;
}
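// A hedged sketch of how a callback like this might be wired up in setup(), assuming
// the SimpleGUI block; the addButton/registerClick/addParam names follow that block's
// conventions but are assumptions here, not verified against this app:
void RogersGuiApp::setup()
{
	gui = new SimpleGUI( this );
	gui->addButton( "Take screenshot" )->registerClick( this, &RogersGuiApp::takeScreenshotButtonClick );
	screenshotTextureControl = gui->addParam( "Screenshot", &screenshot );

	screenshot = gl::Texture( getWindowWidth(), getWindowHeight() );
}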
void motionHistAdvApp::update()
{
	if( ! mCapture.checkNewFrame() ) {
		return; // nothing to do
	}

	float secs = getElapsedSeconds(); // always make sure that all the methods use the exact same time

	mBoundingRects.clear();
	mAngles.clear();

	mCaptureSurface = mCapture.getSurface();
	mCaptureTex = gl::Texture( mCaptureSurface );

	mPrevFrame = mCurrentFrame;
	mCurrentFrame = toOcv( Channel( mCaptureSurface ) );
	if( mPrevFrame.size().width == 0 ) {
		return; // no previous frame to diff against yet
	}

	// threshold the frame difference and accumulate it into the motion history image
	cv::absdiff( mCurrentFrame, mPrevFrame, mInput );
	cv::threshold( mInput, mInput, 20, 1, cv::THRESH_BINARY );
	cv::updateMotionHistory( mInput, mHistory, secs, MHI_DURATION );
	cv::convertScaleAbs( mHistory, mMask, 255.0 / MHI_DURATION, ( MHI_DURATION - secs ) * 255.0 / MHI_DURATION );

	// find the motion gradient
	cv::calcMotionGradient( mHistory, mMask, mOrientation, MAX_TIME_DELTA, MIN_TIME_DELTA, 3 );

	// find the motion segments
	vector<cv::Rect> tmpRects;
	cv::segmentMotion( mHistory, mMotionMask, tmpRects, secs, MIN_TIME_DELTA );
	if( tmpRects.empty() ) {
		mAngles.push_back( cv::calcGlobalOrientation( mOrientation, mMask, mHistory, secs, MHI_DURATION ) );
	}
	else {
		for( size_t i = 0; i < tmpRects.size(); i++ ) {
			if( tmpRects[i].area() > 200 ) {
				cv::Mat mroi( mMask, tmpRects[i] );
				cv::Mat oroi( mOrientation, tmpRects[i] );
				cv::Mat hroi( mHistory, tmpRects[i] );
				double a = cv::calcGlobalOrientation( oroi, mroi, hroi, secs, MAX_TIME_DELTA );
				mAngles.push_back( a );
				mBoundingRects.push_back( tmpRects[i] );

				// spawn a quad at the center of the motion segment, moving along its dominant angle
				Rectf r = fromOcv( tmpRects[i] );
				Vec2f pos = r.getCenter();
				float xp = cos( toRadians( mAngles.back() ) );
				float yp = sin( toRadians( mAngles.back() ) );
				Vec2f vel = Vec2f( xp, yp );
				Area area = Area( r );
				area.expand( area.getWidth() / 3, area.getHeight() / 3 );
				Rectf rect = r - pos;
				Surface surf = mCaptureSurface.clone( area );
				mQuads.push_back( Quad( pos, vel, surf, rect ) );
			}
		}
	}

	mHistoryTex.update( Surface( fromOcv( mHistory ) ) );
	mMotionMaskTex.update( Surface( fromOcv( mMotionMask ) ) );
	mDiffTex.update( Surface( fromOcv( mInput ) ) );
	mOrientTex.update( Surface( fromOcv( mOrientation ) ) );

	// update living quads and erase dead ones
	for( vector<Quad>::iterator it = mQuads.begin(); it != mQuads.end(); ) {
		if( it->isDead() ) {
			it = mQuads.erase( it );
		}
		else {
			it->update();
			++it;
		}
	}
}
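// The Quad type consumed above isn't shown in this snippet. A hypothetical minimal
// version consistent with how it is used (constructed from pos/vel/surface/rect,
// with update() and isDead()); the lifetime counter is an invented detail:
struct Quad {
	Quad( const Vec2f &pos, const Vec2f &vel, const Surface &surf, const Rectf &rect )
		: mPos( pos ), mVel( vel ), mTex( gl::Texture( surf ) ), mRect( rect ), mLife( 60 ) {}

	void update()        { mPos += mVel; mLife--; }
	bool isDead() const  { return mLife <= 0; }

	Vec2f       mPos, mVel;
	gl::Texture mTex;
	Rectf       mRect;
	int         mLife;
};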
bool ButtonsAndTexturesApp::takeScreenshotButtonClick( MouseEvent event )
{
	screenshot.update( copyWindowSurface() );
	return false;
}