Example #1
void PhotoBoothApp::update()
{
    if(mCurrentState == STATE_COUNT_DOWN){
        
        // split the elapsed count-down time into whole seconds and a fractional remainder
        double elapsed = getElapsedSeconds() - mCountDownStartTime;
        mCountDownNumber = int( elapsed );
        mCountDownFractional = elapsed - mCountDownNumber;
        
        // check to see if the count-down has hit the end.
        if(mCountDownNumber == mNumberTextures.size()){
            mCameraFlash = 1;
            mDarkBgAlpha = 1;
            mCurrentState = STATE_ACCEPT;
            
            mPreviewTexturePos = Vec2f(0,0);
            mPreviewTexture = gl::Texture( mCameraTexture );
            
            timeline().apply( &mCameraFlash, 0.0f, 0.8f, EaseOutCubic() );
            
            // Show buttons
            timeline().apply( &mDiscardPos, Vec2f(100, height-200), 1.0f, EaseOutQuint() ).delay(0.25f);
            timeline().apply( &mSavePos, Vec2f(width-700, height-200), 1.0f, EaseOutQuint() ).delay(0.50f);
        }
    }
    
    // don't update the camera texture after the snapshot has been taken.
    if( mCapture && mCapture.checkNewFrame() && (mCurrentState != STATE_ACCEPT) ) {
        mCameraSurface = mCapture.getSurface();
        mCameraTexture = gl::Texture( mCapture.getSurface() );
    }
}
Example #2
SP_RESULT load_calc(const char *load_file_name,
                    const char *file_name,
                    EPAnalysis& ep,
                    bool playback) {
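    // load the recorded wave file, run the analysis, and save each stage
    // (raw, smoothed, cut) to a MATLAB-readable file, optionally playing the audio back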
    RawData data;
    Capture c;
    char fn_buffer[128] = "";
    
    data.loadWav(stringFile(load_file_name,".wav",fn_buffer));
    ep.Initial(&data);
    ep.reCalcAllData();
    ep.saveMatlab(stringFile(file_name,".dat",fn_buffer));
    
    if(playback)c.play(&data);
    
    ep.smooth();
    ep.saveMatlab(stringFile(file_name,"_smooth.dat",fn_buffer));
    ep.cut();
    ep.saveMatlab(stringFile(file_name,"_cut.dat",fn_buffer));
    
    //ep.smooth();
    //ep.cut();
    //ep.saveMatlab(stringFile(file_name,"_cut_2.m",fn_buffer));

    if(playback)c.play(&data);

    return SP_SUCCESS;
}
Example #3
static void *status_thread(void *arg)
{
    Capture *p = ((thread_arg_t*)arg)->obj;

    GeneralStats gen;
    RecordStats rec;

    while (1) {
        p->get_general_stats(&gen);
        if (term) {
            MT_mvprintw(4,0,"INPUT: video %s audio %s", gen.video_ok ? "OK " : "BAD", gen.audio_ok ? "OK " : "BAD");
        }
        else {
            printf("INPUT: video %s audio %s\n", gen.video_ok ? "OK " : "BAD", gen.audio_ok ? "OK " : "BAD");
        }

        if (gen.recording) {
            p->get_record_stats(&rec);
            if (term) {
                MT_mvprintw(5,0,"     : recording: state=%d  framecount=%d  filesize=%llu\n", rec.record_state, rec.current_framecount, rec.file_size);
            }
            else {
                printf("  recording: state=%d framecount=%d filesize=%llu\n", rec.record_state, rec.current_framecount, rec.file_size);
            }
        }
        else {
            if (term)
                MT_mvprintw(5,0,"     : not recording                        ");
        }
        if (term)
            usleep(100 * 1000);     // every 100 ms
        else
            usleep(1000 * 1000);        // every second
    }
}
Example #4
void RotatingCubeApp::setup()
{
	try {
		mCapture = Capture( 320, 240 );
		mCapture.start();
	}
	catch( CaptureExc &exc ) {
		console() << "failed to initialize the webcam, what: " << exc.what() << std::endl;

		// if we threw in the start, we'll set the Capture to null
		mCapture.reset();

		// create a warning texture
		TextLayout layout;
		layout.clear( Color( 0.3f, 0.3f, 0.3f ) );
		layout.setColor( Color( 1, 1, 1 ) );
		layout.setFont( Font( "Arial", 96 ) );
		layout.addCenteredLine( "No Webcam" );
		layout.addCenteredLine( "Detected" );
		mTexture = gl::Texture2d::create( layout.render() );
	}
	
	mCam.lookAt( vec3( 3, 2, -3 ), vec3( 0 ) );
	gl::enableDepthRead();
	gl::enableDepthWrite();
}
Example #5
File: FxApp.cpp Project: gaborpapp/apps
void FxApp::draw()
{
	static gl::Texture source;

	gl::clear( Color::black() );

	bool isNewFrame = mCapture && mCapture.checkNewFrame();

	if ( isNewFrame )
	{
		source = gl::Texture( mCapture.getSurface() );
	}

	gl::setMatricesWindow( getWindowSize() );
	gl::setViewport( getWindowBounds() );

	if ( isNewFrame )
		source = mEffects[ mCurrentEffect ]->process( source );

	if ( source )
	{
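		// fit the processed frame proportionally into the window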
		gl::draw( source,
				Area::proportionalFit( source.getBounds(), getWindowBounds(),
									   true, true ) );
	}

	params::InterfaceGl::draw();
}
Example #6
void TellThatToMyCamera_v1_0App::update()
{
	if( mCapture.checkNewFrame() ) {
		Surface surface = mCapture.getSurface();
		mCameraTexture = gl::Texture(surface);
        updateExpressions(surface);
	}
}
Example #7
void ___PACKAGENAMEASIDENTIFIER___App::update()
{
	if( mCapture && mCapture.checkNewFrame() ){
		cv::Mat input( toOcv( mCapture.getSurface() ) ), output;
		cv::Sobel( input, output, CV_8U, 1, 0 );
		mTexture = gl::Texture( fromOcv( output ) );
	}
}
Example #8
void RotatingCubeApp::update()
{
	if( mCapture && mCapture.checkNewFrame() )
		mTexture = gl::Texture2d::create( *mCapture.getSurface(), gl::Texture2d::Format().loadTopDown() );
	
	// Rotate the cube by .03 radians around an arbitrary axis
	mCubeRotation *= rotate( 0.03f, vec3( 1 ) );
}
Example #9
void shaderExternalFileExampleApp::update()
{
	if( mCapture && mCapture.checkNewFrame() ) {
		mTexture = gl::Texture( mCapture.getSurface() );
		mTexture.setWrap( GL_CLAMP, GL_CLAMP );
		mTexture.setMinFilter( GL_NEAREST );
		mTexture.setMagFilter( GL_NEAREST );
	}
}
Example #10
void ocvFaceDetectApp::update()
{
	if( mCapture.checkNewFrame() ) {
		Surface surface = mCapture.getSurface();
		mCameraTexture = gl::Texture( surface );
		updateFaces( surface );
	}
}
Example #11
void CaptureApp::keyDown( KeyEvent event )
{
#if defined( CINDER_MAC )
	if( event.getChar() == 'f' )
		setFullScreen( ! isFullScreen() );
	else if( event.getChar() == ' ' )
		( mCapture && mCapture.isCapturing() ) ? mCapture.stop() : mCapture.start();
#endif
}
Example #12
C_IMPL const unsigned char* captureGetFrame( CCapture* capture, int* width, int* height, int* stride )
{
    const Capture obj = c_to_cpp(capture);
    const CaptureSurface8u surf = obj.getSurface();
    *width = surf.getWidth();
    *height = surf.getHeight();
    *stride = surf.getRowBytes();
    return surf.getData();
}
Example #13
	void update()
	{
		if( mCapture->checkNewFrame() ) {
			delete mTexture;
			mTexture = new gl::Texture( mCapture->getSurface() );
		}
		
		if( ! mPaused )
			dynamicsWorld->stepSimulation(1.0f, 10);
	}
Example #14
void DialogCamera::videoOpen()
{
  QString fileName = QFileDialog::getOpenFileName(this, tr("Open Video"), QDir::currentPath());
  if(fileName.isEmpty())
    return;
  Capture capture;
  capture.setFormat(false);
  capture.setFileName(fileName);
  capture.run();
}
Example #15
void ICPApp::update()
{
	if( mCapture.checkNewFrame() ) {
		Surface surface = mCapture.getSurface();
		mCameraTexture = gl::Texture(surface);
        updateExpressions(surface);
        
//      FOR TESTING PURPOSES
//      mTexture = gl::Texture(mSurf);
//      updateExpressions(mSurf);

	}
}
Example #16
void ___PACKAGENAMEASIDENTIFIER___App::setup()
{
	try {
		mCapture = Capture( kCaptureWidth, kCaptureHeight );
		mCapture.start();
	} catch ( ... ) {
		console() << "Error with capture device." << std::endl;
		exit(1);
	}

	try {
		mShader = gl::GlslProg( loadResource( RES_SHADER_PASSTHRU ), loadResource( RES_SHADER_FRAGMENT ) );
	} catch ( gl::GlslProgCompileExc &exc ) {
		console() << "Cannot compile shader: " << exc.what() << std::endl;
		exit(1);
	} catch ( Exception &exc ) {
		console() << "Cannot load shader: " << exc.what() << std::endl;
		exit(1);
	}
	
	mFbo = gl::Fbo( kWindowWidth, kWindowHeight );

	mMixColorRed = 0.0f;
	mMixColorGreen = 0.0f;
	mMixColorBlue = 0.0f;

	mParams = params::InterfaceGl( "Parameters", Vec2i( kParamsWidth, kParamsHeight ) );
	mParams.addParam( "Mix Red", &mMixColorRed, "min=-1.0 max=1.0 step=0.01 keyIncr=r keyDecr=R" );
	mParams.addParam( "Mix Green", &mMixColorGreen, "min=-1.0 max=1.0 step=0.01 keyIncr=g keyDecr=G" );
	mParams.addParam( "Mix Blue", &mMixColorBlue, "min=-1.0 max=1.0 step=0.01 keyIncr=b keyDecr=B" );


}
Example #17
File: FxApp.cpp Project: gaborpapp/apps
void FxApp::shutdown()
{
	if ( mCapture )
	{
		mCapture.stop();
	}
}
Example #18
File: FxApp.cpp Project: gaborpapp/apps
void FxApp::setup()
{
	gl::disableVerticalSync();

	int w = 640;
	int h = 480;
	mEffects.push_back( fx::EffectRef( new fx::PassThrough( w, h ) ) );
	mEffects.push_back( fx::EffectRef( new fx::LumaOffset( w, h ) ) );
	mEffects.push_back( fx::EffectRef( new fx::Posterize( w, h ) ) );

	mParams = params::InterfaceGl( "Parameters", Vec2i( 300, 300 ) );

	for ( vector< mndl::fx::EffectRef >::iterator it = mEffects.begin(); it != mEffects.end(); ++it )
	{
		mEffectNames.push_back( (*it)->getName() );
	}

	mCurrentEffect = 0;

	// capture
	try
	{
		mCapture = Capture( 640, 480 );
		mCapture.start();
	}
	catch (...)
	{
		console() << "Failed to initialize capture" << std::endl;
	}
}
Example #19
void SmilesApp::setup()
{
    mSmileLimit = 4.0f;
    mSmileAverageNumOfFrames = 10;
    mCamIndex = 0;
    mFps = getAverageFps();
    
    try {
		mCapture = Capture( CAMWIDTH, CAMHEIGHT );
		mCapture.start();
	}
	catch( ... ) {
		console() << "Failed to initialize capture" << std::endl;
	}
    
    mSmileRect = Rectf(300,100,600,400);
    setupSmileDetector(mSmileRect.getInteriorArea().getWidth(), mSmileRect.getInteriorArea().getHeight());
    console() << mSmileRect.getInteriorArea().getWidth() << " x " << mSmileRect.getInteriorArea().getHeight() << endl;
	mSmileThreshold = 0;
	mSmileAverageIndex = 0;
    
    mParams = params::InterfaceGl( "Parameters", Vec2i( 220, 170 ) );
    mParams.addParam( "FPS", &mFps,"", true );
    mParams.addSeparator();
	mParams.addParam( "SmileResponse", &mSmileResponse, "", true );
    mParams.addParam( "SmileThreshold", &mSmileThreshold, "", true );
    
    mParams.addParam( "mSmileLimit", &mSmileLimit );
    mParams.addParam( "mSmileAverageNumOfFrames", &mSmileAverageNumOfFrames );
    
}
Example #20
	void setup()
	{
		mCam = new CameraPersp( getWindowWidth(), getWindowHeight(), 60.0f );
		mCam->lookAt(Vec3f(100,400,-400), Vec3f::zero());
		
		mSurface = 0;
		mTexture = 0;
		mCapture = new Capture( 320, 240 );
		mCapture->startCapture();
		mPaused = false;
		mDrawTextured = true;
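
		// standard Bullet physics world setup: broad-phase, collision configuration, dispatcher and constraint solver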
		
		btVector3 worldAabbMin(-10000,-10000,-10000);
		btVector3 worldAabbMax(10000,10000,10000);
		int maxProxies = 1024;
		
		btAxisSweep3 * broadphase									= new btAxisSweep3(worldAabbMin,worldAabbMax,maxProxies);
		btDefaultCollisionConfiguration	* collisionConfiguration	= new btDefaultCollisionConfiguration();
		btCollisionDispatcher * dispatcher							= new btCollisionDispatcher(collisionConfiguration);
		btSequentialImpulseConstraintSolver * solver				= new btSequentialImpulseConstraintSolver;
		
		dynamicsWorld = new btDiscreteDynamicsWorld(dispatcher,broadphase,solver,collisionConfiguration);
		dynamicsWorld->setGravity(btVector3(0,-10,0));
		
		btCollisionShape * groundShape	= new btStaticPlaneShape(btVector3(0,1,0),1);
		
		btDefaultMotionState * groundMotionState = new btDefaultMotionState(btTransform(btQuaternion(0,0,0,1),btVector3(0,-1,0)));
		btRigidBody::btRigidBodyConstructionInfo groundRigidBodyCI(0,groundMotionState,groundShape,btVector3(0,0,0));
		
		groundRigidBody = new btRigidBody(groundRigidBodyCI);
		dynamicsWorld->addRigidBody(groundRigidBody);
		
	}
Example #21
void ocvFaceDetectApp::setup()
{
	mFaceCascade.load( getAssetPath( "haarcascade_frontalface_alt.xml" ).string() );
	mEyeCascade.load( getAssetPath( "haarcascade_eye.xml" ).string() );	
	
	mCapture = Capture( 640, 480 );
	mCapture.start();
}
Example #22
void ocvCaptureApp::update()
{
	if( mCap && mCap.checkNewFrame() ) {
		cv::Mat input( toOcv( mCap.getSurface() ) ), output;

//		cv::threshold( input, output, 128, 255, CV_8U );
		
		cv::Sobel( input, output, CV_8U, 1, 0 );

//		cv::Laplacian( input, output, CV_8U );		

//		cv::circle( output, toOcv( Vec2f(200, 200) ), 300, toOcv( Color( 0, 0.5f, 1 ) ), -1 );

//		cv::line( output, cv::Point( 1, 1 ), cv::Point( 30, 30 ), toOcv( Color( 1, 0.5f, 0 ) ) );
		
		mTexture = gl::Texture( fromOcv( output ) );
	}	 
}
Example #23
void SmilesApp::update()
{
    mFps = getAverageFps();

    if( mCapture && mCapture.checkNewFrame() ){
        mSurface = mCapture.getSurface();
    }
    if (mSurface){
        mGreyChannel = Channel( mSurface.clone(mSmileRect.getInteriorArea()) );
        int totalDetectionPixels = mGreyChannel.getWidth()*mGreyChannel.getHeight();
        unsigned char * detectionPixels = mGreyChannel.getData();
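        // copy the grayscale pixels into the buffer format that detectSmiles() expects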
        for (int i = 0; i < totalDetectionPixels; i++){
            mRImage_pixels->array[i] = detectionPixels[i];
        }
        detectSmiles(*mRImage_pixels);
        //console() << smileThreshold  << endl;
    }
}
Example #24
void CaptureApp::setup()
{	
	try {
		mCapture = Capture( 640, 480 );
		mCapture.start();
	}
	catch( ... ) {
		console() << "Failed to initialize capture" << std::endl;
	}
}
Example #25
void peopleDetectApp::setup()
{
    hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
    
    capture = Capture(640, 480);
    capture.start();
    
    stillSurface = loadImage(loadAsset( "people_in_park.jpg" ) );
	stillTex = gl::Texture(stillSurface);
    
}
Example #26
void TellThatToMyCamera_v1_0App::setup()
{
    mExpressionsCascade.load(getAssetPath("haarcascade_frontalface_alt.xml").string());
    mPath= getAssetPath("ppdtest.csv").string();
    
	mCapture = Capture( 640, 480 );                 // Camera settings
	mCapture.start();
    
    read_csv(mPath, mDBimgFaces, mDBLabels);        // Read DB of faces for FaceRec algorithm
    mFisherFaceRec->train(mDBimgFaces, mDBLabels);  // Train the Fisher Face Recognizer algorithm
}
Example #27
void ocvFaceDetectApp::setup()
{
#if defined( CINDER_MAC )
	mFaceCascade.load( getResourcePath( "haarcascade_frontalface_alt.xml" ) );
	mEyeCascade.load( getResourcePath( "haarcascade_eye.xml" ) );	
#else
	mFaceCascade.load( getAppPath() + "../../resources/haarcascade_frontalface_alt.xml" );
	mEyeCascade.load( getAppPath() + "../../resources/haarcascade_eye.xml" );	
#endif
	
	mCapture = Capture( 640, 480 );
	mCapture.start();
}
Example #28
void PaintingBeingsApp::captureCamera()
{
	if (_launchAlgoGen)
	{
		_launchAlgoGen = false;
		setStop();
	}

	if (_capture && _capture.checkNewFrame())
	{
		_image.setImage( _capture.getSurface());
		_algoGen.setup(_image.getMiniatureSurface());
		_imageBeing.setup(_image.getMiniatureSize());

		resetCamera();

		_launchAlgoGen = true;
		_showImageBeing = false;

		updateInterface(true);

		setPlay();
	}
}
Example #29
  int capture(vector<long>& data, long* timestamp)
  {
    long raw_timestamp = 0;
    int n = capture_->capture(data, &raw_timestamp);
    if (n < 0) {
      error_message_ = scip_.what();
      return n;
    }

    // subtract the previously determined offset from the device's raw timestamp
    recent_timestamp_ = raw_timestamp - timestamp_offset_;
    if (timestamp) {
      *timestamp = recent_timestamp_;
    }
    return n;
  }
Example #30
void PhotoBoothApp::setup()
{
    // Start camera.
    try {
        
        vector<Capture::DeviceRef> devices( Capture::getDevices() );
        
        // Look for a camera called "Front Camera"
        for( vector<Capture::DeviceRef>::const_iterator deviceIt = devices.begin(); deviceIt != devices.end(); ++deviceIt ) {
            Capture::DeviceRef device = *deviceIt;
            
            if(device->getName() == "Front Camera"){
                mCapture = Capture( CAM_WIDTH, CAM_HEIGHT, device );
                mCapture.start();
            }
        }
    }
    catch( ... ) {
        console() << "Failed to initialize camera" << std::endl;
    }
    
    // Load textures
    mConfirmMessage         = loadImage( loadResource("assets/confirm_message.png"));
    mSaveTexture            = loadImage( loadResource("assets/save.png"));
    mDiscardTexture         = loadImage( loadResource("assets/discard.png"));
    mCameraButtonTexture    = loadImage( loadResource("assets/camera.png"));
    mIntroTexture           = loadImage( loadResource("assets/attract.png" ));
    mLightBg                = loadImage( loadResource("assets/bkg_light.png" ));
    mDarkBg                 = loadImage( loadResource("assets/bkg_dark.png" ));
    mNumberBg               = loadImage( loadResource("assets/countdown_bkg.png") );
    mNumberProgress         = loadImage( loadResource("assets/countdown_progress.png") );
    
    mNumberTextures.push_back( loadImage( loadResource("assets/countdown_5.png")));
    mNumberTextures.push_back( loadImage( loadResource("assets/countdown_4.png")));
    mNumberTextures.push_back( loadImage( loadResource("assets/countdown_3.png")));
    mNumberTextures.push_back( loadImage( loadResource("assets/countdown_2.png")));
    mNumberTextures.push_back( loadImage( loadResource("assets/countdown_1.png")));

    width               = getWindowWidth() / DISPLAY_SCALE;
    height              = getWindowHeight() / DISPLAY_SCALE;
    
    mCurrentState       = STATE_PREVIEW;
    
    mDiscardPos         = Vec2f(100, height + 100 );
    mSavePos            = Vec2f(width - 700, height + 100);
    mCameraButtonPos    = Vec2f(width/2 - mCameraButtonTexture.getWidth() / 2, 650 - mCameraButtonTexture.getHeight() / 2);
}