Exemple #1
0
// Per-frame engine tick: services the IO loop, makes the primary window's
// GL context current, emits the update signal, runs the user's update(),
// advances the timeline, and periodically samples the average FPS.
// NOTE: the statement order is the app lifecycle contract — do not reorder.
void AppBase::privateUpdate__()
{
	mFrameCount++;

	// service asio::io_service
	mIo->poll();

	// Make the first window's renderer context current so update() code can
	// issue GL calls without selecting a context itself.
	if( getNumWindows() > 0 ) {
		WindowRef mainWin = getWindowIndex( 0 );
		if( mainWin )
			mainWin->getRenderer()->makeCurrentContext();
	}

	// Notify listeners before invoking the app's own update().
	mSignalUpdate.emit();

	update();

	// Advance tweens/animations to the current app time.
	mTimeline->stepTo( static_cast<float>( getElapsedSeconds() ) );

	double now = mTimer.getSeconds();
	if( now > mFpsLastSampleTime + mFpsSampleInterval ) {
		//calculate average Fps over sample interval
		uint32_t framesPassed = mFrameCount - mFpsLastSampleFrame;
		mAverageFps = (float)(framesPassed / (now - mFpsLastSampleTime));

		mFpsLastSampleTime = now;
		mFpsLastSampleFrame = mFrameCount;
	}
}
// Loads a captured sensor session from assets/capture/<fileName>.
// Returns true on success; false if already initialized, the file is
// missing, or the XML lacks the expected PulseSensor/Data structure.
bool SensorFile::init( const string &fileName )
{
	// Guard against double initialization.
	if( mInited )
		return false;

	fs::path fullPath( getAssetPath( string( "capture/" ) + fileName ));
	if( ! fs::exists( fullPath ))
	{
		console() << "Unable to load captured file: " << fileName << endl;
		return false;
	}

	// Parse the whole file into the member document so references into it
	// below remain valid for the object's lifetime.
	mDoc = XmlTree( loadFile( fullPath ));

	if( mDoc.hasChild( "PulseSensor" ))
	{
		XmlTree &xmlPulseSensor = mDoc.getChild( "PulseSensor" );
		mName = xmlPulseSensor.getAttributeValue<string>( "Name"    , "" );

		if( xmlPulseSensor.hasChild( "Data" ))
		{
			// Keep a pointer into mDoc's child list; valid only while mDoc
			// is not reassigned. NOTE(review): the iterator is initialized
			// to end() — presumably a "not started yet" sentinel consumed
			// by the playback code; confirm against the reader.
			mListXmlData   = &xmlPulseSensor.getChildren();
			mXmlDataActIt  =  mListXmlData->end();
			mInited        =  true;
			mTime          =  getElapsedSeconds();
			return true;
		}
	}

	console() << "Invalid captured file: " << fileName << endl;
	return false;
}
// Services the websocket client and, when auto-reconnect is enabled and the
// connection has dropped, re-establishes it once the retry interval passes.
void Connection::update()
{
    mClient->poll();

    // Bail out unless an auto-reconnect attempt is actually due.
    if ( ! mShouldAutoReconnect )
        return;
    if ( mIsConnected )
        return;
    if ( getElapsedSeconds() - mLastTimeTriedConnect <= mReconnectInterval )
        return;

    // Disconnect update signal:
    mUpdateConnection.disconnect();
    // Re-initialize ws client:
    initialize();
    // Re-establish connection:
    mClient->connect( mHost );
    // Reset connection attempt time:
    mLastTimeTriedConnect = getElapsedSeconds();
}
// All logic updates here
// Called every frame
void HelloWorldApp::update() {
    // Launch hack: the window starts always-on-top so it appears above other
    // windows; after two seconds of runtime we restore normal stacking.
    static bool sRestoredStacking = false;
    if( sRestoredStacking )
        return;
    if( getElapsedSeconds() > 2 ) { // After 2 seconds, resume normal behavior
        setAlwaysOnTop( false );
        sRestoredStacking = true;
    }
}
// Restarts the timer while either hand is entering a gesture; once the
// countdown window elapses, transitions to the countdown state.
void StateInit::update()
{
	const bool leftEntering  = _app._hands[LEFT]->state  == Hand::NORMAL;
	const bool rightEntering = _app._hands[RIGHT]->state == Hand::NORMAL;
	if (leftEntering || rightEntering)
	{
		// entering gesture — hold the countdown at zero
		resetTimer();
	}

	if (getElapsedSeconds() > n_countdown)
		_app.changeToState(_app._state_countdown);
}
		// Runs update logic: steps the Bullet simulation, advances tracked
		// objects, removes expired ones, and — when the object count changed —
		// re-activates every rigid/soft body so Bullet does not leave them
		// asleep with stale contact state.
		void DynamicsWorld::update()
		{

			// Get time since last frame, update
			double elapsedSeconds = getElapsedSeconds();
			double step = elapsedSeconds - mElapsedSeconds;
			mElapsedSeconds = elapsedSeconds;

			// Update Bullet world
			mDynamicsWorld->stepSimulation(1.0f, 10, 1.0f / math<float>::max(1.0f, getFrameRate()));

			// Get object counts
			uint32_t numObjects = mObjects.size();

			// Update bodies. The index is only advanced when nothing was
			// erased: vector::erase shifts the next element into slot i, so
			// the original "erase then i++" form silently skipped the element
			// following every removal.
			for (uint32_t i = 0; i < mObjects.size(); )
			{
				mObjects[i]->update(step);
				if (mObjects[i]->getLifespan() > 0.0 && mObjects[i]->getLifetime() > mObjects[i]->getLifespan())
					mObjects.erase(mObjects.begin() + i);
				else
					++i;
			}

			// Object count has changed
			if (numObjects != mObjects.size())
			{

				// Iterate through collision objects in reverse. (The former
				// "if (i >= 0)" re-check inside the loop duplicated the loop
				// condition and has been removed.)
				numObjects = mDynamicsWorld->getNumCollisionObjects();
				for (int32_t i = (int32_t)numObjects - 1; i >= 0 ; i--)
				{

					// Activate object if it is a rigid or soft body
					btCollisionObject * collisionObject = mDynamicsWorld->getCollisionObjectArray()[i];
					btRigidBody * rigidBody = btRigidBody::upcast(collisionObject);
					if (rigidBody)
					{
						rigidBody->activate(true);
					}
					else
					{
						btSoftBody * softBody = btSoftBody::upcast(collisionObject);
						if (softBody)
							softBody->activate(true);
					}

				}

			}

		}
Exemple #7
0
//--------------------------------------------------------------
// Reads and parses a message from the server.
// Frame messages start with 'G', 'B' or 'I'; the header is
// "<type>,<frameCount>" and any payload follows after ':' separators.
// NOTE(review): assumes _serverInput is non-empty — at(0) throws otherwise.
//--------------------------------------------------------------
void mpeClientTCP::read(string _serverInput) {
    out("Receiving: " + _serverInput);
        
    char c = _serverInput.at(0);
    if (c == 'G' || c == 'B' || c == 'I') {
        // Receiving any frame message implies the server considers all
        // clients connected.
        if (!allConnected) {
            if (DEBUG) out("all connected!");
            allConnected = true;
        }
        // split into frame message and data message
        vector<string> info = ofSplitString(_serverInput, ":");
        vector<string> frameMessage = ofSplitString(info[0], ",");
        int fc = ofToInt(frameMessage[1]);
        
        if (info.size() > 1) {
            // there is a message here with the frame event
            info.erase(info.begin());
            dataMessage.clear();
            dataMessage = info;
            bMessageAvailable = true;
        } else {
            bMessageAvailable = false;
        }
        
        // assume no arrays are available
        bIntsAvailable  = false;
        bBytesAvailable = false; 
        
        // Only render when the server's frame counter matches ours,
        // keeping all clients in lock-step.
        if (fc == frameCount) {
            rendering = true;
            frameCount++;
            
            // calculate new framerate
            float ms = getElapsedSeconds()*1000.0 - lastMs;
            fps = 1000.f / ms;
            lastMs = getElapsedSeconds()*1000.0;
            
            if (!autoMode) {
                parent->frameEvent();
            }
        }
    }
}
// Moves the bullet straight up at constant SPEED, records the time of this
// update, and kills the bullet once it leaves the top of the screen.
void PlayerBullet::update()
{
    location -= Vec2f(0,SPEED);
    t = getElapsedSeconds();

    const bool leftScreen = location.y < 0;
    if(leftScreen)
        isAlive = false;
}
Exemple #9
0
// Builds a cube at `loc` tinted by `colour`, pre-compiling its geometry into
// a GL display list. `type` selects "stroked" (wireframe) or "solid" faces.
Cube::Cube(Vec3f loc, Vec3f colour, string type){
    cubeMaterial.setSpecular( ColorA (colour.x, colour.y, colour.z, .3) );
	cubeMaterial.setDiffuse( ColorA (colour.x, colour.y, colour.z, .3));
	// NOTE(review): ambient blue channel is the literal .05f rather than
	// colour.z — looks like a possible typo; confirm intent.
	cubeMaterial.setAmbient(ColorA (colour.x, colour.y, .05f, .01f)) ;
	cubeMaterial.setShininess( 600.0f );
    cubeMaterial.setEmission(ColorA(1, 1, 1, 1 ));
    
    gl::lineWidth(15);
    
    // Record the 80-unit cube into a display list so draw() can replay it.
    tempCube = cinder::gl::DisplayList (GL_COMPILE);
    tempCube.newList();
    if (type == "stroked")
    gl::drawStrokedCube(Vec3f(0,0,0), Vec3f (80, 80, 80));
    if (type == "solid")
    gl::drawCube(Vec3f(0,0,0), Vec3f (80, 80, 80));
    tempCube.endList();
    tempCube.setMaterial( cubeMaterial );
    
    location = loc;
    // Seed the rotation vector from the current time/frame count.
    rotator = Vec3f(sin(getElapsedSeconds()), cos (getElapsedSeconds()), tan(getElapsedFrames()));
}
Exemple #10
0
// Requests a new camera tilt angle, clamped to the hardware range. Requests
// are throttled by kTiltRequestInterval to limit wear on the tilt motor.
void Device::setTilt( int32_t degrees )
{
	double now = getElapsedSeconds();
	const bool deviceReady     = mCapture && mSensor != 0;
	const bool intervalElapsed = now - mTiltRequestTime > kTiltRequestInterval;
	if ( !deviceReady || !intervalElapsed ) {
		return;
	}

	long clamped = (long)math<int32_t>::clamp( degrees, -MAXIMUM_TILT_ANGLE, MAXIMUM_TILT_ANGLE );
	long hr = mSensor->NuiCameraElevationSetAngle( clamped );
	if ( FAILED( hr ) ) {
		console() << "Unable to change device angle: " << endl;
		error( hr );
	}
	mTiltRequestTime = now;
}
int
PitchBendSequenceDialog::numVibratoCycles(void)
{
    const int vibratoFrequency  = m_vibratoFrequency->value();
    const float totalCyclesExact =
        float(vibratoFrequency) * getElapsedSeconds();
    // We round so that the interval gets an exact number of cycles.
    const int totalCycles = int(totalCyclesExact + 0.5);

    // Since the user wanted vibrato, provide at least one cycle.
    if (totalCycles > 1) { return totalCycles; }
    else { return 1; }
}
// Advances the per-frame clock, spins the teapot, and renders the scene
// into the offscreen FBO.
void SystemCinderApp::update()
{
	/*update head pos and interaction goes here*/
	// Accumulate the time elapsed since the previous frame.
	const double frameDelta = getElapsedSeconds() - mCurrentSeconds;
	mCurrentSeconds += frameDelta;

	// rotate the teapot object a small fixed amount per frame
	mObjectRotation *= rotate(0.04f, normalize(vec3(0.1f, 1, 0.1f)));

	renderSceneToFbo();
}
	// Requests a new camera tilt angle, clamped to the hardware range.
	void Kinect::setTilt( int32_t degrees )
	{

		// Tilt requests should be spaced apart to prevent wear on the motor
		double elapsedSeconds = getElapsedSeconds();
		if ( mCapture && mSensor != 0 && elapsedSeconds - mTiltRequestTime > kTiltRequestInterval ) {
			long hr = mSensor->NuiCameraElevationSetAngle( (long)math<int32_t>::clamp( degrees, -MAXIMUM_TILT_ANGLE, MAXIMUM_TILT_ANGLE ) );
			if ( FAILED( hr ) ) {
				trace( "Unable to change device angle: " );
				error( hr );
			}
			// Record the request time even on failure so retries stay throttled.
			mTiltRequestTime = elapsedSeconds;
		}

	}
Exemple #14
0
// Constructs the app, selects one of the compile-time test scenarios via the
// TEST macro, records the start time, and lists available MIDI ports.
// NOTE(review): testMidi is allocated with raw `new` — ownership/cleanup is
// presumably handled by Detail's destructor; confirm, else this leaks.
MidiApp::MidiApp()
: _detail(new Detail())
{
#define TEST 0
    // Compile-time test selection; only the TEST-numbered case ever runs.
    switch (TEST) {
        case 0: _detail->testMidi = new TestSoftSynth("resources/rachmaninov3.mid"); break;
        case 1: _detail->testMidi = new TestSoftSynth("resources/209-Tchaikovsky - Russian Dance (Nutcracker)"); break;
        case 2: _detail->testMidi = new TestSoftSynth(MMLtune, true); break;
        case 3: _detail->testMidi = new TestInOut(); break;
        case 4: _detail->testMidi = new TestFrequencyCalc(); break;
        case 5: _detail->testMidi = new TestSoftSynth("resources/106-Grieg - In the Hall of the Mountain King (Peer Gynt)"); break;
    }
    
    // Baseline for elapsed-time queries in update()/running().
    _detail->startTime = getElapsedSeconds();
    _detail->listPorts();
}
// Decodes one sensor message: stores its integer value in the member that
// matches the message type, notifies the listener, and appends the sample to
// the recorder when a recording is active. Unknown messages are logged and
// dropped without notifying the listener.
void PulseSensor::processMessage( const char messageId, const std::string message )
{
	MessageType messageType  = convertCharToMessageType( messageId );
	int         messageValue = atoi( message.c_str());

	if( messageType == MT_BeatPerMinute )
	{
		mBeatPerMinute = messageValue;
	}
	else if( messageType == MT_SensorData )
	{
		mSensorData = messageValue;
	}
	else if( messageType == MT_BeatPauseTime )
	{
		mBeatPauseTime = messageValue;
	}
	else
	{
		console() << "unknown message: " << message << endl;
		return;
	}

	mListener.callCallback( (int)messageType, messageValue );

	if( mRecorder.isRecording())
		mRecorder.dataRecording( getElapsedSeconds(), messageId, message );
}
Exemple #16
0
// Per-frame update: polls the SpaceNavigator, converts its translation and
// rotation input into camera-body impulses, integrates the camera body, and
// advances the particle system with the camera's current motion.
void SplatTestApp::update() {
  if (spaceNav) {
    spaceNav->update();
  }

  {
    // Map device input to impulses in camera-local space; the (1, 1, -1)
    // factor flips the Z axis. The small scalar factors tune sensitivity —
    // empirically chosen, not derived.
    const auto &ori = cameraBody.orientation;
    cameraBody.impulse = glm::rotate(ori, cameraTranslation * vec3(1, 1, -1)) * 0.000005f;
    cameraBody.angularImpulse = quat(cameraRotation * vec3(1, 1, -1) * 0.00000333f);
  }

  // Integrate the body and copy its transform onto the camera.
  cameraBody.step();
  cameraBody.applyTransform(camera);

  // Feed the particle system the camera's position, per-frame displacement,
  // and view direction.
  particleSys->update(getElapsedSeconds(), getElapsedFrames(), cameraBody.position,
                      cameraBody.position - cameraBody.positionPrev, camera.getViewDirection());

  updateGui();
}
	// Capture-thread entry point. Loops until mCapture is cleared, polling the
	// sensor for depth, skeleton and video frames. Each stream is read only
	// when its previous result has been consumed (the mNew* flag is false),
	// and each maintains its own frame-rate estimate from a single time
	// sample taken at the top of the iteration.
	void Kinect::run()
	{
		while ( mCapture ) {
			if ( mSensor != 0 ) {

				// Get elapsed time to calculate frame rate
				double time = getElapsedSeconds();

				//////////////////////////////////////////////////////////////////////////////////////////////

				// Depth stream: lock the frame texture, convert pixels to the
				// depth surface, release the frame, and update stats.
				if ( mDeviceOptions.isDepthEnabled() && mDepthStreamHandle != 0 && !mNewDepthSurface ) {
					
					_NUI_IMAGE_FRAME imageFrame;
					long hr = mSensor->NuiImageStreamGetNextFrame( mDepthStreamHandle, WAIT_TIME, &imageFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						INuiFrameTexture * texture = imageFrame.pFrameTexture;
						_NUI_LOCKED_RECT lockedRect;
						hr = texture->LockRect( 0, &lockedRect, 0, 0 );
						if ( FAILED( hr ) ) {
							error( hr );
						}
						if ( lockedRect.Pitch == 0 ) {
							trace( "Invalid buffer length received" );
						} else {
							pixelToDepthSurface( (uint16_t*)lockedRect.pBits );
						}

						hr = mSensor->NuiImageStreamReleaseFrame( mDepthStreamHandle, &imageFrame );
						if ( FAILED( hr ) ) {
							error( hr ); 
						}
						
						mFrameRateDepth = (float)( 1.0 / ( time - mReadTimeDepth ) );
						mReadTimeDepth = time;

						// Count users flagged active during depth processing.
						mUserCount = 0;
						for ( uint32_t i = 0; i < NUI_SKELETON_COUNT; i++ ) {
							if ( mActiveUsers[ i ] ) {
								mUserCount++;
							}
						}

						mNewDepthSurface = true;
					}

				}

				//////////////////////////////////////////////////////////////////////////////////////////////

				// Skeleton stream: smooth the first tracked skeleton's frame,
				// optionally mirror X, compute bone orientations, and store
				// joint data per skeleton slot.
				if ( mDeviceOptions.isSkeletonTrackingEnabled() && mIsSkeletonDevice && !mNewSkeletons ) {

					_NUI_SKELETON_FRAME skeletonFrame;
					long hr = mSensor->NuiSkeletonGetNextFrame( WAIT_TIME, &skeletonFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						bool foundSkeleton = false;
						for ( int32_t i = 0; i < NUI_SKELETON_COUNT; i++ ) {

							mSkeletons.at( i ).clear();

							NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[ i ].eTrackingState;
							if ( trackingState == NUI_SKELETON_TRACKED || trackingState == NUI_SKELETON_POSITION_ONLY ) {

								// Smoothing is applied once, on the whole frame,
								// when the first tracked skeleton is found.
								if ( !foundSkeleton ) {
									_NUI_TRANSFORM_SMOOTH_PARAMETERS transform = kTransformParams[ mTransform ];
									hr = mSensor->NuiTransformSmooth( &skeletonFrame, &transform );
									if ( FAILED( hr ) ) {
										error( hr );
									}
									foundSkeleton = true;
								}

								// Flip X when flipping the image.
								if ( mFlipped ) {
									( skeletonFrame.SkeletonData + i )->Position.x *= -1.0f;
									for ( int32_t j = 0; j < (int32_t)NUI_SKELETON_POSITION_COUNT; j++ ) {
										( skeletonFrame.SkeletonData + i )->SkeletonPositions[ j ].x *= -1.0f;
									}
								}

								_NUI_SKELETON_BONE_ORIENTATION bones[ NUI_SKELETON_POSITION_COUNT ];
								hr = NuiSkeletonCalculateBoneOrientations( skeletonFrame.SkeletonData + i, bones );
								if ( FAILED( hr ) ) {
									error( hr );
								}

								for ( int32_t j = 0; j < (int32_t)NUI_SKELETON_POSITION_COUNT; j++ ) {
									Bone bone( *( ( skeletonFrame.SkeletonData + i )->SkeletonPositions + j ), *( bones + j ) );
									( mSkeletons.begin() + i )->insert( std::make_pair<JointName, Bone>( (JointName)j, bone ) );
								}

							}

						}

						mFrameRateSkeletons = (float)( 1.0 / ( time - mReadTimeSkeletons ) );
						mReadTimeSkeletons = time;

						mNewSkeletons = true;
					}

				}

				//////////////////////////////////////////////////////////////////////////////////////////////

				// Video stream: same lock/convert/release pattern as depth.
				if ( mDeviceOptions.isVideoEnabled() && mVideoStreamHandle != 0 && !mNewVideoSurface ) {

					_NUI_IMAGE_FRAME imageFrame;
					long hr = mSensor->NuiImageStreamGetNextFrame( mVideoStreamHandle, WAIT_TIME, &imageFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						INuiFrameTexture * texture = imageFrame.pFrameTexture;
						_NUI_LOCKED_RECT lockedRect;
						hr = texture->LockRect( 0, &lockedRect, 0, 0 );
						if ( FAILED( hr ) ) {
							error( hr );
						}
						if ( lockedRect.Pitch != 0 ) {
							pixelToVideoSurface( (uint8_t *)lockedRect.pBits );
						} else {
							trace( "Invalid buffer length received." );
						}

						hr = mSensor->NuiImageStreamReleaseFrame( mVideoStreamHandle, &imageFrame );
						if ( FAILED( hr ) ) {
							error( hr );
						}

						mFrameRateVideo = (float)( 1.0 / ( time - mReadTimeVideo ) );
						mReadTimeVideo = time;

						mNewVideoSurface = true;
					}

				}

			}

			// Yield briefly between polls.
			Sleep( 8 );

		}

		// Return to join thread
		return;
	}
// Builds a recipe view for one user area: records models and timeout, maps
// the area name to a display alignment, loads one image and one video per
// cook step (in the current language), positions the navigation buttons, and
// renders an initial placeholder text texture.
RecipeView::RecipeView(UserAreaModel _area, RecipeModel _recipe){
   
    // Start the inactivity clock at construction time.
    lastTouched = getElapsedSeconds();
    
    recipeModel = _recipe;
    areaModel = _area;
    timeOut = areaModel.timeout;
    // menu_img.load(_model.getMenuImage());
    debugState = 2;
    
    // Step-navigation state; -1 marks "no previous step/state yet".
    curStep = 0;
    stepState = 0;
    prevStepState = -1;
    prevStep = -1;
    
    pos = Vec2f(_area.x,_area.y);
    rotation = _area.r;
    
    align=TOP_LEFT;
    
    // Area name → alignment. NOTE(review): "top-left" maps to BOTTOM_RIGHT
    // and "top-right" to BOTTOM_LEFT — presumably intentional (view faces the
    // opposite corner); confirm against layout expectations.
    if(_area.name.compare("bottom-left")==0) align=BOTTOM_LEFT;
    if(_area.name.compare("bottom-right")==0) align=BOTTOM_RIGHT;
    if(_area.name.compare("top-left")==0){
     align=BOTTOM_RIGHT;
        console() << "aligning top left";
    }
    if(_area.name.compare("top-right")==0){
        align=BOTTOM_LEFT;
    console() << "aligning top right";
    }
    console() << "this recipemodel has this many steps: " << recipeModel.getNumSteps() << "." << endl;
    // Preload one hidden image and one hidden video per cook step.
    for(int i=0;i<recipeModel.getNumSteps();i++){
        console() << "wtf: " << recipeModel.getCookStep(i).img.at(0) << endl;
        gallerytools::Image anImage;
        anImage.load(recipeModel.getCookStep(i).img.at(recipeModel.curLanguage));
        anImage.hide();
        images.push_back(anImage);
        gallerytools::Video aVideo;
        console() << "loading video::: " << aVideo.load(recipeModel.getCookStep(i).video.at(recipeModel.curLanguage)) << endl;
        aVideo.hide();
        videos.push_back(aVideo);
    }
    // Place the navigation buttons from the area model.
    back_btn.load(areaModel.back_btn.path);
    back_btn.moveTo(areaModel.back_btn.x,areaModel.back_btn.y,false);
    fwd_btn.load(areaModel.fwd_btn.path);
    fwd_btn.moveTo(areaModel.fwd_btn.x,areaModel.fwd_btn.y,false);
    select_btn.load(areaModel.select_btn.path);
    select_btn.moveTo(areaModel.select_btn.x,areaModel.select_btn.y,false);
    
    string normalFont( "Arial" );
    
    // Render an initial placeholder text texture.
	TextLayout layout;
	layout.clear( ColorA( 0.5f, 0.5f, 0.5f, 0.5f ) );
	layout.setFont( Font( normalFont, 24 ) );
	layout.setColor( Color( 1, 1, 1 ) );
	layout.addLine( "testing here");
	Surface8u rendered = layout.render( true, false );
	text_texture = gl::Texture( rendered );
    select_btn.setHalfHidden();
    back_btn.setHalfHidden();
    fwd_btn.setHalfHidden();
    
}
// Per-frame update: handles inactivity timeout (returning to the first step),
// advances the current step's video, refreshes the debug text texture, and
// reconciles image/video visibility when the step or step-state changes.
// stepState 0 = showing the step image, 1 = playing the step video.
void RecipeView::update(){
    
    
    //console() << ":::TIMED OUT:::" << endl;
    if((curStep==0 && stepState==0) || stepState==1){
        // it's already at the start, or if it's on a video...
        delayTimeOut();
    } else {
        // Otherwise enforce the inactivity timeout.
        double curTime = getElapsedSeconds();
        if((curTime-lastTouched)>timeOut){
            console() << "going to start: " << recipeModel.name << endl;
            goStart();
        }
    }
    
    
      for(int i=0;i<videos.size();i++){
        if(i!=curStep){
                      // videos.at(i).stop();
        }
    }
    // While in video state, advance the current video and auto-advance to
    // the next step when it finishes.
    if(stepState==1){
        if(videos.size()>curStep){
            videos.at(curStep).update();
            if(videos.at(curStep).isDone()) forwardRelease();
        }
    }
    
    // Debug overlay: render current step/state into the text texture.
    char buffer [40];
    sprintf (buffer, "curStep: %i \n stepState: %i", curStep, stepState);
    TextLayout layout;
	layout.clear( ColorA( 0.5f, 0.5f, 0.5f, 0.5f ) );
	layout.setFont( Font( "Arial", 18 ) );
	layout.setColor( Color( 1, 1, 1 ) );
    layout.addLine(recipeModel.name.c_str());
	layout.addLine( buffer);
	Surface8u rendered = layout.render( true, false );
    text_texture = gl::Texture( rendered );
    
    
    // this needs work here....
    // this is specifically to handle a goStart() situation...
    // NOTE(review): size()>prevStep mixes unsigned size_t with signed int;
    // when prevStep is -1 the comparison is false (the -1 converts to a huge
    // unsigned value), which happens to be the safe outcome — confirm this
    // is relied upon intentionally.
    if(prevStepState==0 && stepState == 0 && prevStep!=curStep){
        console() << "THIS SHOULD BE FIRING on TIMEOUT >>> " << recipeModel.name << endl;
        if(images.size()>prevStep) images.at(prevStep).hide();
        if(images.size()>curStep) images.at(curStep).show();
        prevStep = curStep;
        
    } else {
        
    if(prevStep!=curStep){
        // moved from video to new start image
        // load both the image and the video for the new step
       if(prevStep>-1) videos.at(prevStep).stop();
        prevStep = curStep;
    }
    if(prevStepState!=stepState){
        // moved from start image to video
        if(stepState==1){
            if(videos.size()>curStep)    videos.at(curStep).show();
            if(images.size()>curStep)  images.at(curStep).hide();
            if(videos.size()>curStep)   videos.at(curStep).play();
        } else {
            if(images.size()>curStep) images.at(curStep).show();
            if(videos.size()>curStep)  videos.at(curStep).hide();
            if(videos.size()>curStep)  videos.at(curStep).stop();
        }
        prevStepState = stepState;
    }
        }
    
}
// Pushes the inactivity timeout forward by recording "now" as the last touch.
void RecipeView::delayTimeOut(){
    lastTouched = getElapsedSeconds();
}
Exemple #21
0
// Returns whether the selected MIDI test is still running, passing it the
// seconds elapsed since the app's start time.
bool MidiApp::running()
{
    return _detail->testMidi->running(getElapsedSeconds() - _detail->startTime);
}
	// Capture-thread entry point (variant without device options / bone
	// orientations). Polls depth, skeleton and video streams until mCapture
	// is cleared; each stream is read only when its previous frame has been
	// consumed, and keeps its own frame-rate estimate from one time sample
	// per loop iteration.
	void Kinect::run()
	{
		while ( mCapture ) {
			if ( mSensor != 0 ) {

				// Get elapsed time to calculate frame rate
				double time = getElapsedSeconds();

				//////////////////////////////////////////////////////////////////////////////////////////////

				if ( mEnabledDepth && mDepthStreamHandle != 0 && !mNewDepthFrame ) {

					// Acquire depth image
					_NUI_IMAGE_FRAME imageFrame;
					HRESULT hr = mSensor->NuiImageStreamGetNextFrame( mDepthStreamHandle, WAIT_TIME, &imageFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						// Read texture to surface
						INuiFrameTexture * texture = imageFrame.pFrameTexture;
						_NUI_LOCKED_RECT lockedRect;
						hr = texture->LockRect( 0, &lockedRect, 0, 0 );
						if ( FAILED( hr ) ) {
							error( hr );
						}
						if ( lockedRect.Pitch == 0 ) {
							trace( "Invalid buffer length received" );
						} else {
							pixelToDepthSurface( mDepthSurface, (uint16_t*)lockedRect.pBits );
						}

						// Clean up
						hr = mSensor->NuiImageStreamReleaseFrame( mDepthStreamHandle, &imageFrame );
						if ( FAILED( hr ) ) {
							error( hr ); 
						}
						
						// Update frame rate
						mFrameRateDepth = (float)( 1.0 / ( time - mReadTimeDepth ) );
						mReadTimeDepth = time;

						// Set flag
						mNewDepthFrame = true;

						// Update user count
						mUserCount = 0;
						for ( uint32_t i = 0; i < NUI_SKELETON_COUNT; i++ ) {
							if ( mActiveUsers[ i ] ) {
								mUserCount++;
							}
						}

					}

				}

				//////////////////////////////////////////////////////////////////////////////////////////////

				if ( mEnabledSkeletons && mIsSkeletonDevice && !mNewSkeletons ) {

					// Acquire skeleton
					_NUI_SKELETON_FRAME skeletonFrame;
					HRESULT hr = mSensor->NuiSkeletonGetNextFrame( WAIT_TIME, &skeletonFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						// Iterate through skeletons
						bool foundSkeleton = false;
						for ( int32_t i = 0; i < NUI_SKELETON_COUNT; i++ ) {

							// Clear skeleton data
							mSkeletons[ i ].clear();

							// Mark skeleton found
							if ( ( skeletonFrame.SkeletonData + i )->eTrackingState == NUI_SKELETON_TRACKED ) {

								// Smooth out the skeleton data when found
								// (applied once to the whole frame, on the
								// first tracked skeleton)
								if ( !foundSkeleton ) {
									hr = mSensor->NuiTransformSmooth( &skeletonFrame, 0 );
									if ( FAILED( hr ) ) {
										error( hr );
									}
									foundSkeleton = true;
								}

								// Get skeleton data
								_NUI_SKELETON_DATA skeletonData = *( skeletonFrame.SkeletonData + i );

								// Set joint data
								for ( int32_t j = 0; j < (int32_t)NUI_SKELETON_POSITION_COUNT; j++ ) {
									Vector4 point = *( skeletonData.SkeletonPositions + j );
									( mSkeletons.begin() + i )->insert( std::make_pair<JointName, Vec3f>( (JointName)j, Vec3f( point.x, point.y, point.z ) ) );
								}

							}

						}

						// Update frame rate
						mFrameRateSkeletons = (float)( 1.0 / ( time - mReadTimeSkeletons ) );
						mReadTimeSkeletons = time;

						// Set flag
						mNewSkeletons = true;

					}

				}

				//////////////////////////////////////////////////////////////////////////////////////////////

				if ( mEnabledVideo && mVideoStreamHandle != 0 && !mNewVideoFrame ) {

					// Acquire video image
					_NUI_IMAGE_FRAME imageFrame;
					HRESULT hr = mSensor->NuiImageStreamGetNextFrame( mVideoStreamHandle, WAIT_TIME, &imageFrame );
					if ( FAILED( hr ) ) {
						error( hr );
					} else {

						// Read texture
						INuiFrameTexture * texture = imageFrame.pFrameTexture;
						_NUI_LOCKED_RECT lockedRect;
						hr = texture->LockRect( 0, &lockedRect, 0, 0 );
						if ( FAILED( hr ) ) {
							error( hr );
						}
						if ( lockedRect.Pitch != 0 ) {
							pixelToVideoSurface( mVideoSurface, (uint8_t *)lockedRect.pBits );
						} else {
							trace( "Invalid buffer length received." );
						}

						// Clean up
						hr = mSensor->NuiImageStreamReleaseFrame( mVideoStreamHandle, &imageFrame );
						if ( FAILED( hr ) ) {
							error( hr );
						}

						// Update frame rate
						mFrameRateVideo = (float)( 1.0 / ( time - mReadTimeVideo ) );
						mReadTimeVideo = time;

						// Set flag
						mNewVideoFrame = true;

					}

				}

			}

			// Pause thread
			Sleep( 17 );

		}

		// Return to join thread
		return;
	}
// Convenience overload: seconds elapsed from `start` until the current
// monotonic clock reading.
double ClockInterface::getElapsedSeconds(const PreciseClock& start) {
  return getElapsedSeconds(start, getMonotonicClock());
}
Exemple #24
0
// One-time engine setup hook: advances the timeline to the current time
// before invoking the user's setup() override.
void AppBase::privateSetup__()
{
	mTimeline->stepTo( static_cast<float>( getElapsedSeconds() ) );

	setup();
}
// Returns the app to the idle state once the countdown window has elapsed.
void StateLose::update()
{
	const bool countdownDone = getElapsedSeconds() > n_countdown;
	if (countdownDone)
	{
		_app.changeToState(_app._state_idle);
	}
}
// Clears the frame to the background color and draws the GUI, passing it the
// current elapsed time.
void ProjectManagerApp::draw()
{
	gl::clear(bgColor);
	gui->draw(getElapsedSeconds());
}
// Advances one data crawler along its region-of-interest (ROI): moves its
// position by speed * elapsed time, rolls over to the next ROI when the
// current one is exhausted, then extracts the ROI's base-pair characters
// from the shared data buffer into the crawler's data set.
void DataManager::updateDataCrawler( DataCrawler* dataCrawler ){
        
    // Nothing to do for an empty crawler.
    if(dataCrawler->length == 0) return;
    
    double time = getElapsedSeconds();
    double diff = time - dataCrawler->lastUpdate;

    // New position = ROI start + elapsed-time-scaled speed.
    float pos = dataCrawler->roiDataSet.startPosition + (diff * (float)dataCrawler->speed*mMainSpeed);
//    console() << "DataManager::updateDataCrawler!  pos "  h<< pos << " diff:" << diff << std::endl;
    dataCrawler->pos = pos;
    // Past the end of the ROI: advance to the next ROI and reset the crawler.
    if( dataCrawler->roiDataSet.endPosition <= pos ){
        dataCrawler->roiDataSet = getNextRoi(dataCrawler);
        dataCrawler->roiDataSetID = dataCrawler->roiDataSet.roiId;
        dataCrawler->lastUpdate = getElapsedSeconds();
        dataCrawler->pos = dataCrawler->roiDataSet.startPosition;
        dataCrawler->speed = 2;//pow(Rand::randInt(1,4),2);
        sOnRoiChange();
        console() << "DataManager::updateDataCrawler::resetCrawler " << std::endl;
    }
//    int len = dataCrawler->length;
    int len = dataCrawler->roiDataSet.basePairsCount;
        
    char* datas = (char*)mDataBuffer.getData();
    int size = mDataBuffer.getDataSize();
    
    // Copy `len` characters from the buffer starting at the ROI start,
    // clamping reads to the last valid byte.
    // NOTE(review): the loop bound uses startPosition (apparently float) as
    // an int loop start — relies on implicit truncation; confirm intended.
    char d;
    string dataString = "";
    for(int i=dataCrawler->roiDataSet.startPosition;i<dataCrawler->roiDataSet.startPosition+len;i++){
        d = *(datas+min(i,size-1));        
        dataString += d;
    }
//    for(int i=start;i<end;i++){
//        
//        d = *(datas+min(i,size-1));
//        
//        dataString += d;
////        dataCharPos = (int)(cnt / 4);
////        dataBitPos = ((cnt%4)) * 2;
//        
//        // seems to be the fastest way to do it like this according to:
//        // http://stackoverflow.com/questions/6860525/c-what-is-faster-lookup-in-hashmap-or-switch-statement
//        //
//        switch(d){
//            case 'A':
////                rawData[dataCharPos] |= 0 << dataBitPos;
////                dataString += "A";
//                break;
//            case 'C':
////                rawData[dataCharPos] |= 1 << dataBitPos;
////                dataString += "C";
//                break;
//            case 'G':
////                rawData[dataCharPos] |= 2 << dataBitPos;
////                dataString += "G";
//                break;
//            case 'T':
////                dataString += "T";
////                rawData[dataCharPos] |= 3 << dataBitPos;
//                break;
//                
//        }
//        cnt++;
//    }
    
    // Publish the extracted window into the crawler's data set.
    dataCrawler->dataSet.dataBitsString = dataString;
    dataCrawler->dataSet.startPosition = pos;
    dataCrawler->dataSet.basePairsCount = len;
    dataCrawler->dataSet.chromosomeData = mCurrentDataSet;
    dataCrawler->dataSet.roi = dataCrawler->roiDataSet;
    dataCrawler->dataSet.percent = (dataCrawler->pos-dataCrawler->roiDataSet.startPosition) / (float)dataCrawler->roiDataSet.basePairsCount;

}
Exemple #28
0
// Advances the selected MIDI test, passing it the seconds elapsed since the
// app's start time.
void MidiApp::update()
{
    _detail->testMidi->update(getElapsedSeconds() - _detail->startTime);
}
Exemple #29
0
// Windows main loop: creates the configured windows, runs setup, then loops
// update/draw with frame-rate pacing until quit; Win32 messages are pumped
// while waiting for the next frame deadline.
void AppImplMswBasic::run()
{
	mFrameRate = mApp->getSettings().getFrameRate();
	mFrameRateEnabled = mApp->getSettings().isFrameRateEnabled();

	// Create one window per requested format (default format if none given).
	auto formats = mApp->getSettings().getWindowFormats();
	if( formats.empty() )
		formats.push_back( mApp->getSettings().getDefaultWindowFormat() );
	for( auto format = formats.begin(); format != formats.end(); ++format ) {
		if( ! format->isTitleSpecified() )
			format->setTitle( mApp->getSettings().getTitle() );
		createWindow( *format );
	}

	// Run app setup, then deliver an initial resize to every window.
	mApp->privateSetup__();
	mSetupHasBeenCalled = true;
	for( auto windowIt = mWindows.begin(); windowIt != mWindows.end(); ++windowIt )
		(*windowIt)->resize();

	// initialize our next frame time
	mNextFrameTime = getElapsedSeconds();

	// inner loop
	while( ! mShouldQuit ) {
		// update and draw
		mApp->privateUpdate__();
		for( auto windowIt = mWindows.begin(); windowIt != mWindows.end(); ++windowIt )
			(*windowIt)->redraw();

		// get current time in seconds
		double currentSeconds = mApp->getElapsedSeconds();

		// calculate time per frame in seconds
		double secondsPerFrame = 1.0 / mFrameRate;

		// determine if application was frozen for a while and adjust next frame time
		double elapsedSeconds = currentSeconds - mNextFrameTime;
		if( elapsedSeconds > 1.0 ) {
			int numSkipFrames = (int)(elapsedSeconds / secondsPerFrame);
			mNextFrameTime += (numSkipFrames * secondsPerFrame);
		}

		// determine when next frame should be drawn
		mNextFrameTime += secondsPerFrame;

		// sleep and process messages until next frame
		if( ( mFrameRateEnabled ) && ( mNextFrameTime > currentSeconds ) )
			sleep(mNextFrameTime - currentSeconds);
		else {
			// No pacing (or already behind schedule): drain pending Win32
			// messages instead of sleeping.
			MSG msg;
			while( ::PeekMessage( &msg, NULL, 0, 0, PM_REMOVE ) ) {
				::TranslateMessage( &msg );
				::DispatchMessage( &msg );
			}
		}
	}

//	killWindow( mFullScreen );
	mApp->emitShutdown();
	delete mApp;
}
Exemple #30
0
// Capture-thread entry point. Waits on the color/depth/skeleton events, then
// reads whichever streams are enabled and whose previous frame has been
// consumed. A single overall frame rate is maintained from the skeleton read.
void Device::run()
{
	// Fix: the array previously held 4 slots with only 3 initialized, so the
	// sizeof-derived count passed an uninitialized HANDLE to
	// WaitForMultipleObjects. Three events exist; declare exactly three.
	HANDLE events[ 3 ];
    events[ 0 ] = mColorEvent;
    events[ 1 ] = mDepthEvent;
    events[ 2 ] = mSkeletonEvent;

	while ( mCapture ) {
		if ( mSensor != 0 ) {
			double time = getElapsedSeconds();

			// Block (up to WAIT_TIME) until any stream signals a new frame.
			WaitForMultipleObjects( sizeof( events ) / sizeof( events[ 0 ]), events, 0, WAIT_TIME );

			const NUI_IMAGE_FRAME* frameColor	= 0;
			const NUI_IMAGE_FRAME* frameDepth	= 0;
			NUI_SKELETON_FRAME frameSkeleton	= { 0 };

			// Read a stream only when its previous result was consumed and
			// the stream is enabled.
			bool readColor		= !mNewColorSurface;
			bool readDepth		= !mNewDepthSurface;
			bool readSkeleton	= !mNewSkeletons;
			/*if ( mDeviceOptions.isFrameSyncEnabled() && mDeviceOptions.isColorEnabled() && mDeviceOptions.isDepthEnabled() ) {
				if ( readColor != readDepth ) {
					readColor	= false;
					readDepth	= false;
				}
				readSkeleton	= readDepth;
			}*/
			readColor		= readColor && mDeviceOptions.isColorEnabled();
			readDepth		= readDepth && mDeviceOptions.isDepthEnabled();
			readSkeleton	= readSkeleton && mDeviceOptions.isSkeletonTrackingEnabled();

			//////////////////////////////////////////////////////////////////////////////////////////////

			// Depth stream: lock the frame texture, convert to the depth
			// surface, release the frame, and refresh the active-user count.
			if ( readDepth && WAIT_OBJECT_0 == WaitForSingleObject( mDepthEvent, 0 ) ) {
				if ( SUCCEEDED( NuiImageStreamGetNextFrame( mDepthStreamHandle, 0, &frameDepth ) ) && 
					frameDepth != 0 && frameDepth->pFrameTexture != 0 ) {
					mDepthTimeStamp				= frameDepth->liTimeStamp.QuadPart;
					INuiFrameTexture* texture	= frameDepth->pFrameTexture;
					_NUI_LOCKED_RECT lockedRect;
					long hr = texture->LockRect( 0, &lockedRect, 0, 0 );
					if ( FAILED( hr ) ) {
						error( hr );
					}
					if ( lockedRect.Pitch == 0 ) {
						console() << "Invalid buffer length received" << endl;
					} else {
						pixelToDepthSurface( (uint16_t*)lockedRect.pBits );
					}

					hr = NuiImageStreamReleaseFrame( mDepthStreamHandle, frameDepth );
					if ( FAILED( hr ) ) {
						error( hr ); 
					}
					
					mUserCount = 0;
					for ( uint32_t i = 0; i < NUI_SKELETON_COUNT; ++i ) {
						if ( mActiveUsers[ i ] ) {
							++mUserCount;
						}
					}
					mNewDepthSurface = true;
				}
			}

			//////////////////////////////////////////////////////////////////////////////////////////////
				
			// Color stream: same lock/convert/release pattern as depth.
			if ( readColor && WAIT_OBJECT_0 == WaitForSingleObject( mColorEvent, 0 ) ) {
				if ( SUCCEEDED( NuiImageStreamGetNextFrame( mColorStreamHandle, 0, &frameColor ) ) && 
					frameColor != 0 && frameColor->pFrameTexture != 0 ) {
					INuiFrameTexture* texture = frameColor->pFrameTexture;
					_NUI_LOCKED_RECT lockedRect;
					long hr = texture->LockRect( 0, &lockedRect, 0, 0 );
					if ( FAILED( hr ) ) {
						error( hr );
					}
					if ( lockedRect.Pitch != 0 ) {
						pixelToColorSurface( (uint8_t*)lockedRect.pBits );
						/*if ( mDeviceOptions.isFrameSyncEnabled() ) {
							mColorFrames.push_back( ColorFrame( mColorSurface, frameColor->liTimeStamp.QuadPart ) );
							if ( mColorFrames.size() > 10 ) {
								mColorFrames.erase( mColorFrames.begin() );
							}
						}*/
					} else {
						console() << "Invalid buffer length received." << endl;
					}

					hr = NuiImageStreamReleaseFrame( mColorStreamHandle, frameColor );
					if ( FAILED( hr ) ) {
						error( hr );
					}
					mNewColorSurface = true;
				}
			}

			//////////////////////////////////////////////////////////////////////////////////////////////

			// Skeleton stream: smooth the frame once (on the first tracked
			// skeleton), optionally mirror X, compute bone orientations, and
			// store joint data per skeleton slot.
			if ( readSkeleton && WAIT_OBJECT_0 == WaitForSingleObject( mSkeletonEvent, 0 ) ) {
				long hr = NuiSkeletonGetNextFrame( 0, &frameSkeleton );
				if ( SUCCEEDED( hr ) ) {
					bool foundSkeleton = false;
					for ( int32_t i = 0; i < NUI_SKELETON_COUNT; ++i ) {

						mSkeletons.at( i ).clear();

						NUI_SKELETON_TRACKING_STATE trackingState = frameSkeleton.SkeletonData[ i ].eTrackingState;
						if ( trackingState == NUI_SKELETON_TRACKED || trackingState == NUI_SKELETON_POSITION_ONLY ) {

							if ( !foundSkeleton ) {
								_NUI_TRANSFORM_SMOOTH_PARAMETERS transform = kTransformParams[ mTransform ];
								hr = mSensor->NuiTransformSmooth( &frameSkeleton, &transform );
								if ( FAILED( hr ) ) {
									error( hr );
								}
								foundSkeleton = true;
							}

							// Flip X when flipping the image.
							if ( mFlipped ) {
								( frameSkeleton.SkeletonData + i )->Position.x *= -1.0f;
								for ( int32_t j = 0; j < (int32_t)NUI_SKELETON_POSITION_COUNT; ++j ) {
									( frameSkeleton.SkeletonData + i )->SkeletonPositions[ j ].x *= -1.0f;
								}
							}

							_NUI_SKELETON_BONE_ORIENTATION bones[ NUI_SKELETON_POSITION_COUNT ];
							hr = NuiSkeletonCalculateBoneOrientations( frameSkeleton.SkeletonData + i, bones );
							if ( FAILED( hr ) ) {
								error( hr );
							}

							for ( int32_t j = 0; j < (int32_t)NUI_SKELETON_POSITION_COUNT; ++j ) {
								Bone bone( *( ( frameSkeleton.SkeletonData + i )->SkeletonPositions + j ), *( bones + j ) );
								( mSkeletons.begin() + i )->insert( std::pair<JointName, Bone>( (JointName)j, bone ) );
							}

						}

					}
					mNewSkeletons = true;
				}

				mFrameRate	= (float)( 1.0 / ( time - mReadTime ) );
				mReadTime	= time;
			}
		}
	}
	return;
}