RobotWindow::RobotWindow(double x, double y, double z)
    : WindowControl(), platform(x, y, z), robot(5, platform), textLives(), textGameOver(),
      textFps(), borderland(8, 200), platformColor()
{
    // store the parameters used to create the object
    platWidth = x;
    platHeight = y;
    platDeep = z;

    // create and configure the objects used to draw the texts
    textGameOver.setFontStyle(GLUT_BITMAP_TIMES_ROMAN_24);
    textGameOver.setText("GAME OVER");
    textGameOver.setPosition(-0.25, 0.1, -1);
    textGameOver.setColor(vermelho);

    textLives.setText("Vidas: %d", robot.getLives());
    textLives.setPosition(-0.95, 0.9, -1);
    textLives.setColor(verde);

    textFps.setText("FPS: %d", getFps());
    textFps.setPosition(-0.95, -0.95, -1);

    // schedule an event to guarantee at least one
    // glutPostRedisplay() per second
    newEvent(EVENT_POST_REDISPLAY, 1000);
    setFPS(120);

    // set up the OpenGL environment
    setTitle("SCARA - OpenGL");
    configure();

    // set the initial camera position
    robot.configureLookAt(lookAt);

    // schedule the program's start-up animation event
    cameraRadius = 400;
    setCameraPosition(180, 90);
    newEvent(EVENT_RESET, interval);

    // start out drawing the spatial boundary of the game
    drawCage = true;

    // set the robot colors
    platformColor.setColor(cinzaEscuro);
    robot.base.setColor(cinzaMaisEscuro);
    robot.armBase.setColor(amarelo);
    robot.arm.setColor(cinzaMaisEscuro);
    robot.clawBase.setColor(amarelo);
    robot.claw.hand.setColor(vermelho);

    // initial game control settings
    mouseX = mouseY = 0;
    mouseButton = 0;
    mouseState = GLUT_UP;
    eventFlying = 0;
}
void Fps::renderInfo()
{
    //dogo : oslPrintf_xy(0,25,"FPS : %0.1f %d\n", getFps(), getUpf());
    char fps[256];
    sprintf(fps, "FPS : %0.1f", getFps());
    oslIntraFontSetStyle(gFont, 1.0f, RGBA(192, 192, 192, 255), RGBA(0, 0, 0, 0), INTRAFONT_ALIGN_LEFT);
    oslDrawString(0, 23, fps);
}
//-------------------------------------------------------------------------------------------------
void QTDialogSample::Update()
{
    // update the fps label
    double fps = getFps();
    QString fpsStr = "FPS = ";
    fpsStr += QString::number(fps, 'f', 2);
    m_ui->fpsLabel->setText(fpsStr);
}
VideoFrame VideoBuffer::getVideoFrame(TimeDiff time)
{
    VideoFrame frame;
    if (size() > 0) {
        // convert the elapsed time (in microseconds) into a frame offset measured back from the end of the buffer
        int frameback = CLAMP((int)((float)time / 1000000.0 * (float)getFps()), 1, int(size()));
        int currentPos = CLAMP(size() - frameback, 0, size() - 1);
        frame = frames[currentPos];
    }
    return frame;
}
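// A minimal, self-contained sketch of the time-to-frame-index mapping used in
// VideoBuffer::getVideoFrame above, assuming a fixed frame rate and a buffer that
// stores the newest frame at the back. The names (frameIndexForAge, bufferSize, fps)
// are illustrative and not part of the original API.
#include <algorithm>
#include <cstdint>

// Map an age in microseconds to an index into the frame buffer, clamped to valid range.
int frameIndexForAge(int64_t ageMicros, int bufferSize, double fps)
{
    if (bufferSize <= 0) return -1;                        // empty buffer: no frame available
    int frameback = (int)(ageMicros / 1000000.0 * fps);    // how many frames ago that instant was
    frameback = std::max(1, std::min(frameback, bufferSize));
    return std::max(0, std::min(bufferSize - frameback, bufferSize - 1));
}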
int main()
{
    SDeviceContextSettings settings;
    settings.MultiSamplingCount = 4;
    settings.MultiSamplingQuality = 32;

    IDevice* device = createDevice(EDT_DIRECT3D11, 800, 600, EWS_NONE, true, settings);
    IVideoDriver* driver = device->getVideoDriver();
    ISceneManager* smgr = device->getSceneManager();
    IMeshManager* meshManager = driver->getMeshManager();

    IResourceGroupManager* resourceGroupManager = driver->getResourceGroupManager();
    resourceGroupManager->init("Resources.cfg");
    resourceGroupManager->loadResourceGroup("General");

    XMFLOAT3 vertices[4];
    vertices[0] = XMFLOAT3(-10.0f, 0.0f, 10.0f);
    vertices[1] = XMFLOAT3(10.0f, 0.0f, 10.0f);
    vertices[2] = XMFLOAT3(-10.0f, 0.0f, -10.0f);
    vertices[3] = XMFLOAT3(10.0f, 0.0f, -10.0f);

    ISimpleMesh* mesh = meshManager->createSimpleMesh("pointlist", vertices, NULL, 4, sizeof(XMFLOAT3), 0, false);
    IMeshNode* meshNode = smgr->addMeshNode(mesh, nullptr, nullptr);
    meshNode->setMaterialName("test/ts_material");

    ICameraNode* camera = smgr->addFpsCameraNode(1, nullptr, XMFLOAT3(0, 1.0f, -4.0f), XMFLOAT3(0, 1.0f, 0.0f));

    char caption[200];
    ITimer* timer = device->createTimer();
    timer->reset();

    while (device->run())
    {
        const float clearColor[] = { 0.0f, 0.0f, 0.0f, 1.0f };
        driver->beginScene(true, true, clearColor);

        float dt = timer->tick();
        updateCamera(camera, dt);

        smgr->drawAll();
        driver->endScene();

        sprintf(caption, "FPS:%f", getFps(dt));
        device->setWindowCaption(caption);
    }

    device->drop();
    return 0;
}
void Game::run()
{
    initialize();

    while (m_world->isExists())
    {
        clock_t clockNow = clock();
        clock_t deltaClock = clockNow - m_clockLastFrame;
        float deltaTime = float(deltaClock) / CLOCKS_PER_SEC;
        m_clockLastFrame = clockNow;

        m_framesCounter++;
        m_framesTimeCounter += deltaTime;
        if (m_framesTimeCounter >= 1.0)
        {
            m_framesTimeCounter -= 1.0;
            m_fps = m_framesCounter;
            m_framesCounter = 0;
        }

        m_renderSystem->clear();

        if (m_currentScreen != NULL)
        {
            ScreenType next = m_currentScreen->openNext();
            if (ScreenType_None == next)
            {
                m_currentScreen->render(m_renderSystem, m_world);
                m_currentScreen->update(deltaTime, m_world);
            }
            else
            {
                setScreen(next);
            }
        }
        else
        {
            char buffer[SCREEN_WIDTH];
            int length = sprintf_s(buffer, "SCREEN NOT SET!");
            int x = SCREEN_WIDTH / 2 - length / 2;
            m_renderSystem->drawText(x, SCREEN_HEIGHT / 2, buffer, ConsoleColor_Grey, ConsoleColor_Black);
        }

        m_world->postUpdate(deltaTime);

        char buffer[SCREEN_WIDTH];
        int length = sprintf_s(buffer, "FPS: %d", getFps());
        m_renderSystem->drawText(SCREEN_WIDTH - length, 0, buffer, ConsoleColor_Grey, ConsoleColor_Black);

        m_renderSystem->flush();
    }
}
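// A minimal sketch of the frame-counting FPS technique used in Game::run above:
// count frames, and once a full second of frame time has accumulated, publish the
// count and start over. std::chrono is used here instead of clock() as an assumption;
// FpsCounter is an illustrative name, not a class from the original code.
#include <chrono>

class FpsCounter
{
public:
    // Call once per frame; returns the most recently published FPS value.
    int tick()
    {
        using clock = std::chrono::steady_clock;
        auto now = clock::now();
        m_accumulated += std::chrono::duration<double>(now - m_lastFrame).count();
        m_lastFrame = now;
        ++m_frames;
        if (m_accumulated >= 1.0)      // one second elapsed: publish and reset
        {
            m_fps = m_frames;
            m_frames = 0;
            m_accumulated -= 1.0;
        }
        return m_fps;
    }

private:
    std::chrono::steady_clock::time_point m_lastFrame = std::chrono::steady_clock::now();
    double m_accumulated = 0.0;
    int m_frames = 0;
    int m_fps = 0;
};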
void VideoInput::createDecoder()
{
    deleteDecoder();

    switchPending_ = false;

    if (decOpts_.input.empty()) {
        foundDecOpts(decOpts_);
        return;
    }

    auto decoder = std::unique_ptr<MediaDecoder>(new MediaDecoder());

    if (emulateRate_)
        decoder->emulateRate();

    decoder->setInterruptCallback(
        [](void* data) -> int { return not static_cast<VideoInput*>(data)->isCapturing(); },
        this);

    if (decoder->openInput(decOpts_) < 0) {
        RING_ERR("Could not open input \"%s\"", decOpts_.input.c_str());
        foundDecOpts(decOpts_);
        return;
    }

    /* Data available, finish the decoding */
    if (decoder->setupFromVideoData() < 0) {
        RING_ERR("decoder IO startup failed");
        foundDecOpts(decOpts_);
        return;
    }

    decOpts_.width = decoder->getWidth();
    decOpts_.height = decoder->getHeight();
    decOpts_.framerate = decoder->getFps();

    RING_DBG("created decoder with video params : size=%dX%d, fps=%lf",
             decOpts_.width, decOpts_.height, decOpts_.framerate.real());

    decoder_ = std::move(decoder);
    foundDecOpts(decOpts_);

    /* Signal the client about readable sink */
    sink_->setFrameSize(decoder_->getWidth(), decoder_->getHeight());
}
// Prepare parameters
void Oni2Grabber::initCamera_()
{
    auto video_mode = streams_[0]->getVideoMode();

    // (1) Set parameters
    if (desired_width_ && desired_height_)
    {
        video_mode.setResolution(desired_width_, desired_height_);
        streams_[0]->setVideoMode(video_mode);
    }

    // (2) Get parameters
    width_ = video_mode.getResolutionX();
    height_ = video_mode.getResolutionY();
    fps_ = video_mode.getFps();
    size_ = width_ * height_;
}
int main()
{
    int i;
    int mouseX, mouseY;
    waveStruct waves[maxWaves];
    int wavesCount = 0;
    int drawWaves;
    double buildTime = glfwGetTime();

    glfwInit();
    glfwOpenWindowHint(GLFW_FSAA_SAMPLES, 8);
    glfwOpenWindow(xSize, ySize, 0, 0, 0, 0, 24, 0, GLFW_WINDOW);
    glfwSetWindowPos(0, 0);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, xSize, 0, ySize, 0, 1);
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_DEPTH_TEST);
    glfwSwapInterval(1);

    while (1)
    {
        glClear(GL_COLOR_BUFFER_BIT);
        getFps();

        if (currentTime - buildTime > 0.48)
        {
            buildTime = glfwGetTime();
            glfwGetMousePos(&mouseX, &mouseY);
            buildWave(waves, &wavesCount, mouseX, mouseY);
        }

        drawWaves = (wavesCount >= maxWaves) ? maxWaves : wavesCount;
        for (i = 0; i < drawWaves; ++i)
        {
            drawWave(waves[i]);
            waves[i].radius += 0.032 * (1024 / FPS);
        }

        glfwSwapBuffers();
    }
    return 0;
}
void VideoBuffer::draw()
{
    float length = (float(size()) / float(maxSize)) * (ofGetWidth() - (PMDRAWSPACING * 2));
    float oneLength = float(ofGetWidth() - PMDRAWSPACING * 2) / (float)(maxSize);
    int sizeInOneLengths = oneLength * size();
    int drawBufferY = ofGetHeight() - 80;
    int originXAtEnd = ofGetWidth() - PMDRAWSPACING;

    ofEnableAlphaBlending();
    ofSetColor(25, 25, 25, 240);
    ofRect(PMDRAWSPACING, ofGetHeight() - 140, ofGetWidth() - 2 * PMDRAWSPACING, 100);

    if (!stopped) ofSetColor(255, 0, 0);
    else ofSetColor(255);
    //ofSetColor(255);
    ofLine(originXAtEnd - int(sizeInOneLengths), drawBufferY, originXAtEnd, drawBufferY);
    ofSetColor(255);

    int fps = getFps();
    char measureMessage[10];
    for (int i = 0; i < size() + 1; i = i + 5)
    {
        if (fps && (size() - i) % fps == 0)
        {
            ofSetColor(200);
            ofRect(originXAtEnd - (oneLength * i), drawBufferY, oneLength, -10);
            ofLine(originXAtEnd - (oneLength * i), drawBufferY, originXAtEnd - (oneLength * i), drawBufferY - 10);
            ofDrawBitmapString(ofToString(int((float(size()) - float(i)) / float(fps))) + "s",
                               originXAtEnd - (oneLength * i), drawBufferY + 15);
            // ofLine(originXAtEnd - (oneLength*i),710,originXAtEnd - (oneLength*i),700);
            // sprintf(measureMessage,"%0.2f",(float)(frames[i]->getTimestamp()-initTime)/1000000.0);
            // ofDrawBitmapString(measureMessage,originXAtEnd - (oneLength*i),695);
        }
        else
        {
            ofSetColor(64);
            ofLine(originXAtEnd - (oneLength * i), drawBufferY, originXAtEnd - (oneLength * i), drawBufferY - 10);
        }
    }

    ofDisableAlphaBlending();
    ofSetColor(255);
}
void VideoBuffer::draw(int _x, int _y, int _w, int _h)
{
    float length = _w;
    float oneLength = (float)(ofGetWidth() - _x) / (float)(maxSize);
    int drawBufferY = _y;

    if (stopped) ofSetColor(255, 0, 0);
    else ofSetColor(255);
    ofDrawLine(_x, drawBufferY, length, drawBufferY);
    ofSetColor(255);

    char measureMessage[10];
    for (int i = 0; i < (int)size() + 1; i++)
    {
        /*
        if(i%100==0){
            ofLine(oneLength*i,710,oneLength*i,700);
            sprintf(measureMessage,"%0.2f",(float)(frames[i]->getTimestamp()-initTime)/1000000.0);
            ofDrawBitmapString(measureMessage,oneLength*i,695);
        }
        */
        int fps = getFps();
        if (fps && i % (int)fps == 0)
        {
            ofSetLineWidth(2.0);
            ofSetColor(255, 128, 0);
            if (i != int(size()))
                ofDrawBitmapString(ofToString(int(size() - i - 1)), oneLength * i + _x + oneLength / 2, PMDRAWELEMENTSY + 104);
            else
            {
                ofSetColor(50);
                ofDrawBitmapString(ofToString(getTotalFrames()), oneLength * i + _x - 10, PMDRAWELEMENTSY + 55);
            }
            //if(i!=int(size())) ofDrawBitmapString(ofToString(getTotalFrames()-i),ofGetWidth()-PMDRAWSPACING-(oneLength*(i+1)) + oneLength/2,drawBufferY-15);
        }
        else
        {
            ofSetLineWidth(1.0);
            ofSetColor(155, 58, 0);
        }
        ofDrawLine(oneLength * i + _x, drawBufferY, oneLength * i + _x, drawBufferY - 10);
    }
}
/**
 * Returns the number of frames per second
 */
double cmTimer::fps()
{
    return getFps(delta_var);
}
float SceneManager::getDeltaTime() const
{
    return 1.0f / getFps();
}
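// The one-liner above relies on the frame rate being non-zero. A slightly more
// defensive sketch of the same reciprocal relationship, with a fallback value;
// the 60 fps default is an assumption for illustration only.
float deltaTimeFromFps(float fps)
{
    const float kFallbackFps = 60.0f;   // assumed default when no frames have been timed yet
    if (fps <= 0.0f)
        fps = kFallbackFps;             // avoid division by zero or a negative dt
    return 1.0f / fps;                  // seconds per frame is the reciprocal of frames per second
}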
qint64 WorkflowRenderer::length() const
{
    return qRound64( (qreal)getLengthMs() / 1000.0 * (qreal)getFps() );
}
qint64 WorkflowRenderer::getLengthMs() const
{
    return m_mainWorkflow->getLengthFrame() / getFps() * 1000;
}
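// length() and getLengthMs() above convert between a duration in frames and a duration
// in milliseconds through the frame rate. A minimal standalone sketch of that pair of
// conversions, assuming a double frame rate; the function names are illustrative and
// not part of WorkflowRenderer.
#include <cmath>
#include <cstdint>

int64_t framesToMs(int64_t frames, double fps)
{
    return (int64_t)std::llround(frames / fps * 1000.0);   // frames -> seconds -> milliseconds
}

int64_t msToFrames(int64_t ms, double fps)
{
    return (int64_t)std::llround(ms / 1000.0 * fps);        // milliseconds -> seconds -> frames
}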
void Clock::draw() const { IOManager::getInstance()->printMessageValueAt("Avg fps: ", getFps(), FPSPos[0], FPSPos[1]); IOManager::getInstance()->printMessageValueAt("Seconds: ", getSeconds(), secondPos[0], secondPos[1]); IOManager::getInstance()->printMessageCenteredAt("Project #1 Solution ", pojPos); IOManager::getInstance()->printMessageAt("Maoting Ren's Project", namePos[0], namePos[1]); }
void Clock::draw() const
{
    IOManager::getInstance().printMessageValueAt("Seconds: ", getSeconds(), pos[0], pos[1]);
    IOManager::getInstance().printMessageValueAt("fps: ", getFps(), pos[0], pos[1] + 20);
}
// Load an animation clip MStatus skeleton::loadClip(MString clipName,int start,int stop,int rate) { uint fps = getFps(); float frameTime = 1000.0f / fps; MStatus stat; int i,j; std::vector<int> times; if (m_joints.size() < 0) return MS::kFailure; times.clear(); for (int t=start; t<stop; t+=rate) times.push_back(t); times.push_back(stop); // create the animation animation a; a.name = clipName.asChar(); if(m_animations.size() == 0) { a.startTime = 0; a.endTime = times[times.size()-1] - times[0]; } else { a.startTime = m_animations[m_animations.size()-1].endTime + 1; a.endTime = a.startTime + times[times.size()-1] - times[0]; } m_animations.push_back(a); int animIdx = m_animations.size() - 1; for (i=0; i<times.size(); i++) { MAnimControl::setCurrentTime(MTime(times[i],MTime::uiUnit())); for (j=0; j<m_joints.size(); j++) { keyframeTranslation translation; keyframeRotation rotation; keyframeScale scale; joint &jt = m_joints[j]; int time = times[i] - times[0] + a.startTime; loadKeyframe(m_joints[j],time,translation,rotation,scale); translation.time *= frameTime; rotation.time *= frameTime; scale.time *= frameTime; size_t size = jt.keyframesTranslation.size(); if(size > 0) { keyframeTranslation& t = jt.keyframesTranslation[size - 1]; if(!equal(translation.v[0],t.v[0]) || !equal(translation.v[1],t.v[1]) || !equal(translation.v[2],t.v[2])) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); if(time - 1 > lastTime) { keyframeTranslation temp = t; temp.time = (time - 1) * frameTime; jt.keyframesTranslation.push_back(temp); } jt.keyframesTranslation.push_back(translation); } } else { jt.keyframesTranslation.push_back(translation); } MFnIkJoint jn(jt.jointDag); if(jn.name() == "Hips") { // breakable; } size = jt.keyframesRotation.size(); if(size > 0) { keyframeRotation& r = jt.keyframesRotation[size - 1]; if(!equal(rotation.q[0],r.q[0]) || !equal(rotation.q[1],r.q[1]) || !equal(rotation.q[2],r.q[2]) || !equal(rotation.q[3],r.q[3])) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(r.time / frameTime); if(time - 1 > lastTime) { keyframeRotation temp = r; temp.time = (time - 1) * frameTime; jt.keyframesRotation.push_back(temp); } jt.keyframesRotation.push_back(rotation); } } else { jt.keyframesRotation.push_back(rotation); } size = jt.keyframesScale.size(); if(size > 0) { keyframeScale& s = jt.keyframesScale[size - 1]; if(!equal(scale.v[0],s.v[0]) || !equal(scale.v[1],s.v[1]) || !equal(scale.v[2],s.v[2])) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(s.time / frameTime); if(time - 1 > lastTime) { keyframeScale temp = s; temp.time = (time - 1) * frameTime; jt.keyframesScale.push_back(temp); } jt.keyframesScale.push_back(scale); } } else { jt.keyframesScale.push_back(scale); } if(jt.hasRibbonSystem) { keyframeT<bool> keyframeVisible; keyframeT<float> keyframeAbove; keyframeT<float> keyframeBelow; keyframeT<short> keyframeSlot; keyframeT<float3> keyframeColor; keyframeT<float> keyframeAlpha; MFnIkJoint jointFn(jt.jointDag); MPlug plug; plug = jointFn.findPlug("unRibbonVisible"); bool visible; plug.getValue(visible); plug = jointFn.findPlug("unRibbonAbove"); float above; plug.getValue(above); plug = jointFn.findPlug("unRibbonBelow"); float below; plug.getValue(below); plug = jointFn.findPlug("unRibbonTextureSlot"); short slot; plug.getValue(slot); plug = jointFn.findPlug("unRibbonVertexColor"); MObject object; plug.getValue(object); MFnNumericData data(object); float r,g,b; data.getData(r,g,b); plug = jointFn.findPlug("unRibbonVertexAlpha"); float alpha; plug.getValue(alpha); 
keyframeVisible.time = time * frameTime; keyframeAbove.time = time * frameTime; keyframeBelow.time = time * frameTime; keyframeSlot.time = time * frameTime; keyframeColor.time = time * frameTime; keyframeAlpha.time = time * frameTime; keyframeVisible.data = visible; keyframeAbove.data = above; keyframeBelow.data = below; keyframeSlot.data = slot; keyframeColor.data[0] = r; keyframeColor.data[1] = g; keyframeColor.data[2] = b; keyframeAlpha.data = alpha; addKeyFramesBool(&jt.ribbon.keyframesVisible,&keyframeVisible); addKeyFramesFloat(&jt.ribbon.keyframeAbove,&keyframeAbove); addKeyFramesFloat(&jt.ribbon.keyframeBelow,&keyframeBelow); size = jt.ribbon.keyframeSlot.size(); if(size > 0) { keyframeT<short>& s = jt.ribbon.keyframeSlot[size - 1]; if(s.data == slot) { jt.ribbon.keyframeSlot.push_back(keyframeSlot); } } else { jt.ribbon.keyframeSlot.push_back(keyframeSlot); } size = jt.ribbon.keyframeColor.size(); if(size > 0) { keyframeT<float3>& s = jt.ribbon.keyframeColor[size - 1]; if(!equal(s.data[0],r) || !equal(s.data[1],g) || !equal(s.data[2],b)) { jt.ribbon.keyframeColor.push_back(keyframeColor); } } else { jt.ribbon.keyframeColor.push_back(keyframeColor); } addKeyFramesFloat(&jt.ribbon.keyframeAlpha,&keyframeAlpha); } if(jt.hasParticleSystem) { keyframeT<bool> keyframeVisible; keyframeT<float> keyframeSpeed; keyframeT<float> keyframeVariation; keyframeT<float> keyframeConeAngle; keyframeT<float> keyframeGravity; keyframeT<float> keyframeExplosiveForce; keyframeT<float> keyframeEmissionRate; keyframeT<float> keyframeWidth; keyframeT<float> keyframeLength; keyframeT<float> keyframeHeight; MFnIkJoint jointFn(jt.jointDag); MPlug plug; plug = jointFn.findPlug("unParticleVisible"); bool visible; plug.getValue(visible); plug = jointFn.findPlug("unParticleSpeed"); float speed; plug.getValue(speed); plug = jointFn.findPlug("unParticleVariationPercent"); float variation; plug.getValue(variation); plug = jointFn.findPlug("unParticleConeAngle"); float coneAngle; plug.getValue(coneAngle); plug = jointFn.findPlug("unParticleGravity"); float gravity; plug.getValue(gravity); plug = jointFn.findPlug("unParticleExplosiveForce"); float explosiveForce = 0.0f; if(!plug.isNull()) { plug.getValue(explosiveForce); } plug = jointFn.findPlug("unParticleEmissionRate"); float emissionRate; plug.getValue(emissionRate); plug = jointFn.findPlug("unParticleEmitterWidth"); float width; plug.getValue(width); plug = jointFn.findPlug("unParticleEmitterLength"); float length; plug.getValue(length); plug = jointFn.findPlug("unParticleEmitterHeight"); float height = 0.0f; if(!plug.isNull()) { plug.getValue(height); } keyframeVisible.time = time * frameTime; keyframeSpeed.time = time * frameTime; keyframeVariation.time = time * frameTime; keyframeConeAngle.time = time * frameTime; keyframeGravity.time = time * frameTime; keyframeExplosiveForce.time = time * frameTime; keyframeEmissionRate.time = time * frameTime; keyframeWidth.time = time * frameTime; keyframeLength.time = time * frameTime; keyframeHeight.time = time * frameTime; keyframeVisible.data = visible; keyframeSpeed.data = speed; keyframeVariation.data = variation / 100.0f; keyframeConeAngle.data = coneAngle; keyframeGravity.data = gravity; keyframeExplosiveForce.data = explosiveForce; keyframeEmissionRate.data = emissionRate; keyframeWidth.data = width; keyframeLength.data = length; keyframeHeight.data = height; addKeyFramesBool(&jt.particle.keyframesVisible,&keyframeVisible); addKeyFramesFloat(&jt.particle.keyframesSpeed,&keyframeSpeed); 
addKeyFramesFloat(&jt.particle.keyframesVariation,&keyframeVariation); addKeyFramesFloat(&jt.particle.keyframesConeAngle,&keyframeConeAngle); addKeyFramesFloat(&jt.particle.keyframesGravity,&keyframeGravity); addKeyFramesFloat(&jt.particle.keyframesExplosiveForce,&keyframeExplosiveForce); addKeyFramesFloat(&jt.particle.keyframesEmissionRate,&keyframeEmissionRate); addKeyFramesFloat(&jt.particle.keyframesWidth,&keyframeWidth); addKeyFramesFloat(&jt.particle.keyframesLength,&keyframeLength); addKeyFramesFloat(&jt.particle.keyframesHeight,&keyframeHeight); } } } return MS::kSuccess; }
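// skeleton::loadClip above (and material::loadKeyframe below) thin out keyframes the
// same way: a new key is stored only when the sampled value differs from the last stored
// one, and if whole frames were skipped in between, a copy of the previous value is first
// inserted one frame before the change so interpolation holds the old value until then.
// A minimal sketch of that idea for a single scalar track, assuming a fixed frameTime in
// milliseconds; Key and pushKey are illustrative names, not part of the exporter.
#include <cmath>
#include <vector>

struct Key { float time; float value; };   // time in milliseconds

void pushKey(std::vector<Key>& track, int frame, float value, float frameTime)
{
    const float time = frame * frameTime;
    if (!track.empty())
    {
        const Key last = track.back();                  // copy: push_back below may reallocate
        if (std::fabs(last.value - value) < 1e-6f)
            return;                                     // unchanged since the last key: nothing to store
        const int lastFrame = (int)std::lround(last.time / frameTime);
        if (frame - 1 > lastFrame)                      // frames were skipped since the last key:
            track.push_back({ (frame - 1) * frameTime, last.value });  // hold the old value one frame before the change
    }
    track.push_back({ time, value });
}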
void AnimationSystem::update(double deltaTime)
{
    auto entities = getEntities();
    for (auto& e : entities)
    {
        auto& animation = e.getComponent<AnimationComponent>();
        auto& sprite = e.getComponent<SpriteComponent>().sprite;

        AnimationComponent::State* animationState = nullptr;
        if (!animation.playingState.empty())
        {
            animationState = &animation.states[animation.playingState];
        }

        if (animation.isPlaying && animationState)
        {
            animation.m_frameAccumulator += deltaTime * (animationState->frameRate == 0 ? getFps() : animationState->frameRate);
            animation.currentFrame.x = (int)animation.m_frameAccumulator;

            if (animation.currentFrame.x >= animationState->frameAmount.x)
            {
                // go to the next row (if necessary)
                if (animationState->frameAmount.y)
                {
                    if (animation.currentFrame.y >= animationState->frameAmount.y)
                    {
                        animation.currentFrame.y = 0;
                    }
                    else
                    {
                        ++animation.currentFrame.y;
                    }
                }

                // reset the animation
                animation.currentFrame.x = 0;
                animation.m_frameAccumulator = 0;
                animation.isPlaying = animation.repeat;
            }
        }

        if (animationState)
        {
            sf::IntRect rect(
                sf::Vector2i(animationState->startPosition.x + animation.frameSize.x * (int)animation.currentFrame.x,
                             animationState->startPosition.y + animation.frameSize.y * (int)animation.currentFrame.y),
                sf::Vector2i(animation.frameSize));
            sprite.setTextureRect(rect);
        }
    }
}
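// AnimationSystem::update above advances sprite frames by accumulating
// deltaTime * frameRate and taking the integer part as the current frame index.
// A minimal sketch of that accumulator on its own, with illustrative names
// (FrameAccumulator, frameCount) that are not part of the original component.
struct FrameAccumulator
{
    double accumulated = 0.0;   // fractional frames advanced so far
    int    current     = 0;     // current frame index

    // Advance by deltaTime seconds at frameRate frames per second;
    // wraps back to frame 0 once the last frame is passed.
    void advance(double deltaTime, double frameRate, int frameCount)
    {
        accumulated += deltaTime * frameRate;
        current = (int)accumulated;
        if (current >= frameCount)
        {
            current = 0;
            accumulated = 0.0;
        }
    }
};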
QString FpsCounter::toString() const
{
    const auto fps = getFps();
    return fps < 1.f ? QString::number(fps, 'g', 3) : QString::number(int(fps));
}
int main() { //int avg = 2 * 90 + 3 * 88 + 4 * 87 + 3 * 84 + 4 * 92 + 2 * 93 + 2 * 83 + 2 * 80 + 2 * 95; //std::cout << "Avg : " << avg << std::endl; SDeviceContextSettings settings; settings.MultiSamplingCount = 4; settings.MultiSamplingQuality = 32; IDevice* device = createDevice(EDT_DIRECT3D11, 800, 600, EWS_NONE, true, settings); IVideoDriver* driver = device->getVideoDriver(); ISceneManager* smgr = device->getSceneManager(); IMeshManager* meshManager = driver->getMeshManager(); IMaterialManager* materialManager = driver->getMaterialManager(); IResourceGroupManager* resourceGroupManager = driver->getResourceGroupManager(); resourceGroupManager->init("Resources.cfg"); resourceGroupManager->loadResourceGroup("General"); ISimpleMesh* cubeMesh = meshManager->createCubeMesh("cube1"); IMeshNode* cubeMeshNode = smgr->addMeshNode(cubeMesh, nullptr, nullptr, XMFLOAT3(0, 3.0f, 0)); cubeMeshNode->setMaterialName("test/material01"); //cubeMeshNode->remove(); ISimpleMesh* planeMesh = meshManager->createPlaneMesh("plane1", 10.0, 10.0f, 50, 50, 10.0f, 10.0f); IMeshNode* planeMeshNode = smgr->addMeshNode(planeMesh, nullptr); planeMeshNode->setMaterialName("test/ground_material"); IAnimatedMesh* animMesh = meshManager->getAnimatedMesh("lxq.mesh"); IAnimatedMeshNode* animNode = smgr->addAnimatedMeshNode(animMesh); animNode->scale(0.02f, 0.02f, 0.02f); IModelMesh* heroMesh = meshManager->getModelMesh("hero.mesh"); IMeshNode* heroNode = smgr->addModelMeshNode(heroMesh); heroNode->scale(0.01f, 0.01f, 0.01f); heroNode->translate(2.0f, 0.5f, 0); // create sampler state SSamplerDesc samplerDesc; samplerDesc.Filter = ESF_FILTER_MIN_MAG_MIP_LINEAR; samplerDesc.AddressU = EAM_WRAP; samplerDesc.AddressV = EAM_WRAP; samplerDesc.AddressW = EAM_WRAP; ISampler* sampler = driver->getSamplerManager()->create(std::string("sampler1"), samplerDesc); IPipeline* pipeline = driver->getPipelineManager()->get("test/pipeline01"); //pipeline->setSampler(std::string("sampleType"), sampler); ILightNode* light = smgr->addLightNode(1); light->setType(ELT_POINT); light->setAmbient(XMFLOAT4(0.5f, 0.5f, 0.5f, 1.0f)); light->setPosition(2.0f, 5.0f, -3.0f); light->setSpecular(XMFLOAT4(1.0f, 1.0f, 1.0f, 32.0f)); light->setDiffuse(XMFLOAT4(0.8f, 0.8f, 0.8f, 1.0f)); light->setAttenuation(1.0f, 0.0f, 0.0f); light->setRange(100.0f); materialManager->destroy(std::string("test/material02")); //ICameraNode* camera = smgr->addFpsCameraNode(1, nullptr, XMFLOAT3(0, 1.0f, -4.0f), XMFLOAT3(0, 1.0f, 0.0f)); ICameraNode* camera = smgr->addFpsCameraNode(1, nullptr, XMFLOAT3(0, 1.0f, -4.0f), XMFLOAT3(0, 1.0f, 0.0f)); f32 rotx = 0; f32 roty = 0; f32 rotz = 0; char caption[200]; //FILE* fp = fopen("log.txt", "w"); ITimer* timer = device->createTimer(); timer->reset(); while (device->run()) { const float clearColor[] = { 0.0f, 0.0f, 0.0f, 1.0f }; driver->beginScene(true, true, clearColor); float dt = timer->tick(); rotx += dt * 2.0f; roty += dt * 1.0f; rotz += dt * 0.5f; if (rotx > XM_2PI) rotx -= XM_2PI; if (roty > XM_2PI) roty -= XM_2PI; if (rotz > XM_2PI) rotz -= XM_2PI; XMMATRIX Mx = XMMatrixRotationX(rotx); XMMATRIX My = XMMatrixRotationY(roty); XMMATRIX Mz = XMMatrixRotationZ(rotz); XMMATRIX rotM = Mx * My * Mz; cubeMeshNode->setOrientation(rotM); // heroNode->yaw(dt); animNode->addTime(dt * 3000.0f); updateCamera(camera, dt); // std::cout << dt << std::endl; smgr->drawAll(); driver->endScene(); sprintf(caption, "FPS:%f", getFps(dt)); device->setWindowCaption(caption); } device->drop(); return 0; }
MStatus material::loadKeyframe(float time) { uint fps = getFps(); float frameTime = 1000.0f / fps; MStatus status; MPlugArray colorSrcPlugs; MPlug colorplug = m_pShaderNode->findPlug("color"); if(colorplug.isNull())return MS::kFailure; colorplug.connectedTo(colorSrcPlugs,true,false); //透明度 keyframeT<float> kf; kf.time = time; kf.data = 1.0f; MPlug plug = m_pShaderNode->findPlug("unnamedTransparency",&status); if(status) { plug.getValue(kf.data); } size_t size = m_vTransparencies.size(); if(size > 0) { keyframeT<float>& t = m_vTransparencies[size - 1]; if(!equal(t.data,kf.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<float> temp = t; temp.time = (time - 1) * frameTime; m_vTransparencies.push_back(temp); } m_vTransparencies.push_back(kf); } } else { m_vTransparencies.push_back(kf); } //颜色 keyframeT<colorNalpha> kfC; kfC.time = time; kfC.data = colorNalpha(1.0f,1.0f,1.0f); plug = m_pShaderNode->findPlug("unnamedColor",&status); if(status) { MObject object; plug.getValue(object); MFnNumericData data(object); colorNalpha color; data.getData(kfC.data.r,kfC.data.g,kfC.data.b); } size = m_vColors.size(); if(size > 0) { keyframeT<colorNalpha>& t = m_vColors[size - 1]; if(!equal(t.data,kfC.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<colorNalpha> temp = t; temp.time = (time - 1) * frameTime; m_vColors.push_back(temp); } m_vColors.push_back(kfC); } } else { m_vColors.push_back(kfC); } //TcFlowSpeedU keyframeT<float> kfFlowSpeedU; kfFlowSpeedU.time = time; kfFlowSpeedU.data = false; plug = m_pShaderNode->findPlug("tcFlowSpeedU",&status); if(status) { plug.getValue(kfFlowSpeedU.data); } size = m_vTcFlowSpeedU.size(); if(size > 0) { keyframeT<float>& t = m_vTcFlowSpeedU[size - 1]; if(!equal(t.data,kfFlowSpeedU.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<float> temp = t; temp.time = (time - 1) * frameTime; m_vTcFlowSpeedU.push_back(temp); } m_vTcFlowSpeedU.push_back(kfFlowSpeedU); } } else { m_vTcFlowSpeedU.push_back(kfFlowSpeedU); } //TcFlowV keyframeT<float> kfFlowSpeedV; kfFlowSpeedV.time = time; kfFlowSpeedV.data = false; plug = m_pShaderNode->findPlug("tcFlowSpeedV",&status); if(status) { plug.getValue(kfFlowSpeedV.data); } size = m_vTcFlowSpeedV.size(); if(size > 0) { keyframeT<float>& t = m_vTcFlowSpeedV[size - 1]; if(!equal(t.data,kfFlowSpeedV.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<float> temp = t; temp.time = (time - 1) * frameTime; m_vTcFlowSpeedV.push_back(temp); } m_vTcFlowSpeedV.push_back(kfFlowSpeedV); } } else { m_vTcFlowSpeedV.push_back(kfFlowSpeedV); } //TcRotateSpeed keyframeT<float> kfRotateSpeed; kfRotateSpeed.time = time; kfRotateSpeed.data = false; plug = m_pShaderNode->findPlug("tcRotateSpeed",&status); if(status) { plug.getValue(kfRotateSpeed.data); } size = m_vRotateSpeed.size(); if(size > 0) { keyframeT<float>& t = m_vRotateSpeed[size - 1]; if(!equal(t.data,kfRotateSpeed.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<float> temp = t; temp.time = (time - 1) * frameTime; m_vRotateSpeed.push_back(temp); } m_vRotateSpeed.push_back(kfRotateSpeed); } } else { 
m_vRotateSpeed.push_back(kfRotateSpeed); } //ChangeStyle keyframeT<short> kfChangeStyle; kfChangeStyle.time = time; kfChangeStyle.data = 0; //Sequence plug = m_pShaderNode->findPlug("textureChangeStyle",&status); if(status) { plug.getValue(kfChangeStyle.data); } size = m_vChangeStyle.size(); if(size > 0) { keyframeT<short>& t = m_vChangeStyle[size - 1]; if(!equal(t.data,kfChangeStyle.data)) { //如果跟上一次不一样,并且跨越了桢,那么需要补一桢 int lastTime = Round(t.time / frameTime); int thisTime = time / frameTime; if(thisTime - 1 > lastTime) { keyframeT<short> temp = t; temp.time = (time - 1) * frameTime; m_vChangeStyle.push_back(temp); } m_vChangeStyle.push_back(kfChangeStyle); } } else { m_vChangeStyle.push_back(kfChangeStyle); } return MS::kSuccess; }
/**
 * Dump current FPS to stdout
 */
void Fps::dump()
{
    std::cout << "FPS: " << getFps() << std::endl;
}
/**
 * Renders one frame of the game
 */
void RobotWindow::display()
{
    // switch to the modelview matrix and reset all transformations
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    // get the robot position and aim the camera in that direction
    robot.configureLookAt(lookAt);
    setCameraPosition(cameraLongitude, cameraLatitude);
    gluLookAt(eye.x, eye.y, eye.z, lookAt.x, lookAt.y, lookAt.z, 0.0, 1.0, 0.0);

    // clear the current render buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // check whether the game is over (does the robot still have lives?)
    if (robot.getLives() == 0)
    {
        // draw the Game Over text and halt the application
        textGameOver.draw2D();
        glutSwapBuffers();
        disableEvents();
        return;
    }
    else // there are lives left
        // check whether the robot should be revived
        if (!robot.isAlive())
        {
            robot.revive();
            newEvent(EVENT_RESET, interval);
        }

    // check whether the spatial boundary of the game should be drawn
    if (drawCage)
    {
        //glPushMatrix();
        borderland.draw();
        //glPopMatrix();
    }

    // draw the platform
    //glPushMatrix();
    glColor4dv(platformColor.getVect());
    platform.draw();
    //glPopMatrix();

    /*/ Draw a small sphere at the lookAt position for visual reference
    glPushMatrix();
    glTranslated(lookAt.x, lookAt.y, lookAt.z);
    glColor3dv(laranja.getVect());
    glutSolidSphere(0.1, 20, 20);
    glPopMatrix();
    /*/

    // draw the robot
    //glPushMatrix();
    robot.draw();
    //glPopMatrix();

    // draw the number of lives
    textLives.setText("Vidas: %d", robot.getLives());
    textLives.draw2D();

    // draw the number of frames per second
    textFps.setText("FPS: %d", getFps());
    textFps.draw2D();

    // present the rendered frame
    glutSwapBuffers();
}
int menu(SDL_Renderer* Renderer){ SDL_Event event; SDL_Texture* cursor_texture = getTextureFromPath("BMPimages/Cursor/1.bmp", Renderer); ButtonMenu* startButton = malloc(sizeof(ButtonMenu)); ButtonMenu* exitButton = malloc(sizeof(ButtonMenu)); VolumeMenu* volumeButton = malloc(sizeof(VolumeMenu)); TTF_Font* Font = TTF_OpenFont("TTFtext/GOST-type-B-Standard.ttf", 1000); Mix_Music* musicMenu = Mix_LoadMUS("MIXmusic/space.flac"); GameStates state = IN_MENU; int x, y; int volumeOn = 1; SDL_Rect menuRect = {0, 0, SCREEN_WIDTH, SCREEN_HEIGHT}; SDL_Texture* array_menu_bg[BG_SPRITES]; size_t displace_menu_bg_2 = 0; size_t displace_menu_bg_3 = 0; size_t displace_menu_bg_4 = 0; Mix_PlayMusic(musicMenu, -1); initButtons(Renderer, Font, startButton, exitButton, volumeButton); initMenuBgSprites(Renderer, array_menu_bg); while(state == IN_MENU){ while(SDL_PollEvent(&event)) { if (event.type == SDL_QUIT){ state = EXIT; } if (event.type == SDL_MOUSEMOTION){ SDL_GetMouseState(&x, &y); if (isInsideRect(x, y, startButton->Rect)) startButton->cur_texture = startButton->mouseInside; else startButton->cur_texture = startButton->mouseOutside; if (isInsideRect(x, y, exitButton->Rect)) exitButton->cur_texture = exitButton->mouseInside; else exitButton->cur_texture = exitButton->mouseOutside; if (isInsideRect(x, y, volumeButton->Rect)){ if (volumeOn) volumeButton->cur_texture = volumeButton->mouseInside_volOn; else volumeButton->cur_texture = volumeButton->mouseInside_volOff; } else{ if (volumeOn) volumeButton->cur_texture = volumeButton->mouseOutside_volOn; else volumeButton->cur_texture = volumeButton->mouseOutside_volOff; } } if (event.type == SDL_MOUSEBUTTONDOWN){ SDL_GetMouseState(&x, &y); if (isInsideRect(x, y, startButton->Rect)) state = START; if (isInsideRect(x, y, exitButton->Rect)) state = EXIT; if (isInsideRect(x, y, volumeButton->Rect)){ if (volumeOn){ volumeOn = 0; Mix_PauseMusic(); volumeButton->cur_texture = volumeButton->mouseInside_volOff; } else{ volumeOn = 1; Mix_ResumeMusic(); volumeButton->cur_texture = volumeButton->mouseInside_volOn; } } } } SDL_SetRenderDrawColor(Renderer, 0xFF, 0xFF, 0xFF, 0xFF); SDL_RenderClear(Renderer); SDL_RenderCopy(Renderer, array_menu_bg[0], NULL, &menuRect); renderInfinityText(Renderer, array_menu_bg[1], &displace_menu_bg_2, 544, 2, 1); renderInfinityText(Renderer, array_menu_bg[2], &displace_menu_bg_3, 544, 2, 2); renderInfinityText(Renderer, array_menu_bg[3], &displace_menu_bg_4, 544, 2, 3); SDL_RenderCopy(Renderer, startButton->cur_texture, NULL, &startButton->Rect); SDL_RenderCopy(Renderer, volumeButton->cur_texture, NULL, &volumeButton->Rect); SDL_RenderCopy(Renderer, exitButton->cur_texture, NULL, &exitButton->Rect); showCursor(Renderer, cursor_texture); SDL_RenderPresent(Renderer); getFps(); waitForFps(35); } return state; }
void VideoClip_Theora::_executeSeek() { #if _DEBUG log(this->name + " [seek]: seeking to frame " + str(this->seekFrame)); #endif int frame = 0; float time = this->seekFrame / getFps(); this->timer->seek(time); bool paused = this->timer->isPaused(); if (!paused) { this->timer->pause(); // pause until seeking is done } this->endOfFile = false; this->restarted = false; this->_resetFrameQueue(); // reset the video decoder. ogg_stream_reset(&this->info.TheoraStreamState); th_decode_free(this->info.TheoraDecoder); this->info.TheoraDecoder = th_decode_alloc(&this->info.TheoraInfo, this->info.TheoraSetup); Mutex::ScopeLock audioMutexLock; if (this->audioInterface != NULL) { audioMutexLock.acquire(this->audioMutex); ogg_stream_reset(&this->info.VorbisStreamState); vorbis_synthesis_restart(&this->info.VorbisDSPState); this->destroyAllAudioPackets(); } // first seek to desired frame, then figure out the location of the // previous key frame and seek to it. // then by setting the correct time, the decoder will skip N frames untill // we get the frame we want. frame = (int)this->_seekPage(this->seekFrame, 1); // find the key frame nearest to the target frame #ifdef _DEBUG // log(mName + " [seek]: nearest key frame for frame " + str(mSeekFrame) + " is frame: " + str(frame)); #endif this->_seekPage(std::max(0, frame - 1), 0); ogg_packet opTheora; ogg_int64_t granulePos; bool granuleSet = false; if (frame <= 1) { if (this->info.TheoraInfo.version_major == 3 && this->info.TheoraInfo.version_minor == 2 && this->info.TheoraInfo.version_subminor == 0) { granulePos = 0; } else { granulePos = 1; // because of difference in granule interpretation in theora streams 3.2.0 and newer ones } th_decode_ctl(this->info.TheoraDecoder, TH_DECCTL_SET_GRANPOS, &granulePos, sizeof(granulePos)); granuleSet = true; } // now that we've found the key frame that preceeds our desired frame, lets keep on decoding frames until we // reach our target frame. int status = 0; while (this->seekFrame != 0) { if (ogg_stream_packetout(&this->info.TheoraStreamState, &opTheora) > 0) { if (!granuleSet) { // theora decoder requires to set the granule pos after seek to be able to determine the current frame if (opTheora.granulepos < 0) { continue; // ignore prev delta frames until we hit a key frame } th_decode_ctl(this->info.TheoraDecoder, TH_DECCTL_SET_GRANPOS, &opTheora.granulepos, sizeof(opTheora.granulepos)); granuleSet = true; } status = th_decode_packetin(this->info.TheoraDecoder, &opTheora, &granulePos); if (status != 0 && status != TH_DUPFRAME) { continue; } frame = (int)th_granule_frame(this->info.TheoraDecoder, granulePos); if (frame >= this->seekFrame - 1) { break; } } else if (!this->_readData()) { log(this->name + " [seek]: fineseeking failed, _readData failed!"); return; } } #ifdef _DEBUG // log(mName + " [seek]: fineseeked to frame " + str(frame + 1) + ", requested: " + str(mSeekFrame)); #endif if (this->audioInterface != NULL) { // read audio data until we reach a timeStamp. this usually takes only one iteration, but just in case let's // wrap it in a loop float timeStamp = 0.0f; while (true) { timeStamp = this->_decodeAudio(); if (timeStamp >= 0) { break; } this->_readData(); } float rate = (float)this->audioFrequency * this->audioChannelsCount; float queuedTime = this->getAudioPacketQueueLength(); int trimmedCount = 0; // at this point there are only 2 possibilities: either we have too much packets and we have to delete // the first N ones, or we don't have enough, so let's fill the gap with silence. 
if (time > timeStamp - queuedTime) { while (this->audioPacketQueue != NULL) { if (time <= timeStamp - queuedTime + this->audioPacketQueue->samplesCount / rate) { trimmedCount = (int)((timeStamp - queuedTime + this->audioPacketQueue->samplesCount / rate - time) * rate); if (this->audioPacketQueue->samplesCount - trimmedCount <= 0) { this->destroyAudioPacket(this->popAudioPacket()); // if there's no data to be left, just destroy it } else { for (int i = trimmedCount, j = 0; i < this->audioPacketQueue->samplesCount; ++i, ++j) { this->audioPacketQueue->pcmData[j] = this->audioPacketQueue->pcmData[i]; } this->audioPacketQueue->samplesCount -= trimmedCount; } break; } queuedTime -= this->audioPacketQueue->samplesCount / rate; this->destroyAudioPacket(this->popAudioPacket()); } } // expand the first packet with silence. else if (this->audioPacketQueue != NULL) { int i = 0; int j = 0; int missingCount = (int)((timeStamp - queuedTime - time) * rate); if (missingCount > 0) { float* samples = new float[missingCount + this->audioPacketQueue->samplesCount]; if (missingCount > 0) { memset(samples, 0, missingCount * sizeof(float)); } for (j = 0; i < missingCount + this->audioPacketQueue->samplesCount; ++i, ++j) { samples[i] = this->audioPacketQueue->pcmData[j]; } delete[] this->audioPacketQueue->pcmData; this->audioPacketQueue->pcmData = samples; } } this->lastDecodedFrameNumber = this->seekFrame; this->readAudioSamples = (unsigned int)(timeStamp * this->audioFrequency); audioMutexLock.release(); } if (!paused) { this->timer->play(); } this->seekFrame = -1; }