// Handles release of a control, dispatching on the current game state:
// leaderboard play-button release restarts the game; during gameplay,
// releases stop movement or toggle pause/resume.
void GameLogic::onControlStopped(HawkControl control) {
    if (m_state == LeaderBoard && m_leaderBoardReady && control == ActionA) {
        // Play button released on the leaderboard: restart and restore
        // the button's unpressed visual state.
        reset();
        m_playButton.isPressed = false;
        m_playButton.textY--;
        return;
    }

    if (m_state != GamePlay) {
        return;
    }

    if (m_gamePaused) {
        // While paused, only the secondary menu control resumes play.
        if (control == Menu2) {
            onResume();
        }
        return;
    }

    switch (control) {
        case MoveLeft:
        case MoveRight:
            // Releasing either horizontal control halts the player.
            m_player->setHorizontalMovement(DynamicHawkBody::Stop);
            break;
        case Menu2:
            onPause();
            break;
        case ActionA:
        case ActionB:
        case ActionX:
        case ActionY:
        default:
            // Action buttons have no on-release behavior.
            break;
    }
}
// Processes queued touch events while the game-over dialog is visible.
// A DOWN ends processing for this frame; DRAGGED events are skipped; on
// UP, the right dialog button restarts the game and the left one exits.
void GameScreen::updateInputGameOver() {
    for (std::vector<TouchEvent>::iterator itr = m_touchEvents.begin(); itr != m_touchEvents.end(); itr++) {
        touchToWorld((*itr));
        switch (itr->getTouchType()) {
            case DOWN:
                return;
            case DRAGGED:
                continue;
            case UP:
                if (m_gameOverDialog->isTouchingRightButton(*m_touchPoint)) {
                    // Right button: reinitialize and resume a fresh game.
                    init();
                    onResume();
                } else if (m_gameOverDialog->isTouchingLeftButton(*m_touchPoint)) {
                    // BUG FIX: this previously hit-tested m_pausedDialog (a
                    // copy-paste from the pause handler). The user is touching
                    // the game-over dialog, so its left button must be tested.
                    m_iScreenState = SCREEN_STATE_EXIT;
                }
                return;
        }
    }
}
// Resumes this component, then resumes every child in order.
// NOTE: size() is re-read on every pass, so children appended while a
// child resumes are also visited — matching the original index loop.
void GuiComponent::resume() {
    onResume();
    unsigned int idx = 0;
    while (idx < mChildren.size()) {
        mChildren.at(idx)->resume();
        ++idx;
    }
}
// Tray reaction to playback starting: make the per-track actions usable
// and fall through to the shared resume handling.
void TomahawkTrayIcon::onPlay()
{
    m_stopContinueAfterTrackAction->setEnabled( true );
    m_loveTrackAction->setEnabled( true );

    onResume();
}
// Transitions the application out of PAUSED: reactivates the event
// listener, runs the resume hook, marks the state RUNNABLE, and resets
// the update timestamp (presumably so the next frame's delta does not
// include the paused interval — confirm against the update loop).
void ApplicationContext::resume() {
    if (_app_state != ApplicationLifeState::PAUSED) {
        return;  // Only a paused app can resume.
    }
    _event_listener.activate();
    onResume();
    _app_state = ApplicationLifeState::RUNNABLE;
    _last_update_time = System::getInstance()->getTime();
}
// Creates the AR widget: performs the one-time animation kick-off the
// first time the AR state is below 2, then always runs the resume hook.
void WidgetAR::onCreate() {
    LOGI("Creating AR");

    const bool firstCreate = w->mARState < 2;
    if (firstCreate) {
        w->mARState = 2;
        // NOTE(review): ownership of the raw `new Op()` presumably
        // transfers to sendOp — confirm it frees the op, else this leaks.
        w->mOpUnitAnim->sendOp(w->mOpUnitAnimId, w->mNWrapper->mNKrossWrapper->mNBeta00, new Op());
    }

    onResume();
}
void DirectRenderer::resume(const sp<IGraphicBufferProducer> &bufferProducer) { mSurfaceTex = bufferProducer; ALOGD("resume caller pid: %d, our pid: %d", IPCThreadState::self()->getCallingPid(), getpid()); if (getpid() == IPCThreadState::self()->getCallingPid()) { onResume(); } else { sp<AMessage> msg = new AMessage(kWhatPlay, id()); sp<AMessage> response; msg->postAndAwaitResponse(&response); } }
// Handler-thread dispatcher: routes each message to its on* implementation.
// The MTK-only kWhatPause/kWhatPlay cases are synchronous — the sender
// blocks in postAndAwaitResponse until the reply is posted here.
void DirectRenderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDecoderNotify:
        {
            onDecoderNotify(msg);
            break;
        }

        case kWhatRenderVideo:
        {
            onRenderVideo();
            break;
        }

        case kWhatQueueAccessUnit:
            onQueueAccessUnit(msg);
            break;

        case kWhatSetFormat:
            onSetFormat(msg);
            break;

#ifdef MTK_AOSP_ENHANCEMENT
        case kWhatPause:
        {
            // Synchronous pause: perform it, then unblock the sender.
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onPause();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatPlay:
        {
            // Synchronous resume: perform it, then unblock the sender.
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onResume();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }
#endif

        default:
            // Unknown message id is a programming error.
            TRESPASS();
    }
}
// Android native-activity command dispatcher: maps lifecycle commands
// delivered by the android_app glue onto this application's hooks.
void GLESApplication::handleCommand(android_app *app, int32_t cmd) {
    switch (cmd) {
        case APP_CMD_INIT_WINDOW:
            // The window is being shown, get it ready.
            if (app->window != NULL) {
                initWindow(app);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            terminateWindow(app);
            tearDownEGLContext();
            break;
        case APP_CMD_START:
            onStart(app);
            break;
        case APP_CMD_RESUME:
            onResume(app);
            break;
        case APP_CMD_PAUSE:
            onPause(app);
            break;
        case APP_CMD_GAINED_FOCUS:
            gainedFocus(app);
            break;
        case APP_CMD_LOST_FOCUS:
            lostFocus(app);
            LOGI("LOST FOCUS state");
            break;
        case APP_CMD_SAVE_STATE:
        case APP_CMD_DESTROY:
            // Intentionally ignored.
            break;
    }
}
// Consumes touch events while the game is waiting to start: the first
// DOWN switches to RUNNING, resumes, and hands the event to the world;
// drags are skipped; an UP ends processing for this frame.
void GameScreen::updateInputWaiting(std::vector<TouchEvent> &touchEvents) {
    for (std::vector<TouchEvent>::iterator itr = touchEvents.begin(); itr != touchEvents.end(); itr++) {
        touchToWorld((*itr));
        switch (itr->getTouchType()) {
            case UP:
                // Nothing to do on release while waiting.
                return;
            case DRAGGED:
                continue;
            case DOWN:
                m_gameState = Game_State::RUNNING;
                onResume();
                // NOTE(review): a DOWN event is forwarded to handleTouchUp —
                // looks intentional (start-of-game tap) but worth confirming.
                m_world->handleTouchUp(*itr);
                continue;
        }
    }
}
// Builds the system tray icon: sets the platform-appropriate icon, builds
// the playback context menu, wires tray actions and AudioEngine signals to
// the matching slots, and starts in the stopped presentation.
TomahawkTrayIcon::TomahawkTrayIcon( QObject* parent )
    : QSystemTrayIcon( parent )
    , m_currentAnimationFrame( 0 )
    , m_showWindowAction( 0 )
    , m_stopContinueAfterTrackAction( 0 )
    , m_loveTrackAction( 0 )
{
#ifdef Q_OS_MAC
    // macOS menu bars are monochrome, so use the grayscale icon there.
    QIcon icon( RESPATH "icons/tomahawk-grayscale-icon-128x128.png" );
#else
    QIcon icon( RESPATH "icons/tomahawk-icon-128x128.png" );
#endif
    setIcon( icon );

    refreshToolTip();

    m_contextMenu = new QMenu();
    m_contextMenu->setFont( TomahawkUtils::systemFont() );
    setContextMenu( m_contextMenu );

    m_loveTrackAction = new QAction( this );
    m_stopContinueAfterTrackAction = new QAction( this );

    // Menu entry order below is the order shown to the user.
    ActionCollection *ac = ActionCollection::instance();
    m_contextMenu->addAction( ac->getAction( "playPause" ) );
    m_contextMenu->addAction( ac->getAction( "stop" ) );
    m_contextMenu->addSeparator();
    m_contextMenu->addAction( m_loveTrackAction );
    m_contextMenu->addAction( m_stopContinueAfterTrackAction );
    m_contextMenu->addSeparator();
    m_contextMenu->addAction( ac->getAction( "previousTrack" ) );
    m_contextMenu->addAction( ac->getAction( "nextTrack" ) );
    m_contextMenu->addSeparator();
    m_contextMenu->addAction( ActionCollection::instance()->getAction( "togglePrivacy" ) );

#ifdef Q_OS_MAC
    // On mac you can close the windows while leaving the app open. We then
    // need a way to show the main window again.
    m_contextMenu->addSeparator();
    m_showWindowAction = m_contextMenu->addAction( tr( "Hide Tomahawk Window" ) );
    m_showWindowAction->setData( true );
    connect( m_showWindowAction, SIGNAL( triggered() ), this, SLOT( showWindow() ) );
    connect( m_contextMenu, SIGNAL( aboutToShow() ), this, SLOT( menuAboutToShow() ) );
#endif

    m_contextMenu->addSeparator();
    m_contextMenu->addAction( ac->getAction( "quit" ) );

    // Tray actions -> local slots.
    connect( m_loveTrackAction, SIGNAL( triggered() ), SLOT( loveTrackTriggered() ) );
    connect( m_stopContinueAfterTrackAction, SIGNAL( triggered() ), SLOT( stopContinueAfterTrackActionTriggered() ) );

    // AudioEngine state changes -> tray presentation updates.
    connect( AudioEngine::instance(), SIGNAL( loading( Tomahawk::result_ptr ) ), SLOT( setResult( Tomahawk::result_ptr ) ) );
    connect( AudioEngine::instance(), SIGNAL( started( Tomahawk::result_ptr ) ), SLOT( onPlay() ) );
    connect( AudioEngine::instance(), SIGNAL( resumed() ), SLOT( onResume() ) );
    connect( AudioEngine::instance(), SIGNAL( stopped() ), SLOT( onStop() ) );
    connect( AudioEngine::instance(), SIGNAL( paused() ), SLOT( onPause() ) );
    connect( AudioEngine::instance(), SIGNAL( stopAfterTrackChanged() ), SLOT( onStopContinueAfterTrackChanged() ) );

    connect( &m_animationTimer, SIGNAL( timeout() ), SLOT( onAnimationTimer() ) );
    connect( this, SIGNAL( activated( QSystemTrayIcon::ActivationReason ) ), SLOT( onActivated( QSystemTrayIcon::ActivationReason ) ) );

    // Start in the stopped presentation, then make the icon visible.
    onStop();
    show();
}
// Renderer handler-thread dispatcher. Drain messages carry a generation
// counter so drains queued before a flush/restart are discarded; all other
// messages route straight to the matching on* implementation.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain from before the last flush — ignore it.
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain from before the last flush — ignore it.
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Immediately schedule the next video drain.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            // Unknown message id is a programming error.
            TRESPASS();
            break;
    }
}
// JNI bridge: forwards the Java-side resume() call straight to the native
// onResume handler with the new and previous texture ids. The env/obj
// parameters are required by the JNI calling convention but unused here.
JNIEXPORT void JNICALL
Java_jk_j_1JNILib_resume(JNIEnv * env, jobject obj, jint aTextureID, jint oldTextureID)
{
    onResume(aTextureID, oldTextureID);
}
// Renderer handler-thread dispatcher (MTK-enhanced build). Drain messages
// carry a generation counter so drains queued before a flush are dropped.
// The non-ANDROID_DEFAULT_CODE sections add: a cap on the audio drain
// delay, and a periodic kWhatCheckRenderBufferStatus self-reposting probe
// that reports buffer fullness (0 or 100) upstream via kWhatBufferNotify.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain from before the last flush — ignore it.
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
#ifndef ANDROID_DEFAULT_CODE
                // half that is to large, set it to 20ms
                // (vendor tweak: cap the reschedule delay; 40000/2 = 20ms)
                if (numFramesPlayed > mNumFramesWritten)
                    ALOGW("numFramesPlayed(%d) > mNumFramesWritten(%d), no reset @ kWhatDrainAudioQueue", numFramesPlayed, mNumFramesWritten);
                if (delayUs > 40000)
                    delayUs = 40000;
#endif
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            // Stale drain from before the last flush — ignore it.
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            LOGI("kWhatQueueEOS");
            // EOS reached: stop the periodic buffer-status probing.
            mNeedCheckBuffer = false;
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            // Resuming implies data is (about to be) available again.
            mIsbufferempty = false;
            onResume();
            break;
        }

#ifndef ANDROID_DEFAULT_CODE
        case kWhatCheckRenderBufferStatus:
        {
            // Periodic (300ms) probe: report queue fullness upstream as a
            // bufRate of 100 (full) or 0 (empty) through kWhatBufferNotify.
            ALOGV("kWhatCheckRenderBufferStatus need check buff : %s",mNeedCheckBuffer?"YES":"NO");
            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatBufferNotify);
            int32_t rate = 0;
            ALOGV("mIsSeeking(%d) mVideoQueue.size(%d) mAudioQueue.size(%d)",mIsSeeking,mVideoQueue.size(),mAudioQueue.size());
            if(mNeedCheckBuffer == true){
                // NOTE(review): switch on a bool with case true/false is
                // unusual but legal; it's an edge-triggered empty<->full
                // notifier — only transitions are reported.
                switch(mIsbufferempty){
                    case true:
                        // Currently empty: look for the "became full" edge.
                        if(mIsSeeking == true) {
                            // only check VideoQueue after seek
                            if (mVideoQueue.size()>0){
                                ALOGV("kWhatCheckRenderBufferStatus full");
                                rate = 100;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                                mIsSeeking = false;
                            }
                        } else {
                            if(mDebugDisableAVsync)//Disable AV SYNC for debug
                            {
                                ALOGV("kWhatCheckRenderBufferStatus full");
                                rate = 100;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                            }
                            else if (mAudioQueue.size()>2)//Wait audio
                            {
                                ALOGV("kWhatCheckRenderBufferStatus full");
                                rate = 100;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                            }
                        }
                        break;
                    case false:
                        // Currently full: look for the "became empty" edge.
                        if(mIsSeeking == true) {
                            // only check VideoQueue after seek
                            if (mVideoQueue.empty()) {
                                mIsbufferempty = true;
                                ALOGV("kWhatCheckRenderBufferStatus empty");
                                rate = 0;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                            }
                        } else {
                            if(mDebugDisableAVsync)//Disable AV SYNC for debug
                            {
                                mIsbufferempty = true;
                                ALOGV("kWhatCheckRenderBufferStatus empty");
                                rate = 0;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                            }
                            else if (mAudioQueue.empty())//Wait audio empty
                            {
                                mIsbufferempty = true;
                                ALOGV("kWhatCheckRenderBufferStatus empty");
                                rate = 0;
                                notify->setInt32("bufRate", rate);
                                notify->post();
                            }
                        }
                        break;
                    default:
                        break;
                }
            }else{
                // Probing disabled (e.g. after EOS): always report full.
                rate = 100;
                notify->setInt32("bufRate", rate);
                notify->post();
            }
            // Re-arm the probe for 300ms from now.
            (new AMessage(kWhatCheckRenderBufferStatus, id()))->post(300000);
            break;
        }

        case kWhatStatusNotify:
        {
            // Wrapped player-status message; currently only seek-complete
            // is acted upon (switches the probe into post-seek video mode).
            int32_t status;
            sp<AMessage> message;
            CHECK(msg->findMessage("message", &message));
            CHECK(message->findInt32("status", &status));
            LOGI(" kWhatStatusNotify %d",status);
            switch(status){
                case MEDIA_SEEK_COMPLETE:
                    mIsSeeking = true;
                    break;
                default:
                    break;
            }
            break;
        }
#endif

        default:
            // Unknown message id is a programming error.
            TRESPASS();
            break;
    }
}
// Scene-level resume: simply delegates to the onResume hook.
void SWGameScene::resume()
{
    onResume();
}