Example #1
0
void
Tomahawk::InfoSystem::XmppInfoPlugin::pushInfo(QString caller, Tomahawk::InfoSystem::InfoType type, QVariant input)
{
    // Publishes playback-state changes over XMPP, honouring the account's
    // "publishtracks" opt-out. `caller` is unused here; the push is routed
    // purely on `type`.
    tDebug() << Q_FUNC_INFO << m_sipPlugin->m_client->jid().full();

    // Bail out early when the user disabled now-playing publication.
    if ( !m_sipPlugin->m_account->configuration().value("publishtracks").toBool() )
    {
        tDebug() << Q_FUNC_INFO <<  m_sipPlugin->m_client->jid().full() << "Not publishing now playing info (disabled in account config)";
        return;
    }

    if ( type == InfoNowPlaying || type == InfoNowResumed )
    {
        // A track is (again) audible: cancel any pending pause timeout.
        m_pauseTimer.stop();
        if ( input.canConvert< Tomahawk::InfoSystem::InfoStringHash >() )
            audioStarted( input.value< Tomahawk::InfoSystem::InfoStringHash >() );
    }
    else if ( type == InfoNowPaused )
    {
        // Delay the "stopped" publication: a long-enough pause counts as a stop.
        m_pauseTimer.start( PAUSE_TIMEOUT * 1000 );
        audioPaused();
    }
    else if ( type == InfoNowStopped )
    {
        m_pauseTimer.stop();
        audioStopped();
    }
    // Any other InfoType is not ours to handle.
}
Example #2
0
// InfoPlugin Methods
void
MprisPlugin::pushInfo( Tomahawk::InfoSystem::InfoPushData pushData )
{
    // Routes a playback-state push to the matching audio handler and, when
    // the state actually changed, announces the new PlaybackStatus on D-Bus.
    bool playbackStateChanged = true;

    switch ( pushData.type )
    {
        case InfoNowPlaying:
            audioStarted( pushData.infoPair.second );
            break;
        case InfoNowPaused:
            audioPaused();
            break;
        case InfoNowResumed:
            audioResumed( pushData.infoPair.second );
            break;
        case InfoNowStopped:
            audioStopped();
            break;

        default:
            // Not a playback-state push; nothing to announce.
            playbackStateChanged = false;
            break;
    }

    if ( playbackStateChanged )
        notifyPropertyChanged( "org.mpris.MediaPlayer2.Player", "PlaybackStatus" );
}
Example #3
0
Tomahawk::InfoSystem::XmppInfoPlugin::XmppInfoPlugin(XmppSipPlugin* sipPlugin)
    : m_sipPlugin( sipPlugin )
    , m_pubSubManager( 0 )
    , m_pauseTimer( this )
{
    // A live Jreen client is a hard precondition for setting up PubSub.
    Q_ASSERT( sipPlugin->m_client );

    // Tune entities are published via the client's PubSub service
    // (presumably XEP-0118 "User Tune" — confirm against Jreen docs).
    m_pubSubManager = new Jreen::PubSub::Manager( sipPlugin->m_client );
    m_pubSubManager->addEntityType< Jreen::Tune >();

    // Advertise the playback-state push types this plugin consumes.
    m_supportedPushTypes << InfoNowPlaying << InfoNowPaused << InfoNowResumed << InfoNowStopped;

    // Single-shot pause timer: a pause that outlives it is treated as a stop.
    m_pauseTimer.setSingleShot( true );
    connect( &m_pauseTimer, SIGNAL( timeout() ), this, SLOT( audioStopped() ) );
}
Example #4
0
void VoiceMessagesFader::onTimer() {
	// Per-tick driver for all active voice-message sources: applies fade-in/out
	// gain ramps, commits fade targets (stopped/paused/playing), emits position
	// updates, requests preloading, and reschedules itself while any source is
	// still fading or playing.
	bool hasFading = false, hasPlaying = false;
	QMutexLocker lock(&voicemsgsMutex);
	VoiceMessages *voice = audioVoice();
	if (!voice) return;

	for (int32 i = 0; i < AudioVoiceMsgSimultaneously; ++i) {
		VoiceMessages::Msg &m(voice->_data[i]);
		if (m.state == VoiceMessageStopped || m.state == VoiceMessagePaused || !m.source) continue;

		bool playing = false, fading = false;
		ALint pos = 0;
		ALint state = AL_INITIAL;
		alGetSourcei(m.source, AL_SAMPLE_OFFSET, &pos);
		alGetSourcei(m.source, AL_SOURCE_STATE, &state);
		if (!_checkALError()) {
			m.state = VoiceMessageStopped;
			emit error(m.audio);
		} else {
			// Transitional states fade; steady playback just tracks position.
			switch (m.state) {
			case VoiceMessageFinishing:
			case VoiceMessagePausing:
			case VoiceMessageStarting:
			case VoiceMessageResuming:
				fading = true;
			break;
			case VoiceMessagePlaying:
				playing = true;
			break;
			}
			if (fading && (state == AL_PLAYING || !m.loading)) {
				if (state != AL_PLAYING) {
					// Source stopped underneath the fade (ran out of data) —
					// reset gain and finalize as stopped.
					fading = false;
					if (m.source) {
						alSourcef(m.source, AL_GAIN, 1);
						alSourceStop(m.source);
					}
					m.state = VoiceMessageStopped;
					emit audioStopped(m.audio);
				} else if (1000 * (pos + m.skipStart - m.started) >= AudioFadeDuration * AudioVoiceMsgFrequency) {
					// Fade window elapsed: restore full gain and commit the target state.
					fading = false;
					alSourcef(m.source, AL_GAIN, 1);
					switch (m.state) {
					case VoiceMessageFinishing: alSourceStop(m.source); m.state = VoiceMessageStopped; break;
					case VoiceMessagePausing: alSourcePause(m.source); m.state = VoiceMessagePaused; break;
					case VoiceMessageStarting:
					case VoiceMessageResuming:
						m.state = VoiceMessagePlaying;
						playing = true;
					break;
					}
				} else {
					// Mid-fade: linear ramp in [0, 1); inverted for fade-outs.
					float64 newGain = 1000. * (pos + m.skipStart - m.started) / (AudioFadeDuration * AudioVoiceMsgFrequency);
					if (m.state == VoiceMessagePausing || m.state == VoiceMessageFinishing) {
						newGain = 1. - newGain;
					}
					// pos can lag m.started right after a (re)start, making the ramp
					// negative; AL_GAIN must be non-negative or alSourcef() raises
					// AL_INVALID_VALUE (which would error the message on the next
					// _checkALError). Clamp to silence instead. This replaces a
					// leftover no-op debugger stub that only tested `newGain < 0`.
					if (newGain < 0.) {
						newGain = 0.;
					}
					alSourcef(m.source, AL_GAIN, newGain);
					LOG(("Now volume is: %1").arg(newGain));
				}
			} else if (playing && (state == AL_PLAYING || !m.loading)) {
				if (state != AL_PLAYING) {
					// Playback drained with no fade in progress — mark stopped.
					playing = false;
					if (m.source) {
						alSourceStop(m.source);
						alSourcef(m.source, AL_GAIN, 1);
					}
					m.state = VoiceMessageStopped;
					emit audioStopped(m.audio);
				}
			}
			// Throttled position updates for listeners.
			if (pos + m.skipStart - m.position >= AudioCheckPositionDelta) {
				m.position = pos + m.skipStart;
				emit playPositionUpdated(m.audio);
			}
			// Request more data before the buffered tail runs out.
			if (!m.loading && m.skipEnd > 0 && m.position + AudioPreloadSamples + m.skipEnd > m.duration) {
				m.loading = true;
				emit needToPreload(m.audio);
			}
			if (playing) hasPlaying = true;
			if (fading) hasFading = true;
		}
	}
	// Tick fast while fading; fall back to the coarser position cadence while playing.
	if (hasFading) {
		_timer.start(AudioFadeTimeout);
	} else if (hasPlaying) {
		_timer.start(AudioCheckPositionTimeout);
	}
}