/**
 * Toggle the mute state to its opposite value.
 * @return The state after toggling (true = now muted).
 */
bool master_volume::toggleMute()
{
    const bool muted = !isMute();
    setMute(muted);
    return muted;
}
// Tree item representing a mixer group. A new group starts un-soloed,
// unmuted and at unity scale, labelled with the translated "GROUP" caption.
NxGroup::NxGroup(ApplicationCurrent *parent, QTreeWidgetItem *ccParentItem)
    : QObject(parent), QTreeWidgetItem(ccParentItem)
{
    setText(0, tr("GROUP"));
    setSolo(0);
    setMute(0);
    scaleDest = 1;
    scale = 1;
}
// Set a Part-level property identified by id from a QVariant value.
// Unknown ids are only logged; in every case a full relayout is
// requested and true is returned.
bool Part::setProperty(P_ID id, const QVariant& property)
{
      switch (id) {
      case P_ID::VISIBLE:
            setShow(property.toBool());
            break;
      case P_ID::USE_DRUMSET:
            // drumset flag lives on the instrument, not the part itself
            instrument()->setUseDrumset(property.toBool());
            break;
      case P_ID::PART_VOLUME:
            setVolume(property.toInt());
            break;
      case P_ID::PART_MUTE:
            setMute(property.toBool());
            break;
      case P_ID::PART_PAN:
            setPan(property.toInt());
            break;
      case P_ID::PART_REVERB:
            setReverb(property.toInt());
            break;
      case P_ID::PART_CHORUS:
            setChorus(property.toInt());
            break;
      default:
            qDebug("Part::setProperty: unknown id %d", int(id));
            break;
            }
      // any part property change invalidates the whole layout
      score()->setLayoutAll();
      return true;
      }
// Apply a recorded action to this sample channel.
// Does nothing while action reading (readActions) is disabled.
//   a           - the recorded action to replay
//   localFrame  - frame offset inside the current audio buffer
//   globalFrame - absolute song position (used for volume envelopes)
void SampleChannel::parseAction(recorder::action *a, int localFrame, int globalFrame)
{
	if (readActions == false)
		return;
	switch (a->type) {
		case ACTION_KEYPRESS:
			// start/stop/kill only apply in one-shot ("single") modes
			if (mode & SINGLE_ANY)
				start(localFrame, false);
			break;
		case ACTION_KEYREL:
			if (mode & SINGLE_ANY)
				stop();
			break;
		case ACTION_KILLCHAN:
			if (mode & SINGLE_ANY)
				kill(localFrame);
			break;
		case ACTION_MUTEON:
			setMute(true);   // internal mute
			break;
		case ACTION_MUTEOFF:
			unsetMute(true); // internal mute
			break;
		case ACTION_VOLUME:
			calcVolumeEnv(globalFrame);
			break;
	}
}
//--------------------------------------------------------------------------- // //! \brief one checkbox was muted -> emit mute signal // //! \author Jo2003 //! \date 26.05.2014 // //! \param val (bool) muted or not // //! \return -- //--------------------------------------------------------------------------- void QFusionControl::slotMute(bool val) { // synchronize ... setMute(val); emit sigMute(val); }
// void TDA8425::Init () // инициализация void TDA8425::Init (boolean mute, byte volume, byte bass, byte treble, byte source) { setVolume(volume); setBass(bass); setTreble(treble); setMute(mute); setSource(source); }
// Button click handler: flips the mute state and persists the new value,
// using the DnD-specific settings key while the user is in Do-not-Disturb.
void SoundButton::clicked()
{
  setMute(!m_mute);

  // NOTE(review): !m_mute is evaluated again AFTER setMute(); if setMute()
  // itself updates m_mute, the persisted value would be the opposite of the
  // state just applied — confirm setMute() leaves m_mute untouched here.
  if (ChatClient::channel()->status() == Status::DnD)
    ChatCore::settings()->setValue(LS("Alerts/Sounds.DnD"), !m_mute);
  else
    ChatCore::settings()->setValue(LS("Alerts/Sounds"), !m_mute);
}
// Toolbar button that toggles alert sounds. The initial state comes from
// the global ChatAlerts mute flag; later external changes are tracked via
// the settings-changed signal.
SoundButton::SoundButton(QWidget *parent)
  : QToolButton(parent)
  , m_mute(ChatAlerts::isMute())
{
  setAutoRaise(true);
  setMute(m_mute);

  connect(this, SIGNAL(clicked(bool)), SLOT(clicked()));
  connect(ChatCore::settings(), SIGNAL(changed(QString,QVariant)), SLOT(settingsChanged(QString,QVariant)));
}
// Outgoing (dialout) call was answered: record the connect timestamp,
// open the audio path and join the callee into the conference room
// named after the dialog user part.
void WebConferenceDialog::onSessionStart(const AmSipReply& rep)
{
  time(&connect_ts);    // remember when the call got connected
  setMute(false);       // participant audio is live from now on

  DBG("########## dialout: connect to conference '%s' #########\n", dlg.user.c_str());
  state = InConference;
  setAudioLocal(AM_AUDIO_IN, false);
  setAudioLocal(AM_AUDIO_OUT, false);
  connectConference(dlg.user);
}
// Set mpv's mute flag. While a file is loaded (playState > 0) the change
// is issued as an asynchronous "set mute" command; otherwise only the
// cached state is updated so it takes effect when playback starts.
void MpvHandler::Mute(bool m)
{
    if(playState <= 0)
    {
        setMute(m);
        return;
    }
    const char *args[] = {"set", "mute", m ? "yes" : "no", NULL};
    AsyncCommand(args);
}
// Early-media session started: start recording the provisional audio
// into a per-call WAV file under /tmp and mute our own sending side.
void EarlyRecordDialog::onEarlySessionStart(const AmSipReply& req)
{
  DBG("Early Session Start\n");

  msg_filename = "/tmp/" + getLocalTag() + ".wav";
  // NOTE: throws a plain std::string on failure, matching this module's
  // existing error-handling style
  if(a_msg.open(msg_filename,AmAudioFile::Write,false))
    throw string("EarlyRecordDialog: couldn't open ") + msg_filename + string(" for writing");

  setInput(&a_msg);
  setMute(true);    // we only record; nothing is sent to the peer
}
// this performs a volume change on the device void AudioDevice::setVolume(int volume) { if (m_volume == volume) return; setVolumeNoCommit(volume); setMute(false); if (m_engine) m_engine->commitDeviceVolume(this); }
// Console character-event handler for the synth.
// Handles preset selection ('p'), the quiet-reset marker ('\''), full
// reset ('!') and the end-of-macro settle character ('.'); everything
// else is forwarded to the superclass.
// Returns true when the event was (at least partly) handled here.
boolean Synth::charEv( char code )
{
   switch ( code )
   {
      #ifdef INTERN_CONSOLE
      case 'p':                           // choose a preset
      {
         preset_loading = true;
         quiet_reset = false;
         if ( flags & RSTMUTE )           // if synth is prefigured for mute reset
            presets.choose();
         else                             // synth normally unmuted upon reset
         {
            presets.choose();
            // the preset may have requested a quiet reset via '\'' below;
            // if so, undo the temporary mute it installed
            if ( quiet_reset )
            {
               flags &= ~RSTMUTE;
               setMute( false );
            }
         }
         preset_loading = false;
         quiet_reset = false;
         break;
      }
      case '\'':                          // perform a quiet reset (for preset)
         // only meaningful while a preset is being loaded: mark the synth
         // for a muted reset so the preset switch is inaudible
         if ( preset_loading )
         {
            flags |= RSTMUTE;
            quiet_reset = true;
            reset();
         }
         break;
      #endif
      case '!':                           // reset
         super::charEv('!');
         keybrd.setMute( false );         // enable keybrd
         break;
      case '.':
         audio::wait( 30 );               // wait for contextual volume to "settle"
         // deliberate fall-through: '.' is then also passed to the superclass
      default:
         return super::charEv( code );
   }
   return true;
}
/* Check whether the RF input level at the given frequency is above the
 * configured threshold.
 * Sequence: reset control word, power the front end, select the RSSI
 * judgement mode, mute audio (silent during measurement), tune the PLL
 * to freq, then poll the status via inquireSTO().
 *   pll  [out] receives the PLL word computed for freq
 *   freq       tuning frequency
 * Returns the STO flag — presumably true when the signal exceeds the
 * threshold (confirm against inquireSTO()'s definition). */
static bool rssiJudgement(uint16 *pll, int16 freq)
{
    resetCtrlWord();
    setPowerSW(true);
    setSWSTD(RSSIJDG);
    setMute(true);
    *pll = freqencyToPllWord(freq);
    setPllWord(*pll);
    return inquireSTO();
}
// Early media arrived on a dialout leg: pre-connect the callee into the
// conference so they can already hear it, but keep them muted until the
// call is actually answered (onSessionStart un-mutes).
void WebConferenceDialog::onEarlySessionStart(const AmSipReply& rep)
{
  // only act on the first early session while idle or still ringing
  if ((None == state) || (InConferenceRinging == state)) {
    state = InConferenceEarly;
    DBG("########## dialout: connect early session to conference '%s' #########\n", dlg.user.c_str());
    setAudioLocal(AM_AUDIO_IN, false);
    setAudioLocal(AM_AUDIO_OUT, false);
    connectConference(dlg.user);
    setMute(true);
  }
}
bool RequestAction_Mute::executeAction() { auto id = getOptionValue("id"); auto scope = getOptionValue("scope"); auto value = Raumkernel::Tools::StringUtil::tolower(getOptionValue("value")); auto zoneScope = isZoneScope(scope); bool mute = (value == "true" || value == "1" || value.empty()) ? true : false; // we have got an id that might be a room or a zone. we have to get the scope to know what we should mute if (!id.empty()) { auto mediaRenderer = getVirtualMediaRenderer(id); if (!mediaRenderer) { logError("Room or Zone with ID: " + id + " not found!", CURRENT_FUNCTION); return false; } if (zoneScope) mediaRenderer->setMute(true, sync); else mediaRenderer->setRoomMute(getRoomUDNFromId(id), mute, sync); } // if we have no id provided, we mute all renderers else { auto zoneInfoMap = getManagerEngineer()->getZoneManager()->getZoneInformationMap(); for (auto it : zoneInfoMap) { auto rendererUDN = getManagerEngineer()->getZoneManager()->getRendererUDNForZoneUDN(it.first); auto mediaRenderer = getVirtualMediaRendererFromUDN(rendererUDN); if (mediaRenderer) mediaRenderer->setMute(mute, sync); } } return true; }
// React to a tray-icon activation.
// A single or double click records where the click happened (used later
// for window placement) and requests the window to be restored; a middle
// click toggles the mute action. Other reasons are ignored.
void TrayIcon::iconActivated(QSystemTrayIcon::ActivationReason reason)
{
    if (reason == QSystemTrayIcon::Trigger || reason == QSystemTrayIcon::DoubleClick) {
        iconPosition_ = QCursor::pos();
        geometery_ = legacyTrayIcon_->geometry();
        emit activated(RESTORE);
    }
    else if (reason == QSystemTrayIcon::MiddleClick) {
        setMute(!mute_->isChecked());
    }
}
// Ambisonic "map" processor handling several sources at the given order.
// Allocates per-harmonic scratch buffers plus one Encoder/Wider pair per
// source; every source starts unmuted (setMute(i, 0)).
// NOTE(review): the raw new[] / new allocations imply matching delete[]s
// and deletes in the destructor — confirm they exist (Rule of Three).
Map::Map(unsigned int order, unsigned int numberOfSources) : Ambisonic(order)
{
    assert(numberOfSources > 0);
    m_number_of_sources = numberOfSources;
    m_harmonics_float = new float[m_number_of_harmonics];
    m_harmonics_double = new double[m_number_of_harmonics];
    m_gains = new double[m_number_of_harmonics];
    m_muted = new bool[numberOfSources];
    m_first_source = 0;
    for(unsigned int i = 0; i < m_number_of_sources; i++)
    {
        setMute(i, 0);
        m_encoders.push_back(new Encoder(order));
        m_widers.push_back(new Wider(order));
    }
}
// Restore this notification's configuration from persistent settings.
// NOTE(review): every default is tr("") — for the numeric fields the
// subsequent toInt()/toDouble() then yields 0; presumably intentional,
// verify against the corresponding save code.
void NotificationItem::restoreState(QSettings* settings)
{
    //settings = Core::ICore::instance()->settings();
    setSoundCollectionPath(Utils::PathUtils().InsertDataPath(settings->value(QLatin1String("SoundCollectionPath"), tr("")).toString()));
    setCurrentLanguage(settings->value(QLatin1String("CurrentLanguage"), tr("")).toString());
    setDataObject(settings->value(QLatin1String("DataObject"), tr("")).toString());
    setObjectField(settings->value(QLatin1String("ObjectField"), tr("")).toString());
    // stored under the legacy key "RangeLimit"
    setCondition(settings->value(QLatin1String("RangeLimit"), tr("")).toInt());
    setSound1(settings->value(QLatin1String("Sound1"), tr("")).toString());
    setSound2(settings->value(QLatin1String("Sound2"), tr("")).toString());
    setSound3(settings->value(QLatin1String("Sound3"), tr("")).toString());
    setSayOrder(settings->value(QLatin1String("SayOrder"), tr("")).toInt());
    QVariant value = settings->value(QLatin1String("Value1"), tr(""));
    setSingleValue(value);
    setValueRange2(settings->value(QLatin1String("Value2"), tr("")).toDouble());
    setRetryValue(settings->value(QLatin1String("Repeat"), tr("")).toInt());
    setLifetime(settings->value(QLatin1String("ExpireTimeout"), tr("")).toInt());
    // mute flag is stored as an int (0/1)
    setMute(settings->value(QLatin1String("Mute"), tr("")).toInt());
}
/*
 * Apply a mute request to an LTP interface.
 * For a pure LTP interface (SIP disabled) only the cached muteB flag is
 * updated; for a SIP-backed interface the request is forwarded to the
 * underlying LTP object via setMute(). Always returns 0.
 */
int setMuteInterface(LtpInterfaceType *ltpInterfaceP, int muteB)
{
    if (ltpInterfaceP->ltpObjectP->sipOnB == false)   /* ltp interface */
    {
        ltpInterfaceP->muteB = muteB;
        return 0;
    }

    setMute(ltpInterfaceP->ltpObjectP, muteB);
    return 0;
}
void KNMusicStandardBackend::setVolume(int volumeSize) { //If we want to change the volume, check the mute state first. if(mute()) { //Un-mute the backend. setMute(false); } //Check the volume size. if(volumeSize<minimalVolume()) { volumeSize=minimalVolume(); } else if(volumeSize>maximumVolume()) { volumeSize=maximumVolume(); } //Sync the thread volume. synchronizeThreadVolume(volumeSize); }
// DSM action wrapper: switch this call's mute state on.
void DSMCall::mute()
{
  setMute(true);
}
// Toggle the cached mute state: muted becomes unmuted and vice versa.
void KNMusicStandardBackend::changeMuteState()
{
    setMute(!m_mute);
}
// Qt event hook: when the libmpv wakeup event (QEvent::User) arrives,
// drain mpv's event queue and mirror property changes / state events
// into the Qt-side handler state. Returns true when the wakeup event was
// consumed, otherwise defers to QObject::event().
bool MpvHandler::event(QEvent *event)
{
    if(event->type() == QEvent::User)
    {
        while(mpv)
        {
            // NOTE: this inner 'event' intentionally shadows the QEvent*
            // parameter for the rest of the loop body.
            mpv_event *event = mpv_wait_event(mpv, 0);
            if(event == nullptr ||
               event->event_id == MPV_EVENT_NONE)
            {
                break;
            }
            HandleErrorCode(event->error);
            switch (event->event_id)
            {
            case MPV_EVENT_PROPERTY_CHANGE:
            {
                mpv_event_property *prop = (mpv_event_property*)event->data;
                if(QString(prop->name) == "playback-time") // playback-time does the same thing as time-pos but works for streaming media
                {
                    if(prop->format == MPV_FORMAT_DOUBLE)
                    {
                        setTime((int)*(double*)prop->data);
                        lastTime = time;
                    }
                }
                else if(QString(prop->name) == "volume")
                {
                    if(prop->format == MPV_FORMAT_DOUBLE)
                        setVolume((int)*(double*)prop->data);
                }
                else if(QString(prop->name) == "sid")
                {
                    // NOTE(review): data is MPV_FORMAT_INT64 but is read
                    // through an int* — relies on little-endian layout and
                    // small values; confirm this is intended.
                    if(prop->format == MPV_FORMAT_INT64)
                        setSid(*(int*)prop->data);
                }
                else if(QString(prop->name) == "aid")
                {
                    // same int64-read-as-int caveat as "sid" above
                    if(prop->format == MPV_FORMAT_INT64)
                        setAid(*(int*)prop->data);
                }
                else if(QString(prop->name) == "sub-visibility")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                        setSubtitleVisibility((bool)*(unsigned*)prop->data);
                }
                else if(QString(prop->name) == "mute")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                        setMute((bool)*(unsigned*)prop->data);
                }
                else if(QString(prop->name) == "core-idle")
                {
                    // core idle while we think we're playing => buffering
                    if(prop->format == MPV_FORMAT_FLAG)
                    {
                        if((bool)*(unsigned*)prop->data && playState == Mpv::Playing)
                            ShowText(tr("Buffering..."), 0);
                        else
                            ShowText(QString(), 0);
                    }
                }
                else if(QString(prop->name) == "paused-for-cache")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                    {
                        if((bool)*(unsigned*)prop->data && playState == Mpv::Playing)
                            ShowText(tr("Your network is slow or stuck, please wait a bit"), 0);
                        else
                            ShowText(QString(), 0);
                    }
                }
                break;
            }
            case MPV_EVENT_IDLE:
                fileInfo.length = 0;
                setTime(0);
                setPlayState(Mpv::Idle);
                break;
                // these two look like they're reversed but they aren't. the names are misleading.
            case MPV_EVENT_START_FILE:
                setPlayState(Mpv::Loaded);
                break;
            case MPV_EVENT_FILE_LOADED:
                setPlayState(Mpv::Started);
                LoadFileInfo();
                SetProperties();
                // NOTE(review): no break — falls through into UNPAUSE so a
                // freshly loaded file immediately reports Playing; looks
                // intentional but confirm.
            case MPV_EVENT_UNPAUSE:
                setPlayState(Mpv::Playing);
                break;
            case MPV_EVENT_PAUSE:
                setPlayState(Mpv::Paused);
                ShowText(QString(), 0);
                break;
            case MPV_EVENT_END_FILE:
                // ending while still only Loaded means the file never started
                if(playState == Mpv::Loaded)
                    ShowText(tr("File couldn't be opened"));
                setPlayState(Mpv::Stopped);
                break;
            case MPV_EVENT_SHUTDOWN:
                QCoreApplication::quit();
                break;
            case MPV_EVENT_LOG_MESSAGE:
            {
                mpv_event_log_message *message = static_cast<mpv_event_log_message*>(event->data);
                if(message != nullptr)
                    emit messageSignal(message->text);
                break;
            }
            default: // unhandled events
                break;
            }
        }
        return true;
    }
    return QObject::event(event);
}
void UBGraphicsMediaItem::toggleMute() { mMuted = !mMuted; setMute(mMuted); }
// Lift the audio mute; returns whatever setMute(0) reports.
int CAudio::unmute(void)
{
	return setMute(0);
}
// Engage the audio mute; returns whatever setMute(1) reports.
int CAudio::mute(void)
{
	return setMute(1);
}
// DSM action wrapper: switch this call's mute state off.
void DSMCall::unmute()
{
  setMute(false);
}
// Convenience wrapper: clear the mute flag on the given device.
void AudioEngine::unmute(AudioDevice *device)
{
    setMute(device, false);
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IOReturn AppleTopazAudio::setCodecMute (bool muteState) { return setMute ( muteState, kDigitalAudioSelector ); // [3435307] }