KVTGIDZA::KVTGIDZA(const Char_t* name, Int_t npar, Int_t type, Int_t light)
    : KVTGIDZ(name, npar, type, light, -1)
{
    // Create A identification with given "name", using the generalised
    // Tassan-Got functional KVTGIDFunctions::fede.
    //   npar  = total number of parameters
    //   type  = functional type (0: standard, 1: extended)
    //   light = with (1) or without (0) CsI light-energy dependence

    fZorA = 0;
    SetParameter(2, fZorA);
    SetParName(3, "Z");
}
int DasaRobot_MobilityUltrasonic::Initialize(Property parameter)
{
    if (InitializeUART(parameter) != API_SUCCESS) {
        PrintMessage("ERROR : DasaRobot_MobilityUltrasonic::Initialize() -> An error occurred in InitializeUART().\n");
        return API_ERROR;
    }

    if (SetParameter(parameter) != API_SUCCESS) {
        return API_ERROR;
    }

    return API_SUCCESS;
}
int Robotis_DynamixelUART::Initialize(Property parameter)
{
    if (InitializeUART(parameter) != API_SUCCESS) {
        PrintMessage("ERROR : Robotis_DynamixelUART::Initialize() -> An error occurred in InitializeUART().\n");
        return API_ERROR;
    }

    if (SetParameter(parameter) != API_SUCCESS) {
        return API_ERROR;
    }

    return API_SUCCESS;
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// TremoloUnit::TremoloUnit
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// The constructor for new TremoloUnit audio units
TremoloUnit::TremoloUnit(AudioUnit component) : AUEffectBase(component)
{
    // This method, defined in the AUBase superclass, ensures that the required audio unit
    // elements are created and initialized.
    CreateElements();

    // Invokes the use of an STL vector for parameter access.
    // See AUBase/AUScopeElement.cpp
    Globals()->UseIndexedParameters(kNumberOfParameters);

    // During instantiation, sets up the parameters according to their defaults.
    // The parameter defaults should correspond to the settings for the default preset.
    SetParameter(kParameter_Frequency, kDefaultValue_Tremolo_Freq);
    SetParameter(kParameter_Depth, kDefaultValue_Tremolo_Depth);
    SetParameter(kParameter_Waveform, kDefaultValue_Tremolo_Waveform);

    // Also during instantiation, sets the preset menu to indicate the default preset,
    // which corresponds to the default parameters. It's possible to set this so a
    // fresh audio unit indicates the wrong preset, so be careful to get it right.
    SetAFactoryPresetAsCurrent(kPresets[kPreset_Default]);

#if AU_DEBUG_DISPATCHER
    mDebugDispatcher = new AUDebugDispatcher(this);
#endif
}
Talkbox::Talkbox(AudioComponentInstance component) : AUEffectBase(component, true)
{
    // init internal parameters...
    for (AudioUnitParameterID i = 0; i < NPARAM; i++) {
        AudioUnitParameterInfo paramInfo;
        OSStatus status = GetParameterInfo(kAudioUnitScope_Global, i, paramInfo);
        if (status == noErr)
            SetParameter(i, paramInfo.defaultValue);
    }

    dspKernels = NULL;
    numAllocatedChannels = 0;
}
///////////////////////////////////////////////////////////////////////
// Function: SetLiteralParameter
//
// Author: $author$
// Date: 7/21/2011
///////////////////////////////////////////////////////////////////////
virtual eError SetLiteralParameter(const char* name, const char* value)
{
    eError error = e_ERROR_FAILED;

    if ((name) && (value)) {
        // Wrap the literal value in quote characters before storing it
        // as the parameter expression.
        m_paramName.assign(name);
        m_paramExpression.assign(m_literalQuote);
        m_paramExpression.append(value);
        m_paramExpression.append(m_literalQuote);
        error = SetParameter(m_paramName, m_paramExpression);
    }
    return error;
}
//--------------------------------------------------------------------------------
Tracker::Tracker(AudioComponentInstance inComponentInstance)
    : AUEffectBase(inComponentInstance)
{
    summedInputBuffer = NULL;

    // init internal parameters...
    for (AudioUnitParameterID i = 0; i < kNumParams; i++) {
        AudioUnitParameterInfo paramInfo;
        OSStatus status = GetParameterInfo(kAudioUnitScope_Global, i, paramInfo);
        if (status == noErr)
            SetParameter(i, paramInfo.defaultValue);
    }
}
int KITECH_SDM8::Initialize(Property parameter)
{
    if (InitializeUART(parameter) != API_SUCCESS) {
        PrintMessage("ERROR : KITECH_SDM8::Initialize() -> Can't initialize the UART API\n");
        return API_ERROR;
    }

    if (SetParameter(parameter) != API_SUCCESS) {
        PrintMessage("ERROR : KITECH_SDM8::Initialize() -> Can't set up a parameter\n");
        return API_ERROR;
    }

    return API_SUCCESS;
}
// Start searching for TV channels
U32 tvcore_startSearchTV(STVMode iMode, TuningParam *pTuningParam, ISearchTVNotify *pNotify)
{
    // Stop descrambling
    tvplayer_stop_descramb();

    // Get the operator IDs
    int nId = 0;
    int nReplyLen = sizeof(int);
    int nRet = GetParameter(GET_OPERATOR_ID, &nId, &nReplyLen);
    if (-1 == nRet || -1 == nId) {
        tvplayer_set_module_state(1);
        return StartSearchTV(iMode, pTuningParam, pNotify);
    }

    std::vector<OperatorId> ids;
    U16 uRet = stbca_get_operator_ids(ids);
    if (0 != uRet || ids.size() <= 0) {
        LOGTRACE(LOGINFO, "GetOperatorIds failed or not exist.\n");
        return uRet;
    }

    nId = (0 == nId) ? ids[0] : nId;

    bool bMatched = false;
    std::vector<OperatorId>::iterator iter = ids.begin();
    for (; iter != ids.end(); iter++) {
        if (nId == *iter) {
            bMatched = true;
            break;
        }
    }
    if (!bMatched) {
        LOGTRACE(LOGINFO, "not exist operator id.\n");
        return -1;
    }

    std::vector<U32> acs;
    uRet = (U32)stbca_get_operator_acs(nId, acs);
    if (0 != uRet || acs.size() <= 0) {
        LOGTRACE(LOGINFO, "GetOperatorAcs failed or not exist.\n");
        return uRet;
    }

    int nReqLen = sizeof(U32) * acs.size();
    SetParameter(SET_OPERATOR_ACS, &acs, nReqLen);

    tvplayer_set_module_state(1);
    return StartSearchTV(iMode, pTuningParam, pNotify);
}
OSStatus AKSampler_Plugin::RestoreState(CFPropertyListRef inData)
{
    // Base-class method restores parameter values to wherever it stashes them
    AUBase::RestoreState(inData);

    // So, we have to ask the base GetParameter() method for each of the values, then call
    // our own SetParameter() to put them in this class's variables.
    for (int paramID = 0; paramID < kNumberOfParams; paramID++) {
        float value;
        AUBase::GetParameter(paramID, kAudioUnitScope_Global, 0, value);
        SetParameter(paramID, kAudioUnitScope_Global, 0, value, 0);
    }

    return noErr;
}
DataShared::DataShared(string confPath)
{
    loadConf(confPath, m_confMap);
    SetParameter(m_confMap);
    loadData();

    /*
    m_pOut = new ofstream(m_outFile.c_str());
    if (m_pOut == NULL) {
#ifdef _ERROR
        cerr << "Can not open " << m_outFile << endl;
#endif
        return;
    }
    */

    m_iter = m_freq1Item.begin();
    m_taskCount = m_freq1Item.size();
}
// Build the network from the model files.
// If "cudnn" is not specified in process, modify the model so that cuDNN is not invoked.
Waifu2x::eWaifu2xError cNet::ConstractNet(const Waifu2x::eWaifu2xModelType mode, const boost::filesystem::path &model_path, const boost::filesystem::path &param_path, const stInfo &info, const std::string &process)
{
    Waifu2x::eWaifu2xError ret;

    mMode = mode;

    LoadParamFromInfo(mode, info);

    boost::filesystem::path modelbin_path = model_path;
    modelbin_path += ".protobin";
    boost::filesystem::path caffemodel_path = param_path;
    caffemodel_path += ".caffemodel";

    caffe::NetParameter param_model;
    caffe::NetParameter param_caffemodel;

    const auto retModelBin = readProtoBinary(modelbin_path, &param_model);
    const auto retParamBin = readProtoBinary(caffemodel_path, &param_caffemodel);

    if (retModelBin == Waifu2x::eWaifu2xError_OK && retParamBin == Waifu2x::eWaifu2xError_OK)
    {
        ret = SetParameter(param_model, process);
        if (ret != Waifu2x::eWaifu2xError_OK)
            return ret;

        if (!caffe::UpgradeNetAsNeeded(caffemodel_path.string(), &param_caffemodel))
            return Waifu2x::eWaifu2xError_FailedParseModelFile;

        mNet = boost::shared_ptr<caffe::Net<float>>(new caffe::Net<float>(param_model));
        mNet->CopyTrainedLayersFrom(param_caffemodel);
    }
    else
    {
        const auto ret = LoadParameterFromJson(model_path, param_path, modelbin_path, caffemodel_path, process);
        if (ret != Waifu2x::eWaifu2xError_OK)
            return ret;
    }

    const auto &inputs = mNet->input_blobs();
    if (inputs.empty())
        return Waifu2x::eWaifu2xError_FailedConstructModel;

    if (mInputPlane != inputs[0]->channels())
        return Waifu2x::eWaifu2xError_FailedConstructModel;

    return Waifu2x::eWaifu2xError_OK;
}
int SerialWindows::OpenSerial(void)
{
    if (handle != INVALID_HANDLE_VALUE) {
        return 0;
    }

    handle = CreateFile((char *)portName.c_str(), GENERIC_READ | GENERIC_WRITE, 0, 0, OPEN_EXISTING, 0, 0);
    if (handle == INVALID_HANDLE_VALUE) {
        return -1;
    }

    if (SetParameter(timeOut, baudRate, dataBits, stopBits, parity, flowControl) < 0) {
        CloseSerial();
        return -1;
    }

    return 0;
}
OSStatus FullBacano::NewFactoryPresetSet (                      // 1
    const AUPreset &inNewFactoryPreset
) {
    SInt32 chosenPreset = inNewFactoryPreset.presetNumber;      // 2

    if (                                                        // 3
        chosenPreset == kPreset_Mera ||
        chosenPreset == kPreset_NoTanBacano
    ) {
        for (int i = 0; i < kNumberPresets; ++i) {              // 4
            if (chosenPreset == kPresets[i].presetNumber) {
                switch (chosenPreset) {                         // 5
                    case kPreset_Mera:                          // 6
                        SetParameter (                          // 7
                            kParameter_Frequency,
                            kParameter_Preset_Frequency_Mera
                        );
                        SetParameter (                          // 8
                            kParameter_Bacaneria,
                            kParameter_Preset_Bacaneria_Mera
                        );
                        SetParameter (                          // 9
                            kParameter_LaMondaEnElVolco,
                            kParameter_Preset_LaMondaEnElVolco_Mera
                        );
                        break;

                    case kPreset_NoTanBacano:                   // 10
                        SetParameter (
                            kParameter_Frequency,
                            kParameter_Preset_Frequency_NoTanBacano
                        );
                        SetParameter (
                            kParameter_Bacaneria,
                            kParameter_Preset_Bacaneria_NoTanBacano
                        );
                        SetParameter (
                            kParameter_LaMondaEnElVolco,
                            kParameter_Preset_LaMondaEnElVolco_NoTanBacano
                        );
                        break;
                }
                SetAFactoryPresetAsCurrent (                    // 11
                    kPresets[i]
                );
                return noErr;                                   // 12
            }
        }
    }
    return kAudioUnitErr_InvalidProperty;                       // 13
}
int YujinRobot_iRobiQBumper::Initialize(Property parameter)
{
    if (SetParameter(parameter) == API_ERROR) {
        return API_ERROR;
    }

    if (bumper != NULL) {
        delete bumper;
    }
    bumper = new iRobiQBumper();

    if (bumper->iRobiQ_Initialize() == false) {
        Finalize();
        return API_ERROR;
    }

    return API_SUCCESS;
}
ESPEAK_NG_API espeak_ng_STATUS espeak_ng_Cancel(void)
{
#ifdef USE_ASYNC
    fifo_stop();
    event_clear_all();
#endif

#ifdef HAVE_PCAUDIOLIB_AUDIO_H
    if ((my_mode & ENOUTPUT_MODE_SPEAK_AUDIO) == ENOUTPUT_MODE_SPEAK_AUDIO)
        audio_object_flush(my_audio);
#endif

    embedded_value[EMBED_T] = 0; // reset echo for pronunciation announcements
    for (int i = 0; i < N_SPEECH_PARAM; i++)
        SetParameter(i, saved_parameters[i], 0);

    return ENS_OK;
}
void ShowSegmentationAsSurface::Initialize(const NonBlockingAlgorithm *other)
{
    Superclass::Initialize(other);

    bool syncVisibility(false);

    if (other)
    {
        other->GetParameter("Sync visibility", syncVisibility);
    }

    SetParameter("Sync visibility", syncVisibility);
    SetParameter("Median kernel size", 3u);
    SetParameter("Apply median", true);
    SetParameter("Smooth", true);
    SetParameter("Gaussian SD", 1.5f);
    SetParameter("Decimate mesh", true);
    SetParameter("Decimation rate", 0.8f);
    SetParameter("Wireframe", false);
}
void SegmentationSink::Initialize(const NonBlockingAlgorithm* other)
{
    Superclass::Initialize(other);

    // Sinks should be called explicitly from the tool, because otherwise the
    // order of setting "Input" and "Group node" would matter.
    UnDefineTriggerParameter("Input"); // some basedata output

    DataNode::Pointer groupNode;
    bool showResult(true);

    if (other)
    {
        other->GetPointerParameter("Group node", groupNode);
        other->GetParameter("Show result", showResult);
    }

    SetPointerParameter("Group node", groupNode);
    SetParameter("Show result", showResult);
}
ESPEAK_API espeak_ERROR espeak_Cancel(void)
{//===============================
#ifdef USE_ASYNC
    ENTER("espeak_Cancel");
    fifo_stop();
    event_clear_all();

    if (my_mode == AUDIO_OUTPUT_PLAYBACK) {
        wave_close(my_audio);
    }
    SHOW_TIME("espeak_Cancel > LEAVE");
#endif

    embedded_value[EMBED_T] = 0; // reset echo for pronunciation announcements
    for (int i = 0; i < N_SPEECH_PARAM; i++)
        SetParameter(i, saved_parameters[i], 0);

    return EE_OK;
} // end of espeak_Cancel
// Initialize the content (attachment) by reading from a file.
bool MimeBody::ReadFromFile(const String &pszFilename)
{
    File oFile;
    if (!oFile.Open(pszFilename, File::OTReadOnly))
    {
        if (IniFileSettings::Instance()->GetLogLevel() > 99)
            LOG_DEBUG("MimeBody::ReadFromFile - Error opening file RO");
        return false;
    }

    shared_ptr<ByteBuffer> pUnencodedBuffer = oFile.ReadFile();

    if (!pUnencodedBuffer)
    {
        if (IniFileSettings::Instance()->GetLogLevel() > 99)
            LOG_DEBUG("MimeBody::ReadFromFile - pUnencodedBuffer empty");
        return false;
    }

    // Encode the file, to base64 or similar.
    MimeCodeBase* pCoder = MimeEnvironment::CreateCoder(GetTransferEncoding());
    ASSERT(pCoder != NULL);
    pCoder->SetInput((const char*) pUnencodedBuffer->GetCharBuffer(), pUnencodedBuffer->GetSize(), true);

    // Copy the buffer.
    pCoder->GetOutput(m_pbText);

    AnsiString sCharset = "utf-8";

    // Set the 'name' parameter to the encoded file name.
    String sFileName = FileUtilities::GetFileNameFromFullPath(pszFilename);
    AnsiString sEncodedValue = MIMEUnicodeEncoder::EncodeValue(sCharset, sFileName);
    SetName(sEncodedValue);

    // Create a content-disposition header as well.
    SetRawFieldValue(CMimeConst::ContentDisposition(), CMimeConst::Inline(), "");
    SetParameter(CMimeConst::ContentDisposition(), CMimeConst::Filename(), sEncodedValue);

    if (IniFileSettings::Instance()->GetLogLevel() > 99)
        LOG_DEBUG("MimeBody::ReadFromFile - Attachment encoded successfully");

    return true;
}
// Accelerate at perigee
bool EventSpeedUp(const CEGUI::EventArgs& e)
{
    /*if(SatelliteStatus::current_status != SatelliteStatus::NEAR_TRACK){
        //TODO: Announce BUG
        return true;
    }
    SatelliteStatus::current_status = SatelliteStatus::ECLLIPSE;*/

    if (SatelliteStatus::current_status != SatelliteStatus::UNLAUNCHED) {
        return false;
    }

    // Read the perigee/apogee values from the UI and apply them
    double near_ = 468.55;
    double far_ = 800.00;

    CEGUI::WindowManager* winMgr = CEGUI::WindowManager::getSingletonPtr();
    CEGUI::String& strnear = const_cast<CEGUI::String&>(winMgr->getWindow("edit_near")->getText());
    CEGUI::String& strfar = const_cast<CEGUI::String&>(winMgr->getWindow("edit_far")->getText());
    near_ = atof(strnear.c_str());
    far_ = atof(strfar.c_str());

    SetParameter(near_, far_);
    return true;
}
// Build the network from the model files.
// If "cudnn" is not specified in process, modify the model so that cuDNN is not invoked.
Waifu2x::eWaifu2xError Waifu2x::ConstractNet(boost::shared_ptr<caffe::Net<float>> &net, const boost::filesystem::path &model_path, const boost::filesystem::path &param_path, const std::string &process)
{
    boost::filesystem::path modelbin_path = model_path;
    modelbin_path += ".protobin";
    boost::filesystem::path caffemodel_path = param_path;
    caffemodel_path += ".caffemodel";

    caffe::NetParameter param_model;
    caffe::NetParameter param_caffemodel;

    const auto retModelBin = readProtoBinary(modelbin_path, &param_model);
    const auto retParamBin = readProtoBinary(caffemodel_path, &param_caffemodel);

    if (retModelBin == eWaifu2xError_OK && retParamBin == eWaifu2xError_OK)
    {
        Waifu2x::eWaifu2xError ret;

        ret = SetParameter(param_model, process);
        if (ret != eWaifu2xError_OK)
            return ret;

        if (!caffe::UpgradeNetAsNeeded(caffemodel_path.string(), &param_caffemodel))
            return Waifu2x::eWaifu2xError_FailedParseModelFile;

        net = boost::shared_ptr<caffe::Net<float>>(new caffe::Net<float>(param_model));
        net->CopyTrainedLayersFrom(param_caffemodel);

        input_plane = param_model.input_dim(1);
    }
    else
    {
        const auto ret = LoadParameterFromJson(net, model_path, param_path, modelbin_path, caffemodel_path, process);
        if (ret != eWaifu2xError_OK)
            return ret;
    }

    return eWaifu2xError_OK;
}
void ChebyshevPol()
{
    auto legend = new TLegend(0.88, 0.4, 1., 1.);

    int colors[] = { kRed, kRed+3, kMagenta, kMagenta+3, kBlue, kBlue+3, kCyan+3, kGreen, kGreen+3, kYellow, kOrange };

    for (int degree = 0; degree <= 10; ++degree) {
        auto f1 = new TF1("f1", TString::Format("cheb%d", degree), -1, 1);
        // all parameters are zero apart from the one corresponding to the degree
        f1->SetParameter(degree, 1);
        f1->SetLineColor(colors[degree]);
        f1->SetMinimum(-1.2);
        f1->SetMaximum(1.2);
        TString opt = (degree == 0) ? "" : "same";
        //f1->Print("V");
        f1->SetNpx(1000);
        f1->SetTitle("Chebyshev Polynomial");
        f1->Draw(opt);
        legend->AddEntry(f1, TString::Format("N=%d", degree), "L");
    }
    legend->Draw();
}
void ReverbChannel::SetSamplerate(int samplerate)
{
    this->samplerate = samplerate;
    highPass.SetSamplerate(samplerate);
    lowPass.SetSamplerate(samplerate);

    for (size_t i = 0; i < lines.size(); i++) {
        lines[i]->SetSamplerate(samplerate);
    }

    // Re-apply the delay and modulation parameters so that any time-based
    // values are recomputed for the new sample rate.
    auto update = [&](Parameter p) { SetParameter(p, parameters[p]); };

    update(Parameter::PreDelay);
    update(Parameter::TapLength);
    update(Parameter::DiffusionDelay);
    update(Parameter::LineDelay);
    update(Parameter::LateDiffusionDelay);

    update(Parameter::EarlyDiffusionModRate);
    update(Parameter::LineModRate);
    update(Parameter::LateDiffusionModRate);
    update(Parameter::LineModAmount);

    UpdateLines();
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// karoke::karoke
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
karoke::karoke(AudioUnit component) : AUEffectBase(component, false)
{
    CreateElements();

    CAStreamBasicDescription streamDescIn;
    streamDescIn.SetCanonical(NUM_INPUTS, false);   // number of input channels
    streamDescIn.mSampleRate = GetSampleRate();

    CAStreamBasicDescription streamDescOut;
    streamDescOut.SetCanonical(NUM_OUTPUTS, false); // number of output channels
    streamDescOut.mSampleRate = GetSampleRate();

    Inputs().GetIOElement(0)->SetStreamFormat(streamDescIn);
    Outputs().GetIOElement(0)->SetStreamFormat(streamDescOut);

    Globals()->UseIndexedParameters(kNumberOfParameters);
    SetParameter(kParam_One, kDefaultValue_ParamOne);

#if AU_DEBUG_DISPATCHER
    mDebugDispatcher = new AUDebugDispatcher(this);
#endif

    itsBassFilter = new FirFilter(200);
    itsBassFilter->setCoeffecients(lp_200, 200);
}
//_____________________________________________________________________________
//
const UInt8 * AUElement::RestoreState(const UInt8 *state)
{
    union FloatInt32 {
        UInt32 i;
        AudioUnitParameterValue f;
    };

    const UInt8 *p = state;
    UInt32 nparams = CFSwapInt32BigToHost(*(UInt32 *)p);
    p += sizeof(UInt32);

    for (UInt32 i = 0; i < nparams; ++i) {
        struct {
            AudioUnitParameterID    paramID;
            AudioUnitParameterValue value;
        } entry;

        entry.paramID = CFSwapInt32BigToHost(*(UInt32 *)p);
        p += sizeof(UInt32);

        FloatInt32 temp;
        temp.i = CFSwapInt32BigToHost(*(UInt32 *)p);
        entry.value = temp.f;
        p += sizeof(AudioUnitParameterValue);

        SetParameter(entry.paramID, entry.value);
    }
    return p;
}
void OnOptions2(int event_id)
{//==========================
    wxString string;
    int value;

    switch (event_id)
    {
    case MENU_OPT_SPEED:
        value = wxGetNumberFromUser(_T(""), _T(""), _T("Speed"), option_speed, 80, 500);
        if (value > 0)
        {
            option_speed = value;
            SetParameter(espeakRATE, option_speed, 0);
            SetSpeed(3);
        }
        break;

    case MENU_OPT_PUNCT:
        transldlg->t_source->SetValue(_T("<tts:style field=\"punctuation\" mode=\"all\">\n"));
        transldlg->t_source->SetInsertionPointEnd();
        notebook->SetSelection(1);
        break;

    case MENU_OPT_SPELL:
        transldlg->t_source->SetValue(_T("<say-as interpret-as=\"tts:char\">\n"));
        transldlg->t_source->SetInsertionPointEnd();
        notebook->SetSelection(1);
        break;

    case MENU_PATH0:
        string = wxFileSelector(_T("Master phonemes file"), wxFileName(path_phfile).GetPath(),
            _T("phonemes"), _T(""), _T("*"), wxOPEN);
        if (!string.IsEmpty())
        {
            path_phfile = string;
        }
        break;

    case MENU_PATH1:
        string = wxDirSelector(_T("Phoneme source directory"), path_phsource);
        if (!string.IsEmpty())
        {
            path_phsource = string;
        }
        break;

    case MENU_PATH2:
        string = wxDirSelector(_T("Dictionary source directory"), path_dictsource);
        if (!string.IsEmpty())
        {
            path_dictsource = string;
        }
        break;

    case MENU_PATH3:
        string = wxFileSelector(_T("Sound output file"), wxFileName(path_speech).GetPath(),
            _T(""), _T("WAV"), _T("*"), wxSAVE);
        if (!string.IsEmpty())
        {
            path_speech = string;
        }
        break;

    case MENU_PATH4:
        string = wxFileSelector(_T("Voice file to modify formant peaks"), wxFileName(path_speech).GetPath(),
            _T(""), _T(""), _T("*"), wxOPEN);
        if (!string.IsEmpty())
        {
            path_modifiervoice = string;
        }
        break;
    }

    ConfigSetPaths();
}
MyFrame::MyFrame(wxWindow *parent, const wxWindowID id, const wxString& title, const wxPoint& pos,
    const wxSize& size, const long style)
    : wxFrame(parent, id, title, pos, size, style)
{//===================================================================================================================
    // Main Frame constructor

    int error_flag = 0;
    int result;
    int param;
    int srate;

    notebook = new wxNotebook(this, ID_NOTEBOOK, wxDefaultPosition, wxSize(312,760));
//  notebook->AddPage(voicedlg,_T("Voice"),FALSE);
    formantdlg = new FormantDlg(notebook);
    notebook->AddPage(formantdlg,_T(" Spect"),FALSE);
    voicedlg = new VoiceDlg(notebook);

    transldlg = new TranslDlg(notebook);
    notebook->AddPage(transldlg,_T("Text"),TRUE);

    screenpages = new wxNotebook(this, ID_SCREENPAGES, wxDefaultPosition, wxSize(554,702));

    wxBoxSizer *framesizer = new wxBoxSizer( wxHORIZONTAL );

    framesizer->Add(
        notebook,
        0,            // not horizontally stretchable
        wxEXPAND |    // make vertically stretchable
        wxALL,        // and make border all around
        4 );          // set border width

    framesizer->Add(
        screenpages,
        1,            // make horizontally stretchable
        wxEXPAND |    // make vertically stretchable
        wxALL,        // and make border all around
        4 );          // set border width

    SetSizer( framesizer );              // use the sizer for layout
    framesizer->SetSizeHints( this );    // set size hints to honour minimum size
    SetSize(pos.x, pos.y, size.GetWidth(), size.GetHeight());

    LoadConfig();

    if ((result = LoadPhData(&srate)) != 1)
    {
        if (result == -1)
            wxLogError(_T("Failed to read espeak-data/phontab,phondata,phonindex\nPath = ")+wxString(path_home,wxConvLocal)+_T("\n\nThe 'eSpeak' package needs to be installed"));
        else
            wxLogError(_T("Wrong version of espeak-data at:\n")+ wxString(path_home,wxConvLocal)+_T("\nVersion 0x%x (expects 0x%x)"),result,version_phdata);

        error_flag = 1;
        srate = 22050;
    }
    WavegenInit(srate,0);
    WavegenInitSound();

    f_trans = stdout;
    option_ssml = 1;
    option_phoneme_input = 1;

//  if(LoadVoice(voice_name,0) == NULL)
    if (SetVoiceByName(voice_name2) != EE_OK)
    {
        if (error_flag == 0)
            wxLogError(_T("Failed to load voice data"));
        strcpy(dictionary_name,"en");
    }
    WavegenSetVoice(voice);

    for (param = 0; param < N_SPEECH_PARAM; param++)
        param_stack[0].parameter[param] = param_defaults[param];

    SetParameter(espeakRATE, option_speed, 0);
    SetSpeed(3);
    SynthesizeInit();

    InitSpectrumDisplay();
    InitProsodyDisplay();
//  InitWaveDisplay();

    espeak_ListVoices(NULL);

    m_timer.SetOwner(this, 1);
    m_timer.Start(500);   /* 0.5 second timer */
}  // end of MyFrame::MyFrame
//! SetFactory is for expert users only. To change configuration of the preconditioner, use a factory manager.
virtual void SetFactory(const std::string & varName, const RCP<const FactoryBase> & factory) {
    RCP<const FactoryBase> f = factory;
    SetParameter(varName, ParameterEntry(f)); // parameter validation done in ParameterListAcceptorImpl
}
void SaPFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::SetDampingFactor(Scalar dampingFactor) {
    SetParameter("sa: damping factor", ParameterEntry(dampingFactor)); // revalidate
}