void processAudio(AudioBuffer &buffer){
  setCoeffs(getLpFreq(), 0.8f);
  float delayTime = getParameterValue(PARAMETER_A); // get delay time value
  float feedback = getParameterValue(PARAMETER_B);  // get feedback value
  float wetDry = getParameterValue(PARAMETER_D);    // get wet/dry mix value
  float delaySamples = delayTime * (DELAY_BUFFER_LENGTH-1);
  int size = buffer.getSize();
  for(int ch = 0; ch < buffer.getChannels(); ++ch){
    float* buf = buffer.getSamples(ch);
    process(size, buf, outBuf); // low pass filter for delay buffer
    for(int i = 0; i < size; i++){
      outBuf[i] = outBuf[i] + feedback * delayBuffer.read(delaySamples);
      buf[i] = (1.f - wetDry) * buf[i] + wetDry * outBuf[i]; // crossfade for wet/dry balance
      delayBuffer.write(buf[i]);
    }
  }
}
void processAudio(AudioBuffer &buffer){
  FloatArray fa = buffer.getSamples(0);
  float mix = getParameterValue(PARAMETER_A);
  zcc.setHighPassCutoff(getParameterValue(PARAMETER_B)*15000+150);
  zcc.setLowPassCutoff(getParameterValue(PARAMETER_C)*500+50);
  zcc.process(fa);
  float frequency = zcc.getFrequency();
  float envelope = fa.getRms();
  fa.multiply(1-mix);
  for(int n = 0; n < fa.getSize(); n++){
    static float phase = 0;
    static float pastEnvelope = 0;
    phase += 2.0 * M_PI * frequency / getSampleRate();
    if(phase > 2.0 * M_PI)
      phase -= 2.0 * M_PI;
    if(phase > 4.0 * M_PI)
      phase = 0;
    envelope = 0.1*envelope + pastEnvelope*0.9;
    pastEnvelope = envelope;
    fa[n] += sin(phase)*mix*envelope;
  }
  fa.multiply(getParameterValue(PARAMETER_D)*10);
  fa.copyTo(buffer.getSamples(1));
  debugMessage("frequency/envelope: ", frequency, envelope);
  // float *coeffs = zcc.getFilter()->getFilterStage(0).getCoefficients();
  // debugMessage("coeffs: ", coeffs[3], coeffs[4], coeffs[2]);
}
void processAudio(AudioBuffer &buffer) {
  float delayTime, feedback, wetDry;
  delayTime = getParameterValue(PARAMETER_A);
  feedback = getParameterValue(PARAMETER_B);
  wetDry = getParameterValue(PARAMETER_D);
  int size = buffer.getSize();
  int32_t newDelay;
  if(abs(time - delayTime) > 0.01){
    newDelay = delayTime * (delayBuffer.getSize()-1);
    time = delayTime;
  }else{
    newDelay = delay;
  }
  float* x = buffer.getSamples(0);
  float y;
  for(int n = 0; n < size; n++){
    // y = buf[i] + feedback * delayBuffer.read(delay);
    // buf[i] = wetDry * y + (1.f - wetDry) * buf[i];
    // delayBuffer.write(buf[i]);
    if(newDelay - delay > 4){
      y = getDelayAverage(delay-5, 5);
      delay -= 5;
    }else if(delay - newDelay > 4){
      y = getDelayAverage(delay+5, 5);
      delay += 5;
    }else{
      y = delayBuffer.read(delay);
    }
    x[n] = wetDry * y + (1.f - wetDry) * x[n]; // crossfade for wet/dry balance
    delayBuffer.write(feedback * x[n]);
  }
}
void processAudio(AudioBuffer &buffer) {
  float paramA = getParameterValue(PARAMETER_A);
  float paramB = getParameterValue(PARAMETER_B);
  float paramC = getParameterValue(PARAMETER_C);
  float paramD = getParameterValue(PARAMETER_D);
  // Note: The 0.0 parameter is the timestamp at which to execute the message,
  // but in this case it simply means to execute it immediately. "f" says that
  // the message contains one element and its type is float. paramA is then the
  // value.
  hv_vscheduleMessageForReceiver(context, "Channel-A", 0.0, "f", paramA);
  hv_vscheduleMessageForReceiver(context, "Channel-B", 0.0, "f", paramB);
  hv_vscheduleMessageForReceiver(context, "Channel-C", 0.0, "f", paramC);
  hv_vscheduleMessageForReceiver(context, "Channel-D", 0.0, "f", paramD);
  // int nbSples = buffer.getSize()*buffer.getChannels();
  // int nbSples = buffer.getSize()*HEAVY_CHANNELS;
  // float* inputCopy = (float*)malloc(nbSples*sizeof(float));
  // memcpy(inputCopy, buffer.getSamples(0), nbSples*sizeof(float));
  // float** inputs = { &inputCopy, &inputCopy+getBlockSize()};
  float* outputs[] = {buffer.getSamples(0), buffer.getSamples(1)};
  hv_owl_process(context, outputs, outputs, getBlockSize());
}
bool parametersChanged() {
  return getParameterValue(PARAMETER_A) != knobs[PARAMETER_A] ||
         getParameterValue(PARAMETER_B) != knobs[PARAMETER_B] ||
         getParameterValue(PARAMETER_C) != knobs[PARAMETER_C] ||
         getParameterValue(PARAMETER_D) != knobs[PARAMETER_D] ||
         getParameterValue(PARAMETER_E) != knobs[PARAMETER_E];
}
void processAudio(AudioBuffer &buffer) {
  int size = buffer.getSize();
  float y;
  rate = Rate(getParameterValue(PARAMETER_A));
  depth = getParameterValue(PARAMETER_B);
  feedback = getParameterValue(PARAMETER_C);
  // calculate and update phaser sweep lfo...
  float d = _dmin + (_dmax-_dmin) * ((sin(_lfoPhase) + 1.f)/2.f);
  _lfoPhase += rate;
  if(_lfoPhase >= M_PI * 2.f)
    _lfoPhase -= M_PI * 2.f;
  // update filter coeffs
  for(int i = 0; i < 6; i++)
    _alps[i].Delay(d);
  // for (int ch = 0; ch<buffer.getChannels(); ++ch) {
  float* buf = buffer.getSamples(0);
  for(int i = 0; i < size; i++) {
    // calculate output
    y = _alps[0].Update(_alps[1].Update(_alps[2].Update(_alps[3].Update(_alps[4].Update(
        _alps[5].Update(buf[i] + _zm1 * feedback))))));
    _zm1 = y;
    buf[i] = buf[i] + y * depth;
    // }
  }
}
void ReceiverThread::transmitGroup() {
  uint8_t packet[18];
  int16_t value, current, voltage;
  int8_t checksum = 0, i;
  value = getParameterValue(parameter);
  voltage = getParameterValue(TOTAL_SPANNUNG);
  current = getParameterValue(IST_STROM);
  packet[ 0] = FRAME;
  packet[ 1] = address;
  packet[ 2] = TRM_DATA;
  packet[ 3] = 0;
  packet[ 4] = 0;
  packet[ 5] = current >> 8;
  packet[ 6] = current & 0xff;
  packet[ 7] = voltage >> 8;
  packet[ 8] = voltage & 0xff;
  packet[ 9] = value >> 8;
  packet[10] = value & 0xff;
  for(i = 2; i <= 10; i++)
    checksum ^= packet[i];
  packet[11] = checksum;
  length = frame_stuffing(packet, 12);
  uart_write(packet, length);
  logTransmit(packet, length);
}
void processAudio(AudioBuffer& buf){
  float minf = getParameterValue(PARAMETER_A)*0.1 + 0.001;
  float maxf = min(0.4, minf + getParameterValue(PARAMETER_B)*0.2);
  // range should be exponentially related to minf
  // int tones = getParameterValue(PARAMETER_C)*(TONES-1) + 1;
  int tones = 12;
  float spread = getParameterValue(PARAMETER_C) + 1.0;
  float rate = 1.0 + (getParameterValue(PARAMETER_D) - 0.5)*0.00002;
  int size = buf.getSize();
  FloatArray out = buf.getSamples(LEFT_CHANNEL);
  float amp;
  for(int t = 1; t < tones; ++t)
    inc[t] = inc[t-1]*spread;
  for(int i = 0; i < size; ++i){
    for(int t = 0; t < tones; ++t){
      amp = getAmplitude((inc[t]-minf)/(maxf-minf));
      out[i] += amp * getWave(acc[t]);
      acc[t] += inc[t];
      if(acc[t] > 1.0)
        acc[t] -= 1.0;
      else if(acc[t] < 0.0)
        acc[t] += 1.0;
      inc[t] *= rate;
    }
  }
  if(inc[0] > maxf)
    inc[0] = minf;
    // while(inc[0] > minf)
    //   inc[0] *= 0.5;
  else if(inc[0] < minf)
    inc[0] = maxf;
    // while(inc[0] < maxf)
    //   inc[0] *= 2.0;
}
void processAudio(AudioBuffer &buffer){
  float y[getBlockSize()];
  setCoeffs(getLpFreq(), 0.8f);
  float delayTime = getParameterValue(PARAMETER_A); // get delay time value
  float feedback = getParameterValue(PARAMETER_B);  // get feedback value
  float wetDry = getParameterValue(PARAMETER_D);    // get wet/dry mix value
  if(abs(time - delayTime) < 0.01)
    delayTime = time;
  else
    time = delayTime;
  float delaySamples = delayTime * (delayBuffer.getSize()-1);
  int size = buffer.getSize();
  float* x = buffer.getSamples(0);
  process(size, x, y); // low pass filter for delay buffer
  for(int n = 0; n < size; n++){
    // linear interpolation for delayBuffer index
    dSamples = olddelaySamples + (delaySamples - olddelaySamples) * n / size;
    y[n] = y[n] + feedback * delayBuffer.read(dSamples);
    x[n] = (1.f - wetDry) * x[n] + wetDry * y[n]; // crossfade for wet/dry balance
    delayBuffer.write(x[n]);
  }
  olddelaySamples = delaySamples;
}
// virtual
void CScanWidgetRandom::load(const CCopasiParameterGroup * pItem)
{
  if (pItem == NULL) return;

  *mpData = *pItem;

  if (mpData->getValue< unsigned C_INT32 >("Type") != CScanProblem::SCAN_RANDOM)
    return;

  const std::string String = mpData->getValue< std::string >("Object");

  if (String == "")
    mpObject = NULL;
  else
    {
      CDataModel* pDataModel = ListViews::dataModel(this);
      assert(pDataModel != NULL);
      mpObject = CObjectInterface::DataObject(pDataModel->getObjectFromCN(String));
    }

  if (mpObject)
    lineEditObject->setText(FROM_UTF8(mpObject->getObjectDisplayName()));
  else
    lineEditObject->setText("");

  comboBoxType->setCurrentIndex(mpData->getValue< unsigned C_INT32 >("Distribution type"));
  changeType();

  lineEditMin->setText(getParameterValue(mpData, "Minimum").toString());
  lineEditMax->setText(getParameterValue(mpData, "Maximum").toString());

  checkBoxLog->setChecked(mpData->getValue< bool >("log"));

  return;
}
void processAudio(AudioInputBuffer &input, AudioOutputBuffer &output){
  int totalBits = 8;
  float bitReduce = floor((getParameterValue(PARAMETER_A) * (totalBits - 1)) + .5);
  int bitStutter = (int)floor((getParameterValue(PARAMETER_B) * 50) + .5);
  int gain = (int)ceil((getParameterValue(PARAMETER_C) * 25) + .5);
  int crushedMax = pow(2, totalBits - bitReduce) - 1;
  int loopCount = bitStutter;
  float currentSample = 0;
  float* in = input.getSamples();
  float* out = output.getSamples();
  int size = input.getSize();
  for(int i = 0; i < size; i++) {
    if(--loopCount <= 0) {
      float x = in[i];
      x = (x + 1.0) * crushedMax;
      x = x > 0 ? floor(x + 0.5) : ceil(x - 0.5);
      x = (x / crushedMax) - 1.0;
      x = x * gain;
      currentSample = x;
      loopCount = bitStutter;
    }
    out[i] = currentSample;
  }
}
void Globals::cacheParameters() {
  cout << "ExtraActivationUpdates" << endl;
  extraActivationUpdates = int(getParameterValue("ExtraActivationUpdates"));
  if(getParameterValue("SignedActivation") > 0.5) {
    cout << "SignedActivation" << endl;
    signedActivation = true;
  } else {
    signedActivation = false;
  }
  if(hasParameterValue("UseTanhSigmoid") && getParameterValue("UseTanhSigmoid") > 0.5) {
    cout << "UseTanhSigmoid" << endl;
    useTanhSigmoid = true;
  } else {
    useTanhSigmoid = false;
  }
}
void processAudio(AudioBuffer &buffer){
  if(isButtonPressed(PUSHBUTTON))
    reset();
  dt = getParameterValue(PARAMETER_A)*getParameterValue(PARAMETER_A)*0.0250;
  float rotateX = getParameterValue(PARAMETER_B)*M_PI;
  float rotateY = getParameterValue(PARAMETER_C)*M_PI;
  float gainL, gainR;
  gainL = gainR = getParameterValue(PARAMETER_D)*2/25.0;
  int size = buffer.getSize();
  float* left = buffer.getSamples(0);
  float* right = buffer.getSamples(1);
  float dx, dy, dz;
  updateMatrix(rotateX, rotateY);
  for(int i = 0; i < size; i++){
    dx = a*(y - x);
    dy = (x * (c - z) - y);
    dz = (x*y - b * z);
    x += dx*dt;
    y += dy*dt;
    z += dz*dt;
    P[0] = x;
    P[1] = y;
    P[2] = z;
    rotateP();
    left[i] = Pprime[0] * gainL;
    right[i] = Pprime[1] * gainR;
  }
  // debugMessage("x/y/z", (float)x, (float)y, (float)z);
}
bool initialize(StateP state)
{
  voidP lBound = state->getGenotypes()[0]->getParameterValue(state, "lbound");
  lbound = *((double*) lBound.get());
  voidP uBound = state->getGenotypes()[0]->getParameterValue(state, "ubound");
  ubound = *((double*) uBound.get());
  voidP dimension_ = state->getGenotypes()[0]->getParameterValue(state, "dimension");
  dimension = *((uint*) dimension_.get());

  voidP dup_ = getParameterValue(state, "dup");
  dup = *((uint*) dup_.get());
  if( *((int*) dup_.get()) <= 0 ) {
    ECF_LOG(state, 1, "Error: opt-IA requires parameter 'dup' to be an integer greater than 0");
    throw "";
  }

  voidP c_ = getParameterValue(state, "c");
  c = *((double*) c_.get());
  if( c <= 0 ) {
    ECF_LOG(state, 1, "Error: opt-IA requires parameter 'c' to be a double greater than 0");
    throw "";
  }

  voidP tauB_ = getParameterValue(state, "tauB");
  tauB = *((double*) tauB_.get());
  if( tauB < 0 ) {
    ECF_LOG(state, 1, "Error: opt-IA requires parameter 'tauB' to be a nonnegative double value");
    throw "";
  }

  voidP elitism_ = getParameterValue(state, "elitism");
  elitism = *((string*) elitism_.get());
  if( elitism != "true" && elitism != "false" ) {
    ECF_LOG(state, 1, "Error: opt-IA requires parameter 'elitism' to be either 'true' or 'false'");
    throw "";
  }

  // algorithm accepts a single FloatingPoint Genotype
  FloatingPointP flp (new FloatingPoint::FloatingPoint);
  if(state->getGenotypes()[0]->getName() != flp->getName()) {
    ECF_LOG_ERROR(state, "Error: opt-IA algorithm accepts only a FloatingPoint genotype!");
    throw ("");
  }

  // algorithm adds another FloatingPoint genotype (age)
  FloatingPointP flpoint[2];
  for(uint iGen = 1; iGen < 2; iGen++) {
    flpoint[iGen] = (FloatingPointP) new FloatingPoint::FloatingPoint;
    state->setGenotype(flpoint[iGen]);
    flpoint[iGen]->setParameterValue(state, "dimension", (voidP) new uint(1));
    // initial value of age parameter should be (or as close as possible to) 0
    flpoint[iGen]->setParameterValue(state, "lbound", (voidP) new double(0));
    flpoint[iGen]->setParameterValue(state, "ubound", (voidP) new double(0.01));
  }
  ECF_LOG(state, 1, "opt-IA algorithm: added 1 FloatingPoint genotype (antibody age)");

  return true;
}
void processAudio(AudioBuffer &buffer){
  float cutoff = getParameterValue(PARAMETER_A);
  float resonance = 10*getParameterValue(PARAMETER_B);
  FloatArray fa = buffer.getSamples(0);
  // fa.noise();
  filter->setLowPass(cutoff, resonance);
  filter->process(fa, fa, fa.getSize());
  buffer.getSamples(1).copyFrom(fa);
}
void processAudio(AudioBuffer &buffer) {
  float* x = buffer.getSamples(0);
  float feedback = getParameterValue(PARAMETER_A);
  float mix = getParameterValue(PARAMETER_B);
  for(int n = 0; n < buffer.getSize(); n++){
    x[n] = delayBuffer.tail()*mix + x[n]*(1.0f-mix);
    delayBuffer.write(feedback * x[n]);
  }
}
void processAudio(AudioBuffer &buffer) {
  int size = buffer.getSize();
  float samp_float = getParameterValue(PARAMETER_A);
  int samp_freq = ceil(samp_float*63+0.1);
  float mayhem_rate = getParameterValue(PARAMETER_B);
  mayhem_rate *= 0.03;
  float mayhem = 1;
  if(abs(getParameterValue(PARAMETER_C)*2+1-prev_freq) > 0.01) // if the knob was turned
  {
    mayhem_freq = getParameterValue(PARAMETER_C); // update center frequency
    mayhem_freq *= 2;
    mayhem_freq += 1; // mayhem_freq range = 1 to 3 --> 375 -- 1125 Hz
    prev_freq = mayhem_freq; // store value to compare next time
  }
  float mayhem_depth = getParameterValue(PARAMETER_D);
  mayhem_depth *= depth;
  // for(int ch=0; ch<buffer.getChannels(); ++ch){
  float* buf = buffer.getSamples(0);
  for(int i = 0; i < size; ++i) {
    if(i % samp_freq == 0) {
      buf[i] = buf[i]*((1-mayhem)+mayhem*abs(cos(2*M_PI*mayhem_freq*(i+update_freq_cnt*size)/size)));
      samp = buf[i];
    }
    else
      buf[i] = samp;
    // buf[i] = samp*(1-mayhem)+buf[i]*mayhem*abs(cos(2*M_PI*mayhem_freq*(i+update_freq_cnt*size)/size));
  }
  // update_freq_cnt++;
  // if(update_freq_cnt == 10)
  {
    update_freq_cnt = 0;
    if(mayhem_freq >= prev_freq+mayhem_depth || mayhem_freq >= 3)
      inc_flag = 0; // sets maximum freq 3*fs/size = 1125 Hz
    if(mayhem_freq <= prev_freq-mayhem_depth || mayhem_freq <= 1)
      inc_flag = 1; // minimum freq that can be achieved in 128 samples is 375 Hz
    if(inc_flag == 0) {
      mayhem_freq /= 1+mayhem_rate*mayhem_depth/depth;
      // freq = floor(fs/size*mayhem_freq); // only integer frequencies
    }
    if(inc_flag == 1) {
      mayhem_freq *= 1+mayhem_rate*mayhem_depth/depth;
      // freq = ceil(fs/size*mayhem_freq); // only integer frequencies
    }
    // mayhem_freq = freq*size/fs; // only integer frequencies
  }
}
void processAudio(AudioBuffer &buffer) {
  float tune = getParameterValue(PARAMETER_A)*10.0 - 6.0;
  float fc = getParameterValue(PARAMETER_B)*10.0 - 4.0;
  float q = getParameterValue(PARAMETER_C)*3 + 0.75;
  float shape = getParameterValue(PARAMETER_E)*2;
  float pw = 0.5;
  if(shape > 1.0){
    pw += 0.49*(shape-1.0); // pw 0.5 to 0.99
    shape = 1.0; // square wave
  }
  float df = getParameterValue(PARAMETER_D)*4;
  int di = (int)df;
  float gain = 0.0f;
  switch(di){ // a/d
  case 0: // l/s
    env.setAttack(1.0-df);
    env.setRelease(0.0);
    break;
  case 1: // s/s
    env.setAttack(0.0);
    env.setRelease(df-1);
    break;
  case 2: // s/l
    env.setAttack(df-2);
    env.setRelease(1.0);
    break;
  case 3: // l/l
    env.setAttack(1.0);
    env.setRelease(1.0);
    gain = df-3;
    break;
  }
  env.trigger(isButtonPressed(PUSHBUTTON), getSamplesSinceButtonPressed(PUSHBUTTON));
  FloatArray left = buffer.getSamples(LEFT_CHANNEL);
  FloatArray right = buffer.getSamples(RIGHT_CHANNEL);
  // vco
  hz.setTune(tune);
  float lfreq = hz.getFrequency(left[0]);
  osc.setFrequency(lfreq);
  osc.setShape(shape);
  osc.setPulseWidth(pw);
  osc.getSamples(left);
  // vcf
  hz.setTune(fc);
  fc = hz.getFrequency(right[0]);
  fc = min(0.999, max(0.01, fc/(getSampleRate()*2))); // normalised and bounded
  filter->setLowPass(fc, q);
  right.copyFrom(left);
  filter->process(right);
  right.multiply(0.8-q*0.2); // gain compensation for high q
  // vca
  env.getEnvelope(envelope);
  envelope.add(gain);
  left.multiply(envelope);
  right.multiply(envelope);
}
void processAudio(AudioInputBuffer &input, AudioOutputBuffer &output){
  float drive = 1 + getParameterValue(PARAMETER_A) * 30; // get input drive value
  float gain = getParameterValue(PARAMETER_C) / 2.0;     // get output gain value
  int size = input.getSize();
  float* x = input.getSamples();
  float* y = output.getSamples();
  for(int i = 0; i < size; i++)
    y[i] = gain*clip(nonLinear((x[i])*drive)); // process each sample
}
void prepare(){
  float fc;
  fc = getParameterValue(PARAMETER_A);
  q = getParameterValue(PARAMETER_B);
  gain = getParameterValue(PARAMETER_D); // get gain value
  fc /= 2;
  f = sin(M_PI * fc);
  q = 1 - q;
  // fc = cutoff freq in Hz
  // fs = sampling frequency (e.g. 44100 Hz)
  // q = resonance/bandwidth [0 < q <= 1] most res: q=1, less: q=0
}
void processAudio(AudioBuffer &buffer) {
  float frequency = getParameterValue(PARAMETER_A) * 10000;
  float amplitude = getParameterValue(PARAMETER_B);
  float* left = buffer.getSamples(LEFT_CHANNEL);
  float linc = frequency/getSampleRate();
  int size = buffer.getSize();
  for(int n = 0; n < size; n++){
    left[n] = sinf(2*M_PI*pos) * amplitude;
    if((pos += linc) > 1.0f)
      pos -= 1.0f;
  }
}
void processAudio(AudioBuffer &buffer){
  eg1.setAttack(getParameterValue(PARAMETER_A)*2);
  eg1.setRelease(getParameterValue(PARAMETER_B)*2);
  eg2.setAttack(getParameterValue(PARAMETER_C)*2);
  eg2.setRelease(getParameterValue(PARAMETER_D)*2);
  FloatArray left = buffer.getSamples(LEFT_CHANNEL);
  FloatArray right = buffer.getSamples(RIGHT_CHANNEL);
  eg1.getEnvelope(left);
  eg2.getEnvelope(right);
  left.multiply(-1);
  right.multiply(-1);
}
void processAudio(AudioBuffer &buffer){
  assert_param(buffer.getChannels() > 1);
  float gainL = getParameterValue(PARAMETER_A)*2;
  float gainR = getParameterValue(PARAMETER_B)*2;
  int size = buffer.getSize();
  float* left = buffer.getSamples(0);
  float* right = buffer.getSamples(1);
  for(int i = 0; i < size; ++i){
    left[i] = gainL*left[i];
    right[i] = gainR*right[i];
  }
}
void processAudio(AudioBuffer &buffer) {
  int size = buffer.getSize();
  float reverb_scale = getParameterValue(PARAMETER_A); // get reverb length from knob
  if(reverb_scale < 0.1)
    reverb_scale = 0.1; // apply lower limit to reverb length
  reverb_time = round(reverb_scale*reverb_buf_length/2); // apply scaling factor to the window size to obtain reverb_time (in samples)
  int mod = reverb_time%size; // ensure that reverb_time is an even multiple of audio buffer size
  reverb_time -= mod;
  if(reverse_cnt > reverb_time)
    reverse_cnt = 0;
  float wet = getParameterValue(PARAMETER_B);   // get wet/dry mix from knob
  float level = getParameterValue(PARAMETER_C); // get output level from knob
  level *= 2;
  // for(int ch=0; ch<buffer.getChannels(); ch++)
  {
    float* buf = buffer.getSamples(0);
    for(int i = 0; i < size; i++) {
      reverb_buffer[reverb_buf_length-1-i-reverb_index*size] = reverb_buffer[i+reverb_index*size]; // load reverse into end of buffer
      reverb_buffer[i+reverb_index*size] = buf[i]; // load number of samples into the reverse buffer equal to size of audio buffer
      if(reverse_flag == 1) {
        buf[i] = level*((1-wet)*buf[i]+wet*reverb_buffer[reverb_buf_length-1-reverse_cnt]*abs(reverse_cnt-reverb_time)*reverse_cnt/reverb_time/200);
        reverse_cnt++;
        if(reverse_cnt == reverb_time) {
          reverse_cnt = 0;
          reverse_flag = 0;
          // reverb_index=0;
        }
      }
      else
        buf[i] = level*buf[i];
    }
    reverb_index++; // increment the window index
    if(reverse_flag == 0) {
      reverb_index = 0; // reset the window index to 0
      reverse_flag = 1; // set flag to trigger reverse
    }
  }
}
// return the feature dimensionality
int ConfigurationFeatures::getDimensionality() {
  int iStatic = atoi(getParameterValue("cepstralCoefficients"));
  if (strcmp(getParameterValue("energy"), "yes") == 0) {
    iStatic += 1;
  }
  if (isParameterSet("derivatives.order")) {
    return iStatic*(atoi(getParameterValue("derivatives.order"))+1);
  } else {
    assert(isParameterSet("spliced.size"));
    return iStatic*atoi(getParameterValue("spliced.size"));
  }
}
void processAudio(AudioBuffer &buffer){
  float freq = getParameterValue(PARAMETER_A)*6;
  float decay = getParameterValue(PARAMETER_C);
  freq = 110.f * powf(2, freq);
  osc->setFrequency(freq);
  osc->setDecay(decay);
  if(buttonstate != isButtonPressed(PUSHBUTTON)){
    buttonstate = isButtonPressed(PUSHBUTTON);
    if(buttonstate) // rising edge
      osc->trigger();
  }
  FloatArray left = buffer.getSamples(LEFT_CHANNEL);
  osc->getSamples(left);
}
void processAudio(AudioBuffer &buffer){
  float gain = getParameterValue(PARAMETER_A);
  gain = gain*gain*2.0;
  float iterations = getParameterValue(PARAMETER_B);
  float r = getParameterValue(PARAMETER_C)*(maxR-minR) + minR;
  iterations = iterations*iterations*maxI;
  int size = buffer.getSize();
  FloatArray left = buffer.getSamples(LEFT_CHANNEL);
  FloatArray right = buffer.getSamples(RIGHT_CHANNEL);
  float wet = getParameterValue(PARAMETER_D);
  for(int i = 0; i < size; i++){
    left[i] = processSample(gain*left[i], iterations, r) * wet + left[i]*(1-wet);
    right[i] = processSample(gain*right[i], iterations, r) * wet + right[i]*(1-wet);
  }
}
bool CScanWidgetRandom::initFromScanItem(CCopasiParameterGroup * pg, const CModel* model)
{
  if (!model) return false;

  mpModel = model;

  unsigned C_INT32 * tmp;

  if (!(tmp = pg->getValue("Type").pUINT)) return false;

  if (*(CScanProblem::Type *) tmp != CScanProblem::SCAN_RANDOM) return false;

  std::string *pString;

  if (!(pString = pg->getValue("Object").pSTRING)) return false;

  if (*pString == "")
    mpObject = NULL;
  else
    {
      assert(CCopasiRootContainer::getDatamodelList()->size() > 0);
      CCopasiDataModel* pDataModel = (*CCopasiRootContainer::getDatamodelList())[0];
      assert(pDataModel != NULL);
      mpObject = pDataModel->getObject(*pString);
    }

  if (mpObject)
    lineEditObject->setText(FROM_UTF8(mpObject->getObjectDisplayName()));
  else
    lineEditObject->setText("");

  if (!(tmp = pg->getValue("Distribution type").pUINT)) return false;

  comboBoxType->setCurrentItem(*tmp);
  changeType();

  lineEditMin->setText(getParameterValue(pg, "Minimum"));
  lineEditMax->setText(getParameterValue(pg, "Maximum"));

  bool * pBool;

  if (!(pBool = pg->getValue("log").pBOOL)) return false;

  checkBoxLog->setChecked(*pBool);

  return true;
}
void ParticleScriptCompiler::compileAffector(const ScriptNodePtr &node)
{
  if(node->children.empty() || node->children.front()->type != SNT_WORD)
    return;

  // Create the affector based on the first child
  ParticleAffector *affector = 0;
  String type = node->children.front()->token;
  try{
    affector = mSystem->addAffector(type);
  }catch(...){
    addError(CE_OBJECTALLOCATIONERROR, node->children.front()->file,
             node->children.front()->line, node->children.front()->column);
    return;
  }

  // Jump ahead now to the '{' as the affector does not support other parameters in the header
  ScriptNodeList::iterator i = findNode(node->children.begin(), node->children.end(), SNT_LBRACE);
  if(i == node->children.end())
    return;

  ScriptNodeList::iterator j = (*i)->children.begin();
  while(j != (*i)->children.end())
  {
    if(!processNode(j, (*i)->children.end()))
    {
      String name = (*j)->token,
             value = getParameterValue((*j)->children.begin(), (*j)->children.end());
      if(!affector->setParameter(name, value))
        addError(CE_INVALIDPROPERTY, (*j)->file, (*j)->line, (*j)->column);
      ++j;
    }
  }
}
XmlElement BiasedDelay::getStateInformation(){
  XmlElement state("BiasedDelayState");
  for (int i = 0; i < getNumParameters(); i++)
    state.setAttribute(String::formatted("parameter%d", i), getParameterValue(i));
    // state.setAttribute(getParameterName(i), getParameterValue(i));
  return state;
}