void simple_envelope::process(unsigned int buf_size, sample &in, sample &CV, bool smooth)
{
    // a bit of a crap filter to smooth clicks
    static float SMOOTH = 0.999;
    static float ONEMINUS_SMOOTH = 1-SMOOTH;
    float one_over_decay=1/m_decay;
    float temp=0;

    if (m_t==-1000)
    {
        in.zero();
        CV.zero();
        m_current=0;
        return;
    }

    for (unsigned int n=0; n<buf_size; n++)
    {
        // if we are in the delay (before really being triggered)
        if (m_t<0)
        {
            in[n]*=m_current;
            CV[n]=m_current;
        }
        else // in the envelope
        {
            // if we are in the envelope...
            if (m_t<m_decay)
            {
                // in the decay
                temp=(1-m_t*one_over_decay)*m_volume;

                if (!feq(temp,m_current,0.01) && smooth)
                {
                    // only filter if necc
                    temp=(temp*ONEMINUS_SMOOTH+m_current*SMOOTH);
                }

                in[n]*=temp;
                CV[n]=temp;
                m_current=temp;
            }
            else
            {
                in[n]*=0;
                CV[n]=0;
                m_current=0;

                // we've run off the end
                m_t=-1000;
            }
        }

        m_t+=m_sample_time;
    }
}
void sample :: append(const sample& s)
{
    if (!assertWarning(rate() == s.rate(), "append failed: different rates") ||
        !assertWarning(channels() == s.channels(), "append failed: different channel counts"))
        return;
    audioSample* snd = new audioSample[audioSize() + s.audioSize()];
    memcpy(snd, data, bytes());
    memcpy(snd + audioSize(), s.data, s.bytes());
    delete[] data;
    data = snd;
    info.length += s.length();
} // append()
void sample :: paste(const sample& clip, int start, bool replaceFlag)
{
    if (!assertWarning(rate() == clip.rate(), "paste failed: different rates") ||
        !assertWarning(channels() == clip.channels(), "paste failed: different channel counts"))
        return;
    int limit = clip.length();
    if (start + limit > length())
        limit = length() - start;
    if (replaceFlag)
        memcpy(data + start * channels(), clip.data,
               limit * channels() * sizeof(audioSample));
    else {
        // mix into the existing audio; offset is in interleaved samples
        int offset = start * channels();
        for (int i = 0; i < limit * channels(); i++)
            data[offset + i] = audioLimit(clip.data[i] + data[offset + i]);
    }
} // paste()
int sample :: diff(const sample& t) const
{
    if (!assertWarning(rate() == t.rate(), "diff: sample rates") ||
        !assertWarning(length() == t.length(), "diff: different lengths") ||
        !assertWarning(channels() == t.channels(), "diff: channel counts"))
        return 1;
    int diffs = 0;
    for (int i = 0; (i < audioSize()) && (diffs < 10); i++)
        if (data[i] != t.data[i]) {
            if (parameters->debug("sample", "basic"))
                cerr << "Data differs at " << i << " of " << audioSize() << endl;
            diffs++;
        }
    if (parameters->debug("sample", "basic") && !diffs)
        cerr << "No diffs" << endl;
    return diffs;
} // diff()
sample transition(sample& init_sample) {
  this->sample_stepsize();

  this->seed(init_sample.cont_params());

  this->hamiltonian_.sample_p(this->z_, this->rand_int_);
  this->hamiltonian_.init(this->z_);

  ps_point z_init(this->z_);
  double H0 = this->hamiltonian_.H(this->z_);

  for (int i = 0; i < L_; ++i)
    this->integrator_.evolve(this->z_, this->hamiltonian_, this->epsilon_);

  double h = this->hamiltonian_.H(this->z_);
  if (boost::math::isnan(h))
    h = std::numeric_limits<double>::infinity();

  double acceptProb = std::exp(H0 - h);

  if (acceptProb < 1 && this->rand_uniform_() > acceptProb)
    this->z_.ps_point::operator=(z_init);

  acceptProb = acceptProb > 1 ? 1 : acceptProb;

  return sample(this->z_.q, - this->hamiltonian_.V(this->z_), acceptProb);
}
void play(double *output) {
    // this is where the magic happens. Very slow magic.
    temp=beats.play(1.,0,beats.length());
    filtered=beat.play(1.);

    // now we send the sounds to some stereo busses.
    // mymix.stereo(more+mixed+delayed, outputs, 1-pan);
    bobbins.stereo(temp+filtered, moreoutputs, 0.5); // invert the pan

    // mixing
    output[0]=moreoutputs[0]; // stick it in the out!!
    output[1]=moreoutputs[1];
}
// soft-clipping waveshaper: drive rises with amount, output is rescaled
// by (1-amount) to keep the level roughly constant
void distort(sample &buf, float amount)
{
    if (amount>=0.99) amount = 0.99;
    float k=2*amount/(1-amount);
    for(unsigned int i=0; i<buf.get_length(); i++)
    {
        buf[i]=((1+k)*buf[i]/(1+k*fabs(buf[i])))*(1-amount);
    }
}
void moving_distort(sample &buf, const sample &amount)
{
    for(unsigned int i=0; i<buf.get_length(); i++)
    {
        float a=fabs(amount[i]);
        if (a>0.99) a = 0.99;
        float k=2*a/(1-a);
        buf[i]=((1+k)*buf[i]/(1+k*fabs(buf[i])))*(1-a);
    }
}
void moving_hard_clip(sample &buf, const sample &level)
{
    for(unsigned int i=0; i<buf.get_length(); i++)
    {
        float l=fabs(level[i]);
        if (feq(l,0,0.0001)) l=0.0001;
        if (buf[i]>l) buf[i]=l;
        if (buf[i]<-l) buf[i]=-l;
        buf[i]*=1/l;
    }
}
void hard_clip(sample &buf, float level)
{
    // clamp the threshold away from zero to avoid dividing by zero below
    if (feq(level,0,0.0001)) level=0.0001;
    for(unsigned int i=0; i<buf.get_length(); i++)
    {
        if (buf[i]>level) buf[i]=level;
        if (buf[i]<-level) buf[i]=-level;
        buf[i]*=1/level;
    }
}
void test_sample_is_empty(const sample &sam, bool is_empty)
{
    assert(sam.empty() == is_empty);
    cout << " Testing empty() of: " << sam;
    if(is_empty){
        cout << " is true";
    } else {
        cout << " is false";
    }
    cout << endl;
}
void connection::notify_listeners(const sample& sample)
{
    switch(sample.getType())
    {
    case sample_type::new_frame:
        notify_listeners_about_new_frame();
        break;
    case sample_type::allocation:
        notify_listeners_about_allocation(sample);
        break;
    }
}
// bit crusher: holds each value for 1/freq samples (sample-rate reduction)
// and quantises it to a step size of 2^-bits
void crush(sample &buf, float freq, float bits)
{
    float step = pow((float)0.5,(float)bits);
    float phasor = 1;
    float last = 0;
    for(unsigned int i=0; i<buf.get_length(); i++)
    {
        phasor = phasor + freq;
        if (phasor >= 1.0)
        {
            phasor = phasor - 1.0;
            last = step * floor( buf[i]/step + 0.5 );
        }
        buf[i] = last;
    }
}
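The effects above all process a buffer in place and share the same calling convention, so they can be chained per block. A minimal sketch, assuming only that `sample` exposes `get_length()` and `operator[]` as used above; the helper name and parameter values are illustrative, not part of the original code:

// Hypothetical helper: chains the waveshaping effects above over one buffer.
// Parameter values are illustrative only.
void process_block(sample &buf)
{
    distort(buf, 0.6f);        // soft-clipping waveshaper, drive in [0,1)
    crush(buf, 0.25f, 6.0f);   // hold every 4th value, quantise to ~6 bits
    hard_clip(buf, 0.8f);      // clamp to +/-0.8, then rescale to +/-1
}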
sample operator-(sample a, sample b) { return sample{a.value() - b.value()}; }
sample operator/(sample a, sample b) { return sample{a.value() / b.value()}; }
sample sample::operator*(sample other) const {
    sample result;
    result.value_ = value() * other.value();  // can't overflow
    return result;
}
sample operator+(sample a, sample b) { return sample{a.value() + b.value()}; }
// Sample Tests
void test_sample_size(const sample &sam, uint size)
{
    vector<double> data(sam.get_data());
    assert(data.size() == size);
    cout << " Testing size() of: " << sam << " is " << size
         << "(" << data.size() << ")" << endl;
}
sample interpolate(sample a, sample weight, sample b) {
    return sample{(1 - weight.value()) * a.value() + weight.value() * b.value()};
}
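The arithmetic helpers above treat `sample` as a small value wrapper. A minimal usage sketch, assuming `sample{x}` brace-constructs from a numeric level (as the operators above already do) and that `value()` returns it; the function name is illustrative only:

// Illustrative only: exercises the value-type sample helpers defined above.
sample halfway_example()
{
    sample a{0.25f};
    sample b{0.75f};
    sample sum = a + b;                            // 1.0
    sample mid = interpolate(a, sample{0.5f}, b);  // (1-0.5)*0.25 + 0.5*0.75 = 0.5
    return mid / sum;                              // 0.5
}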
sample transition(sample& init_sample) {
  // Initialize the algorithm
  this->sample_stepsize();

  nuts_util util;

  this->seed(init_sample.cont_params());

  this->_hamiltonian.sample_p(this->_z, this->_rand_int);
  this->_hamiltonian.init(this->_z);

  ps_point z_plus(this->_z);
  ps_point z_minus(z_plus);

  ps_point z_sample(z_plus);
  ps_point z_propose(z_plus);

  int n_cont = init_sample.cont_params().size();

  Eigen::VectorXd rho_init = this->_z.p;
  Eigen::VectorXd rho_plus(n_cont);
  rho_plus.setZero();
  Eigen::VectorXd rho_minus(n_cont);
  rho_minus.setZero();

  util.H0 = this->_hamiltonian.H(this->_z);

  // Sample the slice variable
  util.log_u = std::log(this->_rand_uniform());

  // Build a balanced binary tree until the NUTS criterion fails
  util.criterion = true;
  int n_valid = 0;

  this->_depth = 0;
  this->_n_divergent = 0;

  util.n_tree = 0;
  util.sum_prob = 0;

  while (util.criterion && (this->_depth <= this->_max_depth)) {
    // Randomly sample a direction in time
    ps_point* z = 0;
    Eigen::VectorXd* rho = 0;

    if (this->_rand_uniform() > 0.5) {
      z = &z_plus;
      rho = &rho_plus;
      util.sign = 1;
    } else {
      z = &z_minus;
      rho = &rho_minus;
      util.sign = -1;
    }

    // And build a new subtree in that direction
    this->_z.ps_point::operator=(*z);

    int n_valid_subtree = build_tree(_depth, *rho, 0, z_propose, util);

    *z = this->_z;

    // Metropolis-Hastings sample the fresh subtree
    if (!util.criterion)
      break;

    double subtree_prob = 0;

    if (n_valid) {
      subtree_prob = static_cast<double>(n_valid_subtree)
                     / static_cast<double>(n_valid);
    } else {
      subtree_prob = n_valid_subtree ? 1 : 0;
    }

    if (this->_rand_uniform() < subtree_prob)
      z_sample = z_propose;

    n_valid += n_valid_subtree;

    // Check validity of completed tree
    this->_z.ps_point::operator=(z_plus);

    Eigen::VectorXd delta_rho = rho_minus + rho_init + rho_plus;

    util.criterion = compute_criterion(z_minus, this->_z, delta_rho);

    ++(this->_depth);
  }

  --(this->_depth);  // Correct for increment at end of loop

  double accept_prob = util.sum_prob / static_cast<double>(util.n_tree);

  this->_z.ps_point::operator=(z_sample);
  return sample(this->_z.q, - this->_z.V, accept_prob);
}
void test_sample_max(const sample &sam, double max)
{
    double epsilon = 0.0001;
    assert(abs(sam.maximum() - max) < epsilon);
    cout << " Testing maximum() of: " << sam << " is " << max
         << "(" << sam.maximum() << ")" << endl;
}
static color32::byte to_byte(sample s) noexcept {
    return static_cast<color32::byte>(color32::BYTE_MAX * s.value());
}
sample transition(sample& init_sample) {
  this->seed(init_sample.cont_params(), init_sample.disc_params());
  return sample(this->_z.q, this->_z.r,
                - this->_hamiltonian.V(this->_z), 0);
}
void envelope::process(unsigned int buf_size, sample &CV, bool smooth)
{
    if (m_attack==0 && m_decay==0 && m_release==0)
    {
        return;
    }

    smooth=true;

    // a bit of a crap filter to smooth clicks
    static float SMOOTH = 0.98;
    static float ONEMINUS_SMOOTH = 1-SMOOTH;
    float temp=0;
    bool freeze=false;
    float nt;

    if (m_t==-1000)
    {
        CV.zero();
        m_current=0;
        return;
    }

    for (unsigned int n=0; n<buf_size; n++)
    {
        // if we are in the delay (before really being triggered)
        if (m_t<0)
        {
            float temp=0;
            if (!feq(temp,m_current,0.01) && smooth)
            {
                // only filter if necc
                temp=(temp*ONEMINUS_SMOOTH+m_current*SMOOTH);
            }
            CV[n]=temp;
            m_current=temp;
            m_t+=m_sample_time;
        }
        else // in the envelope
        {
            // if we are in the envelope...
            if (m_t>=0 && m_t<m_attack+m_decay+m_release)
            {
                // find out what part of the envelope we are in
                // in the attack
                if (m_t<m_attack)
                {
                    // get normalised position to
                    // get the volume between 0 and 1
                    temp=m_t/m_attack;
                }
                else // in the decay
                if (m_t<m_attack+m_decay)
                {
                    // normalised position in m_attack->m_decay range
                    nt=(m_t-m_attack)/m_decay;
                    // volume between 1 and m_sustain
                    temp=(1-nt)+(m_sustain*nt);
                }
                else // in the release
                {
                    // normalised position in m_decay->m_release range
                    nt=(m_t-(m_attack+m_decay))/m_release;
                    // volume between m_sustain and 0
                    temp=m_sustain*(1-nt);
                    if (m_release<0.2f)
                    {
                        temp=m_sustain;
                    }
                    //if (m_trigger) freeze=true;
                }

                temp*=m_volume;

                if (!feq(temp,m_current,0.01) && smooth)
                {
                    // only filter if necc
                    temp=(temp*ONEMINUS_SMOOTH+m_current*SMOOTH);
                }

                CV[n]=temp;
                m_current=temp;

                if (!freeze) m_t+=m_sample_time;
            }
            else
            {
                if (!feq(temp,m_current,0.01) && smooth)
                {
                    temp=m_current*SMOOTH;
                }
                CV[n]=temp;
                m_current=temp;

                // if we've run off the end
                if (m_t>m_attack+m_decay+m_release)
                {
                    m_t=-1000;
                }
            }
        }
    }
}
void setup() {
    // some inits
    beats.load("/Users/mick/Desktop/beat2.wav");
    beat.load("/Users/mick/Desktop/remoteatmos.wav");
}
void test_sample_midrange(const sample &sam, double midrange)
{
    double epsilon = 0.0001;
    assert(abs(sam.midrange() - midrange) < epsilon);
    cout << " Testing midrange() of: " << sam << " is " << midrange
         << "(" << sam.midrange() << ")" << endl;
}
sample transition(sample& init_sample,
                  interface_callbacks::writer::base_writer& info_writer,
                  interface_callbacks::writer::base_writer& error_writer) {
  // Initialize the algorithm
  this->sample_stepsize();

  nuts_util util;

  this->seed(init_sample.cont_params());

  this->hamiltonian_.sample_p(this->z_, this->rand_int_);
  this->hamiltonian_.init(this->z_, info_writer, error_writer);

  ps_point z_plus(this->z_);
  ps_point z_minus(z_plus);

  ps_point z_sample(z_plus);
  ps_point z_propose(z_plus);

  int n_cont = init_sample.cont_params().size();

  Eigen::VectorXd rho_init = this->z_.p;
  Eigen::VectorXd rho_plus(n_cont);
  rho_plus.setZero();
  Eigen::VectorXd rho_minus(n_cont);
  rho_minus.setZero();

  util.H0 = this->hamiltonian_.H(this->z_);

  // Sample the slice variable
  util.log_u = std::log(this->rand_uniform_());

  // Build a balanced binary tree until the NUTS criterion fails
  util.criterion = true;
  int n_valid = 0;

  this->depth_ = 0;
  this->divergent_ = 0;

  util.n_tree = 0;
  util.sum_prob = 0;

  while (util.criterion && (this->depth_ <= this->max_depth_)) {
    // Randomly sample a direction in time
    ps_point* z = 0;
    Eigen::VectorXd* rho = 0;

    if (this->rand_uniform_() > 0.5) {
      z = &z_plus;
      rho = &rho_plus;
      util.sign = 1;
    } else {
      z = &z_minus;
      rho = &rho_minus;
      util.sign = -1;
    }

    // And build a new subtree in that direction
    this->z_.ps_point::operator=(*z);

    int n_valid_subtree = build_tree(depth_, *rho, 0, z_propose,
                                     util, info_writer, error_writer);
    ++(this->depth_);

    *z = this->z_;

    // Metropolis-Hastings sample the fresh subtree
    if (!util.criterion)
      break;

    double subtree_prob = 0;

    if (n_valid) {
      subtree_prob = static_cast<double>(n_valid_subtree)
                     / static_cast<double>(n_valid);
    } else {
      subtree_prob = n_valid_subtree ? 1 : 0;
    }

    if (this->rand_uniform_() < subtree_prob)
      z_sample = z_propose;

    n_valid += n_valid_subtree;

    // Check validity of completed tree
    this->z_.ps_point::operator=(z_plus);

    Eigen::VectorXd delta_rho = rho_minus + rho_init + rho_plus;

    util.criterion = compute_criterion(z_minus, this->z_, delta_rho);
  }

  this->n_leapfrog_ = util.n_tree;

  double accept_prob = util.sum_prob / static_cast<double>(util.n_tree);

  this->z_.ps_point::operator=(z_sample);
  this->energy_ = this->hamiltonian_.H(this->z_);
  return sample(this->z_.q, - this->z_.V, accept_prob);
}
void test_sample_mean(const sample &sam, double mean)
{
    double epsilon = 0.0001;
    assert(abs(sam.mean() - mean) < epsilon);
    cout << " Testing mean() of: " << sam << " is " << mean
         << "(" << sam.mean() << ")" << endl;
}
void test_sample_variance(const sample &sam, double variance)
{
    double epsilon = 0.0001;
    assert(abs(sam.variance() - variance) < epsilon);
    cout << " Testing variance() of: " << sam << " is " << variance
         << "(" << sam.variance() << ")" << endl;
}
void test_sample_std_dev(const sample &sam, double std_deviation)
{
    double epsilon = 0.0001;
    assert(abs(sam.std_deviation() - std_deviation) < epsilon);
    cout << " Testing std_deviation() of: " << sam << " is " << std_deviation
         << "(" << sam.std_deviation() << ")" << endl;
}
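The statistics test helpers above all follow one pattern: assert within an epsilon of 0.0001, then echo the expected value against the computed one. A minimal driver sketch; the `sample` constructor taking a `vector<double>` is an assumption for illustration, not shown in this code:

// Hypothetical driver for the test helpers above. The vector-based
// sample constructor is an assumption about the class, not confirmed here.
void run_sample_stat_tests()
{
    sample s(vector<double>{1.0, 2.0, 3.0, 4.0});
    test_sample_is_empty(s, false);
    test_sample_size(s, 4);
    test_sample_mean(s, 2.5);       // (1+2+3+4)/4
    test_sample_midrange(s, 2.5);   // (min + max) / 2
    test_sample_max(s, 4.0);
    // test_sample_variance() and test_sample_std_dev() follow the same pattern.
}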