//------------------------------------------------------------
bool ofxSoundFile::mpg123ReadFile(ofSoundBuffer & buffer){
	size_t done=0;
	size_t block_size = mpg123_outblock( mp3File );
	int err;
	if(buffer.size()==0){
		// decode the whole file, growing the buffer one block at a time
		do{
			buffer.resize(buffer.size()+block_size);
			err = mpg123_read(mp3File,(unsigned char*)&buffer[buffer.size()-block_size],block_size*sizeof(float),&done);
		}while(err==MPG123_OK);
		// trim the unfilled tail of the last block (mpg123 reports "done" in bytes)
		buffer.resize(buffer.size()-(block_size-done/sizeof(float)));
		if(err != MPG123_DONE){
			ofLogError() << "Decoding ended prematurely because: " << (err == MPG123_ERR ? mpg123_strerror(mp3File) : mpg123_plain_strerror(err));
			return false;
		}
		duration = float(buffer.size())/float(channels) / float(samplerate);
	}else{
	}else{
		err = mpg123_read(mp3File,(unsigned char*)&buffer[0],buffer.size()*sizeof(float),&done);
		if(err != MPG123_OK){
			// MPG123_DONE just signals the end of the file; anything else is a real decode error
			if(err != MPG123_DONE){
				ofLogError() << "Error decoding mp3: " << (err == MPG123_ERR ? mpg123_strerror(mp3File) : mpg123_plain_strerror(err));
			}
			return false;
		}
	}

	return true;
}
//--------------------------------------------------------------
bool ofxSoundFile::save(string path, const ofSoundBuffer &buff){
	// check that we're writing a wav and complain if the file extension is wrong.
	ofFile f(path);
	if(ofToLower(f.getExtension())!="wav") {
		path += ".wav";
		ofLogWarning() << "Can only write wav files - will save file as " << path;
	}
	
	fstream file(ofToDataPath(path).c_str(), ios::out | ios::binary);
	if(!file.is_open()) {
		ofLogError() << "Error opening sound file '" << path << "' for writing";
		return false;
	}
	
	// write a wav header
	short myFormat = 1; // for pcm
	int mySubChunk1Size = 16;
	int bitsPerSample = 16; // assume 16 bit pcm
	int myByteRate = buff.getSampleRate() * buff.getNumChannels() * bitsPerSample/8;
	short myBlockAlign = buff.getNumChannels() * bitsPerSample/8;
	int myChunkSize = 36 + buff.size()*bitsPerSample/8;
	int myDataSize = buff.size()*bitsPerSample/8;
	short channels = (short) buff.getNumChannels(); // written as a 2-byte header field below
	short bitDepth = (short) bitsPerSample;         // written as a 2-byte header field below
	int samplerate = buff.getSampleRate();
	
	file.seekp (0, ios::beg);
	file.write ("RIFF", 4);
	file.write ((char*) &myChunkSize, 4);
	file.write ("WAVE", 4);
	file.write ("fmt ", 4);
	file.write ((char*) &mySubChunk1Size, 4);
	file.write ((char*) &myFormat, 2); // should be 1 for PCM
	file.write ((char*) &channels, 2); // # channels (1 or 2)
	file.write ((char*) &samplerate, 4); // 44100
	file.write ((char*) &myByteRate, 4); //
	file.write ((char*) &myBlockAlign, 2);
	file.write ((char*) &bitDepth, 2); //16
	file.write ("data", 4);
	file.write ((char*) &myDataSize, 4);
	
	// write the sample data as 16-bit pcm, 4096 samples at a time.
	#define WRITE_BUFF_SIZE 4096
	
	short writeBuff[WRITE_BUFF_SIZE];
	int pos = 0;
	while(pos<buff.size()) {
		int len = MIN(WRITE_BUFF_SIZE, buff.size()-pos);
		for(int i = 0; i < len; i++) {
			writeBuff[i] = (short)(ofClamp(buff[pos], -1.f, 1.f) * 32767.f); // clamp to avoid wrap-around
			pos++;
		}
		file.write((char*)writeBuff, len*bitsPerSample/8);
	}
	
	file.close();
	return true;
}
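//--------------------------------------------------------------
// Usage sketch (not part of the original examples): writes a one-second 440 Hz
// test tone with the save() function above. The helper function, the buffer
// contents and the "tone.wav" path are made up for illustration.
void saveToneExample(){
	ofSoundBuffer buf;
	buf.setNumChannels(1);
	buf.setSampleRate(44100);
	buf.resize(44100); // one second of mono audio
	for(size_t i = 0; i < buf.size(); i++){
		buf[i] = 0.5 * sin(TWO_PI * 440.0 * i / 44100.0); // quiet 440 Hz sine
	}
	ofxSoundFile soundFile;
	soundFile.save("tone.wav", buf);
}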
bool ofxSoundFile::sfReadFile(ofSoundBuffer & buffer){
	samples_read = sf_read_float (sndFile, &buffer[0], buffer.size());
	/*if(samples_read<(int)buffer.size()){
		ofLogError() <<  "ofxSoundFile: couldnt read " << path;
		return false;
	}*/
	if (subformat == SF_FORMAT_FLOAT || subformat == SF_FORMAT_DOUBLE){
		for (int i = 0 ; i < int(buffer.size()) ; i++){
			buffer[i] *= scale ;
		}
	}
	return true;
}
Example #4
//--------------------------------------------------------------
void ofApp::audioOut(ofSoundBuffer & buffer){
    
    if(spectrum->playing){
        
        for (int i = 0; i < buffer.getNumFrames(); i++){
            
            wave = 0.0;
            
            for(int n=0; n<BIT; n++){
                
                if (amp[n]>0.00001) {
                    phases[n] += 512./(44100.0/(hertzScale[n]));
                    
                    if ( phases[n] >= 511 ) phases[n] -=512;
                    if ( phases[n] < 0 ) phases[n] = 0;
                    
                    //remainder = phases[n] - floor(phases[n]);
                    //wave+=(float) ((1-remainder) * sineBuffer[1+ (long) phases[n]] + remainder * sineBuffer[2+(long) phases[n]])*amp[n];
                    
                    wave += ( sineBuffer[1 + (long) phases[n]] ) * amp[n];
                }
            }
            
            wave/=10.0;
            if(wave>1.0) wave=1.0;
            if(wave<-1.0) wave=-1.0;
            
            buffer[i * buffer.getNumChannels()    ] = wave * volume;
            buffer[i * buffer.getNumChannels() + 1] = wave * volume;
        }
        
        
    } else {
        // nothing is playing: fill the buffer with silence so stale samples are not output
        for (size_t j = 0; j < buffer.size(); j++){
            buffer[j] = 0;
        }
    }
}
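//--------------------------------------------------------------
// Wiring note (not part of the original examples): audioOut()/audioIn() callbacks
// like the ones in these examples are driven by an ofSoundStream. A minimal setup
// sketch, assuming an ofApp with an ofSoundStream member named soundStream; the
// exact setup overload available depends on the openFrameworks version, and an
// audioIn() example would request input channels instead.
void ofApp::setup(){
	// 2 output channels, 0 input channels, 44100 Hz, 512-sample buffers, 4 buffers
	soundStream.setup(this, 2, 0, 44100, 512, 4);
}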
Example #5
void ofApp::audioOut(ofSoundBuffer& buffer){
    for (unsigned i = 0 ; i<bufferSize; i++) {
        currentSample = (osc.square(frequency))/2;
        buffer[i * buffer.getNumChannels()] = currentSample;
        buffer[i * buffer.getNumChannels()+1] = currentSample;
    }
}
Example #6
void WaveStripe::newSoundBuffer(ofSoundBuffer & buffer){
	float ampL = buffer.getRMSAmplitudeChannel(0);
	float ampR = buffer.getRMSAmplitudeChannel(1);
	mutex.lock();
	amplitudesLeftBack.push_back(ampL);
	amplitudesRightBack.push_back(ampR);
	mutex.unlock();
}
Example #7
void ofApp::audioOut(ofSoundBuffer &buffer) {
    for (unsigned i = 0; i<bufferSize; i++) {
        currentSample = sample.playOnce(1.0);
        mix.stereo(currentSample, outputs, 0.5);
        buffer[i*buffer.getNumChannels()] = outputs[0];
        buffer[i*buffer.getNumChannels()+1] = outputs[1];
    }
}
Example #8
//--------------------------------------------------------------
void ofApp::audioIn(ofSoundBuffer & input){
	sb = input;
	// assumes stereo input; left, right and mono must be allocated
	// to at least input.getNumFrames() samples before this is called
	for (size_t i = 0; i < input.getNumFrames(); i++){
		left[i] = input[i*2];
		right[i] = input[i*2+1];
		mono[i] = (left[i] + right[i]) / 2;
	}
}
Example #9
void ofApp::audioOut(ofSoundBuffer &buffer){
    for (unsigned i = 0; i<bufferSize; i++){
        currentSample = osc.sinewave(freq + env.adsr(mod.sinewave(modFreq), env.trigger) * modIndex);
        
        mix.stereo(currentSample, outputs, 0.5);
        buffer[i*buffer.getNumChannels()] = outputs[0];
        buffer[i*buffer.getNumChannels()+1] = outputs[1];
    }
}
Example #10
void ofApp::audioOut(ofSoundBuffer &buffer){
    for (unsigned i = 0; i < bufferSize; i++){
        currentSample = osc.saw(frequency) * modulator.sinewave(modSpeed);
//        currentSample = osc.saw(frequency) * modulator.square(modSpeed);
        mix.stereo(currentSample, outputs, 0.5);
        
        buffer[i*buffer.getNumChannels()] = outputs[0];
        buffer[i*buffer.getNumChannels() + 1] = outputs[1];
    }
}
Example #11
//--------------------------------------------------------------
void ofApp::audioOut(ofSoundBuffer &outBuffer) {
	
	// base frequency of the lowest sine wave in cycles per second (hertz)
	float frequency = 172.5;
	
	// mapping frequencies from Hz into full oscillations of sin() (two pi)
	float wavePhaseStep = (frequency / sampleRate) * TWO_PI;
	float pulsePhaseStep = (0.5 / sampleRate) * TWO_PI;
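	// e.g. with sampleRate = 44100, the 172.5 Hz wave advances by
	// (172.5 / 44100) * TWO_PI, i.e. about 0.0246 radians per sample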
	
	// this loop builds a buffer of audio containing 3 sine waves at different
	// frequencies, and pulses the volume of each sine wave individually. In
	// other words, 3 oscillators and 3 LFOs.
	
	for(int i = 0; i < outBuffer.getNumFrames(); i++) {
		
		// build up a chord out of sine waves at 3 different frequencies
		float sampleLow = sin(wavePhase);
		float sampleMid = sin(wavePhase * 1.5);
		float sampleHi = sin(wavePhase * 2.0);
		
		// pulse each sample's volume
		sampleLow *= sin(pulsePhase);
		sampleMid *= sin(pulsePhase * 1.04);
		sampleHi *= sin(pulsePhase * 1.09);
		
		float fullSample = (sampleLow + sampleMid + sampleHi);
		
		// reduce the full sample's volume so it doesn't exceed 1
		fullSample *= 0.3;
		
		// write the computed sample to the left and right channels
		outBuffer.getSample(i, 0) = fullSample;
		outBuffer.getSample(i, 1) = fullSample;
		
		// get the two phase variables ready for the next sample
		wavePhase += wavePhaseStep;
		pulsePhase += pulsePhaseStep;
	}
	
	ofScopedLock lock(audioMutex);
	lastBuffer = outBuffer;
}
void ofxBasicSoundPlayer::audioOut(ofSoundBuffer& outputBuffer){
	if(bIsPlaying){
		int nFrames = outputBuffer.getNumFrames();
		int nChannels = outputBuffer.getNumChannels();
		if (playerNumChannels != nChannels || playerNumFrames != nFrames || playerSampleRate != outputBuffer.getSampleRate()) {
			audioOutBuffersChanged(nFrames, nChannels, outputBuffer.getSampleRate());
		}
		if(streaming){
			int samplesRead = soundFile.readTo(buffer,nFrames);
			if ( samplesRead==0 ){
				bIsPlaying=false;
				soundFile.seekTo(0);
			}else{
				buffer.stereoPan(volumesLeft.back(),volumesRight.back());
				newBufferE.notify(this,buffer);
				buffer.copyTo(outputBuffer);
			}
		}else{
			if (positions.size() == 1 && abs(speed - 1)<FLT_EPSILON) {
				buffer.copyTo(outputBuffer,nFrames,nChannels,positions[0],loop);
			}else{
				for(int i=0;i<(int)positions.size();i++){
					//assert( resampledBuffer.getNumFrames() == bufferSize*relativeSpeed[i] );
					if(abs(relativeSpeed[i] - 1)<FLT_EPSILON){
						buffer.copyTo(resampledBuffer,nFrames,nChannels,positions[i],loop);
					}else{
						buffer.resampleTo(resampledBuffer,positions[i],nFrames,relativeSpeed[i],loop, ofSoundBuffer::Linear);
					}
					resampledBuffer.stereoPan(volumesLeft[i],volumesRight[i]);
					newBufferE.notify(this,resampledBuffer);
					resampledBuffer.addTo(outputBuffer,0,loop);
				}
			}
			updatePositions(nFrames);
		}
	}
}
Example #13
			//----------
			void Focus::audioOut(ofSoundBuffer & out) {
				int intervalFrames;
				{
					lock_guard<mutex> lock(this->resultMutex);
					if(!this->result.active) {
						return;
					}
					
					auto interval = 1.0f / pow (2.0f, this->result.valueNormalised / 0.12f);
					intervalFrames = int(interval * 44100.0f);
				}
				
				auto & soundEngine = ofxRulr::Utils::SoundEngine::X();
				auto & assetRegister = ofxAssets::Register::X();
				
				auto tickBig = assetRegister.getSoundPointer("ofxRulr::tick_big");
				auto tickSmall = assetRegister.getSoundPointer("ofxRulr::tick_small");
				
				auto numFrames = out.getNumFrames();
				
				for(int i=0; i<numFrames; i++) {
					//check if this frame we start a tick
					if(this->ticks.framesUntilNext <= 0) {
						//select the tick sound
						auto isBigTick = this->ticks.index++ == 0;
						this->ticks.index %= 6;

						auto tickSoundAsset = isBigTick ? tickBig : tickSmall;
						
						//add it to the active sounds (delayed by 1 buffer always)
						ofxRulr::Utils::SoundEngine::ActiveSound activeSound;
						activeSound.delay = i;
						activeSound.sound = tickSoundAsset;
						soundEngine.play(activeSound);
						
						//set the next tick sound
						this->ticks.framesUntilNext = intervalFrames;
					}
					
					//check interval doesn't go too long
					if(this->ticks.framesUntilNext > intervalFrames) {
						//e.g. this might happen at next buffer fill
						this->ticks.framesUntilNext = intervalFrames;
					}
					
					this->ticks.framesUntilNext--;
				}
			}
//--------------------------------------------------------------
bool ofxSoundFile::readTo(ofSoundBuffer & buffer, unsigned int _samples){
	buffer.setNumChannels(channels);
	buffer.setSampleRate(samplerate);
	if(_samples!=0){
		// will read the requested number of samples
		// clamp to the number of samples we actually have
		_samples = min(_samples,samples);
		buffer.resize(_samples*channels);
	}
#ifdef OF_USING_SNDFILE
	else if (sndFile){
		// will read entire file
		buffer.resize(samples);
	}
#endif
#ifdef OF_USING_LAD
	else if (audioDecoder) {
		// will read entire file
		buffer.resize(samples);
	}
#endif
#ifdef OF_USING_MPG123
	else if(mp3File){
		buffer.clear();
	}
#endif
	
#ifdef OF_USING_SNDFILE
	if(sndFile) return sfReadFile(buffer);
#elif defined( OF_USING_MPG123 )
	if(mp3File) return mpg123ReadFile(buffer);
#elif defined( OF_USING_LAD )
	if(audioDecoder) return ladReadFile(buffer);
#endif
	return false;
}
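//--------------------------------------------------------------
// Streaming usage sketch (not part of the original examples): pulls fixed-size
// blocks from an already-opened ofxSoundFile, mirroring the end-of-file handling
// in ofxBasicSoundPlayer::audioOut above. The helper function and the 256-frame
// block size are made up for illustration.
void streamFileExample(ofxSoundFile & soundFile){
	ofSoundBuffer block;
	while(soundFile.readTo(block, 256)){
		// ... process the decoded frames in block here ...
	}
	soundFile.seekTo(0); // rewind for the next pass, as the streaming player does
}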
Example #15
void ofxGSTT::audioIn(ofSoundBuffer & buffer){
	//TODO make better use of soundbuffer obj
	audioIn(&buffer[0], buffer.getNumFrames(), buffer.getNumChannels(), OFXGSTT_DEFAULTDEVICE_ID); //TODO multidevice business
}
Example #16
void ofBaseSoundOutput::audioOut( ofSoundBuffer& buffer ){
	audioOut(&buffer[0], buffer.getNumFrames(), buffer.getNumChannels(), buffer.getDeviceID(), buffer.getTickCount());
}
bool ofxSoundFile::ladReadFile(ofSoundBuffer &buffer){
	
	int samplesRead = audioDecoder->read( buffer.size(), &buffer[0] );
	return samplesRead > 0;
}