Example #1
//--------------------------------------------------------------
void ofApp::audioIn(ofSoundBuffer & input){
	// keep a copy of the incoming buffer
	sb = input;
	// de-interleave the (assumed stereo) input into left, right and mono buffers
	for (int i = 0; i < input.getNumFrames(); i++){
		left[i] = input[i*2];
		right[i] = input[i*2+1];
		mono[i] = (left[i] + right[i]) / 2;
	}
}
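For context, a minimal sketch of the setup() this callback assumes. The soundStream member, the buffer size of 256, and the left/right/mono std::vector<float> members are assumptions; only the stereo-input requirement follows from how the callback indexes the buffer.

void ofApp::setup(){
	int bufferSize = 256;

	// pre-size the analysis buffers the callback writes into (assumed members)
	left.assign(bufferSize, 0.0f);
	right.assign(bufferSize, 0.0f);
	mono.assign(bufferSize, 0.0f);

	ofSoundStreamSettings settings;
	settings.setInListener(this);       // route audioIn() to this ofApp
	settings.sampleRate = 44100;
	settings.numInputChannels = 2;      // the callback de-interleaves a stereo signal
	settings.numOutputChannels = 0;
	settings.bufferSize = bufferSize;
	soundStream.setup(settings);
}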
Example #2
//--------------------------------------------------------------
void ofApp::audioOut(ofSoundBuffer & buffer){

    if(spectrum->playing){

        for (int i = 0; i < buffer.getNumFrames(); i++){

            wave = 0.0;

            // additive synthesis: sum one wavetable oscillator per bin
            for(int n=0; n<BIT; n++){

                if (amp[n]>0.00001) {
                    // advance the phase by (table length * frequency / sample rate) samples
                    phases[n] += 512./(44100.0/(hertzScale[n]));

                    // wrap the phase back into the wavetable
                    if ( phases[n] >= 511 ) phases[n] -= 512;
                    if ( phases[n] < 0 ) phases[n] = 0;

                    // nearest-sample lookup; interpolating between sineBuffer[1 + phase]
                    // and sineBuffer[2 + phase] would be smoother
                    wave += sineBuffer[1 + (long) phases[n]] * amp[n];
                }
            }

            // scale down and hard-clip the mixed sample
            wave /= 10.0;
            if(wave > 1.0) wave = 1.0;
            if(wave < -1.0) wave = -1.0;

            // write the same sample to the left and right channels
            buffer[i * buffer.getNumChannels()    ] = wave * volume;
            buffer[i * buffer.getNumChannels() + 1] = wave * volume;
        }

    } else {
        // not playing: write silence so the buffer doesn't carry stale samples
        for (int i = 0; i < buffer.getNumFrames(); i++){
            buffer[i * buffer.getNumChannels()    ] = 0;
            buffer[i * buffer.getNumChannels() + 1] = 0;
        }
    }
}
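The oscillators above read from a sine wavetable indexed at sineBuffer[1 + phase] with phases wrapped into [0, 511]. A sketch of how that table might be filled follows; the 514-entry size (512 samples plus guard entries at each end) is an assumption, chosen so that lookups at [1 + phase] and the interpolated neighbour [2 + phase] stay in bounds.

// A minimal sketch of filling sineBuffer, assuming a 514-entry table with
// guard samples. TWO_PI comes from openFrameworks.
float sineBuffer[514];

void fillSineBuffer(){
    for (int i = 0; i < 514; i++){
        // sineBuffer[1] holds phase 0, sineBuffer[512] holds one full cycle
        sineBuffer[i] = sinf(TWO_PI * (float)(i - 1) / 512.0f);
    }
}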
Example #3
//----------
void Focus::audioOut(ofSoundBuffer & out) {
	int intervalFrames;
	{
		lock_guard<mutex> lock(this->resultMutex);
		if(!this->result.active) {
			return;
		}

		auto interval = 1.0f / pow(2.0f, this->result.valueNormalised / 0.12f);
		intervalFrames = int(interval * 44100.0f);
	}

	auto & soundEngine = ofxRulr::Utils::SoundEngine::X();
	auto & assetRegister = ofxAssets::Register::X();

	auto tickBig = assetRegister.getSoundPointer("ofxRulr::tick_big");
	auto tickSmall = assetRegister.getSoundPointer("ofxRulr::tick_small");

	auto numFrames = out.getNumFrames();

	for(int i=0; i<numFrames; i++) {
		//check whether a tick starts on this frame
		if(this->ticks.framesUntilNext <= 0) {
			//select the tick sound
			auto isBigTick = this->ticks.index++ == 0;
			this->ticks.index %= 6;

			auto tickSoundAsset = isBigTick ? tickBig : tickSmall;

			//add it to the active sounds (always delayed by one buffer)
			ofxRulr::Utils::SoundEngine::ActiveSound activeSound;
			activeSound.delay = i;
			activeSound.sound = tickSoundAsset;
			soundEngine.play(activeSound);

			//schedule the next tick
			this->ticks.framesUntilNext = intervalFrames;
		}

		//make sure the interval doesn't overrun
		if(this->ticks.framesUntilNext > intervalFrames) {
			//e.g. this can happen on the next buffer fill
			this->ticks.framesUntilNext = intervalFrames;
		}

		this->ticks.framesUntilNext--;
	}
}
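The interval mapping here is exponential: each 0.12 step in result.valueNormalised halves the tick interval. For example, at the 44100 Hz rate the code assumes, valueNormalised = 0 gives a 1 s interval (44100 frames), 0.12 gives 0.5 s (22050 frames), and 0.24 gives 0.25 s (11025 frames).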
Example #4
//--------------------------------------------------------------
void ofApp::audioOut(ofSoundBuffer &outBuffer) {
	
	// base frequency of the lowest sine wave in cycles per second (hertz)
	float frequency = 172.5;
	
	// mapping frequencies from Hz into full oscillations of sin() (two pi)
	float wavePhaseStep = (frequency / sampleRate) * TWO_PI;
	float pulsePhaseStep = (0.5 / sampleRate) * TWO_PI;
	
	// this loop builds a buffer of audio containing 3 sine waves at different
	// frequencies, and pulses the volume of each sine wave individually. In
	// other words, 3 oscillators and 3 LFOs.
	
	for(int i = 0; i < outBuffer.getNumFrames(); i++) {
		
		// build up a chord out of sine waves at 3 different frequencies
		float sampleLow = sin(wavePhase);
		float sampleMid = sin(wavePhase * 1.5);
		float sampleHi = sin(wavePhase * 2.0);
		
		// pulse each sample's volume
		sampleLow *= sin(pulsePhase);
		sampleMid *= sin(pulsePhase * 1.04);
		sampleHi *= sin(pulsePhase * 1.09);
		
		float fullSample = (sampleLow + sampleMid + sampleHi);
		
		// reduce the full sample's volume so it doesn't exceed 1
		fullSample *= 0.3;
		
		// write the computed sample to the left and right channels
		outBuffer.getSample(i, 0) = fullSample;
		outBuffer.getSample(i, 1) = fullSample;
		
		// get the two phase variables ready for the next sample
		wavePhase += wavePhaseStep;
		pulsePhase += pulsePhaseStep;
	}
	
	ofScopedLock lock(audioMutex);
	lastBuffer = outBuffer;
}
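A sketch of the state this callback relies on and how the output stream might be started. The member names (wavePhase, pulsePhase, sampleRate, lastBuffer, audioMutex, soundStream) follow the callback above; the buffer size of 512 and the use of ofSoundStreamSettings are assumptions.

// Assumed header members:
//   double wavePhase = 0, pulsePhase = 0;
//   int sampleRate = 44100;
//   ofSoundBuffer lastBuffer;
//   ofMutex audioMutex;
//   ofSoundStream soundStream;

void ofApp::setup(){
	ofSoundStreamSettings settings;
	settings.setOutListener(this);   // route audioOut() to this ofApp
	settings.sampleRate = sampleRate;
	settings.numOutputChannels = 2;  // the callback writes channels 0 and 1
	settings.numInputChannels = 0;
	settings.bufferSize = 512;       // assumed buffer size
	soundStream.setup(settings);
}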
Example #5
void ofxBasicSoundPlayer::audioOut(ofSoundBuffer& outputBuffer){
	if(bIsPlaying){
		int nFrames = outputBuffer.getNumFrames();
		int nChannels = outputBuffer.getNumChannels();
		if (playerNumChannels != nChannels || playerNumFrames != nFrames || playerSampleRate != outputBuffer.getSampleRate()) {
			audioOutBuffersChanged(nFrames, nChannels, outputBuffer.getSampleRate());
		}
		if(streaming){
			int samplesRead = soundFile.readTo(buffer,nFrames);
			if ( samplesRead==0 ){
				bIsPlaying=false;
				soundFile.seekTo(0);
			}
			else{
				buffer.stereoPan(volumesLeft.back(),volumesRight.back());
				newBufferE.notify(this,buffer);
				buffer.copyTo(outputBuffer);
			}
		}else{
			if (positions.size() == 1 && abs(speed - 1)<FLT_EPSILON) {
				buffer.copyTo(outputBuffer,nFrames,nChannels,positions[0],loop);
			}else{
				for(int i=0;i<(int)positions.size();i++){
					//assert( resampledBuffer.getNumFrames() == bufferSize*relativeSpeed[i] );
					if(abs(relativeSpeed[i] - 1)<FLT_EPSILON){
						buffer.copyTo(resampledBuffer,nFrames,nChannels,positions[i],loop);
					}else{
						buffer.resampleTo(resampledBuffer,positions[i],nFrames,relativeSpeed[i],loop, ofSoundBuffer::Linear);
					}
					resampledBuffer.stereoPan(volumesLeft[i],volumesRight[i]);
					newBufferE.notify(this,resampledBuffer);
					resampledBuffer.addTo(outputBuffer,0,loop);
				}
			}
			updatePositions(nFrames);
		}
	}
}
Example #6
void ofxGSTT::audioIn(ofSoundBuffer & buffer){
	//TODO make better use of soundbuffer obj
	audioIn(&buffer[0], buffer.getNumFrames(), buffer.getNumChannels(), OFXGSTT_DEFAULTDEVICE_ID); //TODO multidevice business
}
Example #7
void ofBaseSoundOutput::audioOut( ofSoundBuffer& buffer ){
	audioOut(&buffer[0], buffer.getNumFrames(), buffer.getNumChannels(), buffer.getDeviceID(), buffer.getTickCount());
}
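This default implementation forwards the buffer-based callback to the older raw-pointer overload, so listeners written against either signature keep working. The legacy overload it targets looks roughly like the declaration below; the parameter names and the tickCount type are assumptions inferred from the call above.

// Presumed shape of the legacy overload being forwarded to (assumption):
virtual void audioOut(float * output, int bufferSize, int nChannels,
                      int deviceID, unsigned long long tickCount);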