void RadiumCompressorAudioProcessor::setParameter (int index, float newValue)
{
  bool updated = false;

  switch(index){

  case radium_compressor::COMP_EFF_RATIO:
    updated=wrapper->update_parameter_if_different(index,get_ratio_from_slider(newValue));
    break;
  case radium_compressor::COMP_EFF_THRESHOLD:
    updated=wrapper->update_parameter_if_different(index,get_threshold_from_slider(newValue));
    break;

  case radium_compressor::COMP_EFF_ATTACK:
    updated=wrapper->update_parameter_if_different(index,get_attack_release_from_slider(newValue));
    break;

  case radium_compressor::COMP_EFF_RELEASE:
    updated=wrapper->update_parameter_if_different(index,get_attack_release_from_slider(newValue));
    break;

  case radium_compressor::COMP_EFF_OUTPUT_VOLUME:
    updated=wrapper->update_parameter_if_different(index,get_makeup_gain_from_slider(newValue));
    break;

  case radium_compressor::COMP_EFF_BYPASS:
    {
      bool old = is_bypassing;
      is_bypassing = (newValue >= 0.5f);
      updated = (old != is_bypassing);
      break;
    }
  }

  if(updated){
    // dynamic_cast is the safer choice here; static_cast would also work, since the
    // active editor is always a RadiumCompressorAudioProcessorEditor.
    RadiumCompressorAudioProcessorEditor *editor = dynamic_cast<RadiumCompressorAudioProcessorEditor *>(getActiveEditor());
    if(editor!=NULL)
      editor->update_gui(); // (update_gui() is RT safe.)
  }
}
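The comment above weighs static_cast against dynamic_cast for the pointer returned by getActiveEditor(). A minimal standalone sketch of why the dynamic_cast plus null check is the safer pattern; EditorBase, MyEditor and the free getActiveEditor() below are hypothetical stand-ins, not JUCE or Radium types:

#include <iostream>

// Hypothetical stand-ins for a base editor class and one concrete editor type.
struct EditorBase { virtual ~EditorBase() = default; };

struct MyEditor : EditorBase {
    void update_gui() { std::cout << "GUI refreshed\n"; }
};

// Mimics AudioProcessor::getActiveEditor(): may return null or a base pointer
// that is not necessarily the editor type we expect.
EditorBase* getActiveEditor(EditorBase* current) { return current; }

int main() {
    MyEditor concrete;
    EditorBase* active = getActiveEditor(&concrete);

    // dynamic_cast yields nullptr if the active editor is not a MyEditor, so a
    // stale or foreign editor cannot be mistreated; static_cast would silently
    // produce an invalid pointer in that case.
    if (auto* editor = dynamic_cast<MyEditor*>(active))
        editor->update_gui();
    return 0;
}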
Example #2
void PitchTuneAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioPlayHead::CurrentPositionInfo posInfo;
    posInfo.resetToDefault(); // sane defaults in case no playhead/position info is available
    bool isHostGoing_ = false;
    if (getPlayHead() != 0 && getPlayHead()->getCurrentPosition(posInfo)) {
        isHostGoing_ = posInfo.isPlaying; //(posInfo.isPlaying || posInfo.isRecording);
        updateBpm(posInfo.bpm, posInfo.timeSigDenominator);
    }
    
    PitchTuneAudioProcessorEditor *e = (PitchTuneAudioProcessorEditor*)getActiveEditor();
    if (e) {
        if (e->isVisible()) {
            e->ppq = posInfo.ppqPosition;
            e->type = 1;
            e->triggerAsyncUpdate();
        }
    }

    if (isRecording) {
        if (isHostGoing_) {
            //record when host is playing
            if (sampleBeingRecorded && sampleBeingRecorded->getCursor() == 0) {
                // first time recording, store ppq
                sampleBeingRecorded->startPpq = posInfo.ppqPosition;
                //set recording flag
                sampleBeingRecorded->startRecording();
            }
            float* channelData = buffer.getSampleData (0);
            sampleBeingRecorded->record(channelData, buffer.getNumSamples());
        }
        else {
            if (sampleBeingRecorded && sampleBeingRecorded->isRecording) {
                //store stop ppq
                sampleBeingRecorded->stopPpq = posInfo.ppqPosition;
                //daw has stopped
                stopTransferring();
                //process pitch
                processPitch();
            }
        }
    }
    else {
        //playback the processed
        float* channelData = buffer.getSampleData (0);
        if (isHostGoing_) {
            int nClips = (int)samples.size();
            for (int i = 0; i < nClips; ++i) {
                Sample *curSample = samples[i];
                if (posInfo.ppqPosition >= curSample->startPpq && !curSample->isPlaying) {
                    //reach the start ppq
                    curSample->startPlay();
                }
                
                if (posInfo.ppqPosition >= curSample->stopPpq && curSample->isPlaying) {
                    //reach the end ppq
                    curSample->stopPlay();
                }
                }
                
                if (curSample->isPlaying) {
                    curSample->play(channelData, buffer.getNumSamples(), pitchProcessor->psola, (long)(posInfo.ppqPosition / Utility::numBeatsPerSample));
                }
            }
        }
        
    }

    for (int i = getNumInputChannels(); i < getNumOutputChannels(); ++i)
    {
        buffer.clear (i, 0, buffer.getNumSamples());
    }
    
    lastBlockPpq = posInfo.ppqPosition;
}
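The playback branch maps the transport's ppq position to a sample index via Utility::numBeatsPerSample. A standalone sketch of that beats-per-sample arithmetic; the bpm and sampleRate values are hypothetical placeholders for whatever updateBpm() actually stores:

#include <cstdio>

// Beats-per-sample for a given tempo: one beat lasts 60/bpm seconds,
// i.e. (60/bpm) * sampleRate samples, so beats per sample = bpm / (60 * sampleRate).
static double beatsPerSample(double bpm, double sampleRate)
{
    return bpm / (60.0 * sampleRate);
}

int main()
{
    const double bpm = 120.0, sampleRate = 44100.0;
    const double ppqPosition = 8.0; // transport position in quarter notes

    // Same idea as (long)(posInfo.ppqPosition / Utility::numBeatsPerSample) above.
    const long samplePos = (long)(ppqPosition / beatsPerSample(bpm, sampleRate));
    std::printf("ppq %.1f -> sample %ld\n", ppqPosition, samplePos);
    return 0;
}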
Example #3
void DynamicEditor::buttonUp(Vec2i mouse, unsigned char button)
{
    getActiveEditor()->buttonUp(mouse,button);
}
Example #4
void DynamicEditor::mouseMove(Vec2i mouse)
{
    getActiveEditor()->mouseMove(mouse);
}
Example #5
void MLPluginProcessor::getStateAsXML (XmlElement& xml)
{
	if (mEngine.getCompileStatus() != MLProc::OK) return;
	
#if DEMO	
	xml.setAttribute ("pluginVersion", JucePlugin_VersionCode);	
    xml.setAttribute ("presetName", String("----"));	
#else

  	const unsigned numParams = getNumParameters();

	// TODO use string properties of model instead of these JUCE strings.
	// also move to JSON.
	xml.setAttribute ("pluginVersion", JucePlugin_VersionCode);
	xml.setAttribute ("presetName", String(getStringProperty("preset").c_str()));
	xml.setAttribute ("scaleName", String(getStringProperty("key_scale").c_str()));

	// store parameter values to xml as a bunch of attributes.
	// not XML best practice in general but takes fewer characters.
	for(unsigned i=0; i<numParams; ++i)
	{
		const String paramName = symbolToXMLAttr(getParameterAlias(i));
		const float defaultVal = getParameterDefault(i);
		const float paramVal = getParameter(i);
		if (paramVal != defaultVal)
		{
			xml.setAttribute(paramName, paramVal);		
			//debug() << "setting XML param " << paramName << " to " << paramVal << "\n";
		}
	}

	// store patcher info to xml
	{			
		MLProcList patchers = getPatcherList();
		if (!patchers.empty())
		{
			MLProcPatcher& firstPatcher = static_cast<MLProcPatcher&>(**patchers.begin());
			const int inputs = firstPatcher.getParam("inputs");
			const int outputs = firstPatcher.getParam("outputs");
			String outStr;
			String patcherInput = "patcher_input_";
			
			for(unsigned i=1; i<=inputs; ++i)
			{
				bool differentFromDefault = false;
				outStr = "";
				for(unsigned j=1; j<=outputs; ++j)
				{
					if (firstPatcher.getConnection(i, j))
					{
						outStr += "1";
						differentFromDefault = true;
					}
					else
					{
						outStr += "0";
					}
				}
				if(differentFromDefault)
				{
					String outNum (i); 
					xml.setAttribute(patcherInput + outNum, outStr);	
				}				
			}
		}
	}	
	
	// store editor state to XML if one exists	
	MLPluginEditor* pEditor = static_cast<MLPluginEditor*>(getActiveEditor());
	if(pEditor)
	{
		MLRect r = pEditor->getWindowBounds();
		xml.setAttribute("editor_x", r.x());	
		xml.setAttribute("editor_y", r.y());	
		xml.setAttribute("editor_width", r.getWidth());	
		xml.setAttribute("editor_height", r.getHeight());
		xml.setAttribute("editor_num", getFloatProperty("patch_num"));	
		xml.setAttribute("editor_anim", getFloatProperty("patch_anim"));	
	}
	
	// save blob as most recently saved state
	mpLatestStateLoaded = XmlElementPtr(new XmlElement(xml));
	
#endif

}
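A hedged sketch of the symmetric restore path, assuming the same attribute-per-parameter layout; restoreParamsFromXML and the "param_<i>" naming are hypothetical, since the real attribute names come from symbolToXMLAttr(getParameterAlias(i)):

// Hypothetical restore counterpart to the save path above (not MLPluginProcessor's real API).
void restoreParamsFromXML(const XmlElement& xml, AudioProcessor& proc)
{
	for (int i = 0; i < proc.getNumParameters(); ++i)
	{
		// Placeholder naming scheme; on save the name would come from symbolToXMLAttr().
		const String attrName = "param_" + String(i);
		if (xml.hasAttribute(attrName))
			proc.setParameterNotifyingHost(i, (float) xml.getDoubleAttribute(attrName));
	}
}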
Example #6
void AdmvAudioProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	double cp[2];
	
	int channelCount = 0;
	size_t sampleRate = getSampleRate();

	for (int channel = 0; channel < (getNumInputChannels() - 1); channel += 2)
	{
		// No need to process signal if editor is closed
		if (getActiveEditor() == NULL)
		{
			break;
		}

		// TODO: investigate how to get number of input channels really connected to the plugin ATM.
		// It seems that getNumInputChannels() will always return max possible defined by JucePlugin_MaxNumInputChannels
		// This solution is bad, because it iterates through all input buffers.
		if (!isBlockInformative(buffer, channel / 2))
		{
			mGonioSegments[channel / 2] = GonioPoints<double>();
			mSpectroSegments[channel / 2] = tomatl::dsp::SpectrumBlock();

			continue;
		}
		
		channelCount += 2;

		float* l = buffer.getWritePointer(channel + 0);
		float* r = buffer.getWritePointer(channel + 1);

		for (int i = 0; i < buffer.getNumSamples(); ++i)
		{
			std::pair<double, double>* res = mGonioCalcs[channel / 2]->handlePoint(l[i], r[i], sampleRate);

			cp[0] = l[i];
			cp[1] = r[i];

			mSpectroCalcs[channel / 2]->checkSampleRate(getSampleRate());
			tomatl::dsp::SpectrumBlock spectroResult = mSpectroCalcs[channel / 2]->process((double*)&cp);

			if (res != NULL)
			{
				mGonioSegments[channel / 2] = GonioPoints<double>(res, mGonioCalcs[channel / 2]->getSegmentLength(), channel / 2, sampleRate);
				mLastGonioScale = mGonioCalcs[channel / 2]->getCurrentScaleValue();
			}

			if (spectroResult.mLength > 0)
			{
				mSpectroSegments[channel / 2] = spectroResult;
			}
		}
	}
	
	mCurrentInputCount = channelCount;

	if (getState().mOutputMode == AdmvPluginState::outputMute)
	{
		buffer.clear();
	}
	else
	{
		// In case we have more outputs than inputs, we'll clear any output
		// channels that didn't contain input data, (because these aren't
		// guaranteed to be empty - they may contain garbage).
		for (int i = getNumInputChannels(); i < getNumOutputChannels(); ++i)
		{
			buffer.clear(i, 0, buffer.getNumSamples());
		}
	}
}
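The final branch clears any output channels that had no corresponding input. In newer JUCE versions the same idiom is usually written against getTotalNumInputChannels()/getTotalNumOutputChannels(); a small sketch, where clearUnusedOutputs is a hypothetical helper rather than part of this plugin:

// Assumes a JUCE version providing the getTotalNum*Channels() accessors.
void clearUnusedOutputs(AudioProcessor& proc, AudioSampleBuffer& buffer)
{
	const int numIn  = proc.getTotalNumInputChannels();
	const int numOut = proc.getTotalNumOutputChannels();

	// Output channels beyond the inputs may contain garbage; silence them.
	for (int ch = numIn; ch < numOut; ++ch)
		buffer.clear(ch, 0, buffer.getNumSamples());
}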