// Per-block processing: consume note on/off events, apply the latest gain
// parameter change, and render one buffer per active output channel.
tresult PLUGIN_API Instrument::process (ProcessData& data)
{
	// Read the MIDI events queued by the host for this audio block.
	IEventList* inputEvents = data.inputEvents;
	if (inputEvents != 0)
	{
		Event e;
		int32 numEvents = inputEvents->getEventCount ();
		for (int32 i = 0; i < numEvents; i++)
		{
			if (inputEvents->getEvent (i, e) == kResultTrue)
			{
				if (e.type == Event::kNoteOnEvent)
				{
					if (e.noteOn.noteId == -1)
					{
						// for hosts which don't send unique noteId's
						e.noteOn.noteId = e.noteOn.pitch;
					}
					leftProcessor.noteOn (e.noteOn.pitch, e.noteOn.velocity);
					rightProcessor.noteOn (e.noteOn.pitch, e.noteOn.velocity);
				}
				else if (e.type == Event::kNoteOffEvent)
				{
					// BUGFIX: a note-off event must be accessed through the
					// e.noteOff union member — NoteOnEvent and NoteOffEvent have
					// different layouts, so e.noteOn.noteId read the wrong field.
					if (e.noteOff.noteId == -1)
					{
						// for hosts which don't send unique noteId's
						e.noteOff.noteId = e.noteOff.pitch;
					}
					leftProcessor.noteOff ();
					rightProcessor.noteOff ();
				}
			}
		}
	}

	// Gain parameter: normalized [0,1] is mapped to [-100, +6] dB,
	// then converted to a linear factor.
	if (hasInputParameterChanged (data, kGainId))
	{
		float paramValue = getInputParameterChange (data, kGainId);
		float dB = 106 * paramValue - 100;
		float gain = pow (10, dB / 20);
		leftProcessor.setGain (gain);
		rightProcessor.setGain (gain);
	}

	// Render each available channel of the first output bus.
	if (numChannels > 0)
	{
		float* leftOutputChannel = data.outputs[0].channelBuffers32[0];
		leftProcessor.process (leftOutputChannel, data.numSamples);
	}
	if (numChannels > 1)
	{
		float* rightOutputChannel = data.outputs[0].channelBuffers32[1];
		rightProcessor.process (rightOutputChannel, data.numSamples);
	}
	return kResultTrue;
}
//------------------------------------------------------------------------ tresult PLUGIN_API Plug::process (ProcessData& data) { //---1) Read inputs parameter changes----------- IParameterChanges* paramChanges = data.inputParameterChanges; if (paramChanges) { int32 numParamsChanged = paramChanges->getParameterCount (); // for each parameter which are some changes in this audio block: for (int32 i = 0; i < numParamsChanged; i++) { IParamValueQueue* paramQueue = paramChanges->getParameterData (i); if (paramQueue) { int32 offsetSamples; double value; int32 numPoints = paramQueue->getPointCount (); switch (paramQueue->getParameterId ()) { case kBypassId: if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue) { bBypass = (value > 0.5f); } break; } } } } //---2) Read input events------------- IEventList* eventList = data.inputEvents; if (eventList) { int32 numEvent = eventList->getEventCount (); for (int32 i = 0; i < numEvent; i++) { Event event; if (eventList->getEvent (i, event) == kResultOk) { switch (event.type) { //---------------------- case Event::kNoteOnEvent: { mLastNoteOnPitch = event.noteOn.pitch; mLastNoteOnId = event.noteOn.noteId; /*String str; str.printf (STR("noteON %d"), event.noteOff.noteId); sendTextMessage (str);*/ } break; //---------------------- case Event::kNoteOffEvent: { /* String str; str.printf (STR("noteOff %d"), event.noteOff.noteId); sendTextMessage (str); */} break; //---------------------- case Event::kNoteExpressionTextEvent: // noteOff reset the reduction if (event.noteExpressionText.typeId == kTextTypeID) { //if (mLastNoteOnId == event.noteExpressionText.noteId) { String str (STR("Text: ")); str += event.noteExpressionText.text; String tmp1; tmp1.printInt64 (mLastNoteOnId); String tmp2; tmp2.printInt64 (event.noteExpressionText.noteId); str += STR(" - id:"); str += tmp2; str += STR(" - noteOn id:"); str += tmp1; sendTextMessage (str); } } else if (event.noteExpressionText.typeId == kPhonemeTypeID) { //if 
(mLastNoteOnId == event.noteExpressionText.noteId) { String str (STR("Phoneme: ")); str += event.noteExpressionText.text; String tmp1; tmp1.printInt64 (mLastNoteOnId); String tmp2; tmp2.printInt64 (event.noteExpressionText.noteId); str += STR(" - id:"); str += tmp2; str += STR(" - noteOn id:"); str += tmp1; } } break; } } } } //------------------------------------- //---3) Process Audio--------------------- //------------------------------------- if (data.numOutputs == 0) { // nothing to do return kResultOk; } // no output float** out = data.outputs[0].channelBuffers32; for (int32 i = 0; i < data.outputs[0].numChannels; i++) { memset (out[i], 0, data.numSamples * sizeof (float)); } data.outputs[0].silenceFlags = 0x7fff; return kResultOk; }
// VST3 processing entry point of the IPlug wrapper: applies queued host
// parameter changes, translates incoming note events to IMidiMsg, then
// attaches the host's buffers and runs the DSP in single or double precision.
tresult PLUGIN_API IPlugVST3Plugin::process(ProcessData& data)
{
  TRACE_PROCESS;

  // Hold the plug-in mutex for the whole audio callback.
  IMutexLock lock(this);

  // Cache the host transport/time info; processContext may be null.
  if(data.processContext)
    memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

  //process parameters
  IParameterChanges* paramChanges = data.inputParameterChanges;
  if (paramChanges)
  {
    int32 numParamsChanged = paramChanges->getParameterCount();

    //it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
    //for now we just grab the last one
    for (int32 i = 0; i < numParamsChanged; i++)
    {
      IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
      if (paramQueue)
      {
        int32 numPoints = paramQueue->getPointCount();
        int32 offsetSamples;
        double value;
        if (paramQueue->getPoint(numPoints - 1, offsetSamples, value) == kResultTrue)
        {
          int idx = paramQueue->getParameterId();
          switch (idx)
          {
            case kBypassParam:
            {
              bool bypassed = (value > 0.5);
              if (bypassed != mIsBypassed)
              {
                mIsBypassed = bypassed;
              }
              break;
            }
            case kPresetParam:
              // normalized value selects one of the plug-in's presets
              RestorePreset(FromNormalizedParam(value, 0, NPresets(), 1.));
              break;
            //TODO pitch bend, modwheel etc
            default:
              // plain plug-in parameter: update model and GUI, then notify
              if (idx >= 0 && idx < NParams())
              {
                GetParam(idx)->SetNormalized((double)value);
                if (GetGUI()) GetGUI()->SetParameterFromPlug(idx, (double)value, true);
                OnParamChange(idx);
              }
              break;
          }
        }
      }
    }
  }

  if(DoesMIDI())
  {
    //process events.. only midi note on and note off?
    IEventList* eventList = data.inputEvents;
    if (eventList)
    {
      int32 numEvent = eventList->getEventCount();
      for (int32 i=0; i<numEvent; i++)
      {
        Event event;
        if (eventList->getEvent(i, event) == kResultOk)
        {
          IMidiMsg msg;
          switch (event.type)
          {
            case Event::kNoteOnEvent:
            {
              // VST3 velocity is normalized [0,1] -> MIDI 0..127
              msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
              ProcessMidiMsg(&msg);
              break;
            }
            case Event::kNoteOffEvent:
            {
              msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
              ProcessMidiMsg(&msg);
              break;
            }
          }
        }
      }
    }
  }

#pragma mark process single precision
  if (processSetup.symbolicSampleSize == kSample32)
  {
    if (data.numInputs)
    {
      if (mScChans)
      {
        if (getAudioInput(1)->isActive()) // Sidechain is active
        {
          mSidechainActive = true;
          SetInputChannelConnections(0, NInChannels(), true);
        }
        else
        {
          // sidechain just went inactive: clear stale sidechain samples
          if (mSidechainActive)
          {
            ZeroScratchBuffers();
            mSidechainActive = false;
          }
          SetInputChannelConnections(0, NInChannels(), true);
          // NOTE(review): count arg looks like it should be mScChans rather
          // than NInChannels() - mScChans — verify against IPlug channel map.
          SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
        }
        // main input bus, then sidechain bus appended after the main channels
        AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers32, data.numSamples);
        AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers32, data.numSamples);
      }
      else
      {
        SetInputChannelConnections(0, data.inputs[0].numChannels, true);
        SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
        AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers32, data.numSamples);
      }
    }

    // walk every output bus, accumulating the channel offset into the
    // wrapper's flat output channel list
    for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
    {
      int busChannels = data.outputs[outBus].numChannels;
      SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
      SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
      AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers32);
      chanOffset += busChannels;
    }

    if (mIsBypassed)
      PassThroughBuffers(0.0f, data.numSamples);
    else
      ProcessBuffers(0.0f, data.numSamples); // process buffers single precision
  }
#pragma mark process double precision
  else if (processSetup.symbolicSampleSize == kSample64)
  {
    // mirror of the single-precision path using the 64-bit buffers
    if (data.numInputs)
    {
      if (mScChans)
      {
        if (getAudioInput(1)->isActive()) // Sidechain is active
        {
          mSidechainActive = true;
          SetInputChannelConnections(0, NInChannels(), true);
        }
        else
        {
          if (mSidechainActive)
          {
            ZeroScratchBuffers();
            mSidechainActive = false;
          }
          SetInputChannelConnections(0, NInChannels(), true);
          SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
        }
        AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers64, data.numSamples);
        AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers64, data.numSamples);
      }
      else
      {
        SetInputChannelConnections(0, data.inputs[0].numChannels, true);
        SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
        AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers64, data.numSamples);
      }
    }

    for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
    {
      int busChannels = data.outputs[outBus].numChannels;
      SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
      SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
      AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers64);
      chanOffset += busChannels;
    }

    if (mIsBypassed)
      PassThroughBuffers(0.0, data.numSamples);
    else
      ProcessBuffers(0.0, data.numSamples); // process buffers double precision
  }

  // Midi Out (not implemented yet)
  // if (mDoesMidi) {
  //   IEventList eventList = data.outputEvents;
  //
  //   if (eventList)
  //   {
  //     Event event;
  //
  //     while (!mMidiOutputQueue.Empty()) {
  //       //TODO: parse events and add
  //       eventList.addEvent(event);
  //     }
  //   }
  // }

  return kResultOk;
}
// Per-block processing for the single-processor instrument: apply all
// changed parameters (mapped from their normalized [0,1] range), consume
// note events, then render a stereo buffer.
tresult PLUGIN_API Instrument::process (ProcessData& data)
{
	if (hasInputParameterChanged (data, kGainId))
	{
		float gain = getInputParameterChange (data, kGainId);
		// NOTE(review): log() is the natural logarithm; dB conversion normally
		// uses 20 * log10(gain) — confirm what processor.setGain expects.
		float dB = 20 * log (gain);
		processor.setGain (dB);
	}
	if (hasInputParameterChanged (data, kAttackId))
	{
		// normalized -> milliseconds
		float attack = getInputParameterChange (data, kAttackId) * 1000;
		processor.setAttack (attack);
	}
	if (hasInputParameterChanged (data, kDecayId))
	{
		float decay = getInputParameterChange (data, kDecayId) * 1000;
		processor.setDecay (decay);
	}
	if (hasInputParameterChanged (data, kSustainRateId))
	{
		// NOTE(review): same natural-log vs log10 question as kGainId above
		float sustainRate = 20 * log (getInputParameterChange (data, kSustainRateId));
		processor.setSustainRate (sustainRate);
	}
	if (hasInputParameterChanged (data, kReleaseId))
	{
		float release = getInputParameterChange (data, kReleaseId) * 1000;
		processor.setRelease (release);
	}
	if (hasInputParameterChanged (data, kWaveTypeId))
	{
		// map [0,1] onto the discrete wave types, clamped to the last one
		float value = getInputParameterChange (data, kWaveTypeId);
		WaveType waveType = (WaveType) std::min<int8> ((int8)(NUMTYPES * value), NUMTYPES - 1);
		processor.setCutOff (CUTOFF_DEFAULT_FREQUENCY);
		processor.setType (waveType);
	}
	if (hasInputParameterChanged (data, kPWMId))
	{
		// [0,1] -> [-1,1], clamped away from the extremes
		float pwm = getInputParameterChange (data, kPWMId) * 2 - 1;
		pwm = std::max<float> (-0.99, std::min<float> (0.99, pwm));
		processor.setPWM (pwm);
	}
	if (hasInputParameterChanged (data, kCutOffId))
	{
		// quadratic mapping of [0,1] onto 20 Hz .. 20 kHz
		float cutoff = getInputParameterChange (data, kCutOffId);
		cutoff *= cutoff * 19980;
		cutoff = cutoff + 20;
		processor.setCutOff (cutoff);
	}
	if (hasInputParameterChanged (data, kResId))
	{
		float res = getInputParameterChange (data, kResId);
		res *= 1.4;
		processor.setRes (res);
	}
	if (hasInputParameterChanged (data, kLFO_PWM_freqId))
	{
		float lfo_PWM_freq = getInputParameterChange (data, kLFO_PWM_freqId);
		lfo_PWM_freq *= 10;
		processor.setLFO_PWM_freq (lfo_PWM_freq);
	}
	if (hasInputParameterChanged (data, kLFO_autopan_freqId))
	{
		float lfo_autopan_freq = getInputParameterChange (data, kLFO_autopan_freqId);
		lfo_autopan_freq *= 15;
		processor.setLFO_autopan_freq (lfo_autopan_freq);
	}
	if (hasInputParameterChanged (data, kLFO_autopan_phaseId))
	{
		// [0,1] -> approximately [0, pi]
		float lfo_autopan_phase = getInputParameterChange (data, kLFO_autopan_phaseId);
		lfo_autopan_phase *= 3.14;
		processor.setLFO_autopan_phase (lfo_autopan_phase);
	}

	// Read the MIDI events queued by the host for this audio block.
	IEventList* inputEvents = data.inputEvents;
	if (inputEvents != 0)
	{
		Event e;
		int32 numEvents = inputEvents->getEventCount ();
		for (int32 i = 0; i < numEvents; i++)
		{
			if (inputEvents->getEvent (i, e) == kResultTrue)
			{
				if (e.type == Event::kNoteOnEvent)
				{
					if (e.noteOn.noteId == -1)
					{
						// for hosts which don't send unique noteId's
						e.noteOn.noteId = e.noteOn.pitch;
					}
					processor.noteOn (e.noteOn.pitch, e.noteOn.velocity);
				}
				else if (e.type == Event::kNoteOffEvent)
				{
					// BUGFIX: note-off events must be accessed through the
					// e.noteOff union member — NoteOnEvent and NoteOffEvent have
					// different layouts, so e.noteOn.* read the wrong fields.
					if (e.noteOff.noteId == -1)
					{
						// for hosts which don't send unique noteId's
						e.noteOff.noteId = e.noteOff.pitch;
					}
					processor.noteOff (e.noteOff.pitch);
				}
			}
		}
	}

	/*
	// mono rendering path (disabled)
	if (numChannels > 0)
	{
		float* leftOutputChannel = data.outputs[0].channelBuffers32[0];
		processor.process (leftOutputChannel, leftOutputChannel, data.numSamples);
	}
	*/

	// stereo rendering
	if (numChannels > 1)
	{
		float* leftOutputChannel = data.outputs[0].channelBuffers32[0];
		float* rightOutputChannel = data.outputs[0].channelBuffers32[1];
		processor.process (leftOutputChannel, rightOutputChannel, data.numSamples);
	}
	return kResultTrue;
}
//------------------------------------------------------------------------
// Gain example processor. Four steps:
// 1) read host parameter changes (gain, bypass)
// 2) read note events (velocity of the last note-on reduces the gain)
// 3) apply the gain from input to output buffers (with silence/bypass handling)
// 4) write the new VU meter value to the output parameter queue
tresult PLUGIN_API AGainSimple::process (ProcessData& data)
{
	//---1) Read inputs parameter changes-----------
	IParameterChanges* paramChanges = data.inputParameterChanges;
	if (paramChanges)
	{
		int32 numParamsChanged = paramChanges->getParameterCount ();
		// for each parameter which has some changes in this audio block:
		for (int32 i = 0; i < numParamsChanged; i++)
		{
			IParamValueQueue* paramQueue = paramChanges->getParameterData (i);
			if (paramQueue)
			{
				int32 offsetSamples;
				double value;
				int32 numPoints = paramQueue->getPointCount ();
				switch (paramQueue->getParameterId ())
				{
					case kGainId:
						// we use in this example only the last point of the queue.
						// in some cases, for specific kinds of parameters, it makes
						// sense to retrieve all points and process the audio block
						// in smaller chunks.
						if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue)
							fGain = (float)value;
						break;
					case kBypassId:
						if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue)
							bBypass = (value > 0.5f);
						break;
				}
			}
		}
	}

	//---2) Read input events-------------
	IEventList* eventList = data.inputEvents;
	if (eventList)
	{
		int32 numEvent = eventList->getEventCount ();
		for (int32 i = 0; i < numEvent; i++)
		{
			Event event;
			if (eventList->getEvent (i, event) == kResultOk)
			{
				switch (event.type)
				{
					//----------------------
					case Event::kNoteOnEvent:
						// use the velocity as gain modifier
						fGainReduction = event.noteOn.velocity;
						break;

					//----------------------
					case Event::kNoteOffEvent:
						// noteOff resets the reduction
						fGainReduction = 0.f;
						break;
				}
			}
		}
	}

	//-------------------------------------
	//---3) Process Audio---------------------
	//-------------------------------------
	if (data.numInputs == 0 || data.numOutputs == 0)
	{
		// nothing to do
		return kResultOk;
	}

	// (simplification) we suppose in this example that we have the same
	// input channel count as the output
	int32 numChannels = data.inputs[0].numChannels;

	//---get audio buffers----------------
	float** in = data.inputs[0].channelBuffers32;
	float** out = data.outputs[0].channelBuffers32;

	//---check if silence---------------
	// normally we would have to check each channel (simplification)
	if (data.inputs[0].silenceFlags != 0)
	{
		// mark output silence too
		data.outputs[0].silenceFlags = data.inputs[0].silenceFlags;

		// the plug-in has to be sure that, if it sets the silence flags,
		// the output buffers really are cleared
		int32 sampleFrames = data.numSamples;
		for (int32 i = 0; i < numChannels; i++)
		{
			// doesn't need to be cleared if the buffers are the same (in this
			// case the input buffers were already cleared by the host)
			if (in[i] != out[i])
			{
				memset (out[i], 0, sampleFrames * sizeof (float));
			}
		}
		// nothing more to do at this point
		return kResultOk;
	}

	// mark our outputs as not silent
	data.outputs[0].silenceFlags = 0;

	//---in bypass mode outputs should be like inputs-----
	if (bBypass)
	{
		int32 sampleFrames = data.numSamples;
		for (int32 i = 0; i < numChannels; i++)
		{
			// doesn't need to be copied if the buffers are the same
			if (in[i] != out[i])
				memcpy (out[i], in[i], sampleFrames * sizeof (float));
		}
		// in this example we don't update the VU meter in bypass
	}
	else
	{
		float fVuPPM = 0.f;

		//---apply gain factor----------
		float gain = (fGain - fGainReduction);
		if (bHalfGain)
		{
			gain = gain * 0.5f;
		}

		// effectively zero gain: just clear the outputs and flag them silent
		if (gain < 0.0000001)
		{
			int32 sampleFrames = data.numSamples;
			for (int32 i = 0; i < numChannels; i++)
			{
				memset (out[i], 0, sampleFrames * sizeof (float));
			}
			data.outputs[0].silenceFlags = (1 << numChannels) - 1; // this will set to 1 all channels
			fVuPPM = 0.f;
		}
		else
		{
			// in a real plug-in it would be better to do de-zippering to avoid
			// jumps (clicks) in gain value
			for (int32 i = 0; i < numChannels; i++)
			{
				int32 sampleFrames = data.numSamples;
				float* ptrIn = in[i];
				float* ptrOut = out[i];
				float tmp;
				while (--sampleFrames >= 0)
				{
					// apply gain
					tmp = (*ptrIn++) * gain;
					(*ptrOut++) = tmp;

					// track peak — checks only positive values
					if (tmp > fVuPPM)
						fVuPPM = tmp;
				}
			}
		}

		//---3) Write outputs parameter changes-----------
		IParameterChanges* paramChanges = data.outputParameterChanges;
		// a new value of VuMeter will be sent to the host
		// (the host will send it back in sync to our controller for updating our editor)
		if (paramChanges && fVuPPMOld != fVuPPM)
		{
			int32 index = 0;
			IParamValueQueue* paramQueue = paramChanges->addParameterData (kVuPPMId, index);
			if (paramQueue)
			{
				int32 index2 = 0;
				paramQueue->addPoint (0, fVuPPM, index2);
			}
		}
		fVuPPMOld = fVuPPM;
	}
	return kResultOk;
}
// VST3 processing entry point: applies queued host parameter changes,
// translates note events to IMidiMsg, then attaches host buffers (with
// optional sidechain) and runs the DSP in single or double precision.
tresult PLUGIN_API IPlugVST3::process(ProcessData& data)
{
  TRACE_PROCESS;

  IMutexLock lock(this); // TODO: is this the best place to lock the mutex?

  // BUGFIX: data.processContext may legally be null; copying unconditionally
  // dereferenced a null pointer. Guard like IPlugVST3Plugin::process does.
  if (data.processContext)
    memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

  //process parameters
  IParameterChanges* paramChanges = data.inputParameterChanges;
  if (paramChanges)
  {
    int32 numParamsChanged = paramChanges->getParameterCount();

    //it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
    //for now we just grab the last one
    for (int32 i = 0; i < numParamsChanged; i++)
    {
      IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
      if (paramQueue)
      {
        int32 numPoints = paramQueue->getPointCount();
        int32 offsetSamples;
        double value;
        if (paramQueue->getPoint(numPoints - 1, offsetSamples, value) == kResultTrue)
        {
          int idx = paramQueue->getParameterId();
          if (idx >= 0 && idx < NParams())
          {
            // update model and GUI, then notify the plug-in
            GetParam(idx)->SetNormalized((double)value);
            if (GetGUI()) GetGUI()->SetParameterFromPlug(idx, (double)value, true);
            OnParamChange(idx);
          }
        }
      }
    }
  }

  if(mDoesMidi)
  {
    //process events.. only midi note on and note off?
    IEventList* eventList = data.inputEvents;
    if (eventList)
    {
      int32 numEvent = eventList->getEventCount();
      for (int32 i=0; i<numEvent; i++)
      {
        Event event;
        if (eventList->getEvent(i, event) == kResultOk)
        {
          IMidiMsg msg;
          switch (event.type)
          {
            case Event::kNoteOnEvent:
            {
              // VST3 velocity is normalized [0,1] -> MIDI 0..127
              msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
              ProcessMidiMsg(&msg);
              break;
            }
            case Event::kNoteOffEvent:
            {
              msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
              ProcessMidiMsg(&msg);
              break;
            }
          }
        }
      }
    }
  }

  //process audio
  if (data.numInputs == 0 || data.numOutputs == 0)
  {
    // nothing to do
    return kResultOk;
  }

  if (processSetup.symbolicSampleSize == kSample32)
  {
    float** in = data.inputs[0].channelBuffers32;
    float** out = data.outputs[0].channelBuffers32;

    if (mScChans)
    {
      float** side = data.inputs[1].channelBuffers32;

      if (getAudioInput(1)->isActive())
      {
        // concatenate main + sidechain channel pointers into one flat list
        // NOTE(review): this allocates on the audio thread every block;
        // consider a preallocated member buffer.
        int totalNInputs = data.inputs[0].numChannels + data.inputs[1].numChannels;
        float** allInputs = new float*[totalNInputs];

        for (int i = 0; i < data.inputs[0].numChannels; i ++)
        {
          allInputs[i] = in[i];
        }
        for (int i = 0; i < data.inputs[1].numChannels; i ++)
        {
          allInputs[i + data.inputs[0].numChannels] = side[i];
        }

        AttachInputBuffers(0, totalNInputs, allInputs, data.numSamples);
        mSideChainIsConnected = true;
        delete [] allInputs;
      }
      else
      {
        AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
        mSideChainIsConnected = false;
      }
    }
    else
    {
      AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
    }

    AttachOutputBuffers(0, data.outputs[0].numChannels, out);
    ProcessBuffers(0.0f, data.numSamples);
  }
  else if (processSetup.symbolicSampleSize == kSample64) // TODO: parity for double precision
  {
    double** in = data.inputs[0].channelBuffers64;
    double** out = data.outputs[0].channelBuffers64;

    AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
    AttachOutputBuffers(0, data.outputs[0].numChannels, out);
    ProcessBuffers(0.0, data.numSamples);
  }

  // Midi Out (not implemented yet)
  // if (mDoesMidi) {
  //   IEventList eventList = data.outputEvents;
  //
  //   if (eventList)
  //   {
  //     Event event;
  //
  //     while (!mMidiOutputQueue.Empty()) {
  //       //TODO: parse events and add
  //       eventList.addEvent(event);
  //     }
  //   }
  // }

  return kResultOk;
}