tresult PLUGIN_API Processor::process(ProcessData& data)
{
    if (data.inputParameterChanges)
    {
        int32 numParamsChanged = data.inputParameterChanges->getParameterCount();
        for (int32 index = 0; index < numParamsChanged; index++)
        {
            IParamValueQueue* paramQueue = data.inputParameterChanges->getParameterData(index);
            if (paramQueue)
            {
                ParamValue value;
                int32 sampleOffset;
                int32 numPoints = paramQueue->getPointCount();
                // only the last point of each queue is used here
                switch (paramQueue->getParameterId())
                {
                    case kParamInputGain:
                        if (paramQueue->getPoint(numPoints - 1, sampleOffset, value) == kResultTrue)
                            paramState.inputgain = value;
                        break;

                    case kParamOutputGain:
                        if (paramQueue->getPoint(numPoints - 1, sampleOffset, value) == kResultTrue)
                            paramState.outputgain = value;
                        break;
                }
            }
        }
    }

    SpeakerArrangement arr;
    getBusArrangement(kOutput, 0, arr);
    int32 numChannels = SpeakerArr::getChannelCount(arr);

    for (int32 channel = 0; channel < numChannels; channel++)
    {
        float* inputChannel = data.inputs[0].channelBuffers32[channel];
        float* outputChannel = data.outputs[0].channelBuffers32[channel];
        for (int32 sample = 0; sample < data.numSamples; sample++)
        {
            // copy input to output with a fixed 1.5x gain
            outputChannel[sample] = inputChannel[sample] * 1.5f;
        }
    }
    return kResultTrue;
}
//-----------------------------------------------------------------------------
tresult PLUGIN_API ADelayProcessor::process (ProcessData& data)
{
    if (data.inputParameterChanges)
    {
        int32 paramChangeCount = data.inputParameterChanges->getParameterCount ();
        for (int32 index = 0; index < paramChangeCount; index++)
        {
            IParamValueQueue* queue = data.inputParameterChanges->getParameterData (index);
            if (queue && queue->getParameterId () == kDelayTag)
            {
                int32 valueChangeCount = queue->getPointCount ();
                ParamValue value;
                int32 sampleOffset;
                if (queue->getPoint (valueChangeCount - 1, sampleOffset, value) == kResultTrue)
                    delay = value;
            }
        }
    }
    if (data.numSamples > 0)
    {
        SpeakerArrangement arr;
        getBusArrangement (kOutput, 0, arr);
        int32 numChannels = SpeakerArr::getChannelCount (arr);
        int32 delayInSamples = std::max<int32> (1, (int32)(delay * processSetup.sampleRate)); // we have a minimum of 1 sample delay here
        for (int32 channel = 0; channel < numChannels; channel++)
        {
            float* inputChannel = data.inputs[0].channelBuffers32[channel];
            float* outputChannel = data.outputs[0].channelBuffers32[channel];
            int32 tempBufferPos = bufferPos;
            for (int32 sample = 0; sample < data.numSamples; sample++)
            {
                float tempSample = inputChannel[sample];
                outputChannel[sample] = buffer[channel][tempBufferPos];
                buffer[channel][tempBufferPos] = tempSample;
                tempBufferPos++;
                if (tempBufferPos >= delayInSamples)
                    tempBufferPos = 0;
            }
        }
        bufferPos += data.numSamples;
        while (delayInSamples && bufferPos >= delayInSamples)
            bufferPos -= delayInSamples;
    }
    return kResultTrue;
}
//--------------------------------------------------------------------------------------------------------------
void ParameterChangeTransfer::transferChangesFrom (ParameterChanges& source)
{
    ParamValue value;
    int32 sampleOffset;
    for (int32 i = 0; i < source.getParameterCount (); i++)
    {
        IParamValueQueue* queue = source.getParameterData (i);
        if (queue)
        {
            for (int32 j = 0; j < queue->getPointCount (); j++)
            {
                if (queue->getPoint (j, sampleOffset, value) == kResultTrue)
                {
                    addChange (queue->getParameterId (), value, sampleOffset);
                }
            }
        }
    }
}
//------------------------------------------------------------------------
tresult PLUGIN_API Plug::process (ProcessData& data)
{
    //---1) Read input parameter changes-----------
    IParameterChanges* paramChanges = data.inputParameterChanges;
    if (paramChanges)
    {
        int32 numParamsChanged = paramChanges->getParameterCount ();
        // for each parameter that has changes in this audio block:
        for (int32 i = 0; i < numParamsChanged; i++)
        {
            IParamValueQueue* paramQueue = paramChanges->getParameterData (i);
            if (paramQueue)
            {
                int32 offsetSamples;
                double value;
                int32 numPoints = paramQueue->getPointCount ();
                switch (paramQueue->getParameterId ())
                {
                    case kBypassId:
                        if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue)
                        {
                            bBypass = (value > 0.5f);
                        }
                        break;
                }
            }
        }
    }

    //---2) Read input events-------------
    IEventList* eventList = data.inputEvents;
    if (eventList)
    {
        int32 numEvent = eventList->getEventCount ();
        for (int32 i = 0; i < numEvent; i++)
        {
            Event event;
            if (eventList->getEvent (i, event) == kResultOk)
            {
                switch (event.type)
                {
                    //----------------------
                    case Event::kNoteOnEvent:
                    {
                        mLastNoteOnPitch = event.noteOn.pitch;
                        mLastNoteOnId = event.noteOn.noteId;
                        /*String str;
                        str.printf (STR("noteON %d"), event.noteOff.noteId);
                        sendTextMessage (str);*/
                    }
                    break;

                    //----------------------
                    case Event::kNoteOffEvent:
                    {
                        /*String str;
                        str.printf (STR("noteOff %d"), event.noteOff.noteId);
                        sendTextMessage (str);*/
                    }
                    break;

                    //----------------------
                    case Event::kNoteExpressionTextEvent:
                        if (event.noteExpressionText.typeId == kTextTypeID)
                        {
                            //if (mLastNoteOnId == event.noteExpressionText.noteId)
                            {
                                String str (STR("Text: "));
                                str += event.noteExpressionText.text;
                                String tmp1;
                                tmp1.printInt64 (mLastNoteOnId);
                                String tmp2;
                                tmp2.printInt64 (event.noteExpressionText.noteId);
                                str += STR(" - id:");
                                str += tmp2;
                                str += STR(" - noteOn id:");
                                str += tmp1;
                                sendTextMessage (str);
                            }
                        }
                        else if (event.noteExpressionText.typeId == kPhonemeTypeID)
                        {
                            //if (mLastNoteOnId == event.noteExpressionText.noteId)
                            {
                                String str (STR("Phoneme: "));
                                str += event.noteExpressionText.text;
                                String tmp1;
                                tmp1.printInt64 (mLastNoteOnId);
                                String tmp2;
                                tmp2.printInt64 (event.noteExpressionText.noteId);
                                str += STR(" - id:");
                                str += tmp2;
                                str += STR(" - noteOn id:");
                                str += tmp1;
                            }
                        }
                        break;
                }
            }
        }
    }

    //-------------------------------------
    //---3) Process Audio---------------------
    //-------------------------------------
    if (data.numOutputs == 0)
    {
        // no output bus: nothing to do
        return kResultOk;
    }

    // this plug-in produces no audio: clear the output buffers and flag them as silent
    float** out = data.outputs[0].channelBuffers32;
    for (int32 i = 0; i < data.outputs[0].numChannels; i++)
    {
        memset (out[i], 0, data.numSamples * sizeof (float));
    }
    data.outputs[0].silenceFlags = 0x7fff;

    return kResultOk;
}
tresult PLUGIN_API IPlugVST3Plugin::process(ProcessData& data)
{
    TRACE_PROCESS;

    IMutexLock lock(this);

    if (data.processContext)
        memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

    // process parameters
    IParameterChanges* paramChanges = data.inputParameterChanges;
    if (paramChanges)
    {
        int32 numParamsChanged = paramChanges->getParameterCount();

        // it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
        // for now we just grab the last one
        for (int32 i = 0; i < numParamsChanged; i++)
        {
            IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
            if (paramQueue)
            {
                int32 numPoints = paramQueue->getPointCount();
                int32 offsetSamples;
                double value;

                if (paramQueue->getPoint(numPoints - 1, offsetSamples, value) == kResultTrue)
                {
                    int idx = paramQueue->getParameterId();
                    switch (idx)
                    {
                        case kBypassParam:
                        {
                            bool bypassed = (value > 0.5);
                            if (bypassed != mIsBypassed)
                            {
                                mIsBypassed = bypassed;
                            }
                            break;
                        }
                        case kPresetParam:
                            RestorePreset(FromNormalizedParam(value, 0, NPresets(), 1.));
                            break;
                        // TODO pitch bend, modwheel etc
                        default:
                            if (idx >= 0 && idx < NParams())
                            {
                                GetParam(idx)->SetNormalized((double)value);
                                if (GetGUI())
                                    GetGUI()->SetParameterFromPlug(idx, (double)value, true);
                                OnParamChange(idx);
                            }
                            break;
                    }
                }
            }
        }
    }

    if (DoesMIDI())
    {
        // process events.. only midi note on and note off?
        IEventList* eventList = data.inputEvents;
        if (eventList)
        {
            int32 numEvent = eventList->getEventCount();
            for (int32 i = 0; i < numEvent; i++)
            {
                Event event;
                if (eventList->getEvent(i, event) == kResultOk)
                {
                    IMidiMsg msg;
                    switch (event.type)
                    {
                        case Event::kNoteOnEvent:
                        {
                            msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
                            ProcessMidiMsg(&msg);
                            break;
                        }
                        case Event::kNoteOffEvent:
                        {
                            msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
                            ProcessMidiMsg(&msg);
                            break;
                        }
                    }
                }
            }
        }
    }

#pragma mark process single precision
    if (processSetup.symbolicSampleSize == kSample32)
    {
        if (data.numInputs)
        {
            if (mScChans)
            {
                if (getAudioInput(1)->isActive()) // Sidechain is active
                {
                    mSidechainActive = true;
                    SetInputChannelConnections(0, NInChannels(), true);
                }
                else
                {
                    if (mSidechainActive)
                    {
                        ZeroScratchBuffers();
                        mSidechainActive = false;
                    }
                    SetInputChannelConnections(0, NInChannels(), true);
                    SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
                }
                AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers32, data.numSamples);
                AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers32, data.numSamples);
            }
            else
            {
                SetInputChannelConnections(0, data.inputs[0].numChannels, true);
                SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
                AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers32, data.numSamples);
            }
        }

        for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
        {
            int busChannels = data.outputs[outBus].numChannels;
            SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
            SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
            AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers32);
            chanOffset += busChannels;
        }

        if (mIsBypassed)
            PassThroughBuffers(0.0f, data.numSamples);
        else
            ProcessBuffers(0.0f, data.numSamples); // process buffers single precision
    }
#pragma mark process double precision
    else if (processSetup.symbolicSampleSize == kSample64)
    {
        if (data.numInputs)
        {
            if (mScChans)
            {
                if (getAudioInput(1)->isActive()) // Sidechain is active
                {
                    mSidechainActive = true;
                    SetInputChannelConnections(0, NInChannels(), true);
                }
                else
                {
                    if (mSidechainActive)
                    {
                        ZeroScratchBuffers();
                        mSidechainActive = false;
                    }
                    SetInputChannelConnections(0, NInChannels(), true);
                    SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
                }
                AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers64, data.numSamples);
                AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers64, data.numSamples);
            }
            else
            {
                SetInputChannelConnections(0, data.inputs[0].numChannels, true);
                SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
                AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers64, data.numSamples);
            }
        }

        for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
        {
            int busChannels = data.outputs[outBus].numChannels;
            SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
            SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
            AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers64);
            chanOffset += busChannels;
        }

        if (mIsBypassed)
            PassThroughBuffers(0.0, data.numSamples);
        else
            ProcessBuffers(0.0, data.numSamples); // process buffers double precision
    }

    // Midi Out
    // if (mDoesMidi) {
    //   IEventList eventList = data.outputEvents;
    //
    //   if (eventList)
    //   {
    //     Event event;
    //
    //     while (!mMidiOutputQueue.Empty()) {
    //       //TODO: parse events and add
    //       eventList.addEvent(event);
    //     }
    //   }
    // }

    return kResultOk;
}
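// The comment above notes that finer automation resolution is possible by reading every
// point in a parameter queue instead of only the last one. Below is a hedged sketch of
// that idea, not taken from the IPlug code above; forEachPoint and its callback are
// illustrative names only.
#include "pluginterfaces/vst/ivstparameterchanges.h"

// Visit every automation point in a queue so a plug-in could split the audio block at
// each sample offset rather than applying only the final value.
template <typename Callback>
void forEachPoint (Steinberg::Vst::IParamValueQueue* queue, Callback&& apply)
{
    using namespace Steinberg;
    if (!queue)
        return;
    int32 numPoints = queue->getPointCount ();
    for (int32 p = 0; p < numPoints; p++)
    {
        Vst::ParamValue value;
        int32 sampleOffset;
        if (queue->getPoint (p, sampleOffset, value) == kResultTrue)
            apply (sampleOffset, value); // e.g. process samples up to sampleOffset, then update the parameter
    }
}
// usage sketch: forEachPoint (paramQueue, [&] (Steinberg::int32 offset, Steinberg::Vst::ParamValue v) { /* ... */ });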
//------------------------------------------------------------------------
tresult PLUGIN_API AGainSimple::process (ProcessData& data)
{
    // finally the process function
    // In this example there are 4 steps:
    // 1) Read input parameter changes coming from the host (in order to adapt our model values)
    // 2) Read input events coming from the host (we apply a gain reduction depending on the velocity of the pressed key)
    // 3) Apply the gain to the input buffer and write the result to the output buffer
    // 4) Write the new VuMeter value to the output parameter queue

    //---1) Read input parameter changes-----------
    IParameterChanges* paramChanges = data.inputParameterChanges;
    if (paramChanges)
    {
        int32 numParamsChanged = paramChanges->getParameterCount ();
        // for each parameter that has changes in this audio block:
        for (int32 i = 0; i < numParamsChanged; i++)
        {
            IParamValueQueue* paramQueue = paramChanges->getParameterData (i);
            if (paramQueue)
            {
                int32 offsetSamples;
                double value;
                int32 numPoints = paramQueue->getPointCount ();
                switch (paramQueue->getParameterId ())
                {
                    case kGainId:
                        // we use only the last point of the queue in this example.
                        // for some kinds of parameters it makes sense to retrieve all points
                        // and process the audio block in smaller sub-blocks.
                        if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue)
                            fGain = (float)value;
                        break;

                    case kBypassId:
                        if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue)
                            bBypass = (value > 0.5f);
                        break;
                }
            }
        }
    }

    //---2) Read input events-------------
    IEventList* eventList = data.inputEvents;
    if (eventList)
    {
        int32 numEvent = eventList->getEventCount ();
        for (int32 i = 0; i < numEvent; i++)
        {
            Event event;
            if (eventList->getEvent (i, event) == kResultOk)
            {
                switch (event.type)
                {
                    //----------------------
                    case Event::kNoteOnEvent:
                        // use the velocity as gain modifier
                        fGainReduction = event.noteOn.velocity;
                        break;

                    //----------------------
                    case Event::kNoteOffEvent:
                        // noteOff resets the reduction
                        fGainReduction = 0.f;
                        break;
                }
            }
        }
    }

    //-------------------------------------
    //---3) Process Audio---------------------
    //-------------------------------------
    if (data.numInputs == 0 || data.numOutputs == 0)
    {
        // nothing to do
        return kResultOk;
    }

    // (simplification) we assume in this example that the input channel count equals the output channel count
    int32 numChannels = data.inputs[0].numChannels;

    //---get audio buffers----------------
    float** in = data.inputs[0].channelBuffers32;
    float** out = data.outputs[0].channelBuffers32;

    //---check if silence---------------
    // normally each channel would have to be checked (simplification)
    if (data.inputs[0].silenceFlags != 0)
    {
        // mark output silence too
        data.outputs[0].silenceFlags = data.inputs[0].silenceFlags;

        // the plug-in has to make sure that, if it sets the silence flags, the output buffers are cleared
        int32 sampleFrames = data.numSamples;
        for (int32 i = 0; i < numChannels; i++)
        {
            // no need to clear if the buffers are the same (in this case the input buffers were already cleared by the host)
            if (in[i] != out[i])
            {
                memset (out[i], 0, sampleFrames * sizeof (float));
            }
        }

        // nothing more to do at this point
        return kResultOk;
    }

    // mark our outputs as not silent
    data.outputs[0].silenceFlags = 0;

    //---in bypass mode the outputs should equal the inputs-----
    if (bBypass)
    {
        int32 sampleFrames = data.numSamples;
        for (int32 i = 0; i < numChannels; i++)
        {
            // no need to copy if the buffers are the same
            if (in[i] != out[i])
                memcpy (out[i], in[i], sampleFrames * sizeof (float));
        }
        // in this example we don't update the VuMeter in bypass
    }
    else
    {
        float fVuPPM = 0.f;

        //---apply gain factor----------
        float gain = (fGain - fGainReduction);
        if (bHalfGain)
        {
            gain = gain * 0.5f;
        }

        if (gain < 0.0000001)
        {
            int32 sampleFrames = data.numSamples;
            for (int32 i = 0; i < numChannels; i++)
            {
                memset (out[i], 0, sampleFrames * sizeof (float));
            }
            data.outputs[0].silenceFlags = (1 << numChannels) - 1; // this sets the flag to 1 for all channels
            fVuPPM = 0.f;
        }
        else
        {
            // in a real plug-in it would be better to do dezippering to avoid jumps (clicks) when the gain value changes
            for (int32 i = 0; i < numChannels; i++)
            {
                int32 sampleFrames = data.numSamples;
                float* ptrIn = in[i];
                float* ptrOut = out[i];
                float tmp;
                while (--sampleFrames >= 0)
                {
                    // apply gain
                    tmp = (*ptrIn++) * gain;
                    (*ptrOut++) = tmp;

                    // track only positive values
                    if (tmp > fVuPPM)
                        fVuPPM = tmp;
                }
            }
        }

        //---4) Write output parameter changes-----------
        IParameterChanges* outParamChanges = data.outputParameterChanges;
        // a new VuMeter value will be sent to the host
        // (the host will send it back in sync to our controller for updating our editor)
        if (outParamChanges && fVuPPMOld != fVuPPM)
        {
            int32 index = 0;
            IParamValueQueue* paramQueue = outParamChanges->addParameterData (kVuPPMId, index);
            if (paramQueue)
            {
                int32 index2 = 0;
                paramQueue->addPoint (0, fVuPPM, index2);
            }
        }
        fVuPPMOld = fVuPPM;
    }
    return kResultOk;
}
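// The comment above suggests dezippering the gain in a real plug-in. Below is a minimal,
// hedged sketch of one way to do that (a per-sample one-pole ramp towards the target gain);
// it is not part of the SDK example, and SmoothedGain is a hypothetical helper.
#include <cstdint>

struct SmoothedGain
{
    float current = 1.f;    // gain applied to the most recent sample (state kept across blocks)
    float coeff = 0.001f;   // per-sample smoothing factor; smaller values ramp more slowly

    // Apply a smoothly ramped gain to one channel buffer.
    void apply (const float* in, float* out, int32_t numSamples, float target)
    {
        for (int32_t s = 0; s < numSamples; s++)
        {
            current += coeff * (target - current); // one-pole lowpass towards the target gain
            out[s] = in[s] * current;
        }
    }
};
// usage sketch per channel, with the names from the example above:
// smoother.apply (in[i], out[i], data.numSamples, gain);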
tresult PLUGIN_API IPlugVST3::process(ProcessData& data)
{
    TRACE_PROCESS;

    IMutexLock lock(this); // TODO: is this the best place to lock the mutex?

    if (data.processContext) // guard against hosts that pass no process context
        memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

    // process parameters
    IParameterChanges* paramChanges = data.inputParameterChanges;
    if (paramChanges)
    {
        int32 numParamsChanged = paramChanges->getParameterCount();

        // it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
        // for now we just grab the last one
        for (int32 i = 0; i < numParamsChanged; i++)
        {
            IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
            if (paramQueue)
            {
                int32 numPoints = paramQueue->getPointCount();
                int32 offsetSamples;
                double value;

                if (paramQueue->getPoint(numPoints - 1, offsetSamples, value) == kResultTrue)
                {
                    int idx = paramQueue->getParameterId();
                    if (idx >= 0 && idx < NParams())
                    {
                        GetParam(idx)->SetNormalized((double)value);
                        if (GetGUI())
                            GetGUI()->SetParameterFromPlug(idx, (double)value, true);
                        OnParamChange(idx);
                    }
                }
            }
        }
    }

    if (mDoesMidi)
    {
        // process events.. only midi note on and note off?
        IEventList* eventList = data.inputEvents;
        if (eventList)
        {
            int32 numEvent = eventList->getEventCount();
            for (int32 i = 0; i < numEvent; i++)
            {
                Event event;
                if (eventList->getEvent(i, event) == kResultOk)
                {
                    IMidiMsg msg;
                    switch (event.type)
                    {
                        case Event::kNoteOnEvent:
                        {
                            msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
                            ProcessMidiMsg(&msg);
                            break;
                        }
                        case Event::kNoteOffEvent:
                        {
                            msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
                            ProcessMidiMsg(&msg);
                            break;
                        }
                    }
                }
            }
        }
    }

    // process audio
    if (data.numInputs == 0 || data.numOutputs == 0)
    {
        // nothing to do
        return kResultOk;
    }

    if (processSetup.symbolicSampleSize == kSample32)
    {
        float** in = data.inputs[0].channelBuffers32;
        float** out = data.outputs[0].channelBuffers32;

        if (mScChans)
        {
            float** side = data.inputs[1].channelBuffers32;

            if (getAudioInput(1)->isActive())
            {
                // concatenate the main and sidechain channel pointers into one array
                // (note: this allocates inside the audio callback, which a strictly real-time-safe implementation would avoid)
                int totalNInputs = data.inputs[0].numChannels + data.inputs[1].numChannels;
                float** allInputs = new float*[totalNInputs];

                for (int i = 0; i < data.inputs[0].numChannels; i++)
                {
                    allInputs[i] = in[i];
                }

                for (int i = 0; i < data.inputs[1].numChannels; i++)
                {
                    allInputs[i + data.inputs[0].numChannels] = side[i];
                }

                AttachInputBuffers(0, totalNInputs, allInputs, data.numSamples);
                mSideChainIsConnected = true;
                delete [] allInputs;
            }
            else
            {
                AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
                mSideChainIsConnected = false;
            }
        }
        else
        {
            AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
        }

        AttachOutputBuffers(0, data.outputs[0].numChannels, out);
        ProcessBuffers(0.0f, data.numSamples);
    }
    else if (processSetup.symbolicSampleSize == kSample64) // TODO: parity for double precision
    {
        double** in = data.inputs[0].channelBuffers64;
        double** out = data.outputs[0].channelBuffers64;

        AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
        AttachOutputBuffers(0, data.outputs[0].numChannels, out);
        ProcessBuffers(0.0, data.numSamples);
    }

    // Midi Out
    // if (mDoesMidi) {
    //   IEventList eventList = data.outputEvents;
    //
    //   if (eventList)
    //   {
    //     Event event;
    //
    //     while (!mMidiOutputQueue.Empty()) {
    //       //TODO: parse events and add
    //       eventList.addEvent(event);
    //     }
    //   }
    // }

    return kResultOk;
}
tresult PLUGIN_API BLITSineHardSync_processor::process(ProcessData& data)
{
    //-------------------
    // update parameters
    //-------------------
    if (data.inputParameterChanges)
    {
        int32 numParamsChanged = data.inputParameterChanges->getParameterCount();
        for (int32 ii = 0; ii < numParamsChanged; ii++)
        {
            IParamValueQueue* paramQueue = data.inputParameterChanges->getParameterData(ii);
            if (paramQueue)
            {
                int32 offsetSamples;
                double valueNormalized;

                // get parameter
                if (paramQueue->getPoint(paramQueue->getPointCount() - 1, offsetSamples, valueNormalized) == kResultTrue)
                {
                    ParamID id = paramQueue->getParameterId();
                    if (id == Leak)
                    {
                        // -> [0.99, 1.0]
                        double value = 0.99 + 0.01 * valueNormalized;
                        _blit.setLeak(value);
                    }
                    else if (id == Slave)
                    {
                        // -> [1.0, 2.0]
                        double value = 1.0 + valueNormalized;
                        _blit.setSlave(value);
                    }
                }
            }
        }
    }

    //----------------
    // process events
    //----------------
    if (data.inputEvents)
    {
        int nEventCount = data.inputEvents->getEventCount();
        for (int ii = 0; ii < nEventCount; ii++)
        {
            Event e;
            tresult result = data.inputEvents->getEvent(ii, e);
            if (result != kResultOk)
                continue;

            if (e.type == Event::kNoteOnEvent)
            {
                _blit.trigger(e.noteOn, processSetup.sampleRate);
            }
            else if (e.type == Event::kNoteOffEvent)
            {
                _blit.release(e.noteOff);
            }
        }
    }

    if (_blit.is_silent())
    {
        return kResultOk;
    }

    //
    if (data.numInputs == 0 && data.numOutputs == 1 && data.outputs[0].numChannels == 2)
    {
        Sample32** out = data.outputs[0].channelBuffers32;
        const int32 sampleFrames = data.numSamples;
        for (int ii = 0; ii < sampleFrames; ii++)
        {
            out[0][ii] = out[1][ii] = _blit.render();
            _blit.next();
        }
    }
    return kResultOk;
}