//------------------------------------------------------------------------ tresult PLUGIN_API AGainSimple::process (ProcessData& data) { // finally the process function // In this example there are 4 steps: // 1) Read inputs parameters coming from host (in order to adapt our model values) // 2) Read inputs events coming from host (we apply a gain reduction depending of the velocity of pressed key) // 3) Process the gain of the input buffer to the output buffer // 4) Write the new VUmeter value to the output Parameters queue //---1) Read inputs parameter changes----------- IParameterChanges* paramChanges = data.inputParameterChanges; if (paramChanges) { int32 numParamsChanged = paramChanges->getParameterCount (); // for each parameter which are some changes in this audio block: for (int32 i = 0; i < numParamsChanged; i++) { IParamValueQueue* paramQueue = paramChanges->getParameterData (i); if (paramQueue) { int32 offsetSamples; double value; int32 numPoints = paramQueue->getPointCount (); switch (paramQueue->getParameterId ()) { case kGainId: // we use in this example only the last point of the queue. // in some wanted case for specific kind of parameter it makes sense to retrieve all points // and process the whole audio block in small blocks. 
if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue) fGain = (float)value; break; case kBypassId: if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue) bBypass = (value > 0.5f); break; } } } } //---2) Read input events------------- IEventList* eventList = data.inputEvents; if (eventList) { int32 numEvent = eventList->getEventCount (); for (int32 i = 0; i < numEvent; i++) { Event event; if (eventList->getEvent (i, event) == kResultOk) { switch (event.type) { //---------------------- case Event::kNoteOnEvent: // use the velocity as gain modifier fGainReduction = event.noteOn.velocity; break; //---------------------- case Event::kNoteOffEvent: // noteOff reset the reduction fGainReduction = 0.f; break; } } } } //------------------------------------- //---3) Process Audio--------------------- //------------------------------------- if (data.numInputs == 0 || data.numOutputs == 0) { // nothing to do return kResultOk; } // (simplification) we suppose in this example that we have the same input channel count than the output int32 numChannels = data.inputs[0].numChannels; //---get audio buffers---------------- float** in = data.inputs[0].channelBuffers32; float** out = data.outputs[0].channelBuffers32; //---check if silence--------------- // normally we have to check each channel (simplification) if (data.inputs[0].silenceFlags != 0) { // mark output silence too data.outputs[0].silenceFlags = data.inputs[0].silenceFlags; // the Plug-in has to be sure that if it sets the flags silence that the output buffer are clear int32 sampleFrames = data.numSamples; for (int32 i = 0; i < numChannels; i++) { // dont need to be cleared if the buffers are the same (in this case input buffer are already cleared by the host) if (in[i] != out[i]) { memset (out[i], 0, sampleFrames * sizeof (float)); } } // nothing to do at this point return kResultOk; } // mark our outputs has not silent data.outputs[0].silenceFlags = 0; //---in bypass mode 
outputs should be like inputs----- if (bBypass) { int32 sampleFrames = data.numSamples; for (int32 i = 0; i < numChannels; i++) { // dont need to be copied if the buffers are the same if (in[i] != out[i]) memcpy (out[i], in[i], sampleFrames * sizeof (float)); } // in this example we dont update the VuMeter in Bypass } else { float fVuPPM = 0.f; //---apply gain factor---------- float gain = (fGain - fGainReduction); if (bHalfGain) { gain = gain * 0.5f; } if (gain < 0.0000001) { int32 sampleFrames = data.numSamples; for (int32 i = 0; i < numChannels; i++) { memset (out[i], 0, sampleFrames * sizeof (float)); } data.outputs[0].silenceFlags = (1 << numChannels) - 1; // this will set to 1 all channels fVuPPM = 0.f; } else { // in real Plug-in it would be better to do dezippering to avoid jump (click) in gain value for (int32 i = 0; i < numChannels; i++) { int32 sampleFrames = data.numSamples; float* ptrIn = in[i]; float* ptrOut = out[i]; float tmp; while (--sampleFrames >= 0) { // apply gain tmp = (*ptrIn++) * gain; (*ptrOut++) = tmp; // check only positiv values if (tmp > fVuPPM) fVuPPM = tmp; } } } //---3) Write outputs parameter changes----------- IParameterChanges* paramChanges = data.outputParameterChanges; // a new value of VuMeter will be send to the host // (the host will send it back in sync to our controller for updating our editor) if (paramChanges && fVuPPMOld != fVuPPM) { int32 index = 0; IParamValueQueue* paramQueue = paramChanges->addParameterData (kVuPPMId, index); if (paramQueue) { int32 index2 = 0; paramQueue->addPoint (0, fVuPPM, index2); } } fVuPPMOld = fVuPPM; } return kResultOk; }
//------------------------------------------------------------------------ tresult PLUGIN_API Plug::process (ProcessData& data) { //---1) Read inputs parameter changes----------- IParameterChanges* paramChanges = data.inputParameterChanges; if (paramChanges) { int32 numParamsChanged = paramChanges->getParameterCount (); // for each parameter which are some changes in this audio block: for (int32 i = 0; i < numParamsChanged; i++) { IParamValueQueue* paramQueue = paramChanges->getParameterData (i); if (paramQueue) { int32 offsetSamples; double value; int32 numPoints = paramQueue->getPointCount (); switch (paramQueue->getParameterId ()) { case kBypassId: if (paramQueue->getPoint (numPoints - 1, offsetSamples, value) == kResultTrue) { bBypass = (value > 0.5f); } break; } } } } //---2) Read input events------------- IEventList* eventList = data.inputEvents; if (eventList) { int32 numEvent = eventList->getEventCount (); for (int32 i = 0; i < numEvent; i++) { Event event; if (eventList->getEvent (i, event) == kResultOk) { switch (event.type) { //---------------------- case Event::kNoteOnEvent: { mLastNoteOnPitch = event.noteOn.pitch; mLastNoteOnId = event.noteOn.noteId; /*String str; str.printf (STR("noteON %d"), event.noteOff.noteId); sendTextMessage (str);*/ } break; //---------------------- case Event::kNoteOffEvent: { /* String str; str.printf (STR("noteOff %d"), event.noteOff.noteId); sendTextMessage (str); */} break; //---------------------- case Event::kNoteExpressionTextEvent: // noteOff reset the reduction if (event.noteExpressionText.typeId == kTextTypeID) { //if (mLastNoteOnId == event.noteExpressionText.noteId) { String str (STR("Text: ")); str += event.noteExpressionText.text; String tmp1; tmp1.printInt64 (mLastNoteOnId); String tmp2; tmp2.printInt64 (event.noteExpressionText.noteId); str += STR(" - id:"); str += tmp2; str += STR(" - noteOn id:"); str += tmp1; sendTextMessage (str); } } else if (event.noteExpressionText.typeId == kPhonemeTypeID) { //if 
(mLastNoteOnId == event.noteExpressionText.noteId) { String str (STR("Phoneme: ")); str += event.noteExpressionText.text; String tmp1; tmp1.printInt64 (mLastNoteOnId); String tmp2; tmp2.printInt64 (event.noteExpressionText.noteId); str += STR(" - id:"); str += tmp2; str += STR(" - noteOn id:"); str += tmp1; } } break; } } } } //------------------------------------- //---3) Process Audio--------------------- //------------------------------------- if (data.numOutputs == 0) { // nothing to do return kResultOk; } // no output float** out = data.outputs[0].channelBuffers32; for (int32 i = 0; i < data.outputs[0].numChannels; i++) { memset (out[i], 0, data.numSamples * sizeof (float)); } data.outputs[0].silenceFlags = 0x7fff; return kResultOk; }
tresult PLUGIN_API IPlugVST3::process(ProcessData& data)
{
	TRACE_PROCESS;
	IMutexLock lock(this); // TODO: is this the best place to lock the mutex?

	// FIX: ProcessData::processContext is optional — hosts may pass a null
	// pointer, and memcpy from null is a crash; keep the previous context then
	if (data.processContext)
		memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

	// process parameters
	IParameterChanges* paramChanges = data.inputParameterChanges;
	if (paramChanges)
	{
		int32 numParamsChanged = paramChanges->getParameterCount();

		// it is possible to get a finer resolution of control here by retrieving
		// more values (points) from the queue — for now we just grab the last one
		for (int32 i = 0; i < numParamsChanged; i++)
		{
			IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
			if (paramQueue)
			{
				int32 numPoints = paramQueue->getPointCount();
				int32 offsetSamples;
				double value;
				// FIX: guard against an empty queue (getPoint(-1, ...) is invalid)
				if (numPoints > 0 && paramQueue->getPoint(numPoints - 1, offsetSamples, value) == kResultTrue)
				{
					int idx = paramQueue->getParameterId();
					if (idx >= 0 && idx < NParams())
					{
						GetParam(idx)->SetNormalized((double)value);
						if (GetGUI())
							GetGUI()->SetParameterFromPlug(idx, (double)value, true);
						OnParamChange(idx);
					}
				}
			}
		}
	}

	if (mDoesMidi)
	{
		// process events — only MIDI note-on and note-off are translated here
		IEventList* eventList = data.inputEvents;
		if (eventList)
		{
			int32 numEvent = eventList->getEventCount();
			for (int32 i = 0; i < numEvent; i++)
			{
				Event event;
				if (eventList->getEvent(i, event) == kResultOk)
				{
					IMidiMsg msg;
					switch (event.type)
					{
						case Event::kNoteOnEvent:
						{
							// VST3 velocity is normalized [0..1]; IMidiMsg expects [0..127]
							msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
							ProcessMidiMsg(&msg);
							break;
						}
						case Event::kNoteOffEvent:
						{
							msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
							ProcessMidiMsg(&msg);
							break;
						}
					}
				}
			}
		}
	}

	// process audio
	if (data.numInputs == 0 || data.numOutputs == 0)
	{
		// nothing to do
		return kResultOk;
	}

	if (processSetup.symbolicSampleSize == kSample32)
	{
		float** in = data.inputs[0].channelBuffers32;
		float** out = data.outputs[0].channelBuffers32;

		if (mScChans)
		{
			float** side = data.inputs[1].channelBuffers32;

			if (getAudioInput(1)->isActive())
			{
				// concatenate main + sidechain channel pointers into one array
				int totalNInputs = data.inputs[0].numChannels + data.inputs[1].numChannels;
				// NOTE(review): heap allocation on the realtime audio thread — a
				// preallocated member buffer would be safer; kept to preserve behavior
				float** allInputs = new float*[totalNInputs];
				for (int i = 0; i < data.inputs[0].numChannels; i++)
				{
					allInputs[i] = in[i];
				}
				for (int i = 0; i < data.inputs[1].numChannels; i++)
				{
					allInputs[i + data.inputs[0].numChannels] = side[i];
				}
				AttachInputBuffers(0, totalNInputs, allInputs, data.numSamples);
				mSideChainIsConnected = true;
				delete [] allInputs;
			}
			else
			{
				AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
				mSideChainIsConnected = false;
			}
		}
		else
		{
			AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
		}

		AttachOutputBuffers(0, data.outputs[0].numChannels, out);
		ProcessBuffers(0.0f, data.numSamples);
	}
	else if (processSetup.symbolicSampleSize == kSample64) // TODO: parity for double precision (no sidechain handling here yet)
	{
		double** in = data.inputs[0].channelBuffers64;
		double** out = data.outputs[0].channelBuffers64;
		AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
		AttachOutputBuffers(0, data.outputs[0].numChannels, out);
		ProcessBuffers(0.0, data.numSamples);
	}

	// Midi Out
	// if (mDoesMidi) {
	//   IEventList eventList = data.outputEvents;
	//
	//   if (eventList)
	//   {
	//     Event event;
	//
	//     while (!mMidiOutputQueue.Empty()) {
	//       //TODO: parse events and add
	//       eventList.addEvent(event);
	//     }
	//   }
	// }

	return kResultOk;
}