Example #1
void IPlugAAX::RenderAudio(AAX_SIPlugRenderInfo* ioRenderInfo)
{
  TRACE_PROCESS;

  IMutexLock lock(this);

  // Get bypass parameter value
  bool bypass = false;
  mBypassParameter->GetValueAsBool(&bypass);
  
  AAX_EStemFormat inFormat, outFormat;
  Controller()->GetInputStemFormat(&inFormat);
  Controller()->GetOutputStemFormat(&outFormat);
  
  if (DoesMIDI()) 
  {
    AAX_IMIDINode* midiIn = ioRenderInfo->mInputNode;
    AAX_CMidiStream* midiBuffer = midiIn->GetNodeBuffer();
    AAX_CMidiPacket* midiBufferPtr = midiBuffer->mBuffer;
    uint32_t packets_count = midiBuffer->mBufferSize;
    
    // Setup MIDI Out node pointers 
    // AAX_IMIDINode* midiNodeOut = instance->mMIDINodeOutP;
    // AAX_CMidiStream* midiBufferOut = midiNodeOut->GetNodeBuffer();
    // AAX_CMidiPacket* midiBufferOutPtr = midiBufferOut->mBuffer;
        
    for (uint32_t i = 0; i < packets_count; i++, midiBufferPtr++)
    {
      IMidiMsg msg(midiBufferPtr->mTimestamp, midiBufferPtr->mData[0], midiBufferPtr->mData[1], midiBufferPtr->mData[2]);
      ProcessMidiMsg(&msg);
    }
  }
  
  AAX_IMIDINode* transportNode = ioRenderInfo->mTransportNode;
  mTransport = transportNode->GetTransport();

  int32_t numSamples = *(ioRenderInfo->mNumSamples);
  int32_t numInChannels = AAX_STEM_FORMAT_CHANNEL_COUNT(inFormat);
  int32_t numOutChannels = AAX_STEM_FORMAT_CHANNEL_COUNT(outFormat);

  SetInputChannelConnections(0, numInChannels, true);
  SetInputChannelConnections(numInChannels, NInChannels() - numInChannels, false);
  AttachInputBuffers(0, NInChannels(), ioRenderInfo->mAudioInputs, numSamples);
  
  SetOutputChannelConnections(0, numOutChannels, true);
  SetOutputChannelConnections(numOutChannels, NOutChannels() - numOutChannels, false);
  
  AttachOutputBuffers(0, NOutChannels(), ioRenderInfo->mAudioOutputs);
  
  if (bypass) 
  {
    PassThroughBuffers(0.0f, numSamples);
  }
  else 
  {
    ProcessBuffers(0.0f, numSamples);
  }
}
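For orientation, here is a minimal sketch of what the bypass branch above conceptually selects: a pass-through stage just copies each connected input channel to the matching output channel, while ProcessBuffers hands the attached buffers to the plug-in's DSP. The function below is an illustration only, with a hypothetical name and signature; it is not the actual IPlug PassThroughBuffers implementation.

#include <cstring>

// Illustrative only: what a pass-through stage conceptually does when the
// plug-in is bypassed. Copies nFrames samples from every connected input
// channel to the corresponding output channel.
static void PassThroughSketch(float** inputs, float** outputs, int nChans, int nFrames)
{
  for (int c = 0; c < nChans; ++c)
    std::memcpy(outputs[c], inputs[c], nFrames * sizeof(float));
}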
/**
* ProcessL
*
* Converts PCM samples to IMA ADPCM samples in blocks of
* KImaAdpcmBlockAlign (256) bytes; 1010 source bytes are required to
* fill one 256 byte block.
*
* @param aSrc source buffer
* @param aDst destination buffer
* @return CMMFSwCodec::TCodecProcessResult
* @pre If this is the last buffer and the source contains < 1010 bytes,
*      the input is discarded: the codec must have enough data to process
*      an entire frame. All other source buffers must contain at least
*      1010 bytes, and the destination buffer must have room for at least
*      one complete 256 byte block.
*/
CMMFSwCodec::TCodecProcessResult CMMFPcm16ToImaAdpcmCodec::ProcessL(const CMMFBuffer& aSrc, CMMFBuffer& aDst)
	{
	CMMFSwCodec::TCodecProcessResult result;
	result.iCodecProcessStatus = TCodecProcessResult::EProcessComplete;
	
	//convert from generic CMMFBuffer to CMMFDataBuffer
	const CMMFDataBuffer* source = STATIC_CAST(const CMMFDataBuffer*, &aSrc);
	CMMFDataBuffer* destination  = STATIC_CAST(CMMFDataBuffer*, &aDst);
	
	//[ source and destination must not be null ]
	if( !source || !destination )
		User::Leave( KErrArgument );
	
	//[ check preconditions ]
	if( !BuffersStatus( source, destination ))
		{
		User::Leave( KErrArgument );
		}
	
	//[ code the buffers ]
	ProcessBuffers( *source, *destination, result );
	
	return result;	
	}
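The block arithmetic described in the documentation above (256-byte coded blocks, 1010 PCM source bytes per block) can be checked with a small standalone sketch. The helper names below are hypothetical and are not part of CMMFPcm16ToImaAdpcmCodec; the constants are restated here only for the sketch.

// Hypothetical helpers illustrating the PCM -> IMA ADPCM block arithmetic
// stated in the ProcessL documentation: each 256-byte coded block consumes
// 1010 bytes of 16-bit PCM source data.
const int KImaAdpcmBlockAlign  = 256;   // coded block size in bytes
const int KImaAdpcmSrcPerBlock = 1010;  // PCM bytes consumed per block

inline int CompleteBlocks(int aSrcBytes)   { return aSrcBytes / KImaAdpcmSrcPerBlock; }
inline int CodedBytes(int aSrcBytes)       { return CompleteBlocks(aSrcBytes) * KImaAdpcmBlockAlign; }
inline int LeftoverSrcBytes(int aSrcBytes) { return aSrcBytes % KImaAdpcmSrcPerBlock; }

// Example: a 4096-byte source buffer yields 4 complete blocks (1024 coded
// bytes) and leaves 56 source bytes for a later call.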
tresult PLUGIN_API IPlugVST3Plugin::process(ProcessData& data)
{
  TRACE_PROCESS;

  IMutexLock lock(this);

  if(data.processContext)
    memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));

  //process parameters
  IParameterChanges* paramChanges = data.inputParameterChanges;
  if (paramChanges)
  {
    int32 numParamsChanged = paramChanges->getParameterCount();

    //it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
    //for now we just grab the last one

    for (int32 i = 0; i < numParamsChanged; i++)
    {
      IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
      if (paramQueue)
      {
        int32 numPoints = paramQueue->getPointCount();
        int32 offsetSamples;
        double value;

        if (paramQueue->getPoint(numPoints - 1,  offsetSamples, value) == kResultTrue)
        {
          int idx = paramQueue->getParameterId();

          switch (idx)
          {
            case kBypassParam:
            {
              bool bypassed = (value > 0.5);
              
              if (bypassed != mIsBypassed)
              {
                mIsBypassed = bypassed;
              }

              break;
            }
            case kPresetParam:
              RestorePreset(FromNormalizedParam(value, 0, NPresets(), 1.));
              break;
              //TODO pitch bend, modwheel etc
            default:
              if (idx >= 0 && idx < NParams())
              {
                GetParam(idx)->SetNormalized((double)value);
                if (GetGUI()) GetGUI()->SetParameterFromPlug(idx, (double)value, true);
                OnParamChange(idx);
              }
              break;
          }

        }
      }
    }
  }

  if (DoesMIDI())
  {
    // process events: currently only MIDI note-on and note-off are handled
    IEventList* eventList = data.inputEvents;
    if (eventList)
    {
      int32 numEvent = eventList->getEventCount();
      for (int32 i=0; i<numEvent; i++)
      {
        Event event;
        if (eventList->getEvent(i, event) == kResultOk)
        {
          IMidiMsg msg;
          switch (event.type)
          {
            case Event::kNoteOnEvent:
            {
              msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
              ProcessMidiMsg(&msg);
              break;
            }

            case Event::kNoteOffEvent:
            {
              msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
              ProcessMidiMsg(&msg);
              break;
            }
          }
        }
      }
    }
  }

#pragma mark process single precision

  if (processSetup.symbolicSampleSize == kSample32)
  {
    if (data.numInputs)
    {
      if (mScChans)
      {
        if (getAudioInput(1)->isActive()) // Sidechain is active
        {
          mSidechainActive = true;
          SetInputChannelConnections(0, NInChannels(), true);
        }
        else
        {
          if (mSidechainActive)
          {
            ZeroScratchBuffers();
            mSidechainActive = false;
          }

          SetInputChannelConnections(0, NInChannels(), true);
          SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
        }

        AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers32, data.numSamples);
        AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers32, data.numSamples);
      }
      else
      {
        SetInputChannelConnections(0, data.inputs[0].numChannels, true);
        SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
        AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers32, data.numSamples);
      }
    }

    for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
    {
      int busChannels = data.outputs[outBus].numChannels;
      SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
      SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
      AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers32);
      chanOffset += busChannels;
    }

    if (mIsBypassed)
      PassThroughBuffers(0.0f, data.numSamples);
    else
      ProcessBuffers(0.0f, data.numSamples); // process buffers single precision
  }

#pragma mark process double precision

  else if (processSetup.symbolicSampleSize == kSample64)
  {
    if (data.numInputs)
    {
      if (mScChans)
      {
        if (getAudioInput(1)->isActive()) // Sidechain is active
        {
          mSidechainActive = true;
          SetInputChannelConnections(0, NInChannels(), true);
        }
        else
        {
          if (mSidechainActive)
          {
            ZeroScratchBuffers();
            mSidechainActive = false;
          }

          SetInputChannelConnections(0, NInChannels(), true);
          SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - mScChans, false);
        }

        AttachInputBuffers(0, NInChannels() - mScChans, data.inputs[0].channelBuffers64, data.numSamples);
        AttachInputBuffers(mScChans, NInChannels() - mScChans, data.inputs[1].channelBuffers64, data.numSamples);
      }
      else
      {
        SetInputChannelConnections(0, data.inputs[0].numChannels, true);
        SetInputChannelConnections(data.inputs[0].numChannels, NInChannels() - data.inputs[0].numChannels, false);
        AttachInputBuffers(0, NInChannels(), data.inputs[0].channelBuffers64, data.numSamples);
      }
    }

    for (int outBus = 0, chanOffset = 0; outBus < data.numOutputs; outBus++)
    {
      int busChannels = data.outputs[outBus].numChannels;
      SetOutputChannelConnections(chanOffset, busChannels, (bool) getAudioOutput(outBus)->isActive());
      SetOutputChannelConnections(chanOffset + busChannels, NOutChannels() - (chanOffset + busChannels), false);
      AttachOutputBuffers(chanOffset, busChannels, data.outputs[outBus].channelBuffers64);
      chanOffset += busChannels;
    }

    if (mIsBypassed)
      PassThroughBuffers(0.0, data.numSamples);
    else
      ProcessBuffers(0.0, data.numSamples); // process buffers double precision
  }

// Midi Out
//  if (mDoesMidi) {
//    IEventList eventList = data.outputEvents;
//
//    if (eventList)
//    {
//      Event event;
//
//      while (!mMidiOutputQueue.Empty()) {
//        //TODO: parse events and add
//        eventList.addEvent(event);
//      }
//    }
//  }

  return kResultOk;
}
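The comments in the parameter loop above note that finer automation resolution is possible by reading every point from the queue instead of only the last one. Below is a hedged sketch of that alternative; DrainParamQueue and the per-offset apply step are hypothetical and are not part of IPlug or the VST3 SDK, although the IParamValueQueue calls themselves are standard SDK interfaces.

#include "pluginterfaces/vst/ivstparameterchanges.h"

// Sketch: walk every (sampleOffset, value) pair in a VST3 parameter queue
// instead of taking only the last point, so automation can be applied at
// sample-accurate offsets within the block.
static void DrainParamQueue(Steinberg::Vst::IParamValueQueue* paramQueue)
{
  using namespace Steinberg;

  const int32 numPoints = paramQueue->getPointCount();

  for (int32 p = 0; p < numPoints; ++p)
  {
    int32 sampleOffset = 0;
    Vst::ParamValue value = 0.;

    if (paramQueue->getPoint(p, sampleOffset, value) == kResultTrue)
    {
      // Hypothetical: apply paramQueue->getParameterId() / value at
      // sampleOffset within the current block.
    }
  }
}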
Example #4
tresult PLUGIN_API IPlugVST3::process(ProcessData& data)
{ 
  TRACE_PROCESS;
  
  IMutexLock lock(this); // TODO: is this the best place to lock the mutex?
  
  if (data.processContext)
    memcpy(&mProcessContext, data.processContext, sizeof(ProcessContext));
  
  //process parameters
  IParameterChanges* paramChanges = data.inputParameterChanges;
  if (paramChanges)
  {
    int32 numParamsChanged = paramChanges->getParameterCount();
    
    //it is possible to get a finer resolution of control here by retrieving more values (points) from the queue
    //for now we just grab the last one
    
    for (int32 i = 0; i < numParamsChanged; i++)
    {
      IParamValueQueue* paramQueue = paramChanges->getParameterData(i);
      if (paramQueue)
      {
        int32 numPoints = paramQueue->getPointCount();
        int32 offsetSamples;
        double value;
        
        if (paramQueue->getPoint(numPoints - 1,  offsetSamples, value) == kResultTrue)
        {
          int idx = paramQueue->getParameterId();
          if (idx >= 0 && idx < NParams()) 
          {
            GetParam(idx)->SetNormalized((double)value);
            if (GetGUI()) GetGUI()->SetParameterFromPlug(idx, (double)value, true);
            OnParamChange(idx);
          }
        }
      }
    }
  }
  
  if (mDoesMidi)
  {
    // process events: currently only MIDI note-on and note-off are handled
    IEventList* eventList = data.inputEvents;
    if (eventList) 
    {
      int32 numEvent = eventList->getEventCount();
      for (int32 i=0; i<numEvent; i++)
      {
        Event event;
        if (eventList->getEvent(i, event) == kResultOk)
        {
          IMidiMsg msg;
          switch (event.type)
          {
            case Event::kNoteOnEvent:
            {
              msg.MakeNoteOnMsg(event.noteOn.pitch, event.noteOn.velocity * 127, event.sampleOffset, event.noteOn.channel);
              ProcessMidiMsg(&msg);
              break;
            }
              
            case Event::kNoteOffEvent:
            {
              msg.MakeNoteOffMsg(event.noteOff.pitch, event.sampleOffset, event.noteOff.channel);
              ProcessMidiMsg(&msg);
              break;
            }
          }
        }
      }
    }
  }
  
  //process audio
  if (data.numInputs == 0 || data.numOutputs == 0)
  {
    // nothing to do
    return kResultOk;
  }
  
  if (processSetup.symbolicSampleSize == kSample32)
  {
    float** in  = data.inputs[0].channelBuffers32;
    float** out = data.outputs[0].channelBuffers32;
    
    if (mScChans) 
    {
      float** side = data.inputs[1].channelBuffers32;

      if (getAudioInput(1)->isActive()) 
      {
        int totalNInputs = data.inputs[0].numChannels + data.inputs[1].numChannels;
        
        float** allInputs = new float*[totalNInputs];

        for (int i = 0; i < data.inputs[0].numChannels; i++)
        {
          allInputs[i] = in[i];
        }
        
        for (int i = 0; i < data.inputs[1].numChannels; i++)
        {
          allInputs[i + data.inputs[0].numChannels] = side[i];
        }
        
        AttachInputBuffers(0, totalNInputs, allInputs, data.numSamples);
        mSideChainIsConnected = true;
        
        delete [] allInputs;
      }
      else 
      {
        AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
        mSideChainIsConnected = false;
      }
    }
    else 
    {
      AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
    }
    
    AttachOutputBuffers(0, data.outputs[0].numChannels, out);
    ProcessBuffers(0.0f, data.numSamples);
  }
  else if (processSetup.symbolicSampleSize == kSample64) // TODO: parity for double precision
  {
    double** in  = data.inputs[0].channelBuffers64;
    double** out = data.outputs[0].channelBuffers64;
    
    AttachInputBuffers(0, data.inputs[0].numChannels, in, data.numSamples);
    AttachOutputBuffers(0, data.outputs[0].numChannels, out);
    
    ProcessBuffers(0.0, data.numSamples);
  } 
  
  // Midi Out
//  if (mDoesMidi) {
//    IEventList eventList = data.outputEvents;
//    
//    if (eventList) 
//    {
//      Event event;
//      
//      while (!mMidiOutputQueue.Empty()) {
//        //TODO: parse events and add
//        eventList.addEvent(event);
//      }
//    }
//  }
  
  return kResultOk; 
}
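Both VST3 examples convert the note-on velocity with event.noteOn.velocity * 127, because VST3 reports velocity as a normalized float in [0, 1] while IMidiMsg expects a 7-bit MIDI value; the implicit float-to-int conversion in the calls above simply truncates. The helper below is a small hedged sketch of that conversion with rounding and clamping added as an assumption; it is not part of IPlug.

// Hypothetical helper (not part of IPlug): map a normalized VST3 note-on
// velocity in [0, 1] to a 7-bit MIDI velocity, rounding to nearest and
// clamping instead of relying on plain truncation.
static int ToMidiVelocity(float normalizedVelocity)
{
  int v = (int)(normalizedVelocity * 127.f + 0.5f); // round to nearest
  if (v < 0)   v = 0;
  if (v > 127) v = 127;
  return v;
}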