void ImageSequenceSource::setImageSequence(ImageSequence* sequence) {

    if (!isInitialized()) {
        LERROR("loadImageSequence(): not initialized");
        return;
    }

    clearSequence();
    // currently the owner of the previous sequence -> delete it before assigning the new one
    delete imageSequence_;

    imageSequence_ = sequence;
    sequenceOwner_ = false;

    outport_.setData(imageSequence_);
    outport_.invalidate();
    invalidate();

    numImages_.set(sequence ? sequence->size() : 0);
}
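A minimal sketch of the owner-flag idiom used above: the holder deletes a sequence only if it created it itself, and records that an externally assigned sequence stays owned by the caller. The types and names below are illustrative placeholders, not the classes from the snippet.

// Sketch: delete a held resource only if this object created it,
// never one that was assigned from outside.
struct Sequence { /* payload omitted */ };

class SequenceHolder {
public:
    ~SequenceHolder() { clear(); }

    // Store an externally owned sequence; the caller remains responsible for deleting it.
    void setSequence(Sequence* sequence) {
        clear();
        sequence_ = sequence;
        owner_ = false;
    }

    // Create a sequence this object owns itself.
    void createSequence() {
        clear();
        sequence_ = new Sequence();
        owner_ = true;
    }

private:
    void clear() {
        if (owner_)
            delete sequence_;
        sequence_ = nullptr;
        owner_ = false;
    }

    Sequence* sequence_ = nullptr;
    bool owner_ = false;
};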
status_t EmulatedCameraDevice::Initialize()
{
    if (isInitialized()) {
        ALOGW("%s: Emulated camera device is already initialized: mState = %d",
             __FUNCTION__, mState);
        return NO_ERROR;
    }

    /* Instantiate worker thread object. */
    mWorkerThread = new WorkerThread(this);
    if (getWorkerThread() == NULL) {
        ALOGE("%s: Unable to instantiate worker thread object", __FUNCTION__);
        return ENOMEM;
    }

    mState = ECDS_INITIALIZED;

    return NO_ERROR;
}
status_t EmulatedQemuCameraDevice::connectDevice()
{
    LOGV("%s", __FUNCTION__);

    Mutex::Autolock locker(&mObjectLock);
    if (!isInitialized()) {
        LOGE("%s: Fake camera device is not initialized.", __FUNCTION__);
        return EINVAL;
    }
    if (isConnected()) {
        LOGW("%s: Fake camera device is already connected.", __FUNCTION__);
        return NO_ERROR;
    }

    /* There is no device to connect to. */
    mState = ECDS_CONNECTED;

    return NO_ERROR;
}
Example #4
void AnalyserNode::process(size_t framesToProcess)
{
    AudioBus* outputBus = output(0)->bus();

    if (!isInitialized() || !input(0)->isConnected()) {
        outputBus->zero();
        return;
    }

    AudioBus* inputBus = input(0)->bus();

    // Give the analyser the audio which is passing through this AudioNode.
    m_analyser.writeInput(inputBus, framesToProcess);

    // For in-place processing, our override of pullInputs() will just pass the audio data through unchanged if the channel count matches from input to output
    // (resulting in inputBus == outputBus). Otherwise, do an up-mix to stereo.
    if (inputBus != outputBus)
        outputBus->copyFrom(*inputBus);
}
Example #5
void AvatarManager::clearOtherAvatars() {
    // clear any avatars that came from an avatar-mixer
    QWriteLocker locker(&_hashLock);

    AvatarHash::iterator avatarIterator =  _avatarHash.begin();
    while (avatarIterator != _avatarHash.end()) {
        auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
        if (avatar == _myAvatar || !avatar->isInitialized()) {
            // don't remove myAvatar or uninitialized avatars from the list
            ++avatarIterator;
        } else {
            auto removedAvatar = avatarIterator.value();
            avatarIterator = _avatarHash.erase(avatarIterator);

            handleRemovedAvatar(removedAvatar);
        }
    }
    _myAvatar->clearLookAtTargetAvatar();
}
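The loop above depends on erase() returning the iterator that follows the removed entry, so iteration continues safely while elements are deleted. A self-contained sketch of the same erase-while-iterating idiom with std::unordered_map (placeholder types, not the AvatarHash used above):

#include <memory>
#include <string>
#include <unordered_map>

struct Avatar { bool initialized = false; };
using AvatarHash = std::unordered_map<std::string, std::shared_ptr<Avatar>>;

// Remove every initialized avatar from the hash, keeping the rest.
void removeInitialized(AvatarHash& hash) {
    for (auto it = hash.begin(); it != hash.end(); ) {
        if (it->second->initialized) {
            it = hash.erase(it);   // erase() hands back the next valid iterator
        } else {
            ++it;                  // only advance when nothing was removed
        }
    }
}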
Example #6
void initialize() {
    if (!isInitialized()) {
        // Allow libsawyer to initialize itself if necessary.  Among other things, this makes Sawyer::Message::merr actually
        // point to something.  This is also the place where one might want to assign some other message plumbing to
        // rose::Diagnostics::destination (such as sending messages to additional locations).
        Sawyer::initializeLibrary();
        if (mprefix==NULL)
            mprefix = Sawyer::Message::Prefix::instance();
        if (destination==NULL) {
            // use FileSink or FdSink because StreamSink can't tell whether output is a tty or not.
            destination = Sawyer::Message::FileSink::instance(stderr)->prefix(mprefix);
        }

        // (Re)construct the main librose Facility.  A Facility is constructed with all Stream objects enabled, so we'll
        // disable those that we deem too noisy for most users.  However, insertAndAdjust might make other choices if
        // mfacilities already has some stream inserted or the user has already called mfacilities.impset().
        mlog = Sawyer::Message::Facility("rose", destination);
        mlog[DEBUG].disable();
        mlog[TRACE].disable();
        mlog[WHERE].disable();
        mfacilities.insertAndAdjust(mlog);

        // Where should failed assertions go for the Sawyer::Assert macros like ASSERT_require()?
        Sawyer::Message::assertionStream = mlog[FATAL];

        // Turn down the progress bar rates
        Sawyer::ProgressBarSettings::initialDelay(12.0);
        Sawyer::ProgressBarSettings::minimumUpdateInterval(2.5);

        // Register logging facilities from other software layers.  These facilities should already be in a usable, but
        // default, state. They probably have all streams enabled (debug through fatal) and are emitting to standard error
        // using the POSIX unbuffered output functions.  Calling these initializers should make all the streams point to the
        // rose::Diagnostics::destination that we set above.
        BinaryLoader::initDiagnostics();
        BinaryAnalysis::Disassembler::initDiagnostics();
        BinaryAnalysis::Partitioner::initDiagnostics();
        BinaryAnalysis::AsmUnparser::initDiagnostics();
        BinaryAnalysis::DataFlow::initDiagnostics();
        BinaryAnalysis::TaintedFlow::initDiagnostics();
        BinaryAnalysis::Partitioner2::initDiagnostics();
        EditDistance::initDiagnostics();
    }
}
Example #7
void ImageSequenceSave::saveSequence() {
    if (!isInitialized())
        return;

    if (!inport_.hasData() || inport_.getData()->empty())
        return;
    if (outputDirectory_.get() == "")
        return;

    std::string directory = outputDirectory_.get();
    const ImageSequence* inputSequence = inport_.getData();
    tgtAssert(inputSequence, "no collection");
    const std::string maxNr = itos(inputSequence->size() - 1);
    for (size_t i=0; i<inputSequence->size(); i++) {
        const tgt::Texture* texture = inputSequence->at(i);
        tgtAssert(texture, "imagesequence contains null pointer");
        std::string imageFilename;

        std::string nrSuffix = itos(i);
        nrSuffix = std::string(maxNr.length()-nrSuffix.length(), '0') + nrSuffix;
        if (baseName_.get() != "")
            imageFilename += baseName_.get() + nrSuffix;
        else if (texture->getName()!= "")
            imageFilename = tgt::FileSystem::fileName(inputSequence->at(i)->getName());
        else
            imageFilename = "image" + nrSuffix;

        tgtAssert(imageFilename != "", "no imageFilename");
        if (tgt::FileSystem::fileExtension(imageFilename) == "")
            imageFilename += ".png";

        std::string outputFilename = tgt::FileSystem::cleanupPath(directory + "/" + imageFilename);
        try {
            LINFO("Saving image to file: " << outputFilename);
            saveImage(outputFilename, texture);
        }
        catch (VoreenException& e) {
            LERROR("Failed to save image to file '" << outputFilename << "': " << e.what());
        }

    }
}
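The nrSuffix logic above left-pads each index with zeros to the width of the largest index so that filenames sort in order. A standalone sketch of just that padding step (a hypothetical helper, assuming index <= maxIndex):

#include <cstddef>
#include <string>

// Pad `index` with leading zeros to the width of the largest index,
// e.g. maxIndex = 120 turns 7 into "007".  Assumes index <= maxIndex.
std::string paddedSuffix(std::size_t index, std::size_t maxIndex) {
    const std::string maxStr = std::to_string(maxIndex);
    std::string nr = std::to_string(index);
    return std::string(maxStr.length() - nr.length(), '0') + nr;
}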
bool DirectShowVideoWrapper::seek(Real64 SeekPos)
{
    if(isInitialized())
    {
		HRESULT hr;
        TCHAR szErr[MAX_ERROR_TEXT_LEN];

        Real64 CurPos(getPosition());
        if((SeekPos < CurPos) &&
            (!canSeekBackward()))
        {
            SWARNING << "Unable to seek backwards on this type of media." << std::endl;
			return false;
        }
        if((SeekPos > CurPos) &&
            (!canSeekForward()))
        {
            SWARNING << "Unable to seek forwards on this type of media." << std::endl;
			return false;
        }

		CComPtr<IMediaPosition> mediaPosition;
		hr = _pGraphBuilder->QueryInterface(&mediaPosition);
		if (FAILED(hr))
        {
            AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
            SWARNING << "Unable to get IMediaPosition, error: " << szErr << std::endl;
			return false;
		}

		hr = mediaPosition->put_CurrentPosition(SeekPos);
		if (FAILED(hr))
        {
            AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
            SWARNING << "Unable to set position to value: " << SeekPos << ", error: " << szErr << std::endl;
			return false;
		}

		return true;
	}
	return false;
}
void WaveShaperProcessor::process(const AudioBus* source, AudioBus* destination, size_t framesToProcess)
{
    if (!isInitialized()) {
        destination->zero();
        return;
    }

    // The audio thread can't block on this lock, so we call tryLock() instead.
    // Careful - this is a tryLock() and not an autolocker, so we must unlock() before every return.
    if (m_processLock.tryLock()) {        
        // For each channel of our input, process using the corresponding WaveShaperDSPKernel into the output channel.
        for (unsigned i = 0; i < m_kernels.size(); ++i)
            m_kernels[i]->process(source->channel(i)->data(), destination->channel(i)->mutableData(), framesToProcess);

        m_processLock.unlock();
    } else {
        // Too bad - the tryLock() failed. We must be in the middle of a setCurve() call.
        destination->zero();
    }
}
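The tryLock()/unlock() pairing above keeps the real-time audio thread from ever blocking: when the lock is contended, the processor simply emits silence for that quantum. A minimal sketch of the same pattern using std::mutex and std::try_to_lock on a plain float buffer (not the AudioBus API):

#include <algorithm>
#include <mutex>
#include <vector>

class NonBlockingProcessor {
public:
    // Called from the real-time audio thread: must never block.
    void process(std::vector<float>& destination) {
        std::unique_lock<std::mutex> lock(m_mutex, std::try_to_lock);
        if (!lock.owns_lock()) {
            // A control-thread update holds the lock; output silence this quantum.
            std::fill(destination.begin(), destination.end(), 0.0f);
            return;
        }
        for (float& sample : destination)
            sample *= m_gain;      // stand-in for the real per-sample work
    }

    // Called from the control thread; may block briefly.
    void setGain(float gain) {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_gain = gain;
    }

private:
    std::mutex m_mutex;
    float m_gain = 1.0f;
};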
bool DirectShowVideoWrapper::hasAudio(void) const
{
    if(isInitialized())
    {
		HRESULT hr;
        TCHAR szErr[MAX_ERROR_TEXT_LEN];

        IBaseFilter* pAudioRenderer = NULL;
        hr = FindAudioRenderer(_pGraphBuilder,&pAudioRenderer);
        if (FAILED(hr))
        {
            AMGetErrorText(hr, szErr, MAX_ERROR_TEXT_LEN);
            SWARNING << "Failed to find audio renderer, error: " << szErr << std::endl;
            return false;
        }

        return pAudioRenderer != NULL;
	}
	return false;
}
Example #11
Error Display::createImage(gl::Context *context,
                           EGLenum target,
                           EGLClientBuffer buffer,
                           const AttributeMap &attribs,
                           Image **outImage)
{
    ASSERT(isInitialized());

    if (mImplementation->testDeviceLost())
    {
        Error error = restoreLostDevice();
        if (error.isError())
        {
            return error;
        }
    }

    UNIMPLEMENTED();
    return Error(EGL_SUCCESS);
}
Example #12
void JavaClass::cacheField(JNIEnv *env, const char *field_name, const char *field_type) {
    LOG_DEBUG("Caching field '%s' (type %s) in class '%s'", field_name, field_type, getSimpleName());
    if (!isInitialized()) {
        JavaExceptionUtils::throwExceptionOfType(env, kTypeIllegalStateException,
                "Attempt to call cacheField without having set class info");
        return;
    }

    std::string fieldTypeSignature;
    JavaClassUtils::makeNameForSignature(fieldTypeSignature, field_type);
    jfieldID field = env->GetFieldID(_clazz_global.get(), field_name, fieldTypeSignature.c_str());
    JavaExceptionUtils::checkException(env);
    if (field != NULL) {
        _fields_global[field_name] = field;
    } else {
        JavaExceptionUtils::throwExceptionOfType(env, kTypeJavaClass(NoSuchFieldError),
                "Field '%s' (type '%s') not found on class %s",
                field_name, field_type, getCanonicalName());
    }
}
Example #13
void VendorDataComponent::initializeTransientMembers() {

	AuctionTerminalDataComponent::initializeTransientMembers();

	lastBark = 0;
	ManagedReference<SceneObject*> strongParent = parent.get();

	if(strongParent != NULL) {

		if (isInitialized()) {
			scheduleVendorCheckTask(VENDORCHECKDELAY + System::random(VENDORCHECKINTERVAL));

			if(originalDirection == 1000)
				originalDirection = strongParent->getDirectionAngle();

			if(isRegistered() && strongParent->getZone() != NULL)
				strongParent->getZone()->registerObjectWithPlanetaryMap(strongParent);
		}
	}
}
void OsgSkeletonRepresentation::setNeutralBonePose(const std::string& name,
												   const SurgSim::Math::RigidTransform3d& pose)
{
	boost::unique_lock<boost::shared_mutex> lock(m_mutex);

	auto found = m_bones->find(name);
	if (found != m_bones->end())
	{
		found->second.neutralPose = pose;
	}
	else if (isInitialized())
	{
		SURGSIM_FAILURE() << "Bone with name " << name << " is not present in mesh.";
	}
	else
	{
		auto newBone = m_bones->emplace(std::make_pair(name, BoneData())).first;
		newBone->second.neutralPose = pose;
	}
}
Example #15
bool NetworkDB::initDatabase() {
	if (isInitialized()) {
		return true;
	}

	if (!open()) {
		return false;
	}

	myInitialized = true;

	shared_ptr<DBRunnable> runnable = new InitNetworkDBRunnable(connection());
	if (!executeAsTransaction(*runnable)) {
		myInitialized = false;
		close();
		return false;
	}

	return true;
}
VideoCaptureDeviceImpl::RGBVideoFormatList
VideoCaptureDeviceImpl::videoFormatList() const {
  if (!isInitialized()) {
    RGBVideoFormatList emptyList;
    return emptyList;
  }

  RGBVideoFormatList rgbVideoFormatList;
  for (VideoFormatConstIterator iterator(m_videoFormatsByUuid.begin());
       iterator != m_videoFormatsByUuid.end();
       ++iterator) {
    const UuidVideoFormatPair uuidVideoFormatPair(*iterator);
    RGBVideoFormat rgbVideoFormat(rgbVideoFormatFromPair(uuidVideoFormatPair));
    if (!rgbVideoFormat) {
      continue;
    }
    rgbVideoFormatList.push_back(rgbVideoFormat);
  }
  return rgbVideoFormatList;
}
Example #17
bool FloppyService::setImage(int iSlot, std::string sLocalFileWPath)
{
    if( !isInitialized() )
    {
        return false;
    }
    if( iSlot < 0 || iSlot > 2 ) 
    {                        
        return false;        // index out of range? fail
    } 
    
    std::string sPath;
    std::string sFile;
    Utils::splitFilenameFromPath(sLocalFileWPath, sPath, sFile);
    
    std::string sEmpty;
    pxImageSilo->add(iSlot, sFile, sLocalFileWPath, sEmpty, sEmpty, true); 
        
    return true;
}
Example #18
void NuPIC::init()
{
  if (isInitialized())
    return;
  
  // internal consistency check. Nonzero should be impossible. 
  NTA_CHECK(networks_.size() == 0) << "Internal error in NuPIC::init()";
  
  // Initialize APR
  int argc=1;
  const char *argv[1] = {"NuPIC"};
  // TODO: move to OS::initialize()?
  int result = apr_app_initialize(&argc, (const char* const **)&argv, nullptr /*env*/);
  if (result) 
    NTA_THROW << "Error initializing APR (code " << result << ")";

  // TODO: license checking will be done in NuPIC::init()

  initialized_ = true;
}
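The initialized_ flag above is a hand-rolled run-once guard. For comparison, a generic sketch of the same run-once idea using std::call_once, which additionally makes the guard thread-safe (not the NuPIC code):

#include <mutex>

namespace {
std::once_flag initFlag;

void doInit() {
    // one-time setup goes here (resource allocation, library initialization, ...)
}
}  // namespace

// Safe to call repeatedly and from multiple threads; doInit() runs exactly once.
void init() {
    std::call_once(initFlag, doInit);
}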
Example #19
bool Display::getOcclusionQuerySupport() const
{
    if (!isInitialized())
    {
        return false;
    }

    IDirect3DQuery9 *query = NULL;
    HRESULT result = mDevice->CreateQuery(D3DQUERYTYPE_OCCLUSION, &query);

    if (SUCCEEDED(result) && query)
    {
        query->Release();
        return true;
    }
    else
    {
        return false;
    }
}
Example #20
Error Display::createPixmapSurface(const Config *configuration, NativePixmapType nativePixmap, const AttributeMap &attribs,
                                   Surface **outSurface)
{
    ASSERT(isInitialized());

    if (mImplementation->testDeviceLost())
    {
        ANGLE_TRY(restoreLostDevice());
    }

    std::unique_ptr<Surface> surface(
        new PixmapSurface(mImplementation, configuration, nativePixmap, attribs));
    ANGLE_TRY(surface->initialize());

    ASSERT(outSurface != nullptr);
    *outSurface = surface.release();
    mImplementation->getSurfaceSet().insert(*outSurface);

    return egl::Error(EGL_SUCCESS);
}
Example #21
Error Display::createPbufferFromClientBuffer(const Config *configuration, EGLClientBuffer shareHandle,
        const AttributeMap &attribs, Surface **outSurface)
{
    ASSERT(isInitialized());

    if (mImplementation->testDeviceLost())
    {
        ANGLE_TRY(restoreLostDevice());
    }

    std::unique_ptr<Surface> surface(
        new PbufferSurface(mImplementation, configuration, shareHandle, attribs));
    ANGLE_TRY(surface->initialize());

    ASSERT(outSurface != nullptr);
    *outSurface = surface.release();
    mImplementation->getSurfaceSet().insert(*outSurface);

    return egl::Error(EGL_SUCCESS);
}
Example #22
HRESULT
AsdkSheetSet::createNestedCategory(char* categoryName,		   // Category Name
							       char* categoryDesc,		   // Category Description
								   IAcSmSubset *pParentSet,    // Parent category 
								   IAcSmSubset **pNewSubset  // OUT - Newly created category		
								  )
{
	if(FAILED(isInitialized("createNestedCategory")))
		return E_FAIL;
	
    // lock the database first before doing any operation on it
    if (FAILED(LockDatabase()))
	{
		acutPrintf("\n Database lock failed!");
        return E_FAIL;
	}

	CComQIPtr<IAcSmSubset> pSubSet = pParentSet;
	if(!pSubSet.p)
		// Create a category at the root
		pSubSet = m_pSheetSet;

	CComBSTR bstrCatName(categoryName); // Category name
	CComBSTR bstrCatDesc(categoryDesc); // Category description

	// create a new subset 
	if(FAILED(pSubSet->CreateSubset(bstrCatName, bstrCatDesc, pNewSubset)))
	{
		acutPrintf("\n Subset creation failed");
		return E_FAIL;
	}

	// Unlock database
	if (FAILED(UnlockDatabase())) 
	{
		acutPrintf("\n Cannot unlock database");
        return E_FAIL;
	}

	return S_OK;
}
Example #23
bool AMySQLServer::getFields(const AString& table, VECTOR_AString& sv, AString& error)
{
  if (!isInitialized())
  {
    error.assign("Database has not been initialized;");
    return false;
  }

  if (table.isEmpty())
  {
    error = "Please use a namespace;";
    return false;
  }

  sv.clear();

  AString query("SHOW COLUMNS FROM `");
  query += table;
  query += "`";
  
  MYSQL_RES *pmyresult = executeSQL(query, error);
  if (pmyresult)
  {
    MYSQL_ROW myrow;
    int iSize = (int)mysql_num_rows(pmyresult);
    for (int i=0; i < iSize; ++i)
    {
      myrow = mysql_fetch_row(pmyresult);
      if (myrow)
      {
        sv.push_back(myrow[0]);
      }
    }

    mysql_free_result(pmyresult);
  }
  else
    return false;

  return true;
}
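For reference, the same SHOW COLUMNS round trip can be written directly against the MySQL C API. A rough sketch assuming an already connected MYSQL* handle and a std::vector of std::string for the output (error messages omitted; the header path may vary by platform):

#include <mysql/mysql.h>
#include <string>
#include <vector>

// List the column names of `table` on an already connected handle.
// Returns false if the query or result retrieval fails.
bool getColumnNames(MYSQL* conn, const std::string& table, std::vector<std::string>& names) {
    const std::string query = "SHOW COLUMNS FROM `" + table + "`";
    if (mysql_query(conn, query.c_str()) != 0)
        return false;

    MYSQL_RES* result = mysql_store_result(conn);
    if (!result)
        return false;

    while (MYSQL_ROW row = mysql_fetch_row(result)) {
        if (row[0])
            names.push_back(row[0]);   // first column of SHOW COLUMNS is the field name
    }
    mysql_free_result(result);
    return true;
}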
Example #24
/**
 * Sends a command to the sensor to select the hardware mode.
 */
bool AS_BH1750::selectResolutionMode(uint8_t mode) {
#if BH1750_DEBUG == 1
    Serial.print("selectResolutionMode: ");
    Serial.println(mode, DEC);
#endif
  if(!isInitialized()) {
#if BH1750_DEBUG == 1
    Serial.println("sensor not initialized");
#endif
    return false;
  }

  _hardwareMode=mode;
  _valueReaded=false;

  // Check whether the mode is valid and, if so, activate the requested mode
  switch (mode) {
  case BH1750_CONTINUOUS_HIGH_RES_MODE:
  case BH1750_CONTINUOUS_HIGH_RES_MODE_2:
  case BH1750_CONTINUOUS_LOW_RES_MODE:
  case BH1750_ONE_TIME_HIGH_RES_MODE:
  case BH1750_ONE_TIME_HIGH_RES_MODE_2:
  case BH1750_ONE_TIME_LOW_RES_MODE:
    // Activate the mode
    if(write8(mode)) {
      // A short pause is needed here, otherwise the mode is not reliably activated
      // (e.g. in AutoHigh mode the sensor then alternates between overdriven values such as 54612.5, 68123.4, 54612.5, 69345.3, ...)
      delay(5);
      return true;
    }
    break;
  default:
    // Invalid measurement mode
#if BH1750_DEBUG == 1
    Serial.println("Invalid measurement mode");
#endif
    break;
  }

  return false;
}
Example #25
egolib_rv CameraSystem::renderAll(std::function<void(std::shared_ptr<Camera>, std::shared_ptr<Ego::Graphics::TileList>, std::shared_ptr<Ego::Graphics::EntityList>)> renderFunction)
{
    if (!renderFunction) {
        return rv_error;
    }

    if ( !isInitialized() ) {
        return rv_fail;
    }

    //Store main camera to restore
    std::shared_ptr<Camera> storeMainCam = _mainCamera;

    for(const std::shared_ptr<Camera> &camera : _cameraList) 
    {
        // set the "global" camera pointer to this camera
        _mainCamera = camera;

	    // has this camera already rendered this frame?
        if ( camera->getLastFrame() >= 0 && static_cast<uint32_t>(camera->getLastFrame()) >= game_frame_all ) {
            continue;
        }

        // set up everything for this camera
        beginCameraMode(camera);

        // render the world for this camera
        renderFunction(camera, camera->getTileList(), camera->getEntityList());

        // undo the camera setup
        endCameraMode();

        //Set last update frame
        camera->setLastFrame(game_frame_all);
    }

    // reset the "global" camera pointer to whatever it was
    _mainCamera = storeMainCam;

    return rv_success;
}
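Callers typically hand renderAll() a lambda that matches the std::function signature. A small self-contained sketch of that callback shape, with placeholder Camera/TileList types rather than the engine classes above:

#include <functional>
#include <iostream>
#include <memory>
#include <vector>

struct Camera   { int id = 0; };
struct TileList {};

// The consumer checks validity first, then invokes the callback once per camera.
void renderAll(const std::vector<std::shared_ptr<Camera>>& cameras,
               std::function<void(std::shared_ptr<Camera>, std::shared_ptr<TileList>)> renderFunction) {
    if (!renderFunction)
        return;                                     // empty std::function: nothing to call
    for (const auto& camera : cameras)
        renderFunction(camera, std::make_shared<TileList>());
}

int main() {
    std::vector<std::shared_ptr<Camera>> cameras{ std::make_shared<Camera>() };
    renderAll(cameras, [](std::shared_ptr<Camera> cam, std::shared_ptr<TileList>) {
        std::cout << "rendering camera " << cam->id << "\n";   // per-camera render work goes here
    });
}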
Example #26
	bool CAudio_AL::init()
	{
		m_device = ::alcOpenDevice(nullptr);
		
		if (!m_device)
		{
			return false;
		}
		
		m_context = ::alcCreateContext(m_device, nullptr);
		
		if (!m_context)
		{
			return false;
		}
		
		if (!::alcMakeContextCurrent(m_context))
		{
			return false;
		}
		
		::alListener3f(AL_POSITION, 0, 0, 1.0f);
		::alListener3f(AL_VELOCITY, 0, 0, 0);
		const ALfloat listenerOri[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f };
		::alListenerfv(AL_ORIENTATION, listenerOri);
		
		const auto nullAudio = std::make_shared<Audio_AL>(
			Wave(SecondsF(0.5), Arg::generator = [](double t) {
				return 0.5 * std::sin(t * Math::TwoPi) * std::sin(t * Math::TwoPi * 220.0 * (t * 4.0 + 1.0)); }));
		
		if (!nullAudio->isInitialized())
		{
			return false;
		}
		
		m_audios.setNullData(nullAudio);

		LOG_INFO(U"ℹ️ Audio initialized");

		return true;
	}
bool DirectShowVideoWrapper::isPaused(void) const
{
    if(isInitialized())
    {
		HRESULT hr;

        IMediaControl* mediaControl = NULL;
        hr = _pGraphBuilder->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
        if (FAILED(hr) || mediaControl == NULL)
        {
            return false;
        }

        OAFilterState GraphState;
        // Query the graph state, then release the interface obtained above.
        bool paused = SUCCEEDED(mediaControl->GetState(0.1f, &GraphState)) &&
                      (GraphState == State_Paused);
        mediaControl->Release();
        return paused;
    }
    return false;
}
void PlotDataFitFunction::recalculate() {
    tgtAssert(isInitialized(), "not initialized");
    if (!pData_)
        return;
    if (fittingValues_.column >= 0) {
        PlotFunction* newPlotData;
        newPlotData = new PlotFunction(*pDataOut_);
        newPlotData->getPlotExpression().setExpressionName(expressionNameInput_.get());
        newPlotData->setExpressionLength(static_cast<PlotFunction::ExpressionDescriptionLengthType>(expressionText_.getValue()),
            maxLength_.get(),selfDescription_.get());
        PlotFunction* oldData;
        oldData = pDataOut_;
        pDataOut_ = newPlotData;
        setOutPortData();
        if (getProcessorWidget()){
            getProcessorWidget()->updateFromProcessor();
        }
        oldData->getPlotExpression().deletePlotExpressionNodes();
        delete oldData;
    }
}
Example #29
/**
 * \return Value of the given setting as color or defaultValue.
 */
RColor RSettings::getColor(const QString& key, const RColor& defaultValue) {
    // colors are 'different' and need to be handled without RSettings::getValue:
    if (!isInitialized()) {
        return defaultValue;
    }
    if (cache.contains(key)) {
        return cache[key].value<RColor>();
    }

    // slow operation:
    QVariant stored = getQSettings()->value(key);
    if (!stored.isValid()) {
        return defaultValue;
    }
    RColor ret = stored.value<RColor>();
    QVariant v;
    v.setValue(ret);
    cache[key] = v;

    return ret;
}
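The cache lookup above is the usual memoized-settings pattern: consult an in-memory map first and fall back to the slow QSettings read only on a miss. A generic sketch of that pattern with a QHash cache (illustrative names, not the RSettings API):

#include <QHash>
#include <QSettings>
#include <QString>
#include <QVariant>

// Read a setting, memoizing the result so repeated lookups skip QSettings.
QVariant cachedValue(QSettings& settings, QHash<QString, QVariant>& cache,
                     const QString& key, const QVariant& defaultValue) {
    const QHash<QString, QVariant>::const_iterator it = cache.constFind(key);
    if (it != cache.constEnd())
        return it.value();                          // fast path: already read once

    const QVariant stored = settings.value(key);    // slow path: hits the backing store
    if (!stored.isValid())
        return defaultValue;                        // don't cache misses; the default may change

    cache.insert(key, stored);
    return stored;
}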
Example #30
void PannerNode::process(size_t framesToProcess)
{
    AudioBus* destination = output(0)->bus();

    if (!isInitialized() || !input(0)->isConnected() || !m_panner.get()) {
        destination->zero();
        return;
    }

    AudioBus* source = input(0)->bus();

    if (!source) {
        destination->zero();
        return;
    }

    // The audio thread can't block on this lock, so we use std::try_to_lock instead.
    std::unique_lock<std::mutex> lock(m_pannerMutex, std::try_to_lock);
    if (!lock.owns_lock()) {
        // Too bad - The try_lock() failed. We must be in the middle of changing the panner.
        destination->zero();
        return;
    }

    // Apply the panning effect.
    double azimuth;
    double elevation;
    getAzimuthElevation(&azimuth, &elevation);
    m_panner->pan(azimuth, elevation, source, destination, framesToProcess);

    // Get the distance and cone gain.
    double totalGain = distanceConeGain();

    // Snap to desired gain at the beginning.
    if (m_lastGain == -1.0)
        m_lastGain = totalGain;

    // Apply gain in-place with de-zippering.
    destination->copyWithGainFrom(*destination, &m_lastGain, totalGain);
}