// Convenience overload: forwards a single source string to the
// list-based setSource() as a one-element list.
void PropDict::setSource( const std::string& src )
{
    setSource( std::list< std::string >( 1, src ) );
}
void QmlApplicationViewer::setMainQmlFile(const QString &file) { d->mainQmlFile = QmlApplicationViewerPrivate::adjustPath(file); setSource(QUrl::fromLocalFile(d->mainQmlFile)); }
/*! Changes the document displayed to be the first document the browser
    displayed. Does nothing when no home document was ever recorded. */
void Q3TextBrowser::home()
{
    if (d->home.isNull())
        return; // no start document available
    setSource(d->home);
}
// Cancels playback by clearing the player's current source.
// NOTE(review): 0 presumably denotes "no source" (null handle/pointer) —
// confirm against setSource()'s parameter type.
void AnimationPlayer::cancel()
{
    setSource(0);
}
// Destructor: detach the player from its source first so nothing keeps
// reading from the stream, then delete the owned audio file source and
// null the pointer.
AudioFilePlayer::~AudioFilePlayer()
{
    setSource (0);
    deleteAndZero (currentAudioFileSource);
}
// Resets the drag source back to its default (no source).
void DeclarativeDragArea::resetSource()
{
    setSource(0);
}
QT_BEGIN_NAMESPACE

// Assistant's main window: opens the help collection, builds the central
// browser widget and the dock windows (index, contents, search, bookmarks,
// open pages), wires up the managers' signals, restores the saved window
// state/geometry, and applies the command-line switches.
MainWindow::MainWindow(CmdLineParser *cmdLine, QWidget *parent)
    : QMainWindow(parent)
    , m_bookmarkWidget(0)
    , m_filterCombo(0)
    , m_toolBarMenu(0)
    , m_cmdLine(cmdLine)
    , m_progressWidget(0)
    , m_qtDocInstaller(0)
    , m_connectedInitSignals(false)
{
    TRACE_OBJ
    setToolButtonStyle(Qt::ToolButtonFollowStyle);
    setDockOptions(dockOptions() | AllowNestedDocks);

    // Pick the help collection file: the default one unless a collection
    // file was given on the command line.
    QString collectionFile;
    if (usesDefaultCollection()) {
        MainWindow::collectionFileDirectory(true);
        collectionFile = MainWindow::defaultHelpCollectionFileName();
    } else {
        collectionFile = cmdLine->collectionFile();
    }
    HelpEngineWrapper &helpEngineWrapper =
        HelpEngineWrapper::instance(collectionFile);
    BookmarkManager *bookMarkManager = BookmarkManager::instance();

    // Without a working help database Assistant cannot run at all.
    if (!initHelpDB(!cmdLine->collectionFileGiven())) {
        qDebug("Fatal error: Help engine initialization failed. "
            "Error message was: %s\nAssistant will now exit.",
            qPrintable(HelpEngineWrapper::instance().error()));
        std::exit(1);
    }

    // Central browser area.
    m_centralWidget = new CentralWidget(this);
    setCentralWidget(m_centralWidget);

    // Index dock.
    m_indexWindow = new IndexWindow(this);
    QDockWidget *indexDock = new QDockWidget(tr("Index"), this);
    indexDock->setObjectName(QLatin1String("IndexWindow"));
    indexDock->setWidget(m_indexWindow);
    addDockWidget(Qt::LeftDockWidgetArea, indexDock);

    // Contents dock.
    m_contentWindow = new ContentWindow;
    QDockWidget *contentDock = new QDockWidget(tr("Contents"), this);
    contentDock->setObjectName(QLatin1String("ContentWindow"));
    contentDock->setWidget(m_contentWindow);
    addDockWidget(Qt::LeftDockWidgetArea, contentDock);

    // Search dock; uses the browser font when one is configured.
    m_searchWindow = new SearchWidget(helpEngineWrapper.searchEngine());
    m_searchWindow->setFont(!helpEngineWrapper.usesBrowserFont()
        ? qApp->font() : helpEngineWrapper.browserFont());
    QDockWidget *searchDock = new QDockWidget(tr("Search"), this);
    searchDock->setObjectName(QLatin1String("SearchWindow"));
    searchDock->setWidget(m_searchWindow);
    addDockWidget(Qt::LeftDockWidgetArea, searchDock);

    // Bookmarks dock (widget owned by the bookmark manager).
    QDockWidget *bookmarkDock = new QDockWidget(tr("Bookmarks"), this);
    bookmarkDock->setObjectName(QLatin1String("BookmarkWindow"));
    bookmarkDock->setWidget(m_bookmarkWidget
        = bookMarkManager->bookmarkDockWidget());
    addDockWidget(Qt::LeftDockWidgetArea, bookmarkDock);

    // Open-pages dock.
    QDockWidget *openPagesDock = new QDockWidget(tr("Open Pages"), this);
    openPagesDock->setObjectName(QLatin1String("Open Pages"));
    OpenPagesManager *openPagesManager
        = OpenPagesManager::createInstance(this, usesDefaultCollection(),
            m_cmdLine->url());
    openPagesDock->setWidget(openPagesManager->openPagesWidget());
    addDockWidget(Qt::LeftDockWidgetArea, openPagesDock);

    // Cross-component signal wiring.
    connect(m_centralWidget, SIGNAL(addBookmark(QString, QString)),
        bookMarkManager, SLOT(addBookmark(QString, QString)));
    connect(bookMarkManager, SIGNAL(escapePressed()), this,
        SLOT(activateCurrentCentralWidgetTab()));
    connect(bookMarkManager, SIGNAL(setSource(QUrl)), m_centralWidget,
        SLOT(setSource(QUrl)));
    connect(bookMarkManager, SIGNAL(setSourceInNewTab(QUrl)),
        openPagesManager, SLOT(createPage(QUrl)));

    QHelpSearchEngine *searchEngine = helpEngineWrapper.searchEngine();
    connect(searchEngine, SIGNAL(indexingStarted()), this,
        SLOT(indexingStarted()));
    connect(searchEngine, SIGNAL(indexingFinished()), this,
        SLOT(indexingFinished()));

    QString defWindowTitle = tr("Qt Assistant");
    setWindowTitle(defWindowTitle);

    setupActions();
    statusBar()->show();
    m_centralWidget->connectTabBar();

    setupFilterToolbar();
    setupAddressToolbar();

    // The collection file may carry a custom window title.
    const QString windowTitle = helpEngineWrapper.windowTitle();
    setWindowTitle(windowTitle.isEmpty() ? defWindowTitle : windowTitle);

    // Application icon: the one embedded in the collection, or the stock
    // Assistant icon as fallback.
    QByteArray iconArray = helpEngineWrapper.applicationIcon();
    if (iconArray.size() > 0) {
        QPixmap pix;
        pix.loadFromData(iconArray);
        QIcon appIcon(pix);
        qApp->setWindowIcon(appIcon);
    } else {
        QIcon appIcon(QLatin1String(":/trolltech/assistant/images/assistant-128.png"));
        qApp->setWindowIcon(appIcon);
    }

    QToolBar *toolBar = addToolBar(tr("Bookmark Toolbar"));
    toolBar->setObjectName(QLatin1String("Bookmark Toolbar"));
    bookMarkManager->setBookmarksToolbar(toolBar);

    // Show the widget here, otherwise the restore geometry and state won't
    // work on x11.
    show();

    toolBar->hide();
    toolBarMenu()->addAction(toolBar->toggleViewAction());

    // Restore the saved dock/toolbar layout and window geometry; with no
    // saved geometry, tabify the left docks and size to 4/5 of the screen.
    QByteArray ba(helpEngineWrapper.mainWindow());
    if (!ba.isEmpty())
        restoreState(ba);

    ba = helpEngineWrapper.mainWindowGeometry();
    if (!ba.isEmpty()) {
        restoreGeometry(ba);
    } else {
        tabifyDockWidget(contentDock, indexDock);
        tabifyDockWidget(indexDock, bookmarkDock);
        tabifyDockWidget(bookmarkDock, searchDock);
        contentDock->raise();
        const QRect screen = QApplication::desktop()->screenGeometry();
        resize(4*screen.width()/5, 4*screen.height()/5);
    }

    // First run: seed font settings from the application defaults.
    if (!helpEngineWrapper.hasFontSettings()) {
        helpEngineWrapper.setUseAppFont(false);
        helpEngineWrapper.setUseBrowserFont(false);
        helpEngineWrapper.setAppFont(qApp->font());
        helpEngineWrapper.setAppWritingSystem(QFontDatabase::Latin);
        helpEngineWrapper.setBrowserFont(qApp->font());
        helpEngineWrapper.setBrowserWritingSystem(QFontDatabase::Latin);
    } else {
        updateApplicationFont();
    }

    updateAboutMenuText();

    QTimer::singleShot(0, this, SLOT(insertLastPages()));
    if (m_cmdLine->enableRemoteControl())
        (void)new RemoteControl(this);

    // Apply -show/-hide command-line switches per dock.
    if (m_cmdLine->contents() == CmdLineParser::Show)
        showContents();
    else if (m_cmdLine->contents() == CmdLineParser::Hide)
        hideContents();

    if (m_cmdLine->index() == CmdLineParser::Show)
        showIndex();
    else if (m_cmdLine->index() == CmdLineParser::Hide)
        hideIndex();

    if (m_cmdLine->bookmarks() == CmdLineParser::Show)
        showBookmarksDockWidget();
    else if (m_cmdLine->bookmarks() == CmdLineParser::Hide)
        hideBookmarksDockWidget();

    if (m_cmdLine->search() == CmdLineParser::Show)
        showSearch();
    else if (m_cmdLine->search() == CmdLineParser::Hide)
        hideSearch();

    // -activate switches (at most one dock is raised).
    if (m_cmdLine->contents() == CmdLineParser::Activate)
        showContents();
    else if (m_cmdLine->index() == CmdLineParser::Activate)
        showIndex();
    else if (m_cmdLine->bookmarks() == CmdLineParser::Activate)
        showBookmarksDockWidget();

    // Select the documentation filter requested on the command line if it
    // exists in this collection.
    if (!m_cmdLine->currentFilter().isEmpty()) {
        const QString &curFilter = m_cmdLine->currentFilter();
        if (helpEngineWrapper.customFilters().contains(curFilter))
            helpEngineWrapper.setCurrentFilter(curFilter);
    }

    if (usesDefaultCollection())
        QTimer::singleShot(0, this, SLOT(lookForNewQtDocumentation()));
    else
        checkInitState();

    connect(&helpEngineWrapper, SIGNAL(documentationRemoved(QString)),
        this, SLOT(documentationRemoved(QString)));
    connect(&helpEngineWrapper, SIGNAL(documentationUpdated(QString)),
        this, SLOT(documentationUpdated(QString)));

    setTabPosition(Qt::AllDockWidgetAreas, QTabWidget::North);
    GlobalActions::instance()->updateActions();
    if (helpEngineWrapper.addressBarEnabled())
        showNewAddress();
}
// Refreshes the source list and makes the list's current item the active
// source (without activating it). Always reports success.
FXbool GMSourceView::listSources()
{
    listsources();
    const auto currentItem = sourcelist->getCurrentItem();
    setSource(static_cast<GMSource*>(sourcelist->getItemData(currentItem)), false);
    return true;
}
// Constructs an animated object at world position (_x, _y) with the given
// type, selects the initial sprite-sheet frame and refreshes the sprite.
Animated::Animated(float _x, float _y, int _type):Object(_x,_y,_type)
{
    // (1,0): initial frame coordinates in the sprite sheet — TODO confirm
    // whether this is (column,row) or (row,column).
    setSource(sf::Vector2i(1,0));
    update();
} // end constructor
// destructor ImageSource::~ImageSource (void) { // release source setSource(NULL); }
// Loads the file's contents and installs them as the source.
// Returns whatever setSource() reports.
bool setFromFile(const std::string &fileName)
{
    const std::string contents = getFileContent(fileName);
    return setSource(contents);
}
// Constructs a resource root over three raw in-memory tables; presumably
// t = tree structure, n = names, d = payload data as emitted by rcc —
// confirm against setSource()'s documentation.
inline QResourceRoot(const uchar *t, const uchar *n, const uchar *d)
{
    setSource(t, n, d);
}
// Wires a freshly created backend media object into this frontend
// MediaObject: forwards the backend's signals, pushes the cached playback
// attributes down, restores the previous play/pause state, and finally
// hands the current media source to the backend.
void MediaObjectPrivate::setupBackendObject()
{
    Q_Q(MediaObject);
    Q_ASSERT(m_backendObject);
    //pDebug() << Q_FUNC_INFO;

#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
    // With stream support the state change is routed through a private slot
    // so the KIO-fallback logic can intercept it; otherwise it is forwarded
    // to the public signal directly.
    QObject::connect(m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)),
        q, SLOT(_k_stateChanged(Phonon::State, Phonon::State)));
#else
    QObject::connect(m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)),
        q, SIGNAL(stateChanged(Phonon::State, Phonon::State)));
#endif // QT_NO_PHONON_ABSTRACTMEDIASTREAM
    QObject::connect(m_backendObject, SIGNAL(tick(qint64)), q, SIGNAL(tick(qint64)));
    QObject::connect(m_backendObject, SIGNAL(seekableChanged(bool)), q, SIGNAL(seekableChanged(bool)));
#ifndef QT_NO_PHONON_VIDEO
    QObject::connect(m_backendObject, SIGNAL(hasVideoChanged(bool)), q, SIGNAL(hasVideoChanged(bool)));
#endif //QT_NO_PHONON_VIDEO
    QObject::connect(m_backendObject, SIGNAL(bufferStatus(int)), q, SIGNAL(bufferStatus(int)));
    QObject::connect(m_backendObject, SIGNAL(finished()), q, SIGNAL(finished()));
    QObject::connect(m_backendObject, SIGNAL(aboutToFinish()), q, SLOT(_k_aboutToFinish()));
    QObject::connect(m_backendObject, SIGNAL(prefinishMarkReached(qint32)), q, SIGNAL(prefinishMarkReached(qint32)));
    QObject::connect(m_backendObject, SIGNAL(totalTimeChanged(qint64)), q, SIGNAL(totalTimeChanged(qint64)));
    QObject::connect(m_backendObject, SIGNAL(metaDataChanged(const QMultiMap<QString, QString> &)),
        q, SLOT(_k_metaDataChanged(const QMultiMap<QString, QString> &)));
    QObject::connect(m_backendObject, SIGNAL(currentSourceChanged(const MediaSource&)),
        q, SLOT(_k_currentSourceChanged(const MediaSource&)));

    // set up attributes
    pINTERFACE_CALL(setTickInterval(tickInterval));
    pINTERFACE_CALL(setPrefinishMark(prefinishMark));
    pINTERFACE_CALL(setTransitionTime(transitionTime));

    // Re-establish the frontend's play/pause state on the new backend; the
    // 0 ms single-shots defer the calls to the event loop.
    switch(state) {
    case LoadingState:
    case StoppedState:
    case ErrorState:
        break;
    case PlayingState:
    case BufferingState:
        QTimer::singleShot(0, q, SLOT(_k_resumePlay()));
        break;
    case PausedState:
        QTimer::singleShot(0, q, SLOT(_k_resumePause()));
        break;
    }

    const State backendState = pINTERFACE_CALL(state());
    if (state != backendState && state != ErrorState) {
        // careful: if state is ErrorState we might be switching from a
        // MediaObject to a ByteStream for KIO fallback. In that case the state
        // change to ErrorState was already suppressed.
        pDebug() << "emitting a state change because the backend object has been replaced";
        emit q->stateChanged(backendState, state);
        state = backendState;
    }

#ifndef QT_NO_PHONON_MEDIACONTROLLER
    // Tell add-on interfaces about the new backend object.
    for (int i = 0 ; i < interfaceList.count(); ++i) {
        interfaceList.at(i)->_backendObjectChanged();
    }
#endif //QT_NO_PHONON_MEDIACONTROLLER

    // set up attributes
    if (isPlayable(mediaSource.type())) {
#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
        if (mediaSource.type() == MediaSource::Stream) {
            Q_ASSERT(mediaSource.stream());
            // Stream sources need a back-reference to this private object.
            mediaSource.stream()->d_func()->setMediaObjectPrivate(this);
        }
#endif //QT_NO_PHONON_ABSTRACTMEDIASTREAM
        pINTERFACE_CALL(setSource(mediaSource));
    }
}
// Handles a state change reported by the backend. For URL sources this
// implements the KIO fallback: when the backend reaches ErrorState, a
// platform media stream is created and installed as the source instead,
// and the intermediate state transitions are filtered so outside observers
// see a seamless change.
void MediaObjectPrivate::_k_stateChanged(Phonon::State newstate, Phonon::State oldstate)
{
    Q_Q(MediaObject);
    if (mediaSource.type() != MediaSource::Url) {
        // special handling only necessary for URLs because of the fallback
        emit q->stateChanged(newstate, oldstate);
        return;
    }

    if (errorOverride) {
        errorOverride = false;
        if (newstate == ErrorState) {
            return;
        }
        oldstate = ErrorState;
    }

    // backend MediaObject reached ErrorState, try a KioMediaSource
    if (newstate == Phonon::ErrorState && !kiofallback) {
        kiofallback = Platform::createMediaStream(mediaSource.url(), q);
        if (!kiofallback) {
            pDebug() << "backend MediaObject reached ErrorState, no KIO fallback available";
            emit q->stateChanged(newstate, oldstate);
            return;
        }
        pDebug() << "backend MediaObject reached ErrorState, trying Platform::createMediaStream now";
        ignoreLoadingToBufferingStateChange = false;
        ignoreErrorToLoadingStateChange = false;
        switch (oldstate) {
        case Phonon::BufferingState:
            // play() has already been called, we need to make sure it is called
            // on the backend with the KioMediaStream MediaSource now, too
            ignoreLoadingToBufferingStateChange = true;
            break;
        case Phonon::LoadingState:
            ignoreErrorToLoadingStateChange = true;
            // no extras
            break;
        default:
            pError() << "backend MediaObject reached ErrorState after " << oldstate
                << ". It seems a KioMediaStream will not help here, trying anyway.";
            emit q->stateChanged(Phonon::LoadingState, oldstate);
            break;
        }
        kiofallback->d_func()->setMediaObjectPrivate(this);
        // Replace the URL source with the stream fallback; auto-delete so
        // the stream dies with the source.
        MediaSource mediaSource(kiofallback);
        mediaSource.setAutoDelete(true);
        pINTERFACE_CALL(setSource(mediaSource));
        if (oldstate == Phonon::BufferingState) {
            q->play();
        }
        return;
    } else if (ignoreLoadingToBufferingStateChange && kiofallback
        && oldstate == Phonon::LoadingState) {
        // Report the change as coming from BufferingState so the fallback
        // switch stays invisible to listeners.
        if (newstate != Phonon::BufferingState) {
            emit q->stateChanged(newstate, Phonon::BufferingState);
        }
        return;
    } else if (ignoreErrorToLoadingStateChange && kiofallback && oldstate == ErrorState) {
        // Likewise hide the transient ErrorState behind LoadingState.
        if (newstate != LoadingState) {
            emit q->stateChanged(newstate, Phonon::LoadingState);
        }
        return;
    }

    emit q->stateChanged(newstate, oldstate);
}
/**
 * Creates a new SourceRef object from an existing Source.
 *
 * @param source the source to extract the reference from - NOT NULL
 */
SourceRef::SourceRef(Source* source)
    : value(NULL), source(NULL)
{
    setSource(source);
    setValue(source->getLocURI());
}
// Constructs an animated object of the given type at the base class's
// default position, selects the initial sprite-sheet frame and refreshes.
Animated::Animated(int _type):Object(_type)
{
    // (1,0): initial frame coordinates in the sprite sheet — TODO confirm
    // whether this is (column,row) or (row,column).
    setSource(sf::Vector2i(1,0));
    update();
} // end constructor
// Slot invoked when a hyperlink is activated: navigates the view to the
// linked document.
void TextShow::emitLinkClicked(const QString &name)
{
    setSource(name);
}
// Binder server-side dispatcher for IWfdVideoService: unmarshals each
// transaction's arguments from `data`, invokes the matching service
// method, and marshals the results back into `reply`. Unknown codes are
// delegated to BBinder. Read order must mirror the proxy's write order.
status_t BnWfdVideoService::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case WFDVIDEOSERVICE_SET_SOURCE: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            int32_t source = data.readInt32();
            setSource(source);
        } break;
        case WFDVIDEOSERVICE_GET_CURSOURCE: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            int32_t source = getCurSource();
            reply->writeInt32(source);
        } break;
        case WFDVIDEOSERVICE_START: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            start();
        } break;
        case WFDVIDEOSERVICE_STOP: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            stop();
        } break;
        case WFDVIDEOSERVICE_WRITE: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            // Source frame description: format, geometry, crop rectangle,
            // plane addresses, rotation and timestamp.
            uint32_t srcColorFormat = data.readInt32();
            uint32_t srcWidth = data.readInt32();
            uint32_t srcHeight = data.readInt32();
            uint32_t srcCropX = data.readInt32();
            uint32_t srcCropY = data.readInt32();
            uint32_t srcCropW = data.readInt32();
            uint32_t srcCropH = data.readInt32();
            uint32_t srcYAddr = data.readInt32();
            uint32_t srcCbAddr = data.readInt32();
            uint32_t srcCrAddr = data.readInt32();
            uint32_t rotate = data.readInt32();
            int64_t timestamp = data.readInt64();
            write(srcColorFormat, srcWidth, srcHeight,
                  srcCropX, srcCropY, srcCropW, srcCropH,
                  srcYAddr, srcCbAddr, srcCrAddr,
                  rotate, timestamp);
        } break;
        case WFDVIDEOSERVICE_READ: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            uint32_t dstColorFormat;
            uint32_t dstWidth;
            uint32_t dstHeight;
            uint32_t dstStride;
            uint32_t dstYAddr;
            uint32_t dstCbAddr;
            uint32_t dstCrAddr;
            uint32_t dstBufIndex;
            int64_t timestamp;
            // BUGFIX: the last argument had been corrupted to "×tamp"
            // (a mis-decoded "&times;tamp" HTML entity); it must be the
            // address of the local `timestamp`.
            int32_t ret = read(&dstColorFormat, &dstWidth, &dstHeight,
                               &dstStride, &dstYAddr, &dstCbAddr, &dstCrAddr,
                               &dstBufIndex, &timestamp);
            reply->writeInt32(dstColorFormat);
            reply->writeInt32(dstWidth);
            reply->writeInt32(dstHeight);
            reply->writeInt32(dstStride);
            reply->writeInt32(dstYAddr);
            reply->writeInt32(dstCbAddr);
            reply->writeInt32(dstCrAddr);
            reply->writeInt32(dstBufIndex);
            reply->writeInt64(timestamp);
            reply->writeInt32(ret);
        } break;
        case WFDVIDEOSERVICE_RELEASEBUFFER: {
            CHECK_INTERFACE(IWfdVideoService, data, reply);
            uint32_t bufIndex = data.readInt32();
            releaseBuffer(bufIndex);
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
    return NO_ERROR;
}
void DeviceBTTV::init(Miro::Server& _server, FilterParameters const * _params) { int err = 0; MIRO_DBG(VIDEO, LL_DEBUG, "Video::DeviceBTTV: Connecting DeviceBTTV."); params_ = dynamic_cast<DeviceBTTVParameters const *>(_params); assert(params_ != NULL); devName_.set(params_->device.c_str()); if (connector_.connect(ioBuffer_, devName_, 0, ACE_Addr::sap_any, 0, O_RDWR) == -1) { MIRO_LOG_OSTR(LL_ERROR, "Failed to open device: " << params_->device << "\nPropably running on the wrong machine?"); throw Miro::CException(errno, std::strerror(errno)); } fcntl(ioBuffer_.get_handle(), F_SETFD, FD_CLOEXEC); /* video capabilities */ getCapabilities(); /* video channels aka inputs */ getChannels(); if (capability.type & VID_TYPE_CAPTURE) { err = ioctl(ioBuffer_.get_handle(), VIDIOCGMBUF, &gb_buffers); if (err == -1) throw Miro::CException(errno, "DeviceBTTV::handleConnect() - VIDIOCGMBUF"); map_ = (unsigned char*)mmap(0, gb_buffers.size, PROT_READ, MAP_SHARED, ioBuffer_.get_handle(), 0); if ((int)map_ == -1) throw Miro::CException(errno, "DeviceBTTV::handleConnect() - mmap()"); } iNBuffers = gb_buffers.frames; MIRO_DBG_OSTR(VIDEO, LL_DEBUG, "Video::DeviceBTTV: buffersize: " << gb_buffers.size << "\nbuffersize/frame: " << gb_buffers.size/gb_buffers.frames << "\nframes: " << gb_buffers.frames); if (gb_buffers.frames < (int)params_->buffers) { throw Miro::Exception("Number of requested buffers in the configuration\n" \ "exceeds the number of frames available by the device."); } if (gb_buffers.frames < 2) { throw Miro::Exception("Only one frame available by the video device.\n" \ "At least two are needed."); } if (params_->buffers < 2) { throw Miro::Exception("Only one buffer available by the configuration.\n" \ "At least two are needed."); } probeAllFormats(); setFormat(); setSource(); setPalette(); setSize(); // preparing buffers VideoSubfield subfield = getSubfield(params_->subfield); if (subfield != SUBFIELD_ALL) { if (capability.type & VID_TYPE_SUBCAPTURE) { struct 
video_capture vc; vc.x = 0; vc.y = 0; vc.width = outputFormat_.width; vc.height = outputFormat_.height; vc.decimation = 1; vc.flags = (subfield == SUBFIELD_ODD)? VIDEO_CAPTURE_ODD : VIDEO_CAPTURE_EVEN; err = ioctl(ioBuffer_.get_handle(), VIDIOCSCAPTURE, &vc); if (err == -1) throw Miro::CException(errno, "DeviceBTTV::handleConnect() - VIDIOCGMBUF"); } else { throw Miro::Exception("Hardware doesn't support subfields."); } } Super::init(_server, _params); }
// Installs a new playback source. The source is optionally wrapped in a
// BufferingAudioSource (background read-ahead on `readAheadThread`) and a
// ResamplingAudioSource (sample-rate correction); the wrapper chain is
// swapped in under the callback lock, and the old chain's resources are
// released afterwards.
//
// newSource                     new source, or nullptr to deselect
// readAheadSize                 read-ahead size in samples; > 0 enables buffering
// readAheadThread               thread for the buffering source (required when readAheadSize > 0)
// sourceSampleRateToCorrectFor  source sample rate; > 0 enables resampling
// maxNumChannels                channel count used when allocating the wrappers
void AudioTransportSource::setSource (PositionableAudioSource* const newSource,
                                      int readAheadSize, TimeSliceThread* readAheadThread,
                                      double sourceSampleRateToCorrectFor, int maxNumChannels)
{
    if (source == newSource) {
        if (source == nullptr)
            return;
        setSource (nullptr, 0, nullptr); // deselect and reselect to avoid releasing resources wrongly
    }

    readAheadBufferSize = readAheadSize;
    sourceSampleRate = sourceSampleRateToCorrectFor;

    ResamplingAudioSource* newResamplerSource = nullptr;
    BufferingAudioSource* newBufferingSource = nullptr;
    PositionableAudioSource* newPositionableSource = nullptr;
    AudioSource* newMasterSource = nullptr;

    // Keep the old wrappers alive until after the swap; they are deleted
    // when these ScopedPointers go out of scope.
    ScopedPointer<ResamplingAudioSource> oldResamplerSource (resamplerSource);
    ScopedPointer<BufferingAudioSource> oldBufferingSource (bufferingSource);
    AudioSource* oldMasterSource = masterSource;

    if (newSource != nullptr) {
        newPositionableSource = newSource;

        if (readAheadSize > 0) {
            // If you want to use a read-ahead buffer, you must also provide a TimeSliceThread
            // for it to use!
            jassert (readAheadThread != nullptr);
            newPositionableSource = newBufferingSource
                = new BufferingAudioSource (newPositionableSource, *readAheadThread,
                                            false, readAheadSize, maxNumChannels);
        }

        newPositionableSource->setNextReadPosition (0);

        if (sourceSampleRateToCorrectFor > 0)
            newMasterSource = newResamplerSource
                = new ResamplingAudioSource (newPositionableSource, false, maxNumChannels);
        else
            newMasterSource = newPositionableSource;

        if (isPrepared) {
            if (newResamplerSource != nullptr && sourceSampleRate > 0 && sampleRate > 0)
                newResamplerSource->setResamplingRatio (sourceSampleRate / sampleRate);

            newMasterSource->prepareToPlay (blockSize, sampleRate);
        }
    }

    {
        // Swap the chain atomically with respect to the audio callback.
        const ScopedLock sl (callbackLock);
        source = newSource;
        resamplerSource = newResamplerSource;
        bufferingSource = newBufferingSource;
        masterSource = newMasterSource;
        positionableSource = newPositionableSource;
        inputStreamEOF = false;
        playing = false;
    }

    if (oldMasterSource != nullptr)
        oldMasterSource->releaseResources();
}
// Records the function body's source range, then completes parsing by
// wrapping the parameter list into a FunctionParameters object.
void FunctionBodyNode::finishParsing(const SourceCode& source, ParameterNode* firstParameter, const Identifier& ident)
{
    setSource(source);
    finishParsing(FunctionParameters::create(firstParameter), ident);
}
// Constructs the help browser and navigates straight to the bundled
// start page from the application's resources.
Browser::Browser(QWidget *parent)
    : QTextBrowser(parent)
{
    const QUrl startPage("qrc:/html/index.html");
    setSource(startPage);
}
// Constructs the colorize filter with the predefined colorize shader
// source (qt_gl_colorize_filter) installed as its program.
QGLPixmapColorizeFilter::QGLPixmapColorizeFilter()
{
    setSource(qt_gl_colorize_filter);
}
// Destructor: deselect the current source (tearing down the wrapper
// chain), then release whatever resources the master source still holds.
AudioTransportSource::~AudioTransportSource()
{
    setSource (nullptr);
    releaseMasterResources();
}
void ReferenceBrowser::setSourceFile( const QString & source ) { if( QFileInfo( docFolder.filePath( source ) ).exists() ) { setSource( QUrl( source ) ); } }
// Installs a new playback source (older variant without an explicit
// read-ahead thread). Optionally wraps the source in a
// BufferingAudioSource (read-ahead) and a ResamplingAudioSource
// (sample-rate correction), swaps the chain in under the callback lock,
// then releases the old chain's resources.
//
// newSource                     new source, or nullptr to deselect
// readAheadBufferSize_          read-ahead size in samples; > 0 enables buffering
// sourceSampleRateToCorrectFor  source sample rate; > 0 enables resampling
// maxNumChannels                channel count used when allocating the wrappers
void AudioTransportSource::setSource (PositionableAudioSource* const newSource,
                                      int readAheadBufferSize_,
                                      double sourceSampleRateToCorrectFor,
                                      int maxNumChannels)
{
    if (source == newSource) {
        if (source == nullptr)
            return;
        setSource (0, 0, 0); // deselect and reselect to avoid releasing resources wrongly
    }

    readAheadBufferSize = readAheadBufferSize_;
    sourceSampleRate = sourceSampleRateToCorrectFor;

    ResamplingAudioSource* newResamplerSource = nullptr;
    BufferingAudioSource* newBufferingSource = nullptr;
    PositionableAudioSource* newPositionableSource = nullptr;
    AudioSource* newMasterSource = nullptr;

    // Keep the old wrappers alive until after the swap; they are deleted
    // when these ScopedPointers go out of scope.
    ScopedPointer <ResamplingAudioSource> oldResamplerSource (resamplerSource);
    ScopedPointer <BufferingAudioSource> oldBufferingSource (bufferingSource);
    AudioSource* oldMasterSource = masterSource;

    if (newSource != nullptr) {
        newPositionableSource = newSource;

        if (readAheadBufferSize_ > 0)
            newPositionableSource = newBufferingSource
                = new BufferingAudioSource (newPositionableSource, false,
                                            readAheadBufferSize_, maxNumChannels);

        newPositionableSource->setNextReadPosition (0);

        if (sourceSampleRateToCorrectFor > 0)
            newMasterSource = newResamplerSource
                = new ResamplingAudioSource (newPositionableSource, false, maxNumChannels);
        else
            newMasterSource = newPositionableSource;

        if (isPrepared) {
            if (newResamplerSource != nullptr && sourceSampleRate > 0 && sampleRate > 0)
                newResamplerSource->setResamplingRatio (sourceSampleRate / sampleRate);

            newMasterSource->prepareToPlay (blockSize, sampleRate);
        }
    }

    {
        // Swap the chain atomically with respect to the audio callback.
        const ScopedLock sl (callbackLock);
        source = newSource;
        resamplerSource = newResamplerSource;
        bufferingSource = newBufferingSource;
        masterSource = newMasterSource;
        positionableSource = newPositionableSource;
        playing = false;
    }

    if (oldMasterSource != nullptr)
        oldMasterSource->releaseResources();
}
// Reloads the currently displayed document.
void Q3TextBrowser::reload()
{
    QString s = d->curmain;
    // Clear the cached current-document name first — presumably so that
    // setSource() does not short-circuit on an unchanged source; confirm
    // against setSource()'s implementation.
    d->curmain = QLatin1String("");
    setSource(s);
}
void QtQuick2ApplicationViewer::setMainQmlFile(const QString &file) { d->mainQmlFile = file; setSource(QUrl(d->mainQmlFile)); }
// Destructor: make sure the player no longer references any source.
AudioSourcePlayer::~AudioSourcePlayer()
{
    setSource (nullptr);
}
// Builds a four-stage downsample chain: each stage is a GLSL "copy"
// material that samples its input texture and renders into a target half
// the size of the previous one (1/2, 1/4, 1/8, 1/16 of the window).
// The four stages were previously written out long-hand; the per-stage
// setup is identical except for the resource names and the input texture,
// so it is factored into one helper lambda.
virtual MaterialsVector CreateEffectMaterialPrototypes() override
{
    // Creates one copy material: vertex + fragment copy shaders plus a
    // single clamped, unfiltered texture unit bound to `inputTexture`.
    auto makeCopyMaterial = [this](const auto& materialName,
                                   const auto& vertexName,
                                   const auto& fragmentName,
                                   const auto& inputTexture) -> Ogre::MaterialPtr
    {
        Ogre::MaterialPtr material = Ogre::MaterialManager::getSingleton().create(
            materialName + GetUniquePostfix(),
            Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
        Ogre::Technique* techniqueGL = material->getTechnique(0);
        Ogre::Pass* pass = techniqueGL->getPass(0);
        {
            auto vprogram = Ogre::HighLevelGpuProgramManager::getSingleton().createProgram(
                vertexName + GetUniquePostfix(),
                Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
                "glsl", Ogre::GPT_VERTEX_PROGRAM);
            vprogram->setSource(Shader_GL_Copy_V);
            pass->setVertexProgram(vprogram->getName());
        }
        {
            auto fprogram = Ogre::HighLevelGpuProgramManager::getSingleton().createProgram(
                fragmentName + GetUniquePostfix(),
                Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
                "glsl", Ogre::GPT_FRAGMENT_PROGRAM);
            fprogram->setSource(Shader_GL_Copy_F);
            auto unit0 = pass->createTextureUnitState(inputTexture);
            unit0->setTextureAddressingMode(Ogre::TextureUnitState::TAM_CLAMP);
            unit0->setTextureFiltering(Ogre::TFO_NONE);
            pass->setFragmentProgram(fprogram->getName());
        }
        return material;
    };

    // Stage 1 samples the scene texture; note its shader names carry no
    // stage index (kept for compatibility with the original resources).
    Ogre::MaterialPtr material1 = makeCopyMaterial(
        "Material/Downsample1/", "Shader/GL/V/", "Shader/GL/F/", TEXTURE_MARKER_SCENE);
    auto output1 = CreateOutputTexture(material1->getName(),
        mRenderWindow->getWidth() / 2, mRenderWindow->getHeight() / 2);

    // Stages 2-4 each sample the previous stage's output.
    Ogre::MaterialPtr material2 = makeCopyMaterial(
        "Material/Downsample2/", "Shader/GL/V/2/", "Shader/GL/F/2/", output1);
    auto output2 = CreateOutputTexture(material2->getName(),
        mRenderWindow->getWidth() / 4, mRenderWindow->getHeight() / 4);

    Ogre::MaterialPtr material3 = makeCopyMaterial(
        "Material/Downsample3/", "Shader/GL/V/3/", "Shader/GL/F/3/", output2);
    auto output3 = CreateOutputTexture(material3->getName(),
        mRenderWindow->getWidth() / 8, mRenderWindow->getHeight() / 8);

    Ogre::MaterialPtr material4 = makeCopyMaterial(
        "Material/Downsample4/", "Shader/GL/V/4/", "Shader/GL/F/4/", output3);

    return { material1.get(), material2.get(), material3.get(), material4.get() };
}