// WebIDL MediaStream.addTrack(): clones the given track's underlying source
// into a component owned by this stream, appends a new wrapper track to the
// matching kind-specific list, notifies the platform media center, and
// schedules an 'addtrack' event.
void MediaStream::addTrack(PassRefPtr<MediaStreamTrack> prpTrack, ExceptionCode& ec)
{
    // A stream that has already ended can no longer be mutated.
    if (ended()) {
        ec = INVALID_STATE_ERR;
        return;
    }

    if (!prpTrack) {
        ec = TYPE_MISMATCH_ERR;
        return;
    }

    RefPtr<MediaStreamTrack> requestedTrack = prpTrack;

    // Adding a track that is already a member is a silent no-op.
    if (getTrackById(requestedTrack->id()))
        return;

    // Wrap the source in a component owned by this stream's descriptor.
    RefPtr<MediaStreamComponent> clonedComponent = MediaStreamComponent::create(m_descriptor.get(), requestedTrack->component()->source());
    RefPtr<MediaStreamTrack> addedTrack = MediaStreamTrack::create(scriptExecutionContext(), clonedComponent.get());

    switch (clonedComponent->source()->type()) {
    case MediaStreamSource::TypeAudio:
        m_descriptor->addAudioComponent(clonedComponent.release());
        m_audioTracks.append(addedTrack);
        break;
    case MediaStreamSource::TypeVideo:
        m_descriptor->addVideoComponent(clonedComponent.release());
        m_videoTracks.append(addedTrack);
        break;
    }

    MediaStreamCenter::instance().didAddMediaStreamTrack(m_descriptor.get(), addedTrack->component());

    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().addtrackEvent, false, false, addedTrack.release()));
}
/** * @brief * * @return */ int QueuedVideoFilter::run() { if ( waitForProviders() ) { while ( !mStop ) { mQueueMutex.lock(); if ( !mFrameQueue.empty() ) { Debug( 3, "Got %zd frames on queue", mFrameQueue.size() ); for ( FrameQueue::iterator iter = mFrameQueue.begin(); iter != mFrameQueue.end(); iter++ ) { distributeFrame( *iter ); mFrameCount++; } mFrameQueue.clear(); } mQueueMutex.unlock(); checkProviders(); // Quite short so we can always keep up with the required packet rate for 25/30 fps usleep( INTERFRAME_TIMEOUT ); } } FeedProvider::cleanup(); FeedConsumer::cleanup(); return( !ended() ); }
// Handles removal of a remotely-originated source: drops the track backed by
// it from the kind-specific list, detaches the source from the descriptor,
// and schedules a 'removetrack' event for the removed track.
void MediaStream::removeRemoteSource(MediaStreamSource* source)
{
    if (ended())
        return;

    // Select the track list that matches the source kind.
    MediaStreamTrackVector* trackList = 0;
    switch (source->type()) {
    case MediaStreamSource::Audio:
        trackList = &m_audioTracks;
        break;
    case MediaStreamSource::Video:
        trackList = &m_videoTracks;
        break;
    }

    // Locate the track whose source matches.
    size_t foundIndex = notFound;
    for (size_t i = 0; i < trackList->size(); ++i) {
        if ((*trackList)[i]->source() == source) {
            foundIndex = i;
            break;
        }
    }
    if (foundIndex == notFound)
        return;

    m_descriptor->removeSource(source);

    RefPtr<MediaStreamTrack> removedTrack = (*trackList)[foundIndex];
    trackList->remove(foundIndex);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, removedTrack));
}
// WebIDL MediaStream.addTrack(): appends an existing track object to the
// appropriate kind-specific list and notifies the platform media center.
void MediaStream::addTrack(PassRefPtr<MediaStreamTrack> prpTrack, ExceptionCode& ec)
{
    // Ended streams reject mutation.
    if (ended()) {
        ec = INVALID_STATE_ERR;
        return;
    }

    if (!prpTrack) {
        ec = TYPE_MISMATCH_ERR;
        return;
    }

    RefPtr<MediaStreamTrack> newTrack = prpTrack;

    // Duplicate adds are silently ignored.
    if (getTrackById(newTrack->id()))
        return;

    switch (newTrack->source()->type()) {
    case MediaStreamSource::Audio:
        m_audioTracks.append(newTrack);
        break;
    case MediaStreamSource::Video:
        m_videoTracks.append(newTrack);
        break;
    }

    MediaStreamCenter::shared().didAddMediaStreamTrack(newTrack->source());
}
// Fires the 'ended' DOM event the first time the underlying stream ends;
// subsequent calls are no-ops.
void MediaStream::streamDidEnd()
{
    if (ended())
        return;

    scheduleDispatchEvent(Event::create(eventNames().endedEvent, false, false));
}
// Handles removal of a remotely-removed component: finds the track wrapping
// it, drops it from the kind-specific list, and schedules 'removetrack'.
void MediaStream::removeRemoteTrack(MediaStreamComponent* component)
{
    if (ended())
        return;

    // Select the list matching the component's source kind.
    MediaStreamTrackVector* trackList = 0;
    switch (component->source()->type()) {
    case MediaStreamSource::TypeAudio:
        trackList = &m_audioTracks;
        break;
    case MediaStreamSource::TypeVideo:
        trackList = &m_videoTracks;
        break;
    }

    // Find the track that wraps this exact component.
    size_t foundIndex = notFound;
    for (size_t i = 0; i < trackList->size(); ++i) {
        if ((*trackList)[i]->component() == component) {
            foundIndex = i;
            break;
        }
    }
    if (foundIndex == notFound)
        return;

    RefPtr<MediaStreamTrack> removedTrack = (*trackList)[foundIndex];
    trackList->remove(foundIndex);
    scheduleDispatchEvent(MediaStreamTrackEvent::create(eventNames().removetrackEvent, false, false, removedTrack));
}
// Animation tick: moves every registered widget one step toward its target
// position (data[i].second) using the current velocities vX/vY, which are
// accelerated on every tick. A widget is dropped from the animation once
// both of its coordinates have reached their targets; when the last widget
// finishes, the animator is reset via setup() and ended() is emitted.
void Animator::timerEvent(QTimerEvent *event){
    // Integrate acceleration into velocity once per tick.
    vX += accelerationX;
    vY += accelerationY;
    // Iterate backwards so removeAt(i) does not disturb unvisited entries.
    for(int i=data.length()-1; i>=0; i--){
        QWidget* w = data.at(i).first;
        QPoint p = w->pos();
        int end = 2; // axes still in flight; 0 => both axes have arrived
        // Step along X only while the (truncated) step would not overshoot
        // the target; otherwise snap X to the target and mark that axis done.
        // NOTE(review): when vX == 0 the else branch snaps X straight to the
        // target — presumably intended, but confirm.
        if((vX > 0 && p.x() + (int) vX < data.at(i).second.x() )|| (vX < 0 && p.x() + (int) vX > data.at(i).second.x()))
            p.setX(p.x()+ (int) vX);
        else{
            p.setX(data.at(i).second.x());
            end--;
        }
        // Same stepping/snapping logic for the Y axis.
        if((vY > 0 && p.y() + (int) vY < data.at(i).second.y() )|| (vY < 0 && p.y() + (int) vY > data.at(i).second.y()))
            p.setY(p.y()+ (int) vY);
        else{
            p.setY(data.at(i).second.y());
            end--;
        }
        w->move(p);
        if(end==0){
            data.removeAt(i);
        }
    }
    // All widgets arrived: reset animator state and notify listeners.
    if(data.length()==0){
        this->setup();
        emit ended();
    }
}
// WebIDL MediaStream.addTrack() (Blink variant): clones the track's source
// into a component owned by this stream, appends the wrapper track to the
// matching list, then registers the component with the descriptor and the
// platform media center.
void MediaStream::addTrack(PassRefPtr<MediaStreamTrack> prpTrack, ExceptionState& es)
{
    // Ended streams cannot be mutated.
    if (ended()) {
        es.throwUninformativeAndGenericDOMException(InvalidStateError);
        return;
    }

    if (!prpTrack) {
        es.throwUninformativeAndGenericDOMException(TypeMismatchError);
        return;
    }

    RefPtr<MediaStreamTrack> requestedTrack = prpTrack;

    // Already a member of this stream: nothing to do.
    if (getTrackById(requestedTrack->id()))
        return;

    RefPtr<MediaStreamComponent> clonedComponent = MediaStreamComponent::create(m_descriptor.get(), requestedTrack->component()->source());
    RefPtr<MediaStreamTrack> addedTrack = MediaStreamTrack::create(executionContext(), clonedComponent.get());

    switch (clonedComponent->source()->type()) {
    case MediaStreamSource::TypeAudio:
        m_audioTracks.append(addedTrack);
        break;
    case MediaStreamSource::TypeVideo:
        m_videoTracks.append(addedTrack);
        break;
    }

    m_descriptor->addComponent(clonedComponent.release());
    MediaStreamCenter::instance().didAddMediaStreamTrack(m_descriptor.get(), addedTrack->component());
}
// Marks the stream's descriptor ended (idempotent) and fires the 'ended'
// DOM event.
void MediaStream::streamEnded()
{
    if (ended())
        return;

    m_descriptor->setEnded();
    scheduleDispatchEvent(Event::create(EventTypeNames::ended));
}
// Marks the stream's descriptor ended (idempotent) and fires the 'ended'
// DOM event (non-bubbling, non-cancelable).
void MediaStream::streamEnded()
{
    if (ended())
        return;

    m_descriptor->setEnded();
    scheduleDispatchEvent(Event::create(eventNames().endedEvent, false, false));
}
// Stops a local stream: notifies the platform layer first, then flips the
// stream into the ended state. Safe to call repeatedly.
void MediaStream::stop()
{
    if (ended())
        return;

    MediaStreamCenter::shared().didStopLocalMediaStream(descriptor());
    setEnded();
}
// WebIDL MediaStreamTrack.stop(): transitions the track to the ended state,
// informs the platform, dispatches the 'ended' event, and propagates the
// ended state. Repeated calls are no-ops.
void MediaStreamTrack::stopTrack(ExceptionState& exceptionState)
{
    if (ended())
        return;

    m_readyState = MediaStreamSource::ReadyStateEnded;
    MediaStreamCenter::instance().didStopMediaStreamTrack(component());
    dispatchEvent(Event::create(EventTypeNames::ended));
    propagateTrackEnded();
}
// Stops this local stream: the platform media center is notified first, then
// streamEnded() marks the stream ended and fires the DOM event.
void LocalMediaStream::stop()
{
    if (ended())
        return;

    MediaStreamCenter::instance().didStopLocalMediaStream(descriptor());
    streamEnded();
}
// WebIDL MediaStreamTrack.enabled setter: updates the component's enabled
// flag and, for non-ended tracks, notifies the platform of the change.
void MediaStreamTrack::setEnabled(bool enabled)
{
    // Setting the current value again is a no-op.
    if (enabled == m_component->enabled())
        return;

    m_component->setEnabled(enabled);

    // Only live tracks need the platform informed.
    if (!ended())
        MediaStreamCenter::instance().didSetMediaStreamTrackEnabled(m_component.get());
}
// Flat push-button whose visual transitions are driven by three animators:
// _eff and _hover start from values derived from the static style `_st`,
// _opacity drives the fade whose completion triggers onEnd().
// NOTE(review): setTimer(15)/setTimer(12) presumably set per-step tick
// intervals in milliseconds — confirm against the animator class.
FlatButton::FlatButton(const QString &text, QWidget *parent)
    : ButtonWidget(parent), _opacity(this), _eff(this), _hover(this)
{
    setText(text);
    setFont(QFont(_st.font_family, _st.font_size));
    // Animation start values come from the style sheet constants.
    _eff.setStartValue(float(_st.height));
    _hover.setStartValue(float(_st.fill_opacity * 2));
    _eff.setTimer(15);
    _hover.setTimer(15);
    _opacity.setTimer(12);
    // Fade completion is routed to onEnd().
    QObject::connect(&_opacity, SIGNAL(ended()), this, SLOT(onEnd()));
}
// Unenrolls the UV (course unit) whose code equals Name from this semester.
// A code that is not currently enrolled is silently ignored.
void SemestreSuivi::Desinscription(QString Name) //! Removes a UV from the semester.
{
    // Locate the enrolled UV whose code matches Name.
    auto iter = Where<UVEncours>(UVs.begin(), UVs.end(), [=](const UVEncours& x) { return x.get_uv().get_code() == Name; });
    if (iter.ended())
        return; // not enrolled — nothing to remove
    auto& item = *iter;
    // NOTE(review): re-searching with find() to obtain a plain iterator looks
    // redundant if `iter` already wraps one — confirm Where's interface
    // before simplifying.
    auto stditer = find(UVs.begin(), UVs.end(), item);
    UVs.erase(stditer);
}
/**
 * @brief Swaps the U and V chroma planes of every queued frame.
 *
 * Requires the upstream provider to deliver PIX_FMT_YUV420P; any other
 * pixel format aborts via Fatal(). For each frame the full-resolution Y
 * plane is copied unchanged and the two quarter-size chroma planes are
 * exchanged; the result is redistributed as a new VideoFrame.
 *
 * @return Non-zero when the filter terminated without reaching the ended state.
 */
int FilterSwapUV::run()
{
    if ( waitForProviders() )
    {
        // Geometry and format are fixed by the provider for the whole run.
        uint16_t inputWidth = videoProvider()->width();
        uint16_t inputHeight = videoProvider()->height();
        PixelFormat inputPixelFormat = videoProvider()->pixelFormat();

        ByteBuffer tempBuffer;

        // YUV420P layout: Y plane (w*h bytes) followed by two quarter-size
        // chroma planes (U then V).
        int yChannelSize = inputWidth*inputHeight;
        int uvChannelSize = yChannelSize/4;

        if ( inputPixelFormat != PIX_FMT_YUV420P )
            Fatal( "Can't swap UV for pixel format %d", inputPixelFormat );

        while ( !mStop )
        {
            mQueueMutex.lock();
            if ( !mFrameQueue.empty() )
            {
                Debug( 3, "Got %zd frames on queue", mFrameQueue.size() );
                for ( FrameQueue::iterator iter = mFrameQueue.begin(); iter != mFrameQueue.end(); iter++ )
                {
                    //const VideoFrame *frame = dynamic_cast<const VideoFrame *>(iter->get());
                    //FramePtr framePtr( *iter );
                    const FeedFrame *frame = (*iter).get();

                    Debug(1, "%s / Provider: %s, Source: %s, Frame: %p (%ju / %.3f) - %lu", cname(), frame->provider()->cidentity(), frame->originator()->cidentity(), frame, frame->id(), frame->age(), frame->buffer().size() );

                    //Image image( inputPixelFormat, inputWidth, inputHeight, frame->buffer().data() );

                    // Copy Y untouched, then write the source's V plane into
                    // the U slot and the source's U plane into the V slot.
                    tempBuffer.size( frame->buffer().size() );
                    memcpy( tempBuffer.data(), frame->buffer().data(), yChannelSize );
                    memcpy( tempBuffer.data()+yChannelSize, frame->buffer().data()+yChannelSize+uvChannelSize, uvChannelSize);
                    memcpy( tempBuffer.data()+yChannelSize+uvChannelSize, frame->buffer().data()+yChannelSize, uvChannelSize);

                    VideoFrame *videoFrame = new VideoFrame( this, *iter, mFrameCount, frame->timestamp(), tempBuffer );
                    distributeFrame( FramePtr( videoFrame ) );
                    //delete *iter;
                    mFrameCount++;
                }
                mFrameQueue.clear();
            }
            mQueueMutex.unlock();
            checkProviders();
            // Quite short so we can always keep up with the required packet rate for 25/30 fps
            usleep( INTERFRAME_TIMEOUT );
        }
    }
    FeedProvider::cleanup();
    FeedConsumer::cleanup();
    return( !ended() );
}
// Polled by m_endChecker: determines whether the current sound has finished
// and, if so, stops the checker and emits ended(). With aRts support
// compiled in and sounds enabled, the play object's state decides; queued
// sounds (m_nextSounds) are then chained via play(). With sounds disabled,
// or when built WITHOUT_ARTS, playback is treated as finished immediately.
void artsPlayer::checkEnded()
{
#ifndef WITHOUT_ARTS
    if (blinkenSettings::playSounds())
    {
        if (m_playobj -> state() != Arts::posPlaying)
        {
            m_endChecker -> stop();
            emit ended();
            // Chain into the next queued sound, if any.
            if (m_nextSounds.size() > 0) play();
        }
    }
    else
    {
        // Sounds disabled: report completion right away.
        m_endChecker -> stop();
        emit ended();
    }
#else
    // Built without aRts: nothing is ever actually playing.
    m_endChecker -> stop();
    emit ended();
#endif
}
// Builds the model and wires it to an asynchronous CsvParser. The structural
// and data-change signals use queued connections so the model is updated on
// the receiver's event loop; the ended() -> finishedLoading() connection
// uses the default (auto) connection type.
QCsvModel::QCsvModel( QObject *parent )
  : QAbstractTableModel( parent ), d( new Private( this ) )
{
  d->mParser = new CsvParser( this );

  connect( d->mParser, SIGNAL(columnCountChanged(int)), this, SLOT(columnCountChanged(int)), Qt::QueuedConnection );
  connect( d->mParser, SIGNAL(rowCountChanged(int)), this, SLOT(rowCountChanged(int)), Qt::QueuedConnection );
  connect( d->mParser, SIGNAL(dataChanged(QString,int,int)), this, SLOT(fieldChanged(QString,int,int)), Qt::QueuedConnection );
  connect( d->mParser, SIGNAL(ended()), this, SLOT(finishedLoading()) );
}
// Invisible audio player built on WMediaPlayer. The client-side ended()
// handler implements looping entirely in JavaScript: while the element's
// 'loops' attribute is non-zero it is decremented and playback restarted.
// ended() is then marked not-exposed so no server round-trip occurs.
SoundManager::SoundManager()
    : WMediaPlayer(MediaType::Audio)
{
    // Hide the player widget: zero size, clipped overflow, no controls,
    // no decoration border.
    resize(0, 0);
    setAttributeValue("style", "overflow: hidden");
    controlsWidget()->hide();
    decorationStyle().setBorder(WBorder());

    // Assemble the JS loop handler; jsRef()/jsPlayerRef() splice in the
    // element and jPlayer references.
    WStringStream ss;
    ss << "function() { "
          """var s = " << jsRef() << ", l = s.getAttribute('loops');"
          """if (l && l != '0') {"
          "" "s.setAttribute('loops', l - 1);"
          "" << jsPlayerRef() << ".jPlayer('play');"
          """}"
          "}";
    ended().connect(ss.str());
    ended().setNotExposed();
}
// Scans rightwards from `rect` across a sprite-sheet image: while the pixel
// just right of the current rect and one row above its top
// (rect.left + rect.width + 1, rect.top - 1) is magenta, the next
// same-sized frame is appended to rect_list. Emits ended() when done.
// NOTE(review): rect.top - 1 underflows when rect.top == 0, and the x bound
// mixes signed int with unsigned getSize().x — confirm callers always pass
// rects strictly inside the image with top >= 1.
void RectList::findRectList(sf::IntRect rect, const sf::Image& image)
{
    sf::Vector2i p_tmp; // NOTE(review): unused — candidate for removal
    // The starting rect is always the first entry.
    rect_list = std::vector<sf::IntRect>({ rect });
    while (rect.left + rect.width + 1 < image.getSize().x)
        if (image.getPixel(rect.left + rect.width + 1, rect.top - 1) == sf::Color::Magenta)
        {
            // Magenta marker found: advance one frame width and record it.
            rect.left = rect.left + rect.width + 1;
            rect_list.push_back(rect);
        }
        else
            break;
    emit ended();
}
/**
 * @brief Overlays a timestamp on each queued video frame.
 *
 * Wraps each queued frame's buffer in an Image and calls timestampImage();
 * when that reports a change, a new VideoFrame carrying the annotated
 * buffer is distributed, otherwise the original frame is passed through
 * untouched.
 *
 * @return Non-zero when the filter terminated without reaching the ended state.
 */
int ImageTimestamper::run()
{
    if ( waitForProviders() )
    {
        // Geometry and format are fixed by the provider for the whole run.
        uint16_t inputWidth = videoProvider()->width();
        uint16_t inputHeight = videoProvider()->height();
        PixelFormat inputPixelFormat = videoProvider()->pixelFormat();

        while ( !mStop )
        {
            mQueueMutex.lock();
            if ( !mFrameQueue.empty() )
            {
                Debug( 3, "Got %zd frames on queue", mFrameQueue.size() );
                for ( FrameQueue::iterator iter = mFrameQueue.begin(); iter != mFrameQueue.end(); iter++ )
                {
                    //const VideoFrame *frame = dynamic_cast<const VideoFrame *>(iter->get());
                    //FramePtr framePtr( *iter );
                    const FeedFrame *frame = (*iter).get();

                    Debug(1, "%s / Provider: %s, Source: %s, Frame: %p (%ju / %.3f) - %lu", cname(), frame->provider()->cidentity(), frame->originator()->cidentity(), frame, frame->id(), frame->age(), frame->buffer().size() );

                    // Wrap the raw buffer so timestampImage() can draw on it.
                    Image image( inputPixelFormat, inputWidth, inputHeight, frame->buffer().data() );

                    if ( timestampImage( &image, frame->timestamp() ) )
                    {
                        // Timestamp drawn: forward a new annotated frame.
                        VideoFrame *videoFrame = new VideoFrame( this, *iter, mFrameCount, frame->timestamp(), image.buffer() );
                        distributeFrame( FramePtr( videoFrame ) );
                    }
                    else
                    {
                        // Nothing changed: pass the original frame through.
                        distributeFrame( *iter );
                    }
                    //delete *iter;
                    mFrameCount++;
                }
                mFrameQueue.clear();
            }
            mQueueMutex.unlock();
            checkProviders();
            // Quite short so we can always keep up with the required packet rate for 25/30 fps
            usleep( INTERFRAME_TIMEOUT );
        }
    }
    FeedProvider::cleanup();
    FeedConsumer::cleanup();
    return( !ended() );
}
// Validates the line currently being typed against the expected line of the
// speed text. While the input is a correct prefix, progress counters and
// the remaining-text view are updated; completing a line advances mLine and
// resets the current-line buffer; completing the whole text emits ended().
// The valid property reflects whether the input is still a correct prefix.
void WordChecker::checkWord(const QString currentLine)
{
    // Expected content of the line being typed, truncated to the length
    // entered so far for prefix comparison.
    QString compareLine = mSpeedText.section('\n', mLine, mLine);
    QString compareString = compareLine.left(currentLine.length());

    if (currentLine.compare(compareString) == 0) {
        // Update the number of correct characters entered.
        mNbrOfCharacters = mEnteredLines.length() + currentLine.length();
        emit nbrOfCharactersChanged(mNbrOfCharacters);

        // Update the number of characters that is left to enter.
        mRemainingTextLength = mSpeedTextLength - mNbrOfCharacters;

        // Updates the Qstring holding the text that is not yet typed.
        mRemainingText = mSpeedText.right(mRemainingTextLength);
        emit remainingTextChanged(mRemainingText);

        if (compareLine.length() == currentLine.length()) {
            // If at the end of a line, update the line and enteredLines
            // property with the number of lines and the text entered so far
            mLine++;

            // Update the entered lines.
            mEnteredLines = mSpeedText.section('\n', 0, mLine - 1) + "\n";
            emit enteredLinesChanged(mEnteredLines);

            // lineChanged signal needs to be emitted after enteredLinesChanged. If not, the
            // speedtext will be updated with the just entered line instead of a new one.
            emit lineChanged(mLine);

            // Start a new line by setting the current correct line to an empty string.
            mCurrentCorrectLine = "";
            emit currentCorrectLineChanged(mCurrentCorrectLine);

            if (mNbrOfCharacters >= mSpeedTextLength) {
                // When the entire text has been correctly entered return end.
                emit ended();
            }
        } else {
            // Mid-line: publish the partial correct input.
            mCurrentCorrectLine = currentLine;
            emit currentCorrectLineChanged(mCurrentCorrectLine);
        }
        setValid(true);
    } else {
        // Input diverged from the expected text.
        setValid(false);
    }
}
// Stops playback. State is parked at Invalid while libvlc winds down, then
// after a 50 ms delay the state, current frame and subtitles are reset and
// ended() is emitted.
// NOTE(review): the delayed lambda captures `this` — confirm the VPlayer
// cannot be destroyed within the 50 ms window.
void VPlayer::stop()
{
    if(mp){
        if(state!=Stop){
            state=Invalid;
            libvlc_media_player_stop(mp);
            // Deferred cleanup gives libvlc time to actually stop.
            Utils::delayExec(50,[this](){
                state=Stop;
                frame=QPixmap();
                subtitle.clear();
                emit ended();
            });
        }
    }
}
bool MediaStreamTrack::hasPendingActivity() const { // If 'ended' listeners exist and the object hasn't yet reached // that state, keep the object alive. // // An otherwise unreachable MediaStreamTrack object in an non-ended // state will otherwise indirectly be transitioned to the 'ended' state // while finalizing m_component. Which dispatches an 'ended' event, // referring to this object as the target. If this object is then GCed // at the same time, v8 objects will retain (wrapper) references to // this dead MediaStreamTrack object. Bad. // // Hence insisting on keeping this object alive until the 'ended' // state has been reached & handled. return !ended() && hasEventListeners(EventTypeNames::ended); }
/**
 * @brief Acquires frames from a V1-format shared-memory segment and feeds
 * them downstream.
 *
 * Polls queryMemory() at half-second intervals until the segment reports
 * valid, attaches the image ring buffer with the locally configured
 * geometry, then forwards a frame every time last_write_index advances.
 * Stops itself if the segment disappears or becomes invalid.
 *
 * @return Non-zero when the provider terminated without reaching the ended state.
 */
int MemoryInputV1::run()
{
    SharedData sharedData;
    memset( &sharedData, 0, sizeof(sharedData) );

    // Wait for the writer side to publish a valid shared segment.
    while( !mStop )
    {
        Info( "Querying memory" );
        if ( queryMemory( &sharedData ) && sharedData.valid )
            break;
        Info( "Can't query shared memory" );
        usleep( 500000 );
    }

    //Info( "SHV: %d", sharedData.valid );
    //mImageCount = 40;
    //mPixelFormat = sharedData.imageFormat;
    //mPixelFormat = PIX_FMT_UYVY422;
    //mPixelFormat = PIX_FMT_YUYV422;
    //mPixelFormat = PIX_FMT_RGB24;
    //mFrameRate = 15;
    ////mImageWidth = sharedData.imageWidth;
    //mImageWidth = 720;
    ////mImageHeight = sharedData.imageHeight;
    //mImageHeight = 576;

    // Map the frame ring buffer; geometry comes from member configuration,
    // not from the queried sharedData (see commented-out history above).
    attachMemory( mImageCount, mPixelFormat, mImageWidth, mImageHeight );

    int lastWriteIndex = 0;
    while( !mStop )
    {
        // Bail out if the writer invalidated or removed the segment.
        if ( !mSharedData || !mSharedData->valid )
        {
            stop();
            break;
        }
        // A changed write index signals a newly published frame.
        if ( mSharedData->last_write_index != lastWriteIndex )
        {
            const FeedFrame *frame = loadFrame();
            //Info( "Sending frame %d", frame->id() );
            lastWriteIndex = mSharedData->last_write_index;
            distributeFrame( FramePtr( frame ) );
            //delete frame;
            mFrameCount++;
        }
        usleep( INTERFRAME_TIMEOUT );
    }
    cleanup();
    return( !ended() );
}
// Maps the track's internal state onto the WebIDL readyState string.
String MediaStreamTrack::readyState() const
{
    // A stopped track always reports "ended", whatever the source says.
    if (ended())
        return "ended";

    switch (m_readyState) {
    case MediaStreamSource::ReadyStateLive:
        return "live";
    case MediaStreamSource::ReadyStateMuted:
        return "muted";
    case MediaStreamSource::ReadyStateEnded:
        return "ended";
    }

    NOTREACHED();
    return String();
}
// Constructs the player: routes a Phonon media object into a game-category
// audio output at 80% volume, wires end-of-playback handling, and maps each
// game colour to its sound file. m_warnTimer is a single-shot timer whose
// timeout is forwarded as the ended() signal.
soundsPlayer::soundsPlayer()
    : m_audioOutput(Phonon::GameCategory)
{
    m_audioOutput.setVolume( 0.8f );
    Phonon::createPath(&m_mediaObject, &m_audioOutput);
    connect(&m_mediaObject, SIGNAL(finished()), this, SLOT(playEnded()));

    m_allSound = "sounds/lose.wav";
    m_greenSound = "sounds/1.wav";
    // BUG FIX: the red/blue/yellow assignments previously used the comma
    // operator (e.g. `m_redSound = "appdata","sounds/2.wav";`), which
    // assigned the literal "appdata" and silently discarded the actual
    // sound path. Assign the paths directly, consistent with m_greenSound.
    m_redSound = "sounds/2.wav";
    m_blueSound = "sounds/3.wav";
    m_yellowSound = "sounds/4.wav";

    connect(&m_warnTimer, SIGNAL(timeout()), this, SIGNAL(ended()));
    m_warnTimer.setSingleShot(true);
}
// Completion callback for one read request. If the request succeeded and
// the transfer still has chunks left, the next request is issued and the
// buffers are kept; otherwise the final status is reported via ended() and
// the transfer buffer and request are released.
tmpl(void)::read_done( req_t *req )
{
    if ( ! req->failed() && m_chunck_offset < m_transfer_size ) {
#ifdef SOCLIB_MODULE_DEBUG
        std::cout << name() << " completed transferring a chunck. Do now the next one..." << std::endl;
#endif
        next_req();
        return;
    }
    // Transfer finished (or failed): report final status to the initiator.
    ended( req->failed() ? BLOCK_DEVICE_READ_ERROR : BLOCK_DEVICE_READ_SUCCESS );
    // NOTE(review): if m_data was allocated with new[], this should be
    // delete[] — confirm the allocation site.
    delete m_data;
    delete req;
}
void MediaStreamTrack::sourceChangedState() { if (ended()) return; m_readyState = m_component->source()->readyState(); switch (m_readyState) { case MediaStreamSource::ReadyStateLive: dispatchEvent(Event::create(EventTypeNames::unmute)); break; case MediaStreamSource::ReadyStateMuted: dispatchEvent(Event::create(EventTypeNames::mute)); break; case MediaStreamSource::ReadyStateEnded: dispatchEvent(Event::create(EventTypeNames::ended)); propagateTrackEnded(); break; } }