Example #1
0
void
ChannelMediaDecoder::NotifyDownloadEnded(nsresult aStatus)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
  AbstractThread::AutoEnter context(AbstractMainThread());

  LOG("NotifyDownloadEnded, status=%" PRIx32, static_cast<uint32_t>(aStatus));

  if (aStatus == NS_BINDING_ABORTED) {
    // Download has been cancelled by user.
    GetOwner()->LoadAborted();
    return;
  }

  UpdatePlaybackRate();

  if (NS_SUCCEEDED(aStatus)) {
    // A final progress event will be fired by the MediaResource calling
    // DownloadSuspended on the element.
    // Also NotifySuspendedStatusChanged() will be called to update readyState
    // if download ended with success.
  } else if (aStatus != NS_BASE_STREAM_CLOSED) {
    NetworkError();
  }
}
Example #2
0
already_AddRefed<SourceBufferDecoder>
MediaSourceReader::CreateSubDecoder(const nsACString& aType)
{
  if (IsShutdown()) {
    return nullptr;
  }
  MOZ_ASSERT(GetTaskQueue());
  nsRefPtr<SourceBufferDecoder> decoder =
    new SourceBufferDecoder(new SourceBufferResource(aType), mDecoder);
  nsRefPtr<MediaDecoderReader> reader(CreateReaderForType(aType, decoder));
  if (!reader) {
    return nullptr;
  }
  // Set a callback on the subreader that forwards calls to this reader.
  // This reader will then forward them onto the state machine via this
  // reader's callback.
  RefPtr<MediaDataDecodedListener<MediaSourceReader>> callback =
    new MediaDataDecodedListener<MediaSourceReader>(this, GetTaskQueue());
  reader->SetCallback(callback);
  reader->SetTaskQueue(GetTaskQueue());
  reader->Init(nullptr);

  MSE_DEBUG("MediaSourceReader(%p)::CreateSubDecoder subdecoder %p subreader %p",
            this, decoder.get(), reader.get());
  decoder->SetReader(reader);
  return decoder.forget();
}
void
MediaOmxCommonDecoder::ResumeStateMachine()
{
  MOZ_ASSERT(NS_IsMainThread());
  DECODER_LOG(LogLevel::Debug, ("%s current time %f", __PRETTY_FUNCTION__, mLogicalPosition));

  if (IsShutdown()) {
    return;
  }

  if (!GetStateMachine()) {
    return;
  }

  GetStateMachine()->DispatchAudioOffloading(false);

  mFallbackToStateMachine = true;
  mAudioOffloadPlayer = nullptr;
  SeekTarget target = SeekTarget(mLogicalPosition,
                                 SeekTarget::Accurate,
                                 MediaDecoderEventVisibility::Suppressed);
  // Call Seek of MediaDecoderStateMachine to suppress seek events.
  GetStateMachine()->InvokeSeek(target);

  // exit dormant state
  GetStateMachine()->DispatchSetDormant(false);
  UpdateLogicalPosition();
}
Example #4
0
void CDownloadEngine::ThreadProc()
{
	CURLM *multi_curl = curl_multi_init();
	if (!multi_curl)
	{
		return;
	}

	while (!IsShutdown())
	{
		ScanPrepareTaskList();
		
		int still_running = 0;
		curl_multi_perform(multi_curl,&still_running);

		bool bHasHandle = ScanRunningTaskList(multi_curl);

		if (!bHasHandle)
		{
			::Sleep(SLEEP_MS);
			continue;
		}

		PerformMutliCurl(multi_curl,bHasHandle);

		DispatchTaskStatus(multi_curl);
	}
	
	TaskListClearnUp(multi_curl);
	curl_multi_cleanup(multi_curl);
}
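The loop above drives libcurl's multi interface through project wrappers (ScanPrepareTaskList, ScanRunningTaskList, PerformMutliCurl, DispatchTaskStatus). For reference, a minimal self-contained sketch of the same drive pattern using only public libcurl calls follows; the g_shutdown flag, the DriveMultiLoop name, and the hard-coded URL are illustrative assumptions, not part of the original engine.

#include <curl/curl.h>
#include <atomic>

static std::atomic<bool> g_shutdown{false};  // stand-in for the engine's IsShutdown()

void DriveMultiLoop()
{
	// curl_global_init(CURL_GLOBAL_DEFAULT) is assumed to have been called at startup.
	CURLM* multi = curl_multi_init();
	if (!multi)
		return;

	// One easy handle for illustration; the real engine adds a handle per task.
	CURL* easy = curl_easy_init();
	if (easy)
	{
		curl_easy_setopt(easy, CURLOPT_URL, "https://example.com/");
		curl_multi_add_handle(multi, easy);
	}

	int still_running = 0;
	while (!g_shutdown.load())
	{
		curl_multi_perform(multi, &still_running);

		// Drain completion messages, mirroring what DispatchTaskStatus() reports.
		int msgs_left = 0;
		while (CURLMsg* msg = curl_multi_info_read(multi, &msgs_left))
		{
			if (msg->msg == CURLMSG_DONE)
			{
				curl_multi_remove_handle(multi, msg->easy_handle);
				curl_easy_cleanup(msg->easy_handle);
			}
		}
		if (still_running == 0)
			break;

		// Block until socket activity or timeout instead of a fixed ::Sleep(SLEEP_MS).
		int numfds = 0;
		curl_multi_wait(multi, nullptr, 0, 1000 /* ms */, &numfds);
	}

	curl_multi_cleanup(multi);
}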
Example #5
0
bool TMdbPeerInfo::AddEventMonitor(MDB_UINT16 events, bool bLock /* = true */)
{
    if(pPeerProactor == NULL) return false;
    bool bRet = true;
    if(bLock)
    {
        m_oSpinLock.Lock();
    }    
    if(!IsShutdown() && (events == 0 || GetPeerFlag(events) != events))// If any of the requested events is not set yet, it needs to be added
    {
        TADD_DETAIL("add event:peer[%d],flag[%0x],event[%0x]", m_uiSocketID, m_uiPeerFlag, events);
        SetPeerFlag(events);
        // If we are already on the proactor thread, there is no need to wake the proactor to trigger the update
        if(pPeerProactor->GetThreadHandle() == mdb_ntc_zthread_handle())
        {
            bRet = pPeerProactor->AddEventMonitor(m_uiSocketID, events);
        }
        else
        {
            bRet = pPeerProactor->AsyncAddEventMonitor(m_uiSocketID, events);
        }
    }
    else
    {
        TADD_DETAIL("no need to add event:peer[%d],flag[%0x],event[%0x]", m_uiSocketID, m_uiPeerFlag, events);
    }
    if(bLock)
    {
        m_oSpinLock.Unlock();
    }
    return bRet;
}
Example #6
0
nsRefPtr<MediaDecoderReader::SeekPromise>
MediaSourceReader::Seek(int64_t aTime, int64_t aIgnored /* Used only for ogg which is non-MSE */)
{
  MSE_DEBUG("Seek(aTime=%lld)", aTime);

  MOZ_DIAGNOSTIC_ASSERT(mSeekPromise.IsEmpty());
  MOZ_DIAGNOSTIC_ASSERT(mAudioPromise.IsEmpty());
  MOZ_DIAGNOSTIC_ASSERT(mVideoPromise.IsEmpty());
  nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);

  if (IsShutdown()) {
    mSeekPromise.Reject(NS_ERROR_FAILURE, __func__);
    return p;
  }

  // Store pending seek target in case the track buffers don't contain
  // the desired time and we delay doing the seek.
  mPendingSeekTime = aTime;

  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mWaitingForSeekData = true;
    mDropAudioBeforeThreshold = false;
    mDropVideoBeforeThreshold = false;
    mTimeThreshold = 0;
  }

  AttemptSeek();
  return p;
}
void TSmartMonitor::Run()
{
	while( !IsShutdown() )
	{
		m_pServer->Monitor();
		Sleep(300);
	}
}
Example #8
0
// XXX
// Status: a quick-and-dirty implementation whose main goal is to pass the tests.
// Open question: there seem to be many possible approaches here (lock-free queue,
// producer-consumer, leader-follower, etc.); which one should actually be used?
Work* WorkQueue::GetWork() {
	pthread_mutex_lock(&mutex);
	// XXX The logic here is not elegant.
	while(works.empty() && IsShutdown() == false){
		block_cnt++;
		pthread_cond_wait(&cond, &mutex);
		block_cnt--;
	}
	if (IsShutdown()){
		if (block_cnt) pthread_cond_signal(&cond);
		pthread_mutex_unlock(&mutex);
		return NULL;
	}
	Work* ret = works.front();
	works.pop_front();
	pthread_mutex_unlock(&mutex);
	return ret;
}
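GetWork() above blocks in pthread_cond_wait until either work arrives or the queue is shut down, so it needs a Shutdown() counterpart that wakes the sleeping consumers. Below is a minimal sketch under the assumption that the same mutex/cond members are used and that IsShutdown() reads a boolean member (named shutdown_ here purely for illustration).

// Hypothetical counterpart to GetWork(): mark the queue as shut down and wake
// every consumer blocked in pthread_cond_wait so it can observe IsShutdown()
// and return NULL.
void WorkQueue::Shutdown() {
	pthread_mutex_lock(&mutex);
	shutdown_ = true;               // assumed flag behind IsShutdown()
	pthread_cond_broadcast(&cond);  // wake all waiters in one go
	pthread_mutex_unlock(&mutex);
}

With a broadcast here, the signal chaining in GetWork() (re-signalling while block_cnt is non-zero) becomes a harmless extra wake-up rather than a required hand-off.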
void
MediaOmxCommonDecoder::AudioOffloadTearDown()
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(!IsShutdown());
  DECODER_LOG(LogLevel::Debug, ("%s", __PRETTY_FUNCTION__));

  // mAudioOffloadPlayer can be null here if ResumeStateMachine was called
  // just before because of some other error.
  if (mAudioOffloadPlayer) {
    ResumeStateMachine();
  }
}
Example #10
0
void TextTrackManager::DispatchUpdateCueDisplay() {
  if (!mUpdateCueDisplayDispatched && !IsShutdown() &&
      mMediaElement->IsCurrentlyPlaying()) {
    WEBVTT_LOG("DispatchUpdateCueDisplay");
    nsPIDOMWindowInner* win = mMediaElement->OwnerDoc()->GetInnerWindow();
    if (win) {
      nsGlobalWindowInner::Cast(win)->Dispatch(
          TaskCategory::Other,
          NewRunnableMethod("dom::TextTrackManager::UpdateCueDisplay", this,
                            &TextTrackManager::UpdateCueDisplay));
      mUpdateCueDisplayDispatched = true;
    }
  }
}
Example #11
0
void
ChannelMediaDecoder::DownloadProgressed()
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());

  GetOwner()->DownloadProgressed();

  using StatsPromise = MozPromise<MediaStatistics, bool, true>;
  InvokeAsync(GetStateMachine()->OwnerThread(),
              __func__,
              [
                playbackStats = mPlaybackStatistics,
                res = RefPtr<BaseMediaResource>(mResource),
                duration = mDuration,
                pos = mPlaybackPosition
              ]() {
                auto rate = ComputePlaybackRate(playbackStats, res, duration);
                UpdatePlaybackRate(rate, res);
                MediaStatistics stats = GetStatistics(rate, res, pos);
                return StatsPromise::CreateAndResolve(stats, __func__);
              })
    ->Then(
      mAbstractMainThread,
      __func__,
      [ =, self = RefPtr<ChannelMediaDecoder>(this) ](MediaStatistics aStats) {
        if (IsShutdown()) {
          return;
        }
        mCanPlayThrough = aStats.CanPlayThrough();
        GetStateMachine()->DispatchCanPlayThrough(mCanPlayThrough);
        mResource->ThrottleReadahead(ShouldThrottleDownload(aStats));
        // Update readyState since mCanPlayThrough might have changed.
        GetOwner()->UpdateReadyState();
      },
      []() { MOZ_ASSERT_UNREACHABLE("Promise not resolved"); });
}
Example #12
0
void
ChannelMediaDecoder::NotifyBytesConsumed(int64_t aBytes, int64_t aOffset)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
  AbstractThread::AutoEnter context(AbstractMainThread());

  if (mIgnoreProgressData) {
    return;
  }

  MOZ_ASSERT(GetStateMachine());
  if (aOffset >= mDecoderPosition) {
    mPlaybackStatistics.AddBytes(aBytes);
  }
  mDecoderPosition = aOffset + aBytes;
}
Example #13
0
bool TMdbPeerInfo::PostMessage(TMdbPacketInfo* pPacketInfo)
{
    if(IsShutdown())
    {
        mdb_ntc_errstr = "peer is already shutdown";
        return false;
    }
    //pSocketEvent->AddRef();// hand the event over to the timer thread
    oSendPackets.Push(pPacketInfo);// append to this socket info's send queue
    // start monitoring for the ev_write event
    TADD_DETAIL("PostMessage:fd[%d],queueSendEvent[%u]", m_uiSocketID, oSendPackets.GetSize());
    if(oSendPackets.GetSize() == 1)
    {
        this->AddEventMonitor(MDB_NTC_PEER_EV_WRITE_FLAG);
    }
    return true;
}
Example #14
0
void TMdbPeerInfo::CheckRecvPackets()
{
    /*if((oRecvPackets.pSplicingMsg == NULL || (SpliceMsg() && !IsShutdown())) && pProtocol)
    {
        
    }*/
    if( NULL == pProtocol )
    {
        return ;
    }
    if( (oRecvPackets.pSplicingMsg == NULL)
            || ( SpliceMsg() && !IsShutdown()) 
       )
    {
        pProtocol->CheckPackets(this);
    }
}
Example #15
0
int64_t MediaOmxReader::ProcessCachedData(int64_t aOffset, bool aWaitForCompletion)
{
  // Could run on decoder thread or IO thread.
  if (IsShutdown()) {
    return -1;
  }
  // We read data in chunks of 32 KiB. Keep sReadSize small because slow media,
  // such as SD cards, cannot service larger reads quickly enough. See Bug 914870.
  static const int64_t sReadSize = 32 * 1024;

  NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread.");

  MOZ_ASSERT(mDecoder->GetResource());
  int64_t resourceLength = mDecoder->GetResource()->GetCachedDataEnd(0);
  NS_ENSURE_TRUE(resourceLength >= 0, -1);

  if (aOffset >= resourceLength) {
    return 0; // Cache is empty, nothing to do
  }

  int64_t bufferLength = std::min<int64_t>(resourceLength-aOffset, sReadSize);

  nsAutoArrayPtr<char> buffer(new char[bufferLength]);

  nsresult rv = mDecoder->GetResource()->ReadFromCache(buffer.get(),
                                                       aOffset, bufferLength);
  NS_ENSURE_SUCCESS(rv, -1);

  nsRefPtr<OmxReaderNotifyDataArrivedRunnable> runnable(
    new OmxReaderNotifyDataArrivedRunnable(this,
                                           buffer.forget(),
                                           bufferLength,
                                           aOffset,
                                           resourceLength));
  if (aWaitForCompletion) {
    rv = NS_DispatchToMainThread(runnable.get(), NS_DISPATCH_SYNC);
  } else {
    rv = NS_DispatchToMainThread(runnable.get());
  }
  NS_ENSURE_SUCCESS(rv, -1);

  return resourceLength - aOffset - bufferLength;
}
Example #16
0
void TextTrackManager::DispatchTimeMarchesOn() {
  // Run the algorithm if no previous instance is still running, otherwise
  // enqueue the current playback position and whether only that changed
  // through its usual monotonic increase during normal playback; current
  // executing call upon completion will check queue for further 'work'.
  if (!mTimeMarchesOnDispatched && !IsShutdown() &&
      mMediaElement->IsCurrentlyPlaying()) {
    WEBVTT_LOG("DispatchTimeMarchesOn");
    nsPIDOMWindowInner* win = mMediaElement->OwnerDoc()->GetInnerWindow();
    if (win) {
      nsGlobalWindowInner::Cast(win)->Dispatch(
          TaskCategory::Other,
          NewRunnableMethod("dom::TextTrackManager::TimeMarchesOn", this,
                            &TextTrackManager::TimeMarchesOn));
      mTimeMarchesOnDispatched = true;
    }
  }
}
Example #17
0
already_AddRefed<SourceBufferDecoder>
MediaSourceReader::CreateSubDecoder(const nsACString& aType, int64_t aTimestampOffset)
{
  if (IsShutdown()) {
    return nullptr;
  }
  MOZ_ASSERT(GetTaskQueue());
  nsRefPtr<SourceBufferDecoder> decoder =
    new SourceBufferDecoder(new SourceBufferResource(aType), mDecoder, aTimestampOffset);
  nsRefPtr<MediaDecoderReader> reader(CreateReaderForType(aType, decoder));
  if (!reader) {
    return nullptr;
  }

  // MSE uses a start time of 0 everywhere. Set that immediately on the
  // subreader to make sure that it's always in a state where we can invoke
  // GetBuffered on it.
  {
    ReentrantMonitorAutoEnter mon(decoder->GetReentrantMonitor());
    reader->SetStartTime(0);
  }

  // This part is icky. It would be nicer to just give each subreader its own
  // task queue. Unfortunately though, Request{Audio,Video}Data implementations
  // currently assert that they're on "the decode thread", and so having
  // separate task queues makes MediaSource stuff unnecessarily cumbersome. We
  // should remove the need for these assertions (which probably involves making
  // all Request*Data implementations fully async), and then get rid of the
  // borrowing.
  reader->SetBorrowedTaskQueue(GetTaskQueue());

#ifdef MOZ_FMP4
  reader->SetSharedDecoderManager(mSharedDecoderManager);
#endif
  reader->Init(nullptr);

  MSE_DEBUG("subdecoder %p subreader %p",
            decoder.get(), reader.get());
  decoder->SetReader(reader);
#ifdef MOZ_EME
  decoder->SetCDMProxy(mCDMProxy);
#endif
  return decoder.forget();
}
void CCMatchShutdown::OnRun(unsigned long nClock)
{
	if (IsShutdown() == false) return;

	int nIndex = GetProgressIndex();
	if (nIndex < (int)m_ShutdownNotifyArray.size())
	{
		MShutdownNotify* pNotify = m_ShutdownNotifyArray[nIndex];
		if (nClock - GetTimeLastProgress() < pNotify->GetDelay())
			return;
		Notify(nIndex);
		SetProgress(nIndex+1, nClock);
	}
	else if (nIndex == (int)m_ShutdownNotifyArray.size())
	{
		Terminate();
		SetProgress(nIndex+1, nClock);
	}
}
nsRefPtr<MediaDecoderReader::SeekPromise>
MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                        int64_t aCurrentTime)
{
  MSE_DEBUG("MediaSourceReader(%p)::Seek(aTime=%lld, aStart=%lld, aEnd=%lld, aCurrent=%lld)",
            this, aTime, aStartTime, aEndTime, aCurrentTime);

  mSeekPromise.RejectIfExists(NS_OK, __func__);
  nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);

  if (IsShutdown()) {
    mSeekPromise.Reject(NS_ERROR_FAILURE, __func__);
    return p;
  }

  // Store pending seek target in case the track buffers don't contain
  // the desired time and we delay doing the seek.
  mPendingSeekTime = aTime;
  mPendingStartTime = aStartTime;
  mPendingEndTime = aEndTime;
  mPendingCurrentTime = aCurrentTime;

  // Only increment the number of expected OnSeekCompleted
  // notifications if we weren't already waiting for AttemptSeek
  // to complete (and they would have been accounted for already).
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

    if (!mWaitingForSeekData) {
      mWaitingForSeekData = true;
      if (mAudioTrack) {
        mPendingSeeks++;
      }
      if (mVideoTrack) {
        mPendingSeeks++;
      }
    }
  }

  AttemptSeek();
  return p;
}
Example #20
0
void TextTrackManager::UpdateCueDisplay() {
  WEBVTT_LOG("UpdateCueDisplay");
  mUpdateCueDisplayDispatched = false;

  if (!mMediaElement || !mTextTracks || IsShutdown()) {
    return;
  }

  nsIFrame* frame = mMediaElement->GetPrimaryFrame();
  nsVideoFrame* videoFrame = do_QueryFrame(frame);
  if (!videoFrame) {
    return;
  }

  nsCOMPtr<nsIContent> overlay = videoFrame->GetCaptionOverlay();
  nsCOMPtr<nsIContent> controls = videoFrame->GetVideoControls();
  if (!overlay) {
    return;
  }

  nsTArray<RefPtr<TextTrackCue>> showingCues;
  mTextTracks->GetShowingCues(showingCues);

  if (showingCues.Length() > 0) {
    WEBVTT_LOG("UpdateCueDisplay, processCues, showingCuesNum=%zu",
               showingCues.Length());
    RefPtr<nsVariantCC> jsCues = new nsVariantCC();

    jsCues->SetAsArray(nsIDataType::VTYPE_INTERFACE, &NS_GET_IID(EventTarget),
                       showingCues.Length(),
                       static_cast<void*>(showingCues.Elements()));
    nsPIDOMWindowInner* window = mMediaElement->OwnerDoc()->GetInnerWindow();
    if (window) {
      sParserWrapper->ProcessCues(window, jsCues, overlay, controls);
    }
  } else if (overlay->Length() > 0) {
    WEBVTT_LOG("UpdateCueDisplay EmptyString");
    nsContentUtils::SetNodeTextContent(overlay, EmptyString(), true);
  }
}
Example #21
0
void
ChannelMediaDecoder::NotifyDownloadEnded(nsresult aStatus)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
  AbstractThread::AutoEnter context(AbstractMainThread());

  LOG("NotifyDownloadEnded, status=%" PRIx32, static_cast<uint32_t>(aStatus));

  if (NS_SUCCEEDED(aStatus)) {
    // Download ends successfully. This is a stream with a finite length.
    GetStateMachine()->DispatchIsLiveStream(false);
  }

  MediaDecoderOwner* owner = GetOwner();
  if (NS_SUCCEEDED(aStatus) || aStatus == NS_BASE_STREAM_CLOSED) {
    nsCOMPtr<nsIRunnable> r =
      NS_NewRunnableFunction("ChannelMediaDecoder::UpdatePlaybackRate", [
        stats = mPlaybackStatistics,
        res = RefPtr<BaseMediaResource>(mResource),
        duration = mDuration
      ]() {
        auto rate = ComputePlaybackRate(stats, res, duration);
        UpdatePlaybackRate(rate, res);
      });
    nsresult rv = GetStateMachine()->OwnerThread()->Dispatch(r.forget());
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    owner->DownloadSuspended();
    // NotifySuspendedStatusChanged will tell the element that download
    // has been suspended "by the cache", which is true since we never
    // download anything. The element can then transition to HAVE_ENOUGH_DATA.
    owner->NotifySuspendedByCache(true);
  } else if (aStatus == NS_BINDING_ABORTED) {
    // Download has been cancelled by user.
    owner->LoadAborted();
  } else {
    NetworkError(MediaResult(aStatus, "Download aborted"));
  }
}
Example #22
0
void MediaOmxReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset)
{
  MOZ_ASSERT(NS_IsMainThread());
  if (IsShutdown()) {
    return;
  }
  if (HasVideo()) {
    return;
  }

  if (!mMP3FrameParser.NeedsData()) {
    return;
  }

  mMP3FrameParser.Parse(aBuffer, aLength, aOffset);
  int64_t duration = mMP3FrameParser.GetDuration();
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (duration != mLastParserDuration && mUseParserDuration) {
    mLastParserDuration = duration;
    mDecoder->UpdateEstimatedMediaDuration(mLastParserDuration);
  }
}
Example #23
0
already_AddRefed<SourceBufferDecoder>
MediaSourceReader::CreateSubDecoder(const nsACString& aType)
{
  if (IsShutdown()) {
    return nullptr;
  }
  MOZ_ASSERT(GetTaskQueue());
  nsRefPtr<SourceBufferDecoder> decoder =
    new SourceBufferDecoder(new SourceBufferResource(aType), mDecoder);
  nsRefPtr<MediaDecoderReader> reader(CreateReaderForType(aType, decoder));
  if (!reader) {
    return nullptr;
  }

  // MSE uses a start time of 0 everywhere. Set that immediately on the
  // subreader to make sure that it's always in a state where we can invoke
  // GetBuffered on it.
  {
    ReentrantMonitorAutoEnter mon(decoder->GetReentrantMonitor());
    reader->SetStartTime(0);
  }

  // Set a callback on the subreader that forwards calls to this reader.
  // This reader will then forward them onto the state machine via this
  // reader's callback.
  RefPtr<MediaDataDecodedListener<MediaSourceReader>> callback =
    new MediaDataDecodedListener<MediaSourceReader>(this, GetTaskQueue());
  reader->SetCallback(callback);
  reader->SetTaskQueue(GetTaskQueue());
  reader->Init(nullptr);

  MSE_DEBUG("MediaSourceReader(%p)::CreateSubDecoder subdecoder %p subreader %p",
            this, decoder.get(), reader.get());
  decoder->SetReader(reader);
#ifdef MOZ_EME
  decoder->SetCDMProxy(mCDMProxy);
#endif
  return decoder.forget();
}
//*=================================================================================
//*Prototype: void TSmartOutThread::Run()
//*Function:  read incoming client requests and send back responses
//*Params:    none
//*Returns:   none
//*Notes:
//*=================================================================================
void TSmartOutThread::Run()
{
	SOCKET       hClient;
	SOCKADDR_IN  sockAddr;
	int nLen = sizeof(sockAddr);
	int nBlockMode = 1 ;

	DWORD dwTick = GetTickCount();

	while( !IsShutdown() )
	{
		if( Wait(m_hServerSocket, 0, 1000) )
		{
			ZeroMemory(&sockAddr, sizeof(sockAddr));

			hClient = accept(m_hServerSocket, (SOCKADDR*)&sockAddr, &nLen);
			if( hClient != INVALID_SOCKET )
			{	
				if( ioctlsocket(hClient, FIONBIO, (DWORD*)&nBlockMode) != SOCKET_ERROR )
				{
					ProcessRequest(hClient, &sockAddr);
				}
				shutdown(hClient, SB_BOTH);
				closesocket(hClient);
				hClient = INVALID_SOCKET;
			}
		}

		if( GetTickCount() - dwTick > 10000 )
		{
			Monitor();
			dwTick = GetTickCount();
		}
	}

	CloseOutSocket();
}
Example #25
0
bool BasicThread::ShouldStop(void) const {
  auto context = ContextMember::GetContext();
  return m_stop || !context || context->IsShutdown();
}
Example #26
0
// https://html.spec.whatwg.org/multipage/embedded-content.html#time-marches-on
void TextTrackManager::TimeMarchesOn() {
  NS_ASSERTION(NS_IsMainThread(), "Wrong thread!");
  mTimeMarchesOnDispatched = false;

  CycleCollectedJSContext* context = CycleCollectedJSContext::Get();
  if (context && context->IsInStableOrMetaStableState()) {
    // FireTimeUpdate can be called while at stable state following a
    // current position change which triggered a state watcher in MediaDecoder
    // (see bug 1443429).
    // TimeMarchesOn() will modify JS attributes which is forbidden while in
    // stable state. So we dispatch a task to perform such operation later
    // instead.
    DispatchTimeMarchesOn();
    return;
  }
  WEBVTT_LOG("TimeMarchesOn");

  // Early return if we don't have any TextTracks or are shutting down.
  if (!mTextTracks || mTextTracks->Length() == 0 || IsShutdown()) {
    return;
  }

  nsISupports* parentObject = mMediaElement->OwnerDoc()->GetParentObject();
  if (NS_WARN_IF(!parentObject)) {
    return;
  }
  nsCOMPtr<nsPIDOMWindowInner> window = do_QueryInterface(parentObject);

  if (mMediaElement &&
      (!(mMediaElement->GetPlayedOrSeeked()) || mMediaElement->Seeking())) {
    WEBVTT_LOG("TimeMarchesOn seeking or post return");
    return;
  }

  // Step 3.
  double currentPlaybackTime = mMediaElement->CurrentTime();
  bool hasNormalPlayback = !mHasSeeked;
  mHasSeeked = false;
  WEBVTT_LOG(
      "TimeMarchesOn mLastTimeMarchesOnCalled %lf currentPlaybackTime %lf "
      "hasNormalPlayback %d",
      mLastTimeMarchesOnCalled, currentPlaybackTime, hasNormalPlayback);

  // Step 1, 2.
  RefPtr<TextTrackCueList> currentCues = new TextTrackCueList(window);
  RefPtr<TextTrackCueList> otherCues = new TextTrackCueList(window);
  bool dummy;
  for (uint32_t index = 0; index < mTextTracks->Length(); ++index) {
    TextTrack* ttrack = mTextTracks->IndexedGetter(index, dummy);
    if (ttrack && ttrack->Mode() != TextTrackMode::Disabled) {
      // TODO: call GetCueListByTimeInterval on mNewCues?
      ttrack->GetCurrentCueList(currentCues);
    }
  }
  WEBVTT_LOGV("TimeMarchesOn currentCues %d", currentCues->Length());
  // Populate otherCues with "non-active" cues.
  if (hasNormalPlayback) {
    if (currentPlaybackTime < mLastTimeMarchesOnCalled) {
      // TODO: Add log and find the root cause why the
      // playback position goes backward.
      mLastTimeMarchesOnCalled = currentPlaybackTime;
    }
    media::Interval<double> interval(mLastTimeMarchesOnCalled,
                                     currentPlaybackTime);
    otherCues = mNewCues->GetCueListByTimeInterval(interval);
  } else {
    // Seek case. Put the mLastActiveCues into otherCues.
    otherCues = mLastActiveCues;
  }
  for (uint32_t i = 0; i < currentCues->Length(); ++i) {
    TextTrackCue* cue = (*currentCues)[i];
    otherCues->RemoveCue(*cue);
  }
  WEBVTT_LOGV("TimeMarchesOn otherCues %d", otherCues->Length());
  // Step 4.
  RefPtr<TextTrackCueList> missedCues = new TextTrackCueList(window);
  if (hasNormalPlayback) {
    for (uint32_t i = 0; i < otherCues->Length(); ++i) {
      TextTrackCue* cue = (*otherCues)[i];
      if (cue->StartTime() >= mLastTimeMarchesOnCalled &&
          cue->EndTime() <= currentPlaybackTime) {
        missedCues->AddCue(*cue);
      }
    }
  }
  WEBVTT_LOGV("TimeMarchesOn missedCues %d", missedCues->Length());
  // Step 5. Empty now.
  // TODO: Step 6: fire timeupdate?

  // Step 7. Abort steps if condition 1, 2, 3 are satisfied.
  // 1. All of the cues in current cues have their active flag set.
  // 2. None of the cues in other cues have their active flag set.
  // 3. Missed cues is empty.
  bool c1 = true;
  for (uint32_t i = 0; i < currentCues->Length(); ++i) {
    if (!(*currentCues)[i]->GetActive()) {
      c1 = false;
      break;
    }
  }
  bool c2 = true;
  for (uint32_t i = 0; i < otherCues->Length(); ++i) {
    if ((*otherCues)[i]->GetActive()) {
      c2 = false;
      break;
    }
  }
  bool c3 = (missedCues->Length() == 0);
  if (c1 && c2 && c3) {
    mLastTimeMarchesOnCalled = currentPlaybackTime;
    WEBVTT_LOG("TimeMarchesOn step 7 return, mLastTimeMarchesOnCalled %lf",
               mLastTimeMarchesOnCalled);
    return;
  }

  // Step 8. Respect PauseOnExit flag if not seek.
  if (hasNormalPlayback) {
    for (uint32_t i = 0; i < otherCues->Length(); ++i) {
      TextTrackCue* cue = (*otherCues)[i];
      if (cue && cue->PauseOnExit() && cue->GetActive()) {
        WEBVTT_LOG("TimeMarchesOn pause the MediaElement");
        mMediaElement->Pause();
        break;
      }
    }
    for (uint32_t i = 0; i < missedCues->Length(); ++i) {
      TextTrackCue* cue = (*missedCues)[i];
      if (cue && cue->PauseOnExit()) {
        WEBVTT_LOG("TimeMarchesOn pause the MediaElement");
        mMediaElement->Pause();
        break;
      }
    }
  }

  // Step 15.
  // Sort text tracks in the same order as the text tracks appear
  // in the media element's list of text tracks, and remove
  // duplicates.
  TextTrackListInternal affectedTracks;
  // Step 13, 14.
  nsTArray<RefPtr<SimpleTextTrackEvent>> eventList;
  // Step 9, 10.
  // For each text track cue in missed cues, prepare an event named
  // enter for the TextTrackCue object with the cue start time.
  for (uint32_t i = 0; i < missedCues->Length(); ++i) {
    TextTrackCue* cue = (*missedCues)[i];
    if (cue) {
      SimpleTextTrackEvent* event = new SimpleTextTrackEvent(
          NS_LITERAL_STRING("enter"), cue->StartTime(), cue->GetTrack(), cue);
      eventList.InsertElementSorted(
          event, CompareSimpleTextTrackEvents(mMediaElement));
      affectedTracks.AddTextTrack(cue->GetTrack(),
                                  CompareTextTracks(mMediaElement));
    }
  }

  // Step 11, 17.
  for (uint32_t i = 0; i < otherCues->Length(); ++i) {
    TextTrackCue* cue = (*otherCues)[i];
    if (cue->GetActive() || missedCues->IsCueExist(cue)) {
      double time =
          cue->StartTime() > cue->EndTime() ? cue->StartTime() : cue->EndTime();
      SimpleTextTrackEvent* event = new SimpleTextTrackEvent(
          NS_LITERAL_STRING("exit"), time, cue->GetTrack(), cue);
      eventList.InsertElementSorted(
          event, CompareSimpleTextTrackEvents(mMediaElement));
      affectedTracks.AddTextTrack(cue->GetTrack(),
                                  CompareTextTracks(mMediaElement));
    }
    cue->SetActive(false);
  }

  // Step 12, 17.
  for (uint32_t i = 0; i < currentCues->Length(); ++i) {
    TextTrackCue* cue = (*currentCues)[i];
    if (!cue->GetActive()) {
      SimpleTextTrackEvent* event = new SimpleTextTrackEvent(
          NS_LITERAL_STRING("enter"), cue->StartTime(), cue->GetTrack(), cue);
      eventList.InsertElementSorted(
          event, CompareSimpleTextTrackEvents(mMediaElement));
      affectedTracks.AddTextTrack(cue->GetTrack(),
                                  CompareTextTracks(mMediaElement));
    }
    cue->SetActive(true);
  }

  // Fire the eventList
  for (uint32_t i = 0; i < eventList.Length(); ++i) {
    eventList[i]->Dispatch();
  }

  // Step 16.
  for (uint32_t i = 0; i < affectedTracks.Length(); ++i) {
    TextTrack* ttrack = affectedTracks[i];
    if (ttrack) {
      ttrack->DispatchAsyncTrustedEvent(NS_LITERAL_STRING("cuechange"));
      HTMLTrackElement* trackElement = ttrack->GetTrackElement();
      if (trackElement) {
        trackElement->DispatchTrackRunnable(NS_LITERAL_STRING("cuechange"));
      }
    }
  }

  mLastTimeMarchesOnCalled = currentPlaybackTime;
  mLastActiveCues = currentCues;

  // Step 18.
  UpdateCueDisplay();
}
Example #27
0
// WorkQueue is not responsible for managing the memory of the queued Work objects.
WorkQueue::~WorkQueue() {
	if (!IsShutdown()) Shutdown();
	while(!works.empty()) works.pop_front();
}
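The XXX comment in Example #8 asks which queueing strategy to adopt. For comparison, a compact producer-consumer variant of the same queue using C++11 primitives is sketched below; the SimpleWorkQueue name and its members are hypothetical, and it follows the ownership rule stated above (the queue never deletes Work objects).

#include <condition_variable>
#include <deque>
#include <mutex>

class Work;  // defined elsewhere in the project

// Hypothetical C++11 counterpart to WorkQueue: a plain producer-consumer queue.
class SimpleWorkQueue {
public:
	void Put(Work* w) {
		{
			std::lock_guard<std::mutex> lock(mu_);
			works_.push_back(w);
		}
		cv_.notify_one();  // wake one consumer per queued item
	}

	// Blocks until work is available or Shutdown() is called.
	Work* Get() {
		std::unique_lock<std::mutex> lock(mu_);
		cv_.wait(lock, [this] { return shutdown_ || !works_.empty(); });
		if (shutdown_)
			return nullptr;  // shutdown wins, as in Example #8; callers own leftovers
		Work* w = works_.front();
		works_.pop_front();
		return w;
	}

	void Shutdown() {
		{
			std::lock_guard<std::mutex> lock(mu_);
			shutdown_ = true;
		}
		cv_.notify_all();  // release every blocked consumer
	}

private:
	std::mutex mu_;
	std::condition_variable cv_;
	std::deque<Work*> works_;
	bool shutdown_ = false;
};

As in Example #8, a shutdown request takes precedence over queued work, so Get() returns nullptr even if items remain in the queue.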
//*=================================================================================
//*Prototype: void TSmartServer::Run()
//*Function:  run the server's main service loop
//*Params:    none
//*Returns:   none
//*Notes:     front-end data collection server class
//*=================================================================================
void TSmartServer::Run()
{
	bool bReloadFlag = false;
	try
	{
		m_OutThread.Start();
		m_SmartMonitor.Start();

		// If no device profile data has been loaded yet, keep sending profile-fetch
		// tasks to 金仕达 until the profiles arrive.
		while(1)
		{
			if( GetDocCount() <= 0 )
			{
				ReportLog("没有设备档案信息! 服务器自动重新装入设备档案表!");
				InitSmartDocList();
				Sleep(4000);				
			}
			else
			{
				break;
			}
		}

		bool  bRet=false;
		long  nNoTaskTick=0,nTick=0,nCollDataTick=0;
		
		m_CollDataTaskObj.nTaskPlanID  = 100 ;
		strcpy(m_CollDataTaskObj.szTaskName, "收数");
		m_CollDataTaskObj.nTaskCycle   = 1 ;
		GetCurDateTime(m_CollDataTaskObj.szBeginTime);
		m_CollDataTaskObj.nRepeatTime  = 0 ;                 // how long the task lasts (minutes)
		m_CollDataTaskObj.nRepeatTimes = 0;                // number of repetitions

		int  k = 0;
		long i = 0 ;
		for(i=0; i< m_DocList.GetCount(); i++)
		{
			TSmartDocObj *pObj = (TSmartDocObj*)m_DocList[i];
			if(!pObj->m_nParentID)
			{
				m_CollDataTaskObj.pTask[k].nTaskID     = i + 100 ;
				m_CollDataTaskObj.pTask[k].nTaskPlanID = 01;
				m_CollDataTaskObj.pTask[k].nAuthID     = pObj->m_nAuthID ;
				strcpy(m_CollDataTaskObj.pTask[k].szTaskCode, "01");
				strcpy(m_CollDataTaskObj.pTask[k].szName, "实时收数");
				strcpy(m_CollDataTaskObj.pTask[k].szType, "01");
				m_CollDataTaskObj.pTask[k].nPriority = 0 ;
				m_CollDataTaskObj.nTask++;  // total number of devices
				k++;
			}
		}
		m_CollDataTaskObj.SetBeginTime();
		ExecuteTask(&m_CollDataTaskObj);
		// execute the task
		static TSmartTaskObj curObj;
		BOOL bNoTask=FALSE;
		int ret=-1;
		nCollDataTick = GetTickCount();
		while( !IsShutdown() )
		{	
				bRet = false;
				// If the backend has no task to process, run the data collection task
				while(bNoTask)
				{
					ExecuteCollDataTask();
					nTick = GetTickCount();
					nCollDataTick = nTick;
					// If data collection has been running for more than 2 seconds, break out
					// and start requesting new tasks from the backend.
					if(nTick-nNoTaskTick>=m_nNoTask_Tick)// 2 seconds
					{
						break;
					}
					Sleep(1000);
				}
				nTick=GetTickCount();
				// If more than 5 seconds have passed since the last data collection, start another collection task
				if(nTick-nCollDataTick>=m_nCollData_Tick)
				{
					nCollDataTick = nTick;
					ExecuteCollDataTask();
				}
				// send a heartbeat request packet
				ret=m_Channels.SendTick();
				if(ret!=RET_OK)
				{
					continue;
				}
				ZeroMemory(&curObj, sizeof(curObj));
				curObj.Clear();
				ret=m_Channels.GetSmartTaskPlan(&curObj);
				switch(ret)
				{	
					case RET_OK:
						ReportLog("选中任务: %s(%d), 执行次数:%d, 执行时长:%d, 设备数:%d.\n",curObj.szTaskName, curObj.nTaskPlanID,curObj.nRepeatTimes, curObj.nRepeatTime, curObj.nTask); 
						curObj.SetBeginTime();
						bRet = ExecuteTask(&curObj);
						if(!bRet)
						{
							for(int j=0; j< curObj.nTask; j++)
							{
								TSResultData  data;
								ZeroMemory(&data, sizeof(data));
								strcpy(data.sMsg, "任务执行失败[可能终端正忙]!");
								m_Channels.ReportTaskResult(&curObj.pTask[j], RET_TERME_NOANSWER, &data);
							}
						}
						bNoTask=FALSE;
						break;
					case RET_NOTASK:
						ReportLog("金仕达没有任务下达"); 
						nNoTaskTick = GetTickCount();
						bNoTask=TRUE;
						break;
					default:
						ReportLog("向金仕达请求任务失败"); 
						bNoTask=FALSE;
						break;
				}
			/*
			if(!GetMemoryInfo())
			{
				ReportError("获取系统内存信息失败,请关闭前置程序,重新启动!");				
			}
			printf("TotalMemory %dK,UseMemory %dK,FreeMemory %dK\n",nTotalMemory,nUseMemory,nFreeMemory);	
			if(nFreeMemory/1024<10)
			{
				ReportError("可用内存小于10M,请关闭一些其它无用的程序");				
			}
			*/
		}
		m_OutThread.Shutdown();
	}
	catch(TException& e)
	{
		ReportError(e.GetText());
		printf("------ e error ----\n");
		Shutdown();
	}
	catch(...)
	{
		ReportError("未知的错误导致服务器终止!");
		Shutdown();
	}

	PostQuitMessage(0);
}