Code example #1
nsresult
MediaCodecDataDecoder::ProcessOutput(
    BufferInfo::Param aInfo, MediaFormat::Param aFormat, int32_t aStatus)
{
  AutoLocalJNIFrame frame(jni::GetEnvForThread(), 1);

  const Maybe<TimeUnit> duration = GetOutputDuration();
  if (!duration) {
    // Some devices report failure in QueueSample while actually succeeding at
    // it, in which case we get an output buffer without having a cached duration
    // (bug 1273523).
    return NS_OK;
  }

  const auto buffer = jni::Object::LocalRef::Adopt(
      frame.GetEnv()->GetObjectArrayElement(mOutputBuffers.Get(), aStatus));

  if (buffer) {
    // The buffer will be null on Android L if we are decoding to a Surface.
    void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());
    Output(aInfo, directBuffer, aFormat, duration.value());
  }

  // The Surface will be updated at this point (for video).
  mDecoder->ReleaseOutputBuffer(aStatus, true);
  PostOutput(aInfo, aFormat, duration.value());

  return NS_OK;
}
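
Example #1 returns early when GetOutputDuration() yields nothing. That helper is not part of this listing; based on the mDurations queue that example #4 fills once per submitted input sample, a minimal sketch of it might look like the following (the names and exact return type are assumptions, not the project's confirmed code):

Maybe<TimeUnit>
MediaCodecDataDecoder::GetOutputDuration()
{
  // Assumed helper: one duration is queued per successfully submitted sample,
  // so an empty queue means QueueSample did not really succeed (bug 1273523).
  if (mDurations.empty()) {
    return Nothing();
  }
  const Maybe<TimeUnit> duration = Some(mDurations.front());
  mDurations.pop();
  return duration;
}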
Code example #2
File: OutputWnd.cpp Project: MGraefe/deferred
void PostOutput( TCHAR *TextAsPrintf, ... )
{
	// Format the printf-style arguments into a CString, then forward to the
	// CString overload of PostOutput.
	va_list argptr;
	va_start( argptr, TextAsPrintf );

	CString s;
	s.FormatV( TextAsPrintf, argptr );
	va_end( argptr );

	PostOutput(s);
}
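
This overload is a printf-style convenience wrapper around the CString overload called on its last line. A hypothetical call site (the format string and arguments are invented for illustration) might look like:

// Hypothetical usage; relies on the CString overload PostOutput(const CString&).
PostOutput( _T("Loaded %d materials in %.2f seconds"), numMaterials, elapsedSeconds );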
Code example #3
nsresult
MediaCodecDataDecoder::ProcessOutput(
    BufferInfo::Param aInfo, MediaFormat::Param aFormat, int32_t aStatus)
{
  AutoLocalJNIFrame frame(jni::GetEnvForThread(), 1);

  const TimeUnit duration = GetOutputDuration();
  const auto buffer = jni::Object::LocalRef::Adopt(
      frame.GetEnv()->GetObjectArrayElement(mOutputBuffers.Get(), aStatus));

  if (buffer) {
    // The buffer will be null on Android L if we are decoding to a Surface.
    void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());
    Output(aInfo, directBuffer, aFormat, duration);
  }

  // The Surface will be updated at this point (for video).
  mDecoder->ReleaseOutputBuffer(aStatus, true);
  PostOutput(aInfo, aFormat, duration);

  return NS_OK;
}
Code example #4
void MediaCodecDataDecoder::DecoderLoop()
{
  bool outputDone = false;

  bool draining = false;
  bool waitingEOF = false;

  AutoLocalJNIFrame frame(GetJNIForThread(), 1);
  nsRefPtr<MediaRawData> sample;

  MediaFormat::LocalRef outputFormat(frame.GetEnv());
  nsresult res;

  for (;;) {
    {
      MonitorAutoLock lock(mMonitor);
      while (!mStopping && !mDraining && !mFlushing && mQueue.empty()) {
        if (mQueue.empty()) {
          // We could be waiting here forever if we don't signal that we need more input
          ENVOKE_CALLBACK(InputExhausted);
        }
        lock.Wait();
      }

      if (mStopping) {
        // Get out of the loop. This is the only exit point.
        break;
      }

      if (mFlushing) {
        mDecoder->Flush();
        ClearQueue();
        mFlushing = false;
        lock.Notify();
        continue;
      }

      if (mDraining && !sample && !waitingEOF) {
        draining = true;
      }

      // We're not stopping or draining, so try to get a sample
      if (!mQueue.empty()) {
        sample = mQueue.front();
      }
    }

    if (draining && !waitingEOF) {
      MOZ_ASSERT(!sample, "Shouldn't have a sample when pushing EOF frame");

      int32_t inputIndex;
      res = mDecoder->DequeueInputBuffer(DECODER_TIMEOUT, &inputIndex);
      HANDLE_DECODER_ERROR();

      if (inputIndex >= 0) {
        res = mDecoder->QueueInputBuffer(inputIndex, 0, 0, 0, MediaCodec::BUFFER_FLAG_END_OF_STREAM);
        HANDLE_DECODER_ERROR();

        waitingEOF = true;
      }
    }

    if (sample) {
      // We have a sample, try to feed it to the decoder
      int32_t inputIndex;
      res = mDecoder->DequeueInputBuffer(DECODER_TIMEOUT, &inputIndex);
      HANDLE_DECODER_ERROR();

      if (inputIndex >= 0) {
        jni::Object::LocalRef buffer(frame.GetEnv());
        res = GetInputBuffer(frame.GetEnv(), inputIndex, &buffer);
        HANDLE_DECODER_ERROR();

        void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());

        MOZ_ASSERT(frame.GetEnv()->GetDirectBufferCapacity(buffer.Get()) >= sample->Size(),
          "Decoder buffer is not large enough for sample");

        {
          // We're feeding this to the decoder, so remove it from the queue
          MonitorAutoLock lock(mMonitor);
          mQueue.pop();
        }

        PodCopy((uint8_t*)directBuffer, sample->Data(), sample->Size());

        res = mDecoder->QueueInputBuffer(inputIndex, 0, sample->Size(),
                                         sample->mTime, 0);
        HANDLE_DECODER_ERROR();

        mDurations.push(media::TimeUnit::FromMicroseconds(sample->mDuration));
        sample = nullptr;
        outputDone = false;
      }
    }

    if (!outputDone) {
      BufferInfo::LocalRef bufferInfo;
      res = BufferInfo::New(&bufferInfo);
      HANDLE_DECODER_ERROR();

      int32_t outputStatus;
      res = mDecoder->DequeueOutputBuffer(bufferInfo, DECODER_TIMEOUT, &outputStatus);
      HANDLE_DECODER_ERROR();

      if (outputStatus == MediaCodec::INFO_TRY_AGAIN_LATER) {
        // We might want to call mCallback->InputExhausted() here, but there
        // seem to be some possible bad interactions with the threading
      } else if (outputStatus == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
        res = ResetOutputBuffers();
        HANDLE_DECODER_ERROR();
      } else if (outputStatus == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
        res = mDecoder->GetOutputFormat(ReturnTo(&outputFormat));
        HANDLE_DECODER_ERROR();
      } else if (outputStatus < 0) {
        NS_WARNING("unknown error from decoder!");
        ENVOKE_CALLBACK(Error);

        // Don't break here just in case it's recoverable. If it's not, other
        // stuff will fail later and we'll bail out.
      } else {
        int32_t flags;
        res = bufferInfo->Flags(&flags);
        HANDLE_DECODER_ERROR();

        // We have a valid buffer index >= 0 here
        if (flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) {
          if (draining) {
            draining = false;
            waitingEOF = false;

            mMonitor.Lock();
            mDraining = false;
            mMonitor.Notify();
            mMonitor.Unlock();

            ENVOKE_CALLBACK(DrainComplete);
          }

          mDecoder->ReleaseOutputBuffer(outputStatus, false);
          outputDone = true;

          // We only queue empty EOF frames, so we're done for now
          continue;
        }

        MOZ_ASSERT(!mDurations.empty(), "Should have had a duration queued");

        media::TimeUnit duration;
        if (!mDurations.empty()) {
          duration = mDurations.front();
          mDurations.pop();
        }

        auto buffer = jni::Object::LocalRef::Adopt(
            frame.GetEnv()->GetObjectArrayElement(mOutputBuffers.Get(), outputStatus));
        if (buffer) {
          // The buffer will be null on Android L if we are decoding to a Surface
          void* directBuffer = frame.GetEnv()->GetDirectBufferAddress(buffer.Get());
          Output(bufferInfo, directBuffer, outputFormat, duration);
        }

        // The Surface will be updated at this point (for video)
        mDecoder->ReleaseOutputBuffer(outputStatus, true);

        PostOutput(bufferInfo, outputFormat, duration);
      }
    }
  }

  Cleanup();

  // We're done
  MonitorAutoLock lock(mMonitor);
  mStopping = false;
  mMonitor.Notify();
}
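
Every nsresult in DecoderLoop() goes through HANDLE_DECODER_ERROR(), which is not included in this listing. Judging from how it is used (always immediately after a `res = ...` call, with `res`, the callback macro, and the enclosing for loop in scope), a plausible sketch is the one below; the exact body in the original source may differ.

// Assumed sketch of the error-handling macro used throughout DecoderLoop():
// on failure it warns, reports the error through the decoder callback, and
// breaks out of the enclosing decode loop.
#define HANDLE_DECODER_ERROR() \
  if (NS_FAILED(res)) { \
    NS_WARNING("Exiting decoder loop due to decoder error"); \
    ENVOKE_CALLBACK(Error); \
    break; \
  }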
Code example #5
void IocpTransmitStrategy::Launch() {
    PostOutput();
}