bool
WMFReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  HRESULT hr;
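  // Request the next audio sample. The reader was created with an async
  // callback (see ReadMetadata()), so ReadSample() returns immediately and
  // the result is delivered to mSourceReaderCallback.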
  hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                                 0, // control flags
                                 0, // read stream index
                                 nullptr,
                                 nullptr,
                                 nullptr);

  if (FAILED(hr)) {
    DECODER_LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x", hr);
    // End the stream.
    return false;
  }

  DWORD flags = 0;
  LONGLONG timestampHns = 0;
  RefPtr<IMFSample> sample;
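  // Block until the pending read completes; the callback hands back the
  // stream flags, the sample timestamp in hundred-nanosecond units, and the
  // sample itself.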
  hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));
  if (FAILED(hr) ||
      (flags & MF_SOURCE_READERF_ERROR) ||
      (flags & MF_SOURCE_READERF_ENDOFSTREAM) ||
      (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
    DECODER_LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x flags=0x%x",
                hr, flags);
    // End the stream.
    return false;
  }

  if (!sample) {
    // Not enough data? Try again...
    return true;
  }

  RefPtr<IMFMediaBuffer> buffer;
  hr = sample->ConvertToContiguousBuffer(byRef(buffer));
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
  DWORD maxLength = 0, currentLength = 0;
  hr = buffer->Lock(&data, &maxLength, &currentLength);
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  // currentLength is in bytes; one frame is one decoded sample per channel.
  uint32_t numFrames = currentLength / mAudioBytesPerSample / mAudioChannels;
  NS_ASSERTION(sizeof(AudioDataValue) == mAudioBytesPerSample, "Size calculation is wrong");
  nsAutoArrayPtr<AudioDataValue> pcmSamples(new AudioDataValue[numFrames * mAudioChannels]);
  memcpy(pcmSamples.get(), data, currentLength);
  buffer->Unlock();

  // We calculate the timestamp and the duration based on the number of audio
  // frames we've already played. We don't trust the timestamp stored on the
  // IMFSample, as sometimes it's wrong, possibly due to buggy encoders?

  // If this sample block comes after a discontinuity (i.e. a gap or seek)
  // reset the frame counters, and capture the timestamp. Future timestamps
  // will be offset from this block's timestamp.
  UINT32 discontinuity = 0;
  sample->GetUINT32(MFSampleExtension_Discontinuity, &discontinuity);
  if (mMustRecaptureAudioPosition || discontinuity) {
    mAudioFrameSum = 0;
    hr = HNsToFrames(timestampHns, mAudioRate, &mAudioFrameOffset);
    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
    mMustRecaptureAudioPosition = false;
  }

  int64_t timestamp;
  hr = FramesToUsecs(mAudioFrameOffset + mAudioFrameSum, mAudioRate, &timestamp);
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  mAudioFrameSum += numFrames;

  int64_t duration;
  hr = FramesToUsecs(numFrames, mAudioRate, &duration);
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  mAudioQueue.Push(new AudioData(mDecoder->GetResource()->Tell(),
                                 timestamp,
                                 duration,
                                 numFrames,
                                 pcmSamples.forget(),
                                 mAudioChannels,
                                 mAudioRate));

  #ifdef LOG_SAMPLE_DECODE
  DECODER_LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
              timestamp, duration, currentLength);
  #endif

  return true;
}
bool
WMFReader::DecodeVideoFrame(bool &aKeyframeSkip,
                            int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);

  HRESULT hr;

  hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                 0, // control flags
                                 0, // read stream index
                                 nullptr,
                                 nullptr,
                                 nullptr);
  if (FAILED(hr)) {
    DECODER_LOG("WMFReader::DecodeVideoData() ReadSample failed with hr=0x%x", hr);
    return false;
  }

  DWORD flags = 0;
  LONGLONG timestampHns = 0;
  RefPtr<IMFSample> sample;
  hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));

  if (flags & MF_SOURCE_READERF_ERROR) {
    NS_WARNING("WMFReader: Catastrophic failure reading video sample");
    // Future ReadSample() calls will fail, so give up and report end of stream.
    return false;
  }

  if (FAILED(hr)) {
    // Unknown failure; ask the caller to try again.
    return true;
  }

  if (!sample) {
    if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
      DECODER_LOG("WMFReader; Null sample after video decode, at end of stream");
      return false;
    }
    DECODER_LOG("WMFReader; Null sample after video decode. Maybe insufficient data...");
    return true;
  }

  if ((flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
    DECODER_LOG("WMFReader: Video media type changed!");
    RefPtr<IMFMediaType> mediaType;
    hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                            byRef(mediaType));
    if (FAILED(hr) ||
        FAILED(ConfigureVideoFrameGeometry(mediaType))) {
      NS_WARNING("Failed to reconfigure video media type");
      return false;
    }
  }

  int64_t timestamp = HNsToUsecs(timestampHns);
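  // Skip frames that fall before the time threshold (e.g. while seeking);
  // returning true keeps the decode loop going without outputting a frame.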
  if (timestamp < aTimeThreshold) {
    return true;
  }
  int64_t offset = mDecoder->GetResource()->Tell();
  int64_t duration = GetSampleDuration(sample);

  VideoData* v = nullptr;
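  // With DXVA the decoded frame stays in a D3D surface; otherwise a software
  // frame is built from the sample's buffer.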
  if (mUseHwAccel) {
    hr = CreateD3DVideoFrame(sample, timestamp, duration, offset, &v);
  } else {
    hr = CreateBasicVideoFrame(sample, timestamp, duration, offset, &v);
  }
  NS_ENSURE_TRUE(SUCCEEDED(hr) && v, false);

  a.mParsed++;
  a.mDecoded++;
  mVideoQueue.Push(v);

  #ifdef LOG_SAMPLE_DECODE
  DECODER_LOG("Decoded video sample timestamp=%lld duration=%lld stride=%d height=%u flags=%u",
              timestamp, duration, mVideoStride, mVideoHeight, flags);
  #endif

  if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
    // End of stream.
    DECODER_LOG("End of video stream");
    return false;
  }

  return true;
}
nsresult
WMFReader::ReadMetadata(MediaInfo* aInfo,
                        MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  DECODER_LOG("WMFReader::ReadMetadata()");
  HRESULT hr;

  RefPtr<IMFAttributes> attr;
  hr = wmf::MFCreateAttributes(byRef(attr), 1);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // Deliver samples asynchronously through our callback; DecodeAudioData() and
  // DecodeVideoFrame() wait on it after each ReadSample() request.
  hr = attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, mSourceReaderCallback);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  if (mUseHwAccel) {
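    // Share our D3D device manager with the source reader so the decoder can
    // output to DXVA surfaces; if this fails, fall back to software decoding.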
    hr = attr->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
                          mDXVA2Manager->GetDXVADeviceManager());
    if (FAILED(hr)) {
      DECODER_LOG("Failed to set DXVA2 D3D Device manager on source reader attributes");
      mUseHwAccel = false;
    }
  }

  hr = wmf::MFCreateSourceReaderFromByteStream(mByteStream, attr, byRef(mSourceReader));
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConfigureVideoDecoder();
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConfigureAudioDecoder();
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  if (mUseHwAccel && mInfo.mVideo.mHasVideo) {
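    // Fetch the underlying decoder MFT and hand it the D3D device manager as
    // well; if that fails, disable DXVA and reconfigure the output format.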
    RefPtr<IMFTransform> videoDecoder;
    hr = mSourceReader->GetServiceForStream(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                            GUID_NULL,
                                            IID_IMFTransform,
                                            (void**)(IMFTransform**)(byRef(videoDecoder)));

    if (SUCCEEDED(hr)) {
      ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
      hr = videoDecoder->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER,
                                        manager);
      if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
        // Ignore MF_E_TRANSFORM_TYPE_NOT_SET. Vista returns this here
        // on some, perhaps all, video cards. This may be because activating
        // DXVA changes the available output types. It seems to be safe to
        // ignore this error.
        hr = S_OK;
      }
    }
    if (FAILED(hr)) {
      DECODER_LOG("Failed to set DXVA2 D3D Device manager on decoder hr=0x%x", hr);
      mUseHwAccel = false;
      // Re-run the configuration process, so that the output video format
      // is set correctly to reflect that hardware acceleration is disabled.
      // Without this, we'd be running with !mUseHwAccel and the output format
      // set to NV12, which is the format we expect when using hardware
      // acceleration. This would cause us to misinterpret the frame contents.
      hr = ConfigureVideoDecoder();
    }
  }
  if (mInfo.HasVideo()) {
    DECODER_LOG("Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
  }

  // Abort if both video and audio failed to initialize.
  NS_ENSURE_TRUE(mInfo.HasValidMedia(), NS_ERROR_FAILURE);

  // Get the duration, and report it to the decoder if we have it.
  int64_t duration = 0;
  hr = GetSourceReaderDuration(mSourceReader, duration);
  if (SUCCEEDED(hr)) {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(duration);
  }
  // We can seek if we get a duration *and* the reader reports that it's
  // seekable.
  bool canSeek = false;
  if (FAILED(hr) ||
      FAILED(GetSourceReaderCanSeek(mSourceReader, canSeek)) ||
      !canSeek) {
    mDecoder->SetMediaSeekable(false);
  }

  *aInfo = mInfo;
  *aTags = nullptr;
  // aTags can be retrieved using techniques like the one used here:
  // http://blogs.msdn.com/b/mf/archive/2010/01/12/mfmediapropdump.aspx

  return NS_OK;
}