// Construct a Moof with no inherited decode time: walk the moof's child
// boxes, parse every "traf" (track fragment) box, and — if parsing left us
// in a valid state — post-process the CENC (common encryption) data.
Moof::Moof(Box& aBox, Trex& aTrex, Mvhd& aMvhd, Mdhd& aMdhd, Edts& aEdts,
           Sinf& aSinf, bool aIsAudio)
  : mRange(aBox.Range())
  , mMaxRoundingError(35000)
{
  for (Box child = aBox.FirstChild(); child.IsAvailable();
       child = child.Next()) {
    if (!child.IsType("traf")) {
      continue;
    }
    ParseTraf(child, aTrex, aMvhd, aMdhd, aEdts, aSinf, aIsAudio);
  }
  if (!IsValid()) {
    return;
  }
  ProcessCenc();
}
// Construct a Moof, carrying the running decode time across moofs via
// |aDecodeTime| (in/out, updated by ParseTraf). After parsing the child
// "traf" boxes, rewrite the samples' composition ranges so they are
// contiguous, then recompute each sample's dts so downstream consumers see
// decode times that advance by the (adjusted) sample durations.
Moof::Moof(Box& aBox, Trex& aTrex, Mvhd& aMvhd, Mdhd& aMdhd, Edts& aEdts,
           Sinf& aSinf, uint64_t* aDecodeTime, bool aIsAudio)
  : mRange(aBox.Range())
  , mMaxRoundingError(35000)
{
  for (Box box = aBox.FirstChild(); box.IsAvailable(); box = box.Next()) {
    if (box.IsType("traf")) {
      ParseTraf(box, aTrex, aMvhd, aMdhd, aEdts, aSinf, aDecodeTime, aIsAudio);
    }
  }
  if (IsValid()) {
    if (mIndex.Length()) {
      // Ensure the samples are contiguous with no gaps.
      nsTArray<Sample*> ctsOrder;
      for (auto& sample : mIndex) {
        ctsOrder.AppendElement(&sample);
      }
      ctsOrder.Sort(CtsComparator());

      for (size_t i = 1; i < ctsOrder.Length(); i++) {
        ctsOrder[i - 1]->mCompositionRange.end =
          ctsOrder[i]->mCompositionRange.start;
      }

      // In MP4, the duration of a sample is defined as the delta between two
      // decode timestamps. The operation above has updated the duration of
      // each sample as a Sample's duration is
      // mCompositionRange.end - mCompositionRange.start.
      // MSE's TrackBuffersManager expects dts that increased by the sample's
      // duration, so we rewrite the dts accordingly.
      int64_t presentationDuration =
        ctsOrder.LastElement()->mCompositionRange.end -
        ctsOrder[0]->mCompositionRange.start;
      int64_t endDecodeTime =
        aMdhd.ToMicroseconds((int64_t)*aDecodeTime - aEdts.mMediaStart) +
        aMvhd.ToMicroseconds(aEdts.mEmptyOffset);
      int64_t decodeDuration = endDecodeTime - mIndex[0].mDecodeTime;
      // FIX: guard against division by zero when every sample shares the
      // same composition start (presentationDuration == 0); fall back to a
      // zero scale factor so the dts values below stay at dtsOffset.
      double adjust = presentationDuration
                        ? (double)decodeDuration / presentationDuration
                        : 0;
      int64_t dtsOffset = mIndex[0].mDecodeTime;
      int64_t compositionDuration = 0;
      // Adjust the dts, ensuring that the new adjusted dts will never be
      // greater than decodeTime (the next moof's decode start time).
      for (auto& sample : mIndex) {
        sample.mDecodeTime = dtsOffset + int64_t(compositionDuration * adjust);
        compositionDuration += sample.mCompositionRange.Length();
      }
      mTimeRange =
        Interval<Microseconds>(ctsOrder[0]->mCompositionRange.start,
                               ctsOrder.LastElement()->mCompositionRange.end);
    }
    ProcessCenc();
  }
}
// Construct a Moof, parsing child "traf" boxes and collecting "pssh"
// (protection system specific header) boxes for EME. |aDecodeTime| carries
// the running decode time across moofs (in/out, updated by ParseTraf).
Moof::Moof(Box& aBox, Trex& aTrex, Mvhd& aMvhd, Mdhd& aMdhd, Edts& aEdts,
           Sinf& aSinf, uint64_t* aDecodeTime, bool aIsAudio)
  : mRange(aBox.Range())
  , mMaxRoundingError(35000)
{
  nsTArray<Box> psshBoxes;
  for (Box box = aBox.FirstChild(); box.IsAvailable(); box = box.Next()) {
    if (box.IsType("traf")) {
      ParseTraf(box, aTrex, aMvhd, aMdhd, aEdts, aSinf, aDecodeTime, aIsAudio);
    }
    if (box.IsType("pssh")) {
      psshBoxes.AppendElement(box);
    }
  }

  // The EME spec requires that PSSH boxes which are contiguous in the
  // file are dispatched to the media element in a single "encrypted" event.
  // So append contiguous boxes here.
  for (size_t i = 0; i < psshBoxes.Length(); ++i) {
    Box box = psshBoxes[i];
    if (i == 0 || box.Offset() != psshBoxes[i - 1].NextOffset()) {
      mPsshes.AppendElement();
    }
    nsTArray<uint8_t>& pssh = mPsshes.LastElement();
    pssh.AppendElements(box.Header());
    pssh.AppendElements(box.Read());
  }

  if (IsValid()) {
    if (mIndex.Length()) {
      // Ensure the samples are contiguous with no gaps.
      nsTArray<Sample*> ctsOrder;
      for (auto& sample : mIndex) {
        ctsOrder.AppendElement(&sample);
      }
      ctsOrder.Sort(CtsComparator());

      for (size_t i = 1; i < ctsOrder.Length(); i++) {
        ctsOrder[i - 1]->mCompositionRange.end =
          ctsOrder[i]->mCompositionRange.start;
      }

      // In MP4, the duration of a sample is defined as the delta between two
      // decode timestamps. The operation above has updated the duration of
      // each sample as a Sample's duration is
      // mCompositionRange.end - mCompositionRange.start.
      // MSE's TrackBuffersManager expects dts that increased by the sample's
      // duration, so we rewrite the dts accordingly.
      int64_t presentationDuration =
        ctsOrder.LastElement()->mCompositionRange.end -
        ctsOrder[0]->mCompositionRange.start;
      auto decodeOffset =
        aMdhd.ToMicroseconds((int64_t)*aDecodeTime - aEdts.mMediaStart);
      auto offsetOffset = aMvhd.ToMicroseconds(aEdts.mEmptyOffset);
      // FIX: use logical && (was bitwise &) — both conversions must succeed
      // before either Result is unwrapped, and && makes the boolean intent
      // explicit with short-circuit evaluation.
      int64_t endDecodeTime = decodeOffset.isOk() && offsetOffset.isOk()
                                ? decodeOffset.unwrap() + offsetOffset.unwrap()
                                : 0;
      int64_t decodeDuration = endDecodeTime - mIndex[0].mDecodeTime;
      // Avoid division by zero when every sample shares the same composition
      // start; `!!` double-negation dropped — the value converts to bool.
      double adjust = presentationDuration
                        ? (double)decodeDuration / presentationDuration
                        : 0;
      int64_t dtsOffset = mIndex[0].mDecodeTime;
      int64_t compositionDuration = 0;
      // Adjust the dts, ensuring that the new adjusted dts will never be
      // greater than decodeTime (the next moof's decode start time).
      for (auto& sample : mIndex) {
        sample.mDecodeTime = dtsOffset + int64_t(compositionDuration * adjust);
        compositionDuration += sample.mCompositionRange.Length();
      }
      mTimeRange =
        MP4Interval<Microseconds>(ctsOrder[0]->mCompositionRange.start,
                                  ctsOrder.LastElement()->mCompositionRange.end);
    }
    ProcessCenc();
  }
}