void QGraphicsBattleAnimationItem::updatePix() { if (m_pix.isNull()) return; if (m_index == -1) { QPixmap n_pix = QPixmap(QSize(frameSize()*4,frameSize()*2)); QPainter p(&n_pix); for (int index = 0; index < 8; index++) { int src_x = m_frame * frameSize(); int src_y = index * frameSize(); p.drawPixmap((index%4)*frameSize(), (index/4)*frameSize(), frameSize(), frameSize(), m_pix.copy(src_x, src_y, frameSize(), frameSize())); } p.end(); this->setPixmap(n_pix); } else { int x = m_frame * frameSize(); int y = m_index * frameSize(); this->setPixmap(m_pix.copy(x,y,frameSize(),frameSize())); } }
// Called once the platform media resources (the OMX codec) have been granted
// to this reader. Reads the container metadata from the now-ready decoder,
// fills in mInfo, and resolves mMetadataPromise — or rejects it with
// NS_ERROR_DOM_MEDIA_METADATA_ERR on any validation failure.
void MediaOmxReader::HandleResourceAllocated() {
  EnsureActive();

  // After resources are available, set the metadata.
  if (!mOmxDecoder->EnsureMetadata()) {
    mMetadataPromise.Reject(NS_ERROR_DOM_MEDIA_METADATA_ERR, __func__);
    return;
  }

  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // Check if the MP3 frame parser found a duration.
    // (Presumably GetDuration() returns a negative value when it didn't,
    // given the >= 0 test below — TODO confirm against MP3FrameParser.)
    mLastParserDuration = mMP3FrameParser.GetDuration();
  }

  if (mLastParserDuration >= 0) {
    // Prefer the parser duration if we have it.
    mInfo.mMetadataDuration = Some(TimeUnit::FromMicroseconds(mLastParserDuration));
  } else {
    // MP3 parser failed to find a duration.
    // Set the total duration (the max of the audio and video track).
    int64_t durationUs;
    mOmxDecoder->GetDuration(&durationUs);
    if (durationUs) {
      mInfo.mMetadataDuration = Some(TimeUnit::FromMicroseconds(durationUs));
    }
  }

  if (mOmxDecoder->HasVideo()) {
    int32_t displayWidth, displayHeight, width, height;
    mOmxDecoder->GetVideoParameters(&displayWidth, &displayHeight,
                                    &width, &height);
    nsIntRect pictureRect(0, 0, width, height);

    // Validate the container-reported frame and pictureRect sizes. This ensures
    // that our video frame creation code doesn't overflow.
    nsIntSize displaySize(displayWidth, displayHeight);
    nsIntSize frameSize(width, height);
    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
      mMetadataPromise.Reject(NS_ERROR_DOM_MEDIA_METADATA_ERR, __func__);
      return;
    }

    // Video track's frame sizes will not overflow. Activate the video track.
    mHasVideo = true;
    mInfo.mVideo.mDisplay = displaySize;
    mPicture = pictureRect;
    mInitialFrame = frameSize;
    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    if (container) {
      // Reset the compositor's current frame to the new display size.
      container->ClearCurrentFrame(IntSize(displaySize.width, displaySize.height));
    }
  }

  if (mOmxDecoder->HasAudio()) {
    int32_t numChannels, sampleRate;
    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
    mHasAudio = true;
    mInfo.mAudio.mChannels = numChannels;
    mInfo.mAudio.mRate = sampleRate;
  }

  // The stream is seekable iff the underlying extractor reports CAN_SEEK.
  mInfo.mMediaSeekable = mExtractor->flags() & MediaExtractor::CAN_SEEK;

  RefPtr<MetadataHolder> metadata = new MetadataHolder();
  metadata->mInfo = mInfo;
  metadata->mTags = nullptr;

#ifdef MOZ_AUDIO_OFFLOAD
  CheckAudioOffload();
#endif

  mMetadataPromise.Resolve(metadata, __func__);
}
// Synchronously read the container metadata on the decode thread.
//
// @param aInfo  out: filled with the track info gathered from the OMX decoder.
// @param aTags  out: always set to nullptr (tags are not extracted here).
// @return NS_OK on success (also when still waiting for media resources),
//         NS_ERROR_FAILURE / the InitOmxDecoder() error otherwise.
nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) {
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  EnsureActive();

  *aTags = nullptr;

  // Initialize the internal OMX Decoder.
  nsresult rv = InitOmxDecoder();
  if (NS_FAILED(rv)) {
    return rv;
  }

  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    // When read sdcard's file on b2g platform at constructor,
    // the mDecoder->GetResource()->GetLength() would return -1.
    // Delay set the total duration on this function.
    mMP3FrameParser.SetLength(mDecoder->GetResource()->GetLength());
    // Feed whatever is already cached into the MP3 frame parser.
    ProcessCachedData(0, true);
  }

  if (!mOmxDecoder->TryLoad()) {
    return NS_ERROR_FAILURE;
  }

  // Resources may not be granted yet; the caller will be notified later and
  // metadata reading resumes then.
  if (IsWaitingMediaResources()) {
    return NS_OK;
  }

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    int64_t duration = mMP3FrameParser.GetDuration();
    // The MP3FrameParser may have reported a duration;
    // it returns -1 if no frame has been parsed.
    if (duration >= 0) {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mUseParserDuration = true;
      mLastParserDuration = duration;
      mDecoder->SetMediaDuration(mLastParserDuration);
    }
  } else {
    // Set the total duration (the max of the audio and video track).
    int64_t durationUs;
    mOmxDecoder->GetDuration(&durationUs);
    if (durationUs) {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mDecoder->SetMediaDuration(durationUs);
    }
  }

  if (mOmxDecoder->HasVideo()) {
    int32_t displayWidth, displayHeight, width, height;
    mOmxDecoder->GetVideoParameters(&displayWidth, &displayHeight,
                                    &width, &height);
    nsIntRect pictureRect(0, 0, width, height);

    // Validate the container-reported frame and pictureRect sizes. This ensures
    // that our video frame creation code doesn't overflow.
    nsIntSize displaySize(displayWidth, displayHeight);
    nsIntSize frameSize(width, height);
    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
      return NS_ERROR_FAILURE;
    }

    // Video track's frame sizes will not overflow. Activate the video track.
    mHasVideo = mInfo.mVideo.mHasVideo = true;
    mInfo.mVideo.mDisplay = displaySize;
    mPicture = pictureRect;
    mInitialFrame = frameSize;
    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    if (container) {
      // Publish an empty frame at the display size so layout can size itself.
      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
                                 nullptr,
                                 mozilla::TimeStamp::Now());
    }
  }

  if (mOmxDecoder->HasAudio()) {
    int32_t numChannels, sampleRate;
    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
    mHasAudio = mInfo.mAudio.mHasAudio = true;
    mInfo.mAudio.mChannels = numChannels;
    mInfo.mAudio.mRate = sampleRate;
  }

  *aInfo = mInfo;

#ifdef MOZ_AUDIO_OFFLOAD
  CheckAudioOffload();
#endif

  return NS_OK;
}
// Synchronously read the container metadata on the decode thread.
// (Older variant of ReadMetadata: no MP3 parser handling, and the video
// display size is taken to equal the frame size.)
//
// @param aInfo  out: filled with the track info gathered from the OMX decoder.
// @param aTags  out: always set to nullptr (tags are not extracted here).
// @return NS_OK on success (also when still waiting for media resources),
//         NS_ERROR_FAILURE / the InitOmxDecoder() error otherwise.
nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) {
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  *aTags = nullptr;

  // Initialize the internal OMX Decoder.
  nsresult rv = InitOmxDecoder();
  if (NS_FAILED(rv)) {
    return rv;
  }

  if (!mOmxDecoder->TryLoad()) {
    return NS_ERROR_FAILURE;
  }

  // Resources may not be granted yet; metadata reading resumes later.
  if (IsWaitingMediaResources()) {
    return NS_OK;
  }

  // Set the total duration (the max of the audio and video track).
  int64_t durationUs;
  mOmxDecoder->GetDuration(&durationUs);
  if (durationUs) {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(durationUs);
  }

  // Check the MediaExtract flag if the source is seekable.
  mDecoder->SetMediaSeekable(mExtractor->flags() & MediaExtractor::CAN_SEEK);

  if (mOmxDecoder->HasVideo()) {
    int32_t width, height;
    mOmxDecoder->GetVideoParameters(&width, &height);
    nsIntRect pictureRect(0, 0, width, height);

    // Validate the container-reported frame and pictureRect sizes. This ensures
    // that our video frame creation code doesn't overflow.
    // Display size is assumed equal to the frame size here (no aspect info).
    nsIntSize displaySize(width, height);
    nsIntSize frameSize(width, height);
    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
      return NS_ERROR_FAILURE;
    }

    // Video track's frame sizes will not overflow. Activate the video track.
    mHasVideo = mInfo.mVideo.mHasVideo = true;
    mInfo.mVideo.mDisplay = displaySize;
    mPicture = pictureRect;
    mInitialFrame = frameSize;
    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    if (container) {
      // Publish an empty frame at the display size so layout can size itself.
      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
                                 nullptr,
                                 mozilla::TimeStamp::Now());
    }
  }

  if (mOmxDecoder->HasAudio()) {
    int32_t numChannels, sampleRate;
    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
    mHasAudio = mInfo.mAudio.mHasAudio = true;
    mInfo.mAudio.mChannels = numChannels;
    mInfo.mAudio.mRate = sampleRate;
  }

  *aInfo = mInfo;

  return NS_OK;
}
// Read and validate the fixed-size header of a raw ("YUV") video resource,
// then derive frame geometry, frame rate, per-packet size, and (when the
// resource length is known) the total duration.
//
// @param aInfo  out: video-only track info (mHasVideo=true, mHasAudio=false).
// @param aTags  out: always set to nullptr.
// @return NS_ERROR_FAILURE on any read/validation failure, else NS_OK.
nsresult nsRawReader::ReadMetadata(nsVideoInfo* aInfo,
                                   nsHTMLMediaElement::MetadataTags** aTags) {
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  if (!ReadFromResource(resource, reinterpret_cast<uint8_t*>(&mMetadata),
                        sizeof(mMetadata)))
    return NS_ERROR_FAILURE;

  // Validate the header
  if (!(mMetadata.headerPacketID == 0 /* Packet ID of 0 for the header*/ &&
        mMetadata.codecID == RAW_ID /* "YUV" */ &&
        mMetadata.majorVersion == 0 &&
        mMetadata.minorVersion == 1))
    return NS_ERROR_FAILURE;

  // Overflow check only: width * height must fit in 32 bits.
  CheckedUint32 dummy = CheckedUint32(static_cast<uint32_t>(mMetadata.frameWidth)) *
                        static_cast<uint32_t>(mMetadata.frameHeight);
  NS_ENSURE_TRUE(dummy.isValid(), NS_ERROR_FAILURE);

  // Guard the divisions below.
  if (mMetadata.aspectDenominator == 0 ||
      mMetadata.framerateDenominator == 0)
    return NS_ERROR_FAILURE; // Invalid data

  // Determine and verify frame display size.
  float pixelAspectRatio = static_cast<float>(mMetadata.aspectNumerator) /
                           mMetadata.aspectDenominator;
  nsIntSize display(mMetadata.frameWidth, mMetadata.frameHeight);
  ScaleDisplayByAspectRatio(display, pixelAspectRatio);
  mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
  nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
  if (!nsVideoInfo::ValidateVideoRegion(frameSize, mPicture, display)) {
    // Video track's frame sizes will overflow. Fail.
    return NS_ERROR_FAILURE;
  }

  mInfo.mHasVideo = true;
  mInfo.mHasAudio = false;
  mInfo.mDisplay = display;

  mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
               mMetadata.framerateDenominator;

  // Make some sanity checks
  if (mFrameRate > 45 ||
      mFrameRate == 0 ||
      pixelAspectRatio == 0 ||
      mMetadata.frameWidth > 2000 ||
      mMetadata.frameHeight > 2000 ||
      mMetadata.chromaChannelBpp != 4 ||
      mMetadata.lumaChannelBpp != 8 ||
      mMetadata.colorspace != 1 /* 4:2:0 */)
    return NS_ERROR_FAILURE;

  // Bytes per packet: pixel payload (luma + chroma bits per pixel, /8 for
  // bytes) plus the per-packet header.
  mFrameSize = mMetadata.frameWidth * mMetadata.frameHeight *
               (mMetadata.lumaChannelBpp + mMetadata.chromaChannelBpp) / 8.0 +
               sizeof(nsRawPacketHeader);

  // duration(us) = USECS_PER_S * frameCount / frameRate, where
  // frameCount = (length - file header) / bytesPerPacket.
  int64_t length = resource->GetLength();
  if (length != -1) {
    mozilla::ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
    mDecoder->GetStateMachine()->SetDuration(USECS_PER_S *
                                             (length - sizeof(nsRawVideoHeader)) /
                                             (mFrameSize * mFrameRate));
  }

  *aInfo = mInfo;

  *aTags = nullptr;

  return NS_OK;
}
// Size in bytes of the underlying AudioTrack's buffer: its frame count
// multiplied by the per-frame size. Returns NO_INIT while no track exists.
ssize_t MediaPlayerService::AudioOutput::bufferSize() const
{
    return (mTrack == 0) ? NO_INIT
                         : ssize_t(mTrack->frameCount() * frameSize());
}
// Asynchronously initialize the Gonk (B2G) hardware video decoder.
//
// Validates the video geometry against the HW codec capability, creates the
// decode loopers and the MediaCodecProxy, optionally wires up a
// GonkNativeWindow for zero-copy graphic buffers, and kicks off asynchronous
// codec allocation. The returned InitPromise is resolved/rejected from the
// codecReserved()/codecCanceled() callbacks.
RefPtr<MediaDataDecoder::InitPromise>
GonkVideoDecoderManager::Init()
{
  mNeedsCopyBuffer = false;

  nsIntSize displaySize(mDisplayWidth, mDisplayHeight);
  nsIntRect pictureRect(0, 0, mVideoWidth, mVideoHeight);

  // HW limits may be overridden via system properties; a property value of
  // -1 (or an unset property, which defaults to "-1") falls back to the
  // compile-time maxima. NOTE(review): the -1 sentinel is stored into an
  // unsigned maxWidth/maxHeight only when atoi(propValue) != -1, so the
  // comparison below always uses a sane value.
  uint32_t maxWidth, maxHeight;
  char propValue[PROPERTY_VALUE_MAX];
  property_get("ro.moz.omx.hw.max_width", propValue, "-1");
  maxWidth = -1 == atoi(propValue) ? MAX_VIDEO_WIDTH : atoi(propValue);
  property_get("ro.moz.omx.hw.max_height", propValue, "-1");
  maxHeight = -1 == atoi(propValue) ? MAX_VIDEO_HEIGHT : atoi(propValue) ;

  // Reject by pixel count, not per-dimension.
  if (mVideoWidth * mVideoHeight > maxWidth * maxHeight) {
    GVDM_LOG("Video resolution exceeds hw codec capability");
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  // Validate the container-reported frame and pictureRect sizes. This ensures
  // that our video frame creation code doesn't overflow.
  nsIntSize frameSize(mVideoWidth, mVideoHeight);
  if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GVDM_LOG("It is not a valid region");
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  // Remember the reader task queue; the async allocation below resolves on it.
  mReaderTaskQueue = AbstractThread::GetCurrent()->AsTaskQueue();
  MOZ_ASSERT(mReaderTaskQueue);

  // A non-null looper means Init() already ran — refuse to re-init.
  if (mDecodeLooper.get() != nullptr) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  if (!InitLoopers(MediaData::VIDEO_DATA)) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  RefPtr<InitPromise> p = mInitPromise.Ensure(__func__);
  // Keep this manager alive across the async hop (android::sp refcount).
  android::sp<GonkVideoDecoderManager> self = this;
  mDecoder = MediaCodecProxy::CreateByType(mDecodeLooper, mMimeType.get(), false);

  uint32_t capability = MediaCodecProxy::kEmptyCapability;
  if (mDecoder->getCapability(&capability) == OK &&
      (capability & MediaCodecProxy::kCanExposeGraphicBuffer)) {
    // Codec can hand out graphic buffers directly: set up a native window
    // (with an explicit buffer queue on Android 5.0+).
#if ANDROID_VERSION >= 21
    sp<IGonkGraphicBufferConsumer> consumer;
    GonkBufferQueue::createBufferQueue(&mGraphicBufferProducer, &consumer);
    mNativeWindow = new GonkNativeWindow(consumer);
#else
    mNativeWindow = new GonkNativeWindow();
#endif
  }

  // Allocate the codec asynchronously; both continuations run on the reader
  // task queue and complete the request before notifying.
  mVideoCodecRequest.Begin(mDecoder->AsyncAllocateVideoMediaCodec()
    ->Then(mReaderTaskQueue, __func__,
      [self] (bool) -> void {
        self->mVideoCodecRequest.Complete();
        self->codecReserved();
      },
      [self] (bool) -> void {
        self->mVideoCodecRequest.Complete();
        self->codecCanceled();
      }));

  return p;
}
// Scene-graph update (runs with the render thread blocked). Two strategies:
//  - fast path: one FrameItemNode per border/center region, each textured
//    independently, so resizes only reposition nodes instead of re-rendering
//    the whole frame;
//  - slow path: render the entire frame into a single texture.
// Returns the (possibly re-created) root node; returning null deletes it.
QSGNode *FrameSvgItem::updatePaintNode(QSGNode *oldNode,
                                       QQuickItem::UpdatePaintNodeData *)
{
    // Nothing to paint without a window, an svg, or any matching prefix.
    if (!window() || !m_frameSvg ||
        (!m_frameSvg->hasElementPrefix(m_frameSvg->actualPrefix()) &&
         !m_frameSvg->hasElementPrefix(m_prefix))) {
        delete oldNode;
        return Q_NULLPTR;
    }

    if (m_fastPath) {
        if (m_textureChanged) {
            // Texture source changed: rebuild the whole node subtree.
            delete oldNode;
            oldNode = 0;
        }

        if (!oldNode) {
            QString prefix = m_frameSvg->actualPrefix();
            oldNode = new FrameNode(prefix, m_frameSvg);

            // `%` is QStringBuilder concatenation; the hints may exist either
            // globally or per-prefix.
            bool tileCenter = (m_frameSvg->hasElement(QStringLiteral("hint-tile-center")) ||
                               m_frameSvg->hasElement(prefix % QLatin1String("hint-tile-center")));
            bool stretchBorders = (m_frameSvg->hasElement(QStringLiteral("hint-stretch-borders")) ||
                                   m_frameSvg->hasElement(prefix % QLatin1String("hint-stretch-borders")));
            FrameItemNode::FitMode borderFitMode = stretchBorders ? FrameItemNode::Stretch : FrameItemNode::Tile;
            FrameItemNode::FitMode centerFitMode = tileCenter ? FrameItemNode::Tile: FrameItemNode::Stretch;

            // Children parent themselves to oldNode; the scene graph owns them.
            new FrameItemNode(this, FrameSvg::NoBorder, centerFitMode, oldNode); // center
            new FrameItemNode(this, FrameSvg::TopBorder | FrameSvg::LeftBorder, FrameItemNode::FastStretch, oldNode); // corners
            new FrameItemNode(this, FrameSvg::TopBorder | FrameSvg::RightBorder, FrameItemNode::FastStretch, oldNode);
            new FrameItemNode(this, FrameSvg::TopBorder, borderFitMode, oldNode); // edges
            new FrameItemNode(this, FrameSvg::BottomBorder, borderFitMode, oldNode);
            new FrameItemNode(this, FrameSvg::BottomBorder | FrameSvg::LeftBorder, FrameItemNode::FastStretch, oldNode);
            new FrameItemNode(this, FrameSvg::BottomBorder | FrameSvg::RightBorder, FrameItemNode::FastStretch, oldNode);
            new FrameItemNode(this, FrameSvg::LeftBorder, borderFitMode, oldNode);
            new FrameItemNode(this, FrameSvg::RightBorder, borderFitMode, oldNode);

            // Freshly-built nodes still need their initial geometry.
            m_sizeChanged = true;
            m_textureChanged = false;
        }

        if (m_sizeChanged) {
            // Reposition every region node for the new item size.
            FrameNode* frameNode = static_cast<FrameNode*>(oldNode);
            QSize frameSize(width(), height());
            QRect geometry = frameNode->contentsRect(frameSize);
            for(int i = 0; i<oldNode->childCount(); ++i) {
                FrameItemNode* it = static_cast<FrameItemNode*>(oldNode->childAtIndex(i));
                it->reposition(geometry, frameSize);
            }

            m_sizeChanged = false;
        }
    } else {
        // Slow path: one texture for the whole frame.
        ManagedTextureNode *textureNode = dynamic_cast<ManagedTextureNode *>(oldNode);
        if (!textureNode) {
            delete oldNode;
            textureNode = new ManagedTextureNode;
            textureNode->setFiltering(QSGTexture::Nearest);
            m_textureChanged = true; //force updating the texture on our newly created node
            oldNode = textureNode;
        }

        // Safe short-circuit: when the node was just created m_textureChanged
        // is true, so texture() is never dereferenced while still null.
        if ((m_textureChanged || m_sizeChanged) || textureNode->texture()->textureSize() != m_frameSvg->size()) {
            QImage image = m_frameSvg->framePixmap().toImage();
            textureNode->setTexture(s_cache->loadTexture(window(), image));
            textureNode->setRect(0, 0, width(), height());

            m_textureChanged = false;
            m_sizeChanged = false;
        }
    }

    return oldNode;
}