void Track::setDefaults() {
  auto &settings = Settings::get();

  if (isAudio() && settings.m_setAudioDelayFromFileName)
    m_delay = extractAudioDelayFromFileName();

  if (settings.m_disableAVCompression && (isVideo() || isAudio()))
    m_compression = CompNone;

  m_forcedTrackFlag        = m_properties[Q("forced_track")]  == "1";
  m_defaultTrackFlagWasSet = m_properties[Q("default_track")] == "1";
  m_name                   = m_properties[Q("track_name")];
  m_cropping               = m_properties[Q("cropping")];

  if (!m_properties[Q("stereo_mode")].isEmpty())
    m_stereoscopy = m_properties[Q("stereo_mode")].toUInt() + 1;

  auto idx = map_to_iso639_2_code(to_utf8(m_properties[Q("language")]), true);
  if (0 <= idx)
    m_language = to_qs(iso639_languages[idx].iso639_2_code);

  QRegExp re_displayDimensions{"^(\\d+)x(\\d+)$"};
  if (-1 != re_displayDimensions.indexIn(m_properties[Q("display_dimensions")])) {
    m_displayWidth  = re_displayDimensions.cap(1);
    m_displayHeight = re_displayDimensions.cap(2);
  }
}
bool TvPlayer::play( ServiceManager *mgr, Service *srv ) {
    bool result = false;
    bool addPCR = true;
    PlayInfo playInfo;

    //  Check that the provider supports a valid stream pipe
    playInfo.url = mgr->streamPipe();
    if (playInfo.url.size()) {
        playInfo.pcrPID = srv->pcrPID();

        const std::vector<tuner::Pmt::ElementaryInfo> &elements = srv->elements();
        BOOST_FOREACH( tuner::Pmt::ElementaryInfo info, elements ) {
            if (isAudio(info.streamType) || isVideo(info.streamType)) {
                printf( "[TvPlayer] Add PES filter: pid=%04x, type=%04x\n", info.pid, info.streamType );

                //  Add PES filter
                result = addFilter( mgr, info.pid );
                if (!result) {
                    break;
                }

                //  Check if the PES pid is the PCR pid
                if (playInfo.pcrPID == info.pid) {
                    addPCR = false;
                }

                playInfo.pids.push_back( std::make_pair( info.pid, info.streamType ) );
            }
        }

        //  Add PCR PES
        if (result && addPCR) {
            result = addFilter( mgr, srv->pcrPID() );
        }

        //  Play TS
        if (result) {
            result = player()->play( playInfo );
        }

        //  Check result
        if (!result) {
            stop( mgr, srv );
        }
    }

    return result;
}
// -----------------------------------------------------------------------------
// CPhoneRingingTone::IsVideoRingingTone
// (other items were commented in a header).
// -----------------------------------------------------------------------------
//
TBool CPhoneRingingTone::IsVideoRingingTone()
    {
#ifdef RD_VIDEO_AS_RINGING_TONE
    if ( RefreshMime() != KErrNone )
        {
        // try to handle as audio
        return EFalse;
        }

    TBool isVideo( EFalse );

    if ( iMimeType &&
         iMimeType->MatchF( KPhoneRingingToneVideoMime ) != KErrNotFound )
        {
        isVideo = ETrue;
        }
    else if ( iMimeType &&
              iMimeType->MatchF( KPhoneRingingToneRealVideoMime ) != KErrNotFound )
        {
        isVideo = ETrue;
        }

    if ( isVideo )
        {
        if ( IsFileInRom() && !IsFileInVideoDirectory() )
            {
            // For ROM files check also location, because
            // MIME check is not fully reliable.
            isVideo = EFalse;
            }
        }

    return isVideo;
#else
    // if extended security -> refresh MIME
    if ( iDrmInPlayback )
        {
        RefreshMime();
        }
    return EFalse;
#endif
    }
RenderImageResource* ImageLoader::renderImageResource()
{
    auto renderer = element().renderer();
    if (!renderer)
        return nullptr;

    // We don't return style generated image because it doesn't belong to the ImageLoader.
    // See <https://bugs.webkit.org/show_bug.cgi?id=42840>
    if (renderer->isRenderImage() && !toRenderImage(*renderer).isGeneratedContent())
        return &toRenderImage(*renderer).imageResource();

    if (renderer->isSVGImage())
        return &toRenderSVGImage(renderer)->imageResource();

#if ENABLE(VIDEO)
    if (renderer->isVideo())
        return &toRenderVideo(*renderer).imageResource();
#endif

    return nullptr;
}
LayoutRect RenderReplaced::replacedContentRect(const LayoutSize* overriddenIntrinsicSize) const
{
    LayoutRect contentRect = contentBoxRect();
    ObjectFit objectFit = style()->objectFit();

    if (objectFit == ObjectFitFill && style()->objectPosition() == RenderStyle::initialObjectPosition()) {
        if (!isVideo() || RuntimeEnabledFeatures::objectFitPositionEnabled())
            return contentRect;
        objectFit = ObjectFitContain;
    }

    LayoutSize intrinsicSize = overriddenIntrinsicSize ? *overriddenIntrinsicSize : this->intrinsicSize();
    if (!intrinsicSize.width() || !intrinsicSize.height())
        return contentRect;

    LayoutRect finalRect = contentRect;
    switch (objectFit) {
    case ObjectFitContain:
    case ObjectFitScaleDown:
    case ObjectFitCover:
        finalRect.setSize(finalRect.size().fitToAspectRatio(intrinsicSize, objectFit == ObjectFitCover ? AspectRatioFitGrow : AspectRatioFitShrink));
        if (objectFit != ObjectFitScaleDown || finalRect.width() <= intrinsicSize.width())
            break;
        // fall through
    case ObjectFitNone:
        finalRect.setSize(intrinsicSize);
        break;
    case ObjectFitFill:
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    LayoutUnit xOffset = minimumValueForLength(style()->objectPosition().x(), contentRect.width() - finalRect.width());
    LayoutUnit yOffset = minimumValueForLength(style()->objectPosition().y(), contentRect.height() - finalRect.height());
    finalRect.move(xOffset, yOffset);
    return finalRect;
}
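// Worked example (not from the source; assumes a 1600x900 intrinsic video in a
// 400x400 content box with object-position left at its initial value of 50% 50%):
//   ObjectFitContain shrinks to the largest 16:9 rect that fits, 400x225, and the
//     50% offset centers it: yOffset = 0.5 * (400 - 225) = 87.5.
//   ObjectFitCover grows to the smallest 16:9 rect that covers the box, ~711x400,
//     giving a negative xOffset of about -155.6 so the overflow is split evenly.
//   ObjectFitNone keeps the intrinsic 1600x900, positioned the same way and
//     clipped to the content box when painted.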
uint Frame::height() const {
    assert(pImpl->frame);
    assert(isVideo());
    return pImpl->frame->height;
}

uint Frame::width() const {
    assert(pImpl->frame);
    assert(isVideo());
    return pImpl->frame->width;
}

uint Frame::frameNumber() const {
    assert(isVideo());
    return ::av_frame_get_best_effort_timestamp(pImpl->frame);
}
uint Frame::lineSizeCr() const {
    assert(pImpl->frame);
    assert(isVideo());
    // stride of the Cr plane (plane 2, matching dataCr() below)
    return pImpl->frame->linesize[2];
}
uint8* Frame::dataCr() const {
    assert(pImpl->frame);
    assert(isVideo());
    return pImpl->frame->data[2];
}
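// Minimal usage sketch (not from the source): computing the mean Cr value of a
// decoded video frame via the accessors above. Assumes the Frame wrapper's header
// (for the uint/uint8 typedefs) and 8-bit planar 4:2:0 data, where the chroma
// plane is (width+1)/2 x (height+1)/2 samples and rows are lineSizeCr() bytes apart.
double averageCr(const Frame& frame) {
    const uint chromaWidth  = (frame.width()  + 1) / 2;
    const uint chromaHeight = (frame.height() + 1) / 2;
    const uint8* plane = frame.dataCr();
    const uint   stride = frame.lineSizeCr();

    unsigned long long sum = 0;
    for (uint y = 0; y < chromaHeight; ++y)
        for (uint x = 0; x < chromaWidth; ++x)
            sum += plane[y * stride + x];  // one chroma sample per byte at 8 bits

    return static_cast<double>(sum) / (chromaWidth * chromaHeight);
}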
void NewsSite::parseRSS(QDomDocument domDoc)
{
    QMutexLocker locker(&m_lock);

    QDomNode channelNode = domDoc.documentElement().namedItem("channel");

    m_desc = channelNode.namedItem("description")
        .toElement().text().simplified();

    QDomNode imageNode = channelNode.namedItem("image");
    if (!imageNode.isNull())
        m_imageURL = imageNode.namedItem("url").toElement().text().simplified();

    QDomNodeList items = domDoc.elementsByTagName("item");

    for (unsigned int i = 0; i < (unsigned) items.count(); i++)
    {
        QDomNode itemNode = items.item(i);

        QString title = ReplaceHtmlChar(itemNode.namedItem("title")
                                        .toElement().text().simplified());

        QDomNode descNode = itemNode.namedItem("description");
        QString description = QString::null;
        if (!descNode.isNull())
        {
            description = descNode.toElement().text().simplified();
            description = ReplaceHtmlChar(description);
        }

        QDomNode linkNode = itemNode.namedItem("link");
        QString url = QString::null;
        if (!linkNode.isNull())
            url = linkNode.toElement().text().simplified();

        QDomNode enclosureNode = itemNode.namedItem("enclosure");
        QString enclosure = QString::null;
        QString enclosure_type = QString::null;
        QString thumbnail = QString::null;
        if (!enclosureNode.isNull())
        {
            QDomAttr enclosureURL = enclosureNode.toElement()
                .attributeNode("url");
            if (!enclosureURL.isNull())
                enclosure = enclosureURL.value();

            QDomAttr enclosureType = enclosureNode.toElement()
                .attributeNode("type");
            if (!enclosureType.isNull())
            {
                enclosure_type = enclosureType.value();

                if (enclosure_type == "image/jpeg")
                {
                    thumbnail = enclosure;
                    enclosure = QString::null;
                }

                // fix for broken feeds that don't add the enclosure type
                if (enclosure_type == "" || enclosure_type.isNull())
                {
                    QStringList imageExtensions = QStringList() << ".jpg" << ".jpeg" << ".png" << ".gif";
                    for (int x = 0; x < imageExtensions.count(); x++)
                    {
                        if (enclosure.toLower().endsWith(imageExtensions[x]))
                        {
                            thumbnail = enclosure;
                            enclosure = QString::null;
                            break;
                        }
                    }
                }
            }
        }

        //////////////////////////////////////////////////////////////
        // From this point forward, we process RSS 2.0 media tags.
        // Please put all other tag processing before this comment.
        // See http://www.rssboard.org/media-rss for details
        //////////////////////////////////////////////////////////////

        // Some media: tags can be enclosed in a media:group item.
        // If this item is present, use it to find the media tags,
        // otherwise, proceed.
        QDomNode mediaGroup = itemNode.namedItem("media:group");
        if (!mediaGroup.isNull())
            itemNode = mediaGroup;

        QDomNode thumbNode = itemNode.namedItem("media:thumbnail");
        if (!thumbNode.isNull())
        {
            QDomAttr thumburl = thumbNode.toElement().attributeNode("url");
            if (!thumburl.isNull())
                thumbnail = thumburl.value();
        }

        QDomNode playerNode = itemNode.namedItem("media:player");
        QString mediaurl = QString::null;
        if (!playerNode.isNull())
        {
            QDomAttr mediaURL = playerNode.toElement().attributeNode("url");
            if (!mediaURL.isNull())
                mediaurl = mediaURL.value();
        }

        // If present, the media:description supersedes the RSS description
        descNode = itemNode.namedItem("media:description");
        if (!descNode.isNull())
            description = descNode.toElement().text().simplified();

        // parse any media:content items looking for images or videos
        QDomElement e = itemNode.toElement();
        QDomNodeList mediaNodes = e.elementsByTagName("media:content");
        for (int x = 0; x < mediaNodes.count(); x++)
        {
            QString medium;
            QString type;
            QString url;
            QDomElement mediaElement = mediaNodes.at(x).toElement();

            if (mediaElement.isNull())
                continue;

            if (mediaElement.hasAttribute("medium"))
                medium = mediaElement.attributeNode("medium").value();

            if (mediaElement.hasAttribute("type"))
                type = mediaElement.attributeNode("type").value();

            if (mediaElement.hasAttribute("url"))
                url = mediaElement.attributeNode("url").value();

            LOG(VB_GENERAL, LOG_DEBUG,
                QString("parseRSS found media:content: medium: %1, type: %2, url: %3")
                    .arg(medium).arg(type).arg(url));

            // if this is an image, use it as the thumbnail if we haven't found one yet
            if (thumbnail.isEmpty() && (medium == "image" || isImage(type)))
                thumbnail = url;

            // if this is a video, use it as the enclosure if we haven't found one yet
            if (enclosure.isEmpty() && (medium == "video" || isVideo(type)))
                enclosure = url;
        }

        insertNewsArticle(NewsArticle(title, description, url, thumbnail,
                                      mediaurl, enclosure));
    }
}
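// Illustrative feed fragment (not from the source), showing what the media:
// handling above extracts when the tags are wrapped in a media:group:
//
//   <item>
//     <title>Example clip</title>
//     <media:group>
//       <media:thumbnail url="http://example.com/clip.jpg"/>
//       <media:content medium="video" url="http://example.com/clip.mp4"/>
//     </media:group>
//   </item>
//
// Here the thumbnail comes from media:thumbnail, and the media:content entry
// with medium="video" becomes the enclosure because no <enclosure> tag was found.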
bool FFmpegStreamInfo::findCodec()
{
    if (!_pAvStream || !_pAvStream->codec) {
        LOGNS(Omm::AvStream, avstream, error, "missing stream info in " + getName() + " while trying to find decoder");
        return false;
    }
    _pAvCodecContext = _pAvStream->codec;

    //////////// find decoders for audio and video stream ////////////
    LOGNS(Omm::AvStream, avstream, debug, "searching codec with codec id: "
        + Poco::NumberFormatter::format(_pAvCodecContext->codec_id));

    LOG(ffmpeg, trace, "ffmpeg::avcodec_find_decoder() ...");
    _pAvCodec = avcodec_find_decoder(_pAvCodecContext->codec_id);

    if (!_pAvCodec) {
        LOGNS(Omm::AvStream, avstream, error, "could not find decoder for codec id: "
            + Poco::NumberFormatter::format(_pAvCodecContext->codec_id));
        return false;
    }

    // Inform the codec that we can handle truncated bitstreams -- i.e.,
    // bitstreams where frame boundaries can fall in the middle of packets
    //     if (_pAvCodec->capabilities & CODEC_CAP_TRUNCATED) {
    //         _pAvCodecContext->flags |= CODEC_FLAG_TRUNCATED;
    //     }

    LOG(ffmpeg, trace, "ffmpeg::avcodec_open() ...");
    if (avcodec_open(_pAvCodecContext, _pAvCodec) < 0) {
        LOGNS(Omm::AvStream, avstream, error, "could not open decoder for codec id: "
            + Poco::NumberFormatter::format(_pAvCodecContext->codec_id));
        return false;
    }

    LOGNS(Omm::AvStream, avstream, information, "found codec: " + std::string(_pAvCodec->name)
        + " (" + std::string(_pAvCodec->long_name) + ")");
    LOGNS(Omm::AvStream, avstream, information, "start time: "
        + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->start_time)
        + ", duration: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->duration));

    // time_base: the fundamental unit of time (in seconds) in terms of which
    // frame timestamps are represented. For fixed-fps content, the time base
    // should be 1/framerate and timestamp increments should be 1.
    LOGNS(Omm::AvStream, avstream, information, "time base numerator: "
        + Poco::NumberFormatter::format(_pAvStream->time_base.num)
        + ", denominator: " + Poco::NumberFormatter::format(_pAvStream->time_base.den));

    // r_frame_rate: real base framerate of the stream.
    LOGNS(Omm::AvStream, avstream, information, "base frame rate numerator: "
        + Poco::NumberFormatter::format(_pAvStream->r_frame_rate.num)
        + ", denominator: " + Poco::NumberFormatter::format(_pAvStream->r_frame_rate.den));

    //     LOGNS(Omm::AvStream, avstream, information, Poco::format("average frame rate numerator: %s, denominator: %s",
    //         Poco::NumberFormatter::format(_pAvStream->avg_frame_rate.num),
    //         Poco::NumberFormatter::format(_pAvStream->avg_frame_rate.den)));

    // reference dts (for timestamp generation): timestamp corresponding to the last dts sync point.
    // Initialized when AVCodecParserContext.dts_sync_point >= 0 and a DTS is received from the
    // underlying container. Otherwise set to AV_NOPTS_VALUE by default.
    LOGNS(Omm::AvStream, avstream, information, "first dts: "
        + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->first_dts)
        + ", current dts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->cur_dts)
        + ", reference dts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->reference_dts)
        + ", last IP pts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->last_IP_pts)
        + ", last IP duration: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->last_IP_duration));

    //     LOGNS(Omm::AvStream, avstream, trace, Poco::format("_pStreamInfo->_pAvCodecContext->codec_id %s",
    //         Poco::NumberFormatter::format(_pAvCodecContext->codec_id)));

    if (isAudio()) {
        //         _maxDecodedAudioFrameSize = (AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2;
        _pDecodedAudioFrame = new FFmpegFrame(0, this, _maxDecodedAudioFrameSize);
        // FIXME: set _data[_maxDecodedAudioFrameSize - 1] = 0 in base class
        //         _pDecodedAudioFrame->data()[_maxDecodedAudioFrameSize - 1] = 0;
    }
    else if (isVideo()) {
        LOG(ffmpeg, trace, "ffmpeg::avcodec_alloc_frame() ...");
        _pDecodedVideoFrame = new FFmpegFrame(0, this, avcodec_alloc_frame());
    }

    return true;

    //     if (_pVideoCodec->frame_rate > 1000 && _pVideoCodec->frame_rate_base == 1) {
    //         _pVideoCodec->frame_rate_base = 1000;
    //     }
}
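// Note (not from the source): avcodec_open() and avcodec_alloc_frame() belong to the
// older libavcodec API. On current FFmpeg the equivalent calls would be roughly:
//
//     if (avcodec_open2(_pAvCodecContext, _pAvCodec, nullptr) < 0)
//         return false;
//     AVFrame* pFrame = av_frame_alloc();   // instead of avcodec_alloc_frame()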
bool Track::isRegular() const {
  return isAudio() || isVideo() || isSubtitles();
}
bool AccessibilityMediaControlsContainer::controllingVideoElement() const
{
    auto element = parentMediaElement(*m_renderer);
    return !element || element->isVideo();
}