Code Example #1
File: track.cpp  Project: Azpidatziak/mkvtoolnix
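// Fill in the track's default mux settings from the probed file properties and the user's global preferences.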
void
Track::setDefaults() {
  auto &settings = Settings::get();

  if (isAudio() && settings.m_setAudioDelayFromFileName)
    m_delay = extractAudioDelayFromFileName();

  if (settings.m_disableAVCompression && (isVideo() || isAudio()))
    m_compression = CompNone;

  m_forcedTrackFlag        = m_properties[Q("forced_track")] == "1";
  m_defaultTrackFlagWasSet = m_properties[Q("default_track")] == "1";
  m_name                   = m_properties[Q("track_name")];
  m_cropping               = m_properties[Q("cropping")];
  if (!m_properties[Q("stereo_mode")].isEmpty())
    m_stereoscopy = m_properties[Q("stereo_mode")].toUInt() + 1;

  auto idx = map_to_iso639_2_code(to_utf8(m_properties[Q("language")]), true);
  if (0 <= idx)
    m_language = to_qs(iso639_languages[idx].iso639_2_code);

  QRegExp re_displayDimensions{"^(\\d+)x(\\d+)$"};
  if (-1 != re_displayDimensions.indexIn(m_properties[Q("display_dimensions")])) {
    m_displayWidth  = re_displayDimensions.cap(1);
    m_displayHeight = re_displayDimensions.cap(2);
  }
}
Code Example #2
File: media.cpp  Project: 0x7678/evilbts
// Put the list of net media in a parameter list
void SDPMedia::putMedia(NamedList& msg, bool putPort)
{
    msg.addParam("media" + suffix(),"yes");
    msg.addParam("formats" + suffix(),formats());
    msg.addParam("transport" + suffix(),transport());
    if (mappings())
	msg.addParam("rtp_mapping" + suffix(),mappings());
    if (isAudio())
	msg.addParam("rtp_rfc2833",rfc2833());
    if (putPort)
	msg.addParam("rtp_port" + suffix(),remotePort());
    if (remoteCrypto())
	msg.addParam("crypto" + suffix(),remoteCrypto());
    // must handle encryption differently
    const char* enc = getValue("encryption");
    if (enc)
	msg.addParam("encryption" + suffix(),enc);
    clearParam("encryption");
    unsigned int n = length();
    for (unsigned int i = 0; i < n; i++) {
	const NamedString* param = getParam(i);
	if (param)
	    msg.addParam("sdp" + suffix() + "_" + param->name(),*param);
    }
}
Code Example #3
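// Sets up PES filters for the service's audio/video elementary streams (plus the PCR PID if needed) and starts TS playback.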
bool TvPlayer::play( ServiceManager *mgr, Service *srv ) {
	bool result = false;
	bool addPCR = true;
	PlayInfo playInfo;

	//	Check that provider support a valid pipe of stream
	playInfo.url = mgr->streamPipe();
	if (playInfo.url.size()) {
		playInfo.pcrPID = srv->pcrPID();
		
		const std::vector<tuner::Pmt::ElementaryInfo> &elements = srv->elements();
		BOOST_FOREACH( tuner::Pmt::ElementaryInfo info, elements ) {
			if (isAudio(info.streamType) || isVideo(info.streamType)) {
				printf( "[TvPlayer] Add PES filter: pid=%04x, type=%04x\n",
					info.pid, info.streamType );
				
				//	Add PES filter
				result = addFilter( mgr, info.pid );
				if (!result) {
					break;
				}

				//	Check if PES pid is PCR pid
				if (playInfo.pcrPID == info.pid) {
					addPCR = false;
				}

				playInfo.pids.push_back( std::make_pair( info.pid, info.streamType ) );
			}
		}

		//	Add PCR PES
		if (result && addPCR) {
			result=addFilter( mgr, srv->pcrPID() );
		}

		//	play TS
		if (result) {
			result = player()->play( playInfo );
		}

		//	Check result
		if (!result) {
			stop( mgr, srv );
		}
	}

	return result;
}
Code Example #4
File: media.cpp  Project: CimpianAlin/yate
// Put the list of net media in a parameter list
void SDPMedia::putMedia(NamedList& msg, bool putPort)
{
    msg.addParam("media" + suffix(),"yes");
    msg.addParam("formats" + suffix(),formats());
    msg.addParam("transport" + suffix(),transport());
    if (mappings())
	msg.addParam("rtp_mapping" + suffix(),mappings());
    if (isAudio())
	msg.addParam("rtp_rfc2833",rfc2833());
    if (putPort)
	msg.addParam("rtp_port" + suffix(),remotePort());
    if (remoteCrypto())
	msg.addParam("crypto" + suffix(),remoteCrypto());
    // must handle encryption differently
    const char* enc = m_rAttrs.getValue("encryption");
    if (enc)
	msg.addParam("encryption" + suffix(),enc);
    putNamedList(msg, m_rAttrs, "sdp" + suffix() + "_");
    putNamedList(msg, m_fmtps, "fmtp_");
}
Code Example #5
File: frame.cpp  Project: libyuni/libyuni
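	// Returns the size in bytes of the decoded audio data held in the frame's first plane.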
	uint Frame::audioSize() const
	{
		assert(pImpl->frame);
		assert(isAudio());
		return pImpl->frame->linesize[0];
	}
Code Example #6
File: frame.cpp  Project: libyuni/libyuni
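	// Returns a pointer to the decoded audio samples in the frame's first data plane.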
	uint8* Frame::audioData()
	{
		assert(pImpl->frame);
		assert(isAudio());
		return pImpl->frame->extended_data[0];
	}
Code Example #7
bool
FFmpegStreamInfo::findCodec()
{
    if (!_pAvStream || !_pAvStream->codec) {
        LOGNS(Omm::AvStream, avstream, error, "missing stream info in " + getName() + " while trying to find decoder");
        return false;
    }
    _pAvCodecContext = _pAvStream->codec;

    //////////// find decoders for audio and video stream ////////////
    LOGNS(Omm::AvStream, avstream, debug, "searching codec with codec id: " +\
        Poco::NumberFormatter::format(_pAvCodecContext->codec_id));

    LOG(ffmpeg, trace, "ffmpeg::avcodec_find_decoder() ...");
    _pAvCodec = avcodec_find_decoder(_pAvCodecContext->codec_id);

    if(!_pAvCodec) {
        LOGNS(Omm::AvStream, avstream, error, "could not find decoder for codec id: " +\
            Poco::NumberFormatter::format(_pAvCodecContext->codec_id));
        return false;
    }

    // Inform the codec that we can handle truncated bitstreams -- i.e.,
    // bitstreams where frame boundaries can fall in the middle of packets
//     if(_pAvCodec->capabilities & CODEC_CAP_TRUNCATED) {
//         _pAvCodecContext->flags |= CODEC_FLAG_TRUNCATED;
//     }

    LOG(ffmpeg, trace, "ffmpeg::avcodec_open() ...");
    if(avcodec_open(_pAvCodecContext, _pAvCodec) < 0) {
        LOGNS(Omm::AvStream, avstream, error, "could not open decoder for codec id: " +\
            Poco::NumberFormatter::format(_pAvCodecContext->codec_id));
        return false;
    }
    LOGNS(Omm::AvStream, avstream, information, "found codec: " + std::string(_pAvCodec->name) + " (" + std::string(_pAvCodec->long_name) + ")");
    LOGNS(Omm::AvStream, avstream, information, "start time: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->start_time) + ", duration: " +\
        Poco::NumberFormatter::format((Poco::Int64)_pAvStream->duration));

    // time_base: fundamental unit of time (in seconds) in terms of which frame timestamps are represented.
    // This is the fundamental unit of time (in seconds) in terms
    // of which frame timestamps are represented. For fixed-fps content,
    // time base should be 1/framerate and timestamp increments should be 1.
    LOGNS(Omm::AvStream, avstream, information, "time base numerator: " + Poco::NumberFormatter::format(_pAvStream->time_base.num) + ", denominator: " +Poco::NumberFormatter::format(_pAvStream->time_base.den));

    // r_frame_rate: Real base framerate of the stream.
    LOGNS(Omm::AvStream, avstream, information, "base frame rate numerator: " + Poco::NumberFormatter::format(_pAvStream->r_frame_rate.num) + ", denominator: " + Poco::NumberFormatter::format(_pAvStream->r_frame_rate.den));

//     LOGNS(Omm::AvStream, avstream, information, Poco::format("average frame rate numerator: %s, denominator: %s",\
//         Poco::NumberFormatter::format(_pAvStream->avg_frame_rate.num),\
//         Poco::NumberFormatter::format(_pAvStream->avg_frame_rate.den)));

    // reference dts (for timestamp generation): Timestamp corresponding to the last dts sync point
    // Initialized when AVCodecParserContext.dts_sync_point >= 0 and
    // a DTS is received from the underlying container. Otherwise set to
    // AV_NOPTS_VALUE by default.
    LOGNS(Omm::AvStream, avstream, information, "first dts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->first_dts) + ", current dts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->cur_dts) + ", reference dts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->reference_dts) + ", last IP pts: " + Poco::NumberFormatter::format((Poco::Int64)_pAvStream->last_IP_pts) + ", last IP duration: " +Poco::NumberFormatter::format((Poco::Int64)_pAvStream->last_IP_duration));

//     LOGNS(Omm::AvStream, avstream, trace, Poco::format("_pStreamInfo->_pAvCodecContext->codec_id %s",\
//         Poco::NumberFormatter::format(_pAvCodecContext->codec_id)));

    if (isAudio()) {
    //     _maxDecodedAudioFrameSize = (AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2;
        _pDecodedAudioFrame = new FFmpegFrame(0, this, _maxDecodedAudioFrameSize);
        // FIXME: set _data[_maxDecodedAudioFrameSize - 1] = 0 in base class
//         _pDecodedAudioFrame->data()[_maxDecodedAudioFrameSize - 1] = 0;
    }
    else if (isVideo()) {
        LOG(ffmpeg, trace, "ffmpeg::avcodec_alloc_frame() ...");
        _pDecodedVideoFrame = new FFmpegFrame(0, this, avcodec_alloc_frame());
    }

    return true;


//     if(_pVideoCodec->frame_rate > 1000 && _pVideoCodec->frame_rate_base == 1) {
//         _pVideoCodec->frame_rate_base = 1000;
//     }
}
Code Example #8
File: track.cpp  Project: Azpidatziak/mkvtoolnix
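// A "regular" track is an audio, video or subtitle track.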
bool
Track::isRegular()
  const {
  return isAudio() || isVideo() || isSubtitles();
}