示例#1
0
// Records an ongoing V LIVE broadcast: repeatedly re-reads the live m3u8
// playlist and downloads each media segment until the page reports the
// stream status "LIVE_END".
// url: the V LIVE video page URL.
void dl_vlive_live(std::string url)
{
	streamInfo currentInfo;
	vidUrlInfo currentLiveInfo;
	m3u8Info currentMediaInfo;
	std::string v_title;
	std::string strUrl;
	int cnt = 0;    // running index of downloaded segments, passed to retrieveFileFromUrl
	float time = 0; // written by retrieveFileFromUrl — presumably the download duration; confirm
	
	// Fetch the page twice: once for the stream status, once for the live playlist URLs.
	getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_VLIVE);
	getStreamInfo(retrievePage((char *)url.c_str()), NULL, &currentLiveInfo, TYPE_VLIVE_LIVE);
	// Rewrite every playlist URL into its downloadable form.
	// NOTE(review): the bound is inclusive (i <= num_list) — confirm num_list is
	// the index of the last entry rather than the entry count.
	for (int i = 0; i <= currentLiveInfo.num_list; i++)
		modifyUrl(&currentLiveInfo.url[i], TYPE_VLIVE_LIVE);
	printLine(3);
	getline(cin, v_title); // ask the user for the output file name
	printLine(5);
	// Poll until the broadcast ends.
	while (strcmp(currentInfo.v_stat.c_str(), "LIVE_END") != 0)
	{
		// Re-read the last playlist in the list (presumably the highest quality — confirm).
		parseStream(retrievePage((char*)currentLiveInfo.url[currentLiveInfo.num_list].c_str()), &currentMediaInfo);
		for (int i = 0; i <= currentMediaInfo.list_num; i++)
		{
			strUrl = "http://vlive.hls.edgesuite.net/" + parseUrl(currentLiveInfo.url[currentLiveInfo.num_list]) + currentMediaInfo.fname[i];
			retrieveFileFromUrl(strUrl, v_title, TYPE_VLIVE_LIVE, cnt, &time);
			// Sleep for the segment duration minus the measured time, with a
			// 250 ms margin, so polling keeps pace with the live stream.
			std::this_thread::sleep_for(std::chrono::milliseconds((int)(currentMediaInfo.sec_sleep[i] - time) - 250));
			cnt++;
		}
		// Refresh the stream status for the loop condition.
		getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_VLIVE);
	}
	cout << v_title;
	printLine(6);
}
示例#2
0
void dl_tvpot(std::string url)
{
	streamInfo currentInfo;
	vidUrlInfo currentVidInfo;
	std::string url_xml;
	std::string url_json;
	std::string v_title;

	int list = 0;

	if (getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_TVPOT) == RETRIEVE_FAIL)
		printLine(-1);
	else
	{
		printLine(2);
		url_json = "http://videofarm.daum.net/controller/api/closed/v1_2/IntegratedMovieData.json?vid=" + currentInfo.v_id + "&dte_type=WEB";
		parseJson(url_json, &currentVidInfo, &list, TYPE_TVPOT);
		url_xml = "http://videofarm.daum.net/controller/api/open/v1_2/MovieLocation.apixml?vid=" + currentInfo.v_id + "&profile=" + profile_list[currentVidInfo.profile];
		parseXML(url_xml, &currentVidInfo, TYPE_TVPOT);
		printLine(3);
		getline(cin, v_title);
		printLine(4);
		retrieveFileFromUrl(currentVidInfo.url[0], v_title, TYPE_TVPOT, 1, NULL);
		cout << v_title;
		printLine(0);
	}
}
示例#3
0
int ProCamera::dropFrameBuffer(int streamId, int count) {
    StreamInfo& si = getStreamInfo(streamId);

    if (!si.cpuStream) {
        return BAD_VALUE;
    } else if (count < 0) {
        return BAD_VALUE;
    }

    if (!si.synchronousMode) {
        ALOGW("%s: No need to drop frames on asynchronous streams,"
              " as asynchronous mode only keeps 1 latest frame around.",
              __FUNCTION__);
        return BAD_VALUE;
    }

    int numDropped = 0;
    for (int i = 0; i < count; ++i) {
        CpuConsumer::LockedBuffer buffer;
        if (si.cpuConsumer->lockNextBuffer(&buffer) != OK) {
            break;
        }

        si.cpuConsumer->unlockBuffer(buffer);
        numDropped++;
    }

    return numDropped;
}
示例#4
0
// Downloads a public Facebook video. Flow: obtain an app access token from
// the OAuth endpoint, extract the page name from the video URL with a regex,
// query the Graph API (v2.7) for the page's videos and download the source
// file the parser selects.
// NOTE(review): the app client_id/client_secret are hard-coded in the token
// URL below and are therefore extractable from any shipped binary — consider
// moving these credentials out of the source.
void dl_facebook(std::string url)
{
	vidUrlInfo currentVidInfo;
	streamInfo currentStreamInfo;
	// Matches m./www. facebook.com "/<page>/videos/<id>" URLs; capture 1 is the page name.
	std::regex exp_facebook("^https?:\\/\\/[mw]+\\.facebook\\.com\\/([^]+)\\/videos\\/\\d+\\W?$");
	std::smatch match_page;
	std::string tokenUrl = "https://graph.facebook.com/oauth/access_token?client_id=841023869361760&client_secret=e742f0e162c7f0cf412c80434f07a95c&grant_type=client_credentials";
	std::string jsonUrl;
	std::string v_title;
	int stat_param = 0;
	
	// Token retrieval failed -> report the error and bail.
	if(getStreamInfo(retrievePage((char*)tokenUrl.c_str()), &currentStreamInfo, NULL, TYPE_FB_TOKEN) == RETRIEVE_FAIL)
		printLine(-1);
	else
	{
		printLine(2);
		std::regex_search(url, match_page, exp_facebook);
		// List the page's videos; url_orig is presumably matched against
		// permalink_url inside parseJson — confirm.
		jsonUrl = "https://graph.facebook.com/v2.7/" + match_page[1].str() + "/videos?fields=source,description,permalink_url,format&access_token=" + currentStreamInfo.key;
		currentVidInfo.url_orig = url;
		stat_param = parseJson(jsonUrl, &currentVidInfo, NULL, TYPE_FB);
		if (stat_param == PARSE_SUCC)
		{
			printLine(3);
			getline(cin, v_title); // output file name from the user
			printLine(4);
			retrieveFileFromUrl(currentVidInfo.url[0], v_title, TYPE_FB, 1, NULL);
			cout << v_title;
			printLine(0);
		}
		else
			printLine(7); // video not found / not parsable
	}
}
示例#5
0
/*
 * Shows the current stream (station) info on the 2x16 LCD, scrolling the
 * text one position per call via the shared scroll offset.
 *
 * Bug fix: the local buffer was declared char[48] but written at index 48
 * (streamInfo[48] = '\0'), one byte past the end. The buffer is now 49
 * bytes, so the explicit terminator is in bounds; it also guarantees
 * termination when strncpy() copies 48 bytes without finding a NUL.
 */
void displayStreamInfo(){
    LcdBackLight(LCD_BACKLIGHT_ON);
    char offset = getScrollOffset();


    if (offset == 0)
        (*write_display_ptr[1])("                ", 17);

    (*write_display_ptr[0])("  Station Info  ", 17);

    //char* streamInfo = getStreamInfo();

    // Copy the shared StreamInfo buffer so we can truncate it locally
    // without clobbering the original.
    char streamInfo[49] = "    No  info    ";
    char* streamInfoPtr = &streamInfo[0];
    strncpy(streamInfo, getStreamInfo(), 48); // may not NUL-terminate...
    streamInfo[48] = '\0';                    // ...so terminate explicitly (now in bounds)

    if (offset >= 0)
        streamInfoPtr += offset;
    // Show a 16-character window at the current scroll position.
    // NOTE(review): assumes offset never exceeds 32, otherwise this write
    // lands past the buffer — confirm against incrementScrollOffset().
    streamInfoPtr[16] = '\0';

    (*write_display_ptr[1])(streamInfoPtr, 17);

    incrementScrollOffset();

    NutDelay(500);
}
示例#6
0
int ProCamera::waitForFrameBuffer(int streamId) {
    status_t stat = BAD_VALUE;
    Mutex::Autolock al(mWaitMutex);

    StreamInfo& si = getStreamInfo(streamId);

    if (si.frameReady > 0) {
        int numFrames = si.frameReady;
        si.frameReady = 0;
        return numFrames;
    } else {
        while (true) {
            stat = mWaitCondition.waitRelative(mWaitMutex,
                                                mWaitTimeout);
            if (stat != OK) {
                ALOGE("%s: Error while waiting for frame buffer: %d",
                    __FUNCTION__, stat);
                return stat;
            }

            if (si.frameReady > 0) {
                int numFrames = si.frameReady;
                si.frameReady = 0;
                return numFrames;
            }
            // else it was some other stream that got unblocked
        }
    }

    return stat;
}
示例#7
0
/**
 * Creates a camera stream whose frames are readable from the CPU.
 *
 * Sets up a BufferQueue with a CpuConsumer on the consumer side and a Surface
 * on the producer side, creates the stream against that producer, then
 * records the consumer and listener in the stream's StreamInfo (the listener
 * reference is kept there purely for lifetime management).
 *
 * Improvement: the StreamInfo record is looked up once into a reference
 * instead of five separate getStreamInfo(*streamId) calls, matching the
 * StreamInfo& usage elsewhere in this class.
 *
 * @param width/height/format  requested stream geometry and pixel format
 * @param heapCount            number of CPU-side buffers
 * @param synchronousMode      recorded in StreamInfo (see commented-out
 *                             CpuConsumer argument below)
 * @param cpuConsumer          out: the created consumer (NULL on failure)
 * @param streamId             out: id of the created stream
 * @return OK on success, NO_INIT without a camera, or createStream()'s error
 */
status_t ProCamera::createStreamCpu(int width, int height, int format,
                                    int heapCount,
                                    bool synchronousMode,
                                    /*out*/
                                    sp<CpuConsumer>* cpuConsumer,
                                    int* streamId)
{
    ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
                                                                        format);

    *cpuConsumer = NULL;

    sp <IProCameraUser> c = mCamera;
    if (c == 0) return NO_INIT;

    sp<BufferQueue> bq = new BufferQueue();
    sp<CpuConsumer> cc = new CpuConsumer(bq, heapCount/*, synchronousMode*/);
    cc->setName(String8("ProCamera::mCpuConsumer"));

    sp<Surface> stc = new Surface(bq);

    status_t s = createStream(width, height, format,
                              stc->getIGraphicBufferProducer(),
                              streamId);

    if (s != OK) {
        ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__,
                    width, height, format);
        return s;
    }

    sp<ProFrameListener> frameAvailableListener =
        new ProFrameListener(this, *streamId);

    StreamInfo& si = getStreamInfo(*streamId);
    si.cpuStream = true;
    si.cpuConsumer = cc;
    si.synchronousMode = synchronousMode;
    si.stc = stc;
    // for lifetime management
    si.frameAvailableListener = frameAvailableListener;

    cc->setFrameAvailableListener(frameAvailableListener);

    *cpuConsumer = cc;

    return s;
}
示例#8
0
void ProCamera::onFrameAvailable(int streamId) {
    ALOGV("%s: streamId = %d", __FUNCTION__, streamId);

    sp<ProCameraListener> listener = mListener;
    StreamInfo& stream = getStreamInfo(streamId);

    if (listener.get() != NULL) {
        listener->onFrameAvailable(streamId, stream.cpuConsumer);
    }

    // Unblock waitForFrame(id) callers
    {
        Mutex::Autolock al(mWaitMutex);
        getStreamInfo(streamId).frameReady++;
        mWaitCondition.broadcast();
    }
}
/**
 * Converts this video frame to the overlay's pixel format with libswscale and
 * writes the scaled image straight into the overlay's data planes.
 *
 * The SwsContext is cached on the stream info; sws_getCachedContext() reuses
 * it when the parameters are unchanged and reallocates it otherwise. Returns
 * early with a warning when the conversion context cannot be set up.
 *
 * Fix: removed the unused local 'outSlizeHeight' that captured sws_scale()'s
 * return value without ever reading it.
 */
void
FFmpegFrame::write(Omm::AvStream::Overlay* pOverlay)
{
    LOGNS(Omm::AvStream, avstream, debug, "convert video frame to overlay pixel format ... ");

    getStreamInfo()->printInfo();
    int streamWidth = getStreamInfo()->width();
    int streamHeight = getStreamInfo()->height();
    Omm::AvStream::Meta::ColorCoding streamPixelFormat = getStreamInfo()->pixelFormat();

//     int targetWidth = pOverlay->getWidth();
//     int targetHeight = pOverlay->getHeight();
    // NOTE(review): the target is scaled to the *stream* dimensions, not the
    // overlay dimensions (see the commented-out lines above) — confirm the
    // overlay is always stream-sized.
    int targetWidth = getStreamInfo()->width();
    int targetHeight = getStreamInfo()->height();

    Omm::AvStream::Meta::ColorCoding targetPixelFormat = pOverlay->getFormat();

    LOGNS(Omm::AvStream, avstream, debug, "stream pixelFormat: " + Poco::NumberFormatter::format(streamPixelFormat) + ", target pixelFormat: " +\
        Poco::NumberFormatter::format(targetPixelFormat));

    int scaleAlgo = SWS_BICUBIC;

    struct SwsContext* pConvertContext = static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext;
    LOG(ffmpeg, trace, "ffmpeg::sws_getCachedContext() ... ");
    static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext = sws_getCachedContext(pConvertContext,
                                              streamWidth, streamHeight, FFmpegMeta::toFFmpegPixFmt(streamPixelFormat),
                                              targetWidth, targetHeight, FFmpegMeta::toFFmpegPixFmt(targetPixelFormat),
                                              scaleAlgo, NULL, NULL, NULL);
    pConvertContext = static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext;

    if (pConvertContext == 0) {
        LOGNS(Omm::AvStream, avstream, warning, "cannot initialize image conversion context");
        return;
    }
    else {
        LOGNS(Omm::AvStream, avstream, debug, "success: image conversion context set up.");
    }

    printInfo();
    LOG(ffmpeg, trace, "ffmpeg::sws_scale() ...");
    sws_scale(pConvertContext,
              _pAvFrame->data, _pAvFrame->linesize,
              0, streamHeight,
              pOverlay->_data, pOverlay->_pitch);
    LOG(ffmpeg, debug, "ffmpeg::sws_scale() frame written to overlay.");
}
示例#10
0
// Converts this frame to |targetFormat| via libswscale and returns a newly
// allocated frame holding the result (caller takes ownership; 0 on failure).
// targetWidth/targetHeight of -1 default to the source dimensions.
// NOTE(review): targetWidth/targetHeight are computed below but the cached
// conversion context is created with (width, height) for BOTH source and
// destination, so non-default target dimensions are silently ignored. This
// looks like a bug, but allocateVideoFrame() takes only the format, so the
// output frame's true dimensions are not visible here — confirm before
// changing the sws_getCachedContext() arguments.
Omm::AvStream::Frame*
FFmpegFrame::convert(Omm::AvStream::Meta::ColorCoding targetFormat, int targetWidth, int targetHeight)
{
    getStreamInfo()->printInfo();
    int width = getStreamInfo()->width();
    int height = getStreamInfo()->height();
    if (targetWidth == -1) {
        targetWidth = width;
    }
    if (targetHeight == -1) {
        targetHeight = height;
    }
    Omm::AvStream::Meta::ColorCoding inPixFormat = getStreamInfo()->pixelFormat();

    LOGNS(Omm::AvStream, avstream, debug, "source pixelFormat: " + Poco::NumberFormatter::format(inPixFormat) + ", target pixelFormat: " + Poco::NumberFormatter::format(targetFormat));

    int scaleAlgo = SWS_BICUBIC;
//     struct SwsContext *pImgConvertContext = 0;

    // Context is cached on the stream info; sws_getCachedContext() reuses it
    // when the parameters match and reallocates it otherwise.
    struct SwsContext* pConvertContext = static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext;
    LOG(ffmpeg, trace, "ffmpeg::sws_getCachedContext() ...");
    static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext = sws_getCachedContext(pConvertContext,
                                              width, height, FFmpegMeta::toFFmpegPixFmt(inPixFormat),
                                              width, height, FFmpegMeta::toFFmpegPixFmt(targetFormat),
                                              scaleAlgo, NULL, NULL, NULL);
    pConvertContext = static_cast<FFmpegStreamInfo*>(getStreamInfo())->_pImgConvertContext;

    if (pConvertContext == 0) {
        LOGNS(Omm::AvStream, avstream, warning, "cannot initialize image conversion context");
        return 0;
    }
    else {
        LOGNS(Omm::AvStream, avstream, debug, "success: image conversion context set up.");
    }

    // FIXME: _pStream->pCodecContext is wrong with pOutFrame, because e.g. pix_fmt changed
    FFmpegFrame* pRes = static_cast<FFmpegFrame*>(getStreamInfo()->allocateVideoFrame(targetFormat));

    printInfo();
    LOG(ffmpeg, trace, "ffmpeg::sws_scale() ...");
    sws_scale(pConvertContext,
              _pAvFrame->data, _pAvFrame->linesize,
              0, height,
              pRes->_pAvFrame->data, pRes->_pAvFrame->linesize);

    pRes->printInfo();

    return pRes;
}
示例#11
0
// Downloads a V LIVE video. Depending on the stream status reported by the
// page this either downloads the finished VOD, waits for an upcoming
// broadcast and then records it live, or reports that the video is
// unavailable (LIVE_END without VOD, or CANCELED).
// url: the V LIVE video page URL.
void dl_vlive(std::string url)
{
	streamInfo currentInfo;
	vidUrlInfo currentVidInfo;
	std::string url_json;
	std::string v_title;
	int num_list = 0;

	if (getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_VLIVE) == RETRIEVE_FAIL)
		printLine(-1);
	else
	{
		printLine(2);
		if (strcmp(currentInfo.v_stat.c_str(), "VOD_ON_AIR") == 0 || strcmp(currentInfo.v_stat.c_str(), "BIG_EVENT_INTRO") == 0)
		{
			// Finished VOD: resolve the play-info JSON and download the last
			// stream in the list.
			url_json = "http://global.apis.naver.com/rmcnmv/rmcnmv/vod_play_videoInfo.json?videoId=" + currentInfo.longID + "&key=" + currentInfo.key + "&ptc=http&doct=json&cpt=vtt";
			parseJson(url_json, &currentVidInfo, &num_list, TYPE_VLIVE);
			printLine(3);
			getline(cin, v_title); // output file name from the user
			printLine(4);
			retrieveFileFromUrl(currentVidInfo.url[num_list - 1], v_title, TYPE_VLIVE, 1, NULL);
			cout << v_title;
			printLine(0);
		}
		else if (strcmp(currentInfo.v_stat.c_str(), "LIVE_END") == 0)
			cout << "다시보기 준비중입니다. 나중에 다시 시도해주세요" << endl;
		else if (strcmp(currentInfo.v_stat.c_str(), "COMING_SOON") == 0)
		{
			while (strcmp(currentInfo.v_stat.c_str(), "COMING_SOON") == 0)
			{
				cout << "지금은 방송 준비중입니다. 1초 후 다시 녹화를 시도합니다..." << endl;
				std::this_thread::sleep_for(std::chrono::milliseconds(1000));
				// Bug fix: refresh the stream status each iteration. The loop
				// previously never updated currentInfo.v_stat, so it spun
				// forever once the "COMING_SOON" state was seen.
				getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_VLIVE);
			}
			dl_vlive_live(url);
		}
		else if (strcmp(currentInfo.v_stat.c_str(), "CANCELED") == 0)
			cout << "방송이 예기치 않게 취소되었습니다." << endl;
		else
			dl_vlive_live(url); // already live -> record immediately
	}
}
示例#12
0
void dl_tvcast(std::string url)
{
	streamInfo currentInfo;
	vidUrlInfo currentVidInfo;
	std::string url_json;
	std::string v_title;
	int num_list = 0;

	if (getStreamInfo(retrievePage((char *)url.c_str()), &currentInfo, NULL, TYPE_TVCAST) == RETRIEVE_FAIL)
		printLine(-1);
	else
	{
		printLine(2);
		url_json = "http://play.rmcnmv.naver.com/vod/play/" + currentInfo.longID + "?skn=tvcast_white&key=" + currentInfo.key + "&cc=KR";
		parseJson(url_json, &currentVidInfo, &num_list, TYPE_TVCAST);
		printLine(3);
		getline(cin, v_title);
		printLine(4);
		retrieveFileFromUrl(currentVidInfo.url[num_list - 1], v_title, TYPE_TVCAST, 1, NULL);
		cout << v_title;
		printLine(0);
	}
}
示例#13
0
/*
 * Entry point: reads the streaming configuration file, builds one
 * ThreadArgs/StreamContext pair plus two POSIX message queues per configured
 * stream (bounded by MAX_NUMBER_STREAMS), starts a file-reader and a
 * streamer thread for each, and joins them until shutdown.
 *
 * Always returns 0; failures are reported via DEBUG_PRINT and any
 * partially-initialized state is released through cleanUp().
 */
int main(int argc, char* argv[])
{
    int     i;
    FILE*   pFile;
    size_t  iLineSize;
    bool    bInitSuccess;
    int     iCurrLine;
    struct sigaction  sSigAct;

    /* Install the SIGINT handler first so Ctrl-C is caught during setup. */
    memset(&sSigAct,0,sizeof(struct sigaction));
    sSigAct.sa_handler = inrptHandler;
    if ( 0 != sigaction(SIGINT, &sSigAct,NULL))
    {
        DEBUG_PRINT("Signal Handler Registration Failed\n");
        return 0;
    }

    bInitSuccess = false;
    iCurrLine = 0,i = 0,pFile = NULL;
    iLineSize = MAX_CONFIG_LINE;
    /* Both global tables hold pointers, so clear them as arrays of void*. */
    memset(m_apArgs,0,sizeof(void*)*MAX_NUMBER_STREAMS);
    memset(m_apStreamContext,0,sizeof(void*)*MAX_NUMBER_STREAMS);

    pFile = openFile(CONFIG_FILE_NAME,"r");
    if (pFile)
    {
        /* NOTE(review): malloc result is not checked; getline() would then
           be called with a NULL buffer but a non-zero size — confirm. */
        char* pcLine = malloc(iLineSize);

        bInitSuccess = true;
        /* One config line describes one stream; stop at the table limit. */
        while((iCurrLine<MAX_NUMBER_STREAMS) && (getline((char**)&pcLine,&iLineSize,pFile) != -1))
        {
            m_apArgs[iCurrLine] = (ThreadArgs*)malloc(sizeof(ThreadArgs));
            memset(m_apArgs[iCurrLine],0,sizeof(ThreadArgs));

            m_apStreamContext[iCurrLine] = (StreamContext*)malloc(sizeof(StreamContext));
            memset(m_apStreamContext[iCurrLine],0,sizeof(StreamContext));

            parseConfigLine(pcLine,m_apArgs[iCurrLine]);
            /*assign cmd args to thread context*/
            m_apStreamContext[iCurrLine]->psArgs = m_apArgs[iCurrLine];
            printArg(m_apArgs[iCurrLine]);
            /*get MPEG2TS data from file*/
            if (!getStreamInfo(m_apArgs[iCurrLine]->acFileName,&(m_apStreamContext[iCurrLine]->iBitrate),
                              &(m_apStreamContext[iCurrLine]->dDuration),
                              &(m_apStreamContext[iCurrLine]->dAvgPeriod)))
            {
                DEBUG_PRINT("Failed to retriev MPEG2TS data for %s\n",m_apArgs[iCurrLine]->acFileName);
                bInitSuccess = false;
                break;
            }

            /*create queue names*/
            memset(m_apStreamContext[iCurrLine]->acStreamQueueName,0,MAX_QUEUE_NAME);
            sprintf(m_apStreamContext[iCurrLine]->acStreamQueueName,"/StreamingQueue%d",iCurrLine);

            memset(m_apStreamContext[iCurrLine]->acEmptyQueueName,0,MAX_QUEUE_NAME);
            sprintf(m_apStreamContext[iCurrLine]->acEmptyQueueName,"/EmptyQueue%d",iCurrLine);

            /* One queue carries data to the streamer, the other returns
               empty buffers to the reader. */
            if (createQueue(m_apStreamContext[iCurrLine]->acStreamQueueName,sizeof(AppMsg),MAX_DATA_MSG + 1,&(m_apStreamContext[iCurrLine]->iStreamQid)) < 0)
            {
                DEBUG_PRINT("Failed to create %s queue\n",m_apStreamContext[iCurrLine]->acStreamQueueName);
                bInitSuccess = false;
                break;
            }
            if (createQueue(m_apStreamContext[iCurrLine]->acEmptyQueueName,sizeof(AppMsg),MAX_DATA_MSG + 1,&(m_apStreamContext[iCurrLine]->iEmptyQid)) < 0)
            {
                DEBUG_PRINT("Failed to create %s queue\n",m_apStreamContext[iCurrLine]->acEmptyQueueName);
                bInitSuccess = false;
                break;
            }

            iCurrLine+=1;
            memset(pcLine,0,iLineSize);
        }
        free(pcLine);
    }
    /* NOTE(review): also reached with pFile == NULL when the open failed —
       confirm closeFile() tolerates a NULL handle. */
    closeFile(pFile);

    if (!m_apArgs[0])
    {
        DEBUG_PRINT("No configuration found\n");
        return 0;
    }

    if (!bInitSuccess)
    {
        DEBUG_PRINT("The Application will exit\n");
        cleanUp();
        return 0;
    }

    /* Launch a file-reader and a streamer thread for every context
       (the tables were zeroed, so the first NULL entry ends the scan). */
    while (m_apStreamContext[i]!=NULL)
    {
        populateFileReaderEmptyQueue(m_apStreamContext[i]->iEmptyQid);
        startFileReader(m_apStreamContext[i]);
        startStreamer(m_apStreamContext[i]);
        i+=1;
    }

    i=0;
    /*join threads here until the end*/
    while (m_apStreamContext[i]!=NULL)
    {
        joinPosixThread(m_apStreamContext[i]->lStreamerId,NULL);
        joinPosixThread(m_apStreamContext[i]->lReaderId,NULL);
        i+=1;
    }

    cleanUp();
    return 0;
}
示例#14
0
/*
 * Opens a media URL with FFmpeg and prepares the demuxing context.
 *
 * param may carry a sub-track spec: "CUE:start:length" limits playback to a
 * range of the file; "OGG:track:offset:size" selects one link of a chained
 * OGG stream. Returns false for unsupported schemes, disabled demuxers,
 * malformed params, or any FFmpeg failure.
 *
 * NOTE(review): early returns leave formatCtx (and oggHelper/options)
 * allocated — presumably released by the class destructor; confirm.
 */
bool FormatContext::open(const QString &_url, const QString &param)
{
    static const QStringList disabledDemuxers {
        "ass",
        "tty", //txt files
        "srt",
    };

    const QByteArray scheme = Functions::getUrlScheme(_url).toUtf8();
    if (scheme.isEmpty() || scheme == "sftp")
        return false;

    const Settings &settings = QMPlay2Core.getSettings();

    artistWithTitle = !settings.getBool("HideArtistMetadata");

    bool limitedLength = false;
    qint64 oggOffset = -1, oggSize = -1;
    int oggTrack = -1;
    QString url;

    if (param.startsWith("CUE:")) //For CUE files
    {
        const QStringList splitted = param.split(':');
        if (splitted.count() != 3)
            return false;
        bool ok1 = false, ok2 = false;
        startTime = splitted[1].toDouble(&ok1);
        lengthToPlay = splitted[2].toDouble(&ok2);
        // Reject negative start, and any non-positive length except the
        // "unknown" sentinel -1.0.
        if (!ok1 || !ok2 || startTime < 0.0 || (!qFuzzyCompare(lengthToPlay, -1.0) && lengthToPlay <= 0.0))
            return false;
        if (lengthToPlay > 0.0)
            lengthToPlay -= startTime;
        limitedLength = true;
    }
    else if (param.startsWith("OGG:")) //For chained OGG files
    {
        const QStringList splitted = param.split(':');
        if (splitted.count() != 4)
            return false;
        oggTrack = splitted[1].toInt();
        oggOffset = splitted[2].toLongLong();
        oggSize = splitted[3].toLongLong();
        if (oggTrack <= 0 || oggOffset < 0 || (oggSize != -1 && oggSize <= 0))
            return false;
    }

    // Non-"file" schemes may name an FFmpeg input format directly
    // (e.g. "fmt://path"); strip "scheme://" from the URL in that case.
    AVInputFormat *inputFmt = nullptr;
    if (scheme == "file")
        isLocal = true;
    else
    {
        inputFmt = av_find_input_format(scheme);
        if (inputFmt)
            url = _url.right(_url.length() - scheme.length() - 3);
        isLocal = false;
    }

    AVDictionary *options = nullptr;
    if (!inputFmt)
    {
        url = Functions::prepareFFmpegUrl(_url, options);
        if (!isLocal && reconnectStreamed)
            av_dict_set(&options, "reconnect_streamed", "1", 0);
    }

    formatCtx = avformat_alloc_context();
    // Allow aborting blocking I/O via abortCtx.
    formatCtx->interrupt_callback.callback = (int(*)(void *))interruptCB;
    formatCtx->interrupt_callback.opaque = &abortCtx->isAborted;

    if (oggOffset >= 0)
    {
        // Chained OGG: read through a helper AVIOContext limited to one link.
        oggHelper = new OggHelper(url, oggTrack, oggSize, formatCtx->interrupt_callback);
        if (!oggHelper->pb)
            return false;
        formatCtx->pb = oggHelper->pb;
        av_dict_set(&options, "skip_initial_bytes", QString::number(oggOffset).toLatin1(), 0);
    }

    // Useful, e.g. CUVID decoder needs valid PTS
    formatCtx->flags |= AVFMT_FLAG_GENPTS;

    // avformat_open_input can block, so it runs on a separate thread that can
    // be abandoned on abort.
    OpenFmtCtxThr *openThr = new OpenFmtCtxThr(formatCtx, url.toUtf8(), inputFmt, options, abortCtx);
    formatCtx = openThr->getFormatCtx();
    openThr->drop();
    if (!formatCtx || disabledDemuxers.contains(name()))
        return false;

    if (name().startsWith("image2") || name().endsWith("_pipe"))
    {
        if (!settings.getBool("StillImages"))
            return false;
        stillImage = true;
    }

    if (name() == "mp3")
        formatCtx->flags |= AVFMT_FLAG_FAST_SEEK; //This should be set before "avformat_open_input", but seems to be working for MP3...

    if (avformat_find_stream_info(formatCtx, nullptr) < 0)
        return false;

    isStreamed = !isLocal && formatCtx->duration <= 0; //QMPLAY2_NOPTS_VALUE is negative

#ifdef QMPlay2_libavdevice
    forceCopy = name().contains("v4l2"); //Workaround for v4l2 - if many buffers are referenced demuxer doesn't produce proper timestamps (FFmpeg BUG?).
#else
    forceCopy = false;
#endif

    if (!limitedLength && (startTime = formatCtx->start_time / (double)AV_TIME_BASE) < 0.0)
        startTime = 0.0;

    if (limitedLength && lengthToPlay < 0.0)
    {
        lengthToPlay = length() - startTime;
        if (lengthToPlay <= 0.0)
            return false;
    }

    // Build the per-stream tables; index_map translates FFmpeg stream indices
    // to our streamsInfo indices (-1 for streams we don't handle).
    index_map.resize(formatCtx->nb_streams);
    streamsTS.resize(formatCtx->nb_streams);
    streamsOffset.resize(formatCtx->nb_streams);
    nextDts.resize(formatCtx->nb_streams);
    for (unsigned i = 0; i < formatCtx->nb_streams; ++i)
    {
        fixFontsAttachment(formatCtx->streams[i]);
        StreamInfo *streamInfo = getStreamInfo(formatCtx->streams[i]);
        if (!streamInfo)
            index_map[i] = -1;
        else
        {
            index_map[i] = streamsInfo.count();
            streamsInfo += streamInfo;
        }
        if (!fixMkvAss && formatCtx->streams[i]->codecpar->codec_id == AV_CODEC_ID_ASS && !strncasecmp(formatCtx->iformat->name, "matroska", 8))
            fixMkvAss = true;
        formatCtx->streams[i]->event_flags = 0;
        streams += formatCtx->streams[i];

        streamsTS[i] = 0.0;
    }
    if (streamsInfo.isEmpty())
        return false;

    isOneStreamOgg = (name() == "ogg" && streamsInfo.count() == 1); //Workaround for OGG network streams

    if (isStreamed && streamsInfo.count() == 1 && streamsInfo.at(0)->type == QMPLAY2_TYPE_SUBTITLE && formatCtx->pb && avio_size(formatCtx->pb) > 0)
        isStreamed = false; //Allow subtitles streams to be non-streamed if size is known

    formatCtx->event_flags = 0;

    packet = av_packet_alloc();

    // For CUE ranges, seek to the (relative) start position now.
    if (lengthToPlay > 0.0)
        return seek(0.0, false);
    return true;
}