/**
 * Open a chained RTP muxer for one stream.
 *
 * Creates a new AVFormatContext with the "rtp" output format, adds a single
 * stream that shares the codec context of @p st, attaches the I/O context
 * (either wrapping @p handle or a dynamic packet buffer of @p packet_size),
 * and writes the RTP header.
 *
 * @param s           source demuxer context; max_delay and
 *                    start_time_realtime are copied from it
 * @param st          source stream whose codec context the muxer borrows
 *                    (ownership stays with the caller)
 * @param handle      if non-NULL, URLContext to write packets to; otherwise
 *                    a dynamic packet buffer is used
 * @param packet_size size for the dynamic packet buffer (ignored if handle
 *                    is set)
 * @return the new muxer context, or NULL on any failure (everything
 *         allocated here is released on the error paths)
 */
AVFormatContext *ff_rtp_chain_mux_open(AVFormatContext *s, AVStream *st,
                                       URLContext *handle, int packet_size)
{
    AVFormatContext *rtpctx;
    int ret;
    AVOutputFormat *rtp_format = av_guess_format("rtp", NULL, NULL);

    if (!rtp_format)
        return NULL;

    /* Allocate an AVFormatContext for each output stream */
    rtpctx = avformat_alloc_context();
    if (!rtpctx)
        return NULL;

    rtpctx->oformat = rtp_format;
    if (!av_new_stream(rtpctx, 0)) {
        av_free(rtpctx);
        return NULL;
    }
    /* Copy the max delay setting; the rtp muxer reads this. */
    rtpctx->max_delay = s->max_delay;
    /* Copy other stream parameters. */
    rtpctx->streams[0]->sample_aspect_ratio = st->sample_aspect_ratio;
    /* Set the synchronized start time. */
    rtpctx->start_time_realtime = s->start_time_realtime;

    /* Remove the local codec, link to the original codec
     * context instead, to give the rtp muxer access to
     * codec parameters. */
    av_free(rtpctx->streams[0]->codec);
    rtpctx->streams[0]->codec = st->codec;

    if (handle)
        ret = url_fdopen(&rtpctx->pb, handle);
    else
        ret = url_open_dyn_packet_buf(&rtpctx->pb, packet_size);
    if (ret < 0) {
        /* No AVIOContext was set up, so there is nothing to close —
         * just release the stream struct and the muxer context.
         * (The codec context belongs to st and must not be freed.) */
        av_free(rtpctx->streams[0]);
        av_free(rtpctx);
        return NULL;
    }

    ret = av_write_header(rtpctx);
    if (ret) {
        if (handle) {
            url_fclose(rtpctx->pb);
        } else {
            /* Dynamic buffer: closing returns the accumulated data,
             * which we own and must free. */
            uint8_t *ptr;
            url_close_dyn_buf(rtpctx->pb, &ptr);
            av_free(ptr);
        }
        av_free(rtpctx->streams[0]);
        av_free(rtpctx);
        return NULL;
    }

    /* Copy the RTP AVStream timebase back to the original AVStream */
    st->time_base = rtpctx->streams[0]->time_base;
    return rtpctx;
}
// Open a file with a (possibly) Unicode filename int ufile_fopen(AVIOContext **s, const wxString & name, int flags) { wxString url(wxString(wxT(UFILE_PROTOCOL)) + wxT(":") + name); URLContext *h; int err; // Open the file using our custom protocol and passing the (possibly) Unicode // filename. We convert the name to UTF8 here and it will be converted back // to original encoding in ufile_open(). This allows us to support Unicode // filenames even though FFmpeg does not. err = url_open(&h, (const char *) url.ToUTF8(), flags); if (err < 0) { return err; } // Associate the file with a context err = url_fdopen(s, h); if (err < 0) { url_close(h); return err; } return 0; }
// Initialize the ASF demuxer: register FFmpeg, wire this object's I/O
// callbacks into a custom URLContext/ByteIOContext pair, prime the read
// buffer, and open the input as an "asf" stream.
//
// Returns RET_OK on success, RET_ERROR if the format cannot be opened,
// or RET_LOW_MEMORY if the temporary buffer allocation fails.
ULONG ASFDemuxer::Init()
{
    unsigned ret = RET_OK;

    av_register_all();

    // Route all FFmpeg I/O through this object's static callbacks.
    m_urlProtocol.url_open  = OpenASF;
    m_urlProtocol.url_read  = ReadASF;
    m_urlProtocol.url_close = CloseASF;

    m_urlCtx.flags           = URL_RDONLY;
    m_urlCtx.is_streamed     = 1;            // source is not seekable
    m_urlCtx.prot            = &m_urlProtocol;
    m_urlCtx.max_packet_size = 0;
    m_urlCtx.priv_data       = this;         // back-pointer for the callbacks

    memset(&m_byteCtx, 0, sizeof(m_byteCtx));
    // NOTE(review): return value of url_fdopen is not checked here —
    // presumably it cannot fail with a pre-built URLContext; verify.
    url_fdopen(&m_byteCtx, &m_urlCtx);

    BYTE *pBufTmp = new BYTE[m_byteCtx.buffer_size];
    if (pBufTmp)
    {
        // Prime the byte context by reading ahead (2048 bytes — assumed to
        // be enough for ASF probing; TODO confirm), then rewind the read
        // pointer so av_open_input_stream sees the data from the start.
        get_buffer(&m_byteCtx, pBufTmp, 2048);
        // BUGFIX: pBufTmp was allocated with new[], so it must be released
        // with delete[] — scalar delete on an array is undefined behavior.
        delete [] pBufTmp;
        m_byteCtx.buf_ptr = m_byteCtx.buffer;

        AVFormatParameters fmtParam;
        memset(&fmtParam, 0, sizeof(AVFormatParameters));
        fmtParam.initial_pause = 1; /* we force a pause when starting an RTSP stream */
        fmtParam.width         = 0;
        fmtParam.height        = 0;
        fmtParam.time_base.num = 1;
        fmtParam.time_base.den = 25;
        fmtParam.pix_fmt       = PIX_FMT_NONE;

        AVInputFormat* pInputFmt = av_find_input_format("asf");
        if (pInputFmt)
        {
            if (av_open_input_stream(&m_pFmtCtx, &m_byteCtx, "",
                                     pInputFmt, &fmtParam) == 0)
            {
                m_bInit = true;
            }
            else
            {
                ret = RET_ERROR;
            }
        }
        else
        {
            ret = RET_ERROR;
        }
    }
    else
    {
        // NOTE(review): plain new[] throws on failure rather than returning
        // NULL, so this branch is likely dead unless a nothrow allocator is
        // in effect — kept for compatibility with the original behavior.
        ret = RET_LOW_MEMORY;
    }

    return ret;
}