/*!	Tear down the encoder: close the codec, free the scaler context and the
	audio FIFO, and release the picture/frame storage this object owns.
*/
AVCodecEncoder::~AVCodecEncoder()
{
	TRACE("AVCodecEncoder::~AVCodecEncoder()\n");

	_CloseCodecIfNeeded();

	if (fSwsContext != NULL)
		sws_freeContext(fSwsContext);

	av_fifo_free(fAudioFifo);

	avpicture_free(&fDstFrame);
	// NOTE: Do not use avpicture_free() on fSrcFrame!! We fill the picture
	// data on the fly with the media buffer data passed to Encode().

	if (fFrame != NULL) {
		// Clear the borrowed plane pointers/strides before freeing the frame
		// struct itself, so no code path ever frees buffers we never owned.
		fFrame->data[0] = NULL;
		fFrame->data[1] = NULL;
		fFrame->data[2] = NULL;
		fFrame->data[3] = NULL;

		fFrame->linesize[0] = 0;
		fFrame->linesize[1] = 0;
		fFrame->linesize[2] = 0;
		fFrame->linesize[3] = 0;
		// fFrame was allocated with malloc-family, hence free() not delete.
		free(fFrame);
	}

	free(fOwnContext);

	delete[] fChunkBuffer;
}
/*
 * Tear down the audio decoder input: close the demuxer context, drain and
 * destroy the pending-packet list (under its mutex), then release the
 * sample fifo and, when built in, the resampler.
 */
void dc_audio_decoder_close(AudioInputFile *audio_input_file)
{
	avformat_close_input(&audio_input_file->av_fmt_ctx);

	if (audio_input_file->av_pkt_list_mutex) {
		gf_mx_p(audio_input_file->av_pkt_list_mutex);
		/* Free every packet still queued, newest first. */
		while (gf_list_count(audio_input_file->av_pkt_list) > 0) {
			AVPacket *queued = gf_list_last(audio_input_file->av_pkt_list);
			av_free_packet(queued);
			gf_list_rem_last(audio_input_file->av_pkt_list);
		}
		gf_list_del(audio_input_file->av_pkt_list);
		gf_mx_v(audio_input_file->av_pkt_list_mutex);
		gf_mx_del(audio_input_file->av_pkt_list_mutex);
	}

	av_fifo_free(audio_input_file->fifo);

#ifdef DC_AUDIO_RESAMPLER
	avresample_free(&audio_input_file->aresampler);
#endif
}
/* Free the fifo pointed to by *f and reset *f to NULL; a NULL f is a no-op. */
void av_fifo_freep(AVFifoBuffer **f)
{
    if (!f)
        return;
    av_fifo_free(*f);
    *f = NULL;
}
// Tear down the SDL audio output: signal any waiter, stop the callback, and
// release all SDL primitives plus the private state.
// NOTE(review): cut_audio is ignored here — shutdown is always immediate;
// confirm that matches the ao interface contract.
static void uninit(struct ao *ao, bool cut_audio)
{
    struct priv *priv = ao->priv;
    if (!priv)
        return;

    // abort the callback
    priv->paused = 1;

    if (SDL_WasInit(SDL_INIT_AUDIO)) {
        // Signal under the buffer lock — presumably wakes a thread blocked
        // on underrun_cond so it can observe paused and return; verify
        // against the play/callback code.
        if (priv->buffer_mutex)
            SDL_LockMutex(priv->buffer_mutex);
        if (priv->underrun_cond)
            SDL_CondSignal(priv->underrun_cond);
        if (priv->buffer_mutex)
            SDL_UnlockMutex(priv->buffer_mutex);
        // make sure the callback exits
        // (no matching SDL_UnlockAudio: the audio subsystem is quit below,
        // which destroys the callback entirely)
        SDL_LockAudio();
        // close audio device
        SDL_QuitSubSystem(SDL_INIT_AUDIO);
    }
    // get rid of the mutex
    if (priv->underrun_cond)
        SDL_DestroyCond(priv->underrun_cond);
    if (priv->buffer_mutex)
        SDL_DestroyMutex(priv->buffer_mutex);
    if (priv->buffer)
        av_fifo_free(priv->buffer);
    talloc_free(ao->priv);
    ao->priv = NULL;
}
/*
 * Exercise the AVFifoBuffer API: fill the fifo with ints, peek at every
 * element from both ends, then drain it while printing the values.
 * Returns 0 on success, 1 if the fifo cannot be allocated.
 */
int main(void)
{
    /* create a FIFO buffer */
    AVFifoBuffer *fifo = av_fifo_alloc(13 * sizeof(int));
    int i, j, n;

    /* Fix: av_fifo_alloc() returns NULL on allocation failure, and the
     * original dereferenced the fifo unconditionally. */
    if (!fifo)
        return 1;

    /* fill data */
    for (i = 0; av_fifo_space(fifo) >= sizeof(int); i++)
        av_fifo_generic_write(fifo, &i, sizeof(int), NULL);

    /* peek at FIFO: negative offsets index back from the write end */
    n = av_fifo_size(fifo) / sizeof(int);
    for (i = -n + 1; i < n; i++) {
        int *v = (int *)av_fifo_peek2(fifo, i * sizeof(int));
        printf("%d: %d\n", i, *v);
    }
    printf("\n");

    /* read data */
    for (i = 0; av_fifo_size(fifo) >= sizeof(int); i++) {
        av_fifo_generic_read(fifo, &j, sizeof(int), NULL);
        printf("%d ", j);
    }
    printf("\n");

    av_fifo_free(fifo);

    return 0;
}
/*
 * Close the QSV decoder: shut the MFX session, drop in-flight async
 * operations, free the frame pool and internal helpers. Always returns 0.
 */
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    /* Discard any decode operations still queued for synchronization. */
    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
        av_freep(&sync);
    }

    /* Release the work-frame pool, unlinking each node as we go. */
    for (cur = q->work_frames; cur; cur = q->work_frames) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    return 0;
}
/*
 * Finish the GXF mux: free per-stream audio buffers, record the frame count,
 * write the EOS packet, then rewrite the map and UMF packets at the start of
 * the file with their final values. Always returns 0.
 */
static int gxf_write_trailer(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    GXFContext *gxf = s->priv_data;
    int64_t end;
    int i;

    for (i = 0; i < s->nb_streams; ++i) {
        AVCodecContext *codec = s->streams[i]->codec;

        if (codec->codec_type == CODEC_TYPE_AUDIO)
            av_fifo_free(&gxf->streams[i].audio_buffer);
        else if (codec->codec_type == CODEC_TYPE_VIDEO)
            gxf->nb_frames = 2 * codec->frame_number;
    }

    gxf_write_eos_packet(pb, gxf);

    end = url_ftell(pb);
    url_fseek(pb, 0, SEEK_SET);
    /* overwrite map and umf packets with new values */
    gxf_write_map_packet(pb, gxf);
    //gxf_write_flt_packet(pb, gxf);
    gxf_write_umf_packet(pb, gxf);
    url_fseek(pb, end, SEEK_SET);

    return 0;
}
// Finalize the SWF mux: free the audio fifo, write the END tag, and — when
// the output is seekable and a video stream exists — back-patch the file
// size and frame counters at their recorded offsets.
static int swf_write_trailer(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVCodecContext *enc, *video_enc;
    int file_size, i;

    video_enc = NULL;
    for(i=0;i<s->nb_streams;i++) {
        enc = s->streams[i]->codec;
        if (enc->codec_type == CODEC_TYPE_VIDEO)
            video_enc = enc;
        else
            // NOTE(review): the shared audio fifo is freed once per non-video
            // stream; with more than one such stream this would double-free.
            // Presumably the muxer accepts at most one audio stream — confirm
            // against the header-writing code.
            av_fifo_free(&swf->audio_fifo);
    }

    put_swf_tag(s, TAG_END);
    put_swf_end_tag(s);

    put_flush_packet(s->pb);

    /* patch file size and number of frames if not streamed */
    if (!url_is_streamed(s->pb) && video_enc) {
        file_size = url_ftell(pb);
        url_fseek(pb, 4, SEEK_SET);
        put_le32(pb, file_size);
        url_fseek(pb, swf->duration_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, swf->vframes_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, file_size, SEEK_SET);
    }
    return 0;
}
// Finalize the SWF mux (codecpar-era API): free the audio fifo, emit the END
// tag, and back-patch file size and frame counts when the output is seekable.
static int swf_write_trailer(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVCodecParameters *par, *video_par;
    int file_size, i;

    video_par = NULL;
    for(i=0;i<s->nb_streams;i++) {
        par = s->streams[i]->codecpar;
        if (par->codec_type == AVMEDIA_TYPE_VIDEO)
            video_par = par;
        else
            // NOTE(review): the shared audio fifo is freed once per non-video
            // stream; more than one such stream would double-free. Presumably
            // the muxer accepts at most one audio stream — verify upstream.
            av_fifo_free(swf->audio_fifo);
    }

    put_swf_tag(s, TAG_END);
    put_swf_end_tag(s);

    /* patch file size and number of frames if not streamed */
    if ((s->pb->seekable & AVIO_SEEKABLE_NORMAL) && video_par) {
        file_size = avio_tell(pb);
        avio_seek(pb, 4, SEEK_SET);
        avio_wl32(pb, file_size);
        avio_seek(pb, swf->duration_pos, SEEK_SET);
        avio_wl16(pb, swf->video_frame_number);
        avio_seek(pb, swf->vframes_pos, SEEK_SET);
        avio_wl16(pb, swf->video_frame_number);
        avio_seek(pb, file_size, SEEK_SET);
    }
    return 0;
}
// Destructor: close the decoder, then release the scratch buffer and the
// fifo held by the av context (if either exists).
FFmpegDecoder::~FFmpegDecoder()
{
	Close();

	if (av != NULL) {
		if (av->temp_data != NULL)
			av_free(av->temp_data);
		if (av->fifo != NULL)
			av_fifo_free(av->fifo);
	}
}
/* Unref every packet still queued in the fifo, then free the fifo itself. */
static void free_pkt_fifo(AVFifoBuffer *fifo)
{
    while (av_fifo_size(fifo) > 0) {
        AVPacket queued;

        av_fifo_generic_read(fifo, &queued, sizeof(queued), NULL);
        av_free_packet(&queued);
    }
    av_fifo_free(fifo);
}
// close audio device static void uninit(int immed) { if (!immed) usec_sleep(get_delay() * 1000 * 1000); // HACK, make sure jack doesn't loop-output dirty buffers reset(); usec_sleep(100 * 1000); jack_client_close(client); av_fifo_free(buffer); buffer = NULL; }
/*
 * Close the QSV decoder and release every fifo owned by the context.
 * Always returns 0.
 */
int ff_qsv_decode_close(QSVContext *q)
{
    /* All queues owned by the context, freed in declaration order below. */
    AVFifoBuffer **fifos[] = { &q->async_fifo, &q->input_fifo, &q->pkt_fifo };
    int i;

    close_decoder(q);

    q->session = NULL;
    ff_qsv_close_internal_session(&q->internal_qs);

    for (i = 0; i < (int)(sizeof(fifos) / sizeof(fifos[0])); i++) {
        av_fifo_free(*fifos[i]);
        *fifos[i] = NULL;
    }

    return 0;
}
/* Free the interleave fifo of every audio stream in the muxer context. */
void ff_audio_interleave_close(AVFormatContext *s)
{
    int i;

    for (i = 0; i < s->nb_streams; i++) {
        AVStream *stream = s->streams[i];
        AudioInterleaveContext *aic = stream->priv_data;

        if (stream->codecpar->codec_type != AVMEDIA_TYPE_AUDIO)
            continue;
        av_fifo_free(aic->fifo);
    }
}
/* Filter teardown: unref every buffered ref, then free the fifo. */
static av_cold void uninit(AVFilterContext *ctx)
{
    BufferSinkContext *sink = ctx->priv;
    AVFilterBufferRef *queued;

    while (sink->fifo && av_fifo_size(sink->fifo) > 0) {
        av_fifo_generic_read(sink->fifo, &queued, sizeof(queued), NULL);
        avfilter_unref_buffer(queued);
    }
    av_fifo_free(sink->fifo);
}
/* Close the UDP protocol handler; always returns 0. */
static int udp_close(URLContext *h)
{
    UDPContext *s = h->priv_data;

    /* Leave the multicast group before tearing the socket down. */
    if (s->is_multicast && (h->flags & AVIO_FLAG_READ))
        udp_leave_multicast_group(s->udp_fd, (struct sockaddr *)&s->dest_addr);

    closesocket(s->udp_fd);

    av_fifo_free(s->fifo);
    av_free(s);
    return 0;
}
/* Source teardown: free every frame still queued, then drop the fifo. */
static av_cold void uninit(AVFilterContext *ctx)
{
    BufferSourceContext *src = ctx->priv;
    AVFrame *queued;

    while (src->fifo && av_fifo_size(src->fifo) > 0) {
        av_fifo_generic_read(src->fifo, &queued, sizeof(queued), NULL);
        av_frame_free(&queued);
    }
    av_fifo_free(src->fifo);
    src->fifo = NULL;
}
/* Filter teardown: flush the fifo (buffered frames count as dropped) and
 * log the in/out/drop/dup statistics. */
static av_cold void uninit(AVFilterContext *ctx)
{
    FPSContext *fps = ctx->priv;

    if (fps->fifo) {
        /* Everything still buffered at teardown counts as dropped. */
        fps->drop += av_fifo_size(fps->fifo) / sizeof(AVFilterBufferRef*);
        flush_fifo(fps->fifo);
        av_fifo_free(fps->fifo);
    }

    av_log(ctx, AV_LOG_VERBOSE, "%d frames in, %d frames out; %d frames dropped, "
           "%d frames duplicated.\n",
           fps->frames_in, fps->frames_out, fps->drop, fps->dup);
}
/* Source teardown: unref queued buffer refs, free the fifo, then release
 * the helper scale filter. */
static av_cold void uninit(AVFilterContext *ctx)
{
    BufferSourceContext *src = ctx->priv;
    AVFilterBufferRef *queued;

    while (src->fifo && av_fifo_size(src->fifo) > 0) {
        av_fifo_generic_read(src->fifo, &queued, sizeof(queued), NULL);
        avfilter_unref_buffer(queued);
    }
    av_fifo_free(src->fifo);
    src->fifo = NULL;

    avfilter_free(src->scale);
    src->scale = NULL;
}
// Free all decoder state: the MediaCodec wrapper, the buffered-packet fifo,
// the bitstream filter, and the in-flight filtered packet. Always returns 0.
static av_cold int mediacodec_decode_close(AVCodecContext *avctx)
{
    MediaCodecH264DecContext *s = avctx->priv_data;

    ff_mediacodec_dec_close(avctx, &s->ctx);

    // NOTE(review): the fifo is freed without draining; if it can still hold
    // owned AVPacket entries at close time they would leak — confirm the
    // fifo is always empty here, or unref its contents before freeing.
    av_fifo_free(s->fifo);

    av_bsf_free(&s->bsf);
    av_packet_unref(&s->filtered_pkt);

    return 0;
}
/* Filter teardown: free the select expression and every pending frame,
 * then release the fifo. */
static av_cold void uninit(AVFilterContext *ctx)
{
    SelectContext *select = ctx->priv;
    AVFrame *frame;

    av_expr_free(select->expr);
    select->expr = NULL;

    /* Drain held-back frames; stop once a read no longer yields a full
     * sizeof(frame) worth of data. */
    if (select->pending_frames) {
        while (av_fifo_generic_read(select->pending_frames, &frame,
                                    sizeof(frame), NULL) == sizeof(frame))
            av_frame_free(&frame);
    }
    av_fifo_free(select->pending_frames);
    select->pending_frames = NULL;
}
// close audio device static void uninit(int immed){ mp_msg(MSGT_AO,MSGL_V,"SDL: Audio Subsystem shutting down!\n"); if (!immed) usec_sleep(get_delay() * 1000 * 1000); #ifdef _WIN32 if (!hSDL) return; #endif SDL_CloseAudio(); SDL_QuitSubSystem(SDL_INIT_AUDIO); av_fifo_free(buffer); #ifdef _WIN32 FreeLibrary(hSDL); #endif }
/* Close the UDP protocol handler (threaded variant); always returns 0. */
static int udp_close(URLContext *h)
{
    UDPContext *ctx = h->priv_data;

    /* Leave the multicast group before the socket goes away. */
    if (ctx->is_multicast && (h->flags & AVIO_FLAG_READ))
        udp_leave_multicast_group(ctx->udp_fd, (struct sockaddr *)&ctx->dest_addr);

    closesocket(ctx->udp_fd);

    av_fifo_free(ctx->fifo);
#if HAVE_PTHREADS
    pthread_mutex_destroy(&ctx->mutex);
    pthread_cond_destroy(&ctx->cond);
#endif
    return 0;
}
/* Filter teardown: free the select expression and every pending picture
 * reference, then release the fifo. */
static av_cold void uninit(AVFilterContext *ctx)
{
    SelectContext *select = ctx->priv;
    AVFilterBufferRef *picref;

    av_expr_free(select->expr);
    select->expr = NULL;

    /* Drain held-back refs; stop once a read no longer yields a full
     * sizeof(picref) worth of data. */
    if (select->pending_frames) {
        while (av_fifo_generic_read(select->pending_frames, &picref,
                                    sizeof(picref), NULL) == sizeof(picref))
            avfilter_unref_buffer(picref);
    }
    av_fifo_free(select->pending_frames);
    select->pending_frames = NULL;
}
/* Parser teardown: release the header-marker list, the raw-data fifo and
 * the wrap buffer. */
static void flac_parse_close(AVCodecParserContext *c)
{
    FLACParseContext *fpc = c->priv_data;
    FLACHeaderMarker *node, *next;

    /* Walk the list, freeing each node and its penalty array. */
    for (node = fpc->headers; node; node = next) {
        next = node->next;
        av_freep(&node->link_penalty);
        av_free(node);
    }

    av_fifo_free(fpc->fifo_buf);
    av_free(fpc->wrap_buf);
}
/* Shared sink teardown: unref every queued picture ref, then free the fifo
 * and clear the pointer. */
static av_cold void common_uninit(AVFilterContext *ctx)
{
    BufferSinkContext *buf = ctx->priv;
    AVFilterBufferRef *picref;

    if (!buf->fifo)
        return;

    while (av_fifo_size(buf->fifo) >= sizeof(AVFilterBufferRef *)) {
        av_fifo_generic_read(buf->fifo, &picref, sizeof(picref), NULL);
        avfilter_unref_buffer(picref);
    }
    av_fifo_free(buf->fifo);
    buf->fifo = NULL;
}
/* Free an AVAudioFifo and all of its per-channel buffers; NULL is a no-op. */
void av_audio_fifo_free(AVAudioFifo *af)
{
    int i;

    if (!af)
        return;

    if (af->buf) {
        for (i = 0; i < af->nb_buffers; i++) {
            if (af->buf[i])
                av_fifo_free(af->buf[i]);
        }
        av_freep(&af->buf);
    }
    av_free(af);
}
/* Filter teardown: free the select expression, release every pending
 * picture reference, then free the fifo. */
static av_cold void uninit(AVFilterContext *ctx)
{
    SelectContext *select = ctx->priv;
    AVFilterBufferRef *picref;

    av_expr_free(select->expr);
    select->expr = NULL;

    /* Fix: the original for-loop compared a growing index against
     * av_fifo_size()/sizeof(picref) re-evaluated each iteration — the size
     * shrinks with every read, so the loop stopped after draining only
     * about half the queue and leaked the remaining refs.  Also guard
     * against pending_frames == NULL (failed init), matching the sibling
     * select uninit implementations. */
    while (select->pending_frames &&
           av_fifo_size(select->pending_frames) >= sizeof(picref)) {
        av_fifo_generic_read(select->pending_frames, &picref, sizeof(picref), NULL);
        avfilter_unref_buffer(picref);
    }
    av_fifo_free(select->pending_frames);
    select->pending_frames = NULL;
}
int CacheHttp_Close(void * handle) { if(!handle) return AVERROR(EIO); CacheHttpContext * s = (CacheHttpContext *)handle; s->EXIT = 1; ffmpeg_pthread_join(s->circular_buffer_thread, NULL); av_log(NULL,AV_LOG_DEBUG,"-----------%s:%d\n",__FUNCTION__,__LINE__); if(s->fifo) { av_fifo_free(s->fifo); } pthread_mutex_destroy(&s->read_mutex); bandwidth_measure_free(s->bandwidth_measure); return 0; }
/*
 * Release the audio encoder output: the sample fifo, the encoded-data
 * buffer, the frame, and finally the codec context itself.
 */
void dc_audio_encoder_close(AudioOutputFile * p_aoutf)
{
	av_fifo_free(p_aoutf->p_fifo);
	av_free(p_aoutf->p_adata_buf);
	av_free(p_aoutf->p_aframe);

	/* Close the codec before freeing its context. */
	avcodec_close(p_aoutf->p_codec_ctx);
	av_free(p_aoutf->p_codec_ctx);
}