/* Release every per-instance allocation owned by the CUDA scale filter. */
static av_cold void cudascale_uninit(AVFilterContext *ctx)
{
    CUDAScaleContext *s = ctx->priv;

    /* The three members are independent; free the frames first, then
     * the hardware frames context reference. */
    av_frame_free(&s->frame);
    av_frame_free(&s->tmp_frame);
    av_buffer_unref(&s->frames_ctx);
}
/* Wipe the packet: free all side data, drop the data buffer reference,
 * and reset every field to its default value. */
void av_packet_unref(AVPacket *pkt)
{
    av_packet_free_side_data(pkt);
    av_buffer_unref(&pkt->buf);
    av_init_packet(pkt);
    pkt->size = 0;
    pkt->data = NULL;
}
/* Free one side-data entry (its buffer and metadata) and NULL the slot. */
static void free_side_data(AVFrameSideData **ptr_sd)
{
    AVFrameSideData *entry = *ptr_sd;

    av_buffer_unref(&entry->buf);
    av_dict_free(&entry->metadata);
    av_freep(ptr_sd);
}
/* MMAL input-port callback: for data buffers (not control commands),
 * drop our reference to the backing AVBuffer stored in user_data, then
 * always hand the buffer header back to MMAL. */
static void input_callback(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer)
{
    if (buffer->cmd == 0) {
        AVBufferRef *ref = buffer->user_data;
        av_buffer_unref(&ref);
    }
    mmal_buffer_header_release(buffer);
}
/* Tear down the VAAPI scaling pipeline: destroy the VA context and
 * config (if created), drop the output-frames and device references,
 * and clear the cached hwctx pointer.  Safe to call on a partially
 * initialized context.  Always returns 0. */
static int scale_vaapi_pipeline_uninit(ScaleVAAPIContext *ctx)
{
    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->output_frames_ref);
    av_buffer_unref(&ctx->device_ref);
    /* Fix: assign NULL to a pointer, not the integer literal 0. */
    ctx->hwctx = NULL;

    return 0;
}
/*
 * Allocate an AVHWFramesContext tied to the given device.
 *
 * @param device_ref_in  reference to an AVHWDeviceContext; a new
 *                       reference is taken, the caller keeps ownership.
 * @return a new AVBufferRef whose data is the AVHWFramesContext, or
 *         NULL on allocation failure.
 */
AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
{
    AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref_in->data;
    const HWContextType  *hw_type = device_ctx->internal->hw_type;
    AVHWFramesContext *ctx;
    AVBufferRef *buf, *device_ref = NULL; /* fixed stray ";;" */

    ctx = av_mallocz(sizeof(*ctx));
    if (!ctx)
        return NULL;

    ctx->internal = av_mallocz(sizeof(*ctx->internal));
    if (!ctx->internal)
        goto fail;

    if (hw_type->frames_priv_size) {
        ctx->internal->priv = av_mallocz(hw_type->frames_priv_size);
        if (!ctx->internal->priv)
            goto fail;
    }

    if (hw_type->frames_hwctx_size) {
        ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
        if (!ctx->hwctx)
            goto fail;
    }

    device_ref = av_buffer_ref(device_ref_in);
    if (!device_ref)
        goto fail;

    /* The buffer owns ctx; hwframe_ctx_free releases everything on unref. */
    buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
                           hwframe_ctx_free, NULL,
                           AV_BUFFER_FLAG_READONLY);
    if (!buf)
        goto fail;

    ctx->av_class   = &hwframe_ctx_class;
    ctx->device_ref = device_ref;
    ctx->device_ctx = device_ctx;
    ctx->format     = AV_PIX_FMT_NONE;
    ctx->sw_format  = AV_PIX_FMT_NONE;

    ctx->internal->hw_type = hw_type;

    return buf;

fail:
    /* av_buffer_unref() is a no-op when the reference is NULL, so the
     * former `if (device_ref)` guard was redundant. */
    av_buffer_unref(&device_ref);
    if (ctx->internal)
        av_freep(&ctx->internal->priv);
    av_freep(&ctx->internal);
    av_freep(&ctx->hwctx);
    av_freep(&ctx);
    return NULL;
}
/* Unreference all buffers held by the frame and reset it to its default
 * state.  A NULL frame is a no-op (robustness fix, matching the other
 * av_frame_unref variant in this file). */
void av_frame_unref(AVFrame *frame)
{
    int i;

    if (!frame)
        return;

    /* Free each side-data entry: its payload, metadata and the entry
     * struct itself, then the array of pointers. */
    for (i = 0; i < frame->nb_side_data; i++) {
        av_freep(&frame->side_data[i]->data);
        av_dict_free(&frame->side_data[i]->metadata);
        av_freep(&frame->side_data[i]);
    }
    av_freep(&frame->side_data);

    for (i = 0; i < FF_ARRAY_ELEMS(frame->buf); i++)
        av_buffer_unref(&frame->buf[i]);
    for (i = 0; i < frame->nb_extended_buf; i++)
        av_buffer_unref(&frame->extended_buf[i]);
    av_freep(&frame->extended_buf);

    get_frame_defaults(frame);
}
/*
 * JNI: replace entry idx of frame->extended_buf.  Drops the reference
 * currently stored in the slot; if refPtr is non-zero, installs a new
 * reference to the AVBufferRef it designates.
 */
JNIEXPORT void JNICALL Java_bits_jav_codec_JavFrame_nExtendedBufElem__JIJ
    (JNIEnv *env, jclass clazz, jlong pointer, jint idx, jlong refPtr)
{
    AVFrame *frame = *(AVFrame**)&pointer;

    av_buffer_unref(&frame->extended_buf[idx]);
    if (refPtr != 0) {
        AVBufferRef *src = *(AVBufferRef**)&refPtr;
        frame->extended_buf[idx] = av_buffer_ref(src);
    }
}
/* Copy the contents of buf into a newly created side-data entry of the
 * given type.  Consumes the caller's reference to buf on every path.
 * Returns the new side data, or NULL when allocation failed. */
AVFrameSideData *ffmpeg_garbage(AVFrame *frame,
                                enum AVFrameSideDataType type,
                                AVBufferRef *buf)
{
    AVFrameSideData *sd = av_frame_new_side_data(frame, type, buf->size);

    if (sd != NULL)
        memcpy(sd->data, buf->data, buf->size);

    av_buffer_unref(&buf);
    return sd;
}
/* Release VDPAU decoding state: drop the hardware frames context held
 * by the VDPAU context and free the hwaccel context attached to the
 * codec context. */
static void vdpau_uninit(AVCodecContext *s)
{
    VDPAUContext *ctx = s->opaque;

    av_buffer_unref(&ctx->hw_frames_ctx);
    av_freep(&s->hwaccel_context);
}
/* Unregister this hwdec from the device list, drop the libav device
 * reference, and release the D3D9 device if one was created. */
static void uninit(struct ra_hwdec *hw)
{
    struct priv_owner *p = hw->priv;

    hwdec_devices_remove(hw->devs, &p->hwctx);
    av_buffer_unref(&p->hwctx.av_device_ref);

    if (p->device != NULL)
        IDirect3DDevice9Ex_Release(p->device);
}
/* Select or (re)create the MFX session used for decoding.
 *
 * Three paths:
 *  - a caller-provided session is used as-is;
 *  - with a hw frames context, the internal session is rebuilt bound to
 *    that context (a new reference to hw_frames_ref is taken);
 *  - otherwise a plain internal software session is created lazily.
 *
 * Returns 0 on success or a negative AVERROR code. */
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
                            AVBufferRef *hw_frames_ref)
{
    int ret;

    if (session) {
        /* Caller supplied a ready session: adopt it directly. */
        q->session = session;
    } else if (hw_frames_ref) {
        /* Hardware path: close any previous internal session before
         * binding a fresh one to the new frames context. */
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }
        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);

        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session,
                                            &q->frames_ctx, q->load_plugins,
                                            q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
        if (ret < 0) {
            /* Undo the reference taken above on failure. */
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else {
        /* Software path: create the internal session only once. */
        if (!q->internal_session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                               q->load_plugins);
            if (ret < 0)
                return ret;
        }

        q->session = q->internal_session;
    }

    /* make sure the decoder is uninitialized */
    MFXVideoDECODE_Close(q->session);

    return 0;
}
/* Drop every reference the frame holds (side data, plane buffers,
 * extended buffers, metadata, QP table) and restore default field
 * values.  Passing NULL is a no-op. */
void av_frame_unref(AVFrame *frame)
{
    int i;

    if (frame == NULL)
        return;

    wipe_side_data(frame);

    for (i = 0; i < FF_ARRAY_ELEMS(frame->buf); i++)
        av_buffer_unref(&frame->buf[i]);
    for (i = 0; i < frame->nb_extended_buf; i++)
        av_buffer_unref(&frame->extended_buf[i]);
    av_freep(&frame->extended_buf);

    av_dict_free(&frame->metadata);
    av_buffer_unref(&frame->qp_table_buf);

    get_frame_defaults(frame);
}
// Tear down the D3D11 decoder.  bFull additionally releases the device
// context and unloads the d3d11/dxgi runtime libraries; bNoAVCodec
// skips destroying the underlying avcodec decoder state.
STDMETHODIMP CDecD3D11::DestroyDecoder(bool bFull, bool bNoAVCodec)
{
  // Drop any frames still waiting in the output queue.
  for (int i = 0; i < D3D11_QUEUE_SURFACES; i++) {
    ReleaseFrame(&m_FrameQueue[i]);
  }

  // Release every output view before freeing the array that holds them.
  if (m_pOutputViews) {
    for (int i = 0; i < m_nOutputViews; i++) {
      SafeRelease(&m_pOutputViews[i]);
    }
    av_freep(&m_pOutputViews);
    m_nOutputViews = 0;
  }

  SafeRelease(&m_pDecoder);
  SafeRelease(&m_pD3D11StagingTexture);
  av_buffer_unref(&m_pFramesCtx);

  if (!bNoAVCodec) {
    CDecAvcodec::DestroyDecoder();
  }

  // Full teardown: drop the device context and unload the libraries
  // last, after everything that depends on them is gone.
  if (bFull) {
    av_buffer_unref(&m_pDevCtx);

    if (dx.d3d11lib) {
      FreeLibrary(dx.d3d11lib);
      dx.d3d11lib = nullptr;
    }

    if (dx.dxgilib) {
      FreeLibrary(dx.dxgilib);
      dx.dxgilib = nullptr;
    }
  }

  return S_OK;
}
static void vs_video_unref_buffer_handler ( void *opaque, uint8_t *data ) { /* Decrement the reference-counter to the video buffer handler by 1. * Delete it by vs_video_release_buffer_handler() if there are no reference to it i.e. the reference-counter equals zero. */ AVBufferRef *vs_buffer_ref = (AVBufferRef *)opaque; av_buffer_unref( &vs_buffer_ref ); }
/* Release all references held by an H.264 picture and clear its state.
 * Fields are released individually up to and including `tf`; everything
 * after `tf` in the struct layout is then zeroed in one memset, so the
 * computed offset depends on the H264Picture field order. */
void ff_h264_unref_picture(H264Context *h, H264Picture *pic)
{
    /* byte offset of the first field after tf — start of the memset region */
    int off = offsetof(H264Picture, tf) + sizeof(pic->tf);
    int i;

    if (!pic->f.buf[0])
        return; /* picture holds no data: nothing to release */

    ff_thread_release_buffer(h->avctx, &pic->tf);
    av_buffer_unref(&pic->hwaccel_priv_buf);

    av_buffer_unref(&pic->qscale_table_buf);
    av_buffer_unref(&pic->mb_type_buf);
    /* per-field (top/bottom) motion vectors and reference indices */
    for (i = 0; i < 2; i++) {
        av_buffer_unref(&pic->motion_val_buf[i]);
        av_buffer_unref(&pic->ref_index_buf[i]);
    }

    memset((uint8_t*)pic + off, 0, sizeof(*pic) - off);
}
/* Free the packet's data buffer and side data, and clear the data/size
 * fields.  NULL packets are ignored.  av_buffer_unref() is itself
 * NULL-safe, so the former `if (pkt->buf)` guard was redundant and has
 * been removed. */
void av_free_packet(AVPacket *pkt)
{
    if (!pkt)
        return;

    av_buffer_unref(&pkt->buf); /* no-op when pkt->buf is already NULL */
    pkt->data = NULL;
    pkt->size = 0;

    av_packet_free_side_data(pkt);
}
/* AVHWDeviceContext free callback for QSV devices: close the MFX
 * session and release the private child-device state. */
static void qsv_device_free(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv      *priv  = ctx->user_opaque;

    if (hwctx->session != NULL)
        MFXClose(hwctx->session);

    av_buffer_unref(&priv->child_device_ctx);
    av_freep(&priv);
}
/* Filter uninit: drain and free every frame still queued in the FIFO,
 * then drop the hardware frames context and the FIFO itself. */
static av_cold void uninit(AVFilterContext *ctx)
{
    BufferSourceContext *s = ctx->priv;

    if (s->fifo) {
        while (av_fifo_size(s->fifo)) {
            AVFrame *queued;
            av_fifo_generic_read(s->fifo, &queued, sizeof(queued), NULL);
            av_frame_free(&queued);
        }
    }

    av_buffer_unref(&s->hw_frames_ctx);
    av_fifo_freep(&s->fifo);
}
/* Tear down DXVA2 hwaccel state for a stream: clear the hwaccel hooks,
 * release the decoder service, drop the frames/device contexts and the
 * temporary frame, then free both context allocations. */
static void dxva2_uninit(AVCodecContext *s)
{
    InputStream  *ist = s->opaque;
    DXVA2Context *ctx = ist->hwaccel_ctx;

    ist->hwaccel_uninit        = NULL;
    ist->hwaccel_get_buffer    = NULL;
    ist->hwaccel_retrieve_data = NULL;

    if (ctx->decoder_service)
        IDirectXVideoDecoderService_Release(ctx->decoder_service);

    av_buffer_unref(&ctx->hw_frames_ctx);
    av_buffer_unref(&ctx->hw_device_ctx);
    av_frame_free(&ctx->tmp_frame);

    av_freep(&ist->hwaccel_ctx);
    av_freep(&s->hwaccel_context);
}
FF_DISABLE_DEPRECATION_WARNINGS
/* Deprecated packet cleanup: release the data buffer (if any), clear
 * the data/size fields, and free all side data.  NULL packets are
 * ignored.  Wrapped in deprecation-warning suppression because it
 * touches deprecated AVPacket API.
 * NOTE(review): the matching FF_ENABLE_DEPRECATION_WARNINGS is not
 * visible here — presumably it follows elsewhere; confirm. */
void av_free_packet(AVPacket *pkt)
{
    if (pkt) {
        if (pkt->buf)
            av_buffer_unref(&pkt->buf);
        pkt->data = NULL;
        pkt->size = 0;

        av_packet_free_side_data(pkt);
    }
}
/* Attach buf as the frame's QP table, replacing any previous table.
 * Ownership of buf transfers to the frame.  Always returns 0. */
int av_frame_set_qp_table(AVFrame *f, AVBufferRef *buf, int stride, int qp_type)
{
    av_buffer_unref(&f->qp_table_buf);

    f->qp_table_buf = buf;
    f->qscale_table = buf->data;
    f->qstride      = stride;
    f->qscale_type  = qp_type;

    return 0;
}
/* Allocate a new hardware frame from the frames context.
 *
 * For a derived frames context the frame is allocated in the source
 * context and mapped; otherwise the type-specific frames_get_buffer
 * callback is used.  Returns 0 on success or a negative AVERROR code.
 *
 * Fix: the recursive-allocation error path previously leaked src_frame;
 * it is now freed before returning. */
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
    int ret;

    if (ctx->internal->source_frames) {
        // This is a derived frame context, so we allocate in the source
        // and map the frame immediately.
        AVFrame *src_frame;

        src_frame = av_frame_alloc();
        if (!src_frame)
            return AVERROR(ENOMEM);

        ret = av_hwframe_get_buffer(ctx->internal->source_frames,
                                    src_frame, 0);
        if (ret < 0) {
            av_frame_free(&src_frame); /* was leaked here before */
            return ret;
        }

        ret = av_hwframe_map(frame, src_frame, 0);
        if (ret) {
            av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
                   "frame context: %d.\n", ret);
            av_frame_free(&src_frame);
            return ret;
        }

        // Free the source frame immediately - the mapped frame still
        // contains a reference to it.
        av_frame_free(&src_frame);
        return 0;
    }

    if (!ctx->internal->hw_type->frames_get_buffer)
        return AVERROR(ENOSYS);

    if (!ctx->pool)
        return AVERROR(EINVAL);

    frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!frame->hw_frames_ctx)
        return AVERROR(ENOMEM);

    ret = ctx->internal->hw_type->frames_get_buffer(ctx, frame);
    if (ret < 0) {
        av_buffer_unref(&frame->hw_frames_ctx);
        return ret;
    }

    return 0;
}
/* Create a mapping descriptor linking dst to src within the given
 * frames context.  On success dst->buf[0] owns the descriptor and
 * ff_hwframe_unmap runs when it is freed; on failure everything the
 * descriptor acquired is released here.  Returns 0 or a negative
 * AVERROR code. */
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst,
                          const AVFrame *src,
                          void (*unmap)(AVHWFramesContext *ctx,
                                        HWMapDescriptor *hwmap),
                          void *priv)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
    HWMapDescriptor *hwmap;
    int ret;

    hwmap = av_mallocz(sizeof(*hwmap));
    if (!hwmap) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* Keep a reference to the source frame so it outlives the mapping. */
    hwmap->source = av_frame_alloc();
    if (!hwmap->source) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    ret = av_frame_ref(hwmap->source, src);
    if (ret < 0)
        goto fail;

    /* Also pin the frames context for the lifetime of the mapping. */
    hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
    if (!hwmap->hw_frames_ctx) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    hwmap->unmap = unmap;
    hwmap->priv  = priv;

    dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
                                   &ff_hwframe_unmap, ctx, 0);
    if (!dst->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    return 0;

fail:
    /* hwmap may be NULL only if the first allocation failed; otherwise
     * release whatever the descriptor managed to acquire. */
    if (hwmap) {
        av_buffer_unref(&hwmap->hw_frames_ctx);
        av_frame_free(&hwmap->source);
    }
    av_free(hwmap);
    return ret;
}
/* Allocate the plane buffers for a video frame according to its format,
 * width, height and the requested alignment.  Returns 0 on success or a
 * negative AVERROR code (all partial allocations are released). */
static int get_video_buffer(AVFrame *frame, int align)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    int ret, i;

    if (!desc)
        return AVERROR(EINVAL);

    if ((ret = av_image_check_size(frame->width, frame->height, 0, NULL)) < 0)
        return ret;

    if (!frame->linesize[0]) {
        /* Probe power-of-two width paddings until the first linesize
         * satisfies the requested alignment, then align every plane. */
        for(i=1; i<=align; i+=i) {
            ret = av_image_fill_linesizes(frame->linesize, frame->format,
                                          FFALIGN(frame->width, i));
            if (ret < 0)
                return ret;
            if (!(frame->linesize[0] & (align-1)))
                break;
        }

        for (i = 0; i < 4 && frame->linesize[i]; i++)
            frame->linesize[i] = FFALIGN(frame->linesize[i], align);
    }

    for (i = 0; i < 4 && frame->linesize[i]; i++) {
        /* Height is rounded up to 32 lines; chroma planes (1, 2) are
         * shifted down by the format's chroma subsampling. */
        int h = FFALIGN(frame->height, 32);
        if (i == 1 || i == 2)
            h = FF_CEIL_RSHIFT(h, desc->log2_chroma_h);

        /* Extra bytes allow for stride-aligned access past the end. */
        frame->buf[i] = av_buffer_alloc(frame->linesize[i] * h + 16 + 16/*STRIDE_ALIGN*/ - 1);
        if (!frame->buf[i])
            goto fail;

        frame->data[i] = frame->buf[i]->data;
    }
    if (desc->flags & AV_PIX_FMT_FLAG_PAL || desc->flags & AV_PIX_FMT_FLAG_PSEUDOPAL) {
        /* Paletted formats carry a 1024-byte (256 * 4) palette as plane 1,
         * replacing whatever was allocated above. */
        av_buffer_unref(&frame->buf[1]);
        frame->buf[1] = av_buffer_alloc(1024);
        if (!frame->buf[1])
            goto fail;
        frame->data[1] = frame->buf[1]->data;
    }

    frame->extended_data = frame->data;

    return 0;
fail:
    av_frame_unref(frame);
    return AVERROR(ENOMEM);
}
/* Close the QSV decoder: shut the MFX sessions, drain the async FIFO,
 * free all work frames, the parser and the internal codec context, and
 * drop the frames-context references.  Always returns 0. */
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    /* Drain pending async operations; the frames themselves belong to
     * the work_frames list and are freed below. */
    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);

        av_freep(&sync);
    }

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    /* Consistency fix: use av_fifo_freep() (free + NULL in one call)
     * instead of av_fifo_free() followed by a manual NULL assignment. */
    av_fifo_freep(&q->async_fifo);

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
    av_buffer_unref(&q->frames_ctx.mids_buf);

    return 0;
}
/* Buffer free callback that tears down a hardware frame mapping: run
 * the format-specific unmap hook (if set), drop the retained source
 * frame and frames-context reference, then free the descriptor. */
static void ff_hwframe_unmap(void *opaque, uint8_t *data)
{
    AVHWFramesContext *ctx   = opaque;
    HWMapDescriptor   *hwmap = (HWMapDescriptor *)data;

    if (hwmap->unmap != NULL)
        hwmap->unmap(ctx, hwmap);

    av_frame_free(&hwmap->source);
    av_buffer_unref(&hwmap->hw_frames_ctx);

    av_free(hwmap);
}
/* MMAL input-port callback for the decoder.  For data buffers (not
 * control commands) the associated FFBufferEntry is released: its
 * AVBuffer reference is dropped, the buffered-packet counter is
 * decremented on frame-end entries, and the entry is freed.  The MMAL
 * buffer header is always returned to the pool. */
static void input_callback(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer)
{
    AVCodecContext *avctx  = (AVCodecContext*)port->userdata;
    MMALDecodeContext *ctx = avctx->priv_data;

    if (buffer->cmd == 0) {
        FFBufferEntry *entry = buffer->user_data;

        av_buffer_unref(&entry->ref);
        if (entry->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END)
            avpriv_atomic_int_add_and_fetch(&ctx->packets_buffered, -1);
        av_free(entry);
    }

    mmal_buffer_header_release(buffer);
}
/* Free callback for FrameDecodeData: release the user opaque reference,
 * invoke the post-process and hwaccel cleanup hooks when present, and
 * free the structure itself. */
static void decode_data_free(void *opaque, uint8_t *data)
{
    FrameDecodeData *fdd = (FrameDecodeData *)data;

    av_buffer_unref(&fdd->user_opaque_ref);

    if (fdd->post_process_opaque_free)
        fdd->post_process_opaque_free(fdd->post_process_opaque);
    if (fdd->hwaccel_priv_free)
        fdd->hwaccel_priv_free(fdd->hwaccel_priv);

    av_freep(&fdd);
}
/* Attach buf as the frame's QP table, replacing any previous table.
 * Ownership of buf transfers to the frame.  The qscale_* fields are
 * deprecated, hence the warning-suppression wrappers.  Always
 * returns 0. */
int av_frame_set_qp_table(AVFrame *f, AVBufferRef *buf, int stride, int qp_type)
{
    av_buffer_unref(&f->qp_table_buf);

    f->qp_table_buf = buf;

FF_DISABLE_DEPRECATION_WARNINGS
    f->qscale_table = buf->data;
    f->qstride = stride;
    f->qscale_type = qp_type;
FF_ENABLE_DEPRECATION_WARNINGS

    return 0;
}