static void
nv50_context_unreference_resources(struct nv50_context *nv50)
{
   unsigned s, i;

   nouveau_bufctx_del(&nv50->bufctx_3d);
   nouveau_bufctx_del(&nv50->bufctx);
   nouveau_bufctx_del(&nv50->bufctx_cp);

   util_unreference_framebuffer_state(&nv50->framebuffer);

   assert(nv50->num_vtxbufs <= PIPE_MAX_ATTRIBS);
   for (i = 0; i < nv50->num_vtxbufs; ++i)
      pipe_resource_reference(&nv50->vtxbuf[i].buffer, NULL);

   pipe_resource_reference(&nv50->idxbuf.buffer, NULL);

   for (s = 0; s < 3; ++s) {
      assert(nv50->num_textures[s] <= PIPE_MAX_SAMPLERS);
      for (i = 0; i < nv50->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nv50->textures[s][i], NULL);

      for (i = 0; i < NV50_MAX_PIPE_CONSTBUFS; ++i)
         if (!nv50->constbuf[s][i].user)
            pipe_resource_reference(&nv50->constbuf[s][i].u.buf, NULL);
   }

   for (i = 0; i < nv50->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nv50->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nv50->global_residents);
}
VAStatus
vlVaDestroySurfaces(VADriverContextP ctx, VASurfaceID *surface_list, int num_surfaces)
{
   vlVaDriver *drv;
   int i;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   pipe_mutex_lock(drv->mutex);
   for (i = 0; i < num_surfaces; ++i) {
      vlVaSurface *surf = handle_table_get(drv->htab, surface_list[i]);
      if (!surf) {
         pipe_mutex_unlock(drv->mutex);
         return VA_STATUS_ERROR_INVALID_SURFACE;
      }
      if (surf->buffer)
         surf->buffer->destroy(surf->buffer);
      util_dynarray_fini(&surf->subpics);
      FREE(surf);
      handle_table_remove(drv->htab, surface_list[i]);
   }
   pipe_mutex_unlock(drv->mutex);

   return VA_STATUS_SUCCESS;
}
static void
nv30_vertprog_destroy(struct nv30_vertprog *vp)
{
   util_dynarray_fini(&vp->branch_relocs);
   nouveau_heap_free(&vp->exec);
   FREE(vp->insns);
   vp->insns = NULL;
   vp->nr_insns = 0;

   util_dynarray_fini(&vp->const_relocs);
   nouveau_heap_free(&vp->data);
   FREE(vp->consts);
   vp->consts = NULL;
   vp->nr_consts = 0;

   vp->translated = FALSE;
}
static void
nvc0_context_unreference_resources(struct nvc0_context *nvc0)
{
   unsigned s, i;

   nouveau_bufctx_del(&nvc0->bufctx_3d);
   nouveau_bufctx_del(&nvc0->bufctx);
   nouveau_bufctx_del(&nvc0->bufctx_cp);

   util_unreference_framebuffer_state(&nvc0->framebuffer);

   for (i = 0; i < nvc0->num_vtxbufs; ++i)
      pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);

   pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);

   for (s = 0; s < 6; ++s) {
      for (i = 0; i < nvc0->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);

      for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
         if (!nvc0->constbuf[s][i].user)
            pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);

      for (i = 0; i < NVC0_MAX_BUFFERS; ++i)
         pipe_resource_reference(&nvc0->buffers[s][i].buffer, NULL);

      for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
         pipe_resource_reference(&nvc0->images[s][i].resource, NULL);
         if (nvc0->screen->base.class_3d >= GM107_3D_CLASS)
            pipe_sampler_view_reference(&nvc0->images_tic[s][i], NULL);
      }
   }

   for (s = 0; s < 2; ++s) {
      for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
         pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
   }

   for (i = 0; i < nvc0->num_tfbbufs; ++i)
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);

   for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nvc0->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nvc0->global_residents);

   if (nvc0->tcp_empty)
      nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
}
void
ilo_state_vector_cleanup(struct ilo_state_vector *vec)
{
   unsigned i, sh;

   for (i = 0; i < Elements(vec->vb.states); i++) {
      if (vec->vb.enabled_mask & (1 << i))
         pipe_resource_reference(&vec->vb.states[i].buffer, NULL);
   }

   pipe_resource_reference(&vec->ib.buffer, NULL);
   pipe_resource_reference(&vec->ib.hw_resource, NULL);

   for (i = 0; i < vec->so.count; i++)
      pipe_so_target_reference(&vec->so.states[i], NULL);

   for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
      for (i = 0; i < vec->view[sh].count; i++) {
         struct pipe_sampler_view *view = vec->view[sh].states[i];
         pipe_sampler_view_reference(&view, NULL);
      }

      for (i = 0; i < Elements(vec->cbuf[sh].cso); i++) {
         struct ilo_cbuf_cso *cbuf = &vec->cbuf[sh].cso[i];
         pipe_resource_reference(&cbuf->resource, NULL);
      }
   }

   for (i = 0; i < vec->resource.count; i++)
      pipe_surface_reference(&vec->resource.states[i], NULL);

   for (i = 0; i < vec->fb.state.nr_cbufs; i++)
      pipe_surface_reference(&vec->fb.state.cbufs[i], NULL);

   if (vec->fb.state.zsbuf)
      pipe_surface_reference(&vec->fb.state.zsbuf, NULL);

   for (i = 0; i < vec->cs_resource.count; i++)
      pipe_surface_reference(&vec->cs_resource.states[i], NULL);

   for (i = 0; i < vec->global_binding.count; i++) {
      struct ilo_global_binding_cso *cso =
         util_dynarray_element(&vec->global_binding.bindings,
               struct ilo_global_binding_cso, i);
      pipe_resource_reference(&cso->resource, NULL);
   }

   util_dynarray_fini(&vec->global_binding.bindings);
}
static void
fd3_context_destroy(struct pipe_context *pctx)
{
   struct fd3_context *fd3_ctx = fd3_context(fd_context(pctx));

   util_dynarray_fini(&fd3_ctx->rbrc_patches);

   fd_bo_del(fd3_ctx->vs_pvt_mem);
   fd_bo_del(fd3_ctx->fs_pvt_mem);
   fd_bo_del(fd3_ctx->vsc_size_mem);

   pipe_resource_reference(&fd3_ctx->solid_vbuf, NULL);
   pipe_resource_reference(&fd3_ctx->blit_texcoord_vbuf, NULL);

   fd_context_destroy(pctx);
}
static void
nvc0_context_unreference_resources(struct nvc0_context *nvc0)
{
   unsigned s, i;

   nouveau_bufctx_del(&nvc0->bufctx_3d);
   nouveau_bufctx_del(&nvc0->bufctx);
   nouveau_bufctx_del(&nvc0->bufctx_cp);

   util_unreference_framebuffer_state(&nvc0->framebuffer);

   for (i = 0; i < nvc0->num_vtxbufs; ++i)
      pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);

   pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);

   for (s = 0; s < 6; ++s) {
      for (i = 0; i < nvc0->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);

      for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
         if (!nvc0->constbuf[s][i].user)
            pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);
   }

   for (s = 0; s < 2; ++s) {
      for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
         pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
   }

   for (i = 0; i < nvc0->num_tfbbufs; ++i)
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);

   for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nvc0->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nvc0->global_residents);
}
static void
fd4_context_destroy(struct pipe_context *pctx)
{
   struct fd4_context *fd4_ctx = fd4_context(fd_context(pctx));

   util_dynarray_fini(&fd4_ctx->rbrc_patches);

   fd_bo_del(fd4_ctx->vs_pvt_mem);
   fd_bo_del(fd4_ctx->fs_pvt_mem);
   fd_bo_del(fd4_ctx->vsc_size_mem);

   pctx->delete_vertex_elements_state(pctx, fd4_ctx->solid_vbuf_state.vtx);
   pctx->delete_vertex_elements_state(pctx, fd4_ctx->blit_vbuf_state.vtx);

   pipe_resource_reference(&fd4_ctx->solid_vbuf, NULL);
   pipe_resource_reference(&fd4_ctx->blit_texcoord_vbuf, NULL);

   u_upload_destroy(fd4_ctx->border_color_uploader);

   fd_context_destroy(pctx);
}
void
fd_context_destroy(struct pipe_context *pctx)
{
   struct fd_context *ctx = fd_context(pctx);
   unsigned i;

   DBG("");

   fd_prog_fini(pctx);
   fd_hw_query_fini(pctx);

   util_dynarray_fini(&ctx->draw_patches);

   if (ctx->blitter)
      util_blitter_destroy(ctx->blitter);

   if (ctx->primconvert)
      util_primconvert_destroy(ctx->primconvert);

   util_slab_destroy(&ctx->transfer_pool);

   fd_ringmarker_del(ctx->draw_start);
   fd_ringmarker_del(ctx->draw_end);
   fd_ringmarker_del(ctx->binning_start);
   fd_ringmarker_del(ctx->binning_end);

   for (i = 0; i < ARRAY_SIZE(ctx->rings); i++)
      fd_ringbuffer_del(ctx->rings[i]);

   for (i = 0; i < ARRAY_SIZE(ctx->pipe); i++) {
      struct fd_vsc_pipe *pipe = &ctx->pipe[i];
      if (!pipe->bo)
         break;
      fd_bo_del(pipe->bo);
   }

   fd_device_del(ctx->dev);

   FREE(ctx);
}
VAStatus
vlVaDestroySurfaces(VADriverContextP ctx, VASurfaceID *surface_list, int num_surfaces)
{
   vlVaDriver *drv;
   int i;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   for (i = 0; i < num_surfaces; ++i) {
      vlVaSurface *surf = handle_table_get(drv->htab, surface_list[i]);
      if (surf->buffer)
         surf->buffer->destroy(surf->buffer);
      if (surf->fence)
         drv->pipe->screen->fence_reference(drv->pipe->screen, &surf->fence, NULL);
      util_dynarray_fini(&surf->subpics);
      FREE(surf);
      handle_table_remove(drv->htab, surface_list[i]);
   }

   return VA_STATUS_SUCCESS;
}
/*
 * pipe_context
 */
static void si_destroy_context(struct pipe_context *context)
{
   struct si_context *sctx = (struct si_context *)context;
   int i;

   util_queue_finish(&sctx->screen->shader_compiler_queue);
   util_queue_finish(&sctx->screen->shader_compiler_queue_low_priority);

   /* Unreference the framebuffer normally to disable related logic
    * properly.
    */
   struct pipe_framebuffer_state fb = {};
   if (context->set_framebuffer_state)
      context->set_framebuffer_state(context, &fb);

   si_release_all_descriptors(sctx);

   pipe_resource_reference(&sctx->esgs_ring, NULL);
   pipe_resource_reference(&sctx->gsvs_ring, NULL);
   pipe_resource_reference(&sctx->tess_rings, NULL);
   pipe_resource_reference(&sctx->null_const_buf.buffer, NULL);
   pipe_resource_reference(&sctx->sample_pos_buffer, NULL);
   si_resource_reference(&sctx->border_color_buffer, NULL);
   free(sctx->border_color_table);
   si_resource_reference(&sctx->scratch_buffer, NULL);
   si_resource_reference(&sctx->compute_scratch_buffer, NULL);
   si_resource_reference(&sctx->wait_mem_scratch, NULL);

   si_pm4_free_state(sctx, sctx->init_config, ~0);
   if (sctx->init_config_gs_rings)
      si_pm4_free_state(sctx, sctx->init_config_gs_rings, ~0);
   for (i = 0; i < ARRAY_SIZE(sctx->vgt_shader_config); i++)
      si_pm4_delete_state(sctx, vgt_shader_config, sctx->vgt_shader_config[i]);

   if (sctx->fixed_func_tcs_shader.cso)
      sctx->b.delete_tcs_state(&sctx->b, sctx->fixed_func_tcs_shader.cso);
   if (sctx->custom_dsa_flush)
      sctx->b.delete_depth_stencil_alpha_state(&sctx->b, sctx->custom_dsa_flush);
   if (sctx->custom_blend_resolve)
      sctx->b.delete_blend_state(&sctx->b, sctx->custom_blend_resolve);
   if (sctx->custom_blend_fmask_decompress)
      sctx->b.delete_blend_state(&sctx->b, sctx->custom_blend_fmask_decompress);
   if (sctx->custom_blend_eliminate_fastclear)
      sctx->b.delete_blend_state(&sctx->b, sctx->custom_blend_eliminate_fastclear);
   if (sctx->custom_blend_dcc_decompress)
      sctx->b.delete_blend_state(&sctx->b, sctx->custom_blend_dcc_decompress);
   if (sctx->vs_blit_pos)
      sctx->b.delete_vs_state(&sctx->b, sctx->vs_blit_pos);
   if (sctx->vs_blit_pos_layered)
      sctx->b.delete_vs_state(&sctx->b, sctx->vs_blit_pos_layered);
   if (sctx->vs_blit_color)
      sctx->b.delete_vs_state(&sctx->b, sctx->vs_blit_color);
   if (sctx->vs_blit_color_layered)
      sctx->b.delete_vs_state(&sctx->b, sctx->vs_blit_color_layered);
   if (sctx->vs_blit_texcoord)
      sctx->b.delete_vs_state(&sctx->b, sctx->vs_blit_texcoord);
   if (sctx->cs_clear_buffer)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_clear_buffer);
   if (sctx->cs_copy_buffer)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_copy_buffer);
   if (sctx->cs_copy_image)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_copy_image);
   if (sctx->cs_copy_image_1d_array)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_copy_image_1d_array);
   if (sctx->cs_clear_render_target)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_clear_render_target);
   if (sctx->cs_clear_render_target_1d_array)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_clear_render_target_1d_array);
   if (sctx->cs_dcc_retile)
      sctx->b.delete_compute_state(&sctx->b, sctx->cs_dcc_retile);

   if (sctx->blitter)
      util_blitter_destroy(sctx->blitter);

   /* Release DCC stats. */
   for (int i = 0; i < ARRAY_SIZE(sctx->dcc_stats); i++) {
      assert(!sctx->dcc_stats[i].query_active);

      for (int j = 0; j < ARRAY_SIZE(sctx->dcc_stats[i].ps_stats); j++)
         if (sctx->dcc_stats[i].ps_stats[j])
            sctx->b.destroy_query(&sctx->b, sctx->dcc_stats[i].ps_stats[j]);

      si_texture_reference(&sctx->dcc_stats[i].tex, NULL);
   }

   if (sctx->query_result_shader)
      sctx->b.delete_compute_state(&sctx->b, sctx->query_result_shader);

   if (sctx->gfx_cs)
      sctx->ws->cs_destroy(sctx->gfx_cs);
   if (sctx->dma_cs)
      sctx->ws->cs_destroy(sctx->dma_cs);
   if (sctx->ctx)
      sctx->ws->ctx_destroy(sctx->ctx);

   if (sctx->b.stream_uploader)
      u_upload_destroy(sctx->b.stream_uploader);
   if (sctx->b.const_uploader)
      u_upload_destroy(sctx->b.const_uploader);
   if (sctx->cached_gtt_allocator)
      u_upload_destroy(sctx->cached_gtt_allocator);

   slab_destroy_child(&sctx->pool_transfers);
   slab_destroy_child(&sctx->pool_transfers_unsync);

   if (sctx->allocator_zeroed_memory)
      u_suballocator_destroy(sctx->allocator_zeroed_memory);

   sctx->ws->fence_reference(&sctx->last_gfx_fence, NULL);
   sctx->ws->fence_reference(&sctx->last_sdma_fence, NULL);
   si_resource_reference(&sctx->eop_bug_scratch, NULL);

   si_destroy_compiler(&sctx->compiler);

   si_saved_cs_reference(&sctx->current_saved_cs, NULL);

   _mesa_hash_table_destroy(sctx->tex_handles, NULL);
   _mesa_hash_table_destroy(sctx->img_handles, NULL);

   util_dynarray_fini(&sctx->resident_tex_handles);
   util_dynarray_fini(&sctx->resident_img_handles);
   util_dynarray_fini(&sctx->resident_tex_needs_color_decompress);
   util_dynarray_fini(&sctx->resident_img_needs_color_decompress);
   util_dynarray_fini(&sctx->resident_tex_needs_depth_decompress);
   si_unref_sdma_uploads(sctx);
   FREE(sctx);
}