/**
 * Release the buffers owned by an occlusion-mesh structure.
 * Frees the index buffer (plain allocation) and the position buffer
 * (aligned allocation) if they were ever allocated. Does not free
 * @p occ itself; ownership of the struct stays with the caller.
 */
void model_unloadocc(struct gfx_model_occ* occ, struct allocator* alloc)
{
    if (occ->indexes != NULL) {
        A_FREE(alloc, occ->indexes);
    }

    if (occ->poss != NULL) {
        A_ALIGNED_FREE(alloc, occ->poss);
    }
}
/**
 * Tear down a stack allocator, returning its backing buffer to the
 * parent allocator. Safe to call on a stack whose buffer was never
 * allocated (buffer == NULL). The stack struct itself is not freed.
 */
void mem_stack_destroy(struct stack_alloc* stack)
{
    ASSERT(stack != NULL);

    if (stack->buffer == NULL)
        return; /* nothing was ever allocated */

    A_ALIGNED_FREE(stack->alloc, stack->buffer);
}
/**
 * Free the backing storage of a dynamic array.
 * A never-populated array (buffer == NULL) is a no-op; otherwise the
 * array must carry the allocator it was created with. The array struct
 * itself remains owned by the caller.
 */
void arr_destroy(struct array* arr)
{
    ASSERT(arr != NULL);

    if (arr->buffer == NULL)
        return; /* empty array: nothing to release */

    ASSERT(arr->alloc != NULL);
    A_ALIGNED_FREE(arr->alloc, arr->buffer);
}
/**
 * Unload a model: release every geometry it owns, then free the model
 * block itself via the allocator stored on the model. After this call
 * @p model is a dangling pointer and must not be used.
 *
 * NOTE(review): geos appears to live inside the single aligned model
 * allocation (it is not freed separately) — confirm against the loader.
 */
void gfx_model_unload(struct gfx_model* model)
{
    /* sibling destroy routines (mem_stack_destroy, arr_destroy) assert
     * their argument before dereferencing; do the same here instead of
     * faulting on model->geos with a NULL model */
    ASSERT(model != NULL);

    if (model->geos != NULL) {
        for (uint i = 0; i < model->geo_cnt; i++)
            model_unloadgeo(&model->geos[i]);
    }

    A_ALIGNED_FREE(model->alloc, model);
}
void gfx_model_destroyinstance(struct gfx_model_instance* inst) { struct allocator* alloc = inst->alloc; ASSERT(inst->model != INVALID_HANDLE); struct gfx_model* m = rs_get_model(inst->model); if (inst->mtls != NULL) { /* release cblocks and textures for materials */ for (uint i = 0; i < m->mtl_cnt; i++) { struct gfx_model_mtlgpu* gmtl = inst->mtls[i]; if (gmtl != NULL) model_destroy_gpumtl(alloc, gmtl); } } A_ALIGNED_FREE(alloc, inst); }