/* Copy a GstGLBuffer of @size bytes starting at @offset.
 * Prefers a GPU-side buffer-to-buffer copy; falls back to a CPU copy when
 * the source data still lives in system memory or the GL copy fails.
 * Returns a new GstGLBuffer, or NULL on failure. */
static GstGLBuffer *
_gl_buffer_copy (GstGLBuffer * src, gssize offset, gssize size)
{
  GstAllocator *allocator = src->mem.mem.allocator;
  GstAllocationParams params = { 0, src->mem.mem.align, 0, 0 };
  GstGLBuffer *dest = NULL;

  /* FIX: was the mis-encoded token `¶ms`; must be `&params` */
  dest = _gl_buffer_new (allocator, NULL, src->mem.context, src->target,
      src->usage_hints, &params, src->mem.mem.maxsize);

  if (GST_MEMORY_FLAG_IS_SET (src, GST_GL_BASE_MEMORY_TRANSFER_NEED_UPLOAD)) {
    /* Data has not been uploaded to GL yet: the GL buffer contents would be
     * stale, so copy through system memory instead. */
    if (!gst_gl_base_memory_memcpy (GST_GL_BASE_MEMORY_CAST (src),
            GST_GL_BASE_MEMORY_CAST (dest), offset, size)) {
      GST_CAT_WARNING (GST_CAT_GL_BUFFER, "Could not copy GL Buffer");
      gst_memory_unref (GST_MEMORY_CAST (dest));
      dest = NULL;
    }
  } else {
    /* Try glCopyBufferSubData first; fall back to a CPU memcpy. */
    if (!gst_gl_buffer_copy_buffer_sub_data (src, dest, offset, size)) {
      if (!gst_gl_base_memory_memcpy (GST_GL_BASE_MEMORY_CAST (src),
              GST_GL_BASE_MEMORY_CAST (dest), offset, size)) {
        GST_CAT_WARNING (GST_CAT_GL_BUFFER, "Could not copy GL Buffer");
        gst_memory_unref (GST_MEMORY_CAST (dest));
        dest = NULL;
      }
    }
  }

  return dest;
}
/**
 * gst_memory_make_mapped:
 * @mem: (transfer full): a #GstMemory
 * @info: (out): pointer for info
 * @flags: mapping flags
 *
 * Create a #GstMemory object that is mapped with @flags. If @mem is mappable
 * with @flags, this function returns the mapped @mem directly. Otherwise a
 * mapped copy of @mem is returned.
 *
 * This function takes ownership of old @mem and returns a reference to a new
 * #GstMemory.
 *
 * Returns: (transfer full) (nullable): a #GstMemory object mapped
 * with @flags or %NULL when a mapping is not possible.
 */
GstMemory *
gst_memory_make_mapped (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
{
  GstMemory *result;

  if (gst_memory_map (mem, info, flags)) {
    /* @mem is directly mappable with @flags; hand back the caller's ref. */
    result = mem;
  } else {
    /* Not mappable as-is: make a full copy, which may be mappable even when
     * the original is not, and drop the caller's ref to @mem. */
    result = gst_memory_copy (mem, 0, -1);
    gst_memory_unref (mem);
    if (result == NULL)
      goto cannot_copy;

    if (!gst_memory_map (result, info, flags))
      goto cannot_map;
  }
  return result;

  /* ERRORS */
cannot_copy:
  {
    /* NOTE: @mem was already unreffed above; only its pointer value is
     * logged here, it is never dereferenced. */
    GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
    return NULL;
  }
cannot_map:
  {
    GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
        flags);
    gst_memory_unref (result);
    return NULL;
  }
}
/* Release all GL resources held by @upload: the scratch texture, the
 * colour converter, the output texture and every per-plane input texture,
 * then drop any in-flight buffer. */
static void
gst_gl_upload_reset (GstGLUpload * upload)
{
  guint plane;

  if (upload->priv->tex_id != 0) {
    gst_gl_context_del_texture (upload->context, &upload->priv->tex_id);
    upload->priv->tex_id = 0;
  }

  if (upload->convert != NULL) {
    gst_object_unref (upload->convert);
    upload->convert = NULL;
  }

  if (upload->out_tex != NULL) {
    gst_memory_unref ((GstMemory *) upload->out_tex);
    upload->out_tex = NULL;
  }

  for (plane = 0; plane < GST_VIDEO_MAX_PLANES; plane++) {
    if (upload->in_tex[plane] == NULL)
      continue;
    gst_memory_unref ((GstMemory *) upload->in_tex[plane]);
    upload->in_tex[plane] = NULL;
  }

  gst_gl_upload_release_buffer (upload);
}
/* Copy a GstGLMemoryPBO.
 * External OES textures cannot be copied.  Partial copies (non-zero @offset
 * or @size smaller than the memory) go through the sysmem fallback because
 * the 2D representation of the texture would become wrong.  Full copies are
 * done GPU-side unless an upload is still pending, in which case the data
 * is copied through system memory.
 * Returns a new GstMemory, or NULL on failure. */
static GstMemory *
_gl_mem_copy (GstGLMemoryPBO * src, gssize offset, gssize size)
{
  GstAllocationParams params = { 0, GST_MEMORY_CAST (src)->align, 0, 0 };
  GstGLBaseMemoryAllocator *base_mem_allocator;
  GstAllocator *allocator;
  GstMemory *dest = NULL;

  allocator = GST_MEMORY_CAST (src)->allocator;
  base_mem_allocator = (GstGLBaseMemoryAllocator *) allocator;

  if (src->mem.tex_target == GST_GL_TEXTURE_TARGET_EXTERNAL_OES) {
    GST_CAT_ERROR (GST_CAT_GL_MEMORY, "Cannot copy External OES textures");
    return NULL;
  }

  /* If not doing a full copy, then copy to sysmem, the 2D representation of
   * the texture would become wrong */
  if (offset > 0 || size < GST_MEMORY_CAST (src)->size) {
    return base_mem_allocator->fallback_mem_copy (GST_MEMORY_CAST (src),
        offset, size);
  }

  /* FIX: was the mis-encoded token `¶ms`; must be `&params` */
  dest = (GstMemory *) _gl_mem_new (allocator, NULL, src->mem.mem.context,
      src->mem.tex_target, &params, &src->mem.info, src->mem.plane,
      &src->mem.valign, NULL, NULL);

  if (GST_MEMORY_FLAG_IS_SET (src, GST_GL_BASE_MEMORY_TRANSFER_NEED_UPLOAD)) {
    if (!gst_gl_base_memory_memcpy ((GstGLBaseMemory *) src,
            (GstGLBaseMemory *) dest, offset, size)) {
      GST_CAT_WARNING (GST_CAT_GL_MEMORY, "Could not copy GL Memory");
      gst_memory_unref (GST_MEMORY_CAST (dest));
      return NULL;
    }
  } else {
    GstMapInfo dinfo;

    if (!gst_memory_map (GST_MEMORY_CAST (dest), &dinfo,
            GST_MAP_WRITE | GST_MAP_GL)) {
      /* FIX: message previously read "Failed not map destination" */
      GST_CAT_WARNING (GST_CAT_GL_MEMORY, "Failed to map destination "
          "for writing");
      gst_memory_unref (GST_MEMORY_CAST (dest));
      return NULL;
    }

    if (!gst_gl_memory_copy_into ((GstGLMemory *) src,
            ((GstGLMemory *) dest)->tex_id, src->mem.tex_target,
            src->mem.tex_type, src->mem.tex_width, GL_MEM_HEIGHT (src))) {
      GST_CAT_WARNING (GST_CAT_GL_MEMORY, "Could not copy GL Memory");
      gst_memory_unmap (GST_MEMORY_CAST (dest), &dinfo);
      gst_memory_unref (GST_MEMORY_CAST (dest));
      return NULL;
    }

    gst_memory_unmap (GST_MEMORY_CAST (dest), &dinfo);
  }

  return dest;
}
/* GL-thread teardown callback: @data is the GstGLContext.  Releases the
 * file-scope VAO, FBO, shader and the two GL texture memories. */
static void
deinit (gpointer data)
{
  GstGLContext *ctx = data;
  GstGLFuncs *vtable = ctx->gl_vtable;

  if (vao)
    vtable->DeleteVertexArrays (1, &vao);

  gst_object_unref (fbo);
  gst_object_unref (shader);
  gst_memory_unref (GST_MEMORY_CAST (gl_tex));
  gst_memory_unref (GST_MEMORY_CAST (gl_tex2));
}
/* Wrap every GstMemory of @buf into a CMBlockBuffer without copying.
 *
 * Each memory is mapped with @flags and appended as a custom block whose
 * free callback is cm_block_buffer_freeblock.  Ownership of the mapped
 * GstMemory reference and of the heap-allocated GstMapInfo is transferred
 * to the CMBlockBuffer: they are released when CoreMedia frees the block.
 *
 * Returns the new CMBlockBufferRef, or NULL on failure (any partially
 * built block buffer is released, which also releases the blocks appended
 * so far via the free callback). */
static CMBlockBufferRef
cm_block_buffer_from_gst_buffer (GstBuffer * buf, GstMapFlags flags)
{
  OSStatus status;
  CMBlockBufferRef bbuf;
  CMBlockBufferCustomBlockSource blockSource;
  guint memcount, i;

  /* Initialize custom block source structure */
  blockSource.version = kCMBlockBufferCustomBlockSourceVersion;
  blockSource.AllocateBlock = NULL;
  blockSource.FreeBlock = cm_block_buffer_freeblock;

  /* Determine number of memory blocks */
  memcount = gst_buffer_n_memory (buf);
  status = CMBlockBufferCreateEmpty (NULL, memcount, 0, &bbuf);
  if (status != kCMBlockBufferNoErr) {
    GST_ERROR ("CMBlockBufferCreateEmpty returned %d", (int) status);
    return NULL;
  }

  /* Go over all GstMemory objects and add them to the CMBlockBuffer */
  for (i = 0; i < memcount; ++i) {
    GstMemory *mem;
    GstMapInfo *info;

    /* gst_buffer_get_memory returns a new ref; on success it is kept alive
     * by the block buffer and dropped in cm_block_buffer_freeblock. */
    mem = gst_buffer_get_memory (buf, i);
    info = g_slice_new (GstMapInfo);

    if (!gst_memory_map (mem, info, flags)) {
      GST_ERROR ("failed mapping memory");
      g_slice_free (GstMapInfo, info);
      gst_memory_unref (mem);
      CFRelease (bbuf);
      return NULL;
    }

    /* refCon carries the GstMapInfo so the free callback can unmap/unref */
    blockSource.refCon = info;
    status = CMBlockBufferAppendMemoryBlock (bbuf, info->data, info->size,
        NULL, &blockSource, 0, info->size, 0);
    if (status != kCMBlockBufferNoErr) {
      GST_ERROR ("CMBlockBufferAppendMemoryBlock returned %d", (int) status);
      gst_memory_unmap (mem, info);
      g_slice_free (GstMapInfo, info);
      gst_memory_unref (mem);
      CFRelease (bbuf);
      return NULL;
    }
  }

  return bbuf;
}
static GstVulkanMemory * _vk_mem_new (GstAllocator * allocator, GstMemory * parent, GstVulkanDevice * device, guint32 memory_type_index, GstAllocationParams * params, gsize size, VkMemoryPropertyFlags mem_props_flags, gpointer user_data, GDestroyNotify notify) { GstVulkanMemory *mem = g_new0 (GstVulkanMemory, 1); GError *error = NULL; VkResult err; _vk_mem_init (mem, allocator, parent, device, memory_type_index, params, size, mem_props_flags, user_data, notify); err = vkAllocateMemory (device->device, &mem->alloc_info, NULL, &mem->mem_ptr); if (gst_vulkan_error_to_g_error (err, &error, "vkAllocMemory") < 0) { GST_CAT_ERROR (GST_CAT_VULKAN_MEMORY, "Failed to allocate device memory %s", error->message); gst_memory_unref ((GstMemory *) mem); g_clear_error (&error); return NULL; } return mem; }
/* GObject finalize: tear down the framebuffer array, releasing every
 * physical memory block backing a framebuffer, the framebuffer table
 * itself, and the allocator reference. */
static void gst_imx_vpu_framebuffer_array_finalize(GObject *object)
{
	guint i;
	GstImxVpuFramebufferArray *framebuffer_array = GST_IMX_VPU_FRAMEBUFFER_ARRAY(object);

	GST_DEBUG_OBJECT(object, "shutting down framebuffer array %p", (gpointer)object);

	if (framebuffer_array->framebuffers != NULL)
	{
		for (i = 0; i < framebuffer_array->num_framebuffers; ++i)
		{
			ImxVpuFramebuffer *framebuffer = &(framebuffer_array->framebuffers[i]);
			GstImxPhysMemory *memory = gst_imx_vpu_framebuffer_array_get_gst_phys_memory(framebuffer);
			GST_DEBUG_OBJECT(object, "freeing gstmemory block %p with physical address %" GST_IMX_PHYS_ADDR_FORMAT " and ref count %d", (gpointer)memory, memory->phys_addr, GST_MINI_OBJECT_REFCOUNT_VALUE(memory));
			/* at this point, the memory's refcount is 1, so unref'ing will deallocate it */
			gst_memory_unref((GstMemory *)memory);
		}

		/* the framebuffer table was allocated as one slice of
		 * num_framebuffers entries; free it with the matching size */
		g_slice_free1(sizeof(ImxVpuFramebuffer) * framebuffer_array->num_framebuffers, framebuffer_array->framebuffers);
	}

	if (framebuffer_array->allocator != NULL)
		gst_object_unref(GST_OBJECT(framebuffer_array->allocator));

	/* chain up to the parent class finalize */
	G_OBJECT_CLASS(gst_imx_vpu_framebuffer_array_parent_class)->finalize(object);
}
/* appsink "new-preroll" callback: pull the preroll sample and hand its raw
 * bytes to the application's output callback, if one is set.
 * Always returns GST_FLOW_OK. */
GstFlowReturn on_new_preroll(GstAppSink *appsink, gpointer user_data)
{
  GstSample* sample = NULL;
  GstBuffer* buffer;
  GstMemory* memory;
  GstMapInfo info;
  GstClockTime clocktime;

  g_debug("on_new_preroll ");
  sample = gst_app_sink_pull_sample (appsink);
  if (sample) {
    g_debug("pulled sample\n");
    buffer = gst_sample_get_buffer(sample);
    clocktime = GST_BUFFER_PTS(buffer);
    memory = gst_buffer_get_memory(buffer, 0);
    /* FIX: gst_buffer_get_memory can return NULL and gst_memory_map can
     * fail; both were previously unchecked. */
    if (memory) {
      if (gst_memory_map(memory, &info, GST_MAP_READ)) {
        /* You can access raw memory at info.data */
        if(app.output_callback)
          app.output_callback(info.data, info.size);
        //fwrite(info.data, 1, info.size, app.outfile);
        gst_memory_unmap(memory, &info);
      }
      gst_memory_unref(memory);
    }
    gst_sample_unref(sample);
  }
  return GST_FLOW_OK;
}
/* libpng write callback: copy @length bytes of encoded data into a freshly
 * allocated GstMemory and append it to the encoder's output buffer.
 * On allocation or mapping failure, png_error() is invoked and never
 * returns to this function. */
static void
user_write_data (png_structp png_ptr, png_bytep data, png_uint_32 length)
{
  GstPngEnc *pngenc = (GstPngEnc *) png_get_io_ptr (png_ptr);
  GstMapInfo map_info;
  GstMemory *out_mem = gst_allocator_alloc (NULL, length, NULL);

  if (out_mem == NULL) {
    GST_ERROR_OBJECT (pngenc, "Failed to allocate memory");
    png_error (png_ptr, "Failed to allocate memory");

    /* never reached */
    return;
  }

  if (!gst_memory_map (out_mem, &map_info, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (pngenc, "Failed to map memory");
    gst_memory_unref (out_mem);
    png_error (png_ptr, "Failed to map memory");

    /* never reached */
    return;
  }

  memcpy (map_info.data, data, length);
  gst_memory_unmap (out_mem, &map_info);

  gst_buffer_append_memory (pngenc->buffer_out, out_mem);
}
/* Release the converter's GL state: FBO/depth buffer, output textures and
 * shader, and reset the chroma sampling factors to 1.0. */
static void
gst_gl_color_convert_reset (GstGLColorConvert * convert)
{
  guint tex;

  if (convert->fbo != 0 || convert->depth_buffer != 0) {
    gst_gl_context_del_fbo (convert->context, convert->fbo,
        convert->depth_buffer);
    convert->fbo = 0;
    convert->depth_buffer = 0;
  }

  for (tex = 0; tex < convert->priv->convert_info.out_n_textures; tex++) {
    if (convert->priv->out_tex[tex])
      gst_memory_unref ((GstMemory *) convert->priv->out_tex[tex]);
    convert->priv->out_tex[tex] = NULL;
  }

  convert->priv->convert_info.chroma_sampling[0] = 1.0f;
  convert->priv->convert_info.chroma_sampling[1] = 1.0f;

  if (convert->shader != NULL) {
    gst_object_unref (convert->shader);
    convert->shader = NULL;
  }
}
/*
 * Chain list function for testing buffer lists
 */
static GstFlowReturn
rtp_pipeline_chain_list (GstPad * pad, GstObject * parent, GstBufferList * list)
{
  guint idx, n_buffers;

  fail_if (!list);

  /* Accumulate the payload size over every buffer in the list; only the
   * last memory block of each buffer carries real payload data. */
  n_buffers = gst_buffer_list_length (list);

  for (idx = 0; idx < n_buffers; idx++) {
    GstBuffer *paybuf = gst_buffer_list_get (list, idx);
    GstMemory *payload_mem;
    gint payload_size;

    /* only count real data which is expected in last memory block */
    fail_unless (gst_buffer_n_memory (paybuf) > 1);
    payload_mem = gst_buffer_get_memory_range (paybuf,
        gst_buffer_n_memory (paybuf) - 1, 1);
    payload_size = gst_memory_get_sizes (payload_mem, NULL, NULL);
    gst_memory_unref (payload_mem);

    chain_list_bytes_received += payload_size;
  }

  gst_buffer_list_unref (list);

  return GST_FLOW_OK;
}
/* Drop every buffer the driver still reports as queued: clear its QUEUED
 * flag, reset the group state and release the memories.  No-op when the
 * allocator is not active.  Runs under the allocator object lock. */
void
gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
{
  gint idx;

  GST_OBJECT_LOCK (allocator);

  if (g_atomic_int_get (&allocator->active)) {
    for (idx = 0; idx < allocator->count; idx++) {
      GstV4l2MemoryGroup *group = allocator->groups[idx];
      gint m;

      if (!IS_QUEUED (group->buffer))
        continue;

      UNSET_QUEUED (group->buffer);
      gst_v4l2_allocator_reset_group (allocator, group);
      for (m = 0; m < group->n_mem; m++)
        gst_memory_unref (group->mem[m]);
    }
  }

  GST_OBJECT_UNLOCK (allocator);
}
/* Drop the reference to the memory currently held by @state, if any. */
static void
flush_internal (APP_STATE_T * state)
{
  if (state->current_mem != NULL)
    gst_memory_unref (state->current_mem);
  state->current_mem = NULL;
}
/* GstAllocator alloc vmethod: allocate a gralloc-backed buffer matching the
 * OMX port's video definition and register it with OMX via OMX_UseBuffer.
 * @size must equal the port's nBufferSize.  Returns NULL on failure. */
static GstMemory *
gst_droid_codec_gralloc_allocator_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params)
{
  GstMemory *gralloc;
  OMX_ERRORTYPE err;
  GstDroidCodecGrallocMemory *mem;
  struct ANativeWindowBuffer *native;
  GstDroidCodecGrallocAllocator *alloc = GST_GRALLOC_ALLOCATOR (allocator);
  OMX_BUFFERHEADERTYPE *omx_buf = NULL;

  if (size != alloc->port->def.nBufferSize) {
    /* FIX: %i for a gsize and %li for an OMX_U32 are wrong format
     * specifiers (undefined behavior); use G_GSIZE_FORMAT and %u. */
    GST_ERROR_OBJECT (alloc->port->comp->parent,
        "invalid size passed %" G_GSIZE_FORMAT " vs requested %u", size,
        (guint) alloc->port->def.nBufferSize);
    return NULL;
  }

  gralloc = gst_gralloc_allocator_alloc (alloc->gralloc,
      alloc->port->def.format.video.nFrameWidth,
      alloc->port->def.format.video.nFrameHeight,
      alloc->port->def.format.video.eColorFormat, alloc->port->usage);

  if (!gralloc) {
    GST_ERROR_OBJECT (alloc->port->comp->parent,
        "error allocating gralloc memory");
    return NULL;
  }

  mem = g_slice_new0 (GstDroidCodecGrallocMemory);
  native = gst_memory_get_native_buffer (gralloc);
  err = OMX_UseBuffer (alloc->port->comp->omx, &omx_buf,
      alloc->port->def.nPortIndex, mem, alloc->port->def.nBufferSize,
      (OMX_U8 *) native->handle);

  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (alloc->port->comp->parent,
        "Failed to use buffer for port %u: %s (0x%08x)",
        (guint) alloc->port->def.nPortIndex, gst_omx_error_to_string (err),
        err);
    gst_memory_unref (gralloc);
    g_slice_free (GstDroidCodecGrallocMemory, mem);
    return NULL;
  }

  mem->gralloc = gralloc;
  mem->omx_buf = omx_buf;
  gst_memory_init (GST_MEMORY_CAST (mem), GST_MEMORY_FLAG_NO_SHARE,
      allocator, NULL, omx_buf->nAllocLen, alloc->port->def.nBufferAlignment,
      0, omx_buf->nAllocLen);

  GST_DEBUG_OBJECT (alloc->port->comp->parent, "Allocated buffer for port %u",
      (guint) alloc->port->def.nPortIndex);

  return GST_MEMORY_CAST (mem);
}
/* Custom FreeBlock function for CMBlockBuffer: @refCon is the GstMapInfo
 * allocated in cm_block_buffer_from_gst_buffer; unmap and release the
 * GstMemory it refers to, then free the map info itself. */
static void
cm_block_buffer_freeblock (void *refCon, void *doomedMemoryBlock,
    size_t sizeInBytes)
{
  GstMapInfo *map_info = (GstMapInfo *) refCon;

  gst_memory_unmap (map_info->memory, map_info);
  gst_memory_unref (map_info->memory);
  g_slice_free (GstMapInfo, map_info);
}
/* Destroy vmethod: release the backing PBO, if any, then chain up to the
 * base GL memory allocator's destroy. */
static void
_gl_mem_destroy (GstGLMemoryPBO * gl_mem)
{
  if (gl_mem->pbo != NULL)
    gst_memory_unref (GST_MEMORY_CAST (gl_mem->pbo));
  gl_mem->pbo = NULL;

  GST_GL_BASE_MEMORY_ALLOCATOR_CLASS (parent_class)->destroy
      ((GstGLBaseMemory *) gl_mem);
}
/* Destroy vmethod: run the user's GL notify, release the wrapped
 * CoreVideo memory and chain up to the parent allocator's destroy. */
static void
_ios_gl_memory_destroy (GstGLBaseMemory * gl_mem)
{
  GstIOSGLMemory *ios_mem = (GstIOSGLMemory *) gl_mem;

  ios_mem->gl_notify (ios_mem->gl_data);
  gst_memory_unref (GST_MEMORY_CAST (ios_mem->cv_mem));
  GST_GL_BASE_MEMORY_ALLOCATOR_CLASS
      (gst_ios_gl_memory_allocator_parent_class)->destroy (gl_mem);
}
/* Final release of @mem: if it is a sub-memory, drop the exclusive lock
 * and the reference it holds on its parent, then return it to its
 * allocator. */
static void
_gst_memory_free (GstMemory * mem)
{
  GstMemory *parent = mem->parent;

  GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);

  if (parent != NULL) {
    gst_memory_unlock (parent, GST_LOCK_FLAG_EXCLUSIVE);
    gst_memory_unref (parent);
  }

  gst_allocator_free (mem->allocator, mem);
}
/* Release every per-plane input texture held by @download. */
static void
gst_gl_download_reset (GstGLDownload * download)
{
  guint plane;

  for (plane = 0; plane < GST_VIDEO_MAX_PLANES; plane++) {
    if (download->priv->in_tex[plane] == NULL)
      continue;
    gst_memory_unref ((GstMemory *) download->priv->in_tex[plane]);
    download->priv->in_tex[plane] = NULL;
  }
}
/* Queue @group's buffer on the device with VIDIOC_QBUF.
 *
 * The memories' current sizes are written into the v4l2 buffer/plane
 * bytesused fields first, and an extra ref is taken on every memory so it
 * stays alive (and effectively read-only) while the driver owns it; the
 * matching unrefs happen on dequeue, or immediately below if the ioctl
 * fails.  Also works around drivers whose QUEUED flag disagrees with the
 * actual ioctl result.  Returns TRUE on success. */
gboolean
gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group)
{
  GstV4l2Object *obj = allocator->obj;
  gboolean ret = TRUE;
  gint i;

  g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);

  /* update sizes */
  if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
    for (i = 0; i < group->n_mem; i++)
      group->planes[i].bytesused =
          gst_memory_get_sizes (group->mem[i], NULL, NULL);
  } else {
    group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);
  }

  /* Ensure the memory will stay around and is RO */
  for (i = 0; i < group->n_mem; i++)
    gst_memory_ref (group->mem[i]);

  if (obj->ioctl (obj->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
    GST_ERROR_OBJECT (allocator, "failed queueing buffer %i: %s",
        group->buffer.index, g_strerror (errno));

    /* Release the memory, possibly making it RW again */
    for (i = 0; i < group->n_mem; i++)
      gst_memory_unref (group->mem[i]);

    ret = FALSE;
    if (IS_QUEUED (group->buffer)) {
      GST_DEBUG_OBJECT (allocator,
          "driver pretends buffer is queued even if queue failed");
      UNSET_QUEUED (group->buffer);
    }
    goto done;
  }

  GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
      group->buffer.index, group->buffer.flags);

  if (!IS_QUEUED (group->buffer)) {
    GST_DEBUG_OBJECT (allocator,
        "driver pretends buffer is not queued even if queue succeeded");
    SET_QUEUED (group->buffer);
  }

done:
  return ret;
}
/* GObject finalize: release swapchain images, the swapchain itself, queue,
 * device, window (with its signal handlers), cached surface data, the last
 * rendered buffer, caps and the render lock, then chain up. */
static void
gst_vulkan_swapper_finalize (GObject * object)
{
  GstVulkanSwapper *swapper = GST_VULKAN_SWAPPER (object);
  int i;

  if (swapper->swap_chain_images) {
    for (i = 0; i < swapper->n_swap_chain_images; i++) {
      gst_memory_unref ((GstMemory *) swapper->swap_chain_images[i]);
      swapper->swap_chain_images[i] = NULL;
    }
    g_free (swapper->swap_chain_images);
  }
  swapper->swap_chain_images = NULL;

  if (swapper->swap_chain)
    swapper->DestroySwapchainKHR (swapper->device->device, swapper->swap_chain,
        NULL);
  swapper->swap_chain = VK_NULL_HANDLE;

  if (swapper->queue)
    gst_object_unref (swapper->queue);
  swapper->queue = NULL;

  if (swapper->device)
    gst_object_unref (swapper->device);
  swapper->device = NULL;

  /* FIX: the signal disconnects were previously done unconditionally,
   * crashing if the window was never set; guard them like the unref and
   * skip handler ids that were never connected (0). */
  if (swapper->window) {
    if (swapper->draw_id)
      g_signal_handler_disconnect (swapper->window, swapper->draw_id);
    if (swapper->close_id)
      g_signal_handler_disconnect (swapper->window, swapper->close_id);
    gst_object_unref (swapper->window);
  }
  swapper->draw_id = 0;
  swapper->close_id = 0;
  swapper->window = NULL;

  g_free (swapper->surf_present_modes);
  swapper->surf_present_modes = NULL;

  g_free (swapper->surf_formats);
  swapper->surf_formats = NULL;

  gst_buffer_replace (&swapper->current_buffer, NULL);
  gst_caps_replace (&swapper->caps, NULL);

  g_mutex_clear (&swapper->priv->render_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Free a memory group: release any remaining memories, clearing each slot
 * first, then free the group structure itself. */
static void
gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
{
  gint idx;

  for (idx = 0; idx < group->n_mem; idx++) {
    GstMemory *memory = group->mem[idx];

    group->mem[idx] = NULL;
    if (memory != NULL)
      gst_memory_unref (memory);
  }

  g_slice_free (GstV4l2MemoryGroup, group);
}
/* Unmap and release the two receive memories, clear the scatter/gather
 * vectors that pointed into them, and drop the allocator reference. */
static void
gst_udpsrc_reset_memory_allocator (GstUDPSrc * src)
{
  if (src->mem != NULL) {
    gst_memory_unmap (src->mem, &src->map);
    gst_memory_unref (src->mem);
    src->mem = NULL;
  }

  if (src->mem_max != NULL) {
    gst_memory_unmap (src->mem_max, &src->map_max);
    gst_memory_unref (src->mem_max);
    src->mem_max = NULL;
  }

  src->vec[0].buffer = NULL;
  src->vec[0].size = 0;
  src->vec[1].buffer = NULL;
  src->vec[1].size = 0;

  if (src->allocator != NULL) {
    gst_object_unref (src->allocator);
    src->allocator = NULL;
  }
}
/* Clean up after a failed group allocation. */
static void
_cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
{
  if (group->mems_allocated == 0) {
    /* Nothing was allocated: the group must go back on the free queue so
     * that _stop() can still find it. */
    gst_atomic_queue_push (allocator->free_queue, group);
    return;
  }

  /* If one or more mmap worked, we need to unref the memory, otherwise
   * they will keep a ref on the allocator and leak it. This will put back
   * the group into the free_queue */
  {
    gint idx;

    for (idx = 0; idx < group->n_mem; idx++)
      gst_memory_unref (group->mem[idx]);
  }
}
/* Allocate a @size-byte memory with the source's allocator/params and map
 * it for writing into @map.  On success *p_mem receives the memory and
 * TRUE is returned; on failure @map is zeroed and FALSE is returned. */
static gboolean
gst_udpsrc_alloc_mem (GstUDPSrc * src, GstMemory ** p_mem, GstMapInfo * map,
    gsize size)
{
  GstMemory *mem;

  mem = gst_allocator_alloc (src->allocator, size, &src->params);

  /* FIX: gst_allocator_alloc can return NULL; previously NULL was passed
   * straight to gst_memory_map/gst_memory_unref. */
  if (mem == NULL) {
    memset (map, 0, sizeof (GstMapInfo));
    return FALSE;
  }

  if (!gst_memory_map (mem, map, GST_MAP_WRITE)) {
    gst_memory_unref (mem);
    memset (map, 0, sizeof (GstMapInfo));
    return FALSE;
  }

  *p_mem = mem;
  return TRUE;
}
/* GObject finalize: release the overlay's GL memory, free its vertex
 * buffer on the GL thread, drop the context ref, then chain up. */
static void
gst_gl_composition_overlay_finalize (GObject * object)
{
  GstGLCompositionOverlay *overlay = GST_GL_COMPOSITION_OVERLAY (object);

  if (overlay->gl_memory)
    gst_memory_unref ((GstMemory *) overlay->gl_memory);

  if (overlay->context) {
    /* vertex buffer deletion must happen on the GL thread */
    gst_gl_context_thread_add (overlay->context,
        gst_gl_composition_overlay_free_vertex_buffer, overlay);
    gst_object_unref (overlay->context);
  }

  G_OBJECT_CLASS (gst_gl_composition_overlay_parent_class)->finalize (object);
}
/* Bind @buffer's EGLImage-backed memory to the display texture, keeping a
 * ref on the new memory and dropping the ref on the previous one. */
static void
update_image (APP_STATE_T * state, GstBuffer * buffer)
{
  GstMemory *image_mem = gst_buffer_peek_memory (buffer, 0);

  if (state->current_mem != NULL)
    gst_memory_unref (state->current_mem);
  state->current_mem = gst_memory_ref (image_mem);

  TRACE_VC_MEMORY_ONCE_FOR_ID ("before glEGLImageTargetTexture2DOES", gid0);

  glBindTexture (GL_TEXTURE_2D, state->tex);
  glEGLImageTargetTexture2DOES (GL_TEXTURE_2D,
      gst_egl_image_memory_get_image (image_mem));

  TRACE_VC_MEMORY_ONCE_FOR_ID ("after glEGLImageTargetTexture2DOES", gid1);
}
static gboolean _swapchain_resize (GstVulkanSwapper * swapper, GError ** error) { int i; if (!swapper->queue) { if (!_vulkan_swapper_retrieve_surface_properties (swapper, error)) { return FALSE; } } if (swapper->swap_chain_images) { for (i = 0; i < swapper->n_swap_chain_images; i++) { if (swapper->swap_chain_images[i]) gst_memory_unref ((GstMemory *) swapper->swap_chain_images[i]); } g_free (swapper->swap_chain_images); } return _allocate_swapchain (swapper, swapper->caps, error); }
/* GstAllocator free vmethod: return the buffer to OMX with OMX_FreeBuffer
 * (failure is logged but not fatal), release the underlying gralloc memory
 * and free the wrapper structure. */
static void
gst_droid_codec_gralloc_allocator_free (GstAllocator * allocator,
    GstMemory * mem)
{
  GstDroidCodecGrallocMemory *omx_mem;
  OMX_ERRORTYPE err;
  GstDroidCodecGrallocAllocator *alloc = GST_GRALLOC_ALLOCATOR (allocator);

  omx_mem = (GstDroidCodecGrallocMemory *) mem;
  err = OMX_FreeBuffer (alloc->port->comp->omx, alloc->port->def.nPortIndex,
      omx_mem->omx_buf);

  if (err != OMX_ErrorNone) {
    /* FIX: %li is the wrong format specifier for the OMX_U32 nPortIndex
     * (undefined behavior); print it as an unsigned int instead. */
    GST_ERROR_OBJECT (alloc->port->comp->parent,
        "Failed to free buffer for port %u: %s (0x%08x)",
        (guint) alloc->port->def.nPortIndex, gst_omx_error_to_string (err),
        err);
  }

  gst_memory_unref (omx_mem->gralloc);
  g_slice_free (GstDroidCodecGrallocMemory, omx_mem);
}