/* need-data signal handler: feed up to @size bytes of the mapped data into
 * the appsrc, or emit end-of-stream once everything has been consumed.
 * The data is wrapped read-only, so no copy is made. */
static void
feed_data (GstElement * appsrc, guint size, App * app)
{
  GstBuffer *buffer;
  GstFlowReturn ret;

  if (app->offset >= app->length) {
    /* nothing left: tell appsrc we are done */
    g_signal_emit_by_name (app->appsrc, "end-of-stream", &ret);
    return;
  }

  /* clamp the request so we never read past the end of the data */
  if (app->offset + size > app->length)
    size = app->length - app->offset;

  /* wrap the region [offset, offset + size) without copying it */
  buffer = gst_buffer_new ();
  gst_buffer_append_memory (buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, app->data,
          app->length, app->offset, size, NULL, NULL));

  /* offsets are required for random access */
  GST_BUFFER_OFFSET (buffer) = app->offset;
  GST_BUFFER_OFFSET_END (buffer) = app->offset + size;

  GST_DEBUG ("feed buffer %p, offset %" G_GUINT64_FORMAT "-%u", buffer,
      app->offset, size);
  g_signal_emit_by_name (app->appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  app->offset += size;
}
/* GstBufferPool::alloc_buffer implementation: reserves the next chunk of
 * the wl_shm pool, wraps it as a wl_buffer plus a GstMemory, and registers
 * the wl_buffer in the buffers map so wl_buffer.release events can be
 * matched back to the GstBuffer.
 * Returns GST_FLOW_ERROR when the shm pool is exhausted. */
static GstFlowReturn
gst_wayland_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL_CAST (pool);
  gint width, height, stride;
  gsize size;
  enum wl_shm_format format;
  gint offset;
  void *data;
  GstWlMeta *meta;

  width = GST_VIDEO_INFO_WIDTH (&self->info);
  height = GST_VIDEO_INFO_HEIGHT (&self->info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0);
  size = GST_VIDEO_INFO_SIZE (&self->info);
  format =
      gst_video_format_to_wayland_format (GST_VIDEO_INFO_FORMAT (&self->info));

  /* fix: size is a gsize (unsigned), so print it with G_GSIZE_FORMAT
   * instead of the signed G_GSSIZE_FORMAT */
  GST_DEBUG_OBJECT (self, "Allocating buffer of size %" G_GSIZE_FORMAT
      " (%d x %d, stride %d), format %s", size, width, height, stride,
      gst_wayland_format_to_string (format));

  /* try to reserve another memory block from the shm pool */
  if (self->used + size > self->size)
    goto no_buffer;

  offset = self->used;
  self->used += size;
  data = ((gchar *) self->data) + offset;

  /* create buffer and its metadata object */
  *buffer = gst_buffer_new ();
  meta = (GstWlMeta *) gst_buffer_add_meta (*buffer, GST_WL_META_INFO, NULL);
  meta->pool = self;
  meta->wbuffer = wl_shm_pool_create_buffer (self->wl_pool, offset,
      width, height, stride, format);
  meta->used_by_compositor = FALSE;

  /* configure listening to wl_buffer.release */
  g_mutex_lock (&self->buffers_map_mutex);
  g_hash_table_insert (self->buffers_map, meta->wbuffer, *buffer);
  g_mutex_unlock (&self->buffers_map_mutex);

  wl_buffer_add_listener (meta->wbuffer, &buffer_listener, self);

  /* add the allocated memory on the GstBuffer */
  gst_buffer_append_memory (*buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create buffer");
    return GST_FLOW_ERROR;
  }
}
static GstCaps * typefind_test_file (const gchar * filename) { GstBuffer *buf; GError *err = NULL; GstCaps *caps = NULL; gchar *path, *data = NULL; gsize data_len; path = g_build_filename (GST_TEST_FILES_PATH, filename, NULL); GST_LOG ("reading file '%s'", path); if (!g_file_get_contents (path, &data, &data_len, &err)) { g_error ("error loading test file: %s", err->message); } buf = gst_buffer_new (); gst_buffer_append_memory (buf, gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, (gpointer) data, data_len, 0, data_len, NULL, NULL)); GST_BUFFER_OFFSET (buf) = 0; caps = gst_type_find_helper_for_buffer (NULL, buf, NULL); fail_unless (caps != NULL); GST_LOG ("Found type: %" GST_PTR_FORMAT, caps); gst_buffer_unref (buf); g_free (data); g_free (path); return caps; }
/* Build the YUV4MPEG2 stream header for the current video info.
 * @tff selects top-field-first ('t') vs bottom-field-first ('b') for
 * interlaced content; progressive content uses 'p'.
 * Returns a buffer that owns the header string (released with g_free). */
static inline GstBuffer *
gst_y4m_encode_get_stream_header (GstY4mEncode * filter, gboolean tff)
{
  gchar *header;                /* fix: typed as a string, not gpointer */
  GstBuffer *buf;
  gchar interlaced;
  gsize len;

  if (GST_VIDEO_INFO_IS_INTERLACED (&filter->info)) {
    if (tff)
      interlaced = 't';
    else
      interlaced = 'b';
  } else {
    interlaced = 'p';
  }

  header = g_strdup_printf ("YUV4MPEG2 C%s W%d H%d I%c F%d:%d A%d:%d\n",
      filter->colorspace, GST_VIDEO_INFO_WIDTH (&filter->info),
      GST_VIDEO_INFO_HEIGHT (&filter->info), interlaced,
      GST_VIDEO_INFO_FPS_N (&filter->info),
      GST_VIDEO_INFO_FPS_D (&filter->info),
      GST_VIDEO_INFO_PAR_N (&filter->info),
      GST_VIDEO_INFO_PAR_D (&filter->info));
  len = strlen (header);

  buf = gst_buffer_new ();
  /* the wrapped memory takes ownership of the string via g_free */
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (0, header, len, 0, len, header, g_free));

  return buf;
}
/* libpng write callback: copies the @length bytes libpng produced into a
 * freshly allocated GstMemory and appends it to the encoder's output
 * buffer.  On allocation or mapping failure png_error() is invoked,
 * which longjmps out of libpng and does not return. */
static void
user_write_data (png_structp png_ptr, png_bytep data, png_uint_32 length)
{
  GstPngEnc *pngenc = (GstPngEnc *) png_get_io_ptr (png_ptr);
  GstMapInfo info;
  GstMemory *chunk;

  chunk = gst_allocator_alloc (NULL, length, NULL);
  if (chunk == NULL) {
    GST_ERROR_OBJECT (pngenc, "Failed to allocate memory");
    png_error (png_ptr, "Failed to allocate memory");
    return;                     /* never reached: png_error longjmps */
  }

  if (!gst_memory_map (chunk, &info, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (pngenc, "Failed to map memory");
    gst_memory_unref (chunk);
    png_error (png_ptr, "Failed to map memory");
    return;                     /* never reached */
  }

  memcpy (info.data, data, length);
  gst_memory_unmap (chunk, &info);

  gst_buffer_append_memory (pngenc->buffer_out, chunk);
}
/* Append one GstVlcPicturePlane memory object per picture plane to
 * @p_buffer, each sized from the pitch and line count of the template
 * picture stored in the allocator.  Always succeeds and returns true. */
bool gst_vlc_picture_plane_allocator_alloc(
        GstVlcPicturePlaneAllocator *p_allocator,
        GstBuffer *p_buffer )
{
    picture_t *p_pic = &p_allocator->pic_info;
    int i_plane;

    for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
    {
        gsize i_plane_size =
            p_pic->p[ i_plane ].i_pitch * p_pic->p[ i_plane ].i_lines;
        GstVlcPicturePlane *p_mem =
            (GstVlcPicturePlane*) g_slice_new0( GstVlcPicturePlane );

        /* maxsize equals size; no extra alignment or offset is used */
        gst_memory_init( GST_MEMORY_CAST( p_mem ), GST_MEMORY_FLAG_NO_SHARE,
                GST_ALLOCATOR_CAST( p_allocator ), NULL,
                i_plane_size, 0, 0, i_plane_size );

        gst_buffer_append_memory( p_buffer, (GstMemory*) p_mem );
    }

    return true;
}
/* Allocate a buffer of @size bytes filled according to src->filltype.
 * The backing memory is heap-allocated and owned by the buffer (released
 * with g_free).  A size of 0 yields an empty buffer. */
static GstBuffer *
gst_fake_src_alloc_buffer (GstFakeSrc * src, guint size)
{
  GstBuffer *buf = gst_buffer_new ();
  gpointer mem;

  if (size == 0)
    return buf;

  switch (src->filltype) {
    case FAKE_SRC_FILLTYPE_NOTHING:
      /* intentionally left uninitialized */
      mem = g_malloc (size);
      break;
    case FAKE_SRC_FILLTYPE_ZERO:
      mem = g_malloc0 (size);
      break;
    case FAKE_SRC_FILLTYPE_RANDOM:
    case FAKE_SRC_FILLTYPE_PATTERN:
    case FAKE_SRC_FILLTYPE_PATTERN_CONT:
    default:
      mem = g_malloc (size);
      gst_fake_src_prepare_buffer (src, mem, size);
      break;
  }

  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (0, mem, size, 0, size, mem, g_free));

  return buf;
}
/* Wrap every (possibly non-contiguous) data region of a CMBlockBuffer as
 * a zero-copy GstMemory appended to @buf.  The CMBlockBuffer is retained
 * once per region so it stays alive for each wrapped memory's lifetime.
 * Returns FALSE if a data pointer cannot be obtained. */
static gboolean
gst_core_media_buffer_wrap_block_buffer (GstBuffer * buf,
    CMBlockBufferRef block_buf)
{
  size_t pos = 0, chunk_len, total_len;
  gchar *chunk_data = NULL;
  OSStatus status;

  /* iterate all memory blocks of the CMBlockBuffer */
  do {
    status = CMBlockBufferGetDataPointer (block_buf, pos, &chunk_len,
        &total_len, &chunk_data);
    if (status != kCMBlockBufferNoErr)
      return FALSE;

    gst_buffer_append_memory (buf,
        gst_memory_new_wrapped (0, chunk_data, chunk_len, 0, chunk_len,
            (gpointer) CFRetain (block_buf), (GDestroyNotify) CFRelease));

    pos += chunk_len;
  } while (pos < total_len);

  return TRUE;
}
/* This method is called by the need-data signal callback, we feed data into the
 * appsrc. Pushes at most CHUNK_SIZE bytes per call; emits end-of-stream when
 * all of app->data has been fed. */
static void
feed_data (GstElement * appsrc, guint size, App * app)
{
  GstBuffer *buffer;
  guint len;
  GstFlowReturn ret;

  if (app->offset >= app->length) {
    /* we are EOS, send end-of-stream */
    g_signal_emit_by_name (app->appsrc, "end-of-stream", &ret);
    return;
  }

  /* read the next chunk */
  buffer = gst_buffer_new ();

  /* feed at most CHUNK_SIZE bytes; the final chunk may be shorter */
  len = CHUNK_SIZE;
  if (app->offset + len > app->length)
    len = app->length - app->offset;

  /* zero-copy: wrap [offset, offset + len) of the data read-only */
  gst_buffer_append_memory (buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, app->data,
          app->length, app->offset, len, NULL, NULL));

  GST_DEBUG ("feed buffer %p, offset %" G_GUINT64_FORMAT "-%u", buffer,
      app->offset, len);
  g_signal_emit_by_name (app->appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  app->offset += len;
  return;
}
/* Attach @framebuffer to @buffer: fills in the buffer's VPU and physical
 * memory metadata (physical address and x/y padding derived from the
 * aligned frame size) and replaces the buffer's memory with the
 * framebuffer's GstMemory.  Returns FALSE if any required metadata
 * (video / VPU / phys-mem) is missing from the buffer. */
gboolean gst_imx_vpu_framebuffer_array_set_framebuffer_in_gstbuffer(GstImxVpuFramebufferArray *framebuffer_array, GstBuffer *buffer, ImxVpuFramebuffer *framebuffer)
{
	GstVideoMeta *video_meta;
	GstImxVpuFramebufferMeta *vpu_meta;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxPhysMemory *memory;

	/* all three metas must already be present on the buffer */
	video_meta = gst_buffer_get_video_meta(buffer);
	if (video_meta == NULL)
	{
		GST_ERROR("buffer with pointer %p has no video metadata", (gpointer)buffer);
		return FALSE;
	}

	vpu_meta = GST_IMX_VPU_FRAMEBUFFER_META_GET(buffer);
	if (vpu_meta == NULL)
	{
		GST_ERROR("buffer with pointer %p has no VPU metadata", (gpointer)buffer);
		return FALSE;
	}

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(buffer);
	if (phys_mem_meta == NULL)
	{
		GST_ERROR("buffer with pointer %p has no phys mem metadata", (gpointer)buffer);
		return FALSE;
	}

	{
		gsize x_padding = 0, y_padding = 0;

		/* padding = difference between the aligned and the visible frame size */
		if (framebuffer_array->framebuffer_sizes.aligned_frame_width > video_meta->width)
			x_padding = framebuffer_array->framebuffer_sizes.aligned_frame_width - video_meta->width;
		if (framebuffer_array->framebuffer_sizes.aligned_frame_height > video_meta->height)
			y_padding = framebuffer_array->framebuffer_sizes.aligned_frame_height - video_meta->height;

		vpu_meta->framebuffer = framebuffer;

		phys_mem_meta->phys_addr = (gst_imx_phys_addr_t)imx_vpu_dma_buffer_get_physical_address(framebuffer->dma_buffer);
		phys_mem_meta->x_padding = x_padding;
		phys_mem_meta->y_padding = y_padding;

		GST_LOG("setting phys mem meta for buffer with pointer %p: phys addr %" GST_IMX_PHYS_ADDR_FORMAT " x/y padding %" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT, (gpointer)buffer, phys_mem_meta->phys_addr, phys_mem_meta->x_padding, phys_mem_meta->y_padding);
	}

	memory = gst_imx_vpu_framebuffer_array_get_gst_phys_memory(framebuffer);

	/* remove any existing memory blocks */
	gst_buffer_remove_all_memory(buffer);
	/* and append the new memory block
	 * the memory is ref'd to prevent deallocation when it is later removed
	 * (either because this function is called again, or because the buffer
	 * is deallocated); refcount is 1 already at this point, since the memory
	 * is ref'd inside the framebuffer array, and unref'd when the array is
	 * shut down */
	gst_buffer_append_memory(buffer, gst_memory_ref((GstMemory *)memory));

	return TRUE;
}
/* Upload the composition overlay's premultiplied-ARGB pixels into a GL
 * texture.  On success overlay->gl_memory and overlay->texture_id refer
 * to the uploaded texture; on failure a warning is logged and the
 * overlay keeps no texture. */
static void
gst_gl_composition_overlay_upload (GstGLCompositionOverlay * overlay,
    GstBuffer * buf)
{
  GstGLMemory *comp_gl_memory = NULL;
  GstBuffer *comp_buffer = NULL;
  GstBuffer *overlay_buffer = NULL;
  GstVideoInfo vinfo;
  GstVideoMeta *vmeta;
  GstVideoFrame *comp_frame;
  GstVideoFrame gl_frame;

  comp_buffer =
      gst_video_overlay_rectangle_get_pixels_unscaled_argb (overlay->rectangle,
      GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);

  /* heap-allocated so the mapped frame can outlive this call; it is
   * released by _video_frame_unmap_and_free when the wrapped GL memory
   * is destroyed */
  comp_frame = g_slice_new (GstVideoFrame);

  /* NOTE(review): vmeta is dereferenced unchecked — assumes the rectangle
   * buffer always carries a GstVideoMeta; confirm upstream guarantee */
  vmeta = gst_buffer_get_video_meta (comp_buffer);
  gst_video_info_set_format (&vinfo, vmeta->format, vmeta->width,
      vmeta->height);
  vinfo.stride[0] = vmeta->stride[0];

  if (gst_video_frame_map (comp_frame, &vinfo, comp_buffer, GST_MAP_READ)) {
    gst_gl_composition_overlay_add_transformation (overlay, buf);

    comp_gl_memory =
        gst_gl_memory_wrapped (overlay->context, GST_GL_TEXTURE_TARGET_2D,
        &comp_frame->info, 0, NULL, comp_frame->data[0], comp_frame,
        _video_frame_unmap_and_free);

    overlay_buffer = gst_buffer_new ();
    gst_buffer_append_memory (overlay_buffer, (GstMemory *) comp_gl_memory);

    /* mapping with GST_MAP_GL performs the actual texture upload */
    if (!gst_video_frame_map (&gl_frame, &comp_frame->info, overlay_buffer,
            GST_MAP_READ | GST_MAP_GL)) {
      gst_buffer_unref (overlay_buffer);
      _video_frame_unmap_and_free (comp_frame);
      GST_WARNING_OBJECT (overlay, "Cannot upload overlay texture");
      return;
    }

    /* keep a ref on the GL memory beyond the temporary buffer's lifetime */
    gst_memory_ref ((GstMemory *) comp_gl_memory);
    overlay->gl_memory = comp_gl_memory;
    overlay->texture_id = comp_gl_memory->tex_id;

    gst_buffer_unref (overlay_buffer);
    gst_video_frame_unmap (&gl_frame);

    GST_DEBUG ("uploaded overlay texture %d", overlay->texture_id);
  } else {
    /* mapping failed: only the frame struct itself needs freeing */
    g_slice_free (GstVideoFrame, comp_frame);
  }
}
/* Test-source handoff callback: fills @buffer with one second of 48 kHz
 * mono float32 samples (-1.0 for even source index n, +1.0 for odd) and
 * sets per-source caps with a channel position derived from n.
 * @user_data carries the source index n. */
static void
src_handoff_float32 (GstElement * element, GstBuffer * buffer, GstPad * pad,
    gboolean interleaved, gpointer user_data)
{
  gint n = GPOINTER_TO_INT (user_data);
  gfloat *data;
  gint i;
  gsize size;
  GstCaps *caps;
  guint64 mask;
  GstAudioChannelPosition pos;

  fail_unless (gst_buffer_is_writable (buffer));

  /* sources 0-2 claim front-left, source 3 front-right, others invalid */
  switch (n) {
    case 0:
    case 1:
    case 2:
      pos = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
      break;
    case 3:
      pos = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
      break;
    default:
      pos = GST_AUDIO_CHANNEL_POSITION_INVALID;
      break;
  }
  mask = G_GUINT64_CONSTANT (1) << pos;

  caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, GST_AUDIO_NE (F32),
      "channels", G_TYPE_INT, 1,
      "layout", G_TYPE_STRING, interleaved ? "interleaved" : "non-interleaved",
      "channel-mask", GST_TYPE_BITMASK, mask,
      "rate", G_TYPE_INT, 48000, NULL);

  gst_pad_set_caps (pad, caps);
  gst_caps_unref (caps);

  /* one second of samples; ownership of data passes to the buffer (g_free) */
  size = 48000 * sizeof (gfloat);
  data = g_malloc (size);
  for (i = 0; i < 48000; i++)
    data[i] = (n % 2 == 0) ? -1.0 : 1.0;

  gst_buffer_append_memory (buffer,
      gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));

  GST_BUFFER_OFFSET (buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_DURATION (buffer) = GST_SECOND;
}
/* Wrap a static, NUL-terminated string in a read-only GstBuffer without
 * copying it.  @s must stay alive for the buffer's entire lifetime. */
static GstBuffer *
buffer_from_static_string (const gchar * s)
{
  gsize n = strlen (s);
  GstBuffer *wrapped = gst_buffer_new ();

  gst_buffer_append_memory (wrapped,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, (gpointer) s, n,
          0, n, NULL, NULL));

  return wrapped;
}
/* Run the input textures through the color converter and expose the
 * resulting RGBA texture id in upload->out_tex.  Returns FALSE if
 * initialization or mapping of the converted output fails. */
static gboolean
_upload_memory (GstGLUpload * upload)
{
  guint in_width, in_height;
  guint in_texture[GST_VIDEO_MAX_PLANES];
  GstBuffer *inbuf;
  GstVideoFrame out_frame;
  GstVideoInfo out_info;
  gint i;

  in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
  in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

  /* lazy one-time initialization of the upload machinery */
  if (!upload->initted) {
    if (!_init_upload (upload)) {
      return FALSE;
    }
  }

  /* collect the per-plane input textures into a temporary buffer; each
   * memory is ref'd so the unref of inbuf below does not destroy them */
  inbuf = gst_buffer_new ();
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
    in_texture[i] = upload->in_tex[i]->tex_id;
    gst_buffer_append_memory (inbuf,
        gst_memory_ref ((GstMemory *) upload->in_tex[i]));
  }

  GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
      in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

  upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
  gst_buffer_unref (inbuf);

  gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
      in_height);

  if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
          GST_MAP_READ | GST_MAP_GL)) {
    gst_buffer_unref (upload->priv->outbuf);
    upload->priv->outbuf = NULL;
    return FALSE;
  }

  /* a GL-mapped RGBA frame exposes the texture id through data[0] */
  upload->out_tex->tex_id = *(guint *) out_frame.data[0];

  gst_video_frame_unmap (&out_frame);
  upload->priv->released = FALSE;

  return TRUE;
}
/* Build the per-frame "FRAME\n" header buffer for the Y4M stream.
 * Returns a buffer that owns the header string (released with g_free). */
static inline GstBuffer *
gst_y4m_encode_get_frame_header (GstY4mEncode * filter)
{
  gchar *header;                /* fix: typed as a string, not gpointer */
  GstBuffer *buf;
  gsize len;

  /* fix: the header is a fixed string, so a plain g_strdup is enough —
   * no printf-style format expansion is needed */
  header = g_strdup ("FRAME\n");
  len = strlen (header);

  buf = gst_buffer_new ();
  /* the wrapped memory takes ownership of the string via g_free */
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (0, header, len, 0, len, header, g_free));

  return buf;
}
/* Download the contents of the input GL texture into the caller-provided
 * per-plane destination pointers in @data, by running the texture through
 * the color converter and mapping each output plane for reading while its
 * data pointer is temporarily swapped to the destination.
 * Returns FALSE on initialization, conversion or mapping failure. */
static gboolean
_do_download (GstGLDownload * download, guint texture_id,
    gpointer data[GST_VIDEO_MAX_PLANES])
{
  guint out_width, out_height;
  GstBuffer *inbuf, *outbuf;
  GstMapInfo map_info;
  gboolean ret = TRUE;
  gint i;

  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (!download->initted) {
    if (!_init_download (download))
      return FALSE;
  }

  GST_TRACE ("doing download of texture:%u (%ux%u)",
      download->priv->in_tex[0]->tex_id, out_width, out_height);

  inbuf = gst_buffer_new ();
  gst_buffer_append_memory (inbuf,
      gst_memory_ref ((GstMemory *) download->priv->in_tex[0]));

  outbuf = gst_gl_color_convert_perform (download->convert, inbuf);
  if (!outbuf) {
    /* fix: release the intermediate buffer on failure (was leaked) */
    gst_buffer_unref (inbuf);
    return FALSE;
  }

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
    GstMemory *out_mem = gst_buffer_peek_memory (outbuf, i);
    gpointer temp_data = ((GstGLMemory *) out_mem)->data;

    /* point the GL memory at the caller's destination so the read-map
     * writes the downloaded plane straight into it */
    ((GstGLMemory *) out_mem)->data = data[i];

    if (!gst_memory_map (out_mem, &map_info, GST_MAP_READ)) {
      GST_ERROR_OBJECT (download, "Failed to map memory");
      ret = FALSE;
    } else {
      /* fix: only unmap when the map actually succeeded; the old code
       * called gst_memory_unmap unconditionally with a stale map_info */
      gst_memory_unmap (out_mem, &map_info);
    }

    ((GstGLMemory *) out_mem)->data = temp_data;
  }

  gst_buffer_unref (inbuf);
  gst_buffer_unref (outbuf);

  return ret;
}
/* GstBufferPool::alloc_buffer: creates a buffer backed by VdpVideo memory
 * and optionally attaches a GstVideoMeta with custom map/unmap hooks.
 * Returns GST_FLOW_ERROR when buffer or memory creation fails. */
static GstFlowReturn
gst_vdp_video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *buf;
  GstMemory *vdp_mem;

  info = &vdppool->info;

  if (!(buf = gst_buffer_new ()))
    goto no_buffer;

  if (!(vdp_mem = gst_vdp_video_memory_alloc (vdppool->device, info)))
    goto mem_create_failed;

  gst_buffer_append_memory (buf, vdp_mem);

  if (vdppool->add_videometa) {
    GstVideoMeta *vmeta;

    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    vmeta = gst_buffer_add_video_meta (buf, 0, GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
    vmeta->map = gst_vdp_video_memory_map;
    vmeta->unmap = gst_vdp_video_memory_unmap;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
mem_create_failed:
  {
    /* fix: release the buffer allocated above (it was leaked before) and
     * correct the error message, which was missing the word "not" */
    gst_buffer_unref (buf);
    GST_WARNING_OBJECT (pool, "Could not create GstVdpVideo Memory");
    return GST_FLOW_ERROR;
  }
}
/* Attach wayland metadata to @buffer: reserves a chunk from the sink's
 * shm pool, creates a wl_buffer over it and wraps the chunk as the
 * buffer's memory.  Returns NULL when the pool cannot be created or is
 * exhausted.
 * NOTE(review): on the NULL return paths the meta (which holds a ref on
 * the sink) has already been attached to the buffer — verify callers
 * cope with that partially-initialized state. */
static GstWlMeta *
gst_buffer_add_wayland_meta (GstBuffer * buffer, GstWaylandBufferPool * wpool)
{
  GstWlMeta *wmeta;
  GstWaylandSink *sink;
  void *data;
  gint offset;
  guint stride = 0;
  guint size = 0;

  sink = wpool->sink;

  /* assumes 4 bytes per pixel and no extra row padding */
  stride = wpool->width * 4;
  size = stride * wpool->height;

  wmeta = (GstWlMeta *) gst_buffer_add_meta (buffer, GST_WL_META_INFO, NULL);
  wmeta->sink = gst_object_ref (sink);

  /* FIXME: size calculation should be more graceful, have to consider
   * the padding */
  if (!sink->shm_pool) {
    /* pool sized for 15 frames — presumably a buffering headroom choice;
     * TODO confirm the factor */
    sink->shm_pool = shm_pool_create (sink->display, size * 15);
    shm_pool_reset (sink->shm_pool);
  }

  if (!sink->shm_pool) {
    GST_ERROR ("Failed to create shm_pool");
    return NULL;
  }

  data = shm_pool_allocate (sink->shm_pool, size, &offset);
  if (!data)
    return NULL;

  wmeta->wbuffer = wl_shm_pool_create_buffer (sink->shm_pool->pool, offset,
      sink->video_width, sink->video_height, stride, sink->format);
  wmeta->data = data;
  wmeta->size = size;

  /* wrap the shm chunk; the pool owns the underlying storage */
  gst_buffer_append_memory (buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return wmeta;
}
/* GstBufferList foreach callback: appends every GstMemory of *buf to the
 * destination buffer passed via @data.  Metadata is copied from the
 * first buffer (idx == 0) only.  Always returns TRUE to keep iterating. */
static gboolean
buffer_list_copy_data (GstBuffer ** buf, guint idx, gpointer data)
{
  GstBuffer *dest = data;
  guint n_mems, mem_idx;

  if (idx == 0)
    gst_buffer_copy_into (dest, *buf, GST_BUFFER_COPY_METADATA, 0, -1);

  n_mems = gst_buffer_n_memory (*buf);
  for (mem_idx = 0; mem_idx < n_mems; ++mem_idx)
    gst_buffer_append_memory (dest, gst_buffer_get_memory (*buf, mem_idx));

  return TRUE;
}
/* GstBufferPool::alloc_buffer: allocates one XvImage-backed buffer,
 * optionally attaching a GstVideoMeta describing the plane layout.
 * Returns GST_FLOW_ERROR when the XvImage allocation fails. */
static GstFlowReturn
xvimage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXvImageBufferPool *xvpool = GST_XVIMAGE_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info = &xvpool->info;
  GError *err = NULL;
  GstBuffer *xvimage;
  GstMemory *mem;

  xvimage = gst_buffer_new ();

  mem = gst_xvimage_allocator_alloc (xvpool->allocator, xvpool->im_format,
      info, xvpool->padded_width, xvpool->padded_height, &xvpool->crop, &err);
  if (mem == NULL) {
    gst_buffer_unref (xvimage);
    GST_WARNING_OBJECT (pool, "can't create image: %s", err->message);
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
  gst_buffer_append_memory (xvimage, mem);

  if (xvpool->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    gst_buffer_add_video_meta_full (xvimage, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
        info->offset, info->stride);
  }

  *buffer = xvimage;

  return GST_FLOW_OK;
}
/* Typefind an in-memory blob: wraps @data read-only, runs the typefind
 * helper and fills *prob with the detection probability.  Returns the
 * detected caps, which may be NULL if nothing matched. */
static GstCaps *
typefind_data (const guint8 * data, gsize data_size,
    GstTypeFindProbability * prob)
{
  GstCaps *found;
  GstBuffer *wrapper;

  GST_MEMDUMP ("typefind data", data, data_size);

  wrapper = gst_buffer_new ();
  gst_buffer_append_memory (wrapper,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, (guint8 *) data,
          data_size, 0, data_size, NULL, NULL));
  GST_BUFFER_OFFSET (wrapper) = 0;

  found = gst_type_find_helper_for_buffer (NULL, wrapper, prob);
  GST_INFO ("caps: %" GST_PTR_FORMAT ", probability=%u", found, *prob);

  gst_buffer_unref (wrapper);

  return found;
}
/* Wrap the full data range of a CMBlockBuffer as a single non-shareable
 * GstMemory on @buf.  Returns FALSE if the data pointer cannot be
 * obtained.  The block buffer is not retained here; the caller must keep
 * it alive for the lifetime of @buf. */
static gboolean
gst_core_media_buffer_wrap_block_buffer (GstBuffer * buf,
    CMBlockBufferRef block_buf)
{
  gchar *data = NULL;
  UInt32 size;

  if (CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data) != noErr)
    return FALSE;

  size = CMBlockBufferGetDataLength (block_buf);

  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data, size, 0, size,
          NULL, NULL));

  return TRUE;
}
/* GstBufferPool::acquire_buffer: dequeues the next filled capture buffer
 * from the V4L2 device (VIDIOC_DQBUF) and hands it to the caller.  The
 * buffer's memory is re-wrapped so its reported size matches the bytes
 * actually captured (vbuffer.bytesused), and its timestamp is taken from
 * the V4L2 timeval. */
static GstFlowReturn gst_imx_v4l2_buffer_pool_acquire_buffer(GstBufferPool *bpool, GstBuffer **buffer, G_GNUC_UNUSED GstBufferPoolAcquireParams *params)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	struct v4l2_buffer vbuffer;
	GstBuffer *buf;
	GstImxV4l2Meta *meta;

	if (GST_BUFFER_POOL_IS_FLUSHING(bpool))
		return GST_FLOW_FLUSHING;

	memset(&vbuffer, 0, sizeof(vbuffer));
	vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vbuffer.memory = V4L2_MEMORY_MMAP;

	/* dequeue the next filled capture buffer from the driver */
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_DQBUF, &vbuffer) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_DQBUF failed: %s", g_strerror(errno));
		return GST_FLOW_ERROR;
	}

	buf = pool->buffers[vbuffer.index];
	GST_DEBUG_OBJECT(pool, "dqbuf %u %p", vbuffer.index, (gpointer)buf);
	/* mark the slot as in-flight until the buffer is released back */
	pool->buffers[vbuffer.index] = NULL;
	g_assert(buf);
	meta = GST_IMX_V4L2_META_GET(buf);
	g_assert(meta);

	/* replace the memory so the buffer size reflects bytesused */
	gst_buffer_remove_all_memory(buf);
	gst_buffer_append_memory(buf, gst_memory_new_wrapped(0, meta->mem, meta->vbuffer.length, 0, vbuffer.bytesused, NULL, NULL));

	GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(vbuffer.timestamp);

	*buffer = buf;
	return GST_FLOW_OK;
}
/* GstBufferPool::alloc_buffer: allocates one XImage-backed buffer,
 * optionally attaching a GstVideoMeta describing the plane layout.
 * Returns GST_FLOW_ERROR when the XImage allocation fails. */
static GstFlowReturn
ximage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXImageBufferPool *xpool = GST_XIMAGE_BUFFER_POOL_CAST (pool);
  GstXImageBufferPoolPrivate *priv = xpool->priv;
  GstVideoInfo *info = &priv->info;
  GstBuffer *ximage;
  GstMemory *mem;

  ximage = gst_buffer_new ();

  mem = ximage_memory_alloc (xpool);
  if (mem == NULL) {
    gst_buffer_unref (ximage);
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
  gst_buffer_append_memory (ximage, mem);

  if (priv->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* defaults: no frame flags, layout straight from the video info */
    gst_buffer_add_video_meta_full (ximage, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
        GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
        info->offset, info->stride);
  }

  *buffer = ximage;

  return GST_FLOW_OK;
}
/* GstBaseSink::render: sends @buf over the shared-memory pipe.  If the
 * buffer's memory did not come from our own allocator (or spans more
 * than one GstMemory), its contents are copied into a freshly allocated
 * shm block first.  Blocks (on self->cond, holding the object lock) for
 * client connections, render permission and free shm space; every wait
 * honors base-sink unlock/preroll semantics. */
static GstFlowReturn
gst_shm_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstShmSink *self = GST_SHM_SINK (bsink);
  int rv = 0;
  GstMapInfo map;
  gboolean need_new_memory = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMemory *memory = NULL;
  GstBuffer *sendbuf = NULL;
  gsize written_bytes;

  GST_OBJECT_LOCK (self);

  /* optionally wait until at least one client is connected */
  while (self->wait_for_connection && !self->clients) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      /* drop the lock while handling preroll/unlock */
      GST_OBJECT_UNLOCK (self);
      ret = gst_base_sink_wait_preroll (bsink);
      if (ret == GST_FLOW_OK)
        GST_OBJECT_LOCK (self);
      else
        return ret;
    }
  }

  /* wait until rendering this timestamp is allowed */
  while (!gst_shm_sink_can_render (self, GST_BUFFER_TIMESTAMP (buf))) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      GST_OBJECT_UNLOCK (self);
      ret = gst_base_sink_wait_preroll (bsink);
      if (ret == GST_FLOW_OK)
        GST_OBJECT_LOCK (self);
      else
        return ret;
    }
  }

  /* decide whether the buffer can be sent zero-copy: it must consist of
   * exactly one GstMemory allocated by our shm allocator */
  if (gst_buffer_n_memory (buf) > 1) {
    GST_LOG_OBJECT (self, "Buffer %p has %d GstMemory, we only support a single"
        " one, need to do a memcpy", buf, gst_buffer_n_memory (buf));
    need_new_memory = TRUE;
  } else {
    memory = gst_buffer_peek_memory (buf, 0);
    if (memory->allocator != GST_ALLOCATOR (self->allocator)) {
      need_new_memory = TRUE;
      GST_LOG_OBJECT (self, "Memory in buffer %p was not allocated by "
          "%" GST_PTR_FORMAT ", will memcpy", buf, memory->allocator);
    }
  }

  if (need_new_memory) {
    if (gst_buffer_get_size (buf) > sp_writer_get_max_buf_size (self->pipe)) {
      gsize area_size = sp_writer_get_max_buf_size (self->pipe);
      GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT, (NULL),
          ("Shared memory area of size %" G_GSIZE_FORMAT " is smaller than"
              "buffer of size %" G_GSIZE_FORMAT, area_size,
              gst_buffer_get_size (buf)));
      goto error;
    }

    /* wait for a free shm block big enough for the copy */
    while ((memory = gst_shm_sink_allocator_alloc_locked (self->allocator,
                gst_buffer_get_size (buf), &self->params)) == NULL) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        GST_OBJECT_UNLOCK (self);
        ret = gst_base_sink_wait_preroll (bsink);
        if (ret == GST_FLOW_OK)
          GST_OBJECT_LOCK (self);
        else
          return ret;
      }
    }

    /* clients may have disconnected while waiting for memory */
    while (self->wait_for_connection && !self->clients) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        GST_OBJECT_UNLOCK (self);
        ret = gst_base_sink_wait_preroll (bsink);
        if (ret == GST_FLOW_OK) {
          GST_OBJECT_LOCK (self);
        } else {
          /* the freshly allocated shm block must be released here */
          gst_memory_unref (memory);
          return ret;
        }
      }
    }

    if (!gst_memory_map (memory, &map, GST_MAP_WRITE)) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Failed to map memory"));
      goto error;
    }

    GST_DEBUG_OBJECT (self,
        "Copying %" G_GSIZE_FORMAT " bytes into map of size %" G_GSIZE_FORMAT
        " bytes.", gst_buffer_get_size (buf), map.size);
    written_bytes = gst_buffer_extract (buf, 0, map.data, map.size);
    GST_DEBUG_OBJECT (self, "Copied %" G_GSIZE_FORMAT " bytes.",
        written_bytes);
    gst_memory_unmap (memory, &map);

    /* build the outgoing buffer: copied metadata + the shm memory */
    sendbuf = gst_buffer_new ();
    if (!gst_buffer_copy_into (sendbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1)) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Failed to copy data into send buffer"));
      gst_buffer_unref (sendbuf);
      goto error;
    }
    gst_buffer_append_memory (sendbuf, memory);
  } else {
    /* zero-copy path: send the input buffer itself */
    sendbuf = gst_buffer_ref (buf);
  }

  if (!gst_buffer_map (sendbuf, &map, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
        ("Failed to map data into send buffer"));
    goto error;
  }

  /* Make the memory readonly as of now as we've sent it to the other side
   * We know it's not mapped for writing anywhere as we just mapped it for
   * reading */
  rv = sp_writer_send_buf (self->pipe, (char *) map.data, map.size, sendbuf);
  if (rv == -1) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
        ("Failed to send data over SHM"));
    gst_buffer_unmap (sendbuf, &map);
    goto error;
  }

  gst_buffer_unmap (sendbuf, &map);

  GST_OBJECT_UNLOCK (self);

  /* rv == 0 means no client received the buffer, so it is released now;
   * otherwise the pipe keeps the ref until the clients are done */
  if (rv == 0) {
    GST_DEBUG_OBJECT (self, "No clients connected, unreffing buffer");
    gst_buffer_unref (sendbuf);
  }

  return ret;

error:
  GST_OBJECT_UNLOCK (self);
  return GST_FLOW_ERROR;
}
/* called with the object lock held */
/* Gather one GstMemory from each sink pad into an intermediate buffer,
 * feed it through the view converter and attach per-view GstVideoMeta to
 * the converted output buffer(s) (mixer->primary_out / auxilliary_out).
 * Returns FALSE when the number of valid input views does not match the
 * mix info or when conversion fails. */
static gboolean
gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
  GstBuffer *converted_buffer, *inbuf;
  GstVideoInfo *out_info = &vagg->info;
#ifndef G_DISABLE_ASSERT
  gint n;
#endif
  gint v, views;
  gint valid_views = 0;
  GList *walk;

  inbuf = gst_buffer_new ();
  walk = GST_ELEMENT (mixer)->sinkpads;
  while (walk) {
    GstGLStereoMixPad *pad = walk->data;
    GstMemory *in_mem;

    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);

    if (!pad || !pad->current_buffer) {
      GST_DEBUG ("skipping texture, null frame");
      walk = g_list_next (walk);
      continue;
    }

    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);

    GST_LOG_OBJECT (mixer,
        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
    /* Appending the memory to a 2nd buffer locks it
     * exclusive a 2nd time, which will mark it for
     * copy-on-write. The ref will keep the memory
     * alive but we add a parent_buffer_meta to also
     * prevent the input buffer from returning to any buffer
     * pool it might belong to */
    gst_buffer_append_memory (inbuf, in_mem);
    /* Use parent buffer meta to keep input buffer alive */
    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);

    valid_views++;
    walk = g_list_next (walk);
  }

  if (mixer->mix_info.views != valid_views) {
    GST_WARNING_OBJECT (mixer, "Not enough input views to process");
    return FALSE;
  }

  /* separated multiview output produces one buffer per view */
  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
    views = out_info->views;
  else
    views = 1;

  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
          FALSE, inbuf) != GST_FLOW_OK)
    return FALSE;

  /* Clear any existing buffers, just in case */
  gst_buffer_replace (&mixer->primary_out, NULL);
  gst_buffer_replace (&mixer->auxilliary_out, NULL);

  if (gst_gl_view_convert_get_output (mixer->viewconvert,
          &mixer->primary_out) != GST_FLOW_OK)
    return FALSE;

  /* frame-by-frame mode yields a second (auxiliary) output buffer */
  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    if (gst_gl_view_convert_get_output (mixer->viewconvert,
            &mixer->auxilliary_out) != GST_FLOW_OK)
      return FALSE;
  }

  if (mixer->primary_out == NULL)
    return FALSE;

  converted_buffer = mixer->primary_out;

#ifndef G_DISABLE_ASSERT
  /* sanity check: one memory per plane per view */
  n = gst_buffer_n_memory (converted_buffer);
  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
#endif

  /* attach one video meta per output view */
  for (v = 0; v < views; v++) {
    gst_buffer_add_video_meta_full (converted_buffer, v,
        GST_VIDEO_INFO_FORMAT (out_info),
        GST_VIDEO_INFO_WIDTH (out_info),
        GST_VIDEO_INFO_HEIGHT (out_info),
        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
        out_info->stride);
    if (mixer->auxilliary_out) {
      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
          GST_VIDEO_INFO_FORMAT (out_info),
          GST_VIDEO_INFO_WIDTH (out_info),
          GST_VIDEO_INFO_HEIGHT (out_info),
          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
          out_info->stride);
    }
  }

  return TRUE;
}
/* GstVideoEncoder::handle_frame: encodes one video frame to JPEG 2000.
 * Two code paths, selected at compile time: the OpenJPEG 1.x CIO API
 * (HAVE_OPENJPEG_1) and the OpenJPEG 2.x stream API.  When self->is_jp2c
 * is set, an 8-byte 'jp2c' box header (size + fourcc) is prepended to
 * the codestream.  All error paths release the codec, image and frame
 * before returning GST_FLOW_ERROR. */
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

  /* hook up OpenJPEG logging only when tracing is enabled */
#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  /* copy the input frame into an OpenJPEG image */
  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  /* leave 8 extra bytes at the start for the jp2c box header if needed */
  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0,
      io->buffer, length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  /* encode into a growable in-memory stream via the *_fn callbacks */
  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

  /* optional 8-byte jp2c box header in its own memory chunk */
  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  /* the output buffer takes ownership of mstream.data (g_free) */
  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG data"), (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}
/* Allocate a GstBuffer backed by an EGLImage for the given format/size.
 *
 * Tries the allocator's direct EGLImage allocation first; if that fails,
 * falls back to creating a GL texture by hand and wrapping it as an
 * EGLImage.  Only GST_VIDEO_FORMAT_RGBA is supported; any other format
 * takes the error path.
 *
 * Returns a new buffer with video meta attached, or NULL on failure.
 * On failure any partially-created resources (image data, memory) are
 * released. */
static GstBuffer *
gst_egl_allocate_eglimage (APP_STATE_T * ctx, GstAllocator * allocator,
    GstVideoFormat format, gint width, gint height)
{
  GstEGLGLESImageData *data = NULL;
  GstBuffer *buffer;
  GstVideoInfo info;
  gint i;
  gint stride[3];
  gsize offset[3];
  GstMemory *mem[3] = { NULL, NULL, NULL };
  guint n_mem;
  GstMemoryFlags flags = 0;

  memset (stride, 0, sizeof (stride));
  memset (offset, 0, sizeof (offset));

  if (!gst_egl_image_memory_is_mappable ())
    flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;
  /* See https://bugzilla.gnome.org/show_bug.cgi?id=695203 */
  flags |= GST_MEMORY_FLAG_NO_SHARE;

  gst_video_info_set_format (&info, format, width, height);

  GST_DEBUG ("Allocating EGL Image format %s width %d height %d",
      gst_video_format_to_string (format), width, height);

  switch (format) {
    case GST_VIDEO_FORMAT_RGBA:{
      gsize size;
      EGLImageKHR image;

      /* First try a direct EGLImage allocation from the allocator */
      mem[0] =
          gst_egl_image_allocator_alloc (allocator, ctx->gst_display,
          GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info),
          GST_VIDEO_INFO_HEIGHT (&info), &size);
      if (mem[0]) {
        /* Allocator reports total size; derive the row stride from it */
        stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info);
        n_mem = 1;
        GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE);
      } else {
        /* Fallback: create a GL texture ourselves and wrap it as an
         * EGLImage.  `data` owns the texture until handed over to
         * gst_egl_image_allocator_wrap below. */
        data = g_slice_new0 (GstEGLGLESImageData);

        stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4);
        size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info);

        glGenTextures (1, &data->texture);
        if (got_gl_error ("glGenTextures"))
          goto mem_error;

        glBindTexture (GL_TEXTURE_2D, data->texture);
        if (got_gl_error ("glBindTexture"))
          goto mem_error;

        /* Set 2D resizing params */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        /* If these are not set the texture image unit will return
         * (R, G, B, A) = black on glTexImage2D for non-POT width/height
         * frames.  For a deeper explanation take a look at the OpenGL ES
         * documentation for glTexParameter */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        if (got_gl_error ("glTexParameteri"))
          goto mem_error;

        /* Allocate texture storage; NULL data = uninitialized contents */
        glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA,
            GST_VIDEO_INFO_WIDTH (&info),
            GST_VIDEO_INFO_HEIGHT (&info), 0, GL_RGBA, GL_UNSIGNED_BYTE,
            NULL);
        if (got_gl_error ("glTexImage2D"))
          goto mem_error;

        image =
            eglCreateImageKHR (gst_egl_display_get (ctx->gst_display),
            ctx->context, EGL_GL_TEXTURE_2D_KHR,
            (EGLClientBuffer) (guintptr) data->texture, NULL);
        if (got_egl_error ("eglCreateImageKHR"))
          goto mem_error;

        /* Ownership of `data` passes to the wrapped memory from here on */
        mem[0] =
            gst_egl_image_allocator_wrap (allocator, ctx->gst_display,
            image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, flags, size, data, NULL);
        n_mem = 1;
      }
    }
      break;
    default:
      goto mem_error;
      break;
  }

  buffer = gst_buffer_new ();
  gst_buffer_add_video_meta_full (buffer, 0, format, width, height,
      GST_VIDEO_INFO_N_PLANES (&info), offset, stride);

  /* n_mem could be reused for planar colorspaces, for now its == 1 for RGBA */
  for (i = 0; i < n_mem; i++)
    gst_buffer_append_memory (buffer, mem[i]);

  return buffer;

mem_error:
  {
    GST_ERROR ("Failed to create EGLImage");

    if (data)
      gst_egl_gles_image_data_free (data);

    if (mem[0])
      gst_memory_unref (mem[0]);

    return NULL;
  }
}
/* Wrap a CoreMedia sample buffer as a GstBuffer without copying.
 *
 * Picks the backing storage in this order:
 *   1. a CVPixelBuffer image buffer (locked read-only; planar or packed),
 *   2. a CMBlockBuffer data buffer,
 *   3. otherwise fails.
 * A GstCoreMediaMeta is attached that retains the sample buffer (and
 * records the image/pixel/block buffers) so the wrapped pointer stays
 * valid for the GstBuffer's lifetime; the meta's free function is
 * expected to drop those refs and unlock the pixel buffer.
 *
 * Returns a new GstBuffer, or NULL on failure.
 *
 * NOTE(review): for planar pixel buffers the code sums
 * bytes-per-row * height over all planes but wraps memory starting at
 * plane 0's base address — this assumes the planes are laid out
 * contiguously in memory; verify against the producers used here. */
GstBuffer *
gst_core_media_buffer_new (GstCoreMediaCtx * ctx, CMSampleBufferRef sample_buf)
{
  GstCVApi *cv = ctx->cv;
  GstCMApi *cm = ctx->cm;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  CMBlockBufferRef block_buf;
  Byte *data = NULL;
  UInt32 size;
  OSStatus status;
  GstBuffer *buf;
  GstCoreMediaMeta *meta;

  image_buf = cm->CMSampleBufferGetImageBuffer (sample_buf);
  pixel_buf = NULL;
  block_buf = cm->CMSampleBufferGetDataBuffer (sample_buf);

  if (image_buf != NULL &&
      CFGetTypeID (image_buf) == cv->CVPixelBufferGetTypeID ()) {
    pixel_buf = (CVPixelBufferRef) image_buf;

    /* Lock so the base address stays valid while the GstBuffer is alive */
    if (cv->CVPixelBufferLockBaseAddress (pixel_buf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }

    if (cv->CVPixelBufferIsPlanar (pixel_buf)) {
      gint plane_count, plane_idx;

      data = cv->CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);

      /* Total size = sum of each plane's row stride * height */
      size = 0;
      plane_count = cv->CVPixelBufferGetPlaneCount (pixel_buf);
      for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
        size += cv->CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
            cv->CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
      }
    } else {
      data = cv->CVPixelBufferGetBaseAddress (pixel_buf);
      size = cv->CVPixelBufferGetBytesPerRow (pixel_buf) *
          cv->CVPixelBufferGetHeight (pixel_buf);
    }
  } else if (block_buf != NULL) {
    /* Non-image sample: expose the block buffer's data directly */
    status = cm->CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
    if (status != noErr)
      goto error;
    size = cm->CMBlockBufferGetDataLength (block_buf);
  } else {
    /* Neither an image buffer nor a block buffer: nothing to wrap */
    goto error;
  }

  buf = gst_buffer_new ();

  /* Meta keeps the CoreMedia objects alive as long as the buffer lives */
  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
      gst_core_media_meta_get_info (), NULL);
  meta->ctx = g_object_ref (ctx);
  meta->sample_buf = cm->FigSampleBufferRetain (sample_buf);
  meta->image_buf = image_buf;
  meta->pixel_buf = pixel_buf;
  meta->block_buf = block_buf;

  /* Zero-copy wrap: no destroy notify — the meta owns the backing store */
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return buf;

error:
  return NULL;
}
/* appsrc "need-data" handler for the test: pushes the whole dummytext
 * payload, wrapped read-only and zero-copy in a single buffer, and
 * checks that appsrc accepted the push.  The requested size and user
 * data are ignored — everything is sent at once. */
static void
src_need_data_cb (GstElement * src, guint size, gpointer data)
{
  GstFlowReturn flow_ret;
  GstMemory *mem;
  GstBuffer *buffer = gst_buffer_new ();

  /* Wrap the static text; no destroy notify since we don't own it */
  mem = gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY,
      (gpointer) dummytext, sizeof (dummytext), 0, sizeof (dummytext),
      NULL, NULL);
  gst_buffer_append_memory (buffer, mem);
  GST_BUFFER_OFFSET (buffer) = 0;

  g_signal_emit_by_name (src, "push-buffer", buffer, &flow_ret);
  gst_buffer_unref (buffer);

  fail_unless (flow_ret == GST_FLOW_OK);
}