/**
 * gst_dmabuf_memory_get_fd:
 * @mem: the memory to get the file descriptor from
 *
 * Return the file descriptor associated with @mem.
 *
 * Returns: the file descriptor associated with the memory, or -1
 *
 * Since: 1.2
 */
gint
gst_dmabuf_memory_get_fd (GstMemory * mem)
{
  GstDmaBufMemory *dbmem = (GstDmaBufMemory *) mem;

  g_return_val_if_fail (gst_is_dmabuf_memory (mem), -1);

  return dbmem->fd;
}
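/* A minimal usage sketch (not part of the original source): assuming
 * "buf" is a GstBuffer whose first GstMemory may be dmabuf-backed,
 * this is how the fd would typically be retrieved by a caller. */
static gint
peek_first_dmabuf_fd (GstBuffer * buf)
{
  GstMemory *mem;

  if (gst_buffer_n_memory (buf) < 1)
    return -1;

  mem = gst_buffer_peek_memory (buf, 0);
  if (!gst_is_dmabuf_memory (mem))
    return -1;

  /* The fd is still owned by the GstMemory; dup() it if it must
   * outlive the memory. */
  return gst_dmabuf_memory_get_fd (mem);
}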
static gboolean
is_dma_buffer (GstBuffer * buf)
{
  GstMemory *mem;

  if (gst_buffer_n_memory (buf) < 1)
    return FALSE;

  mem = gst_buffer_peek_memory (buf, 0);
  if (!mem || !gst_is_dmabuf_memory (mem))
    return FALSE;

  return TRUE;
}
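/* A hedged sketch (not in the original snippet) of how a dmabuf-backed
 * GstBuffer that passes the check above could be constructed.  The
 * "dmabuf_fd" is assumed to come from some exporter (GPU or V4L2 driver). */
static GstBuffer *
wrap_dmabuf_fd (gint dmabuf_fd, gsize size)
{
  GstAllocator *alloc = gst_dmabuf_allocator_new ();
  GstBuffer *buf = gst_buffer_new ();

  /* gst_dmabuf_allocator_alloc() takes ownership of the fd. */
  gst_buffer_append_memory (buf,
      gst_dmabuf_allocator_alloc (alloc, dmabuf_fd, size));
  gst_object_unref (alloc);

  return buf;
}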
static gboolean
gst_v4l2_is_buffer_valid (GstBuffer * buffer, GstV4l2MemoryGroup ** out_group)
{
  GstMemory *mem = gst_buffer_peek_memory (buffer, 0);
  gboolean valid = FALSE;

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY))
    goto done;

  if (gst_is_dmabuf_memory (mem))
    mem = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
        GST_V4L2_MEMORY_QUARK);

  if (mem && gst_is_v4l2_memory (mem)) {
    GstV4l2Memory *vmem = (GstV4l2Memory *) mem;
    GstV4l2MemoryGroup *group = vmem->group;
    gint i;

    if (group->n_mem != gst_buffer_n_memory (buffer))
      goto done;

    for (i = 0; i < group->n_mem; i++) {
      if (group->mem[i] != gst_buffer_peek_memory (buffer, i))
        goto done;

      if (!gst_memory_is_writable (group->mem[i]))
        goto done;
    }

    valid = TRUE;
    if (out_group)
      *out_group = group;
  }

done:
  return valid;
}
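/* A hedged illustration (not in the original file) of the qdata link the
 * check above relies on: when a V4L2 memory is exported as dmabuf, the
 * exporting code attaches the original GstV4l2Memory to the dmabuf wrapper
 * so it can be recovered later.  The exact export path is an assumption;
 * only the quark name mirrors the snippet. */
static void
link_dmabuf_to_v4l2_memory (GstMemory * dmabuf_mem, GstMemory * v4l2_mem)
{
  gst_mini_object_set_qdata (GST_MINI_OBJECT (dmabuf_mem),
      GST_V4L2_MEMORY_QUARK, v4l2_mem, NULL);
}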
static MsdkSurface *
get_msdk_surface_from_input_buffer (GstMsdkVPP * thiz, GstBuffer * inbuf)
{
  GstVideoFrame src_frame, out_frame;
  MsdkSurface *msdk_surface;
  GstMemory *mem = NULL;

  if (gst_msdk_is_msdk_buffer (inbuf)) {
    msdk_surface = g_slice_new0 (MsdkSurface);
    msdk_surface->surface = gst_msdk_get_surface_from_buffer (inbuf);
    msdk_surface->buf = gst_buffer_ref (inbuf);
    return msdk_surface;
  }

  /* If upstream hasn't accepted the proposed msdk bufferpool,
   * just copy the frame (if not dmabuf-backed) to an msdk buffer and
   * take a surface from it. */
  if (!(msdk_surface =
          get_surface_from_pool (thiz, thiz->sinkpad_buffer_pool, NULL)))
    goto error;

#ifndef _WIN32
  /************ dmabuf-import ************* */
  /* If upstream provided dmabuf-backed memory, but not an msdk
   * buffer, we can export the dmabuf to the underlying VA surface. */
  mem = gst_buffer_peek_memory (inbuf, 0);
  if (gst_is_dmabuf_memory (mem)) {
    if (import_dmabuf_to_msdk_surface (thiz, inbuf, msdk_surface))
      return msdk_surface;
    else
      GST_INFO_OBJECT (thiz, "Upstream dmabuf-backed memory is not imported "
          "to the msdk surface, falling back to copying the input frame");
  }
#endif

  if (!gst_video_frame_map (&src_frame, &thiz->sinkpad_info, inbuf,
          GST_MAP_READ)) {
    GST_ERROR_OBJECT (thiz, "failed to map the frame for source");
    goto error;
  }

  if (!gst_video_frame_map (&out_frame, &thiz->sinkpad_buffer_pool_info,
          msdk_surface->buf, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (thiz, "failed to map the frame for destination");
    gst_video_frame_unmap (&src_frame);
    goto error;
  }

  if (!gst_video_frame_copy (&out_frame, &src_frame)) {
    GST_ERROR_OBJECT (thiz, "failed to copy frame");
    gst_video_frame_unmap (&out_frame);
    gst_video_frame_unmap (&src_frame);
    goto error;
  }

  gst_video_frame_unmap (&out_frame);
  gst_video_frame_unmap (&src_frame);

  return msdk_surface;

error:
  return NULL;
}
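/* A minimal, self-contained sketch (not in the original source) of the
 * map-and-copy fallback used above: both buffers are mapped as video frames
 * described by their GstVideoInfo and copied plane by plane with public
 * GstVideo API only. */
static gboolean
copy_video_buffer (GstVideoInfo * in_info, GstBuffer * inbuf,
    GstVideoInfo * out_info, GstBuffer * outbuf)
{
  GstVideoFrame in_frame, out_frame;
  gboolean ret;

  if (!gst_video_frame_map (&in_frame, in_info, inbuf, GST_MAP_READ))
    return FALSE;

  if (!gst_video_frame_map (&out_frame, out_info, outbuf, GST_MAP_WRITE)) {
    gst_video_frame_unmap (&in_frame);
    return FALSE;
  }

  ret = gst_video_frame_copy (&out_frame, &in_frame);

  gst_video_frame_unmap (&out_frame);
  gst_video_frame_unmap (&in_frame);

  return ret;
}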
gboolean
gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
{
  GstV4l2Memory *mem;
  gint i;

  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);

  if (group->n_mem != n_mem)
    goto n_mem_mismatch;

  for (i = 0; i < group->n_mem; i++) {
    gint dmafd;
    gsize size, offset, maxsize;

    if (!gst_is_dmabuf_memory (dma_mem[i]))
      goto not_dmabuf;

    size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);

    if ((dmafd = dup (gst_dmabuf_memory_get_fd (dma_mem[i]))) < 0)
      goto dup_failed;

    GST_LOG_OBJECT (allocator, "imported DMABUF as fd %i plane %d", dmafd, i);

    mem = (GstV4l2Memory *) group->mem[i];

    /* Update memory */
    mem->mem.maxsize = maxsize;
    mem->mem.offset = offset;
    mem->mem.size = size;
    mem->dmafd = dmafd;

    /* Update v4l2 structure */
    group->planes[i].length = maxsize;
    group->planes[i].bytesused = size;
    group->planes[i].m.fd = dmafd;
    group->planes[i].data_offset = offset;
  }

  /* Copy into buffer structure if not using planes */
  if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    group->buffer.bytesused = group->planes[0].bytesused;
    group->buffer.length = group->planes[0].length;
    group->buffer.m.fd = group->planes[0].m.fd;
  } else {
    group->buffer.length = group->n_mem;
  }

  return TRUE;

n_mem_mismatch:
  {
    GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
        group->n_mem);
    return FALSE;
  }
not_dmabuf:
  {
    GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF type", i);
    return FALSE;
  }
dup_failed:
  {
    GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
        g_strerror (errno));
    return FALSE;
  }
}
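/* A hedged usage sketch (not in the original file): gathering the per-plane
 * dmabuf memories of a GstBuffer into the dma_mem[] array that
 * gst_v4l2_allocator_import_dmabuf() expects.  "allocator" and "group" are
 * assumed to come from the surrounding V4L2 buffer pool code. */
static gboolean
import_buffer_dmabufs (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group, GstBuffer * buf)
{
  GstMemory *dma_mem[GST_VIDEO_MAX_PLANES];
  guint i, n_mem = gst_buffer_n_memory (buf);

  if (n_mem > GST_VIDEO_MAX_PLANES)
    return FALSE;

  for (i = 0; i < n_mem; i++)
    dma_mem[i] = gst_buffer_peek_memory (buf, i);

  return gst_v4l2_allocator_import_dmabuf (allocator, group, n_mem, dma_mem);
}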
static gboolean
gst_kms_sink_import_dmabuf (GstKMSSink * self, GstBuffer * inbuf,
    GstBuffer ** outbuf)
{
  gint prime_fds[GST_VIDEO_MAX_PLANES] = { 0, };
  GstVideoMeta *meta;
  guint i, n_mem, n_planes;
  GstKMSMemory *kmsmem;
  guint mems_idx[GST_VIDEO_MAX_PLANES];
  gsize mems_skip[GST_VIDEO_MAX_PLANES];
  GstMemory *mems[GST_VIDEO_MAX_PLANES];

  if (!self->has_prime_import)
    return FALSE;

  /* This will eliminate most non-dmabuf out there */
  if (!gst_is_dmabuf_memory (gst_buffer_peek_memory (inbuf, 0)))
    return FALSE;

  n_planes = GST_VIDEO_INFO_N_PLANES (&self->vinfo);
  n_mem = gst_buffer_n_memory (inbuf);
  meta = gst_buffer_get_video_meta (inbuf);

  GST_TRACE_OBJECT (self, "Found a dmabuf with %u planes and %u memories",
      n_planes, n_mem);

  /* We cannot have multiple dmabuf per plane */
  if (n_mem > n_planes)
    return FALSE;

  /* Update video info based on video meta */
  if (meta) {
    GST_VIDEO_INFO_WIDTH (&self->vinfo) = meta->width;
    GST_VIDEO_INFO_HEIGHT (&self->vinfo) = meta->height;

    for (i = 0; i < meta->n_planes; i++) {
      GST_VIDEO_INFO_PLANE_OFFSET (&self->vinfo, i) = meta->offset[i];
      GST_VIDEO_INFO_PLANE_STRIDE (&self->vinfo, i) = meta->stride[i];
    }
  }

  /* Find and validate all memories */
  for (i = 0; i < n_planes; i++) {
    guint length;

    if (!gst_buffer_find_memory (inbuf,
            GST_VIDEO_INFO_PLANE_OFFSET (&self->vinfo, i), 1,
            &mems_idx[i], &length, &mems_skip[i]))
      return FALSE;

    mems[i] = gst_buffer_peek_memory (inbuf, mems_idx[i]);

    /* And all memory found must be dmabuf */
    if (!gst_is_dmabuf_memory (mems[i]))
      return FALSE;
  }

  kmsmem = (GstKMSMemory *) get_cached_kmsmem (mems[0]);
  if (kmsmem) {
    GST_LOG_OBJECT (self, "found KMS mem %p in DMABuf mem %p with fb id = %d",
        kmsmem, mems[0], kmsmem->fb_id);
    goto wrap_mem;
  }

  for (i = 0; i < n_planes; i++)
    prime_fds[i] = gst_dmabuf_memory_get_fd (mems[i]);

  GST_LOG_OBJECT (self, "found these prime ids: %d, %d, %d, %d", prime_fds[0],
      prime_fds[1], prime_fds[2], prime_fds[3]);

  kmsmem = gst_kms_allocator_dmabuf_import (self->allocator, prime_fds,
      n_planes, mems_skip, &self->vinfo);
  if (!kmsmem)
    return FALSE;

  GST_LOG_OBJECT (self, "setting KMS mem %p to DMABuf mem %p with fb id = %d",
      kmsmem, mems[0], kmsmem->fb_id);
  set_cached_kmsmem (mems[0], GST_MEMORY_CAST (kmsmem));

wrap_mem:
  *outbuf = gst_buffer_new ();
  if (!*outbuf)
    return FALSE;
  gst_buffer_append_memory (*outbuf,
      gst_memory_ref (GST_MEMORY_CAST (kmsmem)));
  gst_buffer_add_parent_buffer_meta (*outbuf, inbuf);

  return TRUE;
}
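/* A hedged sketch (not in the original file) of the lookup done above:
 * gst_buffer_find_memory() translates a byte offset (here, a plane offset
 * taken from GstVideoInfo/GstVideoMeta) into the index of the GstMemory
 * containing it plus the number of bytes to skip inside that memory. */
static GstMemory *
find_plane_memory (GstBuffer * buf, gsize plane_offset, gsize * skip)
{
  guint idx, length;

  if (!gst_buffer_find_memory (buf, plane_offset, 1, &idx, &length, skip))
    return NULL;

  return gst_buffer_peek_memory (buf, idx);
}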