static GstFlowReturn rsn_parsetter_sink_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { RsnParSetter *parset = RSN_PARSETTER (gst_pad_get_parent (pad)); GstFlowReturn ret; GST_LOG_OBJECT (parset, "Entering bufferalloc"); if (rsn_parsetter_check_caps (parset, caps)) { ret = gst_pad_alloc_buffer (parset->srcpad, offset, size, caps, buf); GST_LOG_OBJECT (parset, "Not wrapping buf %p", *buf); } else { /* Allocate and wrap a downstream buffer */ GstBuffer *orig_buf; GstBuffer *outbuf; GstCaps *override_caps = rsn_parsetter_convert_caps (parset, caps, parset->is_widescreen); ret = gst_pad_alloc_buffer (parset->srcpad, offset, size, override_caps, &orig_buf); gst_caps_unref (override_caps); if (ret != GST_FLOW_OK) return ret; outbuf = (GstBuffer *) rsn_wrapped_buffer_new (orig_buf); if (!outbuf) { /* FIXME: Throw error */ return GST_FLOW_ERROR; } rsn_wrapped_buffer_set_owner (RSN_WRAPPEDBUFFER (outbuf), GST_ELEMENT (parset)); gst_buffer_set_caps (outbuf, caps); GST_LOG_OBJECT (parset, "Wrapped ds buf %p with caps %" GST_PTR_FORMAT " into new buf %p with caps %" GST_PTR_FORMAT, orig_buf, GST_BUFFER_CAPS (orig_buf), outbuf, GST_BUFFER_CAPS (outbuf)); *buf = outbuf; } gst_object_unref (GST_OBJECT (parset)); return ret; }
static GstFlowReturn gst_teletextdec_export_html_page (GstTeletextDec * teletext, vbi_page * page, GstBuffer ** buf) { GstCaps *caps; GstFlowReturn ret; gchar *html; gssize size; vbi_export *ex; gchar *err; if (!(ex = vbi_export_new ("html", &err))) { GST_ELEMENT_ERROR (teletext, LIBRARY, SETTINGS, ("Can't open the HTML export module: %s", err), (NULL)); g_free (err); return GST_FLOW_ERROR; } /* export to NULL to get the size of the memory needed to allocate the page */ size = vbi_export_mem (ex, NULL, 0, page); if (size < 0) return GST_FLOW_ERROR; html = g_malloc (size); vbi_export_mem (ex, html, size, page); /* Allocate new buffer */ caps = gst_caps_new_simple ("text/html", NULL); ret = gst_pad_alloc_buffer (teletext->srcpad, GST_BUFFER_OFFSET_NONE, size, caps, buf); if (G_LIKELY (ret == GST_FLOW_OK)) GST_BUFFER_DATA (*buf) = GST_BUFFER_MALLOCDATA (*buf) = (guint8 *) html; else { *buf = NULL; g_free (html); } gst_caps_unref (caps); return ret; }
static GstFlowReturn gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf) { GstFFMpegDeinterlace *deinterlace = GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad)); GstBuffer *outbuf = NULL; GstFlowReturn result; result = gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE, deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf); if (result == GST_FLOW_OK) { gst_ffmpeg_avpicture_fill (&deinterlace->from_frame, GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width, deinterlace->height); gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf), deinterlace->pixfmt, deinterlace->width, deinterlace->height); avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame, deinterlace->pixfmt, deinterlace->width, deinterlace->height); gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS); result = gst_pad_push (deinterlace->srcpad, outbuf); } gst_buffer_unref (inbuf); return result; }
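/* Note (sketch, not code from the element above; all names are placeholders):
 * in GStreamer 0.10 a buffer returned by gst_pad_alloc_buffer() is not
 * guaranteed to have the requested size or caps -- downstream may hand back
 * something different after a renegotiation -- so a defensive caller checks
 * the result and falls back to a locally allocated buffer. */
static GstFlowReturn
alloc_output_or_fallback (GstPad * srcpad, guint size, GstBuffer ** outbuf)
{
  GstFlowReturn ret;

  ret = gst_pad_alloc_buffer (srcpad, GST_BUFFER_OFFSET_NONE, size,
      GST_PAD_CAPS (srcpad), outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  if (GST_BUFFER_SIZE (*outbuf) < size ||
      !gst_caps_is_equal (GST_BUFFER_CAPS (*outbuf), GST_PAD_CAPS (srcpad))) {
    /* unusable buffer from downstream: allocate our own instead */
    gst_buffer_unref (*outbuf);
    *outbuf = gst_buffer_new_and_alloc (size);
    gst_buffer_set_caps (*outbuf, GST_PAD_CAPS (srcpad));
  }
  return GST_FLOW_OK;
}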
GstFlowReturn gst_vdp_output_src_pad_push (GstVdpOutputSrcPad * vdp_pad, GstVdpOutputBuffer * output_buf, GError ** error) { GstPad *pad; GstBuffer *outbuf; g_return_val_if_fail (GST_IS_VDP_OUTPUT_SRC_PAD (vdp_pad), GST_FLOW_ERROR); g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), GST_FLOW_ERROR); pad = (GstPad *) vdp_pad; if (G_UNLIKELY (!GST_PAD_CAPS (pad))) return GST_FLOW_NOT_NEGOTIATED; switch (vdp_pad->output_format) { case GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB: { GstFlowReturn ret; guint size; gst_vdp_output_buffer_calculate_size (output_buf, &size); vdp_pad->lock_caps = TRUE; ret = gst_pad_alloc_buffer (pad, 0, size, GST_PAD_CAPS (vdp_pad), &outbuf); vdp_pad->lock_caps = FALSE; if (ret != GST_FLOW_OK) { gst_buffer_unref (GST_BUFFER_CAST (output_buf)); return ret; } if (!gst_vdp_output_buffer_download (output_buf, outbuf, error)) { gst_buffer_unref (GST_BUFFER_CAST (output_buf)); gst_buffer_unref (outbuf); return GST_FLOW_ERROR; } gst_buffer_copy_metadata (outbuf, (const GstBuffer *) output_buf, GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS); gst_buffer_unref (GST_BUFFER_CAST (output_buf)); break; } case GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU: { outbuf = GST_BUFFER_CAST (output_buf); break; } default: g_assert_not_reached (); break; } gst_buffer_set_caps (outbuf, GST_PAD_CAPS (vdp_pad)); return gst_pad_push (pad, outbuf); }
static GstFlowReturn gst_teletextdec_export_pango_page (GstTeletextDec * teletext, vbi_page * page, GstBuffer ** buf) { vbi_char *acp; const guint rows = page->rows; gchar **colors; gchar **lines; GString *subs; GstCaps *caps; GstFlowReturn ret; guint start, stop; guint i, j; colors = (gchar **) g_malloc (sizeof (gchar *) * (rows + 1)); colors[rows] = NULL; /* parse all the lines and approximate each line's foreground color using the first non-blank character */ for (acp = page->text, i = 0; i < page->rows; acp += page->columns, i++) { colors[i] = g_strdup (default_color_map[7]); for (j = 0; j < page->columns; j++) { if (acp[j].unicode != 0x20) { g_free (colors[i]); colors[i] = g_strdup (default_color_map[acp[j].foreground]); break; } } } /* get an array of strings with each line of the teletext page */ start = teletext->subtitles_mode ? 1 : 0; stop = teletext->subtitles_mode ? rows - 2 : rows - 1; lines = gst_teletextdec_vbi_page_to_text_lines (teletext, start, stop, page); /* format each line in pango markup */ subs = g_string_new (""); for (i = start; i <= stop; i++) { g_string_append_printf (subs, PANGO_TEMPLATE, teletext->font_description, colors[i], lines[i - start]); } /* Allocate new buffer */ caps = gst_caps_new_simple ("text/x-pango-markup", NULL); ret = gst_pad_alloc_buffer (teletext->srcpad, GST_BUFFER_OFFSET_NONE, subs->len + 1, caps, buf); if (G_LIKELY (ret == GST_FLOW_OK)) { GST_BUFFER_DATA (*buf) = GST_BUFFER_MALLOCDATA (*buf) = (guint8 *) g_string_free (subs, FALSE); } else { *buf = NULL; g_string_free (subs, TRUE); } g_strfreev (lines); g_strfreev (colors); gst_caps_unref (caps); return ret; }
static GstFlowReturn gst_win_inet_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer) { GstWinInetSrc *self = GST_WIN_INET_SRC (pushsrc); GstBaseSrc *basesrc = GST_BASE_SRC (pushsrc); GstBuffer *buf = NULL; GstFlowReturn ret = GST_FLOW_OK; DWORD bytes_read = 0; do { GstCaps *caps = GST_PAD_CAPS (GST_BASE_SRC_PAD (self)); if (self->icy_caps != NULL) caps = self->icy_caps; ret = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (basesrc), self->cur_offset, basesrc->blocksize, caps, &buf); if (G_LIKELY (ret == GST_FLOW_OK)) { if (InternetReadFile (self->url, GST_BUFFER_DATA (buf), basesrc->blocksize, &bytes_read)) { if (bytes_read == 0) { if (self->poll_mode) { if (gst_win_inet_src_open (self)) { gst_buffer_unref (buf); buf = NULL; } else { ret = GST_FLOW_ERROR; } } else { GST_ERROR_OBJECT (self, "short read (eof?)"); ret = GST_FLOW_UNEXPECTED; } } } else { GST_ERROR_OBJECT (self, "InternetReadFile failed: 0x%08lx", GetLastError ()); ret = GST_FLOW_ERROR; } } } while (bytes_read == 0 && ret == GST_FLOW_OK); if (ret == GST_FLOW_OK) { GST_BUFFER_SIZE (buf) = bytes_read; self->cur_offset += bytes_read; *buffer = buf; } else { if (buf != NULL) gst_buffer_unref (buf); } return ret; }
static GstFlowReturn gst_selector_pad_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstInputSelector *sel; GstFlowReturn result; GstPad *active_sinkpad; GstPad *prev_active_sinkpad; GstSelectorPad *selpad; sel = GST_INPUT_SELECTOR (gst_pad_get_parent (pad)); selpad = GST_SELECTOR_PAD_CAST (pad); GST_LOG_OBJECT (pad, "received alloc"); GST_INPUT_SELECTOR_LOCK (sel); prev_active_sinkpad = sel->active_sinkpad; active_sinkpad = gst_input_selector_activate_sinkpad (sel, pad); if (pad != active_sinkpad) goto not_active; GST_INPUT_SELECTOR_UNLOCK (sel); if (prev_active_sinkpad != active_sinkpad && pad == active_sinkpad) g_object_notify (G_OBJECT (sel), "active-pad"); result = gst_pad_alloc_buffer (sel->srcpad, offset, size, caps, buf); done: gst_object_unref (sel); return result; /* ERRORS */ not_active: { GST_INPUT_SELECTOR_UNLOCK (sel); /* unselected pad, perform fallback alloc or return unlinked when * asked */ GST_OBJECT_LOCK (selpad); if (selpad->always_ok) { GST_DEBUG_OBJECT (pad, "Not selected, performing fallback allocation"); *buf = NULL; result = GST_FLOW_OK; } else { GST_DEBUG_OBJECT (pad, "Not selected, return NOT_LINKED"); result = GST_FLOW_NOT_LINKED; } GST_OBJECT_UNLOCK (selpad); goto done; } }
static GstFlowReturn gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); GstVdpOutputBuffer *outbuf; GstFlowReturn ret = GST_FLOW_ERROR; GstVdpDevice *device = NULL; GstStructure *structure; gint width, height; gint chroma_type; if (!vpp->device) { /* if we haven't got a device yet we must alloc a buffer downstream to get it */ GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad); gst_pad_fixate_caps (vpp->srcpad, src_caps); ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0, src_caps, (GstBuffer **) & outbuf); gst_caps_unref (src_caps); if (ret != GST_FLOW_OK) goto error; device = outbuf->device; gst_buffer_unref (GST_BUFFER (outbuf)); } else device = vpp->device; structure = gst_caps_get_structure (caps, 0); if (!gst_structure_get_int (structure, "width", &width) || !gst_structure_get_int (structure, "height", &height) || !gst_structure_get_int (structure, "chroma-type", &chroma_type)) goto error; *buf = GST_BUFFER (gst_vdp_video_buffer_new (device, chroma_type, width, height)); if (*buf == NULL) goto error; GST_BUFFER_SIZE (*buf) = size; GST_BUFFER_OFFSET (*buf) = offset; gst_buffer_set_caps (*buf, caps); ret = GST_FLOW_OK; error: gst_object_unref (vpp); return ret; }
static GstFlowReturn gst_proxy_pad_do_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstFlowReturn result; GstPad *internal = GST_PROXY_PAD_INTERNAL (pad); result = gst_pad_alloc_buffer (internal, offset, size, caps, buf); return result; }
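/* Sketch (hypothetical element, placeholder names): a bufferalloc handler such
 * as the ones in this collection only takes effect after it has been installed
 * on the sink pad, typically in the element's instance-init function in
 * GStreamer 0.10. */
static void
my_element_init (MyElement * self)
{
  self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  /* route downstream allocation requests through our handler */
  gst_pad_set_bufferalloc_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (my_element_sink_bufferalloc));
  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
}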
static GstFlowReturn gst_real_audio_dec_chain (GstPad * pad, GstBuffer * in) { GstRealAudioDec *dec = GST_REAL_AUDIO_DEC (GST_PAD_PARENT (pad)); GstFlowReturn flow; GstClockTime timestamp; GstBuffer *out = NULL; guint16 res = 0; guint len; if (G_UNLIKELY (dec->lib.RADecode == NULL || dec->lib.module == NULL)) goto not_negotiated; timestamp = GST_BUFFER_TIMESTAMP (in); flow = gst_pad_alloc_buffer (dec->src, GST_BUFFER_OFFSET_NONE, dec->width * dec->leaf_size * dec->height * 16, GST_PAD_CAPS (dec->src), &out); if (flow != GST_FLOW_OK) goto done; res = dec->lib.RADecode (dec->lib.context, GST_BUFFER_DATA (in), GST_BUFFER_SIZE (in), GST_BUFFER_DATA (out), &len, -1); if (res != 0) goto could_not_decode; GST_BUFFER_SIZE (out) = len; GST_BUFFER_TIMESTAMP (out) = timestamp; flow = gst_pad_push (dec->src, out); done: gst_buffer_unref (in); return flow; /* Errors */ could_not_decode: { gst_buffer_unref (out); GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Could not decode buffer (%i).", res)); flow = GST_FLOW_ERROR; goto done; } not_negotiated: { GST_WARNING_OBJECT (dec, "decoder not open, probably no input caps set " "yet, caps on input buffer: %" GST_PTR_FORMAT, GST_BUFFER_CAPS (in)); flow = GST_FLOW_NOT_NEGOTIATED; goto done; } }
static GstFlowReturn fs_funnel_buffer_alloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { FsFunnel *funnel = FS_FUNNEL (gst_pad_get_parent_element (pad)); GstFlowReturn ret = GST_FLOW_OK; ret = gst_pad_alloc_buffer (funnel->srcpad, offset, size, caps, buf); gst_object_unref (funnel); return ret; }
static GstFlowReturn gst_audio_ringbuffer_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstAudioRingbuffer *ringbuffer; GstFlowReturn result; ringbuffer = GST_AUDIO_RINGBUFFER (GST_PAD_PARENT (pad)); /* Forward to src pad, without setting caps on the src pad */ result = gst_pad_alloc_buffer (ringbuffer->srcpad, offset, size, caps, buf); return result; }
static GstFlowReturn gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf) { GstFFMpegDeinterlace *deinterlace = GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad)); GstBuffer *outbuf = NULL; GstFlowReturn result; GST_OBJECT_LOCK (deinterlace); if (deinterlace->reconfigure) { if (deinterlace->new_mode != -1) deinterlace->mode = deinterlace->new_mode; deinterlace->new_mode = -1; deinterlace->reconfigure = FALSE; GST_OBJECT_UNLOCK (deinterlace); if (GST_PAD_CAPS (deinterlace->srcpad)) gst_ffmpegdeinterlace_sink_setcaps (deinterlace->sinkpad, GST_PAD_CAPS (deinterlace->sinkpad)); } else { GST_OBJECT_UNLOCK (deinterlace); } if (deinterlace->passthrough) return gst_pad_push (deinterlace->srcpad, inbuf); result = gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE, deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf); if (result == GST_FLOW_OK) { gst_ffmpeg_avpicture_fill (&deinterlace->from_frame, GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width, deinterlace->height); gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf), deinterlace->pixfmt, deinterlace->width, deinterlace->height); avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame, deinterlace->pixfmt, deinterlace->width, deinterlace->height); gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS); result = gst_pad_push (deinterlace->srcpad, outbuf); } gst_buffer_unref (inbuf); return result; }
static GstFlowReturn gst_teletextdec_export_text_page (GstTeletextDec * teletext, vbi_page * page, GstBuffer ** buf) { GstCaps *caps; GstFlowReturn ret; gchar *text; guint size; if (teletext->subtitles_mode) { gchar **lines; GString *subs; guint i; lines = gst_teletextdec_vbi_page_to_text_lines (teletext, 1, 23, page); subs = g_string_new (""); /* Strip whitespace and squash blank lines */ for (i = 0; i < 23; i++) { g_strstrip (lines[i]); if (g_strcmp0 (lines[i], "")) g_string_append_printf (subs, teletext->subtitles_template, lines[i]); } /* if the page is blank and doesn't contain any line of text, just add a line break */ if (!g_strcmp0 (subs->str, "")) g_string_append (subs, "\n"); text = subs->str; size = subs->len + 1; g_string_free (subs, FALSE); g_strfreev (lines); } else { size = page->columns * page->rows; text = g_malloc (size); vbi_print_page (page, text, size, "UTF-8", FALSE, TRUE); } /* Allocate new buffer */ caps = gst_caps_new_simple ("text/plain", NULL); ret = gst_pad_alloc_buffer (teletext->srcpad, GST_BUFFER_OFFSET_NONE, size, caps, buf); if (G_LIKELY (ret == GST_FLOW_OK)) GST_BUFFER_DATA (*buf) = GST_BUFFER_MALLOCDATA (*buf) = (guint8 *) text; else { *buf = NULL; g_free (text); } gst_caps_unref (caps); return ret; }
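/* Note on the three teletext exporters above (sketch with placeholder names,
 * not code from gstteletextdec.c): overwriting GST_BUFFER_DATA and
 * GST_BUFFER_MALLOCDATA hands the exported string to the buffer, but it also
 * drops whatever data block the pad-allocated buffer already carried, which is
 * then leaked when the buffer is finalized. A more conservative variant copies
 * the text into the allocated buffer and frees the temporary. */
static GstFlowReturn
push_exported_text (GstPad * srcpad, gchar * text, guint size, GstCaps * caps)
{
  GstBuffer *buf = NULL;
  GstFlowReturn ret;

  ret = gst_pad_alloc_buffer (srcpad, GST_BUFFER_OFFSET_NONE, size, caps, &buf);
  if (ret != GST_FLOW_OK) {
    g_free (text);              /* nobody took ownership */
    return ret;
  }

  memcpy (GST_BUFFER_DATA (buf), text, size);
  g_free (text);
  return gst_pad_push (srcpad, buf);
}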
static GstFlowReturn gst_neonhttp_src_create (GstPushSrc * psrc, GstBuffer ** outbuf) { GstNeonhttpSrc *src; GstBaseSrc *basesrc; GstFlowReturn ret; gint read; src = GST_NEONHTTP_SRC (psrc); basesrc = GST_BASE_SRC_CAST (psrc); /* The caller should know the number of bytes and not read beyond EOS. */ if (G_UNLIKELY (src->eos)) goto eos; /* Create the buffer. */ ret = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (basesrc), basesrc->segment.last_stop, basesrc->blocksize, src->icy_caps ? src->icy_caps : GST_PAD_CAPS (GST_BASE_SRC_PAD (basesrc)), outbuf); if (G_UNLIKELY (ret != GST_FLOW_OK)) goto done; read = gst_neonhttp_src_request_dispatch (src, *outbuf); if (G_UNLIKELY (read < 0)) goto read_error; GST_LOG_OBJECT (src, "returning %u bytes", GST_BUFFER_SIZE (*outbuf)); done: return ret; /* ERRORS */ eos: { GST_DEBUG_OBJECT (src, "EOS reached"); return GST_FLOW_UNEXPECTED; } read_error: { GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Could not read any bytes (%i, %s)", read, ne_get_error (src->session))); gst_buffer_unref (*outbuf); *outbuf = NULL; return GST_FLOW_ERROR; } }
static GstFlowReturn gst_shape_wipe_video_sink_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad)); GstFlowReturn ret = GST_FLOW_OK; GST_DEBUG_OBJECT (pad, "Allocating buffer with offset 0x%" G_GINT64_MODIFIER "x and size %u with caps: %" GST_PTR_FORMAT, offset, size, caps); *buf = NULL; ret = gst_pad_alloc_buffer (self->srcpad, offset, size, caps, buf); gst_object_unref (self); return ret; }
static GstFlowReturn gst_cmml_enc_new_buffer (GstCmmlEnc * enc, guchar * data, gint size, GstBuffer ** buffer) { GstFlowReturn res; res = gst_pad_alloc_buffer (enc->srcpad, GST_BUFFER_OFFSET_NONE, size, NULL, buffer); if (res == GST_FLOW_OK) { if (data) memcpy (GST_BUFFER_DATA (*buffer), data, size); } else { GST_WARNING_OBJECT (enc, "alloc function returned error %s", gst_flow_get_name (res)); } return res; }
static GstFlowReturn gst_selector_pad_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { RsnStreamSelector *sel; GstFlowReturn result; GstPad *active_sinkpad; sel = RSN_STREAM_SELECTOR (gst_pad_get_parent (pad)); active_sinkpad = rsn_stream_selector_get_active (sel, pad); /* Fallback allocation for buffers from pads except the selected one */ if (pad != active_sinkpad) { GST_DEBUG_OBJECT (sel, "Pad %s:%s is not selected. Performing fallback allocation", GST_DEBUG_PAD_NAME (pad)); *buf = NULL; result = GST_FLOW_OK; } else { result = gst_pad_alloc_buffer (sel->srcpad, offset, size, caps, buf); /* FIXME: HACK. If buffer alloc returns not-linked, perform a fallback * allocation. This should NOT be necessary, because playbin should * properly block the source pad from running until it's finished hooking * everything up, but playbin needs refactoring first. */ if (result == GST_FLOW_NOT_LINKED) { GST_DEBUG_OBJECT (sel, "No peer pad yet - performing fallback allocation for pad %s:%s", GST_DEBUG_PAD_NAME (pad)); *buf = NULL; result = GST_FLOW_OK; } } gst_object_unref (sel); return result; }
static GstFlowReturn gst_segment_clip_sink_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstSegmentClip *self = GST_SEGMENT_CLIP (gst_pad_get_parent (pad)); GstFlowReturn ret = GST_FLOW_OK; GST_LOG_OBJECT (pad, "Allocating buffer with offset 0x%" G_GINT64_MODIFIER "x and size %u with caps: %" GST_PTR_FORMAT, offset, size, caps); *buf = NULL; ret = gst_pad_alloc_buffer (self->srcpad, offset, size, caps, buf); if (G_UNLIKELY (ret != GST_FLOW_OK)) GST_ERROR_OBJECT (pad, "Allocating buffer failed: %s", gst_flow_get_name (ret)); gst_object_unref (self); return ret; }
static GstFlowReturn gst_valve_buffer_alloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf) { GstValve *valve = GST_VALVE (gst_pad_get_parent_element (pad)); GstFlowReturn ret = GST_FLOW_OK; if (g_atomic_int_get (&valve->drop)) *buf = NULL; else ret = gst_pad_alloc_buffer (valve->srcpad, offset, size, caps, buf); /* Ignore errors if "drop" was changed while the thread was blocked in the downstream allocation */ if (g_atomic_int_get (&valve->drop)) ret = GST_FLOW_OK; gst_object_unref (valve); return ret; }
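/* Sketch (hypothetical, placeholder names; not taken from the valve element):
 * the chain-side counterpart one would pair with such a "drop" flag discards
 * buffers while the flag is set and forwards them otherwise. Returning
 * GST_FLOW_OK with *buf set to NULL in the allocator above makes GStreamer
 * 0.10 core fall back to a default allocation, so upstream keeps running even
 * while the valve is dropping. */
static GstFlowReturn
my_valve_chain (GstPad * pad, GstBuffer * buffer)
{
  MyValve *valve = MY_VALVE (gst_pad_get_parent_element (pad));
  GstFlowReturn ret = GST_FLOW_OK;

  if (g_atomic_int_get (&valve->drop))
    gst_buffer_unref (buffer);  /* swallow the data while dropping */
  else
    ret = gst_pad_push (valve->srcpad, buffer);

  gst_object_unref (valve);
  return ret;
}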
static GstBuffer * gst_ks_video_src_alloc_buffer (guint size, guint alignment, gpointer user_data) { GstKsVideoSrc *self = GST_KS_VIDEO_SRC (user_data); GstBuffer *buf; GstCaps *caps; GstFlowReturn flow_ret; caps = gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (self)); if (caps == NULL) goto error_no_caps; flow_ret = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self), 0, size + (alignment - 1), caps, &buf); gst_caps_unref (caps); if (G_UNLIKELY (flow_ret != GST_FLOW_OK)) goto error_alloc_buffer; GST_BUFFER_DATA (buf) = GSIZE_TO_POINTER ((GPOINTER_TO_SIZE (GST_BUFFER_DATA (buf)) + (alignment - 1)) & ~(alignment - 1)); GST_BUFFER_SIZE (buf) = size; return buf; error_no_caps: { GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, ("not negotiated"), ("maybe setcaps failed?")); return NULL; } error_alloc_buffer: { GST_ELEMENT_ERROR (self, CORE, PAD, ("alloc_buffer failed"), (NULL)); return NULL; } }
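/* Sketch: the pointer arithmetic above rounds the buffer's data address up to
 * the next 'alignment' boundary (alignment must be a power of two), which is
 * why 'alignment - 1' extra bytes are requested from gst_pad_alloc_buffer().
 * The rounding expression in isolation: */
static gsize
align_up (gsize value, gsize alignment)
{
  return (value + (alignment - 1)) & ~(alignment - 1);
}
/* e.g. align_up (0x1003, 16) == 0x1010 and align_up (0x1010, 16) == 0x1010 */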
/* creates a new buffer and sets caps and timestamp on it */ static GstFlowReturn gst_cmml_dec_new_buffer (GstCmmlDec * dec, guchar * data, gint size, GstBuffer ** buffer) { GstFlowReturn res; res = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE, size, gst_static_pad_template_get_caps (&gst_cmml_dec_src_factory), buffer); if (res == GST_FLOW_OK) { if (data) memcpy (GST_BUFFER_DATA (*buffer), data, size); GST_BUFFER_TIMESTAMP (*buffer) = dec->timestamp; } else if (res == GST_FLOW_NOT_LINKED) { GST_DEBUG_OBJECT (dec, "alloc function returned NOT-LINKED, ignoring"); } else { GST_WARNING_OBJECT (dec, "alloc function returned error %s", gst_flow_get_name (res)); } return res; }
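/* Sketch of a possible caller of the helper above (hypothetical, not taken
 * from cmmldec): GST_FLOW_NOT_LINKED from the allocation is treated as
 * "nothing to send" rather than as a fatal error, so a temporarily unlinked
 * src pad does not stop the element. */
static GstFlowReturn
push_new_buffer_sketch (GstCmmlDec * dec, guchar * data, gint size)
{
  GstBuffer *buffer = NULL;
  GstFlowReturn res;

  res = gst_cmml_dec_new_buffer (dec, data, size, &buffer);
  if (res == GST_FLOW_NOT_LINKED)
    return GST_FLOW_OK;         /* nobody downstream yet; drop silently */
  if (res != GST_FLOW_OK)
    return res;

  return gst_pad_push (dec->srcpad, buffer);
}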
/* push remaining data in the buffers out */ static GstFlowReturn audioresample_pushthrough (GstAudioresample * audioresample) { int outsize; ResampleState *r; GstBuffer *outbuf; GstFlowReturn res = GST_FLOW_OK; GstBaseTransform *trans; r = audioresample->resample; outsize = resample_get_output_size (r); if (outsize == 0) { GST_DEBUG_OBJECT (audioresample, "no internal buffers needing flush"); goto done; } trans = GST_BASE_TRANSFORM (audioresample); res = gst_pad_alloc_buffer (trans->srcpad, GST_BUFFER_OFFSET_NONE, outsize, GST_PAD_CAPS (trans->srcpad), &outbuf); if (G_UNLIKELY (res != GST_FLOW_OK)) { GST_WARNING_OBJECT (audioresample, "failed allocating buffer of %d bytes", outsize); goto done; } res = audioresample_do_output (audioresample, outbuf); if (G_UNLIKELY (res != GST_FLOW_OK)) goto done; res = gst_pad_push (trans->srcpad, outbuf); done: return res; }
static SoupBuffer * gst_soup_http_src_chunk_allocator (SoupMessage * msg, gsize max_len, gpointer user_data) { GstSoupHTTPSrc *src = (GstSoupHTTPSrc *) user_data; GstBaseSrc *basesrc = GST_BASE_SRC_CAST (src); GstBuffer *gstbuf; SoupBuffer *soupbuf; gsize length; GstFlowReturn rc; if (max_len) length = MIN (basesrc->blocksize, max_len); else length = basesrc->blocksize; GST_DEBUG_OBJECT (src, "alloc %" G_GSIZE_FORMAT " bytes <= %" G_GSIZE_FORMAT, length, max_len); rc = gst_pad_alloc_buffer (GST_BASE_SRC_PAD (basesrc), GST_BUFFER_OFFSET_NONE, length, src->src_caps ? src->src_caps : GST_PAD_CAPS (GST_BASE_SRC_PAD (basesrc)), &gstbuf); if (G_UNLIKELY (rc != GST_FLOW_OK)) { /* Failed to allocate buffer. Stall SoupSession and return error code * to create(). */ src->ret = rc; g_main_loop_quit (src->loop); return NULL; } soupbuf = soup_buffer_new_with_owner (GST_BUFFER_DATA (gstbuf), length, gstbuf, gst_soup_http_src_chunk_free); return soupbuf; }
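/* The gst_soup_http_src_chunk_free() callback passed above is not part of this
 * collection; a minimal sketch of what such an owner-destroy notify has to do
 * (assuming the GstBuffer is the owner, as in the call above) is to drop the
 * buffer reference that keeps the chunk memory alive: */
static void
chunk_free_sketch (gpointer gstbuf)
{
  gst_buffer_unref (GST_BUFFER_CAST (gstbuf));
}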
static GstFlowReturn gst_deinterleave_process (GstDeinterleave * self, GstBuffer * buf) { GstFlowReturn ret = GST_FLOW_OK; guint channels = self->channels; guint pads_pushed = 0, buffers_allocated = 0; guint nframes = GST_BUFFER_SIZE (buf) / channels / (self->width / 8); guint bufsize = nframes * (self->width / 8); guint i; GList *srcs; GstBuffer **buffers_out = g_new0 (GstBuffer *, channels); guint8 *in, *out; /* Send any pending events to all src pads */ GST_OBJECT_LOCK (self); if (self->pending_events) { GList *events; GstEvent *event; GST_DEBUG_OBJECT (self, "Sending pending events to all src pads"); for (events = self->pending_events; events != NULL; events = events->next) { event = GST_EVENT (events->data); for (srcs = self->srcpads; srcs != NULL; srcs = srcs->next) gst_pad_push_event (GST_PAD (srcs->data), gst_event_ref (event)); gst_event_unref (event); } g_list_free (self->pending_events); self->pending_events = NULL; } GST_OBJECT_UNLOCK (self); /* Allocate buffers */ for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) { GstPad *pad = (GstPad *) srcs->data; buffers_out[i] = NULL; ret = gst_pad_alloc_buffer (pad, GST_BUFFER_OFFSET_NONE, bufsize, GST_PAD_CAPS (pad), &buffers_out[i]); /* Make sure we got a correct buffer. The only other case we allow * here is an unlinked pad */ if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED) goto alloc_buffer_failed; else if (buffers_out[i] && GST_BUFFER_SIZE (buffers_out[i]) != bufsize) goto alloc_buffer_bad_size; else if (buffers_out[i] && !gst_caps_is_equal (GST_BUFFER_CAPS (buffers_out[i]), GST_PAD_CAPS (pad))) goto invalid_caps; if (buffers_out[i]) { gst_buffer_copy_metadata (buffers_out[i], buf, GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS); buffers_allocated++; } } /* Return NOT_LINKED if no pad was linked */ if (!buffers_allocated) { GST_WARNING_OBJECT (self, "Couldn't allocate any buffers because no pad was linked"); ret = GST_FLOW_NOT_LINKED; goto done; } /* deinterleave */ for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) { GstPad *pad = (GstPad *) srcs->data; in = (guint8 *) GST_BUFFER_DATA (buf); in += i * (self->width / 8); if (buffers_out[i]) { out = (guint8 *) GST_BUFFER_DATA (buffers_out[i]); self->func (out, in, channels, nframes); ret = gst_pad_push (pad, buffers_out[i]); buffers_out[i] = NULL; if (ret == GST_FLOW_OK) pads_pushed++; else if (ret == GST_FLOW_NOT_LINKED) ret = GST_FLOW_OK; else goto push_failed; } } /* Return NOT_LINKED if no pad was linked */ if (!pads_pushed) ret = GST_FLOW_NOT_LINKED; done: gst_buffer_unref (buf); g_free (buffers_out); return ret; alloc_buffer_failed: { GST_WARNING ("gst_pad_alloc_buffer() returned %s", gst_flow_get_name (ret)); goto clean_buffers; } alloc_buffer_bad_size: { GST_WARNING ("called alloc_buffer(), but didn't get requested bytes"); ret = GST_FLOW_NOT_NEGOTIATED; goto clean_buffers; } invalid_caps: { GST_WARNING ("called alloc_buffer(), but didn't get requested caps"); ret = GST_FLOW_NOT_NEGOTIATED; goto clean_buffers; } push_failed: { GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret)); goto clean_buffers; } clean_buffers: { for (i = 0; i < channels; i++) { if (buffers_out[i]) gst_buffer_unref (buffers_out[i]); } gst_buffer_unref (buf); g_free (buffers_out); return ret; } }
static gboolean gst_dshowvideodec_push_buffer (byte * buffer, long size, byte * src_object, UINT64 start, UINT64 stop) { GstDshowVideoDec *vdec = (GstDshowVideoDec *) src_object; GstDshowVideoDecClass *klass = (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec); GstBuffer *buf = NULL; gboolean in_seg = FALSE; gint64 clip_start = 0, clip_stop = 0; /* check if this buffer is in our current segment */ in_seg = gst_segment_clip (vdec->segment, GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop); /* if the buffer is out of segment do not push it downstream */ if (!in_seg) { GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec, "buffer is out of segment, start %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop)); return FALSE; } /* buffer is in our segment, allocate a new out buffer and clip its * timestamps */ vdec->last_ret = gst_pad_alloc_buffer (vdec->srcpad, GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (vdec->srcpad), &buf); if (!buf) { GST_CAT_WARNING_OBJECT (dshowvideodec_debug, vdec, "cannot allocate a new GstBuffer"); return FALSE; } /* set buffer properties */ GST_BUFFER_TIMESTAMP (buf) = clip_start; GST_BUFFER_DURATION (buf) = clip_stop - clip_start; if (strstr (klass->entry->srccaps, "rgb")) { /* FOR RGB directshow decoder will return bottom-up BITMAP * There is probably a way to get top-bottom video frames from * the decoder... */ gint line = 0; guint stride = vdec->width * 4; for (; line < vdec->height; line++) { memcpy (GST_BUFFER_DATA (buf) + (line * stride), buffer + (size - ((line + 1) * (stride))), stride); } } else { memcpy (GST_BUFFER_DATA (buf), buffer, MIN (size, GST_BUFFER_SIZE (buf))); } GST_CAT_LOG_OBJECT (dshowvideodec_debug, vdec, "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); /* push the buffer downstream */ vdec->last_ret = gst_pad_push (vdec->srcpad, buf); return TRUE; }
static GstFlowReturn gst_real_video_dec_chain (GstPad * pad, GstBuffer * in) { GstRealVideoDec *dec; guint8 *data; guint size; GstFlowReturn ret; RVInData tin; RVOutData tout; GstClockTime timestamp, duration; GstBuffer *out; guint32 result; guint frag_count, frag_size; dec = GST_REAL_VIDEO_DEC (GST_PAD_PARENT (pad)); if (G_UNLIKELY (dec->lib.Transform == NULL || dec->lib.module == NULL)) goto not_negotiated; data = GST_BUFFER_DATA (in); size = GST_BUFFER_SIZE (in); timestamp = GST_BUFFER_TIMESTAMP (in); duration = GST_BUFFER_DURATION (in); GST_DEBUG_OBJECT (dec, "got buffer of size %u, timestamp %" GST_TIME_FORMAT, size, GST_TIME_ARGS (timestamp)); /* alloc output buffer */ ret = gst_pad_alloc_buffer (dec->src, GST_BUFFER_OFFSET_NONE, dec->width * dec->height * 3 / 2, GST_PAD_CAPS (dec->src), &out); if (ret != GST_FLOW_OK) goto alloc_failed; GST_BUFFER_TIMESTAMP (out) = timestamp; GST_BUFFER_DURATION (out) = duration; frag_count = *data++; frag_size = (frag_count + 1) * 8; size -= (frag_size + 1); GST_DEBUG_OBJECT (dec, "frag_count %u, frag_size %u, data size %u", frag_count, frag_size, size); /* Decode. * * The Buffers contain * * 0 1 2 3 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | nfragments | fragment1 ... | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | .... | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | ... | fragment2 ... | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * .... * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | ... | fragment data | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * * nfragments: number of fragments * fragmentN: 8 bytes of fragment data (nfragements + 1) of them * fragment data: the data of the fragments. */ tin.datalen = size; tin.interpolate = 0; tin.nfragments = frag_count; tin.fragments = data; tin.flags = 0; tin.timestamp = timestamp; /* jump over the frag table to the fragments */ data += frag_size; result = dec->lib.Transform ( (gchar *) data, (gchar *) GST_BUFFER_DATA (out), &tin, &tout, dec->lib.context); if (result) goto could_not_transform; /* When we decoded a frame, reset the error counter. We only fail after N * consecutive decoding errors. 
*/ dec->error_count = 0; gst_buffer_unref (in); /* Check for new dimensions */ if (tout.frames && ((dec->width != tout.width) || (dec->height != tout.height))) { GstCaps *caps = gst_caps_copy (GST_PAD_CAPS (dec->src)); GstStructure *s = gst_caps_get_structure (caps, 0); GST_DEBUG_OBJECT (dec, "New dimensions: %" G_GUINT32_FORMAT " x %" G_GUINT32_FORMAT, tout.width, tout.height); gst_structure_set (s, "width", G_TYPE_INT, (gint) tout.width, "height", G_TYPE_INT, (gint) tout.height, NULL); gst_pad_set_caps (dec->src, caps); gst_buffer_set_caps (out, caps); gst_caps_unref (caps); dec->width = tout.width; dec->height = tout.height; GST_BUFFER_SIZE (out) = dec->width * dec->height * 3 / 2; } GST_DEBUG_OBJECT (dec, "Pushing out buffer with timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out))); if ((ret = gst_pad_push (dec->src, out)) != GST_FLOW_OK) goto could_not_push; return ret; /* Errors */ not_negotiated: { GST_WARNING_OBJECT (dec, "decoder not open, probably no input caps set " "yet, caps on input buffer: %" GST_PTR_FORMAT, GST_BUFFER_CAPS (in)); gst_buffer_unref (in); return GST_FLOW_NOT_NEGOTIATED; } alloc_failed: { GST_DEBUG_OBJECT (dec, "buffer alloc failed: %s", gst_flow_get_name (ret)); gst_buffer_unref (in); return ret; } could_not_transform: { gst_buffer_unref (out); gst_buffer_unref (in); dec->error_count++; if (dec->max_errors && dec->error_count >= dec->max_errors) { GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Could not decode buffer: %" G_GUINT32_FORMAT, result), (NULL)); return GST_FLOW_ERROR; } else { GST_ELEMENT_WARNING (dec, STREAM, DECODE, ("Could not decode buffer: %" G_GUINT32_FORMAT, result), (NULL)); return GST_FLOW_OK; } } could_not_push: { GST_DEBUG_OBJECT (dec, "Could not push buffer: %s", gst_flow_get_name (ret)); return ret; } }
static GstFlowReturn gst_xviddec_chain (GstPad * pad, GstBuffer * buf) { GstXvidDec *dec; GstBuffer *outbuf = NULL; xvid_dec_frame_t xframe; xvid_dec_stats_t xstats; gint ret; guint8 *data, *dupe = NULL; guint size; GstFlowReturn fret; dec = GST_XVIDDEC (GST_OBJECT_PARENT (pad)); if (!dec->handle) goto not_negotiated; fret = GST_FLOW_OK; GST_LOG_OBJECT (dec, "Received buffer of time %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT ", size %d", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_SIZE (buf)); if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) { /* FIXME: should we do anything here, like flush the decoder? */ } data = GST_BUFFER_DATA (buf); size = GST_BUFFER_SIZE (buf); /* xvidcore overreads the input buffer, we need to alloc some extra padding * to make things work reliably */
#define EXTRA_PADDING 16
if (EXTRA_PADDING > 0) { dupe = g_malloc (size + EXTRA_PADDING); memcpy (dupe, data, size); memset (dupe + size, 0, EXTRA_PADDING); data = dupe; } do { /* loop needed because xvidcore may return vol information */ /* decode and so ... */ gst_xvid_init_struct (xframe); xframe.general = XVID_LOWDELAY; xframe.bitstream = (void *) data; xframe.length = size; gst_xvid_init_struct (xstats); if (outbuf == NULL) { fret = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE, dec->outbuf_size, GST_PAD_CAPS (dec->srcpad), &outbuf); if (fret != GST_FLOW_OK) goto done; } gst_xvid_image_fill (&xframe.output, GST_BUFFER_DATA (outbuf), dec->csp, dec->width, dec->height); ret = xvid_decore (dec->handle, XVID_DEC_DECODE, &xframe, &xstats); if (ret < 0) goto decode_error; GST_LOG_OBJECT (dec, "xvid produced output, type %d, consumed %d", xstats.type, ret); if (xstats.type == XVID_TYPE_VOL) gst_xviddec_negotiate (dec, &xstats); data += ret; size -= ret; } while (xstats.type <= 0 && size > 0); /* 1 byte is frequently left over */ if (size > 1) { GST_WARNING_OBJECT (dec, "decoder did not consume all input"); } /* FIXME, reflow the multiple return exit points */ if (xstats.type > 0) { /* some real output was produced */ if (G_UNLIKELY (dec->waiting_for_key)) { if (xstats.type != XVID_TYPE_IVOP) goto dropping; dec->waiting_for_key = FALSE; } /* bframes can cause a delay in frames being returned; non-keyframe timestamps can permute a bit between encode and display order, but should match for keyframes */ if (dec->have_ts) { GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts; GST_BUFFER_DURATION (outbuf) = dec->next_dur; dec->next_ts = GST_BUFFER_TIMESTAMP (buf); dec->next_dur = GST_BUFFER_DURATION (buf); } else { GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf); GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf); } gst_buffer_set_caps (outbuf, GST_PAD_CAPS (dec->srcpad)); GST_LOG_OBJECT (dec, "pushing buffer with pts %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf))); fret = gst_pad_push (dec->srcpad, outbuf); } else { /* no real output yet, delay in frames being returned */ if (G_UNLIKELY (dec->have_ts)) { GST_WARNING_OBJECT (dec, "xvid decoder produced no output, but timestamp %" GST_TIME_FORMAT " already queued", GST_TIME_ARGS (dec->next_ts)); } else { dec->have_ts = TRUE; dec->next_ts = GST_BUFFER_TIMESTAMP (buf); dec->next_dur = GST_BUFFER_DURATION (buf); } gst_buffer_unref (outbuf); } done: g_free (dupe); gst_buffer_unref (buf); return fret; /* ERRORS */ not_negotiated: { GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL), ("format wasn't negotiated before chain function")); fret = GST_FLOW_NOT_NEGOTIATED; goto done; } decode_error: { /* FIXME: shouldn't we error out properly after N decoding errors? */ GST_ELEMENT_WARNING (dec, STREAM, DECODE, (NULL), ("Error decoding xvid frame: %s (%d)", gst_xvid_error (ret), ret)); if (outbuf) gst_buffer_unref (outbuf); goto done; } dropping: { GST_WARNING_OBJECT (dec, "Dropping non-keyframe (seek/init)"); if (outbuf) gst_buffer_unref (outbuf); goto done; } }
HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample) { gboolean in_seg = FALSE; gint64 clip_start = 0, clip_stop = 0; GstDshowVideoDecClass *klass = (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (mDec); GstBuffer *buf = NULL; GstClockTime start, stop; if(pMediaSample) { BYTE *pBuffer = NULL; LONGLONG lStart = 0, lStop = 0; long size = pMediaSample->GetActualDataLength(); pMediaSample->GetPointer(&pBuffer); pMediaSample->GetTime(&lStart, &lStop); start = lStart * 100; stop = lStop * 100; /* check if this buffer is in our current segment */ in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop); /* if the buffer is out of segment do not push it downstream */ if (!in_seg) { GST_DEBUG_OBJECT (mDec, "buffer is out of segment, start %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop)); goto done; } /* buffer is in our segment, allocate a new out buffer and clip its * timestamps */ mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad, GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (mDec->srcpad), &buf); if (!buf) { GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer"); goto done; } /* set buffer properties */ GST_BUFFER_TIMESTAMP (buf) = clip_start; GST_BUFFER_DURATION (buf) = clip_stop - clip_start; if (strstr (klass->entry->srccaps, "rgb")) { /* FOR RGB directshow decoder will return bottom-up BITMAP * There is probably a way to get top-bottom video frames from * the decoder... */ gint line = 0; guint stride = mDec->width * 4; for (; line < mDec->height; line++) { memcpy (GST_BUFFER_DATA (buf) + (line * stride), pBuffer + (size - ((line + 1) * (stride))), stride); } } else { memcpy (GST_BUFFER_DATA (buf), pBuffer, MIN ((unsigned int)size, GST_BUFFER_SIZE (buf))); } GST_LOG_OBJECT (mDec, "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); /* push the buffer downstream */ mDec->last_ret = gst_pad_push (mDec->srcpad, buf); } done: return S_OK; }
static GstFlowReturn gst_dasf_src_create (GstAudioSrc *audiosrc, guint64 offset, guint length, GstBuffer **buffer) { GstDasfSrc* self = GST_DASF_SRC (audiosrc); GstBaseAudioSrc *baseaudiosrc = GST_BASE_AUDIO_SRC (self); GstBuffer* gst_buffer = NULL; OMX_BUFFERHEADERTYPE* omx_buffer = NULL; GstDasfSrcPrivate* priv = GST_DASF_SRC_GET_PRIVATE (self); GstGooAudioFilter* me = self->peer_element; GST_DEBUG (""); if (me->component->cur_state != OMX_StateExecuting) { return GST_FLOW_UNEXPECTED; } GST_DEBUG ("goo stuff"); { omx_buffer = goo_port_grab_buffer (me->outport); if (gst_pad_alloc_buffer (GST_BASE_SRC_PAD (self), priv->outcount, omx_buffer->nFilledLen, GST_PAD_CAPS (GST_BASE_SRC_PAD (self)), &gst_buffer) == GST_FLOW_OK) { if (GST_IS_GOO_BUFFER (gst_buffer)) { memcpy (GST_BUFFER_DATA (gst_buffer), omx_buffer->pBuffer, omx_buffer->nFilledLen); goo_component_release_buffer (me->component, omx_buffer); } else { gst_buffer_unref (gst_buffer); gst_buffer = (GstBuffer*) gst_goo_buffer_new (); gst_goo_buffer_set_data (gst_buffer, me->component, omx_buffer); } } else { goto fail; } } GST_DEBUG ("gst stuff"); { GstClock* clock = NULL; GstClockTime timestamp, duration; clock = gst_element_get_clock (GST_ELEMENT (self)); timestamp = gst_clock_get_time (clock); timestamp -= gst_element_get_base_time (GST_ELEMENT (self)); gst_object_unref (clock); GST_BUFFER_TIMESTAMP (gst_buffer) = gst_util_uint64_scale_int (GST_SECOND, priv->outcount, 50); /* Set 20 millisecond duration */ duration = gst_util_uint64_scale_int (GST_SECOND, 1, 50); GST_BUFFER_DURATION (gst_buffer) = duration; GST_BUFFER_OFFSET (gst_buffer) = priv->outcount++; GST_BUFFER_OFFSET_END (gst_buffer) = priv->outcount; gst_buffer_set_caps (gst_buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (self))); } beach: *buffer = gst_buffer; return GST_FLOW_OK; fail: if (G_LIKELY (*buffer)) { gst_buffer_unref (*buffer); } return GST_FLOW_ERROR; }