/* Parse the MIDI data accumulated in the adapter and open it as a WildMidi
 * song.  On success the output caps are set, the internal sample-based
 * segment is (re)initialized and a new-segment event is pushed downstream.
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR (posting a STREAM/DECODE error)
 * when WildMidi cannot parse the data. */
static GstFlowReturn
gst_wildmidi_parse_song (GstWildmidi * wildmidi)
{
  struct _WM_Info *info;
  GstCaps *outcaps;
  guint8 *data;
  guint size;

  GST_DEBUG_OBJECT (wildmidi, "Parsing song");

  /* Take everything buffered so far; WildMidi_OpenBuffer takes ownership
   * of this memory block (per the comment below).
   * NOTE(review): if WildMidi_OpenBuffer fails it presumably does NOT take
   * ownership, in which case `data` leaks on the open_failed path — confirm
   * against the WildMidi API docs. */
  size = gst_adapter_available (wildmidi->adapter);
  data = gst_adapter_take (wildmidi->adapter, size);

  /* this method takes our memory block */
  GST_OBJECT_LOCK (wildmidi);
  wildmidi->song = WildMidi_OpenBuffer (data, size);

  if (!wildmidi->song)
    goto open_failed;

#ifdef HAVE_WILDMIDI_0_2_2
  WildMidi_LoadSamples (wildmidi->song);
#endif

  /* NOTE(review): this HAVE_WILDMIDI_0_2_2 conditional duplicates the one
   * above; the two could be merged.  Option names differ between the 0.2.2
   * API and later versions. */
#ifdef HAVE_WILDMIDI_0_2_2
  WildMidi_SetOption (wildmidi->song, WM_MO_LINEAR_VOLUME,
      wildmidi->linear_volume);
  WildMidi_SetOption (wildmidi->song, WM_MO_EXPENSIVE_INTERPOLATION,
      wildmidi->high_quality);
#else
  WildMidi_SetOption (wildmidi->song, WM_MO_LOG_VOLUME,
      !wildmidi->linear_volume);
  WildMidi_SetOption (wildmidi->song, WM_MO_ENHANCED_RESAMPLING,
      wildmidi->high_quality);
#endif

  info = WildMidi_GetInfo (wildmidi->song);
  GST_OBJECT_UNLOCK (wildmidi);

  /* total length in samples, used for duration/seeking elsewhere */
  wildmidi->o_len = info->approx_total_samples;

  /* output caps are fixed by the pad template */
  outcaps = gst_caps_copy (gst_pad_get_pad_template_caps (wildmidi->srcpad));
  gst_pad_set_caps (wildmidi->srcpad, outcaps);
  gst_caps_unref (outcaps);

  /* we keep an internal segment in samples */
  gst_segment_init (wildmidi->o_segment, GST_FORMAT_DEFAULT);

  gst_pad_push_event (wildmidi->srcpad,
      gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME));

  GST_DEBUG_OBJECT (wildmidi, "Parsing song done");

  return GST_FLOW_OK;

  /* ERRORS */
open_failed:
  {
    GST_OBJECT_UNLOCK (wildmidi);
    GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL),
        ("Unable to parse midi data"));
    return GST_FLOW_ERROR;
  }
}
/* Transform one video frame: the frame is divided into square "dice" cells
 * of g_cube_size pixels (g_cube_size == 1 << g_cube_bits), and each cell is
 * copied from input to output either unrotated or rotated according to the
 * per-cell entry in filter->dicemap (DICE_UP/LEFT/DOWN/RIGHT).
 * Pixels are treated as 32-bit words (guint32). */
static GstFlowReturn
gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
  GstDiceTV *filter = GST_DICETV (trans);
  guint32 *src, *dest;
  gint i, map_x, map_y, map_i, base, dx, dy, di;
  gint video_width, g_cube_bits, g_cube_size;
  gint g_map_height, g_map_width;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp, stream_time;
  const guint8 *dicemap;

  src = (guint32 *) GST_BUFFER_DATA (in);
  dest = (guint32 *) GST_BUFFER_DATA (out);

  timestamp = GST_BUFFER_TIMESTAMP (in);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  /* sync controllable properties to the current stream time */
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (filter), stream_time);

  /* snapshot the filter state under the object lock so property changes
   * cannot race with the pixel loop below */
  GST_OBJECT_LOCK (filter);
  video_width = filter->width;
  g_cube_bits = filter->g_cube_bits;
  g_cube_size = filter->g_cube_size;
  g_map_height = filter->g_map_height;
  g_map_width = filter->g_map_width;
  dicemap = filter->dicemap;

  map_i = 0;
  for (map_y = 0; map_y < g_map_height; map_y++) {
    for (map_x = 0; map_x < g_map_width; map_x++) {
      /* top-left pixel index of this cell in the frame */
      base = (map_y << g_cube_bits) * video_width + (map_x << g_cube_bits);

      switch (dicemap[map_i]) {
        case DICE_UP:
          /* straight copy, no rotation */
          for (dy = 0; dy < g_cube_size; dy++) {
            i = base + dy * video_width;
            for (dx = 0; dx < g_cube_size; dx++) {
              dest[i] = src[i];
              i++;
            }
          }
          break;
        case DICE_LEFT:
          /* 90-degree rotation: source row (dy) becomes a destination
           * column, mirrored via (g_cube_size - dy - 1) */
          for (dy = 0; dy < g_cube_size; dy++) {
            i = base + dy * video_width;
            for (dx = 0; dx < g_cube_size; dx++) {
              di = base + (dx * video_width) + (g_cube_size - dy - 1);
              dest[di] = src[i];
              i++;
            }
          }
          break;
        case DICE_DOWN:
          /* 180-degree rotation: rows and columns both reversed;
           * `i` walks the mirrored source backwards (note the pre-decrement
           * start at row end, then i-- inside the loop) */
          for (dy = 0; dy < g_cube_size; dy++) {
            di = base + dy * video_width;
            i = base + (g_cube_size - dy - 1) * video_width + g_cube_size;
            for (dx = 0; dx < g_cube_size; dx++) {
              i--;
              dest[di] = src[i];
              di++;
            }
          }
          break;
        case DICE_RIGHT:
          /* 90-degree rotation in the opposite direction of DICE_LEFT */
          for (dy = 0; dy < g_cube_size; dy++) {
            i = base + (dy * video_width);
            for (dx = 0; dx < g_cube_size; dx++) {
              di = base + dy + (g_cube_size - dx - 1) * video_width;
              dest[di] = src[i];
              i++;
            }
          }
          break;
        default:
          /* dicemap entries are generated from the four DICE_* values only */
          g_assert_not_reached ();
          break;
      }
      map_i++;
    }
  }
  GST_OBJECT_UNLOCK (filter);

  return ret;
}
/* Render one buffer into the shared-memory pipe.
 *
 * If the buffer consists of a single GstMemory that was allocated by our
 * own shm allocator it is sent zero-copy; otherwise the data is copied into
 * a freshly allocated shm block first.  The function blocks (on self->cond,
 * under the object lock) until a client is connected (when
 * wait_for_connection is set), until rendering is allowed, and until shm
 * space is available; each wait loop re-checks self->unlock so that
 * ::unlock can abort the wait and return GST_FLOW_FLUSHING.
 *
 * Returns GST_FLOW_OK, GST_FLOW_FLUSHING when unlocked, or GST_FLOW_ERROR
 * on allocation/sending failure. */
static GstFlowReturn
gst_shm_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstShmSink *self = GST_SHM_SINK (bsink);
  int rv = 0;
  GstMapInfo map;
  gboolean need_new_memory = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMemory *memory = NULL;
  GstBuffer *sendbuf = NULL;

  GST_OBJECT_LOCK (self);
  /* optionally block until at least one client is connected */
  while (self->wait_for_connection && !self->clients) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock)
      goto flushing;
  }

  /* block until this timestamp may be rendered */
  while (!gst_shm_sink_can_render (self, GST_BUFFER_TIMESTAMP (buf))) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock)
      goto flushing;
  }

  /* decide whether we can send the buffer's memory directly or must copy */
  if (gst_buffer_n_memory (buf) > 1) {
    GST_LOG_OBJECT (self, "Buffer %p has %d GstMemory, we only support a single"
        " one, need to do a memcpy", buf, gst_buffer_n_memory (buf));
    need_new_memory = TRUE;
  } else {
    memory = gst_buffer_peek_memory (buf, 0);

    if (memory->allocator != GST_ALLOCATOR (self->allocator)) {
      need_new_memory = TRUE;
      GST_LOG_OBJECT (self, "Memory in buffer %p was not allocated by "
          "%" GST_PTR_FORMAT ", will memcpy", buf, memory->allocator);
    }
  }

  if (need_new_memory) {
    /* a buffer larger than the whole shm area can never fit: hard error */
    if (gst_buffer_get_size (buf) > sp_writer_get_max_buf_size (self->pipe)) {
      gsize area_size = sp_writer_get_max_buf_size (self->pipe);
      GST_OBJECT_UNLOCK (self);
      GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT,
          ("Shared memory area is too small"),
          ("Shared memory area of size %" G_GSIZE_FORMAT " is smaller than"
              "buffer of size %" G_GSIZE_FORMAT, area_size,
              gst_buffer_get_size (buf)));
      return GST_FLOW_ERROR;
    }

    /* wait until enough shm space is free (memory stays NULL while we
     * loop, so the flushing path has nothing to release) */
    while ((memory = gst_shm_sink_allocator_alloc_locked (self->allocator,
                gst_buffer_get_size (buf), &self->params)) == NULL) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock)
        goto flushing;
    }

    /* re-check the connection condition: it may have changed while we
     * waited for memory; here we do own `memory`, so release it on unlock */
    while (self->wait_for_connection && !self->clients) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        gst_memory_unref (memory);
        GST_OBJECT_UNLOCK (self);
        return GST_FLOW_FLUSHING;
      }
    }

    /* copy the payload into the shm block and wrap it in a new buffer,
     * keeping the original buffer's metadata */
    gst_memory_map (memory, &map, GST_MAP_WRITE);
    gst_buffer_extract (buf, 0, map.data, map.size);
    gst_memory_unmap (memory, &map);

    sendbuf = gst_buffer_new ();
    gst_buffer_copy_into (sendbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
    gst_buffer_append_memory (sendbuf, memory);
  } else {
    /* zero-copy path: the buffer's memory already lives in our shm area */
    sendbuf = gst_buffer_ref (buf);
  }

  gst_buffer_map (sendbuf, &map, GST_MAP_READ);
  /* Make the memory readonly as of now as we've sent it to the other side
   * We know it's not mapped for writing anywhere as we just mapped it for
   * reading */
  rv = sp_writer_send_buf (self->pipe, (char *) map.data, map.size, sendbuf);
  gst_buffer_unmap (sendbuf, &map);

  GST_OBJECT_UNLOCK (self);

  if (rv == 0) {
    /* nobody is listening: sendbuf was not handed over, drop our ref */
    GST_DEBUG_OBJECT (self, "No clients connected, unreffing buffer");
    gst_buffer_unref (sendbuf);
  } else if (rv == -1) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Invalid allocated buffer"),
        ("The shmpipe library rejects our buffer, this is a bug"));
    ret = GST_FLOW_ERROR;
  }
  /* on rv > 0 the pipe keeps a reference to sendbuf until the client is
   * done with it, so it is intentionally not unreffed here */

  return ret;

flushing:
  GST_OBJECT_UNLOCK (self);
  return GST_FLOW_FLUSHING;
}
/* Sink-pad query handler for the GL mixer (GstAggregator::sink_query).
 * Answers CAPS, ACCEPT_CAPS, ALLOCATION and CONTEXT queries itself and
 * forwards everything else to the parent class implementation.
 * Returns TRUE when the query was answered. */
static gboolean
gst_gl_mixer_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
    GstQuery * query)
{
  gboolean ret = FALSE;
  GstGLMixer *mix = GST_GL_MIXER (agg);
  GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);

  GST_TRACE ("QUERY %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
    {
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_gl_mixer_pad_sink_getcaps (GST_PAD (bpad), mix, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      ret = TRUE;
      break;
    }
    case GST_QUERY_ACCEPT_CAPS:
    {
      GstCaps *caps;

      gst_query_parse_accept_caps (query, &caps);
      ret = gst_gl_mixer_pad_sink_acceptcaps (GST_PAD (bpad), mix, caps);
      gst_query_set_accept_caps_result (query, ret);
      /* the query itself was handled regardless of the accept result */
      ret = TRUE;
      break;
    }
    case GST_QUERY_ALLOCATION:
    {
      GstQuery *decide_query = NULL;

      /* take a ref to the stored decide query under the lock, then work
       * on it unlocked so propose_allocation can't deadlock on us */
      GST_OBJECT_LOCK (mix);
      if (G_UNLIKELY (!mix->priv->negotiated)) {
        GST_DEBUG_OBJECT (mix,
            "not negotiated yet, can't answer ALLOCATION query");
        GST_OBJECT_UNLOCK (mix);
        return FALSE;
      }
      if ((decide_query = mix->priv->query))
        gst_query_ref (decide_query);
      GST_OBJECT_UNLOCK (mix);

      GST_DEBUG_OBJECT (mix,
          "calling propose allocation with query %" GST_PTR_FORMAT,
          decide_query);

      /* pass the query to the propose_allocation vmethod if any */
      ret = gst_gl_mixer_propose_allocation (mix, decide_query, query);

      if (decide_query)
        gst_query_unref (decide_query);

      GST_DEBUG_OBJECT (mix, "ALLOCATION ret %d, %" GST_PTR_FORMAT, ret, query);
      break;
    }
    case GST_QUERY_CONTEXT:
    {
      ret = gst_gl_handle_context_query ((GstElement *) mix, query,
          &mix->display, &mix->other_context);
      /* restrict the display to the GL APIs this mixer subclass supports */
      if (mix->display)
        gst_gl_display_filter_gl_api (mix->display,
            mix_class->supported_gl_api);
      break;
    }
    default:
      ret = GST_AGGREGATOR_CLASS (parent_class)->sink_query (agg, bpad, query);
      break;
  }

  return ret;
}
/* Source-pad query handler.
 * DURATION and POSITION are answered by querying upstream in bytes,
 * subtracting the AU header offset (under the object lock) and converting
 * the byte value to the requested format; all other queries go through the
 * default handler.  Returns TRUE when the query was answered. */
static gboolean
gst_au_parse_src_query (GstPad * pad, GstQuery * query)
{
  GstAuParse *auparse = GST_AU_PARSE (gst_pad_get_parent (pad));
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_DURATION:{
      GstFormat byte_fmt = GST_FORMAT_BYTES;
      GstFormat out_fmt;
      gint64 total, converted;

      gst_query_parse_duration (query, &out_fmt, NULL);

      if (!gst_pad_query_peer_duration (auparse->sinkpad, &byte_fmt, &total)) {
        GST_DEBUG_OBJECT (auparse, "failed to query upstream length");
        break;
      }

      /* the AU header is not payload; drop it from the byte count */
      GST_OBJECT_LOCK (auparse);
      total -= auparse->offset;
      GST_OBJECT_UNLOCK (auparse);

      res = gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, total,
          out_fmt, &converted);
      if (res)
        gst_query_set_duration (query, out_fmt, converted);
      break;
    }
    case GST_QUERY_POSITION:{
      GstFormat byte_fmt = GST_FORMAT_BYTES;
      GstFormat out_fmt;
      gint64 cur, converted;

      gst_query_parse_position (query, &out_fmt, NULL);

      if (!gst_pad_query_peer_position (auparse->sinkpad, &byte_fmt, &cur)) {
        GST_DEBUG_OBJECT (auparse, "failed to query upstream position");
        break;
      }

      /* same header correction as for the duration */
      GST_OBJECT_LOCK (auparse);
      cur -= auparse->offset;
      GST_OBJECT_UNLOCK (auparse);

      res = gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, cur,
          out_fmt, &converted);
      if (res)
        gst_query_set_position (query, out_fmt, converted);
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }

  gst_object_unref (auparse);
  return res;
}
/* GstBin::handle_message override for the RTP conference.
 *
 * - ELEMENT "application/x-rtp-source-sdes" messages: associate the
 *   announced ssrc/cname pair with the matching FsRtpSession.
 * - ELEMENT dtmf-event-processed / dtmf-event-dropped: offer the message to
 *   each session; a session that consumes it swallows the message (the
 *   sessions_cookie / restart dance guards against the session list
 *   changing while the lock is dropped for the callback).
 * - STREAM_STATUS: track the set of streaming threads in priv->threads.
 * Any message not consumed is forwarded to the parent class.
 *
 * Fix: added the missing space in the "non-existent session" warning; the
 * two string literals previously concatenated to "...associationfor...". */
static void
fs_rtp_conference_handle_message (GstBin * bin, GstMessage * message)
{
  FsRtpConference *self = FS_RTP_CONFERENCE (bin);

  if (!self->rtpbin)
    goto out;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:
    {
      const GstStructure *s = gst_message_get_structure (message);

      /* we change the structure name and add the session ID to it */
      if (gst_structure_has_name (s, "application/x-rtp-source-sdes") &&
          gst_structure_has_field_typed (s, "session", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "cname", G_TYPE_STRING)) {
        guint session_id;
        guint ssrc;
        const GValue *val;
        FsRtpSession *session;
        const gchar *cname;

        val = gst_structure_get_value (s, "session");
        session_id = g_value_get_uint (val);

        val = gst_structure_get_value (s, "ssrc");
        ssrc = g_value_get_uint (val);

        cname = gst_structure_get_string (s, "cname");

        if (!ssrc || !cname) {
          GST_WARNING_OBJECT (self,
              "Got GstRTPBinSDES without a ssrc or a cname (ssrc:%u cname:%p)",
              ssrc, cname);
          break;
        }

        session = fs_rtp_conference_get_session_by_id (self, session_id);

        if (session) {
          fs_rtp_session_associate_ssrc_cname (session, ssrc, cname);
          g_object_unref (session);
        } else {
          /* FIX: trailing space added so the concatenated literals read
           * "...a new association for non-existent..." */
          GST_WARNING_OBJECT (self, "Our RtpBin announced a new association "
              "for non-existent session %u for ssrc: %u and cname %s",
              session_id, ssrc, cname);
        }
      } else if (gst_structure_has_name (s, "dtmf-event-processed") ||
          gst_structure_has_name (s, "dtmf-event-dropped")) {
        GList *item;
        guint cookie;

        GST_OBJECT_LOCK (self);
      restart:
        cookie = self->priv->sessions_cookie;
        for (item = self->priv->sessions; item; item = item->next) {
          /* drop the lock for the per-session callback, then re-check the
           * cookie: if the session list changed, start over */
          GST_OBJECT_UNLOCK (self);
          if (fs_rtp_session_handle_dtmf_event_message (item->data, message)) {
            /* consumed: do not forward to the parent */
            gst_message_unref (message);
            message = NULL;
            goto out;
          }
          GST_OBJECT_LOCK (self);
          if (cookie != self->priv->sessions_cookie)
            goto restart;
        }
        GST_OBJECT_UNLOCK (self);
      }
    }
      break;
    case GST_MESSAGE_STREAM_STATUS:
    {
      GstStreamStatusType type;
      guint i;

      gst_message_parse_stream_status (message, &type, NULL);

      switch (type) {
        case GST_STREAM_STATUS_TYPE_ENTER:
          /* record this streaming thread once */
          GST_OBJECT_LOCK (self);
          for (i = 0; i < self->priv->threads->len; i++) {
            if (g_ptr_array_index (self->priv->threads, i) == g_thread_self ())
              goto done;
          }
          g_ptr_array_add (self->priv->threads, g_thread_self ());
        done:
          GST_OBJECT_UNLOCK (self);
          break;
        case GST_STREAM_STATUS_TYPE_LEAVE:
          /* remove every occurrence of this thread */
          GST_OBJECT_LOCK (self);
          while (g_ptr_array_remove_fast (self->priv->threads,
                  g_thread_self ()));
          GST_OBJECT_UNLOCK (self);
          break;
        default:
          /* Do nothing */
          break;
      }
    }
      break;
    default:
      break;
  }

out:
  /* forward all messages to the parent */
  if (message)
    GST_BIN_CLASS (fs_rtp_conference_parent_class)->handle_message (bin,
        message);
}
/* GObject property setter for GstNetfilter.
 *
 * PROP_FILTER_ADDRESS accepts a hostname, IPv4 or IPv6 address string and
 * resolves it with getaddrinfo(); the first result's address is stored in
 * netfilter->filter_address.  PROP_ENABLED toggles filtering.
 *
 * Fix: freeaddrinfo() was previously called even when getaddrinfo() failed.
 * On failure getaddrinfo() does not set *result, so an indeterminate pointer
 * was freed (undefined behavior).  It is now only freed on success, and
 * `result` is initialized to NULL for good measure.
 *
 * NOTE(review): getaddrinfo() may block on DNS while the object lock is
 * held — consider resolving outside the lock; confirm against callers. */
static void gst_netfilter_set_property(GObject *object, guint prop_id, GValue const *value, GParamSpec *pspec)
{
	GstNetfilter *netfilter;

	GST_OBJECT_LOCK(object);

	netfilter = GST_NETFILTER(object);

	switch (prop_id)
	{
		case PROP_FILTER_ADDRESS:
		{
			/* The address can be specified in many ways, as hostname, as IPv4
			   address, as IPv6 address.. getaddrinfo is used to get an IP
			   address out of what has been specified */
			int error;
			struct addrinfo *result = NULL;
			char const *address_str;

			address_str = g_value_get_string(value);

			error = getaddrinfo(address_str, NULL, NULL, &result);
			if (error != 0)
			{
				/* *result was not set; nothing to free here */
				GST_ERROR_OBJECT(netfilter, "Could not set filter address property: %s", gai_strerror(error));
			}
			else
			{
				switch (result->ai_family)
				{
					case AF_INET:
					{
						struct sockaddr_in const *sockaddr_ipv4 = (struct sockaddr_in const *)(result->ai_addr);
						struct in_addr const *addr = &(sockaddr_ipv4->sin_addr);
						gst_netaddress_set_ip4_address(&(netfilter->filter_address), addr->s_addr, DUMMY_PORT);
						break;
					}
					case AF_INET6:
					{
						struct sockaddr_in6 const *sockaddr_ipv6 = (struct sockaddr_in6 const *)(result->ai_addr);
						struct in6_addr const *addr = &(sockaddr_ipv6->sin6_addr);
						gst_netaddress_set_ip6_address(&(netfilter->filter_address), (guint8 *)(addr->s6_addr), DUMMY_PORT);
						break;
					}
					default:
						GST_ERROR_OBJECT(netfilter, "Could not set filter address property: unknown address family %d", result->ai_family);
				}

				/* FIX: only free the result list when getaddrinfo() succeeded */
				freeaddrinfo(result);
			}

			break;
		}

		case PROP_ENABLED:
		{
			netfilter->filtering_enabled = g_value_get_boolean(value);
			GST_DEBUG_OBJECT(netfilter, "Filtering is %s", netfilter->filtering_enabled ? "enabled" : "disabled");
			break;
		}

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}

	GST_OBJECT_UNLOCK(object);
}
/* Source-pad query handler for typefind.
 * CAPS queries are answered from the already-found caps when available;
 * POSITION results from upstream are corrected for the data still buffered
 * in the adapter; everything else is proxied or handled by the default
 * implementation.  Returns TRUE when the query succeeded. */
static gboolean
gst_type_find_handle_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstTypeFindElement *typefind;
  gboolean res = FALSE;

  typefind = GST_TYPE_FIND_ELEMENT (parent);
  GST_DEBUG_OBJECT (typefind, "Handling src query %s",
      GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_SCHEDULING:
      /* FIXME, filter out the scheduling modes that we understand */
      res = gst_pad_peer_query (typefind->sink, query);
      break;
    case GST_QUERY_CAPS:
    {
      GST_DEBUG_OBJECT (typefind,
          "Got caps query, our caps are %" GST_PTR_FORMAT, typefind->caps);

      /* We can hijack caps query if we typefind already */
      if (typefind->caps) {
        gst_query_set_caps_result (query, typefind->caps);
        res = TRUE;
      } else {
        res = gst_pad_peer_query (typefind->sink, query);
      }
      break;
    }
    case GST_QUERY_POSITION:
    {
      gint64 peer_pos;
      GstFormat format;

      /* first ask upstream; bail out if it cannot answer */
      if (!(res = gst_pad_peer_query (typefind->sink, query)))
        goto out;

      gst_query_parse_position (query, &format, &peer_pos);

      GST_OBJECT_LOCK (typefind);
      /* FIXME: this code assumes that there's no discont in the queue */
      switch (format) {
        case GST_FORMAT_BYTES:
          /* upstream already consumed the bytes sitting in our adapter,
           * but downstream has not seen them yet: subtract them */
          peer_pos -= gst_adapter_available (typefind->adapter);
          if (peer_pos < 0)     /* Clamp result to 0 */
            peer_pos = 0;
          break;
        default:
          /* FIXME */
          break;
      }
      GST_OBJECT_UNLOCK (typefind);
      gst_query_set_position (query, format, peer_pos);
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }
out:
  return res;
}
/* Leave typefinding mode and switch the element to normal pass-through.
 * If we are at least PAUSED and caps were found, the cached sticky events
 * and the data accumulated in the adapter are pushed downstream; otherwise
 * the cached data is dropped.  Special-cases a peer pad without a chain
 * function (pull-mode-only downstream) with a meaningful error. */
static void
stop_typefinding (GstTypeFindElement * typefind)
{
  GstState state;
  gboolean push_cached_buffers;
  gsize avail;
  GstBuffer *buffer;
  GstClockTime pts, dts;

  gst_element_get_state (GST_ELEMENT (typefind), &state, NULL, 0);

  /* only push downstream once we are running and know the caps */
  push_cached_buffers = (state >= GST_STATE_PAUSED && typefind->caps);

  GST_DEBUG_OBJECT (typefind, "stopping typefinding%s",
      push_cached_buffers ? " and pushing cached events and buffers" : "");

  typefind->mode = MODE_NORMAL;
  /* sticky events (stream-start, caps, segment, ...) must precede data */
  if (push_cached_buffers)
    gst_type_find_element_send_cached_events (typefind);

  GST_OBJECT_LOCK (typefind);
  avail = gst_adapter_available (typefind->adapter);
  if (avail == 0)
    goto no_data;

  /* drain the adapter into one buffer, restoring the last known
   * timestamps and the offset where typefinding started */
  pts = gst_adapter_prev_pts (typefind->adapter, NULL);
  dts = gst_adapter_prev_dts (typefind->adapter, NULL);
  buffer = gst_adapter_take_buffer (typefind->adapter, avail);
  GST_BUFFER_PTS (buffer) = pts;
  GST_BUFFER_DTS (buffer) = dts;
  GST_BUFFER_OFFSET (buffer) = typefind->initial_offset;
  GST_OBJECT_UNLOCK (typefind);

  if (!push_cached_buffers) {
    gst_buffer_unref (buffer);
  } else {
    GstPad *peer = gst_pad_get_peer (typefind->src);

    /* make sure the user gets a meaningful error message in this case,
     * which is not a core bug or bug of any kind (as the default error
     * message emitted by gstpad.c otherwise would make you think) */
    if (peer && GST_PAD_CHAINFUNC (peer) == NULL) {
      GST_DEBUG_OBJECT (typefind, "upstream only supports push mode, while "
          "downstream element only works in pull mode, erroring out");
      GST_ELEMENT_ERROR (typefind, STREAM, FAILED,
          ("%s cannot work in push mode. The operation is not supported "
              "with this source element or protocol.",
              G_OBJECT_TYPE_NAME (GST_PAD_PARENT (peer))),
          ("Downstream pad %s:%s has no chainfunction, and the upstream "
              "element does not support pull mode", GST_DEBUG_PAD_NAME (peer)));
      typefind->mode = MODE_ERROR;      /* make the chain function error out */
      gst_buffer_unref (buffer);
    } else {
      gst_pad_push (typefind->src, buffer);
    }
    if (peer)
      gst_object_unref (peer);
  }
  return;

  /* ERRORS */
no_data:
  {
    GST_DEBUG_OBJECT (typefind, "we have no data to typefind");
    GST_OBJECT_UNLOCK (typefind);
    return;
  }
}
/* Compose all sink-pad input frames onto the output buffer.
 *
 * Flow: set the output frame on the subclass, optionally fill the overall
 * region with the background color (only when no input fully covers it at
 * 100% alpha), then — under the object lock — walk the sink pads twice:
 * once to detect new pads (which invalidate the overall region) and once to
 * crop (honoring GstVideoCropMeta), update each pad's canvas if needed and
 * draw its frame via the subclass draw_frame vfunc.
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR if setting the output frame or
 * drawing fails. */
static GstFlowReturn gst_imx_compositor_aggregate_frames(GstImxBPVideoAggregator *videoaggregator, GstBuffer *outbuffer)
{
	GstFlowReturn ret = GST_FLOW_OK;
	GList *walk;
	GstImxCompositor *compositor = GST_IMX_COMPOSITOR(videoaggregator);
	GstImxCompositorClass *klass = GST_IMX_COMPOSITOR_CLASS(G_OBJECT_GET_CLASS(videoaggregator));

	/* all three vfuncs are mandatory for subclasses */
	g_assert(klass->set_output_frame != NULL);
	g_assert(klass->fill_region != NULL);
	g_assert(klass->draw_frame != NULL);

	/* This function is the heart of the compositor. Here, input frames
	 * are drawn on the output frame, with their specific parameters. */

	/* Set the output buffer */
	if (!(klass->set_output_frame(compositor, outbuffer)))
	{
		GST_ERROR_OBJECT(compositor, "could not set the output frame");
		return GST_FLOW_ERROR;
	}

	/* TODO: are the update_overall_region calls here necessary?
	 * If the video aggregator calls update_caps when a pad is added/removed,
	 * there is no need for these calls */

	/* Update the overall region first if necessary to ensure that it is valid
	 * and that the region_fill_necessary flag is set to the proper value */
	gst_imx_compositor_update_overall_region(compositor);

	GST_LOG_OBJECT(compositor, "aggregating frames, region_fill_necessary: %d", (gint)(compositor->region_fill_necessary));

	/* Check if the overall region needs to be filled. This is the case if none
	 * of the input frames completely cover the overall region with 100% alpha
	 * (this is determined by gst_imx_compositor_update_overall_region() ) */
	if (!(compositor->region_fill_necessary) || klass->fill_region(compositor, &(compositor->overall_region), compositor->background_color))
	{
		/* Lock object to ensure nothing is changed during composition */
		GST_OBJECT_LOCK(compositor);

		/* First walk: check if there is a new pad. If so, recompute the
		 * overall region, since it might need to be expanded to encompass
		 * the new additional input frames */
		walk = GST_ELEMENT(videoaggregator)->sinkpads;
		while (walk != NULL)
		{
			GstImxCompositorPad *compositor_pad = GST_IMX_COMPOSITOR_PAD_CAST(walk->data);

			if (compositor_pad->pad_is_new)
			{
				GST_DEBUG_OBJECT(compositor, "there is a new pad; invalidate overall region");

				compositor_pad->pad_is_new = FALSE;
				compositor->overall_region_valid = FALSE;

				/* While this call might seem redundant, there is one
				 * benefit in calling this function apparently twice
				 * (once above, and once here): the earlier call
				 * happens outside of the object lock. New pads are less
				 * common than overall region changes, so it is good
				 * if most update calls happen outside of the object
				 * lock (the overall_region_valid flag ensures redundant
				 * calls don't compute anything). */
				gst_imx_compositor_update_overall_region(compositor);
				break;
			}

			/* Move to next pad */
			walk = g_list_next(walk);
		}

		/* Second walk: draw the input frames on the output frame */
		walk = GST_ELEMENT(videoaggregator)->sinkpads;
		while (walk != NULL)
		{
			GstImxBPVideoAggregatorPad *videoaggregator_pad = walk->data;
			GstImxCompositorPad *compositor_pad = GST_IMX_COMPOSITOR_PAD_CAST(videoaggregator_pad);

			/* If there actually is a buffer, draw it
			 * Sometimes, pads don't deliver data right from the start;
			 * in these cases, their buffers will be NULL
			 * Just skip to the next pad in that case */
			if (videoaggregator_pad->buffer != NULL)
			{
				GstVideoCropMeta *video_crop_meta;
				if (compositor_pad->input_crop && ((video_crop_meta = gst_buffer_get_video_crop_meta(videoaggregator_pad->buffer)) != NULL))
				{
					/* Crop metadata present. Reconfigure canvas. */

					GstVideoInfo *info = &(videoaggregator_pad->info);
					GstImxRegion source_region;
					source_region.x1 = video_crop_meta->x;
					source_region.y1 = video_crop_meta->y;
					source_region.x2 = video_crop_meta->x + video_crop_meta->width;
					source_region.y2 = video_crop_meta->y + video_crop_meta->height;

					/* Make sure the source region does not exceed valid bounds */
					source_region.x1 = MAX(0, source_region.x1);
					source_region.y1 = MAX(0, source_region.y1);
					source_region.x2 = MIN(GST_VIDEO_INFO_WIDTH(info), source_region.x2);
					source_region.y2 = MIN(GST_VIDEO_INFO_HEIGHT(info), source_region.y2);

					GST_LOG_OBJECT(compositor, "retrieved crop rectangle %" GST_IMX_REGION_FORMAT, GST_IMX_REGION_ARGS(&source_region));

					/* Canvas needs to be updated if either one of these applies:
					 * - the current frame has crop metadata, the last one didn't
					 * - the new crop rectangle and the last are different */
					if (!(compositor_pad->last_frame_with_cropdata) || !gst_imx_region_equal(&source_region, &(compositor_pad->last_source_region)))
					{
						GST_LOG_OBJECT(compositor, "using new crop rectangle %" GST_IMX_REGION_FORMAT, GST_IMX_REGION_ARGS(&source_region));
						compositor_pad->last_source_region = source_region;
						compositor_pad->canvas_needs_update = TRUE;
					}

					compositor_pad->last_frame_with_cropdata = TRUE;

					/* Update canvas and input region if necessary */
					if (compositor_pad->canvas_needs_update)
						gst_imx_compositor_pad_update_canvas(compositor_pad, &(compositor_pad->last_source_region));
				}
				else
				{
					/* Force an update if this frame has no crop metadata but the last one did */
					if (compositor_pad->last_frame_with_cropdata)
						compositor_pad->canvas_needs_update = TRUE;
					compositor_pad->last_frame_with_cropdata = FALSE;

					/* Update the pad's canvas if necessary,
					 * to ensure there is a valid canvas to draw to */
					gst_imx_compositor_pad_update_canvas(compositor_pad, NULL);
				}

				GST_LOG_OBJECT(
					compositor,
					"pad %p frame %p format: %s width/height: %d/%d regions: outer %" GST_IMX_REGION_FORMAT " inner %" GST_IMX_REGION_FORMAT " source subset %" GST_IMX_REGION_FORMAT,
					(gpointer)(videoaggregator_pad),
					(gpointer)(videoaggregator_pad->buffer),
					gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(&(videoaggregator_pad->info))),
					GST_VIDEO_INFO_WIDTH(&(videoaggregator_pad->info)),
					GST_VIDEO_INFO_HEIGHT(&(videoaggregator_pad->info)),
					GST_IMX_REGION_ARGS(&(compositor_pad->canvas.outer_region)),
					GST_IMX_REGION_ARGS(&(compositor_pad->canvas.inner_region)),
					GST_IMX_REGION_ARGS(&(compositor_pad->source_subset))
				);

				/* alpha is scaled from [0.0, 1.0] to the 8-bit value the
				 * subclass expects */
				if (!klass->draw_frame(
					compositor,
					&(videoaggregator_pad->info),
					&(compositor_pad->source_subset),
					&(compositor_pad->canvas),
					videoaggregator_pad->buffer,
					(guint8)(compositor_pad->alpha * 255.0)
				))
				{
					GST_ERROR_OBJECT(compositor, "error while drawing composition frame");
					ret = GST_FLOW_ERROR;
					break;
				}
			}
			else
			{
				GST_LOG_OBJECT(compositor, "pad %p buffer is NULL, no frame to aggregate - skipping to next pad", (gpointer)(videoaggregator_pad));
			}

			/* Move to next pad */
			walk = g_list_next(walk);
		}

		GST_OBJECT_UNLOCK(compositor);
	}

	/* Release the output buffer, since we don't need it anymore, and
	 * there is no reason to retain it */
	klass->set_output_frame(compositor, NULL);

	return ret;
}
/* Streaming-task function for pull-mode typefinding.
 * First iteration(s) run in MODE_TYPEFIND: determine the caps (forced caps,
 * typefind helper over the whole upstream range, or extension guess) and
 * emit have-type.  Subsequent iterations run in MODE_NORMAL: pull 4k blocks
 * and push them downstream.  On any non-OK flow return the task pauses and
 * the usual EOS / error logic runs in the `pause` block. */
static void
gst_type_find_element_loop (GstPad * pad)
{
  GstTypeFindElement *typefind;
  GstFlowReturn ret = GST_FLOW_OK;

  typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad));

  /* push STREAM_START exactly once before any other event/data */
  if (typefind->need_stream_start) {
    gchar *stream_id;
    GstEvent *event;

    stream_id = gst_pad_create_stream_id (typefind->src,
        GST_ELEMENT_CAST (typefind), NULL);

    GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START");

    event = gst_event_new_stream_start (stream_id);
    gst_event_set_group_id (event, gst_util_group_id_next ());
    gst_pad_push_event (typefind->src, event);

    typefind->need_stream_start = FALSE;
    g_free (stream_id);
  }

  if (typefind->mode == MODE_TYPEFIND) {
    GstPad *peer = NULL;
    GstCaps *found_caps = NULL;
    GstTypeFindProbability probability = GST_TYPE_FIND_NONE;

    GST_DEBUG_OBJECT (typefind, "find type in pull mode");

    /* forced caps short-circuit the whole detection */
    GST_OBJECT_LOCK (typefind);
    if (typefind->force_caps) {
      found_caps = gst_caps_ref (typefind->force_caps);
      probability = GST_TYPE_FIND_MAXIMUM;
    }
    GST_OBJECT_UNLOCK (typefind);

    if (!found_caps) {
      peer = gst_pad_get_peer (pad);
      if (peer) {
        gint64 size;
        gchar *ext;

        if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) {
          GST_WARNING_OBJECT (typefind, "Could not query upstream length!");
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }

        /* the size if 0, we cannot continue */
        if (size == 0) {
          /* keep message in sync with message in sink event handler */
          GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
              (_("Stream contains no data.")), ("Can't typefind empty stream"));
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }
        ext = gst_type_find_get_extension (typefind, pad);

        /* run the typefind helper over upstream's getrange function;
         * the extension is a hint to try likely typefinders first */
        found_caps =
            gst_type_find_helper_get_range (GST_OBJECT_CAST (peer),
            GST_OBJECT_PARENT (peer),
            (GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)),
            (guint64) size, ext, &probability);
        g_free (ext);

        GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps);

        gst_object_unref (peer);
      }
    }

    /* fall back to guessing from the file extension alone */
    if (!found_caps || probability < typefind->min_probability) {
      GST_DEBUG ("Trying to guess using extension");
      gst_caps_replace (&found_caps, NULL);
      found_caps =
          gst_type_find_guess_by_extension (typefind, pad, &probability);
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
      gst_caps_replace (&found_caps, NULL);
      ret = GST_FLOW_ERROR;
      goto pause;
    }

    GST_DEBUG ("Emiting found caps %" GST_PTR_FORMAT, found_caps);
    gst_type_find_element_emit_have_type (typefind, probability, found_caps);
    typefind->mode = MODE_NORMAL;
    gst_caps_unref (found_caps);
  } else if (typefind->mode == MODE_NORMAL) {
    GstBuffer *outbuf = NULL;

    if (typefind->need_segment) {
      typefind->need_segment = FALSE;
      gst_pad_push_event (typefind->src,
          gst_event_new_segment (&typefind->segment));
    }

    /* Pull 4k blocks and send downstream */
    ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;

    typefind->offset += gst_buffer_get_size (outbuf);

    ret = gst_pad_push (typefind->src, outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;
  } else {
    /* Error out */
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    gboolean push_eos = FALSE;

    GST_LOG_OBJECT (typefind, "pausing task, reason %s", reason);
    gst_pad_pause_task (typefind->sink);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */

      if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
        gint64 stop;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = typefind->segment.stop) == -1)
          stop = typefind->offset;

        GST_LOG_OBJECT (typefind, "Sending segment done, at end of segment");
        gst_element_post_message (GST_ELEMENT (typefind),
            gst_message_new_segment_done (GST_OBJECT (typefind),
                GST_FORMAT_BYTES, stop));
        gst_pad_push_event (typefind->src,
            gst_event_new_segment_done (GST_FORMAT_BYTES, stop));
      } else {
        push_eos = TRUE;
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_FLOW_ERROR (typefind, ret);
      push_eos = TRUE;
    }
    if (push_eos) {
      /* send EOS, and prevent hanging if no streams yet */
      GST_LOG_OBJECT (typefind, "Sending EOS, at end of stream");
      gst_pad_push_event (typefind->src, gst_event_new_eos ());
    }
    return;
  }
}
/* Request `count` V4L2 buffers of the given memory type (MMAP, USERPTR or
 * DMABUF) via VIDIOC_REQBUFS, create one memory group per allocated buffer
 * and mark the allocator active.
 * Returns the number of buffers actually granted by the driver (which may
 * be fewer than requested), or 0 on failure. */
guint
gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
    guint32 memory)
{
  struct v4l2_requestbuffers breq = { count, allocator->type, memory };
  gboolean can_allocate;
  gint i;

  g_return_val_if_fail (count != 0, 0);

  GST_OBJECT_LOCK (allocator);

  if (g_atomic_int_get (&allocator->active))
    goto already_active;

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)
    goto reqbufs_failed;

  /* the driver may grant fewer buffers than requested; zero is fatal */
  if (breq.count < 1)
    goto out_of_memory;

  switch (memory) {
    case V4L2_MEMORY_MMAP:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
      break;
    case V4L2_MEMORY_USERPTR:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
      break;
    case V4L2_MEMORY_DMABUF:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
      break;
    default:
      can_allocate = FALSE;
      break;
  }

  GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
      breq.count, memory_type_to_str (memory), count);

  allocator->can_allocate = can_allocate;
  allocator->count = breq.count;
  allocator->memory = memory;

  /* Create memory groups */
  for (i = 0; i < allocator->count; i++) {
    allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
    if (allocator->groups[i] == NULL)
      goto error;

    gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);
  }

  g_atomic_int_set (&allocator->active, TRUE);

done:
  GST_OBJECT_UNLOCK (allocator);
  /* breq.count is the success value; the error paths below reset it to 0 */
  return breq.count;

  /* ERRORS (all converge on `error`, which makes the function return 0) */
already_active:
  {
    GST_ERROR_OBJECT (allocator, "allocator already active");
    goto error;
  }
reqbufs_failed:
  {
    GST_ERROR_OBJECT (allocator,
        "error requesting %d buffers: %s", count, g_strerror (errno));
    goto error;
  }
out_of_memory:
  {
    GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
    goto error;
  }
error:
  {
    breq.count = 0;
    goto done;
  }
}
/* Chain function: push one buffer through the DTLS connection's output
 * stream.  While the (synchronous) write is in progress, src_buffer and
 * running_thread are published under the object lock so other parts of the
 * element can correlate re-entrant callbacks with this write.
 * Takes ownership of `buffer`.  Returns GST_FLOW_OK on success, a flow
 * return smuggled through the GST_IO_STREAM_FLOW_RETURN error domain, or
 * GST_FLOW_ERROR. */
static GstFlowReturn
gst_dtls_enc_chain (GstDtlsBase * base, GstBuffer * buffer)
{
  GstDtlsEnc *self = GST_DTLS_ENC (base);
  gssize ret;
  GstMapInfo map;
  GError *error = NULL;

  /* empty buffers carry no payload; drop them silently */
  if (gst_buffer_get_size (buffer) == 0) {
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (base, RESOURCE, READ, ("Can't map buffer"),
        ("Can't map buffer"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  GST_OBJECT_LOCK (self);
  self->src_buffer = buffer;
  self->running_thread = g_thread_self ();
  GST_OBJECT_UNLOCK (self);

  ret = g_output_stream_write (g_io_stream_get_output_stream (G_IO_STREAM
          (base->conn->conn)), map.data, map.size, NULL, &error);

  /* a short write should never happen here; the assert catches it in
   * debug builds, and the `ret = -10` fallback below converts it into an
   * error path when assertions are compiled out (G_DISABLE_ASSERT) */
  g_assert (ret < 0 || ret == map.size);
  if (ret > 0 && ret != map.size)
    ret = -10;

  GST_OBJECT_LOCK (self);
  self->src_buffer = NULL;
  GST_OBJECT_UNLOCK (self);

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  if (ret > 0) {
    return GST_FLOW_OK;
  } else {
    if (error) {
      GstFlowReturn flow = GST_FLOW_ERROR;

      /* errors in this domain carry a GstFlowReturn in their code */
      if (error->domain == GST_IO_STREAM_FLOW_RETURN) {
        flow = error->code;
      } else {
        GST_ELEMENT_ERROR (base, LIBRARY, FAILED,
            ("DTLS encoding failed: %s", error->message),
            ("DTLS encoding failed: %s", error->message));
      }
      g_clear_error (&error);
      return flow;
    } else {
      GST_ELEMENT_ERROR (base, LIBRARY, FAILED, ("Unknown encoding error"),
          ("Unknown encoding error"));
      return GST_FLOW_ERROR;
    }
  }
}
/* GstAudioDecoder::set_format: (re)configure the ffmpeg codec context for
 * new input caps.  Equal caps are a no-op.  An already-open codec is
 * drained first — with the object lock temporarily released, since draining
 * pushes data downstream — then closed and reopened with the new settings.
 * Returns TRUE on success, FALSE if the codec could not be opened. */
static gboolean
gst_ffmpegauddec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstFFMpegAudDec *ffmpegdec = (GstFFMpegAudDec *) decoder;
  GstFFMpegAudDecClass *oclass;
  gboolean ret = TRUE;

  oclass = (GstFFMpegAudDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  GST_DEBUG_OBJECT (ffmpegdec, "setcaps called");

  GST_OBJECT_LOCK (ffmpegdec);

  /* unchanged caps: nothing to reconfigure */
  if (ffmpegdec->last_caps && gst_caps_is_equal (ffmpegdec->last_caps, caps)) {
    GST_DEBUG_OBJECT (ffmpegdec, "same caps");
    GST_OBJECT_UNLOCK (ffmpegdec);
    return TRUE;
  }

  gst_caps_replace (&ffmpegdec->last_caps, caps);

  /* close old session */
  if (ffmpegdec->opened) {
    /* drain pushes buffers downstream, so it must run unlocked */
    GST_OBJECT_UNLOCK (ffmpegdec);
    gst_ffmpegauddec_drain (ffmpegdec);
    GST_OBJECT_LOCK (ffmpegdec);
    gst_ffmpegauddec_close (ffmpegdec);
  }

  /* get size and so */
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id, oclass->in_plugin->type,
      caps, ffmpegdec->context);

  /* workaround encoder bugs */
  ffmpegdec->context->workaround_bugs |= FF_BUG_AUTODETECT;
  ffmpegdec->context->err_recognition = 1;

  /* route ffmpeg's buffer callbacks back to this element */
  ffmpegdec->context->opaque = ffmpegdec;
  ffmpegdec->context->get_buffer = gst_ffmpegauddec_get_buffer;
  ffmpegdec->context->reget_buffer = NULL;
  ffmpegdec->context->release_buffer = NULL;

  /* open codec - we don't select an output pix_fmt yet,
   * simply because we don't know! We only get it
   * during playback... */
  if (!gst_ffmpegauddec_open (ffmpegdec))
    goto open_failed;

done:
  GST_OBJECT_UNLOCK (ffmpegdec);

  return ret;

  /* ERRORS */
open_failed:
  {
    GST_DEBUG_OBJECT (ffmpegdec, "Failed to open");
    ret = FALSE;
    goto done;
  }
}
/* GstBaseTransform::transform implementation: scale @in into @out using the
 * configured method, optionally filling border regions with black first.
 * Dispatches per pixel format to the matching vs_image_scale_* routine. */
static GstFlowReturn
gst_video_scale_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (trans);
  GstFlowReturn ret = GST_FLOW_OK;
  VSImage dest = { NULL, };
  VSImage src = { NULL, };
  VSImage dest_u = { NULL, };
  VSImage dest_v = { NULL, };
  VSImage src_u = { NULL, };
  VSImage src_v = { NULL, };
  gint method;
  const guint8 *black = _get_black_for_format (videoscale->format);
  gboolean add_borders;

  /* Snapshot the properties that can change concurrently. */
  GST_OBJECT_LOCK (videoscale);
  method = videoscale->method;
  add_borders = videoscale->add_borders;
  GST_OBJECT_UNLOCK (videoscale);

  /* A 1-pixel-wide source cannot be interpolated horizontally. */
  if (videoscale->from_width == 1) {
    method = GST_VIDEO_SCALE_NEAREST;
  }
  /* 4-tap needs at least 4 pixels in each dimension; degrade gracefully. */
  if (method == GST_VIDEO_SCALE_4TAP &&
      (videoscale->from_width < 4 || videoscale->from_height < 4)) {
    method = GST_VIDEO_SCALE_BILINEAR;
  }

  /* Plane 0 (packed formats, or luma of planar formats). */
  gst_video_scale_setup_vs_image (&src, videoscale->format, 0,
      videoscale->from_width, videoscale->from_height, 0, 0,
      GST_BUFFER_DATA (in));
  gst_video_scale_setup_vs_image (&dest, videoscale->format, 0,
      videoscale->to_width, videoscale->to_height, videoscale->borders_w,
      videoscale->borders_h, GST_BUFFER_DATA (out));

  /* Planar YUV formats additionally need the two chroma planes set up. */
  if (videoscale->format == GST_VIDEO_FORMAT_I420
      || videoscale->format == GST_VIDEO_FORMAT_YV12
      || videoscale->format == GST_VIDEO_FORMAT_Y444
      || videoscale->format == GST_VIDEO_FORMAT_Y42B
      || videoscale->format == GST_VIDEO_FORMAT_Y41B) {
    gst_video_scale_setup_vs_image (&src_u, videoscale->format, 1,
        videoscale->from_width, videoscale->from_height, 0, 0,
        GST_BUFFER_DATA (in));
    gst_video_scale_setup_vs_image (&src_v, videoscale->format, 2,
        videoscale->from_width, videoscale->from_height, 0, 0,
        GST_BUFFER_DATA (in));
    gst_video_scale_setup_vs_image (&dest_u, videoscale->format, 1,
        videoscale->to_width, videoscale->to_height, videoscale->borders_w,
        videoscale->borders_h, GST_BUFFER_DATA (out));
    gst_video_scale_setup_vs_image (&dest_v, videoscale->format, 2,
        videoscale->to_width, videoscale->to_height, videoscale->borders_w,
        videoscale->borders_h, GST_BUFFER_DATA (out));
  }

  switch (videoscale->format) {
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_AYUV:
      /* 32-bit packed formats share one code path. */
      if (add_borders)
        vs_fill_borders_RGBA (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_RGBA (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_RGBA (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_RGBA (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
      /* 64-bit (16 bits per component) packed formats. */
      if (add_borders)
        vs_fill_borders_AYUV64 (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_AYUV64 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_AYUV64 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_AYUV64 (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
      /* 24-bit packed formats. */
      if (add_borders)
        vs_fill_borders_RGB (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_RGB (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_RGB (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_RGB (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
      if (add_borders)
        vs_fill_borders_YUYV (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_YUYV (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_YUYV (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_YUYV (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_UYVY:
      if (add_borders)
        vs_fill_borders_UYVY (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_UYVY (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_UYVY (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_UYVY (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_Y800:
    case GST_VIDEO_FORMAT_GRAY8:
      /* 8-bit single-plane grayscale. */
      if (add_borders)
        vs_fill_borders_Y (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_Y (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_Y (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_Y (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_Y16:
      /* 16-bit grayscale; note the border fill uses 0, not @black. */
      if (add_borders)
        vs_fill_borders_Y16 (&dest, 0);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_Y16 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_Y16 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_Y16 (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:
      /* Planar formats: each of the three planes is scaled independently
       * with the Y (8-bit single-plane) routines.  black[0..2] hold the
       * per-plane black values. */
      if (add_borders) {
        vs_fill_borders_Y (&dest, black);
        vs_fill_borders_Y (&dest_u, black + 1);
        vs_fill_borders_Y (&dest_v, black + 2);
      }
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_Y (&dest, &src, videoscale->tmp_buf);
          vs_image_scale_nearest_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_nearest_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_Y (&dest, &src, videoscale->tmp_buf);
          vs_image_scale_linear_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_linear_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_Y (&dest, &src, videoscale->tmp_buf);
          vs_image_scale_4tap_Y (&dest_u, &src_u, videoscale->tmp_buf);
          vs_image_scale_4tap_Y (&dest_v, &src_v, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_RGB16:
      if (add_borders)
        vs_fill_borders_RGB565 (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_RGB565 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_RGB565 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_RGB565 (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    case GST_VIDEO_FORMAT_RGB15:
      if (add_borders)
        vs_fill_borders_RGB555 (&dest, black);
      switch (method) {
        case GST_VIDEO_SCALE_NEAREST:
          vs_image_scale_nearest_RGB555 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_BILINEAR:
          vs_image_scale_linear_RGB555 (&dest, &src, videoscale->tmp_buf);
          break;
        case GST_VIDEO_SCALE_4TAP:
          vs_image_scale_4tap_RGB555 (&dest, &src, videoscale->tmp_buf);
          break;
        default:
          goto unknown_mode;
      }
      break;
    default:
      goto unsupported;
  }

  GST_LOG_OBJECT (videoscale, "pushing buffer of %d bytes",
      GST_BUFFER_SIZE (out));

  return ret;

  /* ERRORS */
unsupported:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unsupported format %d for scaling method %d",
            videoscale->format, method));
    return GST_FLOW_ERROR;
  }
unknown_mode:
  {
    /* NOTE(review): this reports the raw property videoscale->method, not
     * the locally clamped @method used above — confirm intent. */
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unknown scaling method %d", videoscale->method));
    return GST_FLOW_ERROR;
  }
}
/* Sink-pad event handler.  In MODE_TYPEFIND most events are buffered until
 * a type is found; in MODE_NORMAL everything is forwarded; in MODE_ERROR
 * events are dropped. */
static gboolean
gst_type_find_element_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res = FALSE;
  GstTypeFindElement *typefind = GST_TYPE_FIND_ELEMENT (parent);

  GST_DEBUG_OBJECT (typefind, "got %s event in mode %d",
      GST_EVENT_TYPE_NAME (event), typefind->mode);

  switch (typefind->mode) {
    case MODE_TYPEFIND:
      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_CAPS:
        {
          GstCaps *caps;

          /* Parse and push out our caps and data */
          gst_event_parse_caps (event, &caps);
          res = gst_type_find_element_setcaps (typefind, caps);

          gst_event_unref (event);
          break;
        }
        case GST_EVENT_GAP:
        {
          GST_FIXME_OBJECT (typefind,
              "GAP events during typefinding not handled properly");

          /* FIXME: These would need to be inserted in the stream at
           * the right position between buffers, but we combine all
           * buffers with a GstAdapter. Drop the GAP event for now,
           * which will only cause an implicit GAP between buffers.
           */
          gst_event_unref (event);
          res = TRUE;
          break;
        }
        case GST_EVENT_EOS:
        {
          GST_INFO_OBJECT (typefind, "Got EOS and no type found yet");
          /* Last-chance typefind with whatever data we have (at_eos=TRUE). */
          gst_type_find_element_chain_do_typefinding (typefind, FALSE, TRUE);

          res = gst_pad_push_event (typefind->src, event);
          break;
        }
        case GST_EVENT_FLUSH_STOP:{
          GList *l;

          GST_OBJECT_LOCK (typefind);

          /* Re-store sticky events (except SEGMENT/EOS, which a flush
           * invalidates) on the src pad before dropping the cache. */
          for (l = typefind->cached_events; l; l = l->next) {
            if (GST_EVENT_IS_STICKY (l->data) &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_SEGMENT &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_EOS) {
              gst_pad_store_sticky_event (typefind->src, l->data);
            }
            gst_event_unref (l->data);
          }

          g_list_free (typefind->cached_events);
          typefind->cached_events = NULL;
          gst_adapter_clear (typefind->adapter);

          GST_OBJECT_UNLOCK (typefind);

          /* fall through */
        }
        case GST_EVENT_FLUSH_START:
          res = gst_pad_push_event (typefind->src, event);
          break;
        default:
          /* Forward events that would happen before the caps event
           * directly instead of storing them. There's no reason not
           * to send them directly and we should only store events
           * for later sending that would need to come after the caps
           * event */
          if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
            res = gst_pad_push_event (typefind->src, event);
          } else {
            GST_DEBUG_OBJECT (typefind, "Saving %s event to send later",
                GST_EVENT_TYPE_NAME (event));
            GST_OBJECT_LOCK (typefind);
            typefind->cached_events =
                g_list_append (typefind->cached_events, event);
            GST_OBJECT_UNLOCK (typefind);
            res = TRUE;
          }
          break;
      }
      break;
    case MODE_NORMAL:
      /* Type already found: plain pass-through. */
      res = gst_pad_push_event (typefind->src, event);
      break;
    case MODE_ERROR:
      /* Element errored out: swallow events (res stays FALSE). */
      break;
    default:
      g_assert_not_reached ();
  }
  return res;
}
/* GstVideoFilter::transform_frame implementation: apply the subclass's
 * geometric mapping to every output pixel, either through a precalculated
 * map (gt->map) or by calling klass->map_func per pixel.  The output frame
 * is cleared to black first so unmapped pixels stay black.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR when the map is missing
 * or a mapping fails. */
static GstFlowReturn
gst_geometric_transform_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstGeometricTransform *gt;
  GstGeometricTransformClass *klass;
  gint x, y;
  GstFlowReturn ret = GST_FLOW_OK;
  gdouble *ptr;
  guint8 *in_data;
  guint8 *out_data;
  gsize i;

  gt = GST_GEOMETRIC_TRANSFORM_CAST (vfilter);
  klass = GST_GEOMETRIC_TRANSFORM_GET_CLASS (gt);

  in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  if (GST_VIDEO_FRAME_FORMAT (out_frame) == GST_VIDEO_FORMAT_AYUV) {
    /* in AYUV black is not just all zeros:
     * 0x10 is black for Y,
     * 0x80 is black for Cr and Cb */
    for (i = 0; i < out_frame->map[0].size; i += 4)
      GST_WRITE_UINT32_BE (out_data + i, 0xff108080);
  } else {
    memset (out_data, 0, out_frame->map[0].size);
  }

  GST_OBJECT_LOCK (gt);
  if (gt->precalc_map) {
    if (gt->needs_remap) {
      if (klass->prepare_func)
        if (!klass->prepare_func (gt)) {
          /* BUG FIX: this used to be 'ret = FALSE', but FALSE == 0 ==
           * GST_FLOW_OK, so a failed prepare silently reported success. */
          ret = GST_FLOW_ERROR;
          goto end;
        }
      gst_geometric_transform_generate_map (gt);
    }
    g_return_val_if_fail (gt->map, GST_FLOW_ERROR);

    /* Walk the precalculated map: two doubles (src x, src y) per pixel. */
    ptr = gt->map;
    for (y = 0; y < gt->height; y++) {
      for (x = 0; x < gt->width; x++) {
        /* do the mapping */
        gst_geometric_transform_do_map (gt, in_data, out_data, x, y, ptr[0],
            ptr[1]);
        ptr += 2;
      }
    }
  } else {
    /* No precalculated map: ask the subclass for each pixel's source. */
    for (y = 0; y < gt->height; y++) {
      for (x = 0; x < gt->width; x++) {
        gdouble in_x, in_y;

        if (klass->map_func (gt, x, y, &in_x, &in_y)) {
          gst_geometric_transform_do_map (gt, in_data, out_data, x, y, in_x,
              in_y);
        } else {
          GST_WARNING_OBJECT (gt, "Failed to do mapping for %d %d", x, y);
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
    }
  }
end:
  GST_OBJECT_UNLOCK (gt);
  return ret;
}
/* Attempt typefinding on the data accumulated in the adapter.
 * @check_avail: require TYPE_FIND_MIN_SIZE bytes before trying.
 * @at_eos: treat "not enough data"/"no caps" as a fatal stream error
 * instead of waiting for more data.
 *
 * Note the locking style: the object lock is held through the main body,
 * and every error label below unlocks before returning. */
static GstFlowReturn
gst_type_find_element_chain_do_typefinding (GstTypeFindElement * typefind,
    gboolean check_avail, gboolean at_eos)
{
  GstTypeFindProbability probability;
  GstCaps *caps = NULL;
  gsize avail;
  const guint8 *data;
  gboolean have_min, have_max;

  GST_OBJECT_LOCK (typefind);
  /* Forced caps short-circuit the actual typefind with max probability. */
  if (typefind->force_caps) {
    caps = gst_caps_ref (typefind->force_caps);
    probability = GST_TYPE_FIND_MAXIMUM;
  }

  if (!caps) {
    avail = gst_adapter_available (typefind->adapter);

    if (check_avail) {
      have_min = avail >= TYPE_FIND_MIN_SIZE;
      have_max = avail >= TYPE_FIND_MAX_SIZE;
    } else {
      have_min = avail > 0;
      have_max = TRUE;
    }

    if (!have_min)
      goto not_enough_data;

    /* map all available data */
    data = gst_adapter_map (typefind->adapter, avail);
    caps = gst_type_find_helper_for_data (GST_OBJECT (typefind),
        data, avail, &probability);
    gst_adapter_unmap (typefind->adapter);

    /* Only give up for good once we have the max amount of data. */
    if (caps == NULL && have_max)
      goto no_type_found;
    else if (caps == NULL)
      goto wait_for_data;

    /* found a type */
    if (probability < typefind->min_probability)
      goto low_probability;
  }

  GST_OBJECT_UNLOCK (typefind);

  /* probability is good enough too, so let's make it known ... emitting this
   * signal calls our object handler which sets the caps. */
  gst_type_find_element_emit_have_type (typefind, probability, caps);

  /* .. and send out the accumulated data */
  stop_typefinding (typefind);
  gst_caps_unref (caps);

  return GST_FLOW_OK;

not_enough_data:
  {
    GST_OBJECT_UNLOCK (typefind);

    if (at_eos) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
          (_("Stream doesn't contain enough data.")),
          ("Can't typefind stream"));
      return GST_FLOW_ERROR;
    } else {
      GST_DEBUG_OBJECT (typefind, "not enough data for typefinding yet "
          "(%" G_GSIZE_FORMAT " bytes)", avail);
      return GST_FLOW_OK;
    }
  }
no_type_found:
  {
    GST_OBJECT_UNLOCK (typefind);
    GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
    stop_typefinding (typefind);
    return GST_FLOW_ERROR;
  }
wait_for_data:
  {
    GST_OBJECT_UNLOCK (typefind);

    if (at_eos) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
          (_("Stream doesn't contain enough data.")),
          ("Can't typefind stream"));
      return GST_FLOW_ERROR;
    } else {
      GST_DEBUG_OBJECT (typefind,
          "no caps found with %" G_GSIZE_FORMAT " bytes of data, "
          "waiting for more data", avail);
      return GST_FLOW_OK;
    }
  }
low_probability:
  {
    GST_DEBUG_OBJECT (typefind, "found caps %" GST_PTR_FORMAT ", but "
        "probability is %u which is lower than the required minimum of %u",
        caps, probability, typefind->min_probability);

    gst_caps_unref (caps);

    /* Still holding the lock here; no_type_found unlocks for us. */
    if (have_max)
      goto no_type_found;

    GST_OBJECT_UNLOCK (typefind);
    GST_DEBUG_OBJECT (typefind, "waiting for more data to try again");
    return GST_FLOW_OK;
  }
}
static gboolean gst_data_uri_src_set_uri (GstURIHandler * handler, const gchar * uri, GError ** error) { GstDataURISrc *src = GST_DATA_URI_SRC (handler); gboolean ret = FALSE; gchar *mimetype = NULL; const gchar *parameters_start; const gchar *data_start; const gchar *orig_uri = uri; GstCaps *caps; GstBuffer *buffer; gboolean base64 = FALSE; gchar *charset = NULL; gpointer bdata; gsize bsize; GST_OBJECT_LOCK (src); if (GST_STATE (src) >= GST_STATE_PAUSED) goto wrong_state; GST_OBJECT_UNLOCK (src); /* uri must be an URI as defined in RFC 2397 * data:[<mediatype>][;base64],<data> */ if (strncmp ("data:", uri, 5) != 0) goto invalid_uri; uri += 5; parameters_start = strchr (uri, ';'); data_start = strchr (uri, ','); if (data_start == NULL) goto invalid_uri; if (data_start != uri && parameters_start != uri) mimetype = g_strndup (uri, (parameters_start ? parameters_start : data_start) - uri); else mimetype = g_strdup ("text/plain"); GST_DEBUG_OBJECT (src, "Mimetype: %s", mimetype); if (parameters_start != NULL) { gchar **walk; gchar *parameters = g_strndup (parameters_start + 1, data_start - parameters_start - 1); gchar **parameters_strv; parameters_strv = g_strsplit (parameters, ";", -1); GST_DEBUG_OBJECT (src, "Parameters: "); walk = parameters_strv; while (*walk) { GST_DEBUG_OBJECT (src, "\t %s", *walk); if (strcmp ("base64", *walk) == 0) { base64 = TRUE; } else if (strncmp ("charset=", *walk, 8) == 0) { charset = g_strdup (*walk + 8); } walk++; } g_free (parameters); g_strfreev (parameters_strv); } /* Skip comma */ data_start += 1; if (base64) { bdata = g_base64_decode (data_start, &bsize); } else { /* URI encoded, i.e. 
"percent" encoding */ bdata = g_uri_unescape_string (data_start, NULL); if (bdata == NULL) goto invalid_uri_encoded_data; bsize = strlen (bdata) + 1; } /* Convert to UTF8 */ if (strcmp ("text/plain", mimetype) == 0 && charset && g_ascii_strcasecmp ("US-ASCII", charset) != 0 && g_ascii_strcasecmp ("UTF-8", charset) != 0) { gsize read; gsize written; gpointer data; data = g_convert_with_fallback (bdata, -1, "UTF-8", charset, (char *) "*", &read, &written, NULL); g_free (bdata); bdata = data; bsize = written; } buffer = gst_buffer_new_wrapped (bdata, bsize); caps = gst_type_find_helper_for_buffer (GST_OBJECT (src), buffer, NULL); if (!caps) caps = gst_caps_new_empty_simple (mimetype); gst_base_src_set_caps (GST_BASE_SRC_CAST (src), caps); gst_caps_unref (caps); GST_OBJECT_LOCK (src); gst_buffer_replace (&src->buffer, buffer); gst_buffer_unref (buffer); g_free (src->uri); src->uri = g_strdup (orig_uri); GST_OBJECT_UNLOCK (src); ret = TRUE; out: g_free (mimetype); g_free (charset); return ret; wrong_state: { GST_WARNING_OBJECT (src, "Can't set URI in %s state", gst_element_state_get_name (GST_STATE (src))); GST_OBJECT_UNLOCK (src); g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_STATE, "Changing the 'uri' property on dataurisrc while it is running " "is not supported"); goto out; } invalid_uri: { GST_WARNING_OBJECT (src, "invalid URI '%s'", uri); g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI, "Invalid data URI"); goto out; } invalid_uri_encoded_data: { GST_WARNING_OBJECT (src, "Failed to parse data encoded in URI '%s'", uri); g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI, "Could not parse data encoded in data URI"); goto out; } }
/* Source-pad query handler: answers POSITION, DURATION and SEEKING queries
 * once the sample rate is known; everything else is handled by the default
 * query handler.  Returns TRUE when the query was answered. */
static gboolean
gst_musepackdec_src_query (GstPad * pad, GstQuery * query)
{
  GstMusepackDec *dec = GST_MUSEPACK_DEC (gst_pad_get_parent (pad));
  gboolean handled = FALSE;
  GstFormat format;
  gint rate;

  rate = g_atomic_int_get (&dec->rate);

  /* Without a known sample rate no sample<->time conversion is possible,
   * so nothing can be answered yet. */
  if (rate != 0) {
    switch (GST_QUERY_TYPE (query)) {
      case GST_QUERY_POSITION:{
        gint64 pos_samples, pos;

        gst_query_parse_position (query, &format, NULL);

        /* segment.last_stop is kept in samples; snapshot it under lock. */
        GST_OBJECT_LOCK (dec);
        pos_samples = dec->segment.last_stop;
        GST_OBJECT_UNLOCK (dec);

        if (format == GST_FORMAT_TIME) {
          pos = gst_util_uint64_scale_int (pos_samples, GST_SECOND, rate);
          gst_query_set_position (query, GST_FORMAT_TIME, pos);
          handled = TRUE;
        } else if (format == GST_FORMAT_DEFAULT) {
          gst_query_set_position (query, GST_FORMAT_DEFAULT, pos_samples);
          handled = TRUE;
        }
        break;
      }
      case GST_QUERY_DURATION:{
        gint64 dur_samples, dur;

        gst_query_parse_duration (query, &format, NULL);

        GST_OBJECT_LOCK (dec);
        dur_samples = dec->segment.duration;
        GST_OBJECT_UNLOCK (dec);

        if (format == GST_FORMAT_TIME) {
          dur = gst_util_uint64_scale_int (dur_samples, GST_SECOND, rate);
          gst_query_set_duration (query, GST_FORMAT_TIME, dur);
          handled = TRUE;
        } else if (format == GST_FORMAT_DEFAULT) {
          gst_query_set_duration (query, GST_FORMAT_DEFAULT, dur_samples);
          handled = TRUE;
        }
        break;
      }
      case GST_QUERY_SEEKING:{
        GstFormat fmt;

        handled = TRUE;
        gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);

        /* Seeking is only supported in TIME and DEFAULT (samples). */
        if (fmt == GST_FORMAT_TIME || fmt == GST_FORMAT_DEFAULT)
          gst_query_set_seeking (query, fmt, TRUE, 0, -1);
        else
          gst_query_set_seeking (query, fmt, FALSE, -1, -1);
        break;
      }
      default:
        handled = gst_pad_query_default (pad, query);
        break;
    }
  }

  gst_object_unref (dec);
  return handled;
}
/* Composite all sink-pad GL textures into @outbuf via the subclass's
 * process_textures vfunc.  When downstream wants system memory (or the
 * output buffer is not GL memory), render into mix->out_tex_id and
 * download the result into the mapped frame instead.
 * Returns TRUE on success. */
gboolean
gst_gl_mixer_process_textures (GstGLMixer * mix, GstBuffer * outbuf)
{
  guint i;
  GList *walk;
  guint out_tex, out_tex_target;
  gboolean res = TRUE;
  guint array_index = 0;
  GstVideoFrame out_frame;
  GstElement *element = GST_ELEMENT (mix);
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
  GstGLMixerPrivate *priv = mix->priv;
  /* Downstream negotiated plain system memory -> we must download. */
  gboolean to_download =
      gst_caps_features_is_equal (GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY,
      gst_caps_get_features (mix->out_caps, 0));
  GstMapFlags out_map_flags = GST_MAP_WRITE;

  GST_TRACE ("Processing buffers");

  to_download |= !gst_is_gl_memory (gst_buffer_peek_memory (outbuf, 0));

  /* Map through GL only when we render directly into the output memory. */
  if (!to_download)
    out_map_flags |= GST_MAP_GL;

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, out_map_flags)) {
    return FALSE;
  }

  if (!to_download) {
    /* GL-mapped frame: data[0] holds the texture id. */
    out_tex = *(guint *) out_frame.data[0];
    out_tex_target =
        ((GstGLMemory *) gst_buffer_peek_memory (outbuf, 0))->tex_target;
  } else {
    GST_INFO ("Output Buffer does not contain correct memory, "
        "attempting to wrap for download");

    if (!mix->download)
      mix->download = gst_gl_download_new (mix->context);

    gst_gl_download_set_format (mix->download, &out_frame.info);
    /* Render into our private texture, then download after compositing. */
    out_tex = mix->out_tex_id;
    out_tex_target = GL_TEXTURE_2D;
  }

  GST_OBJECT_LOCK (mix);
  walk = element->sinkpads;

  /* Grow the frames array to one GstGLMixerFrameData per sink pad. */
  i = mix->frames->len;
  g_ptr_array_set_size (mix->frames, element->numsinkpads);
  for (; i < element->numsinkpads; i++)
    mix->frames->pdata[i] = g_slice_new0 (GstGLMixerFrameData);
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);
    GstGLMixerPadClass *pad_class = GST_GL_MIXER_PAD_GET_CLASS (pad);
    GstVideoAggregatorPad *vaggpad = walk->data;
    GstGLMixerFrameData *frame;

    frame = g_ptr_array_index (mix->frames, array_index);
    frame->pad = pad;
    frame->texture = 0;

    walk = g_list_next (walk);

    /* Upload each pad's current buffer into a GL texture for compositing. */
    if (vaggpad->buffer != NULL) {
      g_assert (pad_class->upload_buffer);

      if (pad->gl_buffer)
        gst_buffer_unref (pad->gl_buffer);
      pad->gl_buffer = pad_class->upload_buffer (mix, frame, vaggpad->buffer);

      GST_DEBUG_OBJECT (pad,
          "uploaded buffer %" GST_PTR_FORMAT " from buffer %" GST_PTR_FORMAT,
          pad->gl_buffer, vaggpad->buffer);
    }

    ++array_index;
  }

  /* Wait for the FBO used for rendering to become available (created on
   * the GL thread); gl_resource_ready stays FALSE if creation failed. */
  g_mutex_lock (&priv->gl_resource_lock);
  if (!priv->gl_resource_ready)
    g_cond_wait (&priv->gl_resource_cond, &priv->gl_resource_lock);

  if (!priv->gl_resource_ready) {
    g_mutex_unlock (&priv->gl_resource_lock);
    GST_ERROR_OBJECT (mix,
        "fbo used to render can't be created, do not run process_textures");
    res = FALSE;
    goto out;
  }

  mix_class->process_textures (mix, mix->frames, out_tex);

  g_mutex_unlock (&priv->gl_resource_lock);

  if (to_download) {
    if (!gst_gl_download_perform_with_data (mix->download,
            out_tex, out_tex_target, out_frame.data)) {
      GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s",
              "Failed to download video frame"), (NULL));
      res = FALSE;
      goto out;
    }
  }

out:
  /* Release the per-pad upload resources regardless of success. */
  i = 0;
  walk = GST_ELEMENT (mix)->sinkpads;
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);

    if (pad->upload)
      gst_gl_upload_release_buffer (pad->upload);

    walk = g_list_next (walk);
    i++;
  }
  GST_OBJECT_UNLOCK (mix);

  gst_video_frame_unmap (&out_frame);

  return res;
}
/* Chain-list function: run every buffer of @bufferlist through the muxer
 * (process_list_item rewrites RTP headers) and push the list downstream.
 * Takes ownership of @bufferlist. */
static GstFlowReturn
gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
    GstBufferList * bufferlist)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean changed = FALSE;
  struct BufferListData bd;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    /* FIX: current_caps can be NULL when nothing is negotiated yet, and it
     * was previously leaked when setcaps failed. */
    if (current_caps == NULL
        || !gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      if (current_caps != NULL)
        gst_caps_unref (current_caps);
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_list_unref (bufferlist);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);

  padpriv = gst_pad_get_element_private (pad);
  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    ret = GST_FLOW_NOT_LINKED;
    gst_buffer_list_unref (bufferlist);
    goto out;
  }

  bd.rtp_mux = rtp_mux;
  bd.padpriv = padpriv;
  bd.drop = FALSE;

  bufferlist = gst_buffer_list_make_writable (bufferlist);
  gst_buffer_list_foreach (bufferlist, process_list_item, &bd);

  /* Remember which pad produced data last so sticky events get resent
   * when the active pad changes. */
  if (!bd.drop && pad != rtp_mux->last_pad) {
    changed = TRUE;
    g_clear_object (&rtp_mux->last_pad);
    rtp_mux->last_pad = g_object_ref (pad);
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (bd.drop) {
    gst_buffer_list_unref (bufferlist);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push_list (rtp_mux->srcpad, bufferlist);
  }

out:
  return ret;
}
/* Convert @srcval from @src_format to @dest_format, storing the result in
 * @destval.  Supported formats: BYTES, DEFAULT (samples) and TIME.
 * BYTES values are assumed to be relative to the start of the audio
 * payload (NOTE(review): the header offset field is not applied here —
 * confirm callers pass payload-relative byte values).
 * Returns FALSE when the conversion is unsupported or sample_size/rate
 * are not yet known. */
static gboolean
gst_au_parse_src_convert (GstAuParse * auparse, GstFormat src_format,
    gint64 srcval, GstFormat dest_format, gint64 * destval)
{
  gboolean ret = TRUE;
  guint samplesize, rate;

  if (dest_format == src_format) {
    *destval = srcval;
    return TRUE;
  }

  GST_OBJECT_LOCK (auparse);
  samplesize = auparse->sample_size;
  rate = auparse->samplerate;
  GST_OBJECT_UNLOCK (auparse);

  if (samplesize == 0 || rate == 0) {
    GST_LOG_OBJECT (auparse, "cannot convert, sample_size or rate unknown");
    return FALSE;
  }

  switch (src_format) {
    case GST_FORMAT_BYTES:
      /* bytes -> samples, then handled like DEFAULT below */
      srcval /= samplesize;
      /* fallthrough */
    case GST_FORMAT_DEFAULT:{
      switch (dest_format) {
        case GST_FORMAT_BYTES:
          *destval = srcval * samplesize;
          break;
        case GST_FORMAT_TIME:
          *destval = gst_util_uint64_scale_int (srcval, GST_SECOND, rate);
          break;
        default:
          ret = FALSE;
          break;
      }
      break;
    }
    case GST_FORMAT_TIME:{
      switch (dest_format) {
        case GST_FORMAT_BYTES:
          *destval =
              gst_util_uint64_scale_int (srcval, rate * samplesize,
              GST_SECOND);
          break;
        case GST_FORMAT_DEFAULT:
          *destval = gst_util_uint64_scale_int (srcval, rate, GST_SECOND);
          break;
        default:
          ret = FALSE;
          break;
      }
      break;
    }
    default:{
      ret = FALSE;
      break;
    }
  }

  if (!ret) {
    GST_DEBUG_OBJECT (auparse, "could not convert from %s to %s format",
        gst_format_get_name (src_format), gst_format_get_name (dest_format));
  }

  return ret;
}
/* Chain function: rewrite the RTP header of @buffer (ssrc, seqnum,
 * timestamp) and push it downstream.  Takes ownership of @buffer. */
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    /* FIX: current_caps can be NULL when nothing is negotiated yet, and it
     * was previously leaked when setcaps failed (same fix as in
     * gst_rtp_mux_chain_list). */
    if (current_caps == NULL
        || !gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      if (current_caps != NULL)
        gst_caps_unref (current_caps);
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
    gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);

  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    /* Track the active pad so sticky events get resent on pad switch,
     * and remember the running end position of the stream. */
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
/* Chain function: accumulate incoming audio in the adapter and, whenever a
 * full frame's worth of samples is available, feed 512 samples per channel
 * to libvisual and push one rendered video frame.  Takes ownership of
 * @buffer. */
static GstFlowReturn
gst_visual_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBuffer *outbuf = NULL;
  guint i;
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    const guint16 *data;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least 512 samples */
    if (avail < 512 * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment,
          GST_FORMAT_TIME, timestamp);

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* Read 512 samples per channel */
    data =
        (const guint16 *) gst_adapter_peek (visual->adapter,
        512 * visual->bps);

#if defined(VISUAL_API_VERSION) && VISUAL_API_VERSION >= 4000 && VISUAL_API_VERSION < 5000
    {
      /* libvisual 0.4.x API: feed each channel through a VisBuffer. */
      VisBuffer *lbuf, *rbuf;
      guint16 ldata[512], rdata[512];
      VisAudioSampleRateType rate;

      lbuf = visual_buffer_new_with_buffer (ldata, sizeof (ldata), NULL);
      rbuf = visual_buffer_new_with_buffer (rdata, sizeof (rdata), NULL);

      if (visual->channels == 2) {
        /* interleaved stereo: de-interleave into left/right */
        for (i = 0; i < 512; i++) {
          ldata[i] = *data++;
          rdata[i] = *data++;
        }
      } else {
        /* mono: duplicate each sample into both channels */
        for (i = 0; i < 512; i++) {
          ldata[i] = *data;
          rdata[i] = *data++;
        }
      }

      /* Map the stream rate to libvisual's enumerated sample rates. */
      switch (visual->rate) {
        case 8000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_8000;
          break;
        case 11250:
          rate = VISUAL_AUDIO_SAMPLE_RATE_11250;
          break;
        case 22500:
          rate = VISUAL_AUDIO_SAMPLE_RATE_22500;
          break;
        case 32000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_32000;
          break;
        case 44100:
          rate = VISUAL_AUDIO_SAMPLE_RATE_44100;
          break;
        case 48000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_48000;
          break;
        case 96000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_96000;
          break;
        default:
          visual_object_unref (VISUAL_OBJECT (lbuf));
          visual_object_unref (VISUAL_OBJECT (rbuf));
          GST_ERROR_OBJECT (visual, "unsupported rate %d", visual->rate);
          ret = GST_FLOW_ERROR;
          goto beach;
          break;
      }

      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          lbuf, rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16,
          (char *) VISUAL_AUDIO_CHANNEL_LEFT);
      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          rbuf, rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16,
          (char *) VISUAL_AUDIO_CHANNEL_RIGHT);

      visual_object_unref (VISUAL_OBJECT (lbuf));
      visual_object_unref (VISUAL_OBJECT (rbuf));
    }
#else
    /* libvisual 0.2.x API: write straight into plugpcm. */
    if (visual->channels == 2) {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data++;
        visual->audio->plugpcm[1][i] = *data++;
      }
    } else {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data;
        visual->audio->plugpcm[1][i] = *data++;
      }
    }
#endif

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* Point libvisual's video at our buffer, render, then detach again. */
    visual_video_set_buffer (visual->video, GST_BUFFER_DATA (outbuf));
    visual_audio_analyze (visual->audio);
    visual_actor_run (visual->actor, visual->audio);
    visual_video_set_buffer (visual->video, NULL);
    GST_DEBUG_OBJECT (visual, "rendered one frame");

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, outbuf);
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:
  /* Drop an unpushed preallocated buffer, if any. */
  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
/* gst_rtp_mux_setcaps:
 * @pad: the sink pad whose caps changed
 * @rtp_mux: the muxer
 * @caps: the new (fixed) caps on @pad
 *
 * Negotiates the caps on the src pad when a sink pad receives caps.
 * Picks up a downstream-forced "ssrc" (if any), records the pad's
 * "timestamp-offset", chooses an ssrc if none is set yet, and pushes a
 * stream-start event before setting the (augmented) caps downstream.
 *
 * Returns: %TRUE if the src pad accepted the caps.
 */
static gboolean
gst_rtp_mux_setcaps (GstPad * pad, GstRTPMux * rtp_mux, GstCaps * caps)
{
  GstStructure *structure;
  gboolean ret = FALSE;
  GstRTPMuxPadPrivate *padpriv;
  GstCaps *peercaps;

  /* we can only negotiate from fully-fixed input caps */
  if (!gst_caps_is_fixed (caps))
    return FALSE;

  /* if downstream forces an ssrc (e.g. via rtpbin), adopt it */
  peercaps = gst_pad_peer_query_caps (rtp_mux->srcpad, NULL);
  if (peercaps) {
    GstCaps *tcaps, *othercaps;        /* FIX: removed stray ";;" */

    tcaps = gst_pad_get_pad_template_caps (pad);
    othercaps = gst_caps_intersect_full (peercaps, tcaps,
        GST_CAPS_INTERSECT_FIRST);

    if (gst_caps_get_size (othercaps) > 0) {
      structure = gst_caps_get_structure (othercaps, 0);
      GST_OBJECT_LOCK (rtp_mux);
      if (gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc)) {
        GST_DEBUG_OBJECT (pad, "Use downstream ssrc: %x",
            rtp_mux->current_ssrc);
        rtp_mux->have_ssrc = TRUE;
      }
      GST_OBJECT_UNLOCK (rtp_mux);
    }

    gst_caps_unref (othercaps);
    gst_caps_unref (peercaps);
    gst_caps_unref (tcaps);
  }

  structure = gst_caps_get_structure (caps, 0);
  if (!structure)
    return FALSE;

  GST_OBJECT_LOCK (rtp_mux);
  /* remember this pad's RTP timestamp offset for later timestamping */
  padpriv = gst_pad_get_element_private (pad);
  if (padpriv &&
      gst_structure_get_uint (structure, "timestamp-offset",
          &padpriv->timestamp_offset)) {
    padpriv->have_timestamp_offset = TRUE;
  }

  /* work on a writable copy; the incoming caps stay owned by the caller */
  caps = gst_caps_copy (caps);

  /* if we don't have a specified ssrc, first try to take one from the caps,
     and if that fails, generate one */
  if (!rtp_mux->have_ssrc) {
    if (rtp_mux->ssrc_random) {
      if (!gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc))
        rtp_mux->current_ssrc = g_random_int ();
      rtp_mux->have_ssrc = TRUE;
    }
  }

  /* advertise the muxer's own session parameters on the output caps */
  gst_caps_set_simple (caps,
      "timestamp-offset", G_TYPE_UINT, rtp_mux->ts_base,
      "seqnum-offset", G_TYPE_UINT, rtp_mux->seqnum_base,
      "ssrc", G_TYPE_UINT, rtp_mux->current_ssrc, NULL);

  GST_OBJECT_UNLOCK (rtp_mux);

  if (rtp_mux->send_stream_start) {
    gchar s_id[32];

    /* stream-start (FIXME: create id based on input ids) */
    g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ());
    gst_pad_push_event (rtp_mux->srcpad, gst_event_new_stream_start (s_id));

    rtp_mux->send_stream_start = FALSE;
  }

  GST_DEBUG_OBJECT (rtp_mux,
      "setting caps %" GST_PTR_FORMAT " on src pad..", caps);
  ret = gst_pad_set_caps (rtp_mux->srcpad, caps);

  gst_caps_unref (caps);

  return ret;
}
/**
 * gst_bus_post:
 * @bus: a #GstBus to post on
 * @message: (transfer full): the #GstMessage to post
 *
 * Post a message on the given bus. Ownership of the message
 * is taken by the bus.
 *
 * Returns: %TRUE if the message could be posted, %FALSE if the bus is flushing.
 *
 * MT safe.
 */
gboolean
gst_bus_post (GstBus * bus, GstMessage * message)
{
  GstBusSyncReply reply = GST_BUS_PASS;
  GstBusSyncHandler handler;
  gboolean emit_sync_message;
  gpointer handler_data;

  g_return_val_if_fail (GST_IS_BUS (bus), FALSE);
  g_return_val_if_fail (GST_IS_MESSAGE (message), FALSE);

  GST_DEBUG_OBJECT (bus, "[msg %p] posting on bus %" GST_PTR_FORMAT, message,
      message);

  /* check we didn't accidentally add a public flag that maps to same value:
   * ASYNC_DELIVERY is an internal-only flag and must not be set on entry */
  g_assert (!GST_MINI_OBJECT_FLAG_IS_SET (message,
          GST_MESSAGE_FLAG_ASYNC_DELIVERY));

  GST_OBJECT_LOCK (bus);
  /* check if the bus is flushing */
  if (GST_OBJECT_FLAG_IS_SET (bus, GST_BUS_FLUSHING))
    goto is_flushing;

  /* snapshot the sync handler and emission state under the lock so the
   * rest of the function can run unlocked */
  handler = bus->priv->sync_handler;
  handler_data = bus->priv->sync_handler_data;
  emit_sync_message = bus->priv->num_sync_message_emitters > 0;
  GST_OBJECT_UNLOCK (bus);

  /* first call the sync handler if it is installed */
  if (handler)
    reply = handler (bus, message, handler_data);

  /* emit sync-message if requested to do so via
     gst_bus_enable_sync_message_emission. terrible but effective.
     skipped when the installed handler IS the signal handler, to avoid
     emitting the signal twice */
  if (emit_sync_message && reply != GST_BUS_DROP
      && handler != gst_bus_sync_signal_handler)
    gst_bus_sync_signal_handler (bus, message, NULL);

  /* If this is a bus without async message delivery
   * always drop the message */
  if (!bus->priv->poll)
    reply = GST_BUS_DROP;

  /* now see what we should do with the message */
  switch (reply) {
    case GST_BUS_DROP:
      /* drop the message: the sync handler (or the lack of a poll) decided
       * nobody else should see it. NOTE(review): no unref here — presumably
       * the sync handler took ownership when returning DROP; verify against
       * the GstBusSyncHandler contract */
      GST_DEBUG_OBJECT (bus, "[msg %p] dropped", message);
      break;
    case GST_BUS_PASS:
      /* pass the message to the async queue, refcount passed in the queue */
      GST_DEBUG_OBJECT (bus, "[msg %p] pushing on async queue", message);
      gst_atomic_queue_push (bus->priv->queue, message);
      /* wake up any thread blocked in the bus poll */
      gst_poll_write_control (bus->priv->poll);
      GST_DEBUG_OBJECT (bus, "[msg %p] pushed on async queue", message);
      break;
    case GST_BUS_ASYNC:
    {
      /* async delivery, we need a mutex and a cond to block
       * on */
      GCond *cond = GST_MESSAGE_GET_COND (message);
      GMutex *lock = GST_MESSAGE_GET_LOCK (message);

      g_cond_init (cond);
      g_mutex_init (lock);

      /* mark the message so its destruction signals the cond below */
      GST_MINI_OBJECT_FLAG_SET (message, GST_MESSAGE_FLAG_ASYNC_DELIVERY);

      GST_DEBUG_OBJECT (bus, "[msg %p] waiting for async delivery", message);

      /* now we lock the message mutex, send the message to the async
       * queue. When the message is handled by the app and destroyed,
       * the cond will be signalled and we can continue */
      g_mutex_lock (lock);
      gst_atomic_queue_push (bus->priv->queue, message);
      gst_poll_write_control (bus->priv->poll);

      /* now block till the message is freed */
      g_cond_wait (cond, lock);

      /* we acquired a new ref from gst_message_dispose() so we can clean up */
      g_mutex_unlock (lock);

      GST_DEBUG_OBJECT (bus, "[msg %p] delivered asynchronously", message);

      GST_MINI_OBJECT_FLAG_UNSET (message, GST_MESSAGE_FLAG_ASYNC_DELIVERY);

      g_mutex_clear (lock);
      g_cond_clear (cond);

      /* drop the extra ref taken at dispose time */
      gst_message_unref (message);
      break;
    }
    default:
      g_warning ("invalid return from bus sync handler");
      break;
  }
  return TRUE;

  /* ERRORS */
is_flushing:
  {
    GST_DEBUG_OBJECT (bus, "bus is flushing");
    GST_OBJECT_UNLOCK (bus);
    /* flushing bus takes ownership and discards the message */
    gst_message_unref (message);
    return FALSE;
  }
}
/* gst_base_video_codec_src_event:
 * @pad: the source pad the event arrived on
 * @event: the event (ownership handled per-branch below)
 *
 * Source-pad event handler. SEEK events are converted from the caller's
 * format into GST_FORMAT_TIME (via gst_base_video_encoded_video_convert)
 * and re-sent upstream as a new seek event; all other events are pushed
 * upstream unchanged.
 *
 * Returns: TRUE if the (possibly rewritten) event was handled upstream.
 */
static gboolean
gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoCodec *base_video_codec;
  gboolean res = FALSE;

  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      /* the original event is replaced by real_seek below */
      gst_event_unref (event);

      /* convert both endpoints to time; bail out on unsupported formats */
      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);

      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      /* FIX: restored "&timestamp" — the previous text contained the
       * HTML-entity mangling "&times;tamp" (code is disabled by #if 0,
       * but the source should still be well-formed) */
      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_codec);
      base_video_codec->proportion = proportion;
      base_video_codec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_codec);

      GST_DEBUG_OBJECT (base_video_codec,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
    }
#endif
    default:
      /* forward everything else untouched */
      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
  }
done:
  gst_object_unref (base_video_codec);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
  goto done;
}
/* pollthread_func:
 * Dedicated server thread for shmsink: accepts new control-socket clients,
 * services per-client reads (buffer-release messages), and tears down
 * clients whose sockets close or error out.
 *
 * NOTE(review): as visible in this chunk the function text appears
 * truncated — the final "return NULL;" and closing brace of the function
 * are not present after the while loop; verify against the full file.
 */
static gpointer
pollthread_func (gpointer data)
{
  GstShmSink *self = GST_SHM_SINK (data);
  GList *item;
  GstClockTime timeout = GST_CLOCK_TIME_NONE;

  while (!self->stop) {
    if (gst_poll_wait (self->poll, timeout) < 0)
      return NULL;

    timeout = GST_CLOCK_TIME_NONE;

    if (self->stop)
      return NULL;

    /* server control socket gone: fatal */
    if (gst_poll_fd_has_closed (self->poll, &self->serverpollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed read from shmsink"),
          ("Control socket has closed"));
      return NULL;
    }

    if (gst_poll_fd_has_error (self->poll, &self->serverpollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsink"),
          ("Control socket has error"));
      return NULL;
    }

    /* a new client is connecting on the server socket */
    if (gst_poll_fd_can_read (self->poll, &self->serverpollfd)) {
      ShmClient *client;
      struct GstShmClient *gclient;

      GST_OBJECT_LOCK (self);
      client = sp_writer_accept_client (self->pipe);
      GST_OBJECT_UNLOCK (self);

      if (!client) {
        GST_ELEMENT_ERROR (self, RESOURCE, READ,
            ("Failed to read from shmsink"),
            ("Control socket returns wrong data"));
        return NULL;
      }

      /* register the new client's fd for read polling */
      gclient = g_slice_new (struct GstShmClient);
      gclient->client = client;
      gst_poll_fd_init (&gclient->pollfd);
      gclient->pollfd.fd = sp_writer_get_client_fd (client);
      gst_poll_add_fd (self->poll, &gclient->pollfd);
      gst_poll_fd_ctl_read (self->poll, &gclient->pollfd, TRUE);
      self->clients = g_list_prepend (self->clients, gclient);
      g_signal_emit (self, signals[SIGNAL_CLIENT_CONNECTED], 0,
          gclient->pollfd.fd);
      /* we need to call gst_poll_wait before calling gst_poll_* status
         functions on that new descriptor, so restart the loop, so _wait
         will have been called on all elements of self->poll, whether
         they have just been added or not. */
      timeout = 0;
      continue;
    }

    /* service every connected client; the list is re-walked from the top
     * after any removal (goto again) since removal invalidates 'item' */
  again:
    for (item = self->clients; item; item = item->next) {
      struct GstShmClient *gclient = item->data;

      if (gst_poll_fd_has_closed (self->poll, &gclient->pollfd)) {
        GST_WARNING_OBJECT (self, "One client is gone, closing");
        goto close_client;
      }

      if (gst_poll_fd_has_error (self->poll, &gclient->pollfd)) {
        GST_WARNING_OBJECT (self, "One client fd has error, closing");
        goto close_client;
      }

      if (gst_poll_fd_can_read (self->poll, &gclient->pollfd)) {
        int rv;
        gpointer tag = NULL;

        GST_OBJECT_LOCK (self);
        rv = sp_writer_recv (self->pipe, gclient->client, &tag);
        GST_OBJECT_UNLOCK (self);

        if (rv < 0) {
          GST_WARNING_OBJECT (self, "One client has read error,"
              " closing (retval: %d errno: %d)", rv, errno);
          goto close_client;
        }

        /* rv == 0 means a buffer was released and 'tag' is its GstBuffer;
         * otherwise tag must be NULL */
        g_assert (rv == 0 || tag == NULL);

        if (rv == 0)
          gst_buffer_unref (tag);
      }
      continue;
    close_client:
      {
        GSList *list = NULL;
        /* closing the client releases any buffers it still held */
        GST_OBJECT_LOCK (self);
        sp_writer_close_client (self->pipe, gclient->client,
            (sp_buffer_free_callback) free_buffer_locked, (void **) &list);
        GST_OBJECT_UNLOCK (self);
        g_slist_free_full (list, (GDestroyNotify) gst_buffer_unref);
      }

      gst_poll_remove_fd (self->poll, &gclient->pollfd);
      self->clients = g_list_remove (self->clients, gclient);
      g_signal_emit (self, signals[SIGNAL_CLIENT_DISCONNECTED], 0,
          gclient->pollfd.fd);
      g_slice_free (struct GstShmClient, gclient);
      /* list mutated: restart the iteration */
      goto again;
    }

    /* wake any thread waiting for released buffers */
    g_cond_broadcast (&self->cond);
  }
/* gst_wildmidi_do_seek:
 * @wildmidi: the element
 * @event: a SEEK event (not consumed here)
 *
 * Handle a seek on the src pad: convert the requested positions to
 * samples, flush (or pause the streaming task), update the internal
 * sample segment, seek the WildMidi song, push a new segment event and
 * restart the streaming task.
 *
 * Returns: TRUE on success, FALSE if there is no song loaded or the
 * requested format cannot be converted to samples.
 */
static gboolean
gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
{
  gdouble rate;
  GstFormat src_format, dst_format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gboolean flush, update;
#ifdef HAVE_WILDMIDI_0_2_2
  gboolean accurate;
#endif
  gboolean res;
  unsigned long int sample;     /* WildMidi seek APIs take unsigned long */
  GstSegment *segment;

  /* cannot seek before a song has been parsed */
  if (!wildmidi->song)
    return FALSE;

  gst_event_parse_seek (event, &rate, &src_format, &flags,
      &start_type, &start, &stop_type, &stop);

  /* convert the input format to samples */
  dst_format = GST_FORMAT_DEFAULT;
  res = TRUE;
  if (start_type != GST_SEEK_TYPE_NONE) {
    res = gst_wildmidi_src_convert (wildmidi, src_format, start,
        &dst_format, &start);
  }
  if (res && stop_type != GST_SEEK_TYPE_NONE) {
    res = gst_wildmidi_src_convert (wildmidi, src_format, stop,
        &dst_format, &stop);
  }

  /* unsupported format */
  if (!res)
    return res;

  flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
#ifdef HAVE_WILDMIDI_0_2_2
  accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE);
#endif

  if (flush) {
    GST_DEBUG ("performing flush");
    gst_pad_push_event (wildmidi->srcpad, gst_event_new_flush_start ());
  } else {
    /* no flush requested: pause the streaming task instead so we can
     * take the stream lock below */
    gst_pad_stop_task (wildmidi->sinkpad);
  }

  segment = wildmidi->o_segment;

  /* serialize against the streaming loop */
  GST_PAD_STREAM_LOCK (wildmidi->sinkpad);

  if (flush) {
    gst_pad_push_event (wildmidi->srcpad, gst_event_new_flush_stop (TRUE));
  }

  /* update the segment now */
  gst_segment_do_seek (segment, rate, dst_format, flags,
      start_type, start, stop_type, stop, &update);

  /* we need to seek to position in the segment now, sample will be updated */
  sample = segment->position;

  GST_OBJECT_LOCK (wildmidi);
#ifdef HAVE_WILDMIDI_0_2_2
  /* accurate (sampled) seek only exists in wildmidi >= 0.2.2 */
  if (accurate) {
    WildMidi_SampledSeek (wildmidi->song, &sample);
  } else {
    WildMidi_FastSeek (wildmidi->song, &sample);
  }
#else
  WildMidi_FastSeek (wildmidi->song, &sample);
#endif
  GST_OBJECT_UNLOCK (wildmidi);

  /* the library may have snapped the position; realign the segment to the
   * sample actually reached */
  segment->start = segment->time = segment->position = sample;

  gst_pad_push_event (wildmidi->srcpad,
      gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME));

  /* restart the streaming loop from the new position */
  gst_pad_start_task (wildmidi->sinkpad,
      (GstTaskFunction) gst_wildmidi_loop, wildmidi->sinkpad, NULL);

  wildmidi->discont = TRUE;

  GST_PAD_STREAM_UNLOCK (wildmidi->sinkpad);
  GST_DEBUG ("seek done");

  return TRUE;
}