/* Src-pad streaming task for the Android MediaCodec audio decoder.
 *
 * Each iteration dequeues one decoded output buffer from the codec,
 * converts it into a GstBuffer (optionally reordering channels), pushes it
 * downstream and releases the codec buffer. Runs with the audio-decoder
 * stream lock held, except around the blocking dequeue call and while
 * signalling the drain condition. On any error the task is paused and an
 * EOS event is pushed downstream.
 */
static void
gst_amc_audio_dec_loop (GstAmcAudioDec * self)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gboolean is_eos;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  gint idx;
  GError *err = NULL;

  GST_AUDIO_DECODER_STREAM_LOCK (self);

retry:
  /*if (self->input_caps_changed) {
     idx = INFO_OUTPUT_FORMAT_CHANGED;
     } else { */
  GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
  /* Drop the stream lock while blocked in the codec so upstream can keep
   * feeding input and flushing can make progress */
  GST_AUDIO_DECODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx =
      gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
      &err);
  GST_AUDIO_DECODER_STREAM_LOCK (self);
  /*} */

  if (idx < 0) {
    /* Negative index: either a codec notification or an error.
     * Flushing takes precedence over any error reported here. */
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_OUTPUT_BUFFERS_CHANGED:
        /* Handled internally */
        g_assert_not_reached ();
        break;
      case INFO_OUTPUT_FORMAT_CHANGED:{
        GstAmcFormat *format;
        gchar *format_string;

        GST_DEBUG_OBJECT (self, "Output format has changed");

        format = gst_amc_codec_get_output_format (self->codec, &err);
        if (!format)
          goto format_error;

        format_string = gst_amc_format_to_string (format, &err);
        if (err) {
          gst_amc_format_free (format);
          goto format_error;
        }
        GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
        g_free (format_string);

        /* Renegotiate src caps from the new codec output format */
        if (!gst_amc_audio_dec_set_src_caps (self, format)) {
          gst_amc_format_free (format);
          goto format_error;
        }
        gst_amc_format_free (format);

        goto retry;
      }
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
        goto retry;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto retry;
  }

  GST_DEBUG_OBJECT (self,
      "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
      " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
      buffer_info.presentation_time_us, buffer_info.flags);

  /* !! normalizes the flag test to TRUE/FALSE */
  is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);

  buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_output_buffer;

  if (buffer_info.size > 0) {
    GstBuffer *outbuf;
    GstMapInfo minfo;

    /* This sometimes happens at EOS or if the input is not properly framed,
     * let's handle it gracefully by allocating a new buffer for the current
     * caps and filling it */
    if (buffer_info.size % self->info.bpf != 0)
      goto invalid_buffer_size;

    outbuf =
        gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self),
        buffer_info.size);
    if (!outbuf)
      goto failed_allocate;

    gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE);
    if (self->needs_reorder) {
      /* Copy sample-by-sample, remapping channel positions per reorder_map.
       * NOTE(review): assumes 16-bit samples (gint16) — the reorder path is
       * only taken for formats where that holds; confirm against setup code. */
      gint i, n_samples, c, n_channels;
      gint *reorder_map = self->reorder_map;
      gint16 *dest, *source;

      dest = (gint16 *) minfo.data;
      source = (gint16 *) (buf->data + buffer_info.offset);
      n_samples = buffer_info.size / self->info.bpf;
      n_channels = self->info.channels;

      for (i = 0; i < n_samples; i++) {
        for (c = 0; c < n_channels; c++) {
          dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c];
        }
      }
    } else {
      orc_memcpy (minfo.data, buf->data + buffer_info.offset,
          buffer_info.size);
    }
    gst_buffer_unmap (outbuf, &minfo);

    /* spf != -1 means the codec has a fixed samples-per-frame; collect data
     * in the adapter so we can push whole codec frames downstream */
    if (self->spf != -1) {
      gst_adapter_push (self->output_adapter, outbuf);
    } else {
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf, 1);
    }
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (self->spf != -1) {
    GstBuffer *outbuf;
    guint avail = gst_adapter_available (self->output_adapter);
    guint nframes;

    /* On EOS we take the complete adapter content, no matter
     * if it is a multiple of the codec frame size or not.
     * Otherwise we take a multiple of codec frames and push
     * them downstream */
    avail /= self->info.bpf;
    if (!is_eos) {
      nframes = avail / self->spf;
      avail = nframes * self->spf;
    } else {
      nframes = (avail + self->spf - 1) / self->spf;
    }
    avail *= self->info.bpf;

    if (avail > 0) {
      outbuf = gst_adapter_take_buffer (self->output_adapter, avail);
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf,
          nframes);
    }
  }

  /* Hand the output buffer back to the codec (render=FALSE: audio) */
  if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto failed_release;
  }

  if (is_eos || flow_ret == GST_FLOW_EOS) {
    /* Wake up a drain waiter, or convert codec EOS into GST_FLOW_EOS.
     * Stream lock must be dropped before taking drain_lock (lock order). */
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
    } else if (flow_ret == GST_FLOW_OK) {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);
    GST_AUDIO_DECODER_STREAM_LOCK (self);
  } else {
    GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_DECODER_STREAM_UNLOCK (self);

  return;

  /* ERROR LABELS: each pauses the task, records the error flow and wakes any
   * drain waiter so gst_amc_audio_dec_drain() cannot block forever. */

dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

format_error:
  {
    if (err)
      GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    else
      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
          ("Failed to handle format"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_release:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    }
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_to_get_output_buffer:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

invalid_buffer_size:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Invalid buffer size %u (bfp %d)", buffer_info.size, self->info.bpf));
    /* still hand the buffer back so the codec isn't starved; warn only */
    gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_allocate:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to allocate output buffer"));
    gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
}
/* MT safe */
/* State-change handler for GstPipeline.
 *
 * Before delegating to the parent class it prepares the bus and, on
 * PAUSED->PLAYING of a top-level pipeline, selects/distributes a clock and
 * recomputes the base_time so running_time continues from the start_time
 * snapshot. After the parent state change it resets or re-snapshots the
 * start_time and optionally flushes the bus on READY->NULL.
 */
static GstStateChangeReturn
gst_pipeline_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn result = GST_STATE_CHANGE_SUCCESS;
  GstPipeline *pipeline = GST_PIPELINE_CAST (element);
  GstClock *clock;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* un-flush the bus so messages flow again */
      GST_OBJECT_LOCK (element);
      if (element->bus)
        gst_bus_set_flushing (element->bus, FALSE);
      GST_OBJECT_UNLOCK (element);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* force clock re-selection on the next PAUSED->PLAYING */
      GST_OBJECT_LOCK (element);
      pipeline->priv->update_clock = TRUE;
      GST_OBJECT_UNLOCK (element);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
    {
      GstClockTime now, start_time, last_start_time, delay;
      gboolean update_clock;
      GstClock *cur_clock;

      GST_DEBUG_OBJECT (element, "selecting clock and base_time");

      /* snapshot all shared state under the object lock */
      GST_OBJECT_LOCK (element);
      cur_clock = element->clock;
      if (cur_clock)
        gst_object_ref (cur_clock);
      /* get the desired running_time of the first buffer aka the start_time */
      start_time = GST_ELEMENT_START_TIME (pipeline);
      last_start_time = pipeline->priv->last_start_time;
      pipeline->priv->last_start_time = start_time;
      /* see if we need to update the clock */
      update_clock = pipeline->priv->update_clock;
      pipeline->priv->update_clock = FALSE;
      delay = pipeline->delay;
      GST_OBJECT_UNLOCK (element);

      /* running time changed, either with a PAUSED or a flush, we need to check
       * if there is a new clock & update the base time */
      /* only do this for top-level, however */
      if (GST_OBJECT_PARENT (element) == NULL &&
          (update_clock || last_start_time != start_time)) {
        GST_DEBUG_OBJECT (pipeline, "Need to update start_time");

        /* when going to PLAYING, select a clock when needed. If we just got
         * flushed, we don't reselect the clock. */
        if (update_clock) {
          GST_DEBUG_OBJECT (pipeline, "Need to update clock.");
          clock = gst_element_provide_clock (element);
        } else {
          GST_DEBUG_OBJECT (pipeline,
              "Don't need to update clock, using old clock.");
          /* only try to ref if cur_clock is not NULL */
          if (cur_clock)
            gst_object_ref (cur_clock);
          clock = cur_clock;
        }

        if (clock) {
          now = gst_clock_get_time (clock);
        } else {
          GST_DEBUG_OBJECT (pipeline, "no clock, using base time of NONE");
          now = GST_CLOCK_TIME_NONE;
        }

        if (clock != cur_clock) {
          /* now distribute the clock (which could be NULL). If some
           * element refuses the clock, this will return FALSE and
           * we effectively fail the state change. */
          if (!gst_element_set_clock (element, clock))
            goto invalid_clock;

          /* if we selected and distributed a new clock, let the app
           * know about it */
          gst_element_post_message (element,
              gst_message_new_new_clock (GST_OBJECT_CAST (element), clock));
        }

        if (clock)
          gst_object_unref (clock);

        if (start_time != GST_CLOCK_TIME_NONE && now != GST_CLOCK_TIME_NONE) {
          /* base_time = now - running_time (+ configured delay) so that
           * running_time resumes from start_time */
          GstClockTime new_base_time = now - start_time + delay;
          GST_DEBUG_OBJECT (element,
              "start_time=%" GST_TIME_FORMAT ", now=%" GST_TIME_FORMAT
              ", base_time %" GST_TIME_FORMAT,
              GST_TIME_ARGS (start_time), GST_TIME_ARGS (now),
              GST_TIME_ARGS (new_base_time));
          gst_element_set_base_time (element, new_base_time);
        } else {
          GST_DEBUG_OBJECT (pipeline,
              "NOT adjusting base_time because start_time is NONE");
        }
      } else {
        GST_DEBUG_OBJECT (pipeline,
            "NOT adjusting base_time because we selected one before");
      }

      if (cur_clock)
        gst_object_unref (cur_clock);
      break;
    }
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    {
      /* we take a start_time snapshot before calling the children state changes
       * so that they know about when the pipeline PAUSED. */
      pipeline_update_start_time (element);
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
  }

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
    {
      /* READY to PAUSED starts running_time from 0 */
      reset_start_time (pipeline, 0);
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    {
      /* Take a new snapshot of the start_time after calling the state change on
       * all children. This will be the running_time of the pipeline when we go
       * back to PLAYING */
      pipeline_update_start_time (element);
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
    {
      GstBus *bus;
      gboolean auto_flush;

      /* grab some stuff before we release the lock to flush out the bus */
      GST_OBJECT_LOCK (element);
      if ((bus = element->bus))
        gst_object_ref (bus);
      auto_flush = pipeline->priv->auto_flush_bus;
      GST_OBJECT_UNLOCK (element);

      if (bus) {
        if (auto_flush) {
          gst_bus_set_flushing (bus, TRUE);
        } else {
          GST_INFO_OBJECT (element, "not flushing bus, auto-flushing disabled");
        }
        gst_object_unref (bus);
      }
      break;
    }
  }
  return result;

  /* ERRORS */
invalid_clock:
  {
    /* we generate this error when the selected clock was not
     * accepted by some element */
    GST_ELEMENT_ERROR (pipeline, CORE, CLOCK,
        (_("Selected clock cannot be used in pipeline.")),
        ("Pipeline cannot operate with selected clock"));
    GST_DEBUG_OBJECT (pipeline,
        "Pipeline cannot operate with selected clock %p", clock);
    if (clock)
      gst_object_unref (clock);
    return GST_STATE_CHANGE_FAILURE;
  }
}
/* encode the CMML head tag and push the CMML headers */ static void gst_cmml_enc_parse_tag_head (GstCmmlEnc * enc, GstCmmlTagHead * head) { GList *headers = NULL; GList *walk; guchar *head_string; GstCaps *caps; GstBuffer *ident_buf, *preamble_buf, *head_buf; GstBuffer *buffer; if (enc->preamble == NULL) goto flow_unexpected; GST_INFO_OBJECT (enc, "parsing head tag"); enc->flow_return = gst_cmml_enc_new_ident_header (enc, &ident_buf); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, ident_buf); enc->flow_return = gst_cmml_enc_new_buffer (enc, enc->preamble, strlen ((gchar *) enc->preamble), &preamble_buf); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, preamble_buf); head_string = gst_cmml_parser_tag_head_to_string (enc->parser, head); enc->flow_return = gst_cmml_enc_new_buffer (enc, head_string, strlen ((gchar *) head_string), &head_buf); g_free (head_string); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, head_buf); caps = gst_pad_get_caps (enc->srcpad); caps = gst_cmml_enc_set_header_on_caps (enc, caps, ident_buf, preamble_buf, head_buf); while (headers) { buffer = GST_BUFFER (headers->data); /* set granulepos 0 on headers */ GST_BUFFER_OFFSET_END (buffer) = 0; gst_buffer_set_caps (buffer, caps); enc->flow_return = gst_cmml_enc_push (enc, buffer); headers = g_list_delete_link (headers, headers); if (enc->flow_return != GST_FLOW_OK) goto push_error; } gst_caps_unref (caps); enc->sent_headers = TRUE; return; flow_unexpected: GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL), ("got head tag before preamble")); enc->flow_return = GST_FLOW_ERROR; return; push_error: gst_caps_unref (caps); /* fallthrough */ alloc_error: for (walk = headers; walk; walk = walk->next) gst_buffer_unref (GST_BUFFER (walk->data)); g_list_free (headers); return; }
static GstFlowReturn gst_ebml_read_element_length (GstEbmlRead * ebml, guint64 * length, gint * rread) { GstFlowReturn ret; guint8 *buf; gint len_mask = 0x80, read = 1, n = 1, num_ffs = 0; guint64 total; guint8 b; ret = gst_ebml_read_peek_bytes (ebml, 1, NULL, &buf); if (ret != GST_FLOW_OK) return ret; b = GST_READ_UINT8 (buf); total = (guint64) b; while (read <= 8 && !(total & len_mask)) { read++; len_mask >>= 1; } if (read > 8) { guint64 pos = ebml->offset; GST_ELEMENT_ERROR (ebml, STREAM, DEMUX, (NULL), ("Invalid EBML length size tag (0x%x) at position %" G_GUINT64_FORMAT " (0x%" G_GINT64_MODIFIER "x)", (guint) b, pos, pos)); return GST_FLOW_ERROR; } if ((total &= (len_mask - 1)) == len_mask - 1) num_ffs++; ret = gst_ebml_read_peek_bytes (ebml, read, NULL, &buf); if (ret != GST_FLOW_OK) return ret; while (n < read) { guint8 b = GST_READ_UINT8 (buf + n); if (b == 0xff) num_ffs++; total = (total << 8) | b; ++n; } if (read == num_ffs) *length = G_MAXUINT64; else *length = total; if (rread) *rread = read; ebml->offset += read; return GST_FLOW_OK; }
/* State-change handler for the libvisual GL element.
 *
 * On READY->PAUSED it queries the parent bin for a shared GstGLDisplay
 * (creating one if this element is the GL sink of the chain), then creates
 * and sets up the libvisual actor/video/audio objects. On PAUSED->READY it
 * tears down the GL resources again.
 *
 * Fixes vs. previous revision:
 *  - returned FALSE from a GstStateChangeReturn function; now returns
 *    GST_STATE_CHANGE_FAILURE explicitly (same numeric value, correct type)
 *  - the actor_setup_failed paths leaked the query and the parent ref;
 *    they are now released before jumping to the error label
 */
static GstStateChangeReturn
gst_visual_gl_change_state (GstElement * element, GstStateChange transition)
{
  GstVisualGL *visual = GST_VISUAL_GL (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
    {
      GstElement *parent = GST_ELEMENT (gst_element_get_parent (visual));
      GstStructure *structure = NULL;
      GstQuery *query = NULL;
      gboolean isPerformed = FALSE;
      gchar *name;

      if (!parent) {
        GST_ELEMENT_ERROR (visual, CORE, STATE_CHANGE, (NULL),
            ("A parent bin is required"));
        return GST_STATE_CHANGE_FAILURE;
      }

      /* ask downstream (via the parent bin) for an existing GL display */
      name = gst_element_get_name (visual);
      structure = gst_structure_new (name, NULL);
      query = gst_query_new_application (GST_QUERY_CUSTOM, structure);
      g_free (name);

      isPerformed = gst_element_query (parent, query);

      if (isPerformed) {
        const GValue *id_value =
            gst_structure_get_value (structure, "gstgldisplay");
        if (G_VALUE_HOLDS_POINTER (id_value))
          /* at least one gl element is after in our gl chain */
          visual->display =
              gst_object_ref (GST_GL_DISPLAY (g_value_get_pointer (id_value)));
        else {
          /* this gl filter is a sink in terms of the gl chain */
          visual->display = gst_gl_display_new ();
          gst_gl_display_create_context (visual->display, 0);
          //TODO visual->external_gl_context);
        }

        gst_visual_gl_reset (visual);

        visual->actor =
            visual_actor_new (GST_VISUAL_GL_GET_CLASS (visual)->plugin->
            info->plugname);
        visual->video = visual_video_new ();
        visual->audio = visual_audio_new ();

        if (!visual->actor || !visual->video) {
          /* release local refs before the shared error path */
          gst_query_unref (query);
          gst_object_unref (GST_OBJECT (parent));
          goto actor_setup_failed;
        }

        /* actor setup must run in the GL thread */
        gst_gl_display_thread_add (visual->display,
            (GstGLDisplayThreadFunc) actor_setup, visual);

        if (visual->actor_setup_result != 0) {
          gst_query_unref (query);
          gst_object_unref (GST_OBJECT (parent));
          goto actor_setup_failed;
        }

        visual_actor_set_video (visual->actor, visual->video);
      }

      gst_query_unref (query);
      gst_object_unref (GST_OBJECT (parent));

      if (!isPerformed)
        return GST_STATE_CHANGE_FAILURE;
    }
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    {
      /* tear down GL resources in reverse order of creation */
      if (visual->fbo) {
        gst_gl_display_del_fbo (visual->display, visual->fbo,
            visual->depthbuffer);
        visual->fbo = 0;
        visual->depthbuffer = 0;
      }
      if (visual->midtexture) {
        gst_gl_display_del_texture (visual->display, visual->midtexture,
            visual->width, visual->height);
        visual->midtexture = 0;
      }
      if (visual->display) {
        gst_object_unref (visual->display);
        visual->display = NULL;
      }

      gst_visual_gl_clear_actors (visual);
    }
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;

  /* ERRORS */
actor_setup_failed:
  {
    GST_ELEMENT_ERROR (visual, LIBRARY, INIT, (NULL),
        ("could not set up actor"));
    gst_visual_gl_clear_actors (visual);
    return GST_STATE_CHANGE_FAILURE;
  }
}
static GstFlowReturn gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstFlowReturn ret; GstRtpSsrcDemux *demux; guint32 ssrc; GstRTCPPacket packet; GstRTCPBuffer rtcp = { NULL, }; GstPad *srcpad; GstRtpSsrcDemuxPad *dpad; demux = GST_RTP_SSRC_DEMUX (parent); if (!gst_rtcp_buffer_validate (buf)) goto invalid_rtcp; gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp); if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) { gst_rtcp_buffer_unmap (&rtcp); goto invalid_rtcp; } /* first packet must be SR or RR or else the validate would have failed */ switch (gst_rtcp_packet_get_type (&packet)) { case GST_RTCP_TYPE_SR: /* get the ssrc so that we can route it to the right source pad */ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL, NULL); break; default: goto unexpected_rtcp; } gst_rtcp_buffer_unmap (&rtcp); GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc); srcpad = find_or_create_demux_pad_for_ssrc (demux, ssrc, RTCP_PAD); if (srcpad == NULL) goto create_failed; /* push to srcpad */ ret = gst_pad_push (srcpad, buf); if (ret != GST_FLOW_OK) { /* check if the ssrc still there, may have been removed */ GST_PAD_LOCK (demux); dpad = find_demux_pad_for_ssrc (demux, ssrc); if (dpad == NULL || dpad->rtcp_pad != srcpad) { /* SSRC was removed during the push ... ignore the error */ ret = GST_FLOW_OK; } GST_PAD_UNLOCK (demux); } gst_object_unref (srcpad); return ret; /* ERRORS */ invalid_rtcp: { /* this is fatal and should be filtered earlier */ GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL), ("Dropping invalid RTCP packet")); gst_buffer_unref (buf); return GST_FLOW_ERROR; } unexpected_rtcp: { GST_DEBUG_OBJECT (demux, "dropping unexpected RTCP packet"); gst_buffer_unref (buf); return GST_FLOW_OK; } create_failed: { GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL), ("Could not create new pad")); gst_buffer_unref (buf); return GST_FLOW_ERROR; } }
/* Encodes one video frame with the legacy avcodec_encode_video() API.
 *
 * Maps the input buffer into the AVFrame plane pointers, encodes into the
 * reusable working buffer, then copies the result into an output buffer on
 * the oldest pending frame and finishes it.
 *
 * Fix vs. previous revision: on encoder failure the frame was leaked
 * (returned GST_FLOW_OK without finishing it), so frames and their
 * timestamps piled up in the base class. The error path now finishes the
 * frame, matching the avcodec_encode_video2()-based implementation.
 */
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
  GstBuffer *outbuf;
  gint ret_size = 0, c;
  GstVideoInfo *info = &ffmpegenc->input_state->info;
  GstVideoFrame vframe;

  /* force a keyframe when the base class requests one */
  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
    ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;

  if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  /* Fill avpicture */
  for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
    if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
      ffmpegenc->picture->data[c] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, c);
      ffmpegenc->picture->linesize[c] =
          GST_VIDEO_FRAME_COMP_STRIDE (&vframe, c);
    } else {
      ffmpegenc->picture->data[c] = NULL;
      ffmpegenc->picture->linesize[c] = 0;
    }
  }

  ffmpegenc->picture->pts =
      gst_ffmpeg_time_gst_to_ff (frame->pts /
      ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);

  ffmpegenc_setup_working_buf (ffmpegenc);

  ret_size = avcodec_encode_video (ffmpegenc->context,
      ffmpegenc->working_buf, ffmpegenc->working_buf_size, ffmpegenc->picture);

  gst_video_frame_unmap (&vframe);

  if (ret_size < 0)
    goto encode_fail;

  /* Encoder needs more data */
  if (!ret_size)
    return GST_FLOW_OK;

  /* save stats info if there is some as well as a stats file */
  if (ffmpegenc->file && ffmpegenc->context->stats_out)
    if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
      GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
          (("Could not write to file \"%s\"."), ffmpegenc->filename),
          GST_ERROR_SYSTEM);

  gst_video_codec_frame_unref (frame);

  /* Get oldest frame */
  frame = gst_video_encoder_get_oldest_frame (encoder);

  /* Allocate output buffer */
  if (gst_video_encoder_allocate_output_frame (encoder, frame,
          ret_size) != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    goto alloc_fail;
  }

  outbuf = frame->output_buffer;
  gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);

  /* buggy codec may not set coded_frame */
  if (ffmpegenc->context->coded_frame) {
    if (ffmpegenc->context->coded_frame->key_frame)
      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  } else
    GST_WARNING_OBJECT (ffmpegenc, "codec did not provide keyframe info");

  /* Reset frame type */
  if (ffmpegenc->picture->pict_type)
    ffmpegenc->picture->pict_type = 0;

  return gst_video_encoder_finish_frame (encoder, frame);

  /* ERRORS */
encode_fail:
  {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegVidEncClass *oclass =
        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "avenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    /* avoid frame (and ts etc) piling up */
    return gst_video_encoder_finish_frame (encoder, frame);
  }
alloc_fail:
  {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegVidEncClass *oclass =
        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "avenc_%s: failed to allocate buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    return GST_FLOW_ERROR;
  }
}
/* Creates the shout connection handle and configures all connection
 * properties from the element's properties. Returns FALSE (with an element
 * error naming the failing property) if any setter fails.
 *
 * Fix vs. previous revision: the "username" debug line printed the literal
 * "source" although the code sets sink->username; it now logs the value
 * actually used.
 */
static gboolean
gst_shout2send_start (GstBaseSink * basesink)
{
  GstShout2send *sink = GST_SHOUT2SEND (basesink);
  const gchar *cur_prop;
  gshort proto = 3;
  gchar *version_string;

  GST_DEBUG_OBJECT (sink, "starting");

  sink->conn = shout_new ();

  switch (sink->protocol) {
    case SHOUT2SEND_PROTOCOL_XAUDIOCAST:
      proto = SHOUT_PROTOCOL_XAUDIOCAST;
      break;
    case SHOUT2SEND_PROTOCOL_ICY:
      proto = SHOUT_PROTOCOL_ICY;
      break;
    case SHOUT2SEND_PROTOCOL_HTTP:
      proto = SHOUT_PROTOCOL_HTTP;
      break;
  }

  cur_prop = "protocol";
  GST_DEBUG_OBJECT (sink, "setting protocol: %d", sink->protocol);
  if (shout_set_protocol (sink->conn, proto) != SHOUTERR_SUCCESS)
    goto set_failed;

  /* --- FIXME: shout requires an ip, and fails if it is given a host. */
  /* may want to put convert_to_ip(shout2send->ip) here */
  cur_prop = "ip";
  GST_DEBUG_OBJECT (sink, "setting ip: %s", sink->ip);
  if (shout_set_host (sink->conn, sink->ip) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "port";
  GST_DEBUG_OBJECT (sink, "setting port: %u", sink->port);
  if (shout_set_port (sink->conn, sink->port) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "password";
  GST_DEBUG_OBJECT (sink, "setting password: %s", sink->password);
  if (shout_set_password (sink->conn, sink->password) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "streamname";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->streamname);
  if (shout_set_name (sink->conn, sink->streamname) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "description";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->description);
  if (shout_set_description (sink->conn, sink->description) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "genre";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->genre);
  if (shout_set_genre (sink->conn, sink->genre) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "mount";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->mount);
  if (shout_set_mount (sink->conn, sink->mount) != SHOUTERR_SUCCESS)
    goto set_failed;

  cur_prop = "username";
  /* log the value actually passed to libshout, not a stale literal */
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, sink->username);
  if (shout_set_user (sink->conn, sink->username) != SHOUTERR_SUCCESS)
    goto set_failed;

  version_string = gst_version_string ();
  cur_prop = "agent";
  GST_DEBUG_OBJECT (sink, "setting %s: %s", cur_prop, version_string);
  if (shout_set_agent (sink->conn, version_string) != SHOUTERR_SUCCESS) {
    g_free (version_string);
    goto set_failed;
  }
  g_free (version_string);

  return TRUE;

  /* ERROR */
set_failed:
  {
    GST_ELEMENT_ERROR (sink, LIBRARY, SETTINGS, (NULL),
        ("Error setting %s: %s", cur_prop, shout_get_error (sink->conn)));
    return FALSE;
  }
}
/* Encodes one video frame with the avcodec_encode_video2() API.
 *
 * The input GstBuffer stays mapped for as long as libav may reference the
 * AVFrame planes: a BufferInfo holding the ref and the map is attached to
 * the AVFrame via av_buffer_create() and released by buffer_info_free when
 * libav drops its last reference. The produced AVPacket is wrapped
 * zero-copy into the output GstBuffer and freed by
 * gst_ffmpegvidenc_free_avpacket when the buffer is destroyed.
 */
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
  GstBuffer *outbuf;
  gint ret = 0, c;
  GstVideoInfo *info = &ffmpegenc->input_state->info;
  AVPacket *pkt;
  int have_data = 0;
  BufferInfo *buffer_info;

  if (ffmpegenc->interlaced) {
    ffmpegenc->picture->interlaced_frame = TRUE;
    /* if this is not the case, a filter element should be used to swap fields */
    ffmpegenc->picture->top_field_first =
        GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* force a keyframe when the base class requests one */
  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
    ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;

  /* keep a ref + mapping of the input alive for as long as the AVFrame
   * references its planes */
  buffer_info = g_slice_new0 (BufferInfo);
  buffer_info->buffer = gst_buffer_ref (frame->input_buffer);

  if (!gst_video_frame_map (&buffer_info->vframe, info, frame->input_buffer,
          GST_MAP_READ)) {
    GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
    gst_buffer_unref (buffer_info->buffer);
    g_slice_free (BufferInfo, buffer_info);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }

  /* Fill avpicture */
  /* dummy AVBuffer whose free callback releases buffer_info (ref + map) */
  ffmpegenc->picture->buf[0] =
      av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);
  for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
    if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
      ffmpegenc->picture->data[c] =
          GST_VIDEO_FRAME_PLANE_DATA (&buffer_info->vframe, c);
      ffmpegenc->picture->linesize[c] =
          GST_VIDEO_FRAME_COMP_STRIDE (&buffer_info->vframe, c);
    } else {
      ffmpegenc->picture->data[c] = NULL;
      ffmpegenc->picture->linesize[c] = 0;
    }
  }

  ffmpegenc->picture->format = ffmpegenc->context->pix_fmt;
  ffmpegenc->picture->width = GST_VIDEO_FRAME_WIDTH (&buffer_info->vframe);
  ffmpegenc->picture->height = GST_VIDEO_FRAME_HEIGHT (&buffer_info->vframe);

  ffmpegenc->picture->pts =
      gst_ffmpeg_time_gst_to_ff (frame->pts /
      ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);

  have_data = 0;
  pkt = g_slice_new0 (AVPacket);

  ret = avcodec_encode_video2 (ffmpegenc->context, pkt,
      ffmpegenc->picture, &have_data);

  /* drops the dummy buf[0] ref, which in turn releases buffer_info */
  av_frame_unref (ffmpegenc->picture);

  /* the packet is only handed downstream when data was produced */
  if (ret < 0 || !have_data)
    g_slice_free (AVPacket, pkt);

  if (ret < 0)
    goto encode_fail;

  /* Encoder needs more data */
  if (!have_data) {
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_OK;
  }

  /* save stats info if there is some as well as a stats file */
  if (ffmpegenc->file && ffmpegenc->context->stats_out)
    if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
      GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
          (("Could not write to file \"%s\"."), ffmpegenc->filename),
          GST_ERROR_SYSTEM);

  gst_video_codec_frame_unref (frame);

  /* Get oldest frame */
  frame = gst_video_encoder_get_oldest_frame (encoder);

  /* wrap the AVPacket data zero-copy; freed with the buffer */
  outbuf =
      gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
      pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);
  frame->output_buffer = outbuf;

  if (pkt->flags & AV_PKT_FLAG_KEY)
    GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  else
    GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);

  return gst_video_encoder_finish_frame (encoder, frame);

  /* ERRORS */
encode_fail:
  {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegVidEncClass *oclass =
        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "avenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    /* avoid frame (and ts etc) piling up */
    return gst_video_encoder_finish_frame (encoder, frame);
  }
}
/* Open the DVB frontend device for the configured adapter/frontend number,
 * query its capabilities, and post a "dvb-adapter" element message that
 * describes the adapter type and its auto-negotiation capabilities.
 *
 * object:   the GstDvbSrc instance
 * writable: open the device O_RDWR when TRUE, O_RDONLY otherwise
 *
 * Returns TRUE on success (object->fd_frontend open and adapter_type set),
 * FALSE when the device could not be opened or queried.
 *
 * Fixes vs. previous revision:
 *  - the "auto-hierarchy" capability test used `%` (modulo) instead of the
 *    bitwise `&` used by every other capability test, so the reported flag
 *    was wrong for most caps values;
 *  - close() was called on object->fd_frontend in the open-failure path,
 *    where the fd is negative (invalid close on a non-fd).
 */
static gboolean
gst_dvbsrc_open_frontend (GstDvbSrc * object, gboolean writable)
{
  struct dvb_frontend_info fe_info;
  const char *adapter_desc = NULL;
  gchar *frontend_dev;
  GstStructure *adapter_structure;
  char *adapter_name = NULL;

  frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",
      object->adapter_number, object->frontend_number);
  GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);

  /* open frontend */
  if ((object->fd_frontend =
          open (frontend_dev, writable ? O_RDWR : O_RDONLY)) < 0) {
    switch (errno) {
      case ENOENT:
        GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,
            (_("Device \"%s\" does not exist."), frontend_dev), (NULL));
        break;
      default:
        GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,
            (_("Could not open frontend device \"%s\"."), frontend_dev),
            GST_ERROR_SYSTEM);
        break;
    }
    /* NOTE: open() failed, so there is no fd to close here */
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Device opened, querying information");

  if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {
    GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,
        (_("Could not get settings from frontend device \"%s\"."),
            frontend_dev), GST_ERROR_SYSTEM);
    close (object->fd_frontend);
    g_free (frontend_dev);
    return FALSE;
  }

  GST_DEBUG_OBJECT (object, "Got information about adapter : %s",
      fe_info.name);

  adapter_name = g_strdup (fe_info.name);

  object->adapter_type = fe_info.type;

  /* Build an adapter-description structure; each boolean reports whether
   * the driver advertises the corresponding FE_CAN_* auto capability. */
  switch (object->adapter_type) {
    case FE_QPSK:
      adapter_desc = "DVB-S";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_QAM:
      adapter_desc = "DVB-C";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_OFDM:
      adapter_desc = "DVB-T";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name,
          "auto-inversion", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_INVERSION_AUTO,
          "auto-qam", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_QAM_AUTO,
          "auto-transmission-mode", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,
          "auto-guard-interval", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO,
          /* was `caps % FE_CAN_HIERARCHY_AUTO`: modulo instead of bit test */
          "auto-hierarchy", G_TYPE_BOOLEAN,
          fe_info.caps & FE_CAN_HIERARCHY_AUTO,
          "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
      break;
    case FE_ATSC:
      adapter_desc = "ATSC";
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, adapter_desc,
          "name", G_TYPE_STRING, adapter_name, NULL);
      break;
    default:
      /* g_error() aborts the process; the structure below is only built
       * when a non-fatal log handler is installed */
      g_error ("Unknown frontend type: %d", object->adapter_type);
      adapter_structure = gst_structure_new ("dvb-adapter",
          "type", G_TYPE_STRING, "unknown", NULL);
  }

  GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);
  gst_element_post_message (GST_ELEMENT_CAST (object),
      gst_message_new_element (GST_OBJECT (object), adapter_structure));
  g_free (frontend_dev);
  g_free (adapter_name);
  return TRUE;
}
static GstFlowReturn gst_dvbsrc_read_device (GstDvbSrc * object, int size, GstBuffer ** buffer) { gint count = 0; gint ret_val = 0; GstBuffer *buf = gst_buffer_new_and_alloc (size); GstClockTime timeout = object->timeout * GST_USECOND; GstMapInfo map; g_return_val_if_fail (GST_IS_BUFFER (buf), GST_FLOW_ERROR); if (object->fd_dvr < 0) return GST_FLOW_ERROR; gst_buffer_map (buf, &map, GST_MAP_WRITE); while (count < size) { ret_val = gst_poll_wait (object->poll, timeout); GST_LOG_OBJECT (object, "select returned %d", ret_val); if (G_UNLIKELY (ret_val < 0)) { if (errno == EBUSY) goto stopped; else if (errno == EINTR) continue; else goto select_error; } else if (G_UNLIKELY (ret_val == 0)) { /* timeout, post element message */ gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element (GST_OBJECT (object), gst_structure_new_empty ("dvb-read-failure"))); } else { int nread = read (object->fd_dvr, map.data + count, size - count); if (G_UNLIKELY (nread < 0)) { GST_WARNING_OBJECT (object, "Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)", object->adapter_number, object->frontend_number, errno); gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element (GST_OBJECT (object), gst_structure_new_empty ("dvb-read-failure"))); } else count = count + nread; } } gst_buffer_unmap (buf, &map); gst_buffer_resize (buf, 0, count); *buffer = buf; return GST_FLOW_OK; stopped: { GST_DEBUG_OBJECT (object, "stop called"); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_FLUSHING; } select_error: { GST_ELEMENT_ERROR (object, RESOURCE, READ, (NULL), ("select error %d: %s (%d)", ret_val, g_strerror (errno), errno)); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_ERROR; } }
/* Produce one captured-screen frame (GStreamer 0.10 push-source create
 * vfunc): allocate an output buffer, timestamp it against the element
 * clock, grab the screen contents into it and hand it out.
 *
 * push_src: the GstGDIScreenCapSrc instance
 * buf:      out parameter; receives the new buffer on GST_FLOW_OK
 *
 * Returns GST_FLOW_OK, GST_FLOW_NOT_NEGOTIATED when caps were never set,
 * GST_FLOW_UNEXPECTED (EOS) for a 0/1 framerate after the first frame, or
 * the pad-allocation failure code.
 *
 * Fix vs. previous revision: gst_object_unref (clock) was called
 * unconditionally although gst_element_get_clock() may return NULL (the
 * NULL case is explicitly handled when timestamping above); the unref is
 * now guarded.
 */
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  GstFlowReturn res;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime time = GST_CLOCK_TIME_NONE;
  GstClockTime base_time;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth
          || !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);
    return GST_FLOW_UNEXPECTED;
  }

  /* biHeight is negative for a top-down DIB, hence the negation; rows are
   * padded to 4-byte boundaries (24bpp RGB) */
  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image for frame %d",
      new_buf_size, (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight), (gint) src->frames);

  res = gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),
      GST_BUFFER_OFFSET_NONE, new_buf_size,
      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);
  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",
        gst_flow_get_name (res));
    return res;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock) {
    /* Calculate sync time. */
    GstClockTime frame_time =
        gst_util_uint64_scale_int (src->frames * GST_SECOND,
        src->rate_denominator, src->rate_numerator);

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);
  } else {
    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;
  }

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (src->rate_numerator) {
    GST_BUFFER_DURATION (new_buf) =
        gst_util_uint64_scale_int (GST_SECOND,
        src->rate_denominator, src->rate_numerator);
    if (clock) {
      /* never shorter than the wall-clock time the capture actually took */
      GST_BUFFER_DURATION (new_buf) =
          MAX (GST_BUFFER_DURATION (new_buf),
          gst_clock_get_time (clock) - time);
    }
  } else {
    /* NONE means forever */
    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_OFFSET (new_buf) = src->frames;
  src->frames++;
  GST_BUFFER_OFFSET_END (new_buf) = src->frames;

  /* clock may be NULL when the element has no clock (see above) */
  if (clock)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
/* * Read a new buffer from src->reqoffset, takes care of events * and seeking and such. */ static GstFlowReturn gst_gnome_vfs_src_create (GstBaseSrc * basesrc, guint64 offset, guint size, GstBuffer ** buffer) { GnomeVFSResult res; GstBuffer *buf; GnomeVFSFileSize readbytes; guint8 *data; guint todo; GstGnomeVFSSrc *src; src = GST_GNOME_VFS_SRC (basesrc); GST_DEBUG ("now at %" G_GINT64_FORMAT ", reading from %" G_GUINT64_FORMAT ", size %u", src->curoffset, offset, size); /* seek if required */ if (G_UNLIKELY (src->curoffset != offset)) { GST_DEBUG ("need to seek"); if (src->seekable) { GST_DEBUG ("seeking to %" G_GUINT64_FORMAT, offset); res = gnome_vfs_seek (src->handle, GNOME_VFS_SEEK_START, offset); if (res != GNOME_VFS_OK) goto seek_failed; src->curoffset = offset; } else { goto cannot_seek; } } buf = gst_buffer_try_new_and_alloc (size); if (G_UNLIKELY (buf == NULL && size == 0)) { GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", size); return GST_FLOW_ERROR; } data = GST_BUFFER_DATA (buf); todo = size; while (todo > 0) { /* this can return less that we ask for */ res = gnome_vfs_read (src->handle, data, todo, &readbytes); if (G_UNLIKELY (res == GNOME_VFS_ERROR_EOF || (res == GNOME_VFS_OK && readbytes == 0))) goto eos; if (G_UNLIKELY (res != GNOME_VFS_OK)) goto read_failed; if (readbytes < todo) { data = &data[readbytes]; todo -= readbytes; } else { todo = 0; } GST_LOG (" got size %" G_GUINT64_FORMAT, readbytes); } GST_BUFFER_OFFSET (buf) = src->curoffset; src->curoffset += size; /* we're done, return the buffer */ *buffer = buf; return GST_FLOW_OK; seek_failed: { GST_ELEMENT_ERROR (src, RESOURCE, SEEK, (NULL), ("Failed to seek to requested position %" G_GINT64_FORMAT ": %s", offset, gnome_vfs_result_to_string (res))); return GST_FLOW_ERROR; } cannot_seek: { GST_ELEMENT_ERROR (src, RESOURCE, SEEK, (NULL), ("Requested seek from %" G_GINT64_FORMAT " to %" G_GINT64_FORMAT " on non-seekable stream", src->curoffset, offset)); return GST_FLOW_ERROR; } 
read_failed: { gst_buffer_unref (buf); GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Failed to read data: %s", gnome_vfs_result_to_string (res))); return GST_FLOW_ERROR; } eos: { gst_buffer_unref (buf); GST_DEBUG_OBJECT (src, "Reading data gave EOS"); return GST_FLOW_UNEXPECTED; } }
/* Read the next file of the sequence into a GstBuffer (GStreamer 0.10
 * push-source create vfunc).
 *
 * src:    the GstMultiFileSrc instance
 * buffer: out parameter; receives the file contents on GST_FLOW_OK
 *
 * Returns GST_FLOW_OK, GST_FLOW_UNEXPECTED (EOS) when the next file of an
 * already-started sequence is missing (and looping is off or the restart
 * also fails), or GST_FLOW_ERROR when even the first file cannot be read.
 */
static GstFlowReturn
gst_multi_file_src_create (GstPushSrc * src, GstBuffer ** buffer)
{
  GstMultiFileSrc *multifilesrc;
  gsize size;
  gchar *data;
  gchar *filename;
  GstBuffer *buf;
  gboolean ret;
  GError *error = NULL;

  multifilesrc = GST_MULTI_FILE_SRC (src);

  /* clamp the index into the configured range */
  if (multifilesrc->index < multifilesrc->start_index) {
    multifilesrc->index = multifilesrc->start_index;
  }

  filename = gst_multi_file_src_get_filename (multifilesrc);

  GST_DEBUG_OBJECT (multifilesrc, "reading from file \"%s\".", filename);

  ret = g_file_get_contents (filename, &data, &size, &error);
  if (!ret) {
    if (multifilesrc->successful_read) {
      /* If we've read at least one buffer successfully, not finding the
       * next file is EOS. */
      g_free (filename);
      if (error != NULL)
        g_error_free (error);

      if (multifilesrc->loop) {
        /* loop mode: restart at start_index and try once more */
        error = NULL;
        multifilesrc->index = multifilesrc->start_index;

        filename = gst_multi_file_src_get_filename (multifilesrc);
        ret = g_file_get_contents (filename, &data, &size, &error);
        if (!ret) {
          g_free (filename);
          if (error != NULL)
            g_error_free (error);

          return GST_FLOW_UNEXPECTED;
        }
      } else {
        return GST_FLOW_UNEXPECTED;
      }
    } else {
      /* first read failed: report an error rather than EOS */
      goto handle_error;
    }
  }

  multifilesrc->successful_read = TRUE;
  multifilesrc->index++;

  /* wrap around when a stop index is configured and reached */
  if (multifilesrc->stop_index != -1 &&
      multifilesrc->index >= multifilesrc->stop_index) {
    multifilesrc->index = multifilesrc->start_index;
  }

  /* hand the malloc'd contents to the buffer zero-copy; MALLOCDATA makes
   * the buffer free it on destruction (0.10 API) */
  buf = gst_buffer_new ();
  GST_BUFFER_DATA (buf) = (unsigned char *) data;
  GST_BUFFER_MALLOCDATA (buf) = GST_BUFFER_DATA (buf);
  GST_BUFFER_SIZE (buf) = size;
  GST_BUFFER_OFFSET (buf) = multifilesrc->offset;
  GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;
  multifilesrc->offset += size;
  gst_buffer_set_caps (buf, multifilesrc->caps);

  GST_DEBUG_OBJECT (multifilesrc, "read file \"%s\".", filename);

  g_free (filename);
  *buffer = buf;
  return GST_FLOW_OK;

handle_error:
  {
    if (error != NULL) {
      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
          ("Error while reading from file \"%s\".", filename),
          ("%s", error->message));
      g_error_free (error);
    } else {
      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
          ("Error while reading from file \"%s\".", filename),
          ("%s", g_strerror (errno)));
    }
    g_free (filename);
    return GST_FLOW_ERROR;
  }
}
/* Configure libpng input transformations (8-bit RGB/RGBA output), derive
 * the output caps from the transformed PNG header and set them on the
 * source pad (GStreamer 0.10 video/x-raw-rgb).
 *
 * pngdec: the GstPngDec instance; pngdec->png/info must hold a parsed
 *         PNG info struct (png_read_info() already called)
 *
 * Returns GST_FLOW_OK, GST_FLOW_NOT_SUPPORTED for color types other than
 * RGB/RGBA after transformation, or GST_FLOW_NOT_NEGOTIATED when the peer
 * rejects the caps.
 */
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps = NULL, *res = NULL;
  GstPadTemplate *templ = NULL;
  gint bpc = 0, color_type;
  png_uint_32 width, height;

  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);

  /* Get bits per channel */
  bpc = png_get_bit_depth (pngdec->png, pngdec->info);

  /* We don't handle 16 bits per color, strip down to 8 */
  if (bpc == 16) {
    GST_LOG_OBJECT (pngdec,
        "this is a 16 bits per channel PNG image, strip down to 8 bits");
    png_set_strip_16 (pngdec->png);
  }

  /* Get Color type */
  color_type = png_get_color_type (pngdec->png, pngdec->info);

#if 0
  /* We used to have this HACK to reverse the outgoing bytes, but the problem
   * that originally required the hack seems to have been in ffmpegcolorspace's
   * RGBA descriptions. It doesn't seem needed now that's fixed, but might
   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
    png_set_bgr (pngdec->png);
#endif

  /* Gray scale converted to RGB and upscaled to 8 bits */
  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
      (color_type == PNG_COLOR_TYPE_GRAY)) {
    GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB");
    png_set_gray_to_rgb (pngdec->png);
    if (bpc < 8) {              /* Convert to 8 bits */
      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
      /* the expansion API was renamed in libpng 1.4 */
#if PNG_LIBPNG_VER < 10400
      png_set_gray_1_2_4_to_8 (pngdec->png);
#else
      png_set_expand_gray_1_2_4_to_8 (pngdec->png);
#endif
    }
  }

  /* Palette converted to RGB */
  if (color_type == PNG_COLOR_TYPE_PALETTE) {
    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
    png_set_palette_to_rgb (pngdec->png);
  }

  /* Update the info structure */
  png_read_update_info (pngdec->png, pngdec->info);

  /* Get IHDR header again after transformation settings */
  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
      &bpc, &pngdec->color_type, NULL, NULL, NULL);

  pngdec->width = width;
  pngdec->height = height;

  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width,
      pngdec->height);

  /* after the transformations above only RGB or RGBA can remain supported */
  switch (pngdec->color_type) {
    case PNG_COLOR_TYPE_RGB:
      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
      pngdec->bpp = 24;
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:
      GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");
      pngdec->bpp = 32;
      break;
    default:
      GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
          ("pngdec does not support this color type"));
      ret = GST_FLOW_NOT_SUPPORTED;
      goto beach;
  }

  caps = gst_caps_new_simple ("video/x-raw-rgb",
      "width", G_TYPE_INT, pngdec->width,
      "height", G_TYPE_INT, pngdec->height,
      "bpp", G_TYPE_INT, pngdec->bpp,
      "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);

  /* intersect with the pad template so we never advertise unsupported caps */
  templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);

  res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));

  gst_caps_unref (caps);
  gst_object_unref (templ);

  if (!gst_pad_set_caps (pngdec->srcpad, res))
    ret = GST_FLOW_NOT_NEGOTIATED;

  GST_DEBUG_OBJECT (pngdec, "our caps %" GST_PTR_FORMAT, res);

  gst_caps_unref (res);

  /* Push a newsegment event */
  if (pngdec->need_newsegment) {
    gst_pad_push_event (pngdec->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
    pngdec->need_newsegment = FALSE;
  }

beach:
  return ret;
}
static GstFlowReturn gst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send) { GstVideoCodecFrame *frame; GstFlowReturn flow_ret = GST_FLOW_OK; GstBuffer *outbuf; gint ret; AVPacket *pkt; int have_data = 0; GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send); /* no need to empty codec if there is none */ if (!ffmpegenc->opened) goto done; while ((frame = gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) { pkt = g_slice_new0 (AVPacket); have_data = 0; ret = avcodec_encode_video2 (ffmpegenc->context, pkt, NULL, &have_data); if (ret < 0) { /* there should be something, notify and give up */ #ifndef GST_DISABLE_GST_DEBUG GstFFMpegVidEncClass *oclass = (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc)); GST_WARNING_OBJECT (ffmpegenc, "avenc_%s: failed to flush buffer", oclass->in_plugin->name); #endif /* GST_DISABLE_GST_DEBUG */ g_slice_free (AVPacket, pkt); gst_video_codec_frame_unref (frame); break; } /* save stats info if there is some as well as a stats file */ if (ffmpegenc->file && ffmpegenc->context->stats_out) if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0) GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE, (("Could not write to file \"%s\"."), ffmpegenc->filename), GST_ERROR_SYSTEM); if (send && have_data) { outbuf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data, pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket); frame->output_buffer = outbuf; if (pkt->flags & AV_PKT_FLAG_KEY) GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); else GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame); flow_ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame); } else { /* no frame attached, so will be skipped and removed from frame list */ gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame); } } done: return flow_ret; }
/* Pull-mode task: decode the whole PNG in one go, push the raw RGB frame
 * and EOS, then pause the task (GStreamer 0.10).
 *
 * pad: the sink pad whose parent is the GstPngDec.
 *
 * libpng reports errors via longjmp, so the setjmp at the top is the
 * error-return point for every png_* call below it.
 */
static void
gst_pngdec_task (GstPad * pad)
{
  GstPngDec *pngdec;
  GstBuffer *buffer = NULL;
  size_t buffer_size = 0;
  gint i = 0;
  png_bytep *rows, inp;
  png_uint_32 rowbytes;
  GstFlowReturn ret = GST_FLOW_OK;

  pngdec = GST_PNGDEC (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (pngdec, "read frame");

  /* Let libpng come back here on error */
  /* NOTE(review): if libpng longjmps after `rows`/`buffer` are allocated
   * below, they are not freed on this path — possible leak, confirm */
  if (setjmp (png_jmpbuf (pngdec->png))) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  /* Set reading callback */
  png_set_read_fn (pngdec->png, pngdec, user_read_data);

  /* Read info */
  png_read_info (pngdec->png, pngdec->info);

  /* Generate the caps and configure */
  ret = gst_pngdec_caps_create_and_set (pngdec);
  if (ret != GST_FLOW_OK) {
    goto pause;
  }

  /* Allocate output buffer */
  rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
  /* guard against overflow of height * GST_ROUND_UP_4 (rowbytes) */
  if (rowbytes > (G_MAXUINT32 - 3) || pngdec->height > G_MAXUINT32 / rowbytes) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }
  rowbytes = GST_ROUND_UP_4 (rowbytes);
  buffer_size = pngdec->height * rowbytes;
  ret =
      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
      buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* build a row-pointer table pointing into the output buffer so libpng
   * decodes directly into it */
  rows = (png_bytep *) g_malloc (sizeof (png_bytep) * pngdec->height);

  inp = GST_BUFFER_DATA (buffer);

  for (i = 0; i < pngdec->height; i++) {
    rows[i] = inp;
    inp += rowbytes;
  }

  /* Read the actual picture */
  png_read_image (pngdec->png, rows);
  g_free (rows);

  /* Push the raw RGB frame */
  ret = gst_pad_push (pngdec->srcpad, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* And we are done */
  gst_pad_pause_task (pngdec->sinkpad);
  gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
  return;

pause:
  {
    GST_INFO_OBJECT (pngdec, "pausing task, reason %s",
        gst_flow_get_name (ret));
    gst_pad_pause_task (pngdec->sinkpad);
    if (ret == GST_FLOW_UNEXPECTED) {
      /* normal end of stream: just forward EOS */
      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
      /* fatal flow return: post an error before EOS */
      GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,
          (_("Internal data stream error.")),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    }
  }
}
/* Process pending events. Call with ->lock held */
/* Drains the SDL event queue, translating mouse/keyboard events into
 * GStreamer navigation events and handling quit/resize.  The lock is
 * dropped around the navigation/error calls (they may call back into the
 * element) and re-taken before the next iteration. */
static void
gst_sdlv_process_events (GstSDLVideoSink * sdlvideosink)
{
  SDL_Event event;
  int numevents;
  char *keysym = NULL;

  do {
    SDL_PumpEvents ();
    numevents = SDL_PeepEvents (&event, 1, SDL_GETEVENT,
        SDL_KEYDOWNMASK | SDL_KEYUPMASK |
        SDL_MOUSEMOTIONMASK | SDL_MOUSEBUTTONDOWNMASK |
        SDL_MOUSEBUTTONUPMASK | SDL_QUITMASK | SDL_VIDEORESIZEMASK);

    if (numevents > 0 && (event.type == SDL_KEYUP || event.type == SDL_KEYDOWN)) {
      /* fetched up-front while the event is fresh; used in both key cases */
      keysym = SDL_GetKeyName (event.key.keysym.sym);
    }

    if (numevents > 0) {
      /* drop the lock: the handlers below may re-enter the element */
      g_mutex_unlock (sdlvideosink->lock);
      switch (event.type) {
        case SDL_MOUSEMOTION:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-move", 0, event.motion.x, event.motion.y);
          break;
        case SDL_MOUSEBUTTONDOWN:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-press",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_MOUSEBUTTONUP:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-release",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_KEYUP:
          GST_DEBUG ("key press event %s !",
              SDL_GetKeyName (event.key.keysym.sym));
          gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
              "key-release", keysym);
          break;
        case SDL_KEYDOWN:
          if (SDLK_ESCAPE != event.key.keysym.sym) {
            GST_DEBUG ("key press event %s !",
                SDL_GetKeyName (event.key.keysym.sym));
            gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
                "key-press", keysym);
            break;
          } else {
            /* fall through */
            /* ESC while (presumably fullscreen) is treated as a quit */
          }
        case SDL_QUIT:
          sdlvideosink->running = FALSE;
          GST_ELEMENT_ERROR (sdlvideosink, RESOURCE, OPEN_WRITE,
              ("Video output device is gone."),
              ("We were running fullscreen and user "
                  "pressed the ESC key, stopping playback."));
          break;
        case SDL_VIDEORESIZE:
          /* create a SDL window of the size requested by the user */
          g_mutex_lock (sdlvideosink->lock);
          GST_VIDEO_SINK_WIDTH (sdlvideosink) = event.resize.w;
          GST_VIDEO_SINK_HEIGHT (sdlvideosink) = event.resize.h;
          gst_sdlvideosink_create (sdlvideosink);
          g_mutex_unlock (sdlvideosink->lock);
          break;
      }
      /* re-take the lock for the loop condition / caller's expectation */
      g_mutex_lock (sdlvideosink->lock);
    }
  } while (numevents > 0);
}
/* Negotiate and (re)open the libav encoder for new input caps: configure
 * the AVCodecContext from element properties and the input video info,
 * handle multi-pass stats files, open the codec, pick compatible output
 * caps and set the output state.
 *
 * encoder: the GstVideoEncoder instance
 * state:   the new input state (caps/info); a ref is kept in input_state
 *
 * Returns TRUE on success, FALSE on any negotiation/open failure (see the
 * ERRORS section at the bottom).
 */
static gboolean
gst_ffmpegvidenc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstCaps *other_caps;
  GstCaps *allowed_caps;
  GstCaps *icaps;
  GstVideoCodecState *output_format;
  enum PixelFormat pix_fmt;
  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
  GstFFMpegVidEncClass *oclass =
      (GstFFMpegVidEncClass *) G_OBJECT_GET_CLASS (ffmpegenc);

  /* close old session */
  if (ffmpegenc->opened) {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    ffmpegenc->opened = FALSE;
  }

  /* if we set it in _getcaps we should set it also in _link */
  ffmpegenc->context->strict_std_compliance = -1;

  /* user defined properties */
  ffmpegenc->context->bit_rate = ffmpegenc->bitrate;
  ffmpegenc->context->bit_rate_tolerance = ffmpegenc->bitrate;
  ffmpegenc->context->gop_size = ffmpegenc->gop_size;
  ffmpegenc->context->me_method = ffmpegenc->me_method;
  GST_DEBUG_OBJECT (ffmpegenc, "Setting avcontext to bitrate %d, gop_size %d",
      ffmpegenc->bitrate, ffmpegenc->gop_size);

  /* RTP payload used for GOB production (for Asterisk) */
  if (ffmpegenc->rtp_payload_size) {
    ffmpegenc->context->rtp_payload_size = ffmpegenc->rtp_payload_size;
  }

  /* additional avcodec settings */
  /* first fill in the majority by copying over */
  gst_ffmpeg_cfg_fill_context (ffmpegenc, ffmpegenc->context);

  /* then handle some special cases */
  /* lmin/lmax are lambda values: QP scaled by FF_QP2LAMBDA, rounded */
  ffmpegenc->context->lmin = (ffmpegenc->lmin * FF_QP2LAMBDA + 0.5);
  ffmpegenc->context->lmax = (ffmpegenc->lmax * FF_QP2LAMBDA + 0.5);

  if (ffmpegenc->interlaced) {
    ffmpegenc->context->flags |=
        CODEC_FLAG_INTERLACED_DCT | CODEC_FLAG_INTERLACED_ME;
    ffmpegenc->picture->interlaced_frame = TRUE;
    /* if this is not the case, a filter element should be used to swap fields */
    ffmpegenc->picture->top_field_first = TRUE;
  }

  /* some other defaults */
  ffmpegenc->context->rc_strategy = 2;
  ffmpegenc->context->b_frame_strategy = 0;
  ffmpegenc->context->coder_type = 0;
  ffmpegenc->context->context_model = 0;
  ffmpegenc->context->scenechange_threshold = 0;
  ffmpegenc->context->inter_threshold = 0;

  /* and last but not least the pass; CBR, 2-pass, etc */
  ffmpegenc->context->flags |= ffmpegenc->pass;
  switch (ffmpegenc->pass) {
      /* some additional action depends on type of pass */
    case CODEC_FLAG_QSCALE:
      ffmpegenc->context->global_quality
          = ffmpegenc->picture->quality = FF_QP2LAMBDA * ffmpegenc->quantizer;
      break;
    case CODEC_FLAG_PASS1:     /* need to prepare a stats file */
      /* we don't close when changing caps, fingers crossed */
      if (!ffmpegenc->file)
        ffmpegenc->file = g_fopen (ffmpegenc->filename, "w");
      if (!ffmpegenc->file)
        goto open_file_err;
      break;
    case CODEC_FLAG_PASS2:
    {                           /* need to read the whole stats file ! */
      gsize size;

      if (!g_file_get_contents (ffmpegenc->filename,
              &ffmpegenc->context->stats_in, &size, NULL))
        goto file_read_err;

      break;
    }
    default:
      break;
  }

  GST_DEBUG_OBJECT (ffmpegenc, "Extracting common video information");
  /* fetch pix_fmt, fps, par, width, height... */
  gst_ffmpeg_videoinfo_to_context (&state->info, ffmpegenc->context);

  if ((oclass->in_plugin->id == AV_CODEC_ID_MPEG4)
      && (ffmpegenc->context->time_base.den > 65535)) {
    /* MPEG4 Standards do not support time_base denominator greater than
     * (1<<16) - 1 . We therefore scale them down.
     * Agreed, it will not be the exact framerate... but the difference
     * shouldn't be that noticeable */
    ffmpegenc->context->time_base.num =
        (gint) gst_util_uint64_scale_int (ffmpegenc->context->time_base.num,
        65535, ffmpegenc->context->time_base.den);
    ffmpegenc->context->time_base.den = 65535;
    GST_LOG_OBJECT (ffmpegenc, "MPEG4 : scaled down framerate to %d / %d",
        ffmpegenc->context->time_base.den, ffmpegenc->context->time_base.num);
  }

  /* remembered to detect if the codec changes it behind our back (below) */
  pix_fmt = ffmpegenc->context->pix_fmt;

  /* max-key-interval may need the framerate set above */
  if (ffmpegenc->max_key_interval) {
    AVCodecContext *ctx;

    /* override gop-size */
    ctx = ffmpegenc->context;
    /* negative values mean "multiples of one second worth of frames" */
    ctx->gop_size = (ffmpegenc->max_key_interval < 0) ?
        (-ffmpegenc->max_key_interval
        * (ctx->time_base.den * ctx->ticks_per_frame / ctx->time_base.num))
        : ffmpegenc->max_key_interval;
  }

  /* open codec */
  if (gst_ffmpeg_avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0)
    goto open_codec_fail;

  /* second pass stats buffer no longer needed */
  /* NOTE(review): stats_in is not reset to NULL after the free — confirm
   * no later code frees it again */
  if (ffmpegenc->context->stats_in)
    g_free (ffmpegenc->context->stats_in);

  /* is the colourspace correct? */
  if (pix_fmt != ffmpegenc->context->pix_fmt)
    goto pix_fmt_err;

  /* we may have failed mapping caps to a pixfmt,
   * and quite some codecs do not make up their own mind about that
   * in any case, _NONE can never work out later on */
  if (pix_fmt == PIX_FMT_NONE)
    goto bad_input_fmt;

  /* some codecs support more than one format, first auto-choose one */
  GST_DEBUG_OBJECT (ffmpegenc, "picking an output format ...");
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps) {
    GST_DEBUG_OBJECT (ffmpegenc, "... but no peer, using template caps");
    /* we need to copy because get_allowed_caps returns a ref, and
     * get_pad_template_caps doesn't */
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  }
  GST_DEBUG_OBJECT (ffmpegenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,
      oclass->in_plugin->type, allowed_caps, ffmpegenc->context);

  /* try to set this caps on the other side */
  other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
      ffmpegenc->context, TRUE);

  if (!other_caps) {
    gst_caps_unref (allowed_caps);
    goto unsupported_codec;
  }

  icaps = gst_caps_intersect (allowed_caps, other_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (other_caps);
  if (gst_caps_is_empty (icaps)) {
    gst_caps_unref (icaps);
    return FALSE;
  }
  icaps = gst_caps_truncate (icaps);

  /* Store input state and set output state */
  if (ffmpegenc->input_state)
    gst_video_codec_state_unref (ffmpegenc->input_state);
  ffmpegenc->input_state = gst_video_codec_state_ref (state);

  output_format = gst_video_encoder_set_output_state (encoder, icaps, state);
  gst_video_codec_state_unref (output_format);

  /* success! */
  ffmpegenc->opened = TRUE;

  return TRUE;

  /* ERRORS */
open_file_err:
  {
    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, OPEN_WRITE,
        (("Could not open file \"%s\" for writing."), ffmpegenc->filename),
        GST_ERROR_SYSTEM);
    return FALSE;
  }
file_read_err:
  {
    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, READ,
        (("Could not get contents of file \"%s\"."), ffmpegenc->filename),
        GST_ERROR_SYSTEM);
    return FALSE;
  }
open_codec_fail:
  {
    if (ffmpegenc->context->priv_data)
      gst_ffmpeg_avcodec_close (ffmpegenc->context);
    if (ffmpegenc->context->stats_in)
      g_free (ffmpegenc->context->stats_in);
    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to open libav codec",
        oclass->in_plugin->name);
    return FALSE;
  }
pix_fmt_err:
  {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    GST_DEBUG_OBJECT (ffmpegenc,
        "avenc_%s: AV wants different colourspace (%d given, %d wanted)",
        oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
    return FALSE;
  }
bad_input_fmt:
  {
    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to determine input format",
        oclass->in_plugin->name);
    return FALSE;
  }
unsupported_codec:
  {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    GST_DEBUG ("Unsupported codec - no caps found");
    return FALSE;
  }
}
/* Must be called with the sdl lock held */
/* (Re)create the SDL video surface and YUV overlay at the currently
 * configured sink dimensions; destroys any previous surface first.
 * The lock is temporarily dropped around prepare_xwindow_id() since the
 * application may call back into the element from that signal.
 * Returns TRUE on success, FALSE when the screen or overlay cannot be
 * created. */
static gboolean
gst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink)
{
  /* fall back to the negotiated video size when no size was set yet */
  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)
    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;
  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)
    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;

  gst_sdlvideosink_destroy (sdlvideosink);

  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {
    /* ask the app for an xwindow id; must unlock, the signal may re-enter */
    g_mutex_unlock (sdlvideosink->lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));
    g_mutex_lock (sdlvideosink->lock);
  }

  /* create a SDL window of the size requested by the user */
  if (sdlvideosink->full_screen) {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_SWSURFACE | SDL_FULLSCREEN);
  } else {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_HWSURFACE | SDL_RESIZABLE);
  }
  if (sdlvideosink->screen == NULL)
    goto no_screen;

  /* create a new YUV overlay */
  /* the overlay keeps the media size; SDL scales it to the window rect */
  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,
      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);
  if (sdlvideosink->overlay == NULL)
    goto no_overlay;

  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d \'%"
      GST_FOURCC_FORMAT "\' YUV overlay", GST_VIDEO_SINK_WIDTH (sdlvideosink),
      GST_VIDEO_SINK_HEIGHT (sdlvideosink),
      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,
      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));

  sdlvideosink->rect.x = 0;
  sdlvideosink->rect.y = 0;
  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);

  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */

  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",
      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));

  return TRUE;

  /* ERRORS */
no_screen:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),
            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));
    return FALSE;
  }
no_overlay:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't create SDL YUV overlay (%dx%d \'%" GST_FOURCC_FORMAT
            "\'): %s", sdlvideosink->width, sdlvideosink->height,
            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));
    return FALSE;
  }
}
/* Drain any frames still buffered inside the ffmpeg encoder.
 * When @send is TRUE the flushed frames are pushed downstream,
 * otherwise they are simply discarded (frames are unreffed). */
static GstFlowReturn
gst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstBuffer *outbuf;
  gint ret_size;

  GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);

  /* no need to empty codec if there is none */
  if (!ffmpegenc->opened)
    goto done;

  /* Each loop iteration pairs one pending frame with one flushed
   * encoder output (encode called with a NULL input picture). */
  while ((frame =
          gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {
    ffmpegenc_setup_working_buf (ffmpegenc);

    ret_size = avcodec_encode_video (ffmpegenc->context,
        ffmpegenc->working_buf, ffmpegenc->working_buf_size, NULL);

    if (ret_size < 0) {
      /* there should be something, notify and give up */
#ifndef GST_DISABLE_GST_DEBUG
      GstFFMpegVidEncClass *oclass =
          (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
      GST_WARNING_OBJECT (ffmpegenc,
          "avenc_%s: failed to flush buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
      gst_video_codec_frame_unref (frame);
      break;
    }

    /* save stats info if there is some as well as a stats file */
    if (ffmpegenc->file && ffmpegenc->context->stats_out)
      if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
        GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
            (("Could not write to file \"%s\"."), ffmpegenc->filename),
            GST_ERROR_SYSTEM);

    if (send) {
      /* Hand the encoded bytes downstream in a freshly allocated
       * output buffer attached to the codec frame. */
      if (gst_video_encoder_allocate_output_frame (GST_VIDEO_ENCODER
              (ffmpegenc), frame, ret_size) != GST_FLOW_OK) {
#ifndef GST_DISABLE_GST_DEBUG
        GstFFMpegVidEncClass *oclass =
            (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
        GST_WARNING_OBJECT (ffmpegenc,
            "avenc_%s: failed to allocate buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
        gst_video_codec_frame_unref (frame);
        break;
      }
      outbuf = frame->output_buffer;
      gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);

      if (ffmpegenc->context->coded_frame->key_frame)
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

      flow_ret =
          gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc),
          frame);
    } else {
      /* discard path: drop the frame without pushing */
      gst_video_codec_frame_unref (frame);
    }
  }

done:
  return flow_ret;
}
/* Render one video buffer: copy the planes into the SDL YUV overlay
 * (respecting the overlay's pitches) and display it. Takes the element
 * lock for the duration of the call. */
static GstFlowReturn
gst_sdlvideosink_show_frame (GstBaseSink * bsink, GstBuffer * buf)
{
  GstSDLVideoSink *sdlvideosink;

  sdlvideosink = GST_SDLVIDEOSINK (bsink);

  g_mutex_lock (sdlvideosink->lock);
  if (!sdlvideosink->init ||
      !sdlvideosink->overlay || !sdlvideosink->overlay->pixels)
    goto not_init;

  /* if (GST_BUFFER_DATA (buf) != sdlvideosink->overlay->pixels[0]) */
  if (TRUE) {
    guint8 *out;
    gint l;

    if (!gst_sdlvideosink_lock (sdlvideosink))
      goto cannot_lock;

    /* buf->yuv - FIXME: bufferpool! */
    if (sdlvideosink->format == SDL_YV12_OVERLAY) {
      guint8 *y, *u, *v;

      switch (sdlvideosink->fourcc) {
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
          y = GST_BUFFER_DATA (buf);
          /* I420 is YV12 with switched colour planes and different offsets */
          v = y + I420_U_OFFSET (sdlvideosink->width, sdlvideosink->height);
          u = y + I420_V_OFFSET (sdlvideosink->width, sdlvideosink->height);
          break;
        case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
          y = GST_BUFFER_DATA (buf);
          u = y + I420_U_OFFSET (sdlvideosink->width, sdlvideosink->height);
          v = y + I420_V_OFFSET (sdlvideosink->width, sdlvideosink->height);
          break;
        default:
          /* unexpected fourcc: release both locks before bailing out */
          gst_sdlvideosink_unlock (sdlvideosink);
          g_mutex_unlock (sdlvideosink->lock);
          g_return_val_if_reached (GST_FLOW_ERROR);
      }

      /* Y Plane: copy row by row since overlay pitch may differ from
       * the source rowstride. */
      out = sdlvideosink->overlay->pixels[0];
      for (l = 0; l < sdlvideosink->height; l++) {
        memcpy (out, y, I420_Y_ROWSTRIDE (sdlvideosink->width));
        out += sdlvideosink->overlay->pitches[0];
        y += I420_Y_ROWSTRIDE (sdlvideosink->width);
      }

      /* U plane (chroma planes are half height) */
      out = sdlvideosink->overlay->pixels[1];
      for (l = 0; l < (sdlvideosink->height / 2); l++) {
        memcpy (out, u, I420_U_ROWSTRIDE (sdlvideosink->width));
        out += sdlvideosink->overlay->pitches[1];
        u += I420_U_ROWSTRIDE (sdlvideosink->width);
      }

      /* V plane */
      out = sdlvideosink->overlay->pixels[2];
      for (l = 0; l < (sdlvideosink->height / 2); l++) {
        memcpy (out, v, I420_V_ROWSTRIDE (sdlvideosink->width));
        out += sdlvideosink->overlay->pitches[2];
        v += I420_V_ROWSTRIDE (sdlvideosink->width);
      }
    } else {
      /* packed format path; stride of width*2 suggests a 16bpp packed
       * YUV layout such as YUY2/UYVY — confirm against caps handling */
      guint8 *in = GST_BUFFER_DATA (buf);
      gint in_stride = sdlvideosink->width * 2;

      out = sdlvideosink->overlay->pixels[0];
      for (l = 0; l < sdlvideosink->height; l++) {
        memcpy (out, in, in_stride);
        out += sdlvideosink->overlay->pitches[0];
        in += in_stride;
      }
    }
    gst_sdlvideosink_unlock (sdlvideosink);
  }

  /* Show, baby, show! */
  SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect));

  /* Handle any resize */
  gst_sdlv_process_events (sdlvideosink);

  g_mutex_unlock (sdlvideosink->lock);

  return GST_FLOW_OK;

  /* ERRORS */
not_init:
  {
    GST_ELEMENT_ERROR (sdlvideosink, CORE, NEGOTIATION, (NULL),
        ("not negotiated."));
    g_mutex_unlock (sdlvideosink->lock);
    return GST_FLOW_NOT_NEGOTIATED;
  }
cannot_lock:
  {
    /* lock function posted detailed message */
    g_mutex_unlock (sdlvideosink->lock);
    return GST_FLOW_ERROR;
  }
}
/* Read and validate the EBML header at the start of the stream.
 * On success, optionally returns the DocType string (caller owns it,
 * g_free) in @doctype and the DocTypeReadVersion in @version. */
GstFlowReturn
gst_ebml_read_header (GstEbmlRead * ebml, gchar ** doctype, guint * version)
{
  /* this function is the first to be called */
  guint32 id;
  guint level_up;
  GstFlowReturn ret;

  /* default init */
  if (doctype)
    *doctype = NULL;
  if (version)
    *version = 1;

  ret = gst_ebml_peek_id (ebml, &level_up, &id);
  if (ret != GST_FLOW_OK)
    return ret;

  GST_DEBUG_OBJECT (ebml, "id: %08x", GST_READ_UINT32_BE (&id));

  /* the stream must start with the EBML header master element */
  if (level_up != 0 || id != GST_EBML_ID_HEADER) {
    GST_ELEMENT_ERROR (ebml, STREAM, WRONG_TYPE, (NULL), (NULL));
    return GST_FLOW_ERROR;
  }
  ret = gst_ebml_read_master (ebml, &id);
  if (ret != GST_FLOW_OK)
    return ret;

  while (TRUE) {
    ret = gst_ebml_peek_id (ebml, &level_up, &id);
    if (ret != GST_FLOW_OK)
      return ret;

    /* end-of-header */
    if (level_up)
      break;

    switch (id) {
        /* is our read version uptodate? */
      case GST_EBML_ID_EBMLREADVERSION:{
        guint64 num;

        ret = gst_ebml_read_uint (ebml, &id, &num);
        if (ret != GST_FLOW_OK)
          return ret;
        g_assert (id == GST_EBML_ID_EBMLREADVERSION);
        if (num != GST_EBML_VERSION) {
          GST_ELEMENT_ERROR (ebml, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }
        GST_DEBUG_OBJECT (ebml, "EbmlReadVersion: %" G_GUINT64_FORMAT, num);
        break;
      }

        /* we only handle 8 byte lengths at max */
      case GST_EBML_ID_EBMLMAXSIZELENGTH:{
        guint64 num;

        ret = gst_ebml_read_uint (ebml, &id, &num);
        if (ret != GST_FLOW_OK)
          return ret;
        g_assert (id == GST_EBML_ID_EBMLMAXSIZELENGTH);
        if (num > sizeof (guint64)) {
          GST_ELEMENT_ERROR (ebml, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }
        GST_DEBUG_OBJECT (ebml, "EbmlMaxSizeLength: %" G_GUINT64_FORMAT, num);
        break;
      }

        /* we handle 4 byte IDs at max */
      case GST_EBML_ID_EBMLMAXIDLENGTH:{
        guint64 num;

        ret = gst_ebml_read_uint (ebml, &id, &num);
        if (ret != GST_FLOW_OK)
          return ret;
        g_assert (id == GST_EBML_ID_EBMLMAXIDLENGTH);
        if (num > sizeof (guint32)) {
          GST_ELEMENT_ERROR (ebml, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }
        GST_DEBUG_OBJECT (ebml, "EbmlMaxIdLength: %" G_GUINT64_FORMAT, num);
        break;
      }

      case GST_EBML_ID_DOCTYPE:{
        gchar *text;

        ret = gst_ebml_read_ascii (ebml, &id, &text);
        if (ret != GST_FLOW_OK)
          return ret;
        g_assert (id == GST_EBML_ID_DOCTYPE);

        GST_DEBUG_OBJECT (ebml, "EbmlDocType: %s", GST_STR_NULL (text));

        /* ownership of @text transfers to the caller via *doctype;
         * a previous value (from a repeated DocType element) is freed */
        if (doctype) {
          g_free (*doctype);
          *doctype = text;
        } else
          g_free (text);
        break;
      }

      case GST_EBML_ID_DOCTYPEREADVERSION:{
        guint64 num;

        ret = gst_ebml_read_uint (ebml, &id, &num);
        if (ret != GST_FLOW_OK)
          return ret;
        g_assert (id == GST_EBML_ID_DOCTYPEREADVERSION);
        if (version)
          *version = num;
        /* NOTE(review): log label says "EbmlReadVersion" but this is the
         * DocTypeReadVersion element — looks like a copy-paste in the
         * debug string; runtime text left untouched here */
        GST_DEBUG_OBJECT (ebml, "EbmlReadVersion: %" G_GUINT64_FORMAT, num);
        break;
      }

      default:
        GST_WARNING_OBJECT (ebml,
            "Unknown data type 0x%x in EBML header (ignored)", id);
        /* pass-through */

        /* we ignore these two, as they don't tell us anything we care about */
      case GST_EBML_ID_EBMLVERSION:
      case GST_EBML_ID_DOCTYPEVERSION:
        /* intentional fallthrough from default: unknown and don't-care
         * elements are skipped the same way */
        ret = gst_ebml_read_skip (ebml);
        if (ret != GST_FLOW_OK)
          return ret;
        break;
    }
  }

  return GST_FLOW_OK;
}
/* Accumulate incoming raw audio and emit one 32-byte (max) AMR-NB frame
 * for every 320 bytes (160 16-bit samples, i.e. 20ms at 8kHz) collected. */
static GstFlowReturn
gst_amrnbenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstAmrnbEnc *enc;
  GstFlowReturn ret = GST_FLOW_OK;

  enc = GST_AMRNBENC (GST_PAD_PARENT (pad));

  g_return_val_if_fail (enc->handle, GST_FLOW_WRONG_STATE);

  /* caps must have been negotiated before any data flows */
  if (enc->rate == 0 || enc->channels == 0) {
    GST_ELEMENT_ERROR (enc, STREAM, TYPE_NOT_FOUND, (NULL), ("unknown type"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  /* discontinuity clears adapter, FIXME, maybe we can set some
   * encoder flag to mask the discont. */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (enc->adapter);
    enc->ts = 0;
  }

  /* take latest timestamp, FIXME timestamp is the one of the
   * first buffer in the adapter. */
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    enc->ts = GST_BUFFER_TIMESTAMP (buffer);

  gst_adapter_push (enc->adapter, buffer);

  /* Collect samples until we have enough for an output frame */
  while (gst_adapter_available (enc->adapter) >= 320) {
    GstBuffer *frame;
    guint8 *samples;
    gint encoded;

    /* get output, max size is 32 */
    frame = gst_buffer_new_and_alloc (32);
    GST_BUFFER_DURATION (frame) = enc->duration;
    GST_BUFFER_TIMESTAMP (frame) = enc->ts;
    if (enc->ts != -1)
      enc->ts += enc->duration;
    gst_buffer_set_caps (frame, GST_PAD_CAPS (enc->srcpad));

    /* The AMR encoder actually writes into the source data buffers it gets */
    samples = gst_adapter_take (enc->adapter, 320);

    /* encode */
    encoded = Encoder_Interface_Encode (enc->handle, enc->bandmode,
        (short *) samples, (guint8 *) GST_BUFFER_DATA (frame), 0);
    g_free (samples);

    GST_BUFFER_SIZE (frame) = encoded;

    /* push downstream; stop on the first non-OK flow return */
    if ((ret = gst_pad_push (enc->srcpad, frame)) != GST_FLOW_OK)
      break;
  }

  return ret;
}
/* Payload one CELT/Ogg packet into the RTP queue. Packets 0 (ident) and
 * 1 (comment) are consumed for stream setup; all later packets are
 * queued and flushed when the RTP packet would overflow size/duration. */
static GstFlowReturn
gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstFlowReturn ret;
  GstRtpCELTPay *rtpceltpay;
  gsize payload_len;
  GstMapInfo map;
  GstClockTime duration, packet_dur;
  guint i, ssize, packet_len;

  rtpceltpay = GST_RTP_CELT_PAY (basepayload);

  ret = GST_FLOW_OK;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (rtpceltpay->packet) {
    case 0:
      /* ident packet. We need to parse the headers to construct the RTP
       * properties. */
      if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))
        goto parse_error;

      goto cleanup;
    case 1:
      /* comment packet, we ignore it */
      goto cleanup;
    default:
      /* other packets go in the payload */
      break;
  }
  /* unmap here; map is a plain struct copy, so reading map.size below
   * (not map.data) after the unmap is still safe */
  gst_buffer_unmap (buffer, &map);

  duration = GST_BUFFER_DURATION (buffer);

  GST_LOG_OBJECT (rtpceltpay,
      "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,
      GST_TIME_ARGS (duration), map.size);

  /* calculate the size of the size field and the payload */
  /* the size prefix uses one 0xff byte per 255 bytes of payload plus a
   * final byte, per the CELT RTP framing */
  ssize = 1;
  for (i = map.size; i > 0xff; i -= 0xff)
    ssize++;
  GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);

  /* calculate what the new size and duration would be of the packet */
  payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;
  if (rtpceltpay->qduration != -1 && duration != -1)
    packet_dur = rtpceltpay->qduration + duration;
  else
    packet_dur = 0;

  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);

  if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {
    /* size or duration would overflow the packet, flush the queued data */
    ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);
  }

  /* queue the packet */
  gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);

done:
  rtpceltpay->packet++;

  return ret;

  /* ERRORS */
cleanup:
  {
    /* header packets: release the mapping and count the packet */
    gst_buffer_unmap (buffer, &map);
    goto done;
  }
parse_error:
  {
    GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
        ("Error parsing first identification packet."));
    gst_buffer_unmap (buffer, &map);
    return GST_FLOW_ERROR;
  }
}
/* Task function: pull a randomly sized chunk from the sink pad and push
 * it downstream; pauses the task on EOS or fatal flow returns. */
static void
gst_rnd_buffer_size_loop (GstRndBufferSize * self)
{
  GstBuffer *buf = NULL;
  GstFlowReturn ret;
  guint num_bytes;

  /* min must not exceed max for g_rand_int_range below */
  if (G_UNLIKELY (self->min > self->max))
    goto bogus_minmax;

  if (G_UNLIKELY (self->min != self->max)) {
    num_bytes = g_rand_int_range (self->rand, self->min, self->max);
  } else {
    num_bytes = self->min;
  }

  GST_LOG_OBJECT (self, "pulling %u bytes at offset %" G_GUINT64_FORMAT,
      num_bytes, self->offset);

  ret = gst_pad_pull_range (self->sinkpad, self->offset, num_bytes, &buf);

  if (ret != GST_FLOW_OK)
    goto pull_failed;

  /* upstream may legitimately return less than requested near EOS */
  if (GST_BUFFER_SIZE (buf) < num_bytes) {
    GST_WARNING_OBJECT (self, "short buffer: %u bytes", GST_BUFFER_SIZE (buf));
  }

  self->offset += GST_BUFFER_SIZE (buf);

  ret = gst_pad_push (self->srcpad, buf);

  if (ret != GST_FLOW_OK)
    goto push_failed;

  return;

pause_task:
  {
    GST_DEBUG_OBJECT (self, "pausing task");
    gst_pad_pause_task (self->sinkpad);
    return;
  }

pull_failed:
  {
    if (ret == GST_FLOW_UNEXPECTED) {
      GST_DEBUG_OBJECT (self, "eos");
      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
    } else {
      GST_WARNING_OBJECT (self, "pull_range flow: %s",
          gst_flow_get_name (ret));
    }
    goto pause_task;
  }

push_failed:
  {
    GST_DEBUG_OBJECT (self, "push flow: %s", gst_flow_get_name (ret));
    if (ret == GST_FLOW_UNEXPECTED) {
      GST_DEBUG_OBJECT (self, "eos");
      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
    } else if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."),
          ("streaming stopped, reason: %s", gst_flow_get_name (ret)));
    }
    goto pause_task;
  }

bogus_minmax:
  {
    /* FIX: this branch fires when min > max, but the old message claimed
     * the minimum was *smaller* than the maximum — inverted wording */
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,
        ("The minimum buffer size is greater than the maximum buffer size."),
        ("buffer sizes: max=%ld, min=%ld", self->min, self->max));
    goto pause_task;
  }
}
/* Configure and initialize the DivX encoder instance from the element's
 * current properties. Returns TRUE on success; on failure posts an
 * element error and returns FALSE. */
static gboolean
gst_divxenc_setup (GstDivxEnc * divxenc)
{
  void *handle = NULL;
  SETTINGS output;
  DivXBitmapInfoHeader input;
  int ret;

  /* set it up: describe the raw input frames to the encoder */
  memset (&input, 0, sizeof (DivXBitmapInfoHeader));
  input.biSize = sizeof (DivXBitmapInfoHeader);
  input.biWidth = divxenc->width;
  input.biHeight = divxenc->height;
  input.biBitCount = divxenc->bitcnt;
  input.biCompression = divxenc->csp;

  memset (&output, 0, sizeof (SETTINGS));
  output.vbr_mode = RCMODE_VBV_1PASS;
  output.bitrate = divxenc->bitrate;
  output.quantizer = 0;
  output.use_bidirect = 1;
  output.input_clock = 0;
  output.input_frame_period = 1000000;
  /* FIX: multiply before dividing (in 64-bit) so fractional framerates
   * such as 30000/1001 don't get truncated by integer division; the old
   * (fps_n / fps_d) * 1000000 gave 29000000 instead of ~29970029 */
  output.internal_timescale =
      ((gint64) divxenc->fps_n * 1000000) / divxenc->fps_d;
  output.max_key_interval = (divxenc->max_key_interval == -1) ?
      150 : divxenc->max_key_interval;
  output.key_frame_threshold = 50;
  output.vbv_bitrate = 0;
  output.vbv_size = 0;
  output.vbv_occupancy = 0;
  output.complexity_modulation = 0;
  output.deinterlace = 0;
  output.quality = divxenc->quality;
  output.data_partitioning = 0;
  output.quarter_pel = 1;
  output.use_gmc = 1;
  output.psychovisual = 0;
  output.pv_strength_frame = 0;
  output.pv_strength_MB = 0;
  output.interlace_mode = 0;
  output.enable_crop = 0;
  output.enable_resize = 0;
  output.temporal_enable = 1;
  output.spatial_passes = 3;
  output.spatial_level = 1.0;
  output.temporal_level = 1.0;

  if ((ret = encore (&handle, ENC_OPT_INIT, &input, &output))) {
    GST_ELEMENT_ERROR (divxenc, LIBRARY, SETTINGS, (NULL),
        ("Error setting up divx encoder: %s (%d)",
            gst_divxenc_error (ret), ret));
    return FALSE;
  }

  divxenc->handle = handle;

  /* set buffer size to theoretical limit (see docs on divx.com) */
  divxenc->buffer_size = 6 * divxenc->width * divxenc->height;

  return TRUE;
}
/* Decode one Daala data packet into an image and hand it to
 * daala_handle_image() for output. @frame may be NULL (header-less
 * trailing data); QoS-late frames are dropped before decoding output. */
static GstFlowReturn
daala_handle_data_packet (GstDaalaDec * dec, ogg_packet * packet,
    GstVideoCodecFrame * frame)
{
  /* normal data packet */
  od_img img;
  gboolean keyframe;
  GstFlowReturn result;

  if (G_UNLIKELY (!dec->have_header))
    goto not_initialized;

  /* the second most significant bit of the first data byte is cleared
   * for keyframes. We can only check it if it's not a zero-length packet.
   * NOTE(review): the comment above says the bit is *cleared* for
   * keyframes, yet the test below treats a *set* 0x40 bit as keyframe —
   * one of the two is wrong; confirm against the Daala bitstream spec */
  keyframe = packet->bytes && ((packet->packet[0] & 0x40));
  if (G_UNLIKELY (keyframe)) {
    GST_DEBUG_OBJECT (dec, "we have a keyframe");
    dec->need_keyframe = FALSE;
  } else if (G_UNLIKELY (dec->need_keyframe)) {
    goto dropping;
  }

  GST_DEBUG_OBJECT (dec, "parsing data packet");

  /* this does the decoding */
  if (G_UNLIKELY (daala_decode_packet_in (dec->decoder, &img, packet) < 0))
    goto decode_error;

  /* drop frames that are already too late for display (QoS) */
  if (frame &&
      (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (dec),
              frame) < 0))
    goto dropping_qos;

  /* the decoded image must match the negotiated picture size */
  if (G_UNLIKELY ((img.width != dec->info.pic_width
              || img.height != dec->info.pic_height)))
    goto wrong_dimensions;

  result = daala_handle_image (dec, &img, frame);

  return result;

  /* ERRORS */
not_initialized:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("no header sent yet"));
    return GST_FLOW_ERROR;
  }
dropping:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");
    return GST_CUSTOM_FLOW_DROP;
  }
dropping_qos:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");
    return GST_CUSTOM_FLOW_DROP;
  }
decode_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("daala decoder did not decode data packet"));
    return GST_FLOW_ERROR;
  }
wrong_dimensions:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,
        (NULL), ("dimensions of image do not match header"));
    return GST_FLOW_ERROR;
  }
}
/* Receive one UDP packet and return it as a GstBuffer. Uses a two-chunk
 * scatter read (udpsrc->mem / udpsrc->mem_max via udpsrc->vec) so that a
 * packet larger than the first chunk still fits without truncation. */
static GstFlowReturn
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstUDPSrc *udpsrc;
  GstBuffer *outbuf = NULL;
  GSocketAddress *saddr = NULL;
  gint flags = G_SOCKET_MSG_NONE;
  gboolean try_again;
  GError *err = NULL;
  gssize res;
  gsize offset;

  udpsrc = GST_UDPSRC_CAST (psrc);

  /* make sure the receive chunks/vectors are allocated and mapped */
  if (!gst_udpsrc_ensure_mem (udpsrc))
    goto memory_alloc_error;

retry:
  /* Wait for data (or timeout). A timeout posts an element message and
   * retries; cancellation/busy aborts. */
  do {
    gint64 timeout;

    try_again = FALSE;

    if (udpsrc->timeout)
      timeout = udpsrc->timeout / 1000;
    else
      timeout = -1;

    GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GINT64_FORMAT,
        timeout);

    if (!g_socket_condition_timed_wait (udpsrc->used_socket,
            G_IO_IN | G_IO_PRI, timeout, udpsrc->cancellable, &err)) {
      if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
          || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
        goto stopped;
      } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) {
        g_clear_error (&err);
        /* timeout, post element message */
        gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
            gst_message_new_element (GST_OBJECT_CAST (udpsrc),
                gst_structure_new ("GstUDPSrcTimeout",
                    "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
      } else {
        goto select_error;
      }

      try_again = TRUE;
    }
  } while (G_UNLIKELY (try_again));

  /* drop any stale sender address from a previous (retried) read */
  if (saddr != NULL) {
    g_object_unref (saddr);
    saddr = NULL;
  }

  res =
      g_socket_receive_message (udpsrc->used_socket, &saddr, udpsrc->vec, 2,
      NULL, NULL, &flags, udpsrc->cancellable, &err);

  if (G_UNLIKELY (res < 0)) {
    /* EHOSTUNREACH for a UDP socket means that a packet sent with udpsink
     * generated a "port unreachable" ICMP response. We ignore that and try
     * again. */
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE)) {
      g_clear_error (&err);
      goto retry;
    }
    goto receive_error;
  }

  /* remember maximum packet size */
  if (res > udpsrc->max_size)
    udpsrc->max_size = res;

  outbuf = gst_buffer_new ();

  /* append first memory chunk to buffer */
  gst_buffer_append_memory (outbuf, udpsrc->mem);

  /* if the packet didn't fit into the first chunk, add second one as well */
  if (res > udpsrc->map.size) {
    gst_buffer_append_memory (outbuf, udpsrc->mem_max);
    gst_memory_unmap (udpsrc->mem_max, &udpsrc->map_max);
    udpsrc->vec[1].buffer = NULL;
    udpsrc->vec[1].size = 0;
    udpsrc->mem_max = NULL;
  }

  /* make sure we allocate a new chunk next time (we do this only here because
   * we look at map.size to see if the second memory chunk is needed above) */
  gst_memory_unmap (udpsrc->mem, &udpsrc->map);
  udpsrc->vec[0].buffer = NULL;
  udpsrc->vec[0].size = 0;
  udpsrc->mem = NULL;

  /* optionally strip a configured header prefix off the packet */
  offset = udpsrc->skip_first_bytes;

  if (G_UNLIKELY (offset > 0 && res < offset))
    goto skip_error;

  gst_buffer_resize (outbuf, offset, res - offset);

  /* use buffer metadata so receivers can also track the address */
  if (saddr) {
    gst_buffer_add_net_address_meta (outbuf, saddr);
    g_object_unref (saddr);
    saddr = NULL;
  }

  GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res);

  *buf = GST_BUFFER_CAST (outbuf);

  return GST_FLOW_OK;

  /* ERRORS */
memory_alloc_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("Failed to allocate or map memory"));
    return GST_FLOW_ERROR;
  }
select_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("select error: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
stopped:
  {
    GST_DEBUG ("stop called");
    g_clear_error (&err);
    return GST_FLOW_FLUSHING;
  }
receive_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      g_clear_error (&err);
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));
      g_clear_error (&err);
      return GST_FLOW_ERROR;
    }
  }
skip_error:
  {
    gst_buffer_unref (outbuf);

    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),
        ("UDP buffer to small to skip header"));
    return GST_FLOW_ERROR;
  }
}
/* Feed one input buffer (may be NULL for draining) into mpg123 and push
 * any decoded output downstream, translating mpg123 status codes into
 * GstFlowReturn values. */
static GstFlowReturn
gst_mpg123_audio_dec_handle_frame (GstAudioDecoder * dec,
    GstBuffer * input_buffer)
{
  GstMpg123AudioDec *mpg123_decoder;
  int decode_error;
  unsigned char *decoded_bytes;
  size_t num_decoded_bytes;
  GstFlowReturn retval;

  mpg123_decoder = GST_MPG123_AUDIO_DEC (dec);

  g_assert (mpg123_decoder->handle != NULL);

  /* The actual decoding */
  {
    /* feed input data (if there is any) */
    if (G_LIKELY (input_buffer != NULL)) {
      GstMapInfo info;

      if (gst_buffer_map (input_buffer, &info, GST_MAP_READ)) {
        mpg123_feed (mpg123_decoder->handle, info.data, info.size);
        gst_buffer_unmap (input_buffer, &info);
      } else {
        GST_ERROR_OBJECT (mpg123_decoder, "gst_memory_map() failed");
        return GST_FLOW_ERROR;
      }
    }

    /* Try to decode a frame */
    decoded_bytes = NULL;
    num_decoded_bytes = 0;
    decode_error = mpg123_decode_frame (mpg123_decoder->handle,
        &mpg123_decoder->frame_offset, &decoded_bytes, &num_decoded_bytes);
  }

  retval = GST_FLOW_OK;

  switch (decode_error) {
    case MPG123_NEW_FORMAT:
      /* As mentioned in gst_mpg123_audio_dec_set_format(), the next audioinfo
       * is not set immediately; instead, the code waits for mpg123 to take
       * note of the new format, and then sets the audioinfo. This fixes glitches
       * with mp3s containing several format headers (for example, first half
       * using 44.1kHz, second half 32 kHz) */

      GST_LOG_OBJECT (dec,
          "mpg123 reported a new format -> setting next srccaps");

      /* push any bytes decoded alongside the format notification first */
      gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder, decoded_bytes,
          num_decoded_bytes);

      /* If there is a next audioinfo, use it, then set has_next_audioinfo to
       * FALSE, to make sure gst_audio_decoder_set_output_format() isn't called
       * again until set_format is called by the base class */
      if (mpg123_decoder->has_next_audioinfo) {
        if (!gst_audio_decoder_set_output_format (dec,
                &(mpg123_decoder->next_audioinfo))) {
          GST_WARNING_OBJECT (dec, "Unable to set output format");
          retval = GST_FLOW_NOT_NEGOTIATED;
        }
        mpg123_decoder->has_next_audioinfo = FALSE;
      }

      break;

    case MPG123_NEED_MORE:
    case MPG123_OK:
      /* normal path: forward whatever was decoded (possibly nothing) */
      retval = gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder,
          decoded_bytes, num_decoded_bytes);
      break;

    case MPG123_DONE:
      /* If this happens, then the upstream parser somehow missed the ending
       * of the bitstream */
      GST_LOG_OBJECT (dec, "mpg123 is done decoding");
      gst_mpg123_audio_dec_push_decoded_bytes (mpg123_decoder, decoded_bytes,
          num_decoded_bytes);
      retval = GST_FLOW_EOS;
      break;

    default:
    {
      /* Anything else is considered an error */
      int errcode;

      /* MPG123_ERR means the handle carries the real error code;
       * other values are themselves the error code */
      switch (decode_error) {
        case MPG123_ERR:
          errcode = mpg123_errcode (mpg123_decoder->handle);
          break;
        default:
          errcode = decode_error;
      }
      switch (errcode) {
        case MPG123_BAD_OUTFORMAT:{
          GstCaps *input_caps =
              gst_pad_get_current_caps (GST_AUDIO_DECODER_SINK_PAD (dec));
          GST_ELEMENT_ERROR (dec, STREAM, FORMAT, (NULL),
              ("Output sample format could not be used when trying to decode frame. "
                  "This is typically caused when the input caps (often the sample "
                  "rate) do not match the actual format of the audio data. "
                  "Input caps: %" GST_PTR_FORMAT, input_caps)
              );
          gst_caps_unref (input_caps);
          break;
        }
        default:{
          char const *errmsg = mpg123_plain_strerror (errcode);
          GST_ERROR_OBJECT (dec, "Reported error: %s", errmsg);
        }
      }

      retval = GST_FLOW_ERROR;
    }
  }

  return retval;
}