Code Example #1
File: pipeline.cpp  Project: AmineYaiche/toonloop
/**
 * Called every time there is a message on the GStreamer pipeline's bus.
 *
 * We are mostly interested in the new pixbuf message.
 * In that case, it checks whether video recording or the intervalometer is enabled.
 * If so, grabs an image if it's time to do so.
 */
void Pipeline::bus_message_cb(GstBus* /*bus*/, GstMessage *msg, gpointer user_data)
{
    Pipeline *context = static_cast<Pipeline*>(user_data);
    bool verbose = context->owner_->get_configuration()->get_verbose();
    switch (GST_MESSAGE_TYPE (msg)) 
    {
    case GST_MESSAGE_ELEMENT:
    {
        const GValue *val;
        GdkPixbuf *pixbuf = NULL;
  
        /* only interested in element messages from our gdkpixbufsink */
        if (msg->src != GST_OBJECT_CAST(context->gdkpixbufsink_))
            break;
  
        /* only interested in these two messages */
        if (!gst_structure_has_name(msg->structure, "preroll-pixbuf") &&
                !gst_structure_has_name(msg->structure, "pixbuf")) 
        {
            break;
        }
  
        //g_print("pixbuf\n");
        val = gst_structure_get_value(msg->structure, "pixbuf");
        g_return_if_fail(val != NULL);
  
        pixbuf = GDK_PIXBUF(g_value_dup_object(val));
        if (context->get_record_all_frames() || context->get_intervalometer_is_on()) // if video grabbing is enabled
        {
            Clip *current_clip = context->owner_->get_current_clip();
            unsigned long last_time_grabbed = current_clip->get_last_time_grabbed_image();
            unsigned long now = timing::get_timestamp_now();
            bool must_grab_now = false;
            // VIDEO RECORDING:
            if (context->get_record_all_frames())
            {
                //std::cout << "Video grabbing is on." << std::endl; 
                unsigned long time_between_frames = (unsigned long)(1.0f / float(current_clip->get_playhead_fps()) * timing::TIMESTAMP_PRECISION);
                if (verbose)
                    std::cout << "now=" << now << " last_time_grabbed=" << last_time_grabbed << " time_between_frames" << time_between_frames << std::endl;
                if ((now - last_time_grabbed) > time_between_frames)
                {
                    must_grab_now = true;
                }
            } // not mutually exclusive - why not have both on?
            // INTERVALOMETER:
            if (context->get_intervalometer_is_on())
            {
                long time_between_intervalometer_ticks = long(current_clip->get_intervalometer_rate() * timing::TIMESTAMP_PRECISION);
                long passed = (now - last_time_grabbed);
                if (verbose)
                    std::cout << "time between intervalometer ticks: " << passed << "/" << time_between_intervalometer_ticks << std::endl;
                if (passed > time_between_intervalometer_ticks)
                {
                    if (verbose)
                        std::cout << "Interval has passed. Time to grab." << std::endl;
                    must_grab_now = true;
                }
            }
            if (must_grab_now)
            {
                if (verbose)
                    std::cout << "Grabbing an image" << std::endl;
                context->save_image_to_current_clip(pixbuf);
                current_clip->set_last_time_grabbed_image(now);
            }
        }
        g_object_unref(pixbuf);
        break;
    }
    case GST_MESSAGE_ERROR:
    {
        GError *err = NULL;
        gchar *dbg = NULL;
        gst_message_parse_error(msg, &err, &dbg);
        g_error("Error: %s\n%s\n", err->message, (dbg) ? dbg : "");
        g_error_free(err);
        g_free(dbg);
        break;
    }
    default:
        break;
    }
}
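A minimal sketch of how such a callback is typically attached (assuming GStreamer 0.10, which the msg->structure access above implies; the pipeline_ member name and the static-member callback wiring are illustrative assumptions, not taken from toonloop):

GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline_));
/* emit a "message" signal on the main loop for each posted message */
gst_bus_add_signal_watch (bus);
/* the void-returning callback above matches the "message" signal handler */
g_signal_connect (bus, "message", G_CALLBACK (Pipeline::bus_message_cb), this);
gst_object_unref (bus);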
Code Example #2
static gboolean
rsn_audiomunge_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret = FALSE;
  RsnAudioMunge *munge = RSN_AUDIOMUNGE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      rsn_audiomunge_reset (munge);
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      GstSegment *segment;
      gboolean update;
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      /* we need TIME format */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      /* now configure the values */
      segment = &munge->sink_segment;

      gst_segment_set_newsegment_full (segment, update,
          rate, arate, format, start, stop, time);

      if (munge->have_audio) {
        ret = gst_pad_push_event (munge->srcpad, event);
        break;
      }

      /*
       * FIXME:
       * If the accum >= threshold or we're in a still frame and there's been
       * no audio received, then we need to generate some audio data.
       * If caused by a segment start update (time advancing in a gap) adjust
       * the new-segment and send the buffer.
       *
       * Otherwise, send the buffer before the newsegment, so that it appears
       * in the closing segment.
       */
      if (!update) {
        GST_DEBUG_OBJECT (munge, "Sending newsegment: start %" GST_TIME_FORMAT
            " stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));

        ret = gst_pad_push_event (munge->srcpad, event);
      }

      if (segment->accum >= AUDIO_FILL_THRESHOLD || munge->in_still) {
        g_print ("***********  Sending audio fill: accum = %" GST_TIME_FORMAT
            " still-state=%d\n", GST_TIME_ARGS (segment->accum),
            munge->in_still);

        /* Just generate a 100ms silence buffer for now. FIXME: Fill the gap */
        if (rsn_audiomunge_make_audio (munge, segment->start,
                GST_SECOND / 10) == GST_FLOW_OK)
          munge->have_audio = TRUE;
      } else {
        GST_LOG_OBJECT (munge, "Not sending audio fill buffer: "
            "segment accum below thresh: accum = %" GST_TIME_FORMAT,
            GST_TIME_ARGS (segment->accum));
      }

      if (update) {
        GST_DEBUG_OBJECT (munge, "Sending newsegment: start %" GST_TIME_FORMAT
            " stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));

        ret = gst_pad_push_event (munge->srcpad, event);
      }

      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      const GstStructure *s = gst_event_get_structure (event);

      if (s && gst_structure_has_name (s, "application/x-gst-dvd"))
        rsn_audiomunge_handle_dvd_event (munge, event);

      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
  }

  /* drop the reference taken by gst_pad_get_parent() above */
  gst_object_unref (munge);
  return ret;

newseg_wrong_format:

  GST_DEBUG_OBJECT (munge, "received non TIME newsegment");
  gst_event_unref (event);
  gst_object_unref (munge);
  return FALSE;
}
Code Example #3
static void
fs_rtp_conference_handle_message (
    GstBin * bin,
    GstMessage * message)
{
  FsRtpConference *self = FS_RTP_CONFERENCE (bin);

  if (!self->rtpbin)
    goto out;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:
    {
      const GstStructure *s = gst_message_get_structure (message);

      /* we change the structure name and add the session ID to it */
      if (gst_structure_has_name (s, "application/x-rtp-source-sdes") &&
          gst_structure_has_field_typed (s, "session", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "cname", G_TYPE_STRING))
      {
        guint session_id;
        guint ssrc;
        const GValue *val;
        FsRtpSession *session;
        const gchar *cname;

        val = gst_structure_get_value (s, "session");
        session_id = g_value_get_uint (val);

        val = gst_structure_get_value (s, "ssrc");
        ssrc = g_value_get_uint (val);

        cname = gst_structure_get_string (s, "cname");

        if (!ssrc || !cname)
        {
          GST_WARNING_OBJECT (self,
              "Got GstRTPBinSDES without a ssrc or a cname (ssrc:%u cname:%p)",
              ssrc, cname);
          break;
        }

        session = fs_rtp_conference_get_session_by_id (self, session_id);

        if (session) {
          fs_rtp_session_associate_ssrc_cname (session, ssrc, cname);
          g_object_unref (session);
        } else {
          GST_WARNING_OBJECT (self, "Our RtpBin announced a new association "
              "for non-existent session %u for ssrc: %u and cname %s",
              session_id, ssrc, cname);
        }
      }
      else if (gst_structure_has_name (s, "dtmf-event-processed") ||
          gst_structure_has_name (s, "dtmf-event-dropped"))
      {
        GList *item;
        guint cookie;


        GST_OBJECT_LOCK (self);
      restart:
        cookie = self->priv->sessions_cookie;
        for (item = self->priv->sessions; item; item = item->next)
        {
          GST_OBJECT_UNLOCK (self);
          if (fs_rtp_session_handle_dtmf_event_message (item->data, message))
          {
            gst_message_unref (message);
            message = NULL;
            goto out;
          }
          GST_OBJECT_LOCK (self);
          if (cookie != self->priv->sessions_cookie)
            goto restart;
        }
        GST_OBJECT_UNLOCK (self);

      }
    }
    break;
    case GST_MESSAGE_STREAM_STATUS:
    {
      GstStreamStatusType type;
      guint i;

      gst_message_parse_stream_status (message, &type, NULL);

      switch (type)
      {
        case GST_STREAM_STATUS_TYPE_ENTER:
          GST_OBJECT_LOCK (self);
          for (i = 0; i < self->priv->threads->len; i++)
          {
            if (g_ptr_array_index (self->priv->threads, i) ==
                g_thread_self ())
              goto done;
          }
          g_ptr_array_add (self->priv->threads, g_thread_self ());
        done:
          GST_OBJECT_UNLOCK (self);
          break;

        case GST_STREAM_STATUS_TYPE_LEAVE:
          GST_OBJECT_LOCK (self);
          while (g_ptr_array_remove_fast (self->priv->threads,
                  g_thread_self ()));
          GST_OBJECT_UNLOCK (self);
          break;

        default:
          /* Do nothing */
          break;
      }
    }
      break;
    default:
      break;
  }

 out:
  /* forward all messages to the parent */
  if (message)
    GST_BIN_CLASS (fs_rtp_conference_parent_class)->handle_message (bin,
        message);
}
Code Example #4
static gboolean
bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err;
      gchar *debug;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);

      /* Write debug graph to file */
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camerabin),
          GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.error");

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
      if (GST_IS_BIN (GST_MESSAGE_SRC (message))) {
        GstState oldstate, newstate;

        gst_message_parse_state_changed (message, &oldstate, &newstate, NULL);
        GST_DEBUG_OBJECT (GST_MESSAGE_SRC (message), "state-changed: %s -> %s",
            gst_element_state_get_name (oldstate),
            gst_element_state_get_name (newstate));
      }
      break;
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      GST_INFO ("got eos() - should not happen");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ELEMENT:
      if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) {
        const GstStructure *structure = gst_message_get_structure (message);

        if (gst_structure_has_name (structure, "image-done")) {
          CaptureTiming *timing;
#ifndef GST_DISABLE_GST_DEBUG
          const gchar *fname = gst_structure_get_string (structure, "filename");

          GST_DEBUG ("image done: %s", fname);
#endif
          timing = (CaptureTiming *) g_list_first (capture_times)->data;
          timing->capture_done = gst_util_get_timestamp ();

          if (capture_count < capture_total) {
            g_idle_add ((GSourceFunc) run_pipeline, NULL);
          } else {
            g_main_loop_quit (loop);
          }
        }
      }
      break;
    default:
      /* unhandled message */
      break;
  }
  return TRUE;
}
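Since bus_callback is a GstBusFunc returning TRUE (keep the watch alive), it is normally attached with gst_bus_add_watch. A minimal sketch, reusing the camerabin global from the snippet; note that GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS only writes a .dot file when the GST_DEBUG_DUMP_DOT_DIR environment variable points to an existing directory:

GstBus *bus = gst_element_get_bus (camerabin);
gst_bus_add_watch (bus, bus_callback, NULL);
gst_object_unref (bus);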
Code Example #5
File: rtp-codec.c  Project: nicobou/prototypes
int
main (int argc,
      char *argv[])
{
    
  /* Initialisation */
  gst_init (&argc, &argv);

  GList *element_list = gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DEPAYLOADER, 
							       GST_RANK_NONE);
  GList *iter = element_list;
  while (iter != NULL)
    {
      g_print ("+++++\n");
      g_print ("%s -- ", gst_element_factory_get_longname ((GstElementFactory *)iter->data));
      g_print ("%s\n", gst_plugin_feature_get_name ((GstPluginFeature *)iter->data));
	 
      const GList *static_pads = 
	gst_element_factory_get_static_pad_templates ((GstElementFactory *)iter->data);
	 
      while (NULL != static_pads)
	{
	  GstStaticPadTemplate *pad = (GstStaticPadTemplate *)static_pads->data; 
	  /* note: gst_caps_to_string (&pad->static_caps.caps) returns an
	     empty string here, so the caps are rebuilt from the template's
	     caps string instead */
	  GstCaps *caps = gst_caps_from_string (pad->static_caps.string);
	  guint caps_size = gst_caps_get_size (caps);
	  if (! gst_caps_is_any (caps))
	    for (guint i = caps_size; i > 0; i--) 
	      {
		GstStructure *caps_struct = gst_caps_get_structure (caps, i-1);
		if (gst_structure_has_name (caps_struct,"application/x-rtp")) 
		  {
		    gchar *caps_struct_str = gst_structure_to_string (caps_struct);
		    g_print ("string: %s\n", caps_struct_str);
		    g_free (caps_struct_str);
		    
		    {//payload 
		      const GValue *val = gst_structure_get_value (caps_struct, "payload");  
		      if (NULL != val) 
			{ 
			  //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if(GST_VALUE_HOLDS_INT_RANGE(val)) 
			    { 
			      g_print ("payload min %d\n", gst_value_get_int_range_min (val));  
			    } 
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("payload list %d\n", g_value_get_int (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_INT (val)) 
			    { 
			      g_print ("payload int %d\n", g_value_get_int (val)); 
			    } 
			} 
		    } 
		    { //encoding-name
		      const GValue *val = gst_structure_get_value (caps_struct, "encoding-name");  
		      if (NULL != val) 
			{
			  //g_print ("encoding-name struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("encoding-name list %s\n", g_value_get_string (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_STRING (val)) 
			    { 
			      g_print ("encoding-name string %s\n", g_value_get_string (val)); 
			    } 
				      
			}
		    } 
		    {//media
		      const GValue *val = gst_structure_get_value (caps_struct, "media");  
		      if (NULL != val) 
			{
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("media list %s\n", g_value_get_string (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_STRING (val)) 
			    { 
			      g_print ("media string %s\n", g_value_get_string (val)); 
			    } 
				      
			}
		    } 

		    {//clock rate 
		      const GValue *val = gst_structure_get_value (caps_struct, "clock-rate");  
		      if (NULL != val) 
			{ 
			  //g_print ("payload struct type %s\n", G_VALUE_TYPE_NAME (val));  
			  if(GST_VALUE_HOLDS_INT_RANGE(val)) 
			    { 
			      g_print ("clock-rate min %d\n", gst_value_get_int_range_min (val));  
			    } 
			  if (GST_VALUE_HOLDS_LIST(val)) 
			    { 
			      for (guint i = 0; i < gst_value_list_get_size (val); i++) 
				{ 
				  const GValue *item_val = gst_value_list_get_value (val, i); 
				  g_print ("clock-rate list %d\n", g_value_get_int (item_val)); 
				} 
			    } 
			  if (G_VALUE_HOLDS_INT (val)) 
			    { 
			      g_print ("clock-rate int %d\n", g_value_get_int (val)); 
			    } 
			} 
		    } 

		    /* g_print ("\nencoding-name %s\n",   */
		    /* 	 gst_structure_get_string (caps_struct,  */
		    /* 				   "encoding-name"));  */
			
		  }
	      }
	  static_pads = g_list_next (static_pads); 
	  gst_caps_unref (caps);
	}
	 
      iter = g_list_next (iter);
    }
  gst_plugin_feature_list_free (element_list);
    
  return 0;
}
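If the goal is to find a depayloader for particular RTP caps rather than to dump them all, the same factory list can be narrowed with gst_element_factory_list_filter (run before the gst_plugin_feature_list_free above). A sketch under that assumption; the H264 caps string is just an example:

GstCaps *rtp_caps = gst_caps_from_string (
    "application/x-rtp, media=(string)video, encoding-name=(string)H264");
/* depayloaders consume RTP caps on their sink pads */
GList *matches = gst_element_factory_list_filter (element_list, rtp_caps,
    GST_PAD_SINK, FALSE);
for (GList *m = matches; m != NULL; m = g_list_next (m))
  g_print ("match: %s\n",
      gst_plugin_feature_get_name ((GstPluginFeature *) m->data));
gst_plugin_feature_list_free (matches);
gst_caps_unref (rtp_caps);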
Code Example #6
/**
 * gst_audio_info_from_caps:
 * @info: a #GstAudioInfo
 * @caps: a #GstCaps
 *
 * Parse @caps and update @info.
 *
 * Returns: TRUE if @caps could be parsed
 */
gboolean
gst_audio_info_from_caps (GstAudioInfo * info, const GstCaps * caps)
{
  GstStructure *str;
  const gchar *s;
  GstAudioFormat format;
  gint rate, channels;
  guint64 channel_mask;
  gint i;
  GstAudioChannelPosition position[64];
  GstAudioFlags flags;
  GstAudioLayout layout;

  g_return_val_if_fail (info != NULL, FALSE);
  g_return_val_if_fail (caps != NULL, FALSE);
  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  GST_DEBUG ("parsing caps %" GST_PTR_FORMAT, caps);

  flags = 0;

  str = gst_caps_get_structure (caps, 0);

  if (!gst_structure_has_name (str, "audio/x-raw"))
    goto wrong_name;

  if (!(s = gst_structure_get_string (str, "format")))
    goto no_format;

  format = gst_audio_format_from_string (s);
  if (format == GST_AUDIO_FORMAT_UNKNOWN)
    goto unknown_format;

  if (!(s = gst_structure_get_string (str, "layout")))
    goto no_layout;
  if (g_str_equal (s, "interleaved"))
    layout = GST_AUDIO_LAYOUT_INTERLEAVED;
  else if (g_str_equal (s, "non-interleaved"))
    layout = GST_AUDIO_LAYOUT_NON_INTERLEAVED;
  else
    goto unknown_layout;

  if (!gst_structure_get_int (str, "rate", &rate))
    goto no_rate;
  if (!gst_structure_get_int (str, "channels", &channels))
    goto no_channels;

  if (!gst_structure_get (str, "channel-mask", GST_TYPE_BITMASK, &channel_mask,
          NULL)) {
    if (channels == 1) {
      position[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
    } else if (channels == 2) {
      position[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
      position[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
    } else {
      goto no_channel_mask;
    }
  } else if (channel_mask == 0) {
    flags |= GST_AUDIO_FLAG_UNPOSITIONED;
    for (i = 0; i < MIN (64, channels); i++)
      position[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
  } else {
    if (!gst_audio_channel_positions_from_mask (channels, channel_mask,
            position))
      goto invalid_channel_mask;
  }

  gst_audio_info_set_format (info, format, rate, channels,
      (channels > 64) ? NULL : position);

  info->flags = flags;
  info->layout = layout;

  return TRUE;

  /* ERROR */
wrong_name:
  {
    GST_ERROR ("wrong name, expected audio/x-raw");
    return FALSE;
  }
no_format:
  {
    GST_ERROR ("no format given");
    return FALSE;
  }
unknown_format:
  {
    GST_ERROR ("unknown format given");
    return FALSE;
  }
no_layout:
  {
    GST_ERROR ("no layout given");
    return FALSE;
  }
unknown_layout:
  {
    GST_ERROR ("unknown layout given");
    return FALSE;
  }
no_rate:
  {
    GST_ERROR ("no rate property given");
    return FALSE;
  }
no_channels:
  {
    GST_ERROR ("no channels property given");
    return FALSE;
  }
no_channel_mask:
  {
    GST_ERROR ("no channel-mask property given");
    return FALSE;
  }
invalid_channel_mask:
  {
    GST_ERROR ("Invalid channel mask 0x%016" G_GINT64_MODIFIER
        "x for %d channels", channel_mask, channels);
    return FALSE;
  }
}
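A minimal usage sketch for the function above (GStreamer 1.0 API; the caps string is an arbitrary fixed example):

GstAudioInfo info;
GstCaps *caps = gst_caps_from_string ("audio/x-raw, format=(string)S16LE, "
    "layout=(string)interleaved, rate=(int)44100, channels=(int)2");

gst_audio_info_init (&info);
if (gst_audio_info_from_caps (&info, caps))
  g_print ("rate=%d channels=%d bytes-per-frame=%d\n",
      GST_AUDIO_INFO_RATE (&info), GST_AUDIO_INFO_CHANNELS (&info),
      GST_AUDIO_INFO_BPF (&info));
gst_caps_unref (caps);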
Code Example #7
static GstBusSyncReply
sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *st;
  const GValue *image;
  GstBuffer *buf = NULL;
  guint8 *data_buf = NULL;
  gchar *caps_string;
  guint size = 0;
  gchar *preview_filename = NULL;
  FILE *f = NULL;
  size_t written;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:{
      st = gst_message_get_structure (message);
      if (st) {
        if (gst_structure_has_name (st, "prepare-xwindow-id")) {
          if (!no_xwindow && window) {
            gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC
                    (message)), window);
            gst_message_unref (message);
            message = NULL;
            return GST_BUS_DROP;
          }
        } else if (gst_structure_has_name (st, "image-captured")) {
          GST_DEBUG ("image-captured");
        } else if (gst_structure_has_name (st, "preview-image")) {
          GST_DEBUG ("preview-image");
          //extract preview-image from msg
          image = gst_structure_get_value (st, "buffer");
          if (image) {
            buf = gst_value_get_buffer (image);
            data_buf = GST_BUFFER_DATA (buf);
            size = GST_BUFFER_SIZE (buf);
            preview_filename = g_strdup_printf ("test_vga.rgb");
            caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
            g_print ("writing buffer to %s, elapsed: %.2fs, buffer caps: %s\n",
                preview_filename, g_timer_elapsed (timer, NULL), caps_string);
            g_free (caps_string);
            f = g_fopen (preview_filename, "w");
            if (f) {
              written = fwrite (data_buf, size, 1, f);
              if (!written) {
                g_print ("error writing file\n");
              }
              fclose (f);
            } else {
              g_print ("error opening file for raw image writing\n");
            }
            g_free (preview_filename);
          }
        }
      }
      break;
    }
    default:
      /* unhandled message */
      break;
  }
  return GST_BUS_PASS;
}
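A sync handler runs on the streaming thread that posts the message, which is why prepare-xwindow-id has to be handled here: the window handle must be set before the video sink continues. A minimal attachment sketch (0.10 API, matching the message->structure access above, and assuming the pipeline element is the camerabin used elsewhere in this test; in 1.0, gst_bus_set_sync_handler takes an extra GDestroyNotify argument):

GstBus *bus = gst_element_get_bus (camerabin);
gst_bus_set_sync_handler (bus, sync_bus_callback, NULL);
gst_object_unref (bus);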
Code Example #8
File: playbin.c  Project: ChinnaSuhas/ossbuild
static void
test_missing_primary_decoder (void)
{
  GstStructure *s;
  GstMessage *msg;
  GstElement *playbin;
  GError *err = NULL;
  GstBus *bus;
  gchar *use_decodebin2 = getenv ("USE_DECODEBIN2");
  gboolean decodebin2 = use_decodebin2 != NULL && *use_decodebin2 == '1';

  fail_unless (gst_element_register (NULL, "codecsrc", GST_RANK_PRIMARY,
          gst_codec_src_get_type ()));

  playbin = create_playbin ("codec://");

  fail_unless_equals_int (gst_element_set_state (playbin, GST_STATE_READY),
      GST_STATE_CHANGE_SUCCESS);
  fail_unless_equals_int (gst_element_set_state (playbin, GST_STATE_PAUSED),
      GST_STATE_CHANGE_ASYNC);

  /* there should soon be at least a missing-plugin message on the bus and an
   * error message; the missing-plugin message should be first */
  bus = gst_element_get_bus (playbin);

  msg = gst_bus_poll (bus, GST_MESSAGE_ELEMENT | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_int (GST_MESSAGE_TYPE (msg), GST_MESSAGE_ELEMENT);
  fail_unless (msg->structure != NULL);
  s = msg->structure;
  fail_unless (gst_structure_has_name (s, "missing-plugin"));
  fail_unless (gst_structure_has_field_typed (s, "type", G_TYPE_STRING));
  fail_unless_equals_string (gst_structure_get_string (s, "type"), "decoder");
  fail_unless (gst_structure_has_field_typed (s, "detail", GST_TYPE_CAPS));
  gst_message_unref (msg);

  msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, -1);
  fail_unless_equals_int (GST_MESSAGE_TYPE (msg), GST_MESSAGE_ERROR);

  /* make sure the error is a STREAM CODEC_NOT_FOUND one */
  gst_message_parse_error (msg, &err, NULL);
  fail_unless (err != NULL);
  if (decodebin2) {
    fail_unless (err->domain == GST_CORE_ERROR, "error has wrong error domain "
        "%s instead of core-error-quark", g_quark_to_string (err->domain));
    fail_unless (err->code == GST_CORE_ERROR_MISSING_PLUGIN, "error has wrong "
        "code %u instead of GST_CORE_ERROR_MISSING_PLUGIN", err->code);
  } else {
    fail_unless (err->domain == GST_STREAM_ERROR,
        "error has wrong error domain " "%s instead of stream-error-quark",
        g_quark_to_string (err->domain));
    fail_unless (err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND,
        "error has wrong "
        "code %u instead of GST_STREAM_ERROR_CODEC_NOT_FOUND", err->code);
  }
  g_error_free (err);
  gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);
}
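Rather than matching the "missing-plugin" structure fields by hand, the pbutils library from gst-plugins-base provides helpers for these messages. A hedged sketch (requires <gst/pbutils/pbutils.h>):

if (gst_is_missing_plugin_message (msg)) {
  gchar *desc = gst_missing_plugin_message_get_description (msg);
  g_print ("missing plugin: %s\n", desc);
  g_free (desc);
}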
Code Example #9
static gboolean
gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (parent);
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    {
      const GstStructure *s = gst_event_get_structure (event);

      /* This event usually comes from the downstream gstrtpjitterbuffer */
      if (gst_structure_has_name (s, "GstRTPRetransmissionRequest")) {
        guint seqnum = 0;
        guint ssrc = 0;
        gpointer ssrc2 = 0;

        /* retrieve the seqnum of the packet that needs to be retransmitted */
        if (!gst_structure_get_uint (s, "seqnum", &seqnum))
          seqnum = -1;

        /* retrieve the ssrc of the packet that needs to be retransmitted;
         * it is useful when reconstructing the original packet from the rtx packet */
        if (!gst_structure_get_uint (s, "ssrc", &ssrc))
          ssrc = -1;

        GST_DEBUG_OBJECT (rtx,
            "request seqnum: %" G_GUINT32_FORMAT ", ssrc: %" G_GUINT32_FORMAT,
            seqnum, ssrc);

        GST_OBJECT_LOCK (rtx);

        /* increase number of seen requests for our statistics */
        ++rtx->num_rtx_requests;

        /* First, we lookup in our map to see if we have already associate this
         * master stream ssrc with its retransmitted stream.
         * Every ssrc are unique so we can use the same hash table
         * for both retrieving the ssrc1 from ssrc2 and also ssrc2 from ssrc1
         */
        if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map,
                GUINT_TO_POINTER (ssrc), NULL, &ssrc2)
            && GPOINTER_TO_UINT (ssrc2) != GPOINTER_TO_UINT (ssrc)) {
          GST_DEBUG_OBJECT (rtx, "Retransmitted stream %" G_GUINT32_FORMAT
              " already associated to its master", GPOINTER_TO_UINT (ssrc2));
        } else {
          SsrcAssoc *assoc;

          /* not already associated but also we have to check that we have not
           * already considered this request.
           */
          if (g_hash_table_lookup_extended (rtx->seqnum_ssrc1_map,
                  GUINT_TO_POINTER (seqnum), NULL, (gpointer *) & assoc)) {
            if (assoc->ssrc == ssrc) {
              /* do nothing because we have already considered this request;
               * the jitterbuffer may just be too impatient, or the rtx packet
               * may have been lost too.
               * It does not mean we reject the event; we still want to forward
               * the request to the gstrtpsession to be translated into a FB NACK
               */
              GST_DEBUG_OBJECT (rtx, "Duplicated request seqnum: %"
                  G_GUINT32_FORMAT ", ssrc1: %" G_GUINT32_FORMAT, seqnum, ssrc);
            } else {

              /* If the association attempt is larger than ASSOC_TIMEOUT,
               * then we give up on it, and try this one.
               */
              if (!GST_CLOCK_TIME_IS_VALID (rtx->last_time) ||
                  !GST_CLOCK_TIME_IS_VALID (assoc->time) ||
                  assoc->time + ASSOC_TIMEOUT < rtx->last_time) {
                /* From RFC 4588:
                 * the receiver MUST NOT have two outstanding requests for the
                 * same packet sequence number in two different original streams
                 * before the association is resolved. Otherwise it's impossible
                 * to associate a rtx stream and its master stream
                 */

                /* remove seqnum in order to reuse the spot */
                g_hash_table_remove (rtx->seqnum_ssrc1_map,
                    GUINT_TO_POINTER (seqnum));
                goto retransmit;
              } else {
                GST_DEBUG_OBJECT (rtx,
                    "reject request for seqnum %" G_GUINT32_FORMAT
                    " of master stream %" G_GUINT32_FORMAT, seqnum, ssrc);

                /* do not forward the event as we are rejecting this request */
                GST_OBJECT_UNLOCK (rtx);
                gst_event_unref (event);
                return TRUE;
              }
            }
          } else {
          retransmit:
            /* the request has not been already considered
             * insert it for the first time */
            g_hash_table_insert (rtx->seqnum_ssrc1_map,
                GUINT_TO_POINTER (seqnum),
                ssrc_assoc_new (ssrc, rtx->last_time));
          }
        }

        GST_DEBUG_OBJECT (rtx,
            "packet number %" G_GUINT32_FORMAT " of master stream %"
            G_GUINT32_FORMAT " needs to be retransmitted", seqnum, ssrc);

        GST_OBJECT_UNLOCK (rtx);
      }

      /* Transfer the event upstream so that the request can actually be
       * translated by the gstrtpsession and sent over the network */
      res = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }
  return res;
}
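For reference, a sketch of the custom upstream event this handler expects, mirroring how a downstream jitterbuffer requests a retransmission (seqnum, ssrc, and the sinkpad it is pushed on are placeholder names):

GstEvent *rtx_event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
    gst_structure_new ("GstRTPRetransmissionRequest",
        "seqnum", G_TYPE_UINT, (guint) seqnum,
        "ssrc", G_TYPE_UINT, (guint) ssrc,
        NULL));
/* pushed on a sink pad so the event travels upstream */
gst_pad_push_event (sinkpad, rtx_event);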
Code Example #10
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
{
	gchar * sourceName;
	
	// source
	GstObject * source;
	source = GST_MESSAGE_SRC(msg);
	
	if (!GST_IS_OBJECT(source))
		return GST_BUS_DROP;
	
	sourceName = gst_object_get_name(source);

	switch (GST_MESSAGE_TYPE(msg)) 
	{
		case GST_MESSAGE_EOS: 
		{
			g_message("End-of-stream");
			end_eof = 1;
			break;
		}
		
		case GST_MESSAGE_ERROR: 
		{
			gchar * debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free (debug);
			lt_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					if ( g_strrstr(sourceName, "videosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
					else if ( g_strrstr(sourceName, "audiosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
				}
			}
			g_error_free(err);

			end_eof = 1; 		// NOTE: just to exit
			
			break;
		}
		
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;
	
			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
			}
			g_error_free(inf);
			break;
		}
		
		case GST_MESSAGE_TAG:
		{
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);
	
			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}
	
			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				if(fd >= 0)
				{
					int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
					close(fd);
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
				}
				//FIXME: how shall playback handle this event???
			}
			gst_tag_list_free(tags);
			lt_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__);  //FIXME: how shall playback handle this event???
			break;
		}
		
		case GST_MESSAGE_STATE_CHANGED:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			
			if(old_state == new_state)
				break;
			lt_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
		
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
		
			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					GstIterator *children;
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
					children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
					audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
					videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
					gst_iterator_free(children);
					
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
#if 0
		case GST_MESSAGE_ELEMENT:
		{
			if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id")) 
			{
				// set window id
				gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
				
				// reshape window
				gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
				
				// sync frames
				gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
			}
		}
#endif
		break;
		default:
			break;
	}

	g_free(sourceName);

	return GST_BUS_DROP;
}
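The match_sinktype helper is referenced but not shown. A hypothetical sketch of what gst_iterator_find_custom expects here in 0.10, namely a GCompareFunc that returns 0 on a match:

static gint
match_sinktype(GstElement *element, gpointer type_name)
{
	return g_strcmp0(g_type_name(G_OBJECT_TYPE(element)),
			 (const gchar *)type_name);
}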
Code Example #11
/**
 * gst_video_format_parse_caps:
 * @caps: the #GstCaps to parse
 * @format: the #GstVideoFormat of the video represented by @caps (output)
 * @width: the width of the video represented by @caps (output)
 * @height: the height of the video represented by @caps (output)
 *
 * Determines the #GstVideoFormat of @caps and places it in the location
 * pointed to by @format.  Extracts the size of the video and places it
 * in the location pointed to by @width and @height.  If @caps does not
 * represent one of the raw video formats listed in #GstVideoFormat, the
 * function will fail and return FALSE.
 *
 * Since: 0.10.16
 *
 * Returns: TRUE if @caps was parsed correctly.
 */
gboolean
gst_video_format_parse_caps (GstCaps * caps, GstVideoFormat * format,
    int *width, int *height)
{
  GstStructure *structure;
  gboolean ok = TRUE;

  if (!gst_caps_is_fixed (caps))
    return FALSE;

  structure = gst_caps_get_structure (caps, 0);

  if (format) {
    if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
      guint32 fourcc;

      ok &= gst_structure_get_fourcc (structure, "format", &fourcc);

      *format = gst_video_format_from_fourcc (fourcc);
      if (*format == GST_VIDEO_FORMAT_UNKNOWN) {
        ok = FALSE;
      }
    } else if (gst_structure_has_name (structure, "video/x-raw-rgb")) {
      int depth;
      int bpp;
      int endianness;
      int red_mask;
      int green_mask;
      int blue_mask;
      int alpha_mask;
      gboolean have_alpha;

      ok &= gst_structure_get_int (structure, "depth", &depth);
      ok &= gst_structure_get_int (structure, "bpp", &bpp);
      ok &= gst_structure_get_int (structure, "endianness", &endianness);
      ok &= gst_structure_get_int (structure, "red_mask", &red_mask);
      ok &= gst_structure_get_int (structure, "green_mask", &green_mask);
      ok &= gst_structure_get_int (structure, "blue_mask", &blue_mask);
      have_alpha = gst_structure_get_int (structure, "alpha_mask", &alpha_mask);

      if (depth == 24 && bpp == 32 && endianness == G_BIG_ENDIAN) {
        *format = gst_video_format_from_rgb32_masks (red_mask, green_mask,
            blue_mask);
        if (*format == GST_VIDEO_FORMAT_UNKNOWN) {
          ok = FALSE;
        }
      } else if (depth == 32 && bpp == 32 && endianness == G_BIG_ENDIAN &&
          have_alpha) {
        *format = gst_video_format_from_rgba32_masks (red_mask, green_mask,
            blue_mask, alpha_mask);
        if (*format == GST_VIDEO_FORMAT_UNKNOWN) {
          ok = FALSE;
        }
      } else if (depth == 24 && bpp == 24 && endianness == G_BIG_ENDIAN) {
        *format = gst_video_format_from_rgb24_masks (red_mask, green_mask,
            blue_mask);
        if (*format == GST_VIDEO_FORMAT_UNKNOWN) {
          ok = FALSE;
        }
      } else {
        ok = FALSE;
      }
    } else {
      ok = FALSE;
    }
  }

  if (width) {
    ok &= gst_structure_get_int (structure, "width", width);
  }

  if (height) {
    ok &= gst_structure_get_int (structure, "height", height);
  }

  return ok;
}
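A minimal usage sketch for the function above (0.10 API, like the function itself; the caps string is an arbitrary fixed example):

GstVideoFormat format;
int width, height;
GstCaps *caps = gst_caps_from_string (
    "video/x-raw-yuv, format=(fourcc)I420, width=(int)320, height=(int)240");

if (gst_video_format_parse_caps (caps, &format, &width, &height))
  g_print ("format=%d size=%dx%d\n", (int) format, width, height);
gst_caps_unref (caps);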
Code Example #12
void cPlayback::FindAllPids(int *apids, unsigned int *ac3flags, unsigned int *numpida, std::string * language)
{ 
	lt_info( "%s:%s\n", FILENAME, __FUNCTION__);

	if(m_gst_playbin)
	{
		gint i, n_audio = 0;
		//GstStructure * structure = NULL;
		
		// get audio
		g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
		printf("%s: %d audio\n", __FUNCTION__, n_audio);
		
		if(n_audio == 0)
			return;
		
		for (i = 0; i < n_audio; i++)
		{
			// apids
			apids[i]=i;
			
			GstPad * pad = 0;
			g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
			GstCaps * caps = gst_pad_get_negotiated_caps(pad);
			if (!caps)
				continue;
			
			GstStructure * structure = gst_caps_get_structure(caps, 0);
			//const gchar *g_type = gst_structure_get_name(structure);
		
			//if (!structure)
				//return atUnknown;
			//ac3flags[0] = 0;

			// ac3flags
			if ( gst_structure_has_name (structure, "audio/mpeg"))
			{
				gint mpegversion = -1;

				if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
					ac3flags[i] = 0; // unknown
				else switch (mpegversion)
				{
					case 1:
						// MPEG-1 audio (MP2/MP3), was atMPEG/atMP3
						ac3flags[i] = 4;
						break;
					case 2:
					case 4:
						// AAC, was atAAC
						ac3flags[i] = 5;
						break;
					default:
						// was atUnknown
						ac3flags[i] = 0;
						break;
				}
			}
			else if ( gst_structure_has_name (structure, "audio/x-ac3") || gst_structure_has_name (structure, "audio/ac3") )
				//return atAC3;
				ac3flags[i] = 1;
			else if ( gst_structure_has_name (structure, "audio/x-dts") || gst_structure_has_name (structure, "audio/dts") )
				//return atDTS;
				ac3flags[i] = 6;
			else if ( gst_structure_has_name (structure, "audio/x-raw-int") )
				//return atPCM;
				ac3flags[i] = 0;
			
			gst_caps_unref(caps);
		}
		
		// numpids
		*numpida=i;
	}
}
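The ac3flags values are magic numbers; as inferred from the commented-out return statements above, they appear to encode the codec as follows (hypothetical names, not part of the original code):

enum {
	AUDIO_FLAG_PCM_OR_UNKNOWN = 0,
	AUDIO_FLAG_AC3 = 1,
	AUDIO_FLAG_MPEG = 4,	/* mpegversion 1 */
	AUDIO_FLAG_AAC = 5,	/* mpegversion 2 or 4 */
	AUDIO_FLAG_DTS = 6
};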
Code Example #13
static GstCaps *
gst_play_sink_convert_bin_getcaps (GstPad * pad, GstCaps * filter)
{
  GstPlaySinkConvertBin *self =
      GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer;

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  if (pad == self->srcpad) {
    otherpad = self->sinkpad;
  } else if (pad == self->sinkpad) {
    otherpad = self->srcpad;
  } else {
    GST_ERROR_OBJECT (pad, "Not one of our pads");
    otherpad = NULL;
  }

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    if (peer) {
      GstCaps *peer_caps;
      GstCaps *downstream_filter = NULL;

      /* Add all the caps that we can convert to to the filter caps,
       * otherwise downstream might just return EMPTY caps because
       * it doesn't handle the filter caps but we could still convert
       * to these caps */
      if (filter) {
        guint i, n;

        downstream_filter = gst_caps_new_empty ();

        /* Intersect raw video caps in the filter caps with the converter
         * caps. This makes sure that we don't accept raw video that we
         * can't handle, e.g. because of caps features */
        n = gst_caps_get_size (filter);
        for (i = 0; i < n; i++) {
          GstStructure *s;
          GstCaps *tmp, *tmp2;

          s = gst_structure_copy (gst_caps_get_structure (filter, i));
          if (gst_structure_has_name (s,
                  self->audio ? "audio/x-raw" : "video/x-raw")) {
            tmp = gst_caps_new_full (s, NULL);
            tmp2 = gst_caps_intersect (tmp, self->converter_caps);
            gst_caps_append (downstream_filter, tmp2);
            gst_caps_unref (tmp);
          } else {
            gst_caps_append_structure (downstream_filter, s);
          }
        }
        downstream_filter =
            gst_caps_merge (downstream_filter,
            gst_caps_ref (self->converter_caps));
      }

      peer_caps = gst_pad_query_caps (peer, downstream_filter);
      if (downstream_filter)
        gst_caps_unref (downstream_filter);
      gst_object_unref (peer);
      if (self->converter_caps && is_raw_caps (peer_caps, self->audio)) {
        GstCaps *converter_caps = gst_caps_ref (self->converter_caps);
        GstCapsFeatures *cf;
        GstStructure *s;
        guint i, n;

        ret = gst_caps_make_writable (peer_caps);

        /* Filter out ANY capsfeatures from the converter caps. We can't
         * convert to ANY capsfeatures, they are only there so that we
         * can passthrough whatever downstream can support... but we
         * definitely don't want to return them here
         */
        n = gst_caps_get_size (converter_caps);
        for (i = 0; i < n; i++) {
          s = gst_caps_get_structure (converter_caps, i);
          cf = gst_caps_get_features (converter_caps, i);

          if (cf && gst_caps_features_is_any (cf))
            continue;
          ret =
              gst_caps_merge_structure_full (ret, gst_structure_copy (s),
              (cf ? gst_caps_features_copy (cf) : NULL));
        }
        gst_caps_unref (converter_caps);
      } else {
        ret = peer_caps;
      }
    } else {
      ret = gst_caps_ref (self->converter_caps);
    }
    GST_PLAY_SINK_CONVERT_BIN_FILTER_CAPS (filter, ret);

  } else {
    ret = filter ? gst_caps_ref (filter) : gst_caps_new_any ();
  }
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;
}
Code Example #14
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *video_caps = NULL;
  gboolean res = FALSE;

  GstCaps *allowed_caps, *output_caps, *src_caps;

  /* check if the input is non native */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
    if (!gst_structure_get_fourcc (structure, "format", &vpp->fourcc))
      goto done;
    vpp->native_input = FALSE;
    video_caps = gst_vdp_yuv_to_video_caps (caps);
    if (!video_caps)
      goto done;

    if (!vpp->vpool)
      vpp->vpool = gst_vdp_video_buffer_pool_new (vpp->device);

    gst_vdp_buffer_pool_set_caps (vpp->vpool, video_caps);

  } else {
    vpp->native_input = TRUE;
    video_caps = gst_caps_ref (caps);

    if (vpp->vpool) {
      g_object_unref (vpp->vpool);
      vpp->vpool = NULL;
    }
  }


  structure = gst_caps_get_structure (video_caps, 0);
  if (!gst_structure_get_int (structure, "width", &vpp->width) ||
      !gst_structure_get_int (structure, "height", &vpp->height) ||
      !gst_structure_get_int (structure, "chroma-type",
          (gint *) & vpp->chroma_type))
    goto done;


  /* get interlaced flag */
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  /* extract par */
  if (gst_structure_has_field_typed (structure, "pixel-aspect-ratio",
          GST_TYPE_FRACTION)) {
    gst_structure_get_fraction (structure, "pixel-aspect-ratio", &vpp->par_n,
        &vpp->par_d);
    vpp->got_par = TRUE;
  } else
    vpp->got_par = FALSE;

  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_fraction_double (&fps_n, &fps_d);
      gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n,
          fps_d, NULL);
      vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
    }

    gst_structure_remove_field (structure, "interlaced");
  }

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  if (G_UNLIKELY (!allowed_caps))
    goto allowed_caps_error;
  if (G_UNLIKELY (gst_caps_is_empty (allowed_caps))) {
    gst_caps_unref (allowed_caps);
    goto allowed_caps_error;
  }
  GST_DEBUG ("allowed_caps: %" GST_PTR_FORMAT, allowed_caps);

  output_caps = gst_vdp_video_to_output_caps (video_caps);
  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (output_caps);

  if (gst_caps_is_empty (src_caps))
    goto not_negotiated;

  gst_pad_fixate_caps (vpp->srcpad, src_caps);

  GST_DEBUG ("src_caps: %" GST_PTR_FORMAT, src_caps);

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);

done:
  gst_object_unref (vpp);
  if (video_caps)
    gst_caps_unref (video_caps);

  return res;

allowed_caps_error:
  GST_ERROR_OBJECT (vpp, "Got invalid allowed caps");
  goto done;

not_negotiated:
  gst_caps_unref (src_caps);
  GST_ERROR_OBJECT (vpp, "Couldn't find suitable output format");
  goto done;
}
Code Example #15
static gboolean gst_a2dp_sink_init_dynamic_elements(GstA2dpSink *self,
						GstCaps *caps)
{
	GstStructure *structure;
	GstEvent *event;
	GstPad *capsfilterpad;
	gboolean crc;
	gchar *mode = NULL;

	structure = gst_caps_get_structure(caps, 0);

	/* before everything we need to remove fakesink */
	gst_a2dp_sink_remove_fakesink(self);

	/* first, we need to create our rtp payloader */
	if (gst_structure_has_name(structure, "audio/x-sbc")) {
		GST_LOG_OBJECT(self, "sbc media received");
		if (!gst_a2dp_sink_init_rtp_sbc_element(self))
			return FALSE;
	} else if (gst_structure_has_name(structure, "audio/mpeg")) {
		GST_LOG_OBJECT(self, "mp3 media received");
		if (!gst_a2dp_sink_init_rtp_mpeg_element(self))
			return FALSE;
	} else {
		GST_ERROR_OBJECT(self, "Unexpected media type");
		return FALSE;
	}

	if (!gst_a2dp_sink_init_avdtp_sink(self))
		return FALSE;

	/* check if we should push the taglist FIXME should we push this?
	 * we can send the tags directly if needed */
	if (self->taglist != NULL &&
			gst_structure_has_name(structure, "audio/mpeg")) {

		event = gst_event_new_tag(self->taglist);

		/* send directly the crc */
		if (gst_tag_list_get_boolean(self->taglist, "has-crc", &crc))
			gst_avdtp_sink_set_crc(self->sink, crc);

		if (gst_tag_list_get_string(self->taglist, "channel-mode",
				&mode))
			gst_avdtp_sink_set_channel_mode(self->sink, mode);

		capsfilterpad = gst_ghost_pad_get_target(self->ghostpad);
		gst_pad_send_event(capsfilterpad, event);
		/* gst_ghost_pad_get_target() returns a reference */
		gst_object_unref(capsfilterpad);
		self->taglist = NULL;
		g_free(mode);
	}

	if (!gst_avdtp_sink_set_device_caps(self->sink, caps))
		return FALSE;

	g_object_set(G_OBJECT(self->rtp), "mtu",
		gst_avdtp_sink_get_link_mtu(self->sink), NULL);

	/* we forward our new segment here if we have one */
	if (self->newseg_event) {
		gst_pad_send_event(GST_BASE_RTP_PAYLOAD_SINKPAD(self->rtp),
					self->newseg_event);
		self->newseg_event = NULL;
	}

	return TRUE;
}
Code Example #16
File: bdremux.c  Project: OpenDMM/bdremux
static void
demux_pad_added_cb (GstElement * element, GstPad * demuxpad, App * app)
{
    GstPad *parser_sinkpad = NULL, *parser_srcpad = NULL, *queue_sinkpad = NULL, *queue_srcpad = NULL, *mux_sinkpad = NULL;
    GstStructure *s;
    GstCaps *caps = gst_pad_get_caps (demuxpad);

    gchar *demuxpadname, sinkpadname[10], srcpadname[9];
    guint sourcepid;
    int i, ret;

    s = gst_caps_get_structure (caps, 0);
    demuxpadname = gst_pad_get_name (demuxpad);
    GST_DEBUG ("demux_pad_added_cb %s:%s", GST_DEBUG_PAD_NAME(demuxpad));

    if (g_ascii_strncasecmp (demuxpadname, "video", 5) == 0) {
        sscanf (demuxpadname + 6, "%x", &sourcepid);
        if (app->auto_pids) {
            app->a_source_pids[0] = sourcepid;
            if (app->a_sink_pids[0] == -1)
            {
                app->a_sink_pids[0] = sourcepid;
                app->no_sink_pids++;
            }
            app->no_source_pids++;
        }
        if (sourcepid == app->a_source_pids[0] && app->videoparser == NULL) {
            if (gst_structure_has_name (s, "video/mpeg")) {
                app->videoparser = gst_element_factory_make ("mpegvideoparse", "videoparse");
                if (!app->videoparser) {
                    bdremux_errout("mpegvideoparse not found! please install gst-plugin-mpegvideoparse!");
                }
            }
            else if (gst_structure_has_name (s, "video/x-h264")) {
                app->videoparser = gst_element_factory_make ("h264parse", "videoparse");
                if (!app->videoparser) {
                    bdremux_errout("h264parse not found! please install gst-plugin-videoparsersbad!");
                }
            }
            gst_bin_add (GST_BIN (app->pipeline), app->videoparser);
            gst_element_set_state (app->videoparser, GST_STATE_PLAYING);
            parser_sinkpad = gst_element_get_static_pad (app->videoparser, "sink");
            parser_srcpad = gst_element_get_static_pad (app->videoparser, "src");
            g_sprintf (sinkpadname, "sink%d", app->a_sink_pids[0]);
            g_sprintf (srcpadname, "src%d", app->a_sink_pids[0]);
            queue_sinkpad = gst_element_get_request_pad (app->queue, sinkpadname);
            queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
            g_sprintf (sinkpadname, "sink_%d", app->a_sink_pids[0]);
            mux_sinkpad = gst_element_get_request_pad (app->m2tsmux, sinkpadname);
            app->requested_pid_count++;
            if (app->requested_pid_count <= app->no_source_pids)
            {
                ret = gst_pad_set_blocked_async (queue_srcpad, TRUE, (GstPadBlockCallback) pad_block_cb, app);
                GST_DEBUG ("BLOCKING %s returned %i", srcpadname, ret);
            }
            if (gst_pad_link (demuxpad, parser_sinkpad) == 0)
            {
                if (gst_pad_link (parser_srcpad, queue_sinkpad) == 0)
                {
                    if (gst_pad_link (queue_srcpad, mux_sinkpad) == 0) {
                        g_fprintf
                        (stdout, "linked: Source PID %d to %s\n",
                         app->a_source_pids[0], sinkpadname);
                        g_signal_connect (G_OBJECT (mux_sinkpad), "notify::caps", G_CALLBACK (mux_pad_has_caps_cb), app);
                        fflush(stdout);
                    } else {
                        bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(queue_srcpad), GST_DEBUG_PAD_NAME(mux_sinkpad)));
                    }
                } else {
                    bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s @%p", GST_DEBUG_PAD_NAME(parser_srcpad), GST_DEBUG_PAD_NAME(queue_sinkpad), queue_sinkpad));
                }
            } else {
                bdremux_errout(g_strdup_printf("Couldn't link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(demuxpad), GST_DEBUG_PAD_NAME(parser_sinkpad)));
            }
        }
    } else if (g_ascii_strncasecmp (demuxpadname, "audio", 5) == 0) {
        sscanf (demuxpadname + 6, "%x", &sourcepid);
        if (app->auto_pids)
        {
            if (app->no_source_pids == 0)
                i = 1;
            else
                i = app->no_source_pids;
            app->a_source_pids[i] = sourcepid;
            if (app->a_sink_pids[i] == -1)
            {
                app->a_sink_pids[i] = sourcepid;
                app->no_sink_pids++;
            }
            app->no_source_pids++;
        }
        for (i = 1; i < app->no_source_pids; i++) {
            if (sourcepid == app->a_source_pids[i]) {
                if (gst_structure_has_name (s, "audio/mpeg")) {
                    app->audioparsers[i] = gst_element_factory_make ("mpegaudioparse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("mpegaudioparse not found! please install gst-plugin-mpegaudioparse!");
                    }
                }
                else if (gst_structure_has_name (s, "audio/x-ac3")) {
                    app->audioparsers[i] = gst_element_factory_make ("ac3parse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("mpegaudioparse not found! please install gst-plugin-audioparses!");
                    }
                }
                else if (gst_structure_has_name (s, "audio/x-dts")) {
                    app->audioparsers[i] = gst_element_factory_make ("dcaparse", NULL);
                    if (!app->audioparsers[i]) {
                        bdremux_errout("dcaparse not found! please install gst-plugin-audioparses!");
                    }
                }
                else {
                    bdremux_errout(g_strdup_printf("could not find parser for audio stream with pid 0x%04x!", sourcepid));
                }
                gst_bin_add (GST_BIN (app->pipeline), app->audioparsers[i]);
                gst_element_set_state (app->audioparsers[i], GST_STATE_PLAYING);
                parser_sinkpad = gst_element_get_static_pad (app->audioparsers[i], "sink");
                parser_srcpad = gst_element_get_static_pad (app->audioparsers[i], "src");
                g_sprintf (sinkpadname, "sink%d", app->a_sink_pids[i]);
                g_sprintf (srcpadname, "src%d", app->a_sink_pids[i]);
                queue_sinkpad = gst_element_get_request_pad (app->queue, sinkpadname);
                queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
                g_sprintf (sinkpadname, "sink_%d", app->a_sink_pids[i]);
                mux_sinkpad = gst_element_get_request_pad (app->m2tsmux, sinkpadname);
                app->requested_pid_count++;
                if (app->requested_pid_count <= app->no_source_pids)
                {
                    ret = gst_pad_set_blocked_async (queue_srcpad, TRUE, (GstPadBlockCallback) pad_block_cb, app);
                    GST_DEBUG ("BLOCKING %s returned %i", srcpadname, ret);
                }
                if (gst_pad_link (demuxpad, parser_sinkpad) == 0
                        && gst_pad_link (parser_srcpad, queue_sinkpad) == 0
                        && gst_pad_link (queue_srcpad, mux_sinkpad) == 0) {
                    g_print
                    ("linked: Source PID %d to %s\n",
                     app->a_source_pids[i], sinkpadname);
                    g_signal_connect (G_OBJECT (mux_sinkpad), "notify::caps", G_CALLBACK (mux_pad_has_caps_cb), app);
                } else
                    bdremux_errout (g_strdup_printf("Couldn't link audio PID 0x%04x to sink PID 0x%04x",
                                                    app->a_source_pids[i], app->a_sink_pids[i]));
                break;
            }
        }
    } else
        GST_INFO ("Ignoring pad %s!", demuxpadname);

    if (parser_sinkpad)
        gst_object_unref (parser_sinkpad);
    if (parser_srcpad)
        gst_object_unref (parser_srcpad);
    if (queue_sinkpad)
        gst_object_unref (queue_sinkpad);
    if (queue_srcpad)
        gst_object_unref (queue_srcpad);
    if (mux_sinkpad)
        gst_object_unref (mux_sinkpad);
    if (caps)
        gst_caps_unref (caps);

//   g_print("app->requested_pid_count = %i, app->no_source_pids = %i\n", app->requested_pid_count, app->no_source_pids);
    if (!app->auto_pids && app->requested_pid_count == app->no_source_pids)
    {
        GST_INFO("All %i source PIDs have been linked to the mux -> UNBLOCKING all pads and start muxing", app->requested_pid_count);
        for (i = 0; i < app->no_sink_pids; i++)
        {
            g_sprintf (srcpadname, "src%d", app->a_sink_pids[i]);
            queue_srcpad = gst_element_get_static_pad(app->queue, srcpadname);
            ret = gst_pad_set_blocked_async (queue_srcpad, FALSE, (GstPadBlockCallback) pad_block_cb, app);
            GST_DEBUG ("UNBLOCKING %s returned %i", srcpadname, ret);
        }
    }

    g_free (demuxpadname);
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(app->pipeline),GST_DEBUG_GRAPH_SHOW_ALL,"bdremux_pipelinegraph_pad_added");
}
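The function above is a demuxer "pad-added" callback. As a minimal sketch of how such a handler is typically wired up (the element name "tsdemux", the callback name pad_added_cb and the app pointer are assumptions for illustration, not taken from this listing):

/* Sketch (assumed names): connect a "pad-added" handler so the
 * callback runs each time the demuxer exposes a new stream pad. */
GstElement *demux = gst_element_factory_make ("tsdemux", "demux");
gst_bin_add (GST_BIN (app->pipeline), demux);
g_signal_connect (demux, "pad-added", G_CALLBACK (pad_added_cb), app);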
Code Example #17
File: gstkateenc.c Project: 0p1pp1/gst-plugins-bad
static gboolean
gst_kate_enc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstKateEnc *ke = GST_KATE_ENC (parent);
  const GstStructure *structure;
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_kate_enc_setcaps (ke, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSegment seg;

      GST_LOG_OBJECT (ke, "Got newsegment event");

      gst_event_copy_segment (event, &seg);

      if (!ke->headers_sent) {
        if (ke->pending_segment)
          gst_event_unref (ke->pending_segment);
        ke->pending_segment = event;
        event = NULL;
      }

      if (ke->initialized) {
        GST_LOG_OBJECT (ke, "ensuring all headers are in");
        if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
          GST_WARNING_OBJECT (ke, "Failed to flush headers");
        } else {
          if (seg.format != GST_FORMAT_TIME
              || !GST_CLOCK_TIME_IS_VALID (seg.start)) {
            GST_WARNING_OBJECT (ke,
                "No time in newsegment event %p, format %d, timestamp %"
                G_GINT64_FORMAT, event, (int) seg.format, seg.start);
            /* to be safe, we'd need to generate a keepalive anyway, but we'd have to guess at the timestamp to use; a
               good guess would be the last known timestamp plus the keepalive time, but if we then get a packet with a
               timestamp less than this, it would fail to encode, which would be Bad. If we don't encode a keepalive, we
               run the risk of stalling the pipeline and hanging, which is Very Bad. Oh dear. We can't exit(-1), can we ? */
          } else {
            float t = seg.start / (double) GST_SECOND;

            if (ke->delayed_spu
                && t - ke->delayed_start / (double) GST_SECOND >=
                ke->default_spu_duration) {
              if (G_UNLIKELY (gst_kate_enc_flush_waiting (ke,
                          seg.start) != GST_FLOW_OK)) {
                GST_WARNING_OBJECT (ke, "Failed to encode delayed packet");
                /* continue with new segment handling anyway */
              }
            }

            GST_LOG_OBJECT (ke, "ts %f, last %f (min %f)", t,
                ke->last_timestamp / (double) GST_SECOND,
                ke->keepalive_min_time);
            if (ke->keepalive_min_time > 0.0f
                && t - ke->last_timestamp / (double) GST_SECOND >=
                ke->keepalive_min_time) {
              /* we only generate a keepalive if there is no SPU waiting, as it would
                 mean out of sequence start times - and granulepos */
              if (!ke->delayed_spu) {
                gst_kate_enc_generate_keepalive (ke, seg.start);
              }
            }
          }
        }
      }
      if (event)
        ret = gst_pad_push_event (ke->srcpad, event);
      else
        ret = TRUE;
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
      GST_LOG_OBJECT (ke, "Got custom downstream event");
      /* adapted from the dvdsubdec element */
      structure = gst_event_get_structure (event);
      if (structure != NULL
          && gst_structure_has_name (structure, "application/x-gst-dvd")) {
        if (ke->initialized) {
          GST_LOG_OBJECT (ke, "ensuring all headers are in");
          if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
            GST_WARNING_OBJECT (ke, "Failed to flush headers");
          } else {
            const gchar *event_name =
                gst_structure_get_string (structure, "event");
            if (event_name) {
              if (!strcmp (event_name, "dvd-spu-clut-change")) {
                gchar name[16];
                int idx;
                gboolean found;
                gint value;
                GST_INFO_OBJECT (ke, "New CLUT received");
                for (idx = 0; idx < 16; ++idx) {
                  g_snprintf (name, sizeof (name), "clut%02d", idx);
                  found = gst_structure_get_int (structure, name, &value);
                  if (found) {
                    ke->spu_clut[idx] = value;
                  } else {
                    GST_WARNING_OBJECT (ke,
                        "DVD CLUT event did not contain %s field", name);
                  }
                }
              } else if (!strcmp (event_name, "dvd-lang-codes")) {
                /* we can't know which stream corresponds to us */
              }
            } else {
              GST_WARNING_OBJECT (ke, "custom downstream event with no name");
            }
          }
        }
      }
      ret = gst_pad_push_event (ke->srcpad, event);
      break;

    case GST_EVENT_TAG:
      GST_LOG_OBJECT (ke, "Got tag event");
      if (ke->tags) {
        GstTagList *list;

        gst_event_parse_tag (event, &list);
        gst_tag_list_insert (ke->tags, list,
            gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (ke)));
      } else {
        g_assert_not_reached ();
      }
      ret = gst_pad_event_default (pad, parent, event);
      break;

    case GST_EVENT_EOS:
      GST_INFO_OBJECT (ke, "Got EOS event");
      if (ke->initialized) {
        GST_LOG_OBJECT (ke, "ensuring all headers are in");
        if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
          GST_WARNING_OBJECT (ke, "Failed to flush headers");
        } else {
          kate_packet kp;
          int ret;
          GstClockTime delayed_end =
              ke->delayed_start + ke->default_spu_duration * GST_SECOND;

          if (G_UNLIKELY (gst_kate_enc_flush_waiting (ke,
                      delayed_end) != GST_FLOW_OK)) {
            GST_WARNING_OBJECT (ke, "Failed to encode delayed packet");
            /* continue with EOS handling anyway */
          }

          ret = kate_encode_finish (&ke->k, -1, &kp);
          if (ret < 0) {
            GST_WARNING_OBJECT (ke, "Failed to encode EOS packet: %s",
                gst_kate_util_get_error_message (ret));
          } else {
            kate_int64_t granpos = kate_encode_get_granule (&ke->k);
            GST_LOG_OBJECT (ke, "EOS packet encoded");
            if (gst_kate_enc_push_and_free_kate_packet (ke, &kp, granpos,
                    ke->latest_end_time, 0, FALSE)) {
              GST_WARNING_OBJECT (ke, "Failed to push EOS packet");
            }
          }
        }
      }
      ret = gst_pad_event_default (pad, parent, event);
      break;

    default:
      GST_LOG_OBJECT (ke, "Got unhandled event");
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
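For reference, a pad event function like this one is normally installed on the element's sink pad when the element is initialized. A minimal GStreamer 1.0 sketch (the ke->sinkpad field is an assumption here):

/* Sketch: install the event handler on the sink pad at init time. */
gst_pad_set_event_function (ke->sinkpad,
    GST_DEBUG_FUNCPTR (gst_kate_enc_sink_event));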
Code Example #18
File: simple-call.c Project: ChinnaSuhas/ossbuild
static gboolean
async_bus_cb (GstBus *bus, GstMessage *message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE(message))
  {
    case GST_MESSAGE_ERROR:
      {
        GError *error = NULL;
        gchar *debug_str = NULL;

        gst_message_parse_error (message, &error, &debug_str);
        g_error ("Got gst message: %s %s", error->message, debug_str);
      }
      break;
    case GST_MESSAGE_WARNING:
      {
        GError *error = NULL;
        gchar *debug_str = NULL;

        gst_message_parse_warning (message, &error, &debug_str);
        g_warning ("Got gst message: %s %s", error->message, debug_str);
      }
      break;
    case GST_MESSAGE_ELEMENT:
      {
        const GstStructure *s = gst_message_get_structure (message);

        if (gst_structure_has_name (s, "farsight-error"))
        {
          gint error;
          const gchar *error_msg = gst_structure_get_string (s, "error-msg");
          const gchar *debug_msg = gst_structure_get_string (s, "debug-msg");

          g_assert (gst_structure_get_enum (s, "error-no", FS_TYPE_ERROR,
                  &error));

          if (FS_ERROR_IS_FATAL (error))
            g_error ("Farsight fatal error: %d %s %s", error, error_msg,
                debug_msg);
          else
            g_warning ("Farsight non-fatal error: %d %s %s", error, error_msg,
                debug_msg);
        }
        else if (gst_structure_has_name (s, "farsight-new-local-candidate"))
        {
          const GValue *val = gst_structure_get_value (s, "candidate");
          FsCandidate *cand = NULL;

          g_assert (val);
          cand = g_value_get_boxed (val);

          g_print ("New candidate: %s %d\n", cand->ip, cand->port);
        }
        else if (gst_structure_has_name (s,
                "farsight-local-candidates-prepared"))
        {
          g_print ("Local candidates prepared\n");
        }
        else if (gst_structure_has_name (s, "farsight-recv-codecs-changed"))
        {
          const GValue *val = gst_structure_get_value (s, "codecs");
          GList *codecs = NULL;

          g_assert (val);
          codecs = g_value_get_boxed (val);

          g_print ("Recv codecs changed:\n");
          for (; codecs; codecs = g_list_next (codecs))
          {
            FsCodec *codec = codecs->data;
            gchar *tmp = fs_codec_to_string (codec);
            g_print ("%s\n", tmp);
            g_free (tmp);
          }
        }
        else if (gst_structure_has_name (s, "farsight-send-codec-changed"))
        {
          const GValue *val = gst_structure_get_value (s, "codec");
          FsCodec *codec = NULL;
          gchar *tmp;
          g_assert (val);
          codec = g_value_get_boxed (val);
          tmp = fs_codec_to_string (codec);

          g_print ("Send codec changed: %s\n", tmp);
          g_free (tmp);
        }
      }
      break;
    default:
      break;
  }

  return TRUE;
}
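Since async_bus_cb returns TRUE to stay installed, it is meant to be registered as an asynchronous bus watch on the main loop's context. A minimal sketch (the pipeline variable is assumed):

/* Sketch: register the callback as an asynchronous bus watch. */
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, async_bus_cb, NULL);
gst_object_unref (bus);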
Code Example #19
File: rb-player-gst.c Project: wangd/rhythmbox
static gboolean
bus_cb (GstBus *bus, GstMessage *message, RBPlayerGst *mp)
{
	const GstStructure *structure;
	g_return_val_if_fail (mp != NULL, FALSE);

	switch (GST_MESSAGE_TYPE (message)) {
	case GST_MESSAGE_ERROR: {
		char *debug;
		GError *error = NULL;
		GError *sig_error = NULL;
		int code;
		gboolean emit = TRUE;

		gst_message_parse_error (message, &error, &debug);

		/* If we've already got an error, ignore 'internal data flow error'
		 * type messages, as they're too generic to be helpful.
		 */
		if (mp->priv->emitted_error &&
		    error->domain == GST_STREAM_ERROR &&
		    error->code == GST_STREAM_ERROR_FAILED) {
			rb_debug ("Ignoring generic error \"%s\"", error->message);
			emit = FALSE;
		}

		code = rb_gst_error_get_error_code (error);

		if (emit) {
			if (message_from_sink (mp->priv->audio_sink, message)) {
				rb_debug ("got error from sink: %s (%s)", error->message, debug);
				/* Translators: the parameter here is an error message */
				g_set_error (&sig_error,
					     RB_PLAYER_ERROR,
					     code,
					     _("Failed to open output device: %s"),
					     error->message);
			} else {
				rb_debug ("got error from stream: %s (%s)", error->message, debug);
				g_set_error (&sig_error,
					     RB_PLAYER_ERROR,
					     code,
					     "%s",
					     error->message);
			}
			state_change_finished (mp, sig_error);
			mp->priv->emitted_error = TRUE;
			_rb_player_emit_error (RB_PLAYER (mp), mp->priv->stream_data, sig_error);
		}

		/* close if not already closing */
		if (mp->priv->uri != NULL)
			rb_player_close (RB_PLAYER (mp), NULL, NULL);

		g_error_free (error);
		g_free (debug);
		break;
	}

	case GST_MESSAGE_EOS:
		_rb_player_emit_eos (RB_PLAYER (mp), mp->priv->stream_data, FALSE);
		break;

	case GST_MESSAGE_STATE_CHANGED:
		{
			GstState oldstate;
			GstState newstate;
			GstState pending;
			gst_message_parse_state_changed (message, &oldstate, &newstate, &pending);
			if (GST_MESSAGE_SRC (message) == GST_OBJECT (mp->priv->playbin)) {
				rb_debug ("playbin reached state %s", gst_element_state_get_name (newstate));
				if (pending == GST_STATE_VOID_PENDING) {
					state_change_finished (mp, NULL);
				}
			}
			break;
		}

	case GST_MESSAGE_TAG: {
		GstTagList *tags;
		gst_message_parse_tag (message, &tags);

		if (mp->priv->stream_change_pending || mp->priv->playbin_stream_changing) {
			mp->priv->stream_tags = g_list_append (mp->priv->stream_tags, tags);
		} else {
			gst_tag_list_foreach (tags, (GstTagForeachFunc) process_tag, mp);
			gst_tag_list_free (tags);
		}
		break;
	}


	case GST_MESSAGE_BUFFERING: {
		gint progress;

		structure = gst_message_get_structure (message);
		if (!gst_structure_get_int (structure, "buffer-percent", &progress)) {
			g_warning ("Could not get value from BUFFERING message");
			break;
		}
		if (progress >= 100) {
			mp->priv->buffering = FALSE;
			if (mp->priv->playing) {
				rb_debug ("buffering done, setting pipeline back to PLAYING");
				gst_element_set_state (mp->priv->playbin, GST_STATE_PLAYING);
			} else {
				rb_debug ("buffering done, leaving pipeline PAUSED");
			}
		} else if (mp->priv->buffering == FALSE && mp->priv->playing) {
			GstState cur_state;

			gst_element_get_state (mp->priv->playbin, &cur_state, NULL, 0);
			if (cur_state == GST_STATE_PLAYING) {
				rb_debug ("buffering - temporarily pausing playback");
				gst_element_set_state (mp->priv->playbin, GST_STATE_PAUSED);
			} else {
				rb_debug ("buffering - during preroll; doing nothing");
			}
			mp->priv->buffering = TRUE;
		}

		_rb_player_emit_buffering (RB_PLAYER (mp), mp->priv->stream_data, progress);
		break;
	}

	case GST_MESSAGE_APPLICATION:
		structure = gst_message_get_structure (message);
		_rb_player_emit_event (RB_PLAYER (mp), mp->priv->stream_data, gst_structure_get_name (structure), NULL);
		break;

	case GST_MESSAGE_ELEMENT:
		structure = gst_message_get_structure (message);
		if (gst_is_missing_plugin_message (message)) {
			handle_missing_plugin_message (mp, message);
		} else if (mp->priv->playbin_stream_changing &&
			   gst_structure_has_name (structure, "playbin2-stream-changed")) {
			rb_debug ("got playbin2-stream-changed message");
			mp->priv->playbin_stream_changing = FALSE;
			emit_playing_stream_and_tags (mp, TRUE);
		} else if (gst_structure_has_name (structure, "redirect")) {
			const char *uri = gst_structure_get_string (structure, "new-location");
			_rb_player_emit_redirect (RB_PLAYER (mp), mp->priv->stream_data, uri);
		}
		break;

	default:
		break;
	}

	/* emit message signals too, so plugins can process messages */
	gst_bus_async_signal_func (bus, message, NULL);

	return TRUE;
}
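The closing gst_bus_async_signal_func() call re-emits each message as a GstBus "message" signal, so plugins can subscribe with detailed signal names without installing a watch of their own. A hedged sketch (plugin_message_cb and plugin are hypothetical names, and bus would be obtained from the player's pipeline):

/* Sketch: a plugin listening for element messages re-emitted as signals. */
g_signal_connect (bus, "message::element",
    G_CALLBACK (plugin_message_cb), plugin);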
Code Example #20
File: gsttheoraenc.c Project: ChinnaSuhas/ossbuild
static gboolean
theora_enc_sink_event (GstPad * pad, GstEvent * event)
{
  GstTheoraEnc *enc;
  ogg_packet op;
  gboolean res;

  enc = GST_THEORA_ENC (GST_PAD_PARENT (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate, applied_rate;
      GstFormat format;
      gint64 start, stop, time;

      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &time);

      gst_segment_set_newsegment_full (&enc->segment, update, rate,
          applied_rate, format, start, stop, time);

      res = gst_pad_push_event (enc->srcpad, event);
      break;
    }
    case GST_EVENT_EOS:
      if (enc->initialised) {
        /* push last packet with eos flag, should not be called */
        while (th_encode_packetout (enc->encoder, 1, &op)) {
          GstClockTime next_time =
              th_granule_time (enc->encoder, op.granulepos) * GST_SECOND;

          theora_push_packet (enc, &op, GST_CLOCK_TIME_NONE, enc->next_ts,
              next_time - enc->next_ts);
          enc->next_ts = next_time;
        }
      }
      if (enc->initialised && enc->multipass_cache_fd
          && enc->multipass_mode == MULTIPASS_MODE_FIRST_PASS)
        theora_enc_write_multipass_cache (enc, TRUE, TRUE);

      theora_enc_clear_multipass_cache (enc);

      res = gst_pad_push_event (enc->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      gst_segment_init (&enc->segment, GST_FORMAT_UNDEFINED);
      res = gst_pad_push_event (enc->srcpad, event);
      break;
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      const GstStructure *s;

      s = gst_event_get_structure (event);

      if (gst_structure_has_name (s, "GstForceKeyUnit"))
        theora_enc_force_keyframe (enc);
      res = gst_pad_push_event (enc->srcpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (enc->srcpad, event);
      break;
  }
  return res;
}
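The GST_EVENT_CUSTOM_DOWNSTREAM branch reacts to a "GstForceKeyUnit" structure, which an application can inject upstream of the encoder. A 0.10-era sketch matching this snippet's API (enc_element stands in for the encoder instance; only the structure name is checked above, so an empty structure suffices; later GStreamer versions provide gst_video_event_new_downstream_force_key_unit() for this):

/* Sketch: ask the encoder for a keyframe via a custom downstream event. */
GstEvent *force_key = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
    gst_structure_new ("GstForceKeyUnit", NULL));
gst_element_send_event (enc_element, force_key);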
Code Example #21
static GstBusSyncReply
sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *st;
  const GValue *image;
  GstBuffer *buf = NULL;
  guint8 *data_buf = NULL;
  gchar *caps_string;
  guint size = 0;
  gchar *preview_filename = NULL;
  FILE *f = NULL;
  size_t written;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:{
      st = gst_message_get_structure (message);
      if (st) {
        if (gst_structure_has_name (st, "prepare-xwindow-id")) {
          if (!no_xwindow && window) {
            gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC
                    (message)), window);
            gst_message_unref (message);
            message = NULL;
            return GST_BUS_DROP;
          }
        } else if (gst_structure_has_name (st, "preview-image")) {
          CaptureTiming *timing;

          GST_DEBUG ("preview-image");

          timing = (CaptureTiming *) g_list_first (capture_times)->data;
          timing->got_preview = gst_util_get_timestamp ();

          {
            /* set up probe to check when the viewfinder gets data */
            GstPad *pad = gst_element_get_static_pad (viewfinder_sink, "sink");

            viewfinder_probe_id = gst_pad_add_buffer_probe (pad,
                (GCallback) viewfinder_get_timestamp_probe, NULL);

            gst_object_unref (pad);
          }

          /* extract preview-image from msg */
          image = gst_structure_get_value (st, "buffer");
          if (image) {
            buf = gst_value_get_buffer (image);
            data_buf = GST_BUFFER_DATA (buf);
            size = GST_BUFFER_SIZE (buf);
            preview_filename = g_strdup_printf ("test_vga.rgb");
            caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
            g_free (caps_string);
            f = g_fopen (preview_filename, "w");
            if (f) {
              written = fwrite (data_buf, size, 1, f);
              if (!written) {
                g_print ("error writing file\n");
              }
              fclose (f);
            } else {
              g_print ("error opening file for raw image writing\n");
            }
            g_free (preview_filename);
          }
        }
      }
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
      if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) {
        GstState newstate;

        gst_message_parse_state_changed (message, NULL, &newstate, NULL);
        if (newstate == GST_STATE_PLAYING) {
          startup_time = gst_util_get_timestamp ();
        }
      }
      break;
    default:
      /* unhandled message */
      break;
  }
  return GST_BUS_PASS;
}
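A sync handler like the one above runs on the streaming thread, which is why it can hand the X window id to the sink before the first frame is rendered. It is installed with gst_bus_set_sync_handler(); a 0.10-era sketch matching this snippet (GStreamer 1.0 adds a trailing GDestroyNotify argument):

/* Sketch: install the synchronous bus handler (0.10 signature). */
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (camerabin));
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) sync_bus_callback, NULL);
gst_object_unref (bus);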
Code Example #22
static gboolean gst_avdtp_sink_configure(GstAvdtpSink *self,
			GstCaps *caps)
{
	gchar buf[BT_SUGGESTED_BUFFER_SIZE];
	struct bt_open_req *open_req = (void *) buf;
	struct bt_open_rsp *open_rsp = (void *) buf;
	struct bt_set_configuration_req *req = (void *) buf;
	struct bt_set_configuration_rsp *rsp = (void *) buf;
	gboolean ret;
	GIOError io_error;
	gchar *temp;
	GstStructure *structure;
	codec_capabilities_t *codec = NULL;

	temp = gst_caps_to_string(caps);
	GST_DEBUG_OBJECT(self, "configuring device with caps: %s", temp);
	g_free(temp);

	structure = gst_caps_get_structure(caps, 0);

	if (gst_structure_has_name(structure, "audio/x-sbc"))
		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_SBC_SINK);
	else if (gst_structure_has_name(structure, "audio/mpeg"))
		codec = (void *) gst_avdtp_find_caps(self, BT_A2DP_MPEG12_SINK);

	if (codec == NULL) {
		GST_ERROR_OBJECT(self, "Couldn't parse caps "
				"to packet configuration");
		return FALSE;
	}

	memset(req, 0, BT_SUGGESTED_BUFFER_SIZE);
	open_req->h.type = BT_REQUEST;
	open_req->h.name = BT_OPEN;
	open_req->h.length = sizeof(*open_req);

	strncpy(open_req->destination, self->device, 18);
	open_req->seid = codec->seid;
	open_req->lock = BT_WRITE_LOCK;

	io_error = gst_avdtp_sink_audioservice_send(self, &open_req->h);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error ocurred while sending "
					"open packet");
		return FALSE;
	}

	open_rsp->h.length = sizeof(*open_rsp);
	io_error = gst_avdtp_sink_audioservice_expect(self,
			&open_rsp->h, BT_OPEN);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error while receiving device "
					"confirmation");
		return FALSE;
	}

	memset(req, 0, sizeof(buf));
	req->h.type = BT_REQUEST;
	req->h.name = BT_SET_CONFIGURATION;
	req->h.length = sizeof(*req);
	memcpy(&req->codec, codec, sizeof(req->codec));

	if (codec->type == BT_A2DP_SBC_SINK)
		ret = gst_avdtp_sink_init_sbc_pkt_conf(self, caps,
				(void *) &req->codec);
	else
		ret = gst_avdtp_sink_init_mp3_pkt_conf(self, caps,
				(void *) &req->codec);

	if (!ret) {
		GST_ERROR_OBJECT(self, "Couldn't parse caps "
				"to packet configuration");
		return FALSE;
	}

	req->h.length += req->codec.length - sizeof(req->codec);
	io_error = gst_avdtp_sink_audioservice_send(self, &req->h);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error ocurred while sending "
					"configurarion packet");
		return FALSE;
	}

	rsp->h.length = sizeof(*rsp);
	io_error = gst_avdtp_sink_audioservice_expect(self,
			&rsp->h, BT_SET_CONFIGURATION);
	if (io_error != G_IO_ERROR_NONE) {
		GST_ERROR_OBJECT(self, "Error while receiving device "
					"confirmation");
		return FALSE;
	}

	self->data->link_mtu = rsp->link_mtu;

	return TRUE;
}
Code Example #23
static gboolean
gst_amc_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstAmcAudioDec *self;
  GstStructure *s;
  GstAmcFormat *format;
  const gchar *mime;
  gboolean is_format_change = FALSE;
  gboolean needs_disable = FALSE;
  gchar *format_string;
  gint rate, channels;
  GError *err = NULL;

  self = GST_AMC_AUDIO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps);

  /* Check if the caps change is a real format change or if only irrelevant
   * parts of the caps have changed or nothing at all.
   */
  is_format_change |= (!self->input_caps
      || !gst_caps_is_equal (self->input_caps, caps));

  needs_disable = self->started;

  /* If the component is already started and a real format change happens
   * we have to restart the component. If no real format change
   * happened we can just exit here.
   */
  if (needs_disable && !is_format_change) {
    /* Framerate or something minor changed */
    self->input_caps_changed = TRUE;
    GST_DEBUG_OBJECT (self,
        "Already running and caps did not change the format");
    return TRUE;
  }

  if (needs_disable && is_format_change) {
    gst_amc_audio_dec_drain (self);
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    gst_amc_audio_dec_stop (GST_AUDIO_DECODER (self));
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    gst_amc_audio_dec_close (GST_AUDIO_DECODER (self));
    if (!gst_amc_audio_dec_open (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to open codec again");
      return FALSE;
    }

    if (!gst_amc_audio_dec_start (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to start codec again");
    }
  }
  /* srcpad task is not running at this point */

  mime = caps_to_mime (caps);
  if (!mime) {
    GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
    return FALSE;
  }

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "rate", &rate) ||
      !gst_structure_get_int (s, "channels", &channels)) {
    GST_ERROR_OBJECT (self, "Failed to get rate/channels");
    return FALSE;
  }

  format = gst_amc_format_new_audio (mime, rate, channels, &err);
  if (!format) {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    return FALSE;
  }

  /* FIXME: These buffers needs to be valid until the codec is stopped again */
  g_list_foreach (self->codec_datas, (GFunc) gst_buffer_unref, NULL);
  g_list_free (self->codec_datas);
  self->codec_datas = NULL;
  if (gst_structure_has_field (s, "codec_data")) {
    const GValue *h = gst_structure_get_value (s, "codec_data");
    GstBuffer *codec_data = gst_value_get_buffer (h);
    GstMapInfo minfo;
    guint8 *data;

    gst_buffer_map (codec_data, &minfo, GST_MAP_READ);
    data = g_memdup (minfo.data, minfo.size);
    self->codec_datas = g_list_prepend (self->codec_datas, data);
    gst_amc_format_set_buffer (format, "csd-0", data, minfo.size, &err);
    if (err)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    gst_buffer_unmap (codec_data, &minfo);
  } else if (gst_structure_has_field (s, "streamheader")) {
    const GValue *sh = gst_structure_get_value (s, "streamheader");
    gint nsheaders = gst_value_array_get_size (sh);
    GstBuffer *buf;
    const GValue *h;
    gint i, j;
    gchar *fname;
    GstMapInfo minfo;
    guint8 *data;

    for (i = 0, j = 0; i < nsheaders; i++) {
      h = gst_value_array_get_value (sh, i);
      buf = gst_value_get_buffer (h);

      if (strcmp (mime, "audio/vorbis") == 0) {
        guint8 header_type;

        gst_buffer_extract (buf, 0, &header_type, 1);

        /* Only use the identification and setup packets */
        if (header_type != 0x01 && header_type != 0x05)
          continue;
      }

      fname = g_strdup_printf ("csd-%d", j);
      gst_buffer_map (buf, &minfo, GST_MAP_READ);
      data = g_memdup (minfo.data, minfo.size);
      self->codec_datas = g_list_prepend (self->codec_datas, data);
      gst_amc_format_set_buffer (format, fname, data, minfo.size, &err);
      if (err)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      gst_buffer_unmap (buf, &minfo);
      g_free (fname);
      j++;
    }
  }

  format_string = gst_amc_format_to_string (format, &err);
  if (err)
    GST_ELEMENT_WARNING_FROM_ERROR (self, err);
  GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
      GST_STR_NULL (format_string));
  g_free (format_string);

  if (!gst_amc_codec_configure (self->codec, format, NULL, 0, &err)) {
    GST_ERROR_OBJECT (self, "Failed to configure codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    return FALSE;
  }

  gst_amc_format_free (format);

  if (!gst_amc_codec_start (self->codec, &err)) {
    GST_ERROR_OBJECT (self, "Failed to start codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    return FALSE;
  }

  self->spf = -1;
  /* TODO: Implement for other codecs too */
  if (gst_structure_has_name (s, "audio/mpeg")) {
    gint mpegversion = -1;

    gst_structure_get_int (s, "mpegversion", &mpegversion);
    if (mpegversion == 1) {
      gint layer = -1, mpegaudioversion = -1;

      gst_structure_get_int (s, "layer", &layer);
      gst_structure_get_int (s, "mpegaudioversion", &mpegaudioversion);
      if (layer == 1)
        self->spf = 384;
      else if (layer == 2)
        self->spf = 1152;
      else if (layer == 3 && mpegaudioversion != -1)
        self->spf = (mpegaudioversion == 1 ? 1152 : 576);
    }
  }

  self->started = TRUE;
  self->input_caps_changed = TRUE;

  /* Start the srcpad loop again */
  self->flushing = FALSE;
  self->downstream_flow_ret = GST_FLOW_OK;
  gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self),
      (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL);

  return TRUE;
}
Code Example #24
static gboolean
gst_dvd_spu_subpic_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDVDSpu *dvdspu = (GstDVDSpu *) parent;
  gboolean res = TRUE;

  /* Some events on the subpicture sink pad just get ignored, like 
   * FLUSH_START */
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      res = gst_dvd_spu_subpic_set_caps (pad, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
    {
      const GstStructure *structure = gst_event_get_structure (event);
      gboolean need_push;

      if (!gst_structure_has_name (structure, "application/x-gst-dvd")) {
        res = gst_pad_event_default (pad, parent, event);
        break;
      }

      DVD_SPU_LOCK (dvdspu);
      if (GST_EVENT_IS_SERIALIZED (event)) {
        SpuPacket *spu_packet = g_new0 (SpuPacket, 1);
        GST_DEBUG_OBJECT (dvdspu,
            "Enqueueing DVD event on subpicture pad for later");
        spu_packet->event = event;
        g_queue_push_tail (dvdspu->pending_spus, spu_packet);
      } else {
        gst_dvd_spu_handle_dvd_event (dvdspu, event);
      }

      /* If the handle_dvd_event generated a pending frame, we
       * need to synchronise with the video pad's stream lock and push it.
       * This requires some dancing to preserve locking order and handle
       * flushes correctly */
      need_push = (dvdspu->pending_frame != NULL);
      DVD_SPU_UNLOCK (dvdspu);
      if (need_push) {
        GstBuffer *to_push = NULL;
        gboolean flushing;

        GST_LOG_OBJECT (dvdspu, "Going for stream lock");
        GST_PAD_STREAM_LOCK (dvdspu->videosinkpad);
        GST_LOG_OBJECT (dvdspu, "Got stream lock");
        GST_OBJECT_LOCK (dvdspu->videosinkpad);
        flushing = GST_PAD_IS_FLUSHING (dvdspu->videosinkpad);
        GST_OBJECT_UNLOCK (dvdspu->videosinkpad);

        DVD_SPU_LOCK (dvdspu);
        if (dvdspu->pending_frame == NULL || flushing) {
          /* Got flushed while waiting for the stream lock */
          DVD_SPU_UNLOCK (dvdspu);
        } else {
          to_push = dvdspu->pending_frame;
          dvdspu->pending_frame = NULL;

          DVD_SPU_UNLOCK (dvdspu);
          gst_pad_push (dvdspu->srcpad, to_push);
        }
        GST_LOG_OBJECT (dvdspu, "Dropping stream lock");
        GST_PAD_STREAM_UNLOCK (dvdspu->videosinkpad);
      }

      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment seg;

      gst_event_copy_segment (event, &seg);

      /* Only print updates if they have an end time (don't print start_time
       * updates) */
      GST_DEBUG_OBJECT (dvdspu, "subpic pad Segment: %" GST_SEGMENT_FORMAT,
          &seg);

      DVD_SPU_LOCK (dvdspu);

      dvdspu->subp_seg = seg;
      GST_LOG_OBJECT (dvdspu, "Subpicture segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_GAP:
    {
      GstClockTime timestamp, duration;
      gst_event_parse_gap (event, &timestamp, &duration);
      if (GST_CLOCK_TIME_IS_VALID (duration))
        timestamp += duration;

      DVD_SPU_LOCK (dvdspu);
      dvdspu->subp_seg.position = timestamp;
      GST_LOG_OBJECT (dvdspu, "Received GAP. Segment now: %" GST_SEGMENT_FORMAT,
          &dvdspu->subp_seg);
      DVD_SPU_UNLOCK (dvdspu);

      gst_event_unref (event);
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_event_unref (event);
      goto done;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (dvdspu, "Have flush-stop event on SPU pad");
      DVD_SPU_LOCK (dvdspu);
      gst_segment_init (&dvdspu->subp_seg, GST_FORMAT_UNDEFINED);
      gst_dvd_spu_flush_spu_info (dvdspu, TRUE);
      DVD_SPU_UNLOCK (dvdspu);

      /* We don't forward flushes on the spu pad */
      gst_event_unref (event);
      goto done;
    case GST_EVENT_EOS:
      /* drop EOS on the subtitle pad, it means there are no more subtitles,
       * video might still continue, though */
      gst_event_unref (event);
      goto done;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

done:

  return res;
}
Code Example #25
File: pipeline.c Project: RomainNaour/openvivoe
/**
 * \brief This function adds the RTP element to the pipeline for a Service Provider
 * \param pipeline the pipeline associated to this SP
 * \param bus the bus of the channel
 * \param bus_watch_id an id watch on the bus
 * \param input last element added in the pipeline, to which the new elements should be linked
 * \param video_info a "fake" entry to the VFT into which we will save all elements we can retrieve from the video caps of the stream (via fill_entry() )
 * \param stream_data the data associated to this SP's pipeline
 * \param caps the caps of the input stream if this is a redirection, NULL otherwise
 * \param channel_entry_index the channel's index of this SP: to build the multicast address
 * \return the last element added in the pipeline (the RTP payloader if everything goes ok)
 */
static GstElement* addRTP( 	GstElement 						*pipeline, 		GstBus *bus,
							guint 							bus_watch_id, 	GstElement* input,
							struct videoFormatTable_entry 	*video_info,	gpointer stream_datas,
							GstCaps 						*caps){

	/*Create element that will be add to the pipeline */
	GstElement *rtp = NULL;
	GstElement *parser;
	GstStructure *video_caps;

	g_debug("addRTP: add RTP payloader to Service Provider's pipeline");

	if (caps == NULL){

		/* Media stream Type detection */
		video_caps = type_detection(GST_BIN(pipeline), input, NULL);
		if ( video_caps == NULL )
			return NULL;

		/* Fill the MIB a first Time */
		fill_entry(video_caps, video_info, stream_datas);

 	}else{

		GstElement *appsrc = gst_bin_get_by_name( GST_BIN ( pipeline ) , APPSRC_NAME ) ;
		g_object_set ( appsrc , "caps" ,  caps , NULL) ;
		video_caps = gst_caps_get_structure( caps, 0 );
		/* This is a redirection, fill the MIB once for all */
		fill_entry(video_caps, video_info, stream_datas);

	}

	/*
	 * Handle the ROI
	 */
	handle_roi ( pipeline ,  video_info , NULL ) ;

 	/* in case RAW video type has been detected */
	if ( gst_structure_has_name( video_caps, "video/x-raw") ){

		g_debug("%s video detected: add %s to SP pipeline", RAW_NAME , RTPRAWPAY_NAME);

		/* For Raw video */
		rtp 	= gst_element_factory_make_log ("rtpvrawpay", RTPRAWPAY_NAME);
		if ( !rtp )
			return NULL;

	}
	/* in case MPEG4 video type has been detected */
	else if  (gst_structure_has_name( video_caps, "video/mpeg")){

		/*
		 * For MPEG-4 videos we need to add a parser before the RTP payloader. However, if caps are not NULL, i.e. if this pipeline is a Service Provider's pipeline
		 * used for a redirection, we cannot add the mpeg4 parser here because the parser needs to be in the same pipeline as the MPEG-4 encoder, otherwise the typefind
		 * cannot be performed. So if caps are not NULL, it means that the parser has already been added in the SU's pipeline of the Service User's part of the redirection.
		 * We do not have to add it again.
		 */
		if ( caps == NULL ){

			parser 	= gst_element_factory_make_log ("mpeg4videoparse", MPEG4PARSER_NAME );
			if ( !parser )
				return NULL;

			g_debug("%s video detected: add %s to pipeline", MPEG4_NAME , MPEG4PARSER_NAME);

			gst_bin_add(GST_BIN(pipeline),parser);

			if ( !gst_element_link_log(input, parser))
				return NULL;

			input = parser;

		}

		g_debug("%s video detected: add %s to SP pipeline", MPEG4_NAME , RTPMP4PAY_NAME );

		rtp 	= gst_element_factory_make_log ("rtpmp4vpay", RTPMP4PAY_NAME );
		if ( !rtp )
			return NULL;

	}

	/* in case J2K video type has been detected */
	else if  ( g_strv_contains ( J2K_STR_NAMES, gst_structure_get_name(video_caps))){

		/*
		 * For J2K video our RTP payloader can only accept the image/x-jpc input video media type. However, not all encoders have these caps on their src pads.
		 * So we first link the output of the encoder to a capsfilter with the image/x-jpc media type. If the encoder and the capsfilter cannot negotiate caps, then the encoder
		 * cannot be linked to the RTP payloader, so we stop there. As an example: avenc_jpeg2000 only has image/x-j2c as a media type, but openjpegenc has image/x-j2c, image/x-jpc
		 * and image/jp2.
		 */

		GstElement *capsfilter = gst_element_factory_make_log("capsfilter", CAPSFITER_J2K_NAME ) ;
		GstCaps *caps_jpeg2000 = get_rtpj2kpay_allowed_caps();

		/* Put the source in the pipeline */
		g_object_set (capsfilter, "caps",caps_jpeg2000 , NULL);

		g_debug("%s video detected: add %s to SP pipeline", J2K_NAME , CAPSFITER_J2K_NAME );

		gst_bin_add(GST_BIN(pipeline),capsfilter);

		if ( !gst_element_link_log(input,capsfilter )){
			g_critical("JPEG2000 format can only be x-jpc");
			return NULL;
		}

		input = capsfilter;

		g_debug("%s video detected: add %s to SP pipeline", J2K_NAME , RTPJ2KPAY_NAME  );

		/* For J2K video */
		rtp 	= gst_element_factory_make_log ("rtpj2kpay", RTPJ2KPAY_NAME );
		if ( !rtp )
			return NULL;
	}
 	/* in case the video type detected is unknown */
	else
	{
		g_critical("unknow type of video stream");
		return NULL;
	}

	/* add rtp to pipeline */
	gst_bin_add(GST_BIN (pipeline), rtp);

	if (caps == NULL ){

		/* Filters out non VIVOE videos, and link input to RTP if video has a valid format*/
		video_caps = type_detection(GST_BIN(pipeline), input,NULL);
		if (!filter_VIVOE(video_caps,input, rtp))
			return NULL;

		/* Now that we have added the RTP payloader to the pipeline, we can get the new caps of the video stream*/
		/* Media stream Type detection */
		video_caps = type_detection(GST_BIN(pipeline), rtp, NULL);

		if ( video_caps == NULL)
			return NULL;

		/*Fill the MIB a second time after creating payload*/
		fill_entry(video_caps, video_info, stream_datas);

	}else{

		/* link input to rtp payloader */
		if ( !gst_element_link_log(input, rtp))
		   return NULL;

		input = rtp ;

		video_caps = type_detection(GST_BIN(pipeline), input ,NULL);
		if ( !video_caps )
			return NULL;

		/*Fill the MIB a second time after creating payload, this is needed to get rtp_data needed to build SDP files */
		fill_entry(video_caps, video_info, stream_datas);

	}

	/* Finally return*/
	return rtp;
}
Code Example #26
File: pipeline.cpp Project: Igalia/aura
void Pipeline::handleBusMessage(GstMessage *message)
{
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ELEMENT:
        {
            // The only message we are handling here is the
            // prepare-xwindow-id one
            if (gst_structure_has_name (message->structure,
                                        "prepare-xwindow-id")) {
                gst_x_overlay_set_window_handle(GST_X_OVERLAY(viewfinder),
                                                windowId);
            }
            break;
        }
    case GST_MESSAGE_ERROR:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_error(message, &gerror, &debug);
            qCritical() << "Debug" << debug << " Error " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_WARNING:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_warning(message, &gerror, &debug);
            qWarning() << "Debug" << debug << " Warning " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_INFO:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_info(message, &gerror, &debug);
            qDebug() << "Debug" << debug << " Info " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_STATE_CHANGED:
        {
            if (GST_ELEMENT(GST_MESSAGE_SRC(message)) == camerabin) {
                GstState oldstate, newstate, pending;
                gst_message_parse_state_changed(message, &oldstate, &newstate, &pending);
                qDebug() << Q_FUNC_INFO << gst_element_state_get_name(oldstate)
                         << "->" << gst_element_state_get_name(newstate) << "=>"
                         << gst_element_state_get_name(pending);

                GstStateChange stateTransition =
                    GST_STATE_TRANSITION(oldstate, newstate);

                switch (stateTransition) {
                case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
                    QMetaObject::invokeMethod(this, "pipelinePlaying", Qt::QueuedConnection);
                    break;
                default:
                    break;
                }
            }
            break;
        }

    default:
        break;
    }
}
Code Example #27
File: mpegpsmux.c Project: PeterXu/gst-mobile
static GstFlowReturn
mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
{
  /* Create a stream. Fill in codec-specific information */

  GstFlowReturn ret = GST_FLOW_ERROR;
  GstCaps *caps;
  GstStructure *s;
  gboolean is_video = FALSE;

  caps = gst_pad_get_current_caps (pad);
  if (caps == NULL) {
    GST_DEBUG_OBJECT (pad, "Sink pad caps were not set before pushing");
    return GST_FLOW_NOT_NEGOTIATED;
  }

  s = gst_caps_get_structure (caps, 0);
  g_return_val_if_fail (s != NULL, GST_FLOW_ERROR);

  if (gst_structure_has_name (s, "video/x-dirac")) {
    GST_DEBUG_OBJECT (pad, "Creating Dirac stream");
    ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_DIRAC);
    is_video = TRUE;
  } else if (gst_structure_has_name (s, "audio/x-ac3")) {
    GST_DEBUG_OBJECT (pad, "Creating AC3 stream");
    ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_AC3);
  } else if (gst_structure_has_name (s, "audio/x-dts")) {
    GST_DEBUG_OBJECT (pad, "Creating DTS stream");
    ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_DTS);
  } else if (gst_structure_has_name (s, "audio/x-lpcm")) {
    GST_DEBUG_OBJECT (pad, "Creating LPCM stream");
    ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_LPCM);
  } else if (gst_structure_has_name (s, "video/x-h264")) {
    const GValue *value;
    GST_DEBUG_OBJECT (pad, "Creating H264 stream");
    /* Codec data contains SPS/PPS which need to go in stream for valid ES */
    value = gst_structure_get_value (s, "codec_data");
    if (value) {
      ps_data->codec_data = gst_buffer_ref (gst_value_get_buffer (value));
      GST_DEBUG_OBJECT (pad, "%" G_GSIZE_FORMAT " bytes of codec data",
          gst_buffer_get_size (ps_data->codec_data));
      ps_data->prepare_func = mpegpsmux_prepare_h264;
    } else {
      ps_data->codec_data = NULL;
    }
    ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_H264);
    is_video = TRUE;
  } else if (gst_structure_has_name (s, "audio/mpeg")) {
    gint mpegversion;
    if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) {
      GST_ELEMENT_ERROR (pad, STREAM, FORMAT,
          ("Invalid data format presented"),
          ("Caps with type audio/mpeg did not have mpegversion"));
      goto beach;
    }

    switch (mpegversion) {
      case 1:
        GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 1 stream");
        ps_data->stream =
            psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_MPEG1);
        break;
      case 2:
        GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 2 stream");
        ps_data->stream =
            psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_MPEG2);
        break;
      case 4:
      {
        const GValue *value;
        /* Codec data contains SPS/PPS which need to go in stream for valid ES */
        GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 4 stream");
        value = gst_structure_get_value (s, "codec_data");
        if (value) {
          ps_data->codec_data = gst_buffer_ref (gst_value_get_buffer (value));
          GST_DEBUG_OBJECT (pad, "%" G_GSIZE_FORMAT " bytes of codec data",
              gst_buffer_get_size (ps_data->codec_data));
          ps_data->prepare_func = mpegpsmux_prepare_aac;
        } else {
          ps_data->codec_data = NULL;
        }
        ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_AAC);
        break;
      }
      default:
        GST_WARNING_OBJECT (pad, "unsupported mpegversion %d", mpegversion);
        goto beach;
    }
  } else if (gst_structure_has_name (s, "video/mpeg")) {
    gint mpegversion;
    if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) {
      GST_ELEMENT_ERROR (mux, STREAM, FORMAT,
          ("Invalid data format presented"),
          ("Caps with type video/mpeg did not have mpegversion"));
      goto beach;
    }

    if (mpegversion == 1) {
      GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 1 stream");
      ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG1);
    } else if (mpegversion == 2) {
      GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 2 stream");
      ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG2);
    } else {
      GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 4 stream");
      ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG4);
    }
    is_video = TRUE;
  }

  if (ps_data->stream != NULL) {
    ps_data->stream_id = ps_data->stream->stream_id;
    ps_data->stream_id_ext = ps_data->stream->stream_id_ext;
    GST_DEBUG_OBJECT (pad, "Stream created, stream_id=%04x, stream_id_ext=%04x",
        ps_data->stream_id, ps_data->stream_id_ext);

    gst_structure_get_int (s, "rate", &ps_data->stream->audio_sampling);
    gst_structure_get_int (s, "channels", &ps_data->stream->audio_channels);
    gst_structure_get_int (s, "bitrate", &ps_data->stream->audio_bitrate);

    ret = GST_FLOW_OK;

    if (is_video && mux->video_stream_id == 0) {
      mux->video_stream_id = ps_data->stream_id;
      GST_INFO_OBJECT (mux, "video pad stream_id 0x%02x", mux->video_stream_id);
    }
  }

beach:
  return ret;
}
Code Example #28
File: gstmfcdec.c Project: PeterXu/gst-mobile
static gboolean
gst_mfc_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state
      && gst_caps_can_intersect (self->input_state->caps, state->caps)) {
    GST_DEBUG_OBJECT (self, "Compatible caps");
    goto done;
  }

  s = gst_caps_get_structure (state->caps, 0);

  if (self->context) {
    mfc_dec_destroy (self->context);
    self->context = NULL;
  }
  self->initialized = FALSE;

  if (gst_structure_has_name (s, "video/x-h264")) {
    self->context = mfc_dec_create (CODEC_TYPE_H264);
    if (!self->context) {
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to initialize MFC decoder context"), (NULL));
      return FALSE;
    }
  } else if (gst_structure_has_name (s, "video/mpeg")) {
    gint mpegversion;

    if (!gst_structure_get_int (s, "mpegversion", &mpegversion))
      return FALSE;
    if (mpegversion != 1 && mpegversion != 2 && mpegversion != 4)
      return FALSE;

    if (mpegversion == 1 || mpegversion == 2) {
      self->context = mfc_dec_create (CODEC_TYPE_MPEG2);
    } else {
      self->context = mfc_dec_create (CODEC_TYPE_MPEG4);
    }

    if (!self->context) {
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to initialize MFC decoder context"), (NULL));
      return FALSE;
    }
  } else if (gst_structure_has_name (s, "video/x-h263")) {
    self->context = mfc_dec_create (CODEC_TYPE_H263);
    if (!self->context) {
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to initialize MFC decoder context"), (NULL));
      return FALSE;
    }
  } else {
    g_return_val_if_reached (FALSE);
  }

  if (mfc_dec_init_input (self->context, 1) < 0) {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize MFC decoder context input"), (NULL));
    return FALSE;
  }

  gst_buffer_replace (&self->codec_data, state->codec_data);

done:
  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
Code Example #29
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 1);
  s = gst_caps_get_structure (caps, 0);

  fail_unless (gst_structure_has_name (s, "video/x-raw"));

  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    if (fmt == GST_VIDEO_FORMAT_UNKNOWN)
      g_error ("Unknown raw format '%s' in pad template caps", fmt_str);
    formats_supported[(guint) fmt] = TRUE;
  }

  gst_caps_unref (caps);

  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      const gchar *fmt_str = gst_video_format_to_string ((GstVideoFormat) i);

      switch (i) {
        case GST_VIDEO_FORMAT_v210:
        case GST_VIDEO_FORMAT_v216:
        case GST_VIDEO_FORMAT_NV12:
        case GST_VIDEO_FORMAT_NV21:
        case GST_VIDEO_FORMAT_UYVP:
        case GST_VIDEO_FORMAT_A420:
        case GST_VIDEO_FORMAT_YUV9:
        case GST_VIDEO_FORMAT_YVU9:
        case GST_VIDEO_FORMAT_IYU1:
        case GST_VIDEO_FORMAT_r210:{
          static gboolean shown_fixme[100] = { FALSE, };

          if (!shown_fixme[i]) {
            GST_FIXME ("FIXME: add %s support to videoscale", fmt_str);
            shown_fixme[i] = TRUE;
          }
          break;
        }
        case GST_VIDEO_FORMAT_BGR16:
        case GST_VIDEO_FORMAT_BGR15:
        case GST_VIDEO_FORMAT_RGB8P:
        case GST_VIDEO_FORMAT_I420_10BE:
        case GST_VIDEO_FORMAT_I420_10LE:
        case GST_VIDEO_FORMAT_I422_10BE:
        case GST_VIDEO_FORMAT_I422_10LE:
          GST_LOG ("Ignoring lack of support for format %s", fmt_str);
          break;
        default:
          g_error ("videoconvert doesn't support format '%s'", fmt_str);
          break;
      }
    }
  }

  g_free (formats_supported);
}
Code Example #30
static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *structure;
  gint64 position, length;
  GstFormat format = GST_FORMAT_TIME;
  const GValue *x_value, *y_value;
  gint x, i, y;
  /* select msg */
  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT ||
      !gst_structure_has_name (gst_message_get_structure (message),
          "hand-gesture"))
    return GST_BUS_PASS;

  /* parse msg structure */
  structure = gst_message_get_structure (message);

  /* if PALM gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "palm") == 0) {
    /* media operation - closed palm to stop media play */
    gst_element_set_state (playbin, GST_STATE_PAUSED);
  }

  /* if FIST gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "fist") == 0) {
    /* print message type and structure name */
    g_print ("%s{{%s}}\n", gst_message_type_get_name (message->type),
        gst_structure_get_name (structure));
    /* print msg structure names&values */
    for (i = 0; i < gst_structure_n_fields (structure); i++) {
      const gchar *name = gst_structure_nth_field_name (structure, i);
      GType type = gst_structure_get_field_type (structure, name);
      const GValue *value = gst_structure_get_value (structure, name);
      type == G_TYPE_STRING ?
          g_print ("-%s[%s]{%s}\n", name, g_type_name (type),
          g_value_get_string (value)) : g_print ("-%s[%s]{%d}\n", name,
          g_type_name (type), g_value_get_uint (value));
    }
    g_print ("\n");

    /* get X,Y positions in frame */
    x_value = gst_structure_get_value (structure, "x");
    x = g_value_get_uint (x_value);
    y_value = gst_structure_get_value (structure, "y");
    y = g_value_get_uint (y_value);

    /* set object volumes [0-10] based on Y */
    g_object_set (G_OBJECT (playbin), "volume", (gdouble) (10 - y / 24), NULL);

    /* seek playback positions */
    gst_element_query_duration (playbin, format, &length);
    /* Width = 320 is specified in caps */
    position = (gint64) length * x / 320;
    gst_element_set_state (playbin, GST_STATE_PAUSED);
    gst_element_seek (GST_ELEMENT (playbin),
        1.0,
        format,
        GST_SEEK_FLAG_FLUSH,
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
    gst_element_set_state (GST_ELEMENT (playbin), GST_STATE_PLAYING);
  }

  gst_message_unref (message);
  return GST_BUS_DROP;
}
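This handler is likewise registered as a synchronous bus handler; with the GStreamer 1.0-style API this snippet uses, the call takes an extra GDestroyNotify argument. A minimal sketch (the pipeline variable is assumed):

/* Sketch: install the handler with the GStreamer 1.0 signature. */
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler,
    pipeline, NULL);
gst_object_unref (bus);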