Example #1
/* must be called with the bus OBJECT LOCK */
static guint
gst_bus_add_watch_full_unlocked (GstBus * bus, gint priority,
    GstBusFunc func, gpointer user_data, GDestroyNotify notify)
{
  guint id;
  GSource *source;

  if (bus->priv->watch_id) {
    GST_ERROR_OBJECT (bus,
        "Tried to add new watch while one was already there");
    return 0;
  }

  source = gst_bus_create_watch (bus);

  if (priority != G_PRIORITY_DEFAULT)
    g_source_set_priority (source, priority);

  g_source_set_callback (source, (GSourceFunc) func, user_data, notify);

  id = g_source_attach (source, NULL);
  g_source_unref (source);

  if (id) {
    bus->priv->watch_id = source;
  }

  GST_DEBUG_OBJECT (bus, "New source %p with id %u", source, id);
  return id;
}
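
A minimal sketch of how such an _unlocked helper is typically wrapped: the public entry point takes the bus object lock, delegates, and unlocks again (hypothetical wrapper code; the real gst_bus_add_watch_full has varied between GStreamer versions):

guint
gst_bus_add_watch_full (GstBus * bus, gint priority,
    GstBusFunc func, gpointer user_data, GDestroyNotify notify)
{
  guint id;

  g_return_val_if_fail (GST_IS_BUS (bus), 0);

  /* take the OBJECT LOCK that the helper's contract requires */
  GST_OBJECT_LOCK (bus);
  id = gst_bus_add_watch_full_unlocked (bus, priority, func, user_data,
      notify);
  GST_OBJECT_UNLOCK (bus);

  return id;
}
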
Example #2
/**
 * gst_discoverer_start:
 * @discoverer: A #GstDiscoverer
 *
 * Allow asynchronous discovering of URIs to take place.
 * A #GMainLoop must be available for #GstDiscoverer to properly work in
 * asynchronous mode.
 *
 * Since: 0.10.31
 */
void
gst_discoverer_start (GstDiscoverer * discoverer)
{
  GSource *source;
  GMainContext *ctx = NULL;

  GST_DEBUG_OBJECT (discoverer, "Starting...");

  if (discoverer->priv->async) {
    GST_DEBUG_OBJECT (discoverer, "We were already started");
    return;
  }

  discoverer->priv->async = TRUE;
  discoverer->priv->running = TRUE;

  ctx = g_main_context_get_thread_default ();
  if (ctx == NULL)
    ctx = g_main_context_default ();

  /* Connect to bus signals */
  source = gst_bus_create_watch (discoverer->priv->bus);
  g_source_set_callback (source, (GSourceFunc) gst_bus_async_signal_func,
      NULL, NULL);
  discoverer->priv->sourceid = g_source_attach (source, ctx);
  g_source_unref (source);
  discoverer->priv->ctx = g_main_context_ref (ctx);

  start_discovering (discoverer);
  GST_DEBUG_OBJECT (discoverer, "Started");
}
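
A GMainLoop must be iterating for the asynchronous mode above to deliver results. A minimal usage sketch, with a hypothetical print_info_cb handler (gst_discoverer_new(), gst_discoverer_discover_uri_async() and the "discovered"/"finished" signals are the public GstDiscoverer API):

static void
print_info_cb (GstDiscoverer * dc, GstDiscovererInfo * info, GError * err,
    gpointer user_data)
{
  g_print ("Discovered %s\n", gst_discoverer_info_get_uri (info));
}

static void
run_discovery (const gchar * uri)
{
  GMainLoop *loop = g_main_loop_new (NULL, FALSE);
  GError *err = NULL;
  GstDiscoverer *dc = gst_discoverer_new (5 * GST_SECOND, &err);

  g_signal_connect (dc, "discovered", G_CALLBACK (print_info_cb), NULL);
  g_signal_connect_swapped (dc, "finished", G_CALLBACK (g_main_loop_quit),
      loop);

  gst_discoverer_start (dc);
  gst_discoverer_discover_uri_async (dc, uri);
  g_main_loop_run (loop);

  gst_discoverer_stop (dc);
  g_object_unref (dc);
  g_main_loop_unref (loop);
}
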
Example #3
static void owr_media_renderer_init(OwrMediaRenderer *renderer)
{
    OwrMediaRendererPrivate *priv;
    GstBus *bus;
    GSource *bus_source;
    gchar *bin_name;

    renderer->priv = priv = OWR_MEDIA_RENDERER_GET_PRIVATE(renderer);

    priv->media_type = DEFAULT_MEDIA_TYPE;
    priv->source = DEFAULT_SOURCE;
    priv->disabled = DEFAULT_DISABLED;

    bin_name = g_strdup_printf("media-renderer-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->pipeline = gst_pipeline_new(bin_name);
    g_free(bin_name);

#ifdef OWR_DEBUG
    g_signal_connect(priv->pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

    priv->sink = NULL;
    priv->src = NULL;

    bus = gst_pipeline_get_bus(GST_PIPELINE(priv->pipeline));
    bus_source = gst_bus_create_watch(bus);
    g_source_set_callback(bus_source, (GSourceFunc) bus_call, priv->pipeline, NULL);
    g_source_attach(bus_source, _owr_get_main_context());
    g_source_unref(bus_source);
    gst_object_unref(bus);

    g_mutex_init(&priv->media_renderer_lock);
}
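
bus_call above is installed as a GstBusFunc; a minimal sketch of what such a handler can look like (hypothetical body; the real OpenWebRTC handler is more elaborate):

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    GstElement *pipeline = GST_ELEMENT(user_data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
        GError *error = NULL;
        gchar *debug = NULL;

        gst_message_parse_error(msg, &error, &debug);
        g_warning("Error from %s: %s", GST_OBJECT_NAME(pipeline), error->message);
        g_error_free(error);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_EOS:
        gst_element_set_state(pipeline, GST_STATE_NULL);
        break;
    default:
        break;
    }

    return TRUE; /* keep the watch installed */
}
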
Example #4
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *bus_source;
  GError *error = NULL;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default(data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch("videotestsrc ! warptv ! videoconvert ! autovideosink", &error);
  if (error) {
    gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message(message, data);
    g_free (message);
    return NULL;
  }

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  gst_element_set_state(data->pipeline, GST_STATE_READY);

  data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY);
  if (!data->video_sink) {
    GST_ERROR ("Could not retrieve video sink");
    return NULL;
  }

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  gst_object_unref (bus);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default(data->context);
  g_main_context_unref (data->context);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->video_sink);
  gst_object_unref (data->pipeline);

  return NULL;
}
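
With gst_bus_async_signal_func installed as the watch callback, every message is re-emitted as a "message::<type>" signal on the attached context. A sketch of the error_cb handler connected above (hypothetical body):

static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err = NULL;
  gchar *debug_info = NULL;
  gchar *message_string;

  gst_message_parse_error (msg, &err, &debug_info);
  message_string = g_strdup_printf ("Error received from element %s: %s",
      GST_OBJECT_NAME (msg->src), err->message);
  g_clear_error (&err);
  g_free (debug_info);
  set_ui_message (message_string, data);
  g_free (message_string);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
}
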
Example #5
void set_notifyfunction(CustomData *data)
{
	GstBus *bus;
	GSource *bus_source;

	if (data->notify_time > 0)
	{
		bus = gst_element_get_bus(data->pipeline);
		bus_source = gst_bus_create_watch(bus);
		g_source_set_callback(bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL,
		NULL);
		g_source_attach(bus_source, data->context);
		g_source_unref(bus_source);
		gst_object_unref(bus);

		if (data->timeout_source)
		{
			g_source_destroy(data->timeout_source);
		}
		/* Register a function that GLib will call every notify_time milliseconds */
		data->timeout_source = g_timeout_source_new(data->notify_time);
		g_source_set_callback(data->timeout_source, (GSourceFunc) gst_notify_time_cb, data, NULL);
		g_source_attach(data->timeout_source, data->context);
		g_source_unref(data->timeout_source);
	}
}
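
gst_notify_time_cb is registered as a plain GSourceFunc; a minimal sketch of such a periodic callback (hypothetical body, assuming the GStreamer 1.x position-query API):

static gboolean gst_notify_time_cb(CustomData *data)
{
	gint64 position = 0;

	/* Query and report the current playback position */
	if (gst_element_query_position(data->pipeline, GST_FORMAT_TIME, &position))
		g_print("position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS(position));

	return TRUE; /* TRUE keeps the timeout source firing */
}
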
Example #6
static gpointer gstreamer_loop(struct input_handle *ih) {
  GstElement *fdsrc;
  GError *error = NULL;
  GstBus *bus;

  ih->bin = gst_parse_launch("filesrc name=my_fdsrc ! "
                             "decodebin2 ! "
                             "audioconvert name=converter ! "
#if 1
                             "audio/x-raw-float,width=32,endianness=1234 ! "
                             "appsink name=sink sync=FALSE", &error);
#else
                             "audio/x-raw-float,width=32,endianness=1234 ! "
                             "ffenc_ac3 bitrate=128000 ! ffmux_ac3 ! filesink location=tmp.ac3", &error);
#endif
  if (!ih->bin) {
    fprintf(stderr, "Parse error: %s\n", error->message);
    g_clear_error(&error);
    return NULL;
  }

  fdsrc = gst_bin_get_by_name(GST_BIN(ih->bin), "my_fdsrc");
  g_object_set(G_OBJECT(fdsrc), "location", ih->filename, NULL);
  gst_object_unref(fdsrc);

  ih->appsink = gst_bin_get_by_name(GST_BIN(ih->bin), "sink");
  /* gst_app_sink_set_max_buffers(GST_APP_SINK(ih->appsink), 1); */

  /* start playing */
  ih->loop = g_main_loop_new(ih->main_context, FALSE);

  bus = gst_element_get_bus(ih->bin);
  ih->message_source = gst_bus_create_watch(bus);
  g_source_set_callback(ih->message_source, (GSourceFunc) bus_call, ih, NULL);
  g_source_attach(ih->message_source, ih->main_context);
  g_source_unref(ih->message_source);
  gst_object_unref(bus);

  /* start playback and listen to events */
  gst_element_set_state(ih->bin, GST_STATE_PLAYING);

  g_main_loop_run(ih->loop);

  return NULL;
}
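
The appsink is drained by another thread; a minimal sketch of such a consumer, assuming the GStreamer 0.10 app library (<gst/app/gstappsink.h>) that decodebin2 and audio/x-raw-float imply (drain_appsink is hypothetical):

static void drain_appsink(struct input_handle *ih) {
  GstBuffer *buf;

  while (!gst_app_sink_is_eos(GST_APP_SINK(ih->appsink))) {
    buf = gst_app_sink_pull_buffer(GST_APP_SINK(ih->appsink));
    if (!buf)
      break;
    /* GST_BUFFER_DATA(buf) holds GST_BUFFER_SIZE(buf) bytes of 32-bit
     * native-endian float samples, per the caps set in the pipeline */
    gst_buffer_unref(buf);
  }
}
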
Example #7
static void
construct_player (AurClient * client)
{
  GstBus *bus;
  guint flags;

  GST_DEBUG ("Constructing playbin");
  client->player = gst_element_factory_make ("playbin", NULL);

  if (client->player == NULL) {
    g_warning ("Failed to construct playbin");
    return;
  }

  g_object_get (client->player, "flags", &flags, NULL);
  /* Disable subtitles for now (0x00000004 is the GST_PLAY_FLAG_TEXT subtitle flag) */
  flags &= ~0x00000004;
  g_object_set (client->player, "flags", flags, NULL);

  bus = gst_element_get_bus (GST_ELEMENT (client->player));

#if 0
  gst_bus_add_signal_watch (bus);
#else
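  /* gst_bus_add_signal_watch() would attach the watch to the (thread-)default
   * main context; creating the watch manually lets it be attached to
   * client->context explicitly. */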
  {
    GSource *bus_source;
    bus_source = gst_bus_create_watch (bus);
    g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func,
        NULL, NULL);
    g_source_attach (bus_source, client->context);
    g_source_unref (bus_source);
  }
#endif

  g_signal_connect (bus, "message::error", (GCallback) (on_error_msg), client);
  gst_object_unref (bus);

  gst_element_set_state (client->player, GST_STATE_READY);

  g_signal_emit (client, signals[SIGNAL_PLAYER_CREATED], 0, client->player);
}
Example #8
/* must be called with the bus OBJECT LOCK */
static guint
gst_bus_add_watch_full_unlocked (GstBus * bus, gint priority,
    GstBusFunc func, gpointer user_data, GDestroyNotify notify)
{
  GMainContext *ctx;
  guint id;
  GSource *source;

  if (bus->priv->signal_watch) {
    GST_ERROR_OBJECT (bus,
        "Tried to add new watch while one was already there");
    return 0;
  }

  source = gst_bus_create_watch (bus);
  if (!source) {
    g_critical ("Creating bus watch failed");
    return 0;
  }

  if (priority != G_PRIORITY_DEFAULT)
    g_source_set_priority (source, priority);

  g_source_set_callback (source, (GSourceFunc) func, user_data, notify);

  ctx = g_main_context_get_thread_default ();
  id = g_source_attach (source, ctx);
  g_source_unref (source);

  if (id) {
    bus->priv->signal_watch = source;
  }

  GST_DEBUG_OBJECT (bus, "New source %p with id %u", source, id);
  return id;
}
Example #9
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
    JavaVMAttachArgs args;
    GstBus *bus;
    CustomData *data = (CustomData *)userdata;
    GSource *bus_source;
    GError *error = NULL;

    GST_DEBUG ("Creating pipeline in CustomData at %p", data);

    /* Create our own GLib Main Context and make it the default one */
    data->context = g_main_context_new ();
    g_main_context_push_thread_default(data->context);

    /* Build pipeline */
    //char caps[1000]="caps = \"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, sprop-parameter-sets=(string)\\\"Z2QAHqyyAUB7YCIAAAMAAgAAAwBlHixckA\\\\=\\\\=\\\\,aOvBssiw\\\", payload=(int)96, ssrc=(uint)226114972, timestamp-offset=(uint)497630140, seqnum-offset=(uint)4095\"";
    //data->pipeline = gst_parse_launch("udpsrc port=5010 caps = \"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, sprop-parameter-sets=(string)\\\"Z2QAHqyyAUB7YCIAAAMAAgAAAwB5HixckA\\\\=\\\\=\\\\,aOvBssiw\\\", payload=(int)96, ssrc=(uint)1750089033, timestamp-offset=(uint)1121294972, seqnum-offset=(uint)31049\" ! rtph264depay ! avdec_h264 ! autovideosink sync=false", &error);
    data->pipeline = gst_parse_launch(pipeline_path, &error);

    if (error) {
        gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
        g_clear_error (&error);
        set_ui_message(message, data);
        g_free (message);
        return NULL;
    }

    /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
    gst_element_set_state(data->pipeline, GST_STATE_READY);

    data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY);
    if (!data->video_sink) {
        GST_ERROR ("Could not retrieve video sink");
        return NULL;
    }

    /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
    bus = gst_element_get_bus (data->pipeline);
    bus_source = gst_bus_create_watch (bus);
    g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
    g_source_attach (bus_source, data->context);
    g_source_unref (bus_source);
    g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
    g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
    gst_object_unref (bus);

    /* Create a GLib Main Loop and set it to run */
    GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
    data->main_loop = g_main_loop_new (data->context, FALSE);
    check_initialization_complete (data);
    g_main_loop_run (data->main_loop);
    GST_DEBUG ("Exited main loop");
    g_main_loop_unref (data->main_loop);
    data->main_loop = NULL;

    /* Free resources */
    g_main_context_pop_thread_default(data->context);
    g_main_context_unref (data->context);
    gst_element_set_state (data->pipeline, GST_STATE_NULL);
    gst_object_unref (data->video_sink);
    gst_object_unref (data->pipeline);

    return NULL;
}
Example #10
/* Main method for the native code. This is executed on its own thread. */
static void *app_function(void *userdata) {

	//test
	int fd;

	void * pbuf = malloc(sizeof(unsigned char) * 1024);
	memset(pbuf, 0xef, sizeof(unsigned char) * 1024);

	fd = open("/storage/sdcard0/fifo9000", O_WRONLY);
	if (-1 == fd) {
		g_print("/storage/sdcard0/fifo9000 open error\n");
		return NULL;
	}
	while (TRUE) {
		/*
		 if (0 != pthread_mutex_lock(&mutex))
		 {
		 g_print("pthread_mutex_lock fail!");
		 }
		 else
		 {
		 g_print("pthread_mutex_lock succ!");
		 }
		 sleep(60);
		 g_print("thread finish sleeping 10s");
		 // Unlock mutex
		 if (0 != pthread_mutex_unlock(&mutex))
		 {
		 g_print("pthread_mutex_unlock fail!");
		 }
		 else
		 {
		 g_print("pthread_mutex_lock succ!");
		 }

		 sleep(10);
		 g_print("after unlocking, thread finish sleeping 10s");
		 */

		write(fd, pbuf, sizeof(unsigned char) * 1024);
		sleep(10);

	}

	return NULL;

	/* NOTE: the pipeline code below is unreachable while the test block
	 * above is in place */
	JavaVMAttachArgs args;
	GstBus *bus;
	CustomData *data = (CustomData *) userdata;

	data->b = 1; /* For waveform generation */
	data->d = 1;

	GSource *bus_source;
	GError *error = NULL;

	GST_DEBUG("Creating pipeline in CustomData at %p", data);

	/* Create our own GLib Main Context and make it the default one */
	data->context = g_main_context_new();
	g_main_context_push_thread_default(data->context);

	/* Build pipeline */
//  data->pipeline = gst_parse_launch("audiotestsrc ! audioconvert ! audioresample ! autoaudiosink", &error);
//  data->pipeline = gst_parse_launch("filesrc location=/storage/sdcard0/x.mp3 ! mad ! audioconvert ! audioresample ! autoaudiosink", &error);
//  data->pipeline = gst_parse_launch("playbin2 uri=file:///storage/sdcard0/x.mp3", NULL);
	data->pipeline = gst_parse_launch("playbin2 uri=appsrc://", &error);
//	data->pipeline = gst_parse_launch("appsrc name=mysource ! audio/mpeg ! mad ! audioconvert ! audioresample ! autoaudiosink", &error);
//	data->pipeline = gst_parse_launch("filesrc location=/storage/sdcard0/x.mp3 ! mpegaudioparse  ! mad ! audioconvert ! audioresample ! autoaudiosink", &error);

	if (error) {
		gchar *message = g_strdup_printf("Unable to build pipeline: %s",
				error->message);
		g_clear_error(&error);
		set_ui_message(message, data);
		g_free(message);
		return NULL;
	}

	g_signal_connect(data->pipeline, "source-setup", G_CALLBACK(source_setup),
			data);
	/* get the appsrc */
	/*
	 data->app_source = gst_bin_get_by_name (GST_BIN(data->pipeline), "mysource");
	 g_assert(data->app_source);
	 // g_assert(GST_IS_APP_SRC(data->app_source));
	 g_signal_connect (data->app_source, "need-data", G_CALLBACK (start_feed), data);
	 g_signal_connect (data->app_source, "enough-data", G_CALLBACK (stop_feed), data);
	 */

	/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
	bus = gst_element_get_bus(data->pipeline);
	bus_source = gst_bus_create_watch(bus);
	g_source_set_callback(bus_source, (GSourceFunc) gst_bus_async_signal_func,
			NULL, NULL);
	g_source_attach(bus_source, data->context);
	g_source_unref(bus_source);
	g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_cb,
			data);
	g_signal_connect(G_OBJECT(bus), "message::state-changed",
			(GCallback) state_changed_cb, data);
	gst_object_unref(bus);

	/* Create a GLib Main Loop and set it to run */
	GST_DEBUG("Entering main loop... (CustomData:%p)", data);
	data->main_loop = g_main_loop_new(data->context, FALSE);
	check_initialization_complete(data);

	/* Start playing the pipeline */
	//gst_element_set_state (data->pipeline, GST_STATE_PLAYING);

	g_main_loop_run(data->main_loop);
	GST_DEBUG("Exited main loop");
	g_main_loop_unref(data->main_loop);
	data->main_loop = NULL;

	/* Free resources */
	g_main_context_pop_thread_default(data->context);
	g_main_context_unref(data->context);
	gst_element_set_state(data->pipeline, GST_STATE_NULL);
	gst_object_unref(data->pipeline);

	return NULL;
}
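
A sketch of the source_setup handler connected above; playbin2 emits "source-setup" when it creates the appsrc for the appsrc:// URI. The body is hypothetical, reusing the start_feed/stop_feed handlers from the commented-out block:

static void source_setup(GstElement *pipeline, GstElement *source, CustomData *data)
{
	/* Keep a handle on the appsrc and get told when it wants data */
	data->app_source = source;
	g_signal_connect(source, "need-data", G_CALLBACK(start_feed), data);
	g_signal_connect(source, "enough-data", G_CALLBACK(stop_feed), data);
}
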
Example #11
/*
 * owr_local_media_source_request_source
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *                                                             +------------+
 *                                                         /---+ inter*sink |
 * +--------+    +--------+   +------------+   +-----+    /    +------------+
 * | source +----+ scale? +---+ capsfilter +---+ tee +---/
 * +--------+    +--------+   +------------+   +-----+   \
 *                                                        \    +------------+
 *                                                         \---+ inter*sink |
 *                                                             +------------+
 *
 * For each newly requested pad a new inter*sink is added to the tee.
 * Note that this is a completely independent pipeline, and the complete
 * pipeline is only created once for a specific media source.
 *
 * Then for each newly requested pad another bin with an inter*src is
 * created, which is then going to be part of the transport agent
 * pipeline. The ghostpad of it is what we return here.
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_element = NULL;
    GstElement *source_pipeline;
    GHashTable *event_data;
    GValue *value;
#if defined(__linux__) && !defined(__ANDROID__)
    gchar *tmp;
#endif

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    /* only create the source bin for this media source once */
    if ((source_pipeline = _owr_media_source_get_source_bin(media_source)))
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
    else {
        OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
        OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
        GstElement *source, *source_process = NULL, *capsfilter = NULL, *tee;
        GstPad *sinkpad, *source_pad;
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstBus *bus;
        GSource *bus_source;

        event_data = _owr_value_table_new();
        value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());

        g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_pipeline = gst_pipeline_new(bin_name);
        gst_pipeline_use_clock(GST_PIPELINE(source_pipeline), gst_system_clock_obtain());
        gst_element_set_base_time(source_pipeline, _owr_get_base_time());
        gst_element_set_start_time(source_pipeline, GST_CLOCK_TIME_NONE);
        g_free(bin_name);
        bin_name = NULL;

#ifdef OWR_DEBUG
        g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

        bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline));
        bus_source = gst_bus_create_watch(bus);
        g_source_set_callback(bus_source, (GSourceFunc) bus_call, media_source, NULL);
        g_source_attach(bus_source, _owr_get_main_context());
        g_source_unref(bus_source);
        gst_object_unref(bus);

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
            {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
/*
    Default values for buffer-time and latency-time on android are 200ms and 20ms.
    The minimum latency-time that can be used on Android is 20ms, and using
    a 40ms buffer-time with a 20ms latency-time causes crackling audio.
    So let's just stick with the defaults.
*/
#if !defined(__ANDROID__)
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#endif
                if (priv->device_index > -1) {
#ifdef __APPLE__
                    g_object_set(source, "device", priv->device_index, NULL);
#elif defined(__linux__) && !defined(__ANDROID__)
                    tmp = g_strdup_printf("%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            break;
            }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            GstPad *srcpad;
            GstCaps *device_caps;

            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                if (priv->device_index > -1) {
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                    g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                    g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                    tmp = g_strdup_printf("/dev/video%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
                break;
            case OWR_SOURCE_TYPE_TEST: {
                GstElement *src, *time;
                GstPad *srcpad;

                source = gst_bin_new("video-source");

                CREATE_ELEMENT(src, "videotestsrc", "videotestsrc");
                g_object_set(src, "is-live", TRUE, NULL);
                gst_bin_add(GST_BIN(source), src);

                time = gst_element_factory_make("timeoverlay", "timeoverlay");
                if (time) {
                    g_object_set(time, "font-desc", "Sans 60", NULL);
                    gst_bin_add(GST_BIN(source), time);
                    gst_element_link(src, time);
                    srcpad = gst_element_get_static_pad(time, "src");
                } else
                    srcpad = gst_element_get_static_pad(src, "src");

                gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad));
                gst_object_unref(srcpad);

                break;
            }
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* First try to see if we can just get the format we want directly */

            source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
            gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#else
            _owr_gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#endif
            /* Now see what the device can really produce */
            srcpad = gst_element_get_static_pad(source, "src");
            gst_element_set_state(source, GST_STATE_READY);
            device_caps = gst_pad_query_caps(srcpad, source_caps);

            if (gst_caps_is_empty(device_caps)) {
                /* Let's see if it works when we drop format constraints (which can be dealt with downstream) */
                GstCaps *tmp = source_caps;
                source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
                gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#else
                _owr_gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#endif
                gst_caps_unref(tmp);

                gst_caps_unref(device_caps);
                device_caps = gst_pad_query_caps(srcpad, source_caps);

                if (gst_caps_is_empty(device_caps)) {
                    /* Accepting any format didn't work, we're going to hope that scaling fixes it */
                    CREATE_ELEMENT(source_process, "videoscale", "video-source-scale");
                    gst_bin_add(GST_BIN(source_pipeline), source_process);
                }
            }

            gst_caps_unref(device_caps);
            gst_object_unref(srcpad);

#if defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
            /* Force NV12 on iOS, else the source can negotiate BGRA.
             * ercolorspace can do NV12 -> BGRA and NV12 -> I420, which is
             * what is needed for Bowser */
            gst_caps_set_simple(source_caps, "format", G_TYPE_STRING, "NV12", NULL);
#endif

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_pipeline), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        source_pad = gst_element_get_static_pad(source, "src");
        g_signal_connect(source_pad, "notify::caps", G_CALLBACK(on_caps), media_source);
        gst_object_unref(source_pad);

        CREATE_ELEMENT(tee, "tee", "source-tee");
        g_object_set(tee, "allow-not-linked", TRUE, NULL);

        gst_bin_add_many(GST_BIN(source_pipeline), source, tee, NULL);

        /* Many sources don't like reconfiguration and it's pointless
         * here anyway right now. No need to reconfigure whenever something
         * is added to the tee or removed.
         * We will have to implement reconfiguration differently later by
         * selecting the best caps based on all consumers.
         */
        sinkpad = gst_element_get_static_pad(tee, "sink");
        gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, drop_reconfigure_event, NULL, NULL);
        gst_object_unref(sinkpad);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            if (source_process) {
                LINK_ELEMENTS(source_process, capsfilter);
                LINK_ELEMENTS(source, source_process);
            } else
                LINK_ELEMENTS(source, capsfilter);
        } else if (source_process) {
            LINK_ELEMENTS(source_process, tee);
            LINK_ELEMENTS(source, source_process);
        } else
            LINK_ELEMENTS(source, tee);

        gst_element_sync_state_with_parent(tee);
        if (capsfilter)
            gst_element_sync_state_with_parent(capsfilter);
        if (source_process)
            gst_element_sync_state_with_parent(source_process);
        gst_element_sync_state_with_parent(source);

        _owr_media_source_set_source_bin(media_source, source_pipeline);
        _owr_media_source_set_source_tee(media_source, tee);
        if (gst_element_set_state(source_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
            GST_ERROR("Failed to set local source pipeline %s to playing", GST_OBJECT_NAME(source_pipeline));
            /* FIXME: We should handle this and not expose the source */
        }

        value = _owr_value_table_add(event_data, "end_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());
        OWR_POST_EVENT(media_source, LOCAL_SOURCE_STARTED, event_data);

        g_signal_connect(tee, "pad-removed", G_CALLBACK(tee_pad_removed_cb), media_source);
    }
    gst_object_unref(source_pipeline);

    source_element = OWR_MEDIA_SOURCE_CLASS(owr_local_media_source_parent_class)->request_source(media_source, caps);

done:
    return source_element;
}
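
The pad probe installed on the tee's sink pad drops upstream reconfigure events, as the comment in the function explains. A minimal sketch of such a probe callback (hypothetical body, GStreamer 1.x pad-probe API):

static GstPadProbeReturn drop_reconfigure_event(GstPad *pad, GstPadProbeInfo *info,
    gpointer user_data)
{
    (void) pad;
    (void) user_data;

    /* Swallow reconfigure events so that adding or removing consumers on
     * the tee does not force the source to renegotiate */
    if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) == GST_EVENT_RECONFIGURE)
        return GST_PAD_PROBE_DROP;

    return GST_PAD_PROBE_OK;
}
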
Example #12
/**
 * gst_video_convert_frame_async:
 * @buf: a #GstBuffer
 * @to_caps: the #GstCaps to convert to
 * @timeout: the maximum amount of time allowed for the processing.
 * @callback: #GstVideoConvertFrameCallback that will be called after conversion.
 * @user_data: user data to pass to @callback
 * @destroy_notify: #GDestroyNotify to be called after @user_data is not needed anymore
 *
 * Converts a raw video buffer into the specified output caps.
 *
 * The output caps can be any raw video formats or any image formats (jpeg, png, ...).
 *
 * The width, height and pixel-aspect-ratio can also be specified in the output caps.
 *
 * @callback will be called after conversion, when an error occurred or if conversion didn't
 * finish after @timeout. @callback will always be called from the thread default
 * #GMainContext, see g_main_context_get_thread_default(). If GLib before 2.22 is used,
 * this will always be the global default main context.
 *
 * @destroy_notify will be called after the callback was called and @user_data is not needed
 * anymore.
 *
 * Since: 0.10.31
 *
 */
void
gst_video_convert_frame_async (GstBuffer * buf, const GstCaps * to_caps,
    GstClockTime timeout, GstVideoConvertFrameCallback callback,
    gpointer user_data, GDestroyNotify destroy_notify)
{
  GMainContext *context = NULL;
  GError *error = NULL;
  GstBus *bus;
  GstCaps *from_caps, *to_caps_copy = NULL;
  GstElement *pipeline, *src, *sink;
  guint i, n;
  GSource *source;
  GstVideoConvertFrameContext *ctx;

  g_return_if_fail (buf != NULL);
  g_return_if_fail (to_caps != NULL);
  g_return_if_fail (GST_BUFFER_CAPS (buf) != NULL);
  g_return_if_fail (callback != NULL);

#if GLIB_CHECK_VERSION(2,22,0)
  context = g_main_context_get_thread_default ();
#endif
  if (!context)
    context = g_main_context_default ();

  from_caps = GST_BUFFER_CAPS (buf);

  to_caps_copy = gst_caps_new_empty ();
  n = gst_caps_get_size (to_caps);
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (to_caps, i);

    s = gst_structure_copy (s);
    gst_structure_remove_field (s, "framerate");
    gst_caps_append_structure (to_caps_copy, s);
  }

  pipeline =
      build_convert_frame_pipeline (&src, &sink, from_caps, to_caps_copy,
      &error);
  if (!pipeline)
    goto no_pipeline;

  bus = gst_element_get_bus (pipeline);

  ctx = g_slice_new0 (GstVideoConvertFrameContext);
  ctx->mutex = g_mutex_new ();
  ctx->buffer = gst_buffer_ref (buf);
  ctx->callback = callback;
  ctx->user_data = user_data;
  ctx->destroy_notify = destroy_notify;
  ctx->context = g_main_context_ref (context);
  ctx->finished = FALSE;
  ctx->pipeline = pipeline;

  if (timeout != GST_CLOCK_TIME_NONE) {
    source = g_timeout_source_new (timeout / GST_MSECOND);
    g_source_set_callback (source,
        (GSourceFunc) convert_frame_timeout_callback, ctx, NULL);
    ctx->timeout_id = g_source_attach (source, context);
    g_source_unref (source);
  }

  g_signal_connect (src, "need-data",
      G_CALLBACK (convert_frame_need_data_callback), ctx);
  g_signal_connect (sink, "new-preroll",
      G_CALLBACK (convert_frame_new_buffer_callback), ctx);

  source = gst_bus_create_watch (bus);
  g_source_set_callback (source, (GSourceFunc) convert_frame_bus_callback,
      ctx, NULL);
  g_source_attach (source, context);
  g_source_unref (source);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  gst_object_unref (bus);
  gst_caps_unref (to_caps_copy);

  return;
  /* ERRORS */
no_pipeline:
  {
    GstVideoConvertFrameCallbackContext *ctx;
    GSource *source;

    gst_caps_unref (to_caps_copy);

    ctx = g_slice_new0 (GstVideoConvertFrameCallbackContext);
    ctx->callback = callback;
    ctx->user_data = user_data;
    ctx->destroy_notify = destroy_notify;
    ctx->buffer = NULL;
    ctx->error = error;

    source = g_timeout_source_new (0);
    g_source_set_callback (source,
        (GSourceFunc) convert_frame_dispatch_callback, ctx,
        (GDestroyNotify) gst_video_convert_frame_callback_context_free);
    g_source_attach (source, context);
    g_source_unref (source);
  }
}
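
A minimal usage sketch for the API above, with hypothetical frame_converted_cb/request_snapshot caller code (GStreamer 0.10, where buffers carry their caps via GST_BUFFER_CAPS):

static void
frame_converted_cb (GstBuffer * converted, GError * error, gpointer user_data)
{
  if (error != NULL)
    g_warning ("Conversion failed: %s", error->message);
  else
    g_print ("Got %u bytes of image data\n", GST_BUFFER_SIZE (converted));
}

static void
request_snapshot (GstBuffer * video_buf)
{
  GstCaps *to_caps = gst_caps_new_simple ("image/png", NULL);

  gst_video_convert_frame_async (video_buf, to_caps, GST_SECOND,
      frame_converted_cb, NULL, NULL);
  gst_caps_unref (to_caps);
}
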
Example #13
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *timeout_source;
  GSource *bus_source;
  GError *error = NULL;
  guint flags;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default(data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch("playbin2", &error);
  if (error) {
    gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message(message, data);
    g_free (message);
    return NULL;
  }

  /* Disable subtitles */
  g_object_get (data->pipeline, "flags", &flags, NULL);
  flags &= ~GST_PLAY_FLAG_TEXT;
  g_object_set (data->pipeline, "flags", flags, NULL);

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  data->target_state = GST_STATE_READY;
  gst_element_set_state(data->pipeline, GST_STATE_READY);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::duration", (GCallback)duration_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback)buffering_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::clock-lost", (GCallback)clock_lost_cb, data);
  gst_object_unref (bus);

  /* Register a function that GLib will call 4 times per second */
  timeout_source = g_timeout_source_new (250);
  g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, data, NULL);
  g_source_attach (timeout_source, data->context);
  g_source_unref (timeout_source);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default(data->context);
  g_main_context_unref (data->context);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->pipeline);

  return NULL;
}
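
refresh_ui is the callback behind the 250 ms timeout source above; a sketch of such a handler (hypothetical body, using the 0.10 query API that the playbin2 pipeline implies):

static gboolean refresh_ui (CustomData *data) {
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 position;

  /* Do not update anything unless we have a working pipeline */
  if (!data || !data->pipeline)
    return TRUE;

  if (gst_element_query_position (data->pipeline, &fmt, &position))
    GST_DEBUG ("Position: %" GST_TIME_FORMAT, GST_TIME_ARGS (position));

  return TRUE; /* keep being called by GLib */
}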