/* Example #1 */
/* GstMetaTransformFunction: copies the arrival-time meta from the source
 * buffer's meta onto the transformed buffer. The source buffer, transform
 * type and type-specific data are not needed for this simple copy. */
static gboolean _owr_arrival_time_meta_transform(GstBuffer *transbuf, GstMeta *meta, GstBuffer *buffer, GQuark type, gpointer data)
{
    OwrArrivalTimeMeta *at_meta;

    OWR_UNUSED(buffer);
    OWR_UNUSED(type);
    OWR_UNUSED(data);

    at_meta = (OwrArrivalTimeMeta *)meta;
    _owr_buffer_add_arrival_time_meta(transbuf, at_meta->arrival_time);

    return TRUE;
}
/* Example #2 */
/* Bus message callback used by the tests: forwards the message sub type
 * to the waiting async queue supplied as user_data. */
static void on_message(OwrMessageOrigin *origin, OwrMessageType type, OwrMessageSubType sub_type, GHashTable *data, gpointer user_data)
{
    OWR_UNUSED(origin);
    OWR_UNUSED(type);
    OWR_UNUSED(data);

    /* +1 so that a sub_type of 0 never becomes a NULL queue item */
    g_async_queue_push((GAsyncQueue *) user_data, GINT_TO_POINTER(sub_type + 1));
}
/* Example #3 */
/* Pad probe callback that drops GST_EVENT_RECONFIGURE events so upstream
 * reconfiguration cannot be triggered through this pad.
 *
 * Guard with GST_IS_EVENT() before reading the event type (matching
 * drop_reconfigure_cb elsewhere in the project): info->data is only an
 * event when the probe mask delivers events, so this keeps the probe
 * safe if it is ever installed with a broader probe mask. */
static GstPadProbeReturn
drop_reconfigure_event(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    OWR_UNUSED(pad);
    OWR_UNUSED(user_data);

    if (GST_IS_EVENT(GST_PAD_PROBE_INFO_DATA(info))
        && GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) == GST_EVENT_RECONFIGURE)
        return GST_PAD_PROBE_DROP;
    return GST_PAD_PROBE_OK;
}
/* Example #4 */
/* GBindingTransformFunc: converts a bitrate in bits per second
 * (from_value) into kilobits per second (to_value). Always succeeds. */
static gboolean binding_transform_to_kbps(GBinding *binding, const GValue *from_value, GValue *to_value, gpointer user_data)
{
    OWR_UNUSED(binding);
    OWR_UNUSED(user_data);

    g_value_set_uint(to_value, g_value_get_uint(from_value) / 1000);

    return TRUE;
}
/* Example #5 */
/* Pad probe callback that drops buffers flagged as GAP: they're just
 * duplicated buffers and we don't care about constant framerate here. */
static GstPadProbeReturn
drop_gap_buffers(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    GstPadProbeReturn result = GST_PAD_PROBE_OK;

    OWR_UNUSED(pad);
    OWR_UNUSED(user_data);

    if (GST_BUFFER_FLAG_IS_SET(info->data, GST_BUFFER_FLAG_GAP))
        result = GST_PAD_PROBE_DROP;

    return result;
}
/* reconfiguration is not supported by all sources and can be disruptive
 * we will handle reconfiguration manually
 * FIXME: implement source reconfiguration support :) */
static GstPadProbeReturn drop_reconfigure_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    gpointer probe_data = GST_PAD_PROBE_INFO_DATA(info);

    OWR_UNUSED(pad);
    OWR_UNUSED(user_data);

    if (!GST_IS_EVENT(probe_data))
        return GST_PAD_PROBE_OK;

    if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_RECONFIGURE)
        return GST_PAD_PROBE_OK;

    GST_DEBUG("Dropping reconfigure event");
    return GST_PAD_PROBE_DROP;
}
/* Example #7 — file: owr.c, project: bill-auger/openwebrtc */
/* GstLogFunction that routes GStreamer debug output to the Android log.
 * Messages below the category threshold are discarded; the pad or object
 * name is included in the log line when one is available. */
static void gst_log_android_handler(GstDebugCategory *category,
                 GstDebugLevel level,
                 const gchar *file,
                 const gchar *function,
                 gint line,
                 GObject *object,
                 GstDebugMessage *message,
                 gpointer data)
{
    gchar *label = NULL;

    OWR_UNUSED(data);

    if (level > gst_debug_category_get_threshold(category))
      return;

    if (GST_IS_PAD(object) && GST_OBJECT_NAME(object))
      label = g_strdup_printf("<%s:%s>", GST_DEBUG_PAD_NAME(object));
    else if (GST_IS_OBJECT(object))
      label = g_strdup_printf("<%s>", GST_OBJECT_NAME(object));

    __android_log_print(ANDROID_LOG_INFO, "gst_log", "%p %s %s %s:%d:%s:%s %s\n",
            (void *)g_thread_self(),
            gst_debug_level_get_name(level), gst_debug_category_get_name(category),
            file, line, function, label ? label : "", gst_debug_message_get(message));

    g_free(label);
}
/* Example #8 */
/* GOnce-style worker: enumerates the available video encoder/decoder
 * element factories and caches the H.264 and VP8 subsets, sorted by
 * rank, in the file-level h264_/vp8_ encoder and decoder lists. */
static gpointer owr_payload_detect_codecs(gpointer data)
{
    GList *all_decoders, *all_encoders;
    GstCaps *caps;

    OWR_UNUSED(data);

    all_decoders = gst_element_factory_list_get_elements(
        GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
        GST_RANK_MARGINAL);
    all_encoders = gst_element_factory_list_get_elements(
        GST_ELEMENT_FACTORY_TYPE_ENCODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
        GST_RANK_MARGINAL);

    /* H.264 */
    caps = gst_caps_new_empty_simple("video/x-h264");
    h264_decoders = gst_element_factory_list_filter(all_decoders, caps, GST_PAD_SINK, FALSE);
    h264_encoders = gst_element_factory_list_filter(all_encoders, caps, GST_PAD_SRC, FALSE);
    gst_caps_unref(caps);
    h264_decoders = g_list_sort(h264_decoders, gst_plugin_feature_rank_compare_func);
    h264_encoders = g_list_sort(h264_encoders, gst_plugin_feature_rank_compare_func);

    /* VP8 */
    caps = gst_caps_new_empty_simple("video/x-vp8");
    vp8_decoders = gst_element_factory_list_filter(all_decoders, caps, GST_PAD_SINK, FALSE);
    vp8_encoders = gst_element_factory_list_filter(all_encoders, caps, GST_PAD_SRC, FALSE);
    gst_caps_unref(caps);
    vp8_decoders = g_list_sort(vp8_decoders, gst_plugin_feature_rank_compare_func);
    vp8_encoders = g_list_sort(vp8_encoders, gst_plugin_feature_rank_compare_func);

    gst_plugin_feature_list_free(all_decoders);
    gst_plugin_feature_list_free(all_encoders);

    return NULL;
}
/* Example #9 */
/* Watchdog thread for the tests: terminates the process if it is still
 * running after one second. Never returns normally (exit() does not
 * return); the trailing return keeps pedantic compilers quiet about a
 * non-void function without a return statement. */
static gpointer timeout_thread_func(gpointer data)
{
    OWR_UNUSED(data);

    g_usleep(G_USEC_PER_SEC);
    g_print("** ERROR ** test timed out\n");
    exit(-1);

    return NULL; /* not reached */
}
/* Example #10 */
/* Idle callback (GSourceFunc): delivers a queued "send" request to the
 * data channel's on_datachannel_send closure.
 *
 * args must contain "data_channel" (OwrDataChannel, ref transferred to
 * us), "data" (byte buffer, ownership transferred), "length" and
 * "is_binary". When no send closure is installed the payload is freed
 * instead of dispatched. Always returns FALSE so the idle source is
 * removed after a single dispatch.
 *
 * Fix: the original applied OWR_UNUSED() to priv/data_channel/data/
 * is_binary even though all of them are assigned and used below —
 * those misleading dead statements are removed. */
static gboolean data_channel_send(GHashTable *args)
{
    OwrDataChannelPrivate *priv;
    OwrDataChannel *data_channel;
    guint8 *data;
    guint length;
    gboolean is_binary;

    data_channel = g_hash_table_lookup(args, "data_channel");
    data = g_hash_table_lookup(args, "data");
    length = GPOINTER_TO_UINT(g_hash_table_lookup(args, "length"));
    is_binary = GPOINTER_TO_UINT(g_hash_table_lookup(args, "is_binary"));
    priv = data_channel->priv;

    if (priv->on_datachannel_send) {
        GValue params[4] = { G_VALUE_INIT, G_VALUE_INIT, G_VALUE_INIT, G_VALUE_INIT };

        g_value_init(&params[0], OWR_TYPE_DATA_CHANNEL);
        g_value_set_object(&params[0], data_channel);
        g_value_init(&params[1], G_TYPE_POINTER);
        g_value_set_pointer(&params[1], data);
        g_value_init(&params[2], G_TYPE_UINT);
        g_value_set_uint(&params[2], length);
        g_value_init(&params[3], G_TYPE_BOOLEAN);
        g_value_set_boolean(&params[3], is_binary);

        g_closure_invoke(priv->on_datachannel_send, NULL, 4, (const GValue *)&params, NULL);

        g_value_unset(&params[0]);
        g_value_unset(&params[1]);
        g_value_unset(&params[2]);
        g_value_unset(&params[3]);
    } else
        g_free(data); /* no consumer: release the payload to avoid a leak */

    g_hash_table_unref(args);
    g_object_unref(data_channel);

    return FALSE;
}
/* Example #11 */
/* GstMetaInitFunction for OwrArrivalTimeMeta: initializes the arrival
 * time from params (a guint64 *) when provided, otherwise marks the
 * timestamp as unset (GST_CLOCK_TIME_NONE). */
static gboolean _owr_arrival_time_meta_init(GstMeta *meta, gpointer params, GstBuffer *buffer)
{
    OwrArrivalTimeMeta *at_meta = (OwrArrivalTimeMeta *)meta;

    OWR_UNUSED(buffer);

    at_meta->arrival_time = params ? *((guint64 *) params) : GST_CLOCK_TIME_NONE;

    return TRUE;
}
/* Example #12 */
/* Creates and configures the platform audio sink element. The renderer
 * argument is unused: the sink is identical for every audio renderer.
 * Returns NULL (with a warning) if the AUDIO_SINK factory is missing.
 *
 * Fix: gst_element_factory_make() can return NULL when the element is
 * not available; calling g_object_set() on NULL raises a critical
 * warning, so guard the configuration. */
static GstElement *owr_audio_renderer_get_sink(OwrMediaRenderer *renderer)
{
    GstElement *sink;

    OWR_UNUSED(renderer);

    sink = gst_element_factory_make(AUDIO_SINK, "audio-renderer-sink");
    if (sink) {
        g_object_set(sink, "buffer-time", SINK_BUFFER_TIME,
            "latency-time", G_GINT64_CONSTANT(10000),
            "enable-last-sample", FALSE, NULL);
    } else
        g_warning("Failed to create audio sink element");
    return sink;
}
/* Example #13 */
/* Returns the raw audio caps accepted by the audio renderer:
 * interleaved signed 16-bit little-endian samples. The renderer
 * argument is unused — the caps are the same for all audio renderers. */
static GstCaps *owr_audio_renderer_get_caps(OwrMediaRenderer *renderer)
{
    OWR_UNUSED(renderer);

    return gst_caps_new_simple("audio/x-raw",
        "format", G_TYPE_STRING, "S16LE",
        "layout", G_TYPE_STRING, "interleaved",
        NULL);
}
/* Example #14 */
/* "deep-notify" handler: logs the new value of a property that changed
 * anywhere inside the pipeline, prefixed with the slash-separated object
 * path of the element it changed on. */
void _owr_deep_notify(GObject *object, GstObject *orig,
    GParamSpec *pspec, gpointer user_data)
{
    GValue value = G_VALUE_INIT;
    gchar *value_str = NULL;
    GstObject *walk;
    gchar *previous, *path;

    OWR_UNUSED(user_data);
    OWR_UNUSED(object);

    /* Build "grandparent/parent/name/" by walking up the object tree. */
    path = g_strdup("");
    for (walk = orig; GST_IS_OBJECT(walk); walk = GST_OBJECT_PARENT(walk)) {
        previous = path;
        path = g_strjoin("/", GST_OBJECT_NAME(walk), previous, NULL);
        g_free(previous);
    }

    if (!(pspec->flags & G_PARAM_READABLE)) {
        GST_INFO_OBJECT(object, "Parameter %s not readable in %s.", pspec->name, path);
        g_free(path);
        return;
    }

    g_value_init(&value, pspec->value_type);
    g_object_get_property(G_OBJECT(orig), pspec->name, &value);

    /* Caps and strings need dedicated serialization. */
    if (G_VALUE_TYPE(&value) == GST_TYPE_CAPS)
        value_str = gst_caps_to_string(gst_value_get_caps(&value));
    else if (G_VALUE_HOLDS_STRING(&value))
        value_str = g_value_dup_string(&value);
    else
        value_str = gst_value_serialize(&value);

    GST_INFO_OBJECT(object, "%s%s = %s\n", path, pspec->name, value_str);
    g_free(value_str);
    g_value_unset(&value);

    g_free(path);
}
/* Example #15 */
/* GClosureNotify fired when the merged-callback closure is destroyed:
 * invokes the final callback with the collected list, then tears down
 * the merge context (mutex, list, context struct). */
static void callback_merger_on_destroy_data(CallbackMergeContext *context, GClosure *closure)
{
    GDestroyNotify item_destroy = context->item_destroy;

    OWR_UNUSED(closure);

    _owr_utils_call_closure_with_list(context->callback, context->list);
    g_mutex_clear(&context->mutex);

    if (item_destroy)
        g_list_free_full(context->list, item_destroy);
    else
        g_list_free(context->list);
    g_free(context);
}
/* Example #16 */
/* "notify::caps" handler on the renderer sink: logs the caps the sink
 * was configured with.
 *
 * Fix: g_object_get() on the "caps" property returns a new reference
 * that the original never released — unref it to avoid leaking caps on
 * every renegotiation. */
static void on_caps(GstElement *sink, GParamSpec *pspec, OwrMediaRenderer *media_renderer)
{
    GstCaps *caps;

    OWR_UNUSED(pspec);

    g_object_get(sink, "caps", &caps, NULL);

    if (GST_IS_CAPS(caps)) {
        GST_INFO_OBJECT(media_renderer, "%s renderer - configured with caps: %" GST_PTR_FORMAT,
                        media_renderer->priv->media_type == OWR_MEDIA_TYPE_AUDIO ? "Audio" :
                        media_renderer->priv->media_type == OWR_MEDIA_TYPE_VIDEO ? "Video" :
                        "Unknown", caps);
        gst_caps_unref(caps);
    }
}
/* "notify::caps" handler on the source element: logs the caps the source
 * was configured with, tagged with the media source's name.
 *
 * Fix: both the caps reference and the duplicated name string returned
 * by g_object_get() were leaked by the original — release them. */
static void on_caps(GstElement *source, GParamSpec *pspec, OwrMediaSource *media_source)
{
    gchar *media_source_name;
    GstCaps *caps;

    OWR_UNUSED(pspec);

    g_object_get(source, "caps", &caps, NULL);
    g_object_get(media_source, "name", &media_source_name, NULL);

    if (GST_IS_CAPS(caps)) {
        GST_INFO_OBJECT(source, "%s - configured with caps: %" GST_PTR_FORMAT,
            media_source_name, caps);
        gst_caps_unref(caps);
    }
    g_free(media_source_name);
}
/* "pad-removed" handler on the source tee: once no src pads remain,
 * no consumer is attached anymore, so schedule the media source for
 * shutdown (taking a reference that the shutdown handler releases). */
static void tee_pad_removed_cb(GstElement *tee, GstPad *old_pad, gpointer user_data)
{
    OwrMediaSource *media_source = user_data;
    GHashTable *args;

    OWR_UNUSED(old_pad);

    if (tee->numsrcpads)
        return;

    /* No sink is left, shutdown */
    args = _owr_create_schedule_table(OWR_MESSAGE_ORIGIN(media_source));
    g_hash_table_insert(args, "media_source", media_source);
    g_object_ref(media_source);

    _owr_schedule_with_hash_table((GSourceFunc)shutdown_media_source, args);
}
/* Example #19 */
/* "pad-removed" handler on the source tee: when only the internal
 * fakesink pad remains, no real consumer is left, so schedule the media
 * source for shutdown (taking a reference for the shutdown handler). */
static void tee_pad_removed_cb(GstElement *tee, GstPad *old_pad, gpointer user_data)
{
    OwrMediaSource *media_source = user_data;
    GHashTable *args;

    OWR_UNUSED(old_pad);

    if (tee->numsrcpads != 1)
        return;

    /* Only the fakesink is left, shutdown */
    args = g_hash_table_new(g_str_hash, g_str_equal);
    g_hash_table_insert(args, "media_source", media_source);
    g_object_ref(media_source);

    _owr_schedule_with_hash_table((GSourceFunc)shutdown_media_source, args);
}
/* Example #20 */
/* PulseAudio source-info iterator: collects each capture device
 * (skipping sink monitors) into the context list; at end-of-list the
 * list order is restored and the enumeration is finished. */
static void source_info_iterator(pa_context *pa_context, const pa_source_info *info, int eol, AudioListContext *context)
{
    OwrLocalMediaSource *source;

    OWR_UNUSED(pa_context);

    if (eol) {
        /* prepending reversed the order; restore it before finishing */
        context->list = g_list_reverse(context->list);
        finish_pa_list(context);
        return;
    }

    /* We don't want to list monitor sources */
    if (info->monitor_of_sink_name != NULL)
        return;

    source = _owr_local_media_source_new_cached(info->index, info->description,
        OWR_MEDIA_TYPE_AUDIO, OWR_SOURCE_TYPE_CAPTURE);
    context->list = g_list_prepend(context->list, source);
}
/* For each raw video structure, adds a variant with format unset */
static gboolean
fix_video_caps_format(GstCapsFeatures *f, GstStructure *s, gpointer user_data)
{
    GstCaps *out = GST_CAPS(user_data);

    OWR_UNUSED(f);

    gst_caps_append_structure(out, gst_structure_copy(s));

    /* Don't mess with non-raw structures */
    if (gst_structure_has_name(s, "video/x-raw")
        && gst_structure_has_field(s, "format")) {
        GstStructure *unconstrained = gst_structure_copy(s);

        gst_structure_remove_field(unconstrained, "format");
        gst_caps_append_structure(out, unconstrained);
    }

    return TRUE;
}
/* For each raw video structure, adds a variant with framerate unset */
static gboolean
fix_video_caps_framerate(GstCapsFeatures *f, GstStructure *s, gpointer user_data)
{
    GstCaps *out = GST_CAPS(user_data);
    gint fps_n, fps_d;

    OWR_UNUSED(f);

    gst_caps_append_structure(out, gst_structure_copy(s));

    /* Don't mess with non-raw structures; if possible try to limit the
     * framerate at the source already */
    if (gst_structure_has_name(s, "video/x-raw")
        && gst_structure_get_fraction(s, "framerate", &fps_n, &fps_d)) {
        GstStructure *unconstrained = gst_structure_copy(s);

        gst_structure_remove_field(unconstrained, "framerate");
        gst_caps_append_structure(out, unconstrained);
    }

    return TRUE;
}
/* Example #23 */
/* Idle callback (GSourceFunc): delivers a queued "close" request to the
 * data channel's on_datachannel_close closure. args must contain
 * "data_channel" (ref transferred to us). Always returns FALSE so the
 * idle source is removed after a single dispatch.
 *
 * Fix: the original applied OWR_UNUSED(data_channel) even though the
 * variable is assigned and used below — that misleading dead statement
 * is removed. */
static gboolean data_channel_close(GHashTable *args)
{
    OwrDataChannelPrivate *priv;
    OwrDataChannel *data_channel;
    GValue params[1] = { G_VALUE_INIT };

    data_channel = g_hash_table_lookup(args, "data_channel");
    priv = data_channel->priv;

    g_warn_if_fail(priv->on_datachannel_close);

    if (priv->on_datachannel_close) {
        g_value_init(&params[0], OWR_TYPE_DATA_CHANNEL);
        g_value_set_object(&params[0], data_channel);
        g_closure_invoke(priv->on_datachannel_close, NULL, 1, (const GValue *)&params, NULL);
        g_value_unset(&params[0]);
    }

    g_hash_table_unref(args);
    g_object_unref(data_channel);
    return FALSE;
}
/* Example #24 */
/* Test helper: posts bus messages from a freshly spawned thread.
 *
 * Fix: g_thread_new() returns a reference to the GThread; the thread is
 * never joined, so drop our reference immediately to avoid leaking it. */
static void post_messages_from_new_thread(OwrMessageOrigin *origin, gpointer user_data)
{
    GThread *thread;

    OWR_UNUSED(user_data);

    thread = g_thread_new("bus-test-thread", (GThreadFunc) post_message_thread_func, origin);
    g_thread_unref(thread);
}
/* Example #25 */
/* Test helper: unrefs the bus from a freshly spawned thread.
 *
 * Fix: g_thread_new() returns a reference to the GThread; the thread is
 * never joined, so drop our reference immediately to avoid leaking it. */
static void unref_bus_from_new_thread(OwrBus *bus, gpointer user_data)
{
    GThread *thread;

    OWR_UNUSED(user_data);

    thread = g_thread_new("bus-test-thread", (GThreadFunc) unref_bus_thread_func, bus);
    g_thread_unref(thread);
}
/* Example #26 */
/* GThreadedSocketService "run" handler implementing a minimal HTTP
 * server that serves BMP snapshot images.
 *
 * Per request: the characters after position 6 of the request line
 * select a renderer tag (truncated at the first '-'); a frame is pulled
 * from the matching image renderer and written back as an image/bmp
 * response. Unknown tags produce a 404. Returns FALSE so default
 * handlers may also run.
 *
 * Fix: the per-request `tag` string (g_strdup'd each iteration) was
 * never freed, leaking once per request; it is now released as soon as
 * the renderer lookup is done, including on the early-exit path. */
static gboolean on_incoming_connection(GThreadedSocketService *service,
    GSocketConnection *connection, GObject *source_object, OwrImageServer *image_server)
{
    GOutputStream *bos;
    GDataInputStream *dis;
    gchar *error_body, *error_header = NULL, *response_header = NULL;
    gchar *line, *tag;
    gsize line_length, i;
    guint content_length = 0;
    OwrImageRenderer *image_renderer;
    GBytes *image;
    gconstpointer image_data;
    gsize image_data_size = 0;

    OWR_UNUSED(service);
    OWR_UNUSED(source_object);

    g_return_val_if_fail(OWR_IS_IMAGE_SERVER(image_server), TRUE);

    bos = g_buffered_output_stream_new(g_io_stream_get_output_stream(G_IO_STREAM(connection)));
    dis = g_data_input_stream_new(g_io_stream_get_input_stream(G_IO_STREAM(connection)));
    g_data_input_stream_set_newline_type(dis, G_DATA_STREAM_NEWLINE_TYPE_CR_LF);

    error_body = "404 Not Found";
    error_header = g_strdup_printf(HTTP_RESPONSE_HEADER_TEMPLATE, 404, "Not Found",
        "text/plain", (guint)strlen(error_body));

    while (TRUE) {
        line = g_data_input_stream_read_line(dis, &line_length, NULL, NULL);
        if (!line)
            break;

        if (line_length > 6) {
            /* tag is the path component after "GET /", cut at the first '-' */
            tag = g_strdup(line + 7);
            for (i = 0; i < strlen(tag); i++) {
                if (tag[i] == '-') {
                    tag[i] = '\0';
                    break;
                }
            }
        } else
            tag = NULL;

        g_free(line);

        while ((line = g_data_input_stream_read_line(dis, &line_length, NULL, NULL))) {
            g_free(line);

            if (!line_length) {
                /* got all request headers */
                break;
            }
        }

        if (!line) {
            g_free(tag);
            break;
        }

        g_mutex_lock(&image_server->priv->image_renderers_mutex);
        image_renderer = tag ? g_hash_table_lookup(image_server->priv->image_renderers, tag) : NULL;
        g_mutex_unlock(&image_server->priv->image_renderers_mutex);
        g_free(tag);

        image = image_renderer ? _owr_image_renderer_pull_bmp_image(image_renderer) : NULL;

        if (!image) {
            g_output_stream_write(bos, error_header, strlen(error_header), NULL, NULL);
            g_output_stream_write(bos, error_body, strlen(error_body), NULL, NULL);
            break;
        }

        image_data = g_bytes_get_data(image, &image_data_size);

        /* Rebuild the response header only when the image size changes. */
        if (content_length != image_data_size) {
            content_length = image_data_size;
            g_free(response_header);
            response_header = g_strdup_printf(HTTP_RESPONSE_HEADER_TEMPLATE, 200, "OK",
                "image/bmp", content_length);
            g_buffered_output_stream_set_buffer_size(G_BUFFERED_OUTPUT_STREAM(bos),
                strlen(response_header) + content_length);
        }
        g_output_stream_write(bos, response_header, strlen(response_header), NULL, NULL);
        g_output_stream_write(bos, image_data, image_data_size, NULL, NULL);
        g_output_stream_flush(bos, NULL, NULL);

        g_bytes_unref(image);
    }

    g_free(response_header);
    g_free(error_header);
    g_object_unref(dis);
    g_object_unref(bos);

    return FALSE;
}
/* Example #27 */
/* Test helper: posts a TEST event message on behalf of the given
 * message origin, from the calling thread. */
static void post_messages(OwrMessageOrigin *origin, gpointer user_data)
{
    OWR_UNUSED(user_data);
    OWR_POST_EVENT(origin, TEST, NULL);
}