static int base_init(base *self, PyObject *args, PyObject *kwds)
{
	// call tp_init of PyGObject, stripping off the two arguments consumed below
	Py_ssize_t args_len = PyTuple_Size(args);
	PyObject *reduced_args = PySequence_GetSlice(args, 2, args_len);
	if (reduced_args == NULL) return -1;
	int init_status = PyGObject_Type->tp_init((PyObject *)self, reduced_args, kwds);
	Py_DECREF(reduced_args);	// drop the slice even when tp_init fails
	if (init_status < 0) return -1;

	// parse arguments
	if (!PyArg_ParseTuple(args, "O!O!", PyGObject_Type, &(self->spectrum_element), PyGObject_Type, &(self->pipeline))) return -1;

	// listen for spectrum messages
	GstBus *gstbus = gst_pipeline_get_bus(GST_PIPELINE(self->pipeline->obj));
	g_assert(gstbus != NULL);
	gst_bus_add_signal_watch(gstbus);
	Py_INCREF(self);	// keep self alive for the signal callback
	g_signal_connect(G_OBJECT(gstbus), "message::element", G_CALLBACK(on_message), self);
	gst_object_unref(gstbus);

	// get clock of pipeline
	self->sync_clock = gst_pipeline_get_clock(GST_PIPELINE(self->pipeline->obj));
	g_assert(self->sync_clock != NULL);

	return 0;
}
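The handler connected above, on_message, is not part of the snippet. A minimal sketch of what it might look like, assuming the base struct holds the PyGObject pointers parsed in base_init (the field access below is an assumption, not from the original):

// Hedged sketch of the assumed on_message handler; the ->obj access
// follows typical PyGObject wrappers and is not from the original code.
static void on_message(GstBus *bus, GstMessage *message, gpointer user_data)
{
	base *self = (base *)user_data;
	const GstStructure *s = gst_message_get_structure(message);

	// the spectrum element posts element messages whose structure is named "spectrum"
	if (s != NULL && gst_structure_has_name(s, "spectrum") &&
	    GST_MESSAGE_SRC(message) == GST_OBJECT(self->spectrum_element->obj)) {
		// ... read the "magnitude" list from s and forward it to Python ...
	}
}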
Example #2
void ofxGstRTPServer::sendAudioOut(PooledAudioFrame * pooledFrame){
	// running time of the pipeline: clock time - base time
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);	// gst_pipeline_get_clock() returns a new reference
	if(firstAudioFrame && !audioAutoTimestamp){
		prevTimestampAudio = now;
		firstAudioFrame = false;
		return;
	}

	// 16-bit samples: 2 bytes per sample per channel
	int size = pooledFrame->audioFrame._payloadDataLengthInSamples*2*pooledFrame->audioFrame._audioChannel;

	GstBuffer * echoCancelledBuffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledFrame->audioFrame._payloadData,size,0,size,pooledFrame,(GDestroyNotify)&ofxWebRTCAudioPool::relaseFrame);

	if(!audioAutoTimestamp){
		GstClockTime duration = (pooledFrame->audioFrame._payloadDataLengthInSamples * GST_SECOND / pooledFrame->audioFrame._frequencyInHz);
		// advance by exactly one buffer duration from the previous timestamp
		now = prevTimestampAudio + duration;

		GST_BUFFER_OFFSET(echoCancelledBuffer) = numFrameAudio++;
		GST_BUFFER_OFFSET_END(echoCancelledBuffer) = numFrameAudio;
		GST_BUFFER_DTS (echoCancelledBuffer) = now;
		GST_BUFFER_PTS (echoCancelledBuffer) = now;
		GST_BUFFER_DURATION(echoCancelledBuffer) = duration;
		prevTimestampAudio = now;
	}

	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcAudio, echoCancelledBuffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError(LOG_NAME) << "error pushing audio buffer: flow_return was " << flow_return;
	}
}
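The timestamping above advances the PTS by exactly one buffer duration per push. The same arithmetic, factored into a hypothetical helper (not part of ofxGstRTP; gst_util_uint64_scale is used for overflow-safe integer scaling):

// Illustrative helper: stamp a fixed-rate audio buffer with monotonic
// timestamps. prev_pts carries state between calls.
static void stamp_audio_buffer(GstBuffer *buf, GstClockTime *prev_pts,
                               guint64 nsamples, guint64 rate)
{
	GstClockTime duration = gst_util_uint64_scale(nsamples, GST_SECOND, rate);
	GST_BUFFER_PTS(buf) = *prev_pts + duration;
	GST_BUFFER_DTS(buf) = GST_BUFFER_PTS(buf);
	GST_BUFFER_DURATION(buf) = duration;
	*prev_pts = GST_BUFFER_PTS(buf);
}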
Example #3
static gboolean create_pipeline(SpiceGstDecoder *decoder)
{
    gchar *desc;
    gboolean auto_enabled;
    guint opt;
    GstAppSinkCallbacks appsink_cbs = { NULL };
    GError *err = NULL;
    GstBus *bus;

    auto_enabled = (g_getenv("SPICE_GSTVIDEO_AUTO") != NULL);
    if (auto_enabled || !VALID_VIDEO_CODEC_TYPE(decoder->base.codec_type)) {
        SPICE_DEBUG("Trying %s for codec type %d %s",
                    gst_opts[0].dec_name, decoder->base.codec_type,
                    (auto_enabled) ? "(SPICE_GSTVIDEO_AUTO is set)" : "");
        opt = 0;
    } else {
        opt = decoder->base.codec_type;
    }

    /* - We schedule the frame display ourselves so set sync=false on appsink
     *   so the pipeline decodes them as fast as possible. This will also
     *   minimize the risk of frames getting lost when we rebuild the
     *   pipeline.
     * - Set max-bytes=0 on appsrc so it does not drop frames that may be
     *   needed by those that follow.
     */
    desc = g_strdup_printf("appsrc name=src is-live=true format=time max-bytes=0 block=true "
                           "%s ! %s ! videoconvert ! appsink name=sink "
                           "caps=video/x-raw,format=BGRx sync=false drop=false",
                           gst_opts[opt].dec_caps, gst_opts[opt].dec_name);
    SPICE_DEBUG("GStreamer pipeline: %s", desc);

    decoder->pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &err);
    g_free(desc);
    if (!decoder->pipeline) {
        spice_warning("GStreamer error: %s", err->message);
        g_clear_error(&err);
        return FALSE;
    }

    decoder->appsrc = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "src"));
    decoder->appsink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "sink"));

    appsink_cbs.new_sample = new_sample;
    gst_app_sink_set_callbacks(decoder->appsink, &appsink_cbs, decoder, NULL);
    bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipeline));
    gst_bus_add_watch(bus, handle_pipeline_message, decoder);
    gst_object_unref(bus);

    decoder->clock = gst_pipeline_get_clock(GST_PIPELINE(decoder->pipeline));

    if (gst_element_set_state(decoder->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        SPICE_DEBUG("GStreamer error: Unable to set the pipeline to the playing state.");
        free_pipeline(decoder);
        return FALSE;
    }

    return TRUE;
}
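The new_sample and handle_pipeline_message callbacks are defined elsewhere in spice-gtk. As a rough sketch, an appsink new-sample callback typically pulls the sample and hands the buffer off, along these lines (hypothetical body, not the actual spice-gtk code):

/* Hedged sketch of an appsink "new-sample" callback; the real spice-gtk
 * handler queues frames for scheduled display instead of this stub. */
static GstFlowReturn new_sample(GstAppSink *sink, gpointer user_data)
{
    SpiceGstDecoder *decoder = user_data;
    GstSample *sample = gst_app_sink_pull_sample(sink);
    if (sample == NULL) {
        return GST_FLOW_ERROR; /* e.g. the appsink is shutting down */
    }
    /* ... take a ref on the buffer and pass it to the display path ... */
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}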
Example #4
GstClockTime ofxGstRTPServer::getTimeStamp(){
	if(!gst.isLoaded()) return GST_CLOCK_TIME_NONE;
	// running time of the pipeline: clock time - base time
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);	// gst_pipeline_get_clock() returns a new reference
	return now;
}
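Examples #2 and #4 above (and #7 below) all compute the pipeline's running time as clock time minus base time. Factored into a standalone helper for reference (a sketch, not from the original sources; newer GStreamer releases also provide gst_element_get_current_running_time for this):

// Sketch: running time = clock time - base time.
static GstClockTime get_running_time(GstElement *pipeline)
{
	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
	GstClockTime now = gst_clock_get_time(clock);
	gst_object_unref(clock);	// get_clock returns a new reference
	return now - gst_element_get_base_time(pipeline);
}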
Example #5
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *spectrum, *sink;
  GstBus *bus;
  GtkWidget *appwindow;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  bin = gst_pipeline_new ("bin");

  src = gst_element_factory_make (DEFAULT_AUDIOSRC, "src");

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "post-messages", TRUE, NULL);

  sink = gst_element_factory_make ("fakesink", "sink");

  gst_bin_add_many (GST_BIN (bin), src, spectrum, sink, NULL);
  if (!gst_element_link_many (src, spectrum, sink, NULL)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }

  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  sync_clock = gst_pipeline_get_clock (GST_PIPELINE (bin));

  appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (appwindow), "destroy",
      G_CALLBACK (on_window_destroy), NULL);

  drawingarea = gtk_drawing_area_new ();
  gtk_widget_set_size_request (drawingarea, spect_bands, spect_height);
  g_signal_connect (G_OBJECT (drawingarea), "configure-event",
      G_CALLBACK (on_configure_event), (gpointer) spectrum);
  gtk_container_add (GTK_CONTAINER (appwindow), drawingarea);
  gtk_widget_show_all (appwindow);

  gst_element_set_state (bin, GST_STATE_PLAYING);
  gtk_main ();
  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (sync_clock);
  gst_object_unref (bin);

  return 0;
}
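The message_handler installed on the bus is not shown; it is where sync_clock comes in. A hedged sketch of its shape, based on the message format the spectrum element documents ("spectrum" structure with timestamp, duration, and magnitude fields):

/* Sketch of the assumed message_handler: pick out "spectrum" element
 * messages and schedule rendering against sync_clock. */
static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "spectrum")) {
      GstClockTime timestamp, duration;
      gst_structure_get_clock_time (s, "timestamp", &timestamp);
      gst_structure_get_clock_time (s, "duration", &duration);
      /* ... wait on sync_clock until timestamp + duration, then read the
       * "magnitude" list from s and redraw drawingarea ... */
    }
  }
  return TRUE;
}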
Example #6
File: introbin.c Project: LjsOks1/kiosk
static void on_new_decoded_pad(GstElement *introdec,
                               GstPad *srcpad,
                               gpointer data)
{
   GstPadLinkReturn result;
   GstPad *sinkpad;
   GstCaps *new_pad_caps;
   gchar *caps_str;

   CustomData *cdata=(CustomData*)data;
   GstElement *introbin=cdata->introbin;

   new_pad_caps=gst_pad_query_caps(srcpad,NULL);
   caps_str=gst_caps_to_string(new_pad_caps);
   g_print("Caps:%s\n",caps_str);

   /* Set up the src pad offset so the new stream runs in sync with the
    * pipeline: offset = clock time - base time (the pipeline's running time). */
   gint64 pos2;
   pos2=gst_element_get_base_time(cdata->pipeline);
   GstClock *clock;
   clock=gst_pipeline_get_clock(GST_PIPELINE(cdata->pipeline));
   GstClockTime clock_time;
   clock_time=gst_clock_get_time(clock);
   gst_object_unref(clock);
   gst_pad_set_offset(srcpad,clock_time-pos2);
   cdata->introbin_offset=clock_time-pos2;

   if(strncmp(caps_str,"video",5)==0) {
       GstElement *vqueue;
       vqueue=gst_bin_get_by_name(GST_BIN(introbin),"introscale");
       sinkpad=gst_element_get_static_pad(vqueue,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          g_printerr("Couldn't link introbin decodebin video pad...\n");
       }
       gst_object_unref(sinkpad);
       gst_object_unref(vqueue);
   }
   if(strncmp(caps_str,"audio",5)==0) {
       GstElement *arate;
       arate=gst_bin_get_by_name(GST_BIN(introbin),"introaudiorate");
       sinkpad=gst_element_get_static_pad(arate,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          GstCaps *peer_caps;
          gchar *peer_str;
          peer_caps=gst_pad_query_caps(sinkpad,NULL);
          peer_str=gst_caps_to_string(peer_caps);
          g_print("SinkCaps:%s\n",peer_str);
          g_printerr("Couldn't link introbin decodebin audio pad...\n");
          g_free(peer_str);
          gst_caps_unref(peer_caps);
       }
       gst_object_unref(sinkpad);
       gst_object_unref(arate);
   }
   g_free(caps_str);
   gst_caps_unref(new_pad_caps);
}
Example #7
void ofxGstRTPServer::emitDepthKeyFrame(){
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	GstClockTime time = gst_clock_get_time (clock);
	GstClockTime now = time - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);	// gst_pipeline_get_clock() returns a new reference
	// ask the encoder downstream to emit a key frame at the current running time
	GstEvent * keyFrameEvent = gst_video_event_new_downstream_force_key_unit(now,
															 time,
															 now,
															 TRUE,
															 0);
	gst_element_send_event(appSrcDepth,keyFrameEvent);
}
Example #8
File: introbin.c Project: LjsOks1/kiosk
gboolean introbin_set_pad_offset(CustomData *data)
{
  gint64 pos2;
  pos2=gst_element_get_base_time(data->pipeline);
  GstClock *clock;
  clock=gst_pipeline_get_clock(GST_PIPELINE(data->pipeline));
  GstClockTime clock_time;
  clock_time=gst_clock_get_time(clock);
  gst_object_unref(clock);
  g_print("Pipeline times: base_time=%" G_GINT64_FORMAT " clock_time=%" G_GUINT64_FORMAT "\n",
		            pos2,clock_time);
  GstElement *dec=gst_bin_get_by_name(GST_BIN(data->introbin),"introdec");
  GstPad *src_pad1,*src_pad2;
  src_pad1=gst_element_get_static_pad(GST_ELEMENT(dec),"src_0");
  gst_pad_set_offset(src_pad1,clock_time-pos2);
  gst_object_unref(src_pad1);
  src_pad2=gst_element_get_static_pad(GST_ELEMENT(dec),"src_1");
  gst_pad_set_offset(src_pad2,clock_time-pos2);
  gst_object_unref(src_pad2);
  gst_object_unref(dec);

  return TRUE;
}
Example #9
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *spectrum, *audioconvert, *sink;
  GstBus *bus;
  GtkWidget *appwindow, *vbox, *widget;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  bin = gst_pipeline_new ("bin");

  src = gst_element_factory_make ("audiotestsrc", "src");
  g_object_set (G_OBJECT (src), "wave", 0, NULL);

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "message", TRUE, NULL);

  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");

  sink = gst_element_factory_make (DEFAULT_AUDIOSINK, "sink");

  gst_bin_add_many (GST_BIN (bin), src, spectrum, audioconvert, sink, NULL);
  if (!gst_element_link_many (src, spectrum, audioconvert, sink, NULL)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }

  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  sync_clock = gst_pipeline_get_clock (GST_PIPELINE (bin));

  appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (appwindow), "destroy",
      G_CALLBACK (on_window_destroy), NULL);
  vbox = gtk_vbox_new (FALSE, 6);

  widget = gtk_hscale_new_with_range (50.0, 20000.0, 10);
  gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE);
  gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP);
  gtk_range_set_value (GTK_RANGE (widget), 440.0);
  g_signal_connect (G_OBJECT (widget), "value-changed",
      G_CALLBACK (on_frequency_changed), (gpointer) src);
  gtk_box_pack_start (GTK_BOX (vbox), widget, FALSE, FALSE, 0);

  drawingarea = gtk_drawing_area_new ();
  gtk_widget_set_size_request (drawingarea, spect_bands, spect_height);
  g_signal_connect (G_OBJECT (drawingarea), "configure-event",
      G_CALLBACK (on_configure_event), (gpointer) spectrum);
  gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0);

  gtk_container_add (GTK_CONTAINER (appwindow), vbox);
  gtk_widget_show_all (appwindow);

  gst_element_set_state (bin, GST_STATE_PLAYING);
  gtk_main ();
  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (sync_clock);
  gst_object_unref (bin);

  return 0;
}
Example #10
gint
main (gint argc, gchar ** argv)
{
    gint res = 1;
    GstElement *src, *sink;
    GstElement *bin;
    GstController *ctrl;
    GstInterpolationControlSource *csource1, *csource2;
    GstClock *clock;
    GstClockID clock_id;
    GstClockReturn wait_ret;
    GValue vol = { 0, };

    gst_init (&argc, &argv);
    gst_controller_init (&argc, &argv);

    /* build pipeline */
    bin = gst_pipeline_new ("pipeline");
    clock = gst_pipeline_get_clock (GST_PIPELINE (bin));
    src = gst_element_factory_make ("audiotestsrc", "gen_audio");
    if (!src) {
        GST_WARNING ("need audiotestsrc from gst-plugins-base");
        goto Error;
    }
    sink = gst_element_factory_make ("autoaudiosink", "play_audio");
    if (!sink) {
        GST_WARNING ("need autoaudiosink from gst-plugins-base");
        goto Error;
    }

    gst_bin_add_many (GST_BIN (bin), src, sink, NULL);
    if (!gst_element_link (src, sink)) {
        GST_WARNING ("can't link elements");
        goto Error;
    }

    /* square wave
       g_object_set (G_OBJECT(src), "wave", 1, NULL);
     */

    /* add a controller to the source */
    if (!(ctrl = gst_controller_new (G_OBJECT (src), "freq", "volume", NULL))) {
        GST_WARNING ("can't control source element");
        goto Error;
    }

    csource1 = gst_interpolation_control_source_new ();
    csource2 = gst_interpolation_control_source_new ();

    gst_controller_set_control_source (ctrl, "volume",
                                       GST_CONTROL_SOURCE (csource1));
    gst_controller_set_control_source (ctrl, "freq",
                                       GST_CONTROL_SOURCE (csource2));

    /* Set interpolation mode */

    gst_interpolation_control_source_set_interpolation_mode (csource1,
            GST_INTERPOLATE_LINEAR);
    gst_interpolation_control_source_set_interpolation_mode (csource2,
            GST_INTERPOLATE_LINEAR);

    /* set control values */
    g_value_init (&vol, G_TYPE_DOUBLE);
    g_value_set_double (&vol, 0.0);
    gst_interpolation_control_source_set (csource1, 0 * GST_SECOND, &vol);
    g_value_set_double (&vol, 1.0);
    gst_interpolation_control_source_set (csource1, 5 * GST_SECOND, &vol);

    g_object_unref (csource1);

    g_value_set_double (&vol, 220.0);
    gst_interpolation_control_source_set (csource2, 0 * GST_SECOND, &vol);
    g_value_set_double (&vol, 3520.0);
    gst_interpolation_control_source_set (csource2, 3 * GST_SECOND, &vol);
    g_value_set_double (&vol, 440.0);
    gst_interpolation_control_source_set (csource2, 6 * GST_SECOND, &vol);

    g_object_unref (csource2);

    clock_id =
        gst_clock_new_single_shot_id (clock,
                                      gst_clock_get_time (clock) + (7 * GST_SECOND));

    /* run for 7 seconds */
    if (gst_element_set_state (bin, GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE) {
        if ((wait_ret = gst_clock_id_wait (clock_id, NULL)) != GST_CLOCK_OK) {
            GST_WARNING ("clock_id_wait returned: %d", wait_ret);
        }
        gst_element_set_state (bin, GST_STATE_NULL);
    }

    /* cleanup */
    g_object_unref (G_OBJECT (ctrl));
    gst_clock_id_unref (clock_id);
    gst_object_unref (G_OBJECT (clock));
    gst_object_unref (G_OBJECT (bin));
    res = 0;
Error:
    return (res);
}
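Example #10 relies on the GStreamer 0.10 GstController API (gst_controller_new, gst_controller_set_control_source). For comparison, a sketch of the same 0-to-1 volume ramp using the GStreamer 1.x control-binding API (requires <gst/controller/controller.h>; illustrative fragment, not part of the original example):

    /* Sketch: GStreamer 1.x replacement for the deprecated controller setup. */
    GstControlSource *cs = gst_interpolation_control_source_new ();
    g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
    gst_object_add_control_binding (GST_OBJECT (src),
        gst_direct_control_binding_new (GST_OBJECT (src), "volume", cs));
    /* direct bindings take values normalized to the property's range */
    gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
        0 * GST_SECOND, 0.0);
    gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
        5 * GST_SECOND, 1.0);
    gst_object_unref (cs);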