MsgPopThread::~MsgPopThread () { gst_object_unref (Bus_); }
static gboolean gst_navseek_handle_src_event (GstPad * pad, GstEvent * event) { GstNavSeek *navseek; gboolean ret = TRUE; navseek = GST_NAVSEEK (GST_PAD_PARENT (pad)); switch (GST_EVENT_TYPE (event)) { case GST_EVENT_NAVIGATION: /* Check for a keyup and convert left/right to a seek event */ { const GstStructure *structure; const gchar *event_type; structure = gst_event_get_structure (event); g_return_val_if_fail (structure != NULL, FALSE); event_type = gst_structure_get_string (structure, "event"); g_return_val_if_fail (event_type != NULL, FALSE); if (strcmp (event_type, "key-press") == 0) { const gchar *key; key = gst_structure_get_string (structure, "key"); g_return_val_if_fail (key != NULL, FALSE); if (strcmp (key, "Left") == 0) { /* Seek backward by 5 secs */ gst_navseek_seek (navseek, -1.0 * navseek->seek_offset * GST_SECOND); } else if (strcmp (key, "Right") == 0) { /* Seek forward */ gst_navseek_seek (navseek, navseek->seek_offset * GST_SECOND); } else if (strcmp (key, "s") == 0) { /* Grab the next frame as the start frame of a segment */ navseek->grab_seg_start = TRUE; } else if (strcmp (key, "e") == 0) { /* Grab the next frame as the end frame of a segment */ navseek->grab_seg_end = TRUE; } else if (strcmp (key, "l") == 0) { /* Toggle the loop flag. If we have both start and end segment times send a seek */ navseek->loop = !navseek->loop; gst_navseek_segseek (navseek); } } else { break; } gst_event_unref (event); event = NULL; } break; default: break; } if (event && GST_PAD_IS_LINKED (GST_BASE_TRANSFORM (navseek)->sinkpad)) { GstPad *peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad); ret = gst_pad_send_event (peer_pad, event); gst_object_unref (peer_pad); } return ret; }
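/* The gst_navseek_seek() helper called above is not part of this excerpt. A minimal
 * sketch of what it might look like, assuming the GStreamer 0.10 API used by the
 * handler: query the current position on the sink pad's peer, then send a flushing
 * seek relative to it. */
static void
gst_navseek_seek (GstNavSeek * navseek, gint64 offset)
{
  GstFormat peer_format = GST_FORMAT_TIME;
  gint64 peer_value;
  GstPad *peer_pad;

  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
  if (!peer_pad)
    return;

  /* query the current position, then seek to position + offset */
  if (gst_pad_query_position (peer_pad, &peer_format, &peer_value) &&
      peer_format == GST_FORMAT_TIME) {
    peer_value += offset;
    if (peer_value < 0)
      peer_value = 0;

    gst_pad_send_event (peer_pad,
        gst_event_new_seek (1.0, GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
            GST_SEEK_TYPE_SET, peer_value, GST_SEEK_TYPE_NONE, -1));
  }
  gst_object_unref (peer_pad);
}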
static void gst_insert_bin_block_pad_unlock (GstInsertBin * self) { struct ChangeData *data; GstPad *pad; GstPadProbeType probetype; again: data = g_queue_peek_head (&self->priv->change_queue); if (!data) { GST_OBJECT_UNLOCK (self); return; } if (data->action == GST_INSERT_BIN_ACTION_ADD && !validate_element (self, data->element)) goto error; if (data->action == GST_INSERT_BIN_ACTION_ADD) { if (data->sibling) { if (data->direction == DIRECTION_BEFORE) pad = get_single_pad (data->sibling, GST_PAD_SINK); else pad = get_single_pad (data->sibling, GST_PAD_SRC); } else { if (data->direction == DIRECTION_BEFORE) pad = (GstPad *) gst_proxy_pad_get_internal (GST_PROXY_PAD (self->priv->srcpad)); else pad = (GstPad *) gst_proxy_pad_get_internal (GST_PROXY_PAD (self->priv->sinkpad)); } if (!pad) { GST_WARNING_OBJECT (self, "Can not obtain a sibling pad to block" " before adding"); goto error; } if (!is_right_direction_for_block (pad)) { GstPad *peer = gst_pad_get_peer (pad); if (peer) { gst_object_unref (pad); pad = peer; } } } else { GstPad *element_pad; element_pad = get_single_pad (data->element, GST_PAD_SINK); if (!element_pad) { GST_WARNING_OBJECT (self, "Can not obtain the element's sink pad"); goto error; } if (!is_right_direction_for_block (element_pad)) { pad = gst_pad_get_peer (element_pad); } else { gst_object_unref (element_pad); element_pad = get_single_pad (data->element, GST_PAD_SRC); if (!element_pad) { GST_WARNING_OBJECT (self, "Can not obtain the element's src pad"); goto error; } pad = gst_pad_get_peer (element_pad); } gst_object_unref (element_pad); if (!pad) { GST_WARNING_OBJECT (self, "Can not obtain a sibling pad for removing"); goto error; } } if (GST_PAD_IS_SRC (pad)) probetype = GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM; else probetype = GST_PAD_PROBE_TYPE_BLOCK_UPSTREAM; GST_OBJECT_UNLOCK (self); gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_IDLE | probetype, pad_blocked_cb, self, NULL); gst_object_unref (pad); return; error: g_queue_pop_head (&self->priv->change_queue); gst_insert_bin_change_data_complete (self, data, FALSE); goto again; }
/* build a pipeline equivalent to: * * gst-launch -v rtpbin name=rtpbin \ * udpsrc caps=$AUDIO_CAPS port=5002 ! rtpbin.recv_rtp_sink_0 \ * rtpbin. ! rtppcmadepay ! alawdec ! audioconvert ! audioresample ! alsasink \ * udpsrc port=5003 ! rtpbin.recv_rtcp_sink_0 \ * rtpbin.send_rtcp_src_0 ! udpsink port=5007 host=$DEST sync=false async=false */ int main (int argc, char *argv[]) { GstElement *rtpbin, *rtpsrc, *rtcpsrc, *rtcpsink; GstElement *audiodepay, *audiodec, *audiores, *audioconv, *audiosink; GstElement *pipeline; GMainLoop *loop; GstCaps *caps; gboolean res; GstPadLinkReturn lres; GstPad *srcpad, *sinkpad; /* always init first */ gst_init (&argc, &argv); /* the pipeline to hold everything */ pipeline = gst_pipeline_new (NULL); g_assert (pipeline); /* the udp src and source we will use for RTP and RTCP */ rtpsrc = gst_element_factory_make ("udpsrc", "rtpsrc"); g_assert (rtpsrc); g_object_set (rtpsrc, "port", 5002, NULL); /* we need to set caps on the udpsrc for the RTP data */ caps = gst_caps_from_string (AUDIO_CAPS); g_object_set (rtpsrc, "caps", caps, NULL); gst_caps_unref (caps); rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc"); g_assert (rtcpsrc); g_object_set (rtcpsrc, "port", 5003, NULL); rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink"); g_assert (rtcpsink); g_object_set (rtcpsink, "port", 5007, "host", DEST_HOST, NULL); /* no need for synchronisation or preroll on the RTCP sink */ g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL); gst_bin_add_many (GST_BIN (pipeline), rtpsrc, rtcpsrc, rtcpsink, NULL); /* the depayloading and decoding */ audiodepay = gst_element_factory_make (AUDIO_DEPAY, "audiodepay"); g_assert (audiodepay); audiodec = gst_element_factory_make (AUDIO_DEC, "audiodec"); g_assert (audiodec); /* the audio playback and format conversion */ audioconv = gst_element_factory_make ("audioconvert", "audioconv"); g_assert (audioconv); audiores = gst_element_factory_make ("audioresample", "audiores"); g_assert (audiores); audiosink = gst_element_factory_make (AUDIO_SINK, "audiosink"); g_assert (audiosink); /* add depayloading and playback to the pipeline and link */ gst_bin_add_many (GST_BIN (pipeline), audiodepay, audiodec, audioconv, audiores, audiosink, NULL); res = gst_element_link_many (audiodepay, audiodec, audioconv, audiores, audiosink, NULL); g_assert (res == TRUE); /* the rtpbin element */ rtpbin = gst_element_factory_make ("rtpbin", "rtpbin"); g_assert (rtpbin); gst_bin_add (GST_BIN (pipeline), rtpbin); /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */ srcpad = gst_element_get_static_pad (rtpsrc, "src"); sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0"); lres = gst_pad_link (srcpad, sinkpad); g_assert (lres == GST_PAD_LINK_OK); gst_object_unref (srcpad); /* get an RTCP sinkpad in session 0 */ srcpad = gst_element_get_static_pad (rtcpsrc, "src"); sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0"); lres = gst_pad_link (srcpad, sinkpad); g_assert (lres == GST_PAD_LINK_OK); gst_object_unref (srcpad); gst_object_unref (sinkpad); /* get an RTCP srcpad for sending RTCP back to the sender */ srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0"); sinkpad = gst_element_get_static_pad (rtcpsink, "sink"); lres = gst_pad_link (srcpad, sinkpad); g_assert (lres == GST_PAD_LINK_OK); gst_object_unref (sinkpad); /* the RTP pad that we have to connect to the depayloader will be created * dynamically so we connect to the pad-added signal, pass the depayloader as * user_data so that 
we can link to it. */ g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), audiodepay); /* give some stats when we receive RTCP */ g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb), audiodepay); /* set the pipeline to playing */ g_print ("starting receiver pipeline\n"); gst_element_set_state (pipeline, GST_STATE_PLAYING); /* we need to run a GLib main loop to get the messages */ loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (loop); g_print ("stopping receiver pipeline\n"); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
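/* pad_added_cb and on_ssrc_active_cb are referenced above but not shown in this
 * excerpt. A minimal sketch of the pad-added callback, assuming the depayloader is
 * passed as user_data as above: link rtpbin's dynamically created recv_rtp_src pad
 * to the depayloader's sink pad. */
static void
pad_added_cb (GstElement * rtpbin, GstPad * new_pad, gpointer user_data)
{
  GstElement *depay = GST_ELEMENT (user_data);
  GstPad *sinkpad;
  GstPadLinkReturn lres;

  g_print ("new payload on pad: %s\n", GST_PAD_NAME (new_pad));

  sinkpad = gst_element_get_static_pad (depay, "sink");
  g_assert (sinkpad);

  /* link the new RTP source pad to the depayloader */
  lres = gst_pad_link (new_pad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);
}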
int main (int argc, char **argv) { #ifdef WIN32 HGLRC sdl_gl_context = 0; HDC sdl_dc = 0; #else SDL_SysWMinfo info; Display *sdl_display = NULL; Window sdl_win = 0; GLXContext sdl_gl_context = NULL; #endif GMainLoop *loop = NULL; GstPipeline *pipeline = NULL; GstBus *bus = NULL; GstElement *glfilter = NULL; GstElement *fakesink = NULL; GstState state; GAsyncQueue *queue_input_buf = NULL; GAsyncQueue *queue_output_buf = NULL; GstGLDisplay *display; GstGLContext *sdl_context; const gchar *platform; /* Initialize SDL for video output */ if (SDL_Init (SDL_INIT_VIDEO) < 0) { fprintf (stderr, "Unable to initialize SDL: %s\n", SDL_GetError ()); return -1; } /* Create a 640x480 OpenGL screen */ if (SDL_SetVideoMode (640, 480, 0, SDL_OPENGL) == NULL) { fprintf (stderr, "Unable to create OpenGL screen: %s\n", SDL_GetError ()); SDL_Quit (); return -1; } /* Set the title bar in environments that support it */ SDL_WM_SetCaption ("SDL and gst-plugins-gl", NULL); /* Loop, drawing and checking events */ InitGL (640, 480); gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); /* retrieve and turn off sdl opengl context */ #ifdef WIN32 sdl_gl_context = wglGetCurrentContext (); sdl_dc = wglGetCurrentDC (); wglMakeCurrent (0, 0); platform = "wgl"; display = gst_gl_display_new (); #else SDL_VERSION (&info.version); SDL_GetWMInfo (&info); /* FIXME: This display is different to the one that SDL uses to create the * GL context inside SDL_SetVideoMode() above which fails on Intel hardware */ sdl_display = info.info.x11.display; sdl_win = info.info.x11.window; sdl_gl_context = glXGetCurrentContext (); glXMakeCurrent (sdl_display, None, 0); platform = "glx"; display = (GstGLDisplay *) gst_gl_display_x11_new_with_display (sdl_display); #endif sdl_context = gst_gl_context_new_wrapped (display, (guintptr) sdl_gl_context, gst_gl_platform_from_string (platform), GST_GL_API_OPENGL); pipeline = GST_PIPELINE (gst_parse_launch ("videotestsrc ! video/x-raw, width=320, height=240, framerate=(fraction)30/1 ! " "gleffects effect=5 ! 
fakesink sync=1", NULL)); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), loop); gst_object_unref (bus); /* sdl_gl_context is an external OpenGL context with which gst-plugins-gl wants to share textures */ glfilter = gst_bin_get_by_name (GST_BIN (pipeline), "gleffects0"); g_object_set (G_OBJECT (glfilter), "other-context", sdl_context, NULL); gst_object_unref (glfilter); /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and * shared with the sdl one */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED); state = GST_STATE_PAUSED; if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL, GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) { g_debug ("failed to pause pipeline\n"); return -1; } /* make the sdl opengl context current again */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif /* push a gst-gl buffer back onto this queue once it is no longer needed */ fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0"); g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL); g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL); queue_input_buf = g_async_queue_new (); queue_output_buf = g_async_queue_new (); g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf); g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf); g_object_set_data (G_OBJECT (fakesink), "loop", loop); gst_object_unref (fakesink); gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); g_main_loop_run (loop); /* before deinitializing the gst-gl opengl context, * no shared context (here the sdl one) must be current */ #ifdef WIN32 wglMakeCurrent (0, 0); #else glXMakeCurrent (sdl_display, None, 0); #endif gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); gst_object_unref (pipeline); /* make the sdl opengl context current again */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif SDL_Quit (); /* make sure there is no pending gst gl buffer in the communication queues * between sdl and gst-gl */ while (g_async_queue_length (queue_input_buf) > 0) { GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf); gst_buffer_unref (buf); } while (g_async_queue_length (queue_output_buf) > 0) { GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf); gst_buffer_unref (buf); } return 0; }
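/* The fakesink "handoff" callback on_gst_buffer is not shown above. A minimal
 * sketch under the assumptions visible in main(): it keeps a reference on each
 * buffer and hands it to the SDL drawing code through the "queue_input_buf"
 * GAsyncQueue stored on the fakesink. */
static void
on_gst_buffer (GstElement * fakesink, GstBuffer * buf, GstPad * pad, gpointer data)
{
  GAsyncQueue *queue_input_buf =
      (GAsyncQueue *) g_object_get_data (G_OBJECT (fakesink), "queue_input_buf");

  /* keep the buffer alive until the SDL side is done with its texture */
  gst_buffer_ref (buf);
  g_async_queue_push (queue_input_buf, buf);
}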
static GstFlowReturn gst_gsmdec_chain (GstPad * pad, GstBuffer * buf) { GstGSMDec *gsmdec; gsm_byte *data; GstFlowReturn ret = GST_FLOW_OK; GstClockTime timestamp; gint needed; gsmdec = GST_GSMDEC (gst_pad_get_parent (pad)); timestamp = GST_BUFFER_TIMESTAMP (buf); if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) { gst_adapter_clear (gsmdec->adapter); gsmdec->next_ts = GST_CLOCK_TIME_NONE; /* FIXME, do some good offset */ gsmdec->next_of = 0; } gst_adapter_push (gsmdec->adapter, buf); needed = 33; /* do we have enough bytes to read a frame */ while (gst_adapter_available (gsmdec->adapter) >= needed) { GstBuffer *outbuf; /* always the same amount of output samples */ outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal)); /* If we are not given any timestamp, interpolate from last seen * timestamp (if any). */ if (timestamp == GST_CLOCK_TIME_NONE) timestamp = gsmdec->next_ts; GST_BUFFER_TIMESTAMP (outbuf) = timestamp; /* interpolate in the next run */ if (timestamp != GST_CLOCK_TIME_NONE) gsmdec->next_ts = timestamp + gsmdec->duration; timestamp = GST_CLOCK_TIME_NONE; GST_BUFFER_DURATION (outbuf) = gsmdec->duration; GST_BUFFER_OFFSET (outbuf) = gsmdec->next_of; if (gsmdec->next_of != -1) gsmdec->next_of += ENCODED_SAMPLES; GST_BUFFER_OFFSET_END (outbuf) = gsmdec->next_of; gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmdec->srcpad)); /* now decode the frame into the output buffer */ data = (gsm_byte *) gst_adapter_peek (gsmdec->adapter, needed); if (gsm_decode (gsmdec->state, data, (gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) { /* invalid frame */ GST_WARNING_OBJECT (gsmdec, "tried to decode an invalid frame, skipping"); } gst_adapter_flush (gsmdec->adapter, needed); /* WAV49 requires alternating 33 and 32 bytes of input */ if (gsmdec->use_wav49) needed = (needed == 33 ? 32 : 33); GST_DEBUG_OBJECT (gsmdec, "Pushing buffer of size %d ts %" GST_TIME_FORMAT, GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf))); /* push */ ret = gst_pad_push (gsmdec->srcpad, outbuf); } gst_object_unref (gsmdec); return ret; }
void LinVideoDisplayForm::setProgram( QString feedName, QString summary, QString pubDate, QString mediaUrl) { if (gstreamerInUse) stopPlaying(); setWindowTitle(feedName); dataDialog->setSummary(summary); dataDialog->setPubDate(pubDate); // GstElement *xvsink = gst_element_factory_make("xvimagesink", "xvsink"); xvsink = gst_element_factory_make("xvimagesink", "xvsink"); if (!xvsink) { QString err("Unable to create GStreamer element 'xvimagesink'"); QMaemo5InformationBox::information(this, err); qDebug() << err; return; } //qDebug() << "winId: " << ui->videoWidget->winId(); unsigned long windowId = ui->videoWidget->winId(); // unsigned long windowId = videoWidget->winId(); QApplication::syncX(); gst_x_overlay_set_xwindow_id ( GST_X_OVERLAY(G_OBJECT(xvsink)), windowId); gst_element_set_state(xvsink, GST_STATE_READY); g_object_set( G_OBJECT(xvsink), "force_aspect_ratio", true, "autopaint-colorkey", false, "colorkey", 0x080810, NULL); GstElement *player = gst_element_factory_make("playbin2", "player"); if (!player) { QString err("Unable to create GStreamer element 'playbin2'"); QMaemo5InformationBox::information(this, err); qDebug() << err; return; } // Set up player for video and audio use: gint flags; g_object_get(G_OBJECT(player), "flags", &flags, NULL); flags |= GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO; g_object_set(G_OBJECT(player), "flags", flags, NULL); g_object_set(G_OBJECT(player), "video-sink", xvsink, NULL); QByteArray ba = mediaUrl.toAscii(); g_object_set(G_OBJECT(player), "uri", ba.data(), NULL); runningElement = player; GstBus *bus = gst_element_get_bus(GST_ELEMENT(runningElement)); gst_bus_add_watch(bus, linGstBusCallback, this); gst_object_unref(bus); // gst_element_set_state(runningElement, GST_STATE_PAUSED); gst_element_set_state(runningElement, GST_STATE_PLAYING); setPaused(false); ui->playButton->setEnabled(true); gstreamerInUse = true; }
bool CvCapture_GStreamer::open( int type, const char* filename ) { close(); CV_FUNCNAME("cvCaptureFromCAM_GStreamer"); __BEGIN__; gst_initializer::init(); // if(!isInited) { // printf("gst_init\n"); // gst_init (NULL, NULL); // gst_debug_set_active(TRUE); // gst_debug_set_colored(TRUE); // gst_debug_set_default_threshold(GST_LEVEL_WARNING); // isInited = true; // } bool stream = false; bool manualpipeline = false; char *uri = NULL; uridecodebin = NULL; if(type != CV_CAP_GSTREAMER_FILE) { close(); return false; } if(!gst_uri_is_valid(filename)) { uri = realpath(filename, NULL); stream=false; if(uri) { uri = g_filename_to_uri(uri, NULL, NULL); if(!uri) { CV_WARN("GStreamer: Error opening file\n"); close(); return false; } } else { GError *err = NULL; //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err); uridecodebin = gst_parse_launch(filename, &err); if(!uridecodebin) { CV_WARN("GStreamer: Error opening bin\n"); close(); return false; } stream = true; manualpipeline = true; } } else { stream = true; uri = g_strdup(filename); } if(!uridecodebin) { uridecodebin = gst_element_factory_make ("uridecodebin", NULL); g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL); if(!uridecodebin) { CV_WARN("GStreamer: Failed to create uridecodebin\n"); close(); return false; } } if(manualpipeline) { GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin)); if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) { CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n"); return false; } pipeline = uridecodebin; } else { pipeline = gst_pipeline_new (NULL); color = gst_element_factory_make("ffmpegcolorspace", NULL); sink = gst_element_factory_make("appsink", NULL); gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL); g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color); if(!gst_element_link(color, sink)) { CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n"); gst_object_unref(pipeline); return false; } } gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1); gst_app_sink_set_drop (GST_APP_SINK(sink), stream); caps = gst_caps_new_simple("video/x-raw-rgb", "red_mask", G_TYPE_INT, 0x0000FF, "green_mask", G_TYPE_INT, 0x00FF00, "blue_mask", G_TYPE_INT, 0xFF0000, NULL); gst_app_sink_set_caps(GST_APP_SINK(sink), caps); gst_caps_unref(caps); if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) { CV_WARN("GStreamer: unable to set pipeline to ready\n"); gst_object_unref(pipeline); return false; } if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); CV_WARN("GStreamer: unable to set pipeline to playing\n"); gst_object_unref(pipeline); return false; } handleMessage(); __END__; return true; }
static gboolean theora_enc_sink_setcaps (GstPad * pad, GstCaps * caps) { GstStructure *structure = gst_caps_get_structure (caps, 0); GstTheoraEnc *enc = GST_THEORA_ENC (gst_pad_get_parent (pad)); guint32 fourcc; const GValue *par; gint fps_n, fps_d; gst_structure_get_fourcc (structure, "format", &fourcc); gst_structure_get_int (structure, "width", &enc->width); gst_structure_get_int (structure, "height", &enc->height); gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d); par = gst_structure_get_value (structure, "pixel-aspect-ratio"); th_info_clear (&enc->info); th_info_init (&enc->info); /* Theora has a divisible-by-sixteen restriction for the encoded video size but * we can define a picture area using pic_width/pic_height */ enc->info.frame_width = GST_ROUND_UP_16 (enc->width); enc->info.frame_height = GST_ROUND_UP_16 (enc->height); enc->info.pic_width = enc->width; enc->info.pic_height = enc->height; switch (fourcc) { case GST_MAKE_FOURCC ('I', '4', '2', '0'): enc->info.pixel_fmt = TH_PF_420; break; case GST_MAKE_FOURCC ('Y', '4', '2', 'B'): enc->info.pixel_fmt = TH_PF_422; break; case GST_MAKE_FOURCC ('Y', '4', '4', '4'): enc->info.pixel_fmt = TH_PF_444; break; default: g_assert_not_reached (); } enc->info.fps_numerator = enc->fps_n = fps_n; enc->info.fps_denominator = enc->fps_d = fps_d; if (par) { enc->info.aspect_numerator = gst_value_get_fraction_numerator (par); enc->info.aspect_denominator = gst_value_get_fraction_denominator (par); } else { /* setting them to 0 indicates that the decoder can chose a good aspect * ratio, defaulting to 1/1 */ enc->info.aspect_numerator = 0; enc->info.aspect_denominator = 0; } enc->info.colorspace = TH_CS_UNSPECIFIED; /* as done in theora */ enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1); GST_DEBUG_OBJECT (enc, "keyframe_frequency_force is %d, granule shift is %d", enc->keyframe_force, enc->info.keyframe_granule_shift); theora_enc_reset (enc); enc->initialised = TRUE; gst_object_unref (enc); return TRUE; }
int main(int argc, char *argv[]) { GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink; GstElement *video_queue, *visual, *video_convert, *video_sink; GstBus *bus; GstMessage *msg; GstPadTemplate *tee_src_pad_template; GstPad *tee_audio_pad, *tee_video_pad; GstPad *queue_audio_pad, *queue_video_pad; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the elements */ audio_source = gst_element_factory_make ("audiotestsrc", "audio_source"); tee = gst_element_factory_make ("tee", "tee"); audio_queue = gst_element_factory_make ("queue", "audio_queue"); audio_convert = gst_element_factory_make ("audioconvert", "audio_convert"); audio_resample = gst_element_factory_make ("audioresample", "audio_resample"); audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink"); video_queue = gst_element_factory_make ("queue", "video_queue"); visual = gst_element_factory_make ("wavescope", "visual"); video_convert = gst_element_factory_make ("videoconvert", "video_convert"); video_sink = gst_element_factory_make ("autovideosink", "video_sink"); /* Create the empty pipeline */ pipeline = gst_pipeline_new ("test-pipeline"); if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink || !video_queue || !visual || !video_convert || !video_sink) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Configure elements */ g_object_set (audio_source, "freq", 215.0f, NULL); g_object_set (visual, "shader", 0, "style", 1, NULL); /* Link all elements that can be automatically linked because they have "Always" pads */ gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink, video_queue, visual, video_convert, video_sink, NULL); if (gst_element_link_many (audio_source, tee, NULL) != TRUE || gst_element_link_many (audio_queue, audio_convert, audio_resample, audio_sink, NULL) != TRUE || gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return -1; } /* Manually link the Tee, which has "Request" pads */ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u"); tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad)); queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink"); tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad)); queue_video_pad = gst_element_get_static_pad (video_queue, "sink"); if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK || gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) { g_printerr ("Tee could not be linked.\n"); gst_object_unref (pipeline); return -1; } gst_object_unref (queue_audio_pad); gst_object_unref (queue_video_pad); /* Start playing the pipeline */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Release the request pads from the Tee, and unref them */ gst_element_release_request_pad (tee, tee_audio_pad); gst_element_release_request_pad (tee, tee_video_pad); gst_object_unref (tee_audio_pad); gst_object_unref 
(tee_video_pad); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query) { WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink); GstCaps* caps = NULL; gboolean need_pool; gst_query_parse_allocation(query, &caps, &need_pool); if (!caps) return FALSE; if (!gst_video_info_from_caps(&sink->priv->info, caps)) return FALSE; #if USE(OPENGL_ES_2) && GST_CHECK_VERSION(1, 3, 0) // Code adapted from gst-plugins-bad's glimagesink. GstBufferPool* pool; GstStructure* config; guint size; GstAllocator* allocator = 0; GstAllocationParams params; if (!_ensure_gl_setup(sink)) return FALSE; if ((pool = sink->priv->pool)) gst_object_ref(pool); if (pool) { GstCaps* pcaps; // We had a pool, check its caps. GST_DEBUG_OBJECT (sink, "check existing pool caps"); config = gst_buffer_pool_get_config(pool); gst_buffer_pool_config_get_params(config, &pcaps, &size, 0, 0); if (!gst_caps_is_equal(caps, pcaps)) { GST_DEBUG_OBJECT(sink, "pool has different caps"); // Different caps, we can't use this pool. gst_object_unref(pool); pool = 0; } gst_structure_free(config); } if (need_pool && !pool) { GstVideoInfo info; if (!gst_video_info_from_caps(&info, caps)) { GST_DEBUG_OBJECT(sink, "invalid caps specified"); return FALSE; } GST_DEBUG_OBJECT(sink, "create new pool"); pool = gst_gl_buffer_pool_new(sink->priv->context); // The normal size of a frame. size = info.size; config = gst_buffer_pool_get_config(pool); gst_buffer_pool_config_set_params(config, caps, size, 0, 0); if (!gst_buffer_pool_set_config(pool, config)) { GST_DEBUG_OBJECT(sink, "failed setting config"); return FALSE; } } // [WiP] Let's require 8 buffers for now. The player holds to the last 3 // ones and the sink holds only the last one so in theory 5 should // be enough. if (pool) { gst_query_add_allocation_pool(query, pool, size, 8, 0); gst_object_unref(pool); } gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0); gst_allocation_params_init(&params); allocator = gst_allocator_find(GST_EGL_IMAGE_MEMORY_TYPE); gst_query_add_allocation_param(query, allocator, &params); gst_object_unref(allocator); #else gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0); gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0); gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0); #endif return TRUE; }
static GstFlowReturn gst_pngdec_chain (GstPad * pad, GstBuffer * buffer) { GstPngDec *pngdec; GstFlowReturn ret = GST_FLOW_OK; pngdec = GST_PNGDEC (gst_pad_get_parent (pad)); GST_LOG_OBJECT (pngdec, "Got buffer, size=%u", GST_BUFFER_SIZE (buffer)); if (G_UNLIKELY (!pngdec->setup)) goto not_configured; /* Something is going wrong in our callbacks */ ret = pngdec->ret; if (G_UNLIKELY (ret != GST_FLOW_OK)) { GST_WARNING_OBJECT (pngdec, "we have a pending return code of %d", ret); goto beach; } /* Let libpng come back here on error */ if (setjmp (png_jmpbuf (pngdec->png))) { GST_WARNING_OBJECT (pngdec, "error during decoding"); ret = GST_FLOW_ERROR; goto beach; } pngdec->in_timestamp = GST_BUFFER_TIMESTAMP (buffer); pngdec->in_duration = GST_BUFFER_DURATION (buffer); /* Progressive loading of the PNG image */ png_process_data (pngdec->png, pngdec->info, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer)); if (pngdec->image_ready) { if (pngdec->framed) { /* Reset ourselves for the next frame */ gst_pngdec_libpng_clear (pngdec); gst_pngdec_libpng_init (pngdec); GST_LOG_OBJECT (pngdec, "setting up callbacks for next frame"); png_set_progressive_read_fn (pngdec->png, pngdec, user_info_callback, user_endrow_callback, user_end_callback); } else { GST_LOG_OBJECT (pngdec, "sending EOS"); pngdec->ret = gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ()); } pngdec->image_ready = FALSE; } /* grab new return code */ ret = pngdec->ret; /* And release the buffer */ gst_buffer_unref (buffer); beach: gst_object_unref (pngdec); return ret; /* ERRORS */ not_configured: { GST_LOG_OBJECT (pngdec, "we are not configured yet"); ret = GST_FLOW_WRONG_STATE; goto beach; } }
static GstFlowReturn gst_pngdec_caps_create_and_set (GstPngDec * pngdec) { GstFlowReturn ret = GST_FLOW_OK; GstCaps *caps = NULL, *res = NULL; GstPadTemplate *templ = NULL; gint bpc = 0, color_type; png_uint_32 width, height; g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR); /* Get bits per channel */ bpc = png_get_bit_depth (pngdec->png, pngdec->info); /* We don't handle 16 bits per color, strip down to 8 */ if (bpc == 16) { GST_LOG_OBJECT (pngdec, "this is a 16 bits per channel PNG image, strip down to 8 bits"); png_set_strip_16 (pngdec->png); } /* Get Color type */ color_type = png_get_color_type (pngdec->png, pngdec->info); #if 0 /* We used to have this HACK to reverse the outgoing bytes, but the problem * that originally required the hack seems to have been in ffmpegcolorspace's * RGBA descriptions. It doesn't seem needed now that's fixed, but might * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */ if (color_type == PNG_COLOR_TYPE_RGB_ALPHA) png_set_bgr (pngdec->png); #endif /* Gray scale converted to RGB and upscaled to 8 bits */ if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) || (color_type == PNG_COLOR_TYPE_GRAY)) { GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB"); png_set_gray_to_rgb (pngdec->png); if (bpc < 8) { /* Convert to 8 bits */ GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits"); png_set_gray_1_2_4_to_8 (pngdec->png); } } /* Palette converted to RGB */ if (color_type == PNG_COLOR_TYPE_PALETTE) { GST_LOG_OBJECT (pngdec, "converting palette png to RGB"); png_set_palette_to_rgb (pngdec->png); } /* Update the info structure */ png_read_update_info (pngdec->png, pngdec->info); /* Get IHDR header again after transformation settings */ png_get_IHDR (pngdec->png, pngdec->info, &width, &height, &bpc, &pngdec->color_type, NULL, NULL, NULL); pngdec->width = width; pngdec->height = height; GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width, pngdec->height); switch (pngdec->color_type) { case PNG_COLOR_TYPE_RGB: GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits"); pngdec->bpp = 24; break; case PNG_COLOR_TYPE_RGB_ALPHA: GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits"); pngdec->bpp = 32; break; default: GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL), ("pngdec does not support this color type")); ret = GST_FLOW_NOT_SUPPORTED; goto beach; } caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT, pngdec->width, "height", G_TYPE_INT, pngdec->height, "bpp", G_TYPE_INT, pngdec->bpp, "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL); templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template); res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ)); gst_caps_unref (caps); gst_object_unref (templ); if (!gst_pad_set_caps (pngdec->srcpad, res)) ret = GST_FLOW_NOT_NEGOTIATED; GST_DEBUG_OBJECT (pngdec, "our caps %" GST_PTR_FORMAT, res); gst_caps_unref (res); /* Push a newsegment event */ if (pngdec->need_newsegment) { gst_pad_push_event (pngdec->srcpad, gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0)); pngdec->need_newsegment = FALSE; } beach: return ret; }
static void output_loop (gpointer data) { GstPad *pad; GOmxCore *gomx; GOmxPort *out_port; GstOmxBaseFilter *self; GstFlowReturn ret = GST_FLOW_OK; pad = data; self = GST_OMX_BASE_FILTER (gst_pad_get_parent (pad)); gomx = self->gomx; GST_LOG_OBJECT (self, "begin"); /* do not bother if we have been setup to bail out */ if ((ret = g_atomic_int_get (&self->last_pad_push_return)) != GST_FLOW_OK) goto leave; if (!self->ready) { g_error ("not ready"); return; } out_port = self->out_port; if (G_LIKELY (out_port->enabled)) { OMX_BUFFERHEADERTYPE *omx_buffer = NULL; GST_LOG_OBJECT (self, "request buffer"); omx_buffer = g_omx_port_request_buffer (out_port); GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer); if (G_UNLIKELY (!omx_buffer)) { GST_WARNING_OBJECT (self, "null buffer: leaving"); ret = GST_FLOW_WRONG_STATE; goto leave; } log_buffer (self, omx_buffer); if (G_LIKELY (omx_buffer->nFilledLen > 0)) { GstBuffer *buf; #if 1 /** @todo remove this check */ if (G_LIKELY (self->in_port->enabled)) { GstCaps *caps = NULL; caps = gst_pad_get_negotiated_caps (self->srcpad); #ifdef ANDROID if (!caps || gomx->settings_changed) { #else if (!caps) { #endif /** @todo We shouldn't be doing this. */ GST_WARNING_OBJECT (self, "faking settings changed notification"); if (gomx->settings_changed_cb) gomx->settings_changed_cb (gomx); #ifdef ANDROID gomx->settings_changed = FALSE; #endif } else { GST_LOG_OBJECT (self, "caps already fixed: %" GST_PTR_FORMAT, caps); gst_caps_unref (caps); } } #endif /* buf is always null when the output buffer pointer isn't shared. */ buf = omx_buffer->pAppPrivate; /** @todo we need to move all the caps handling to one single * place, in the output loop probably. */ if (G_UNLIKELY (omx_buffer->nFlags & 0x80)) { GstCaps *caps = NULL; GstStructure *structure; GValue value = { 0, {{0} } }; caps = gst_pad_get_negotiated_caps (self->srcpad); caps = gst_caps_make_writable (caps); structure = gst_caps_get_structure (caps, 0); g_value_init (&value, GST_TYPE_BUFFER); buf = gst_buffer_new_and_alloc (omx_buffer->nFilledLen); memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen); gst_value_set_buffer (&value, buf); gst_buffer_unref (buf); gst_structure_set_value (structure, "codec_data", &value); g_value_unset (&value); gst_pad_set_caps (self->srcpad, caps); } else if (buf && !(omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) { GST_BUFFER_SIZE (buf) = omx_buffer->nFilledLen; if (self->use_timestamps) { GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); } omx_buffer->pAppPrivate = NULL; omx_buffer->pBuffer = NULL; ret = push_buffer (self, buf); gst_buffer_unref (buf); } else { /* This is only meant for the first OpenMAX buffers, * which need to be pre-allocated. */ /* Also for the very last one. */ ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE, omx_buffer->nFilledLen, GST_PAD_CAPS (self->srcpad), &buf); if (G_LIKELY (buf)) { memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen); if (self->use_timestamps) { GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); } if (self->share_output_buffer) { GST_WARNING_OBJECT (self, "couldn't zero-copy"); /* If pAppPrivate is NULL, it means it was a dummy * allocation, free it. 
*/ if (!omx_buffer->pAppPrivate) { g_free (omx_buffer->pBuffer); omx_buffer->pBuffer = NULL; } } ret = push_buffer (self, buf); } else { GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %lu", omx_buffer->nFilledLen); } } } else { GST_WARNING_OBJECT (self, "empty buffer"); } if (self->share_output_buffer && !omx_buffer->pBuffer && omx_buffer->nOffset == 0) { GstBuffer *buf; GstFlowReturn result; GST_LOG_OBJECT (self, "allocate buffer"); result = gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE, omx_buffer->nAllocLen, GST_PAD_CAPS (self->srcpad), &buf); if (G_LIKELY (result == GST_FLOW_OK)) { gst_buffer_ref (buf); omx_buffer->pAppPrivate = buf; omx_buffer->pBuffer = GST_BUFFER_DATA (buf); omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf); } else { GST_WARNING_OBJECT (self, "could not pad allocate buffer, using malloc"); omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen); } } if (self->share_output_buffer && !omx_buffer->pBuffer) { GST_ERROR_OBJECT (self, "no input buffer to share"); } if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) { GST_DEBUG_OBJECT (self, "got eos"); gst_pad_push_event (self->srcpad, gst_event_new_eos ()); omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS; ret = GST_FLOW_UNEXPECTED; } omx_buffer->nFilledLen = 0; GST_LOG_OBJECT (self, "release_buffer"); g_omx_port_release_buffer (out_port, omx_buffer); } leave: self->last_pad_push_return = ret; if (gomx->omx_error != OMX_ErrorNone) ret = GST_FLOW_ERROR; if (ret != GST_FLOW_OK) { GST_INFO_OBJECT (self, "pause task, reason: %s", gst_flow_get_name (ret)); gst_pad_pause_task (self->srcpad); } GST_LOG_OBJECT (self, "end"); gst_object_unref (self); } static GstFlowReturn pad_chain (GstPad * pad, GstBuffer * buf) { GOmxCore *gomx; GOmxPort *in_port; GstOmxBaseFilter *self; GstFlowReturn ret = GST_FLOW_OK; self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad)); gomx = self->gomx; GST_LOG_OBJECT (self, "begin"); GST_LOG_OBJECT (self, "gst_buffer: size=%u", GST_BUFFER_SIZE (buf)); GST_LOG_OBJECT (self, "state: %d", gomx->omx_state); if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded)) { g_mutex_lock (self->ready_lock); GST_INFO_OBJECT (self, "omx: prepare"); /** @todo this should probably go after doing preparations. */ if (self->omx_setup) { self->omx_setup (self); } setup_ports (self); g_omx_core_prepare (self->gomx); if (gomx->omx_state == OMX_StateIdle) { self->ready = TRUE; GST_INFO_OBJECT (self, "start srcpad task"); gst_pad_start_task (self->srcpad, output_loop, self->srcpad); } g_mutex_unlock (self->ready_lock); if (gomx->omx_state != OMX_StateIdle) goto out_flushing; } #ifdef ANDROID if (gomx->settings_changed) { GST_DEBUG_OBJECT (self, "settings changed called from streaming thread... 
Android"); if (gomx->settings_changed_cb) gomx->settings_changed_cb (gomx); gomx->settings_changed = FALSE; } #endif in_port = self->in_port; if (G_LIKELY (in_port->enabled)) { guint buffer_offset = 0; if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle)) { GST_INFO_OBJECT (self, "omx: play"); g_omx_core_start (gomx); if (gomx->omx_state != OMX_StateExecuting) goto out_flushing; /* send buffer with codec data flag */ /** @todo move to util */ if (self->codec_data) { OMX_BUFFERHEADERTYPE *omx_buffer; GST_LOG_OBJECT (self, "request buffer"); omx_buffer = g_omx_port_request_buffer (in_port); if (G_LIKELY (omx_buffer)) { omx_buffer->nFlags |= 0x00000080; /* codec data flag */ omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data); memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen); GST_LOG_OBJECT (self, "release_buffer"); g_omx_port_release_buffer (in_port, omx_buffer); } } } if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting)) { GST_ERROR_OBJECT (self, "Whoa! very wrong"); } while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf))) { OMX_BUFFERHEADERTYPE *omx_buffer; if (self->last_pad_push_return != GST_FLOW_OK || !(gomx->omx_state == OMX_StateExecuting || gomx->omx_state == OMX_StatePause)) { goto out_flushing; } GST_LOG_OBJECT (self, "request buffer"); omx_buffer = g_omx_port_request_buffer (in_port); GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer); if (G_LIKELY (omx_buffer)) { log_buffer (self, omx_buffer); if (omx_buffer->nOffset == 0 && self->share_input_buffer) { { GstBuffer *old_buf; old_buf = omx_buffer->pAppPrivate; if (old_buf) { gst_buffer_unref (old_buf); } else if (omx_buffer->pBuffer) { g_free (omx_buffer->pBuffer); } } omx_buffer->pBuffer = GST_BUFFER_DATA (buf); omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf); omx_buffer->nFilledLen = GST_BUFFER_SIZE (buf); omx_buffer->pAppPrivate = buf; } else { omx_buffer->nFilledLen = MIN (GST_BUFFER_SIZE (buf) - buffer_offset, omx_buffer->nAllocLen - omx_buffer->nOffset); memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (buf) + buffer_offset, omx_buffer->nFilledLen); } if (self->use_timestamps) { GstClockTime timestamp_offset = 0; if (buffer_offset && GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) { timestamp_offset = gst_util_uint64_scale_int (buffer_offset, GST_BUFFER_DURATION (buf), GST_BUFFER_SIZE (buf)); } omx_buffer->nTimeStamp = gst_util_uint64_scale_int (GST_BUFFER_TIMESTAMP (buf) + timestamp_offset, OMX_TICKS_PER_SECOND, GST_SECOND); } buffer_offset += omx_buffer->nFilledLen; #ifdef ANDROID omx_buffer->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME; log_buffer (self, omx_buffer); #endif GST_LOG_OBJECT (self, "release_buffer"); /** @todo untaint buffer */ g_omx_port_release_buffer (in_port, omx_buffer); } else { GST_WARNING_OBJECT (self, "null buffer"); ret = GST_FLOW_WRONG_STATE; goto out_flushing; } } } else { GST_WARNING_OBJECT (self, "done"); ret = GST_FLOW_UNEXPECTED; } if (!self->share_input_buffer) { gst_buffer_unref (buf); } leave: GST_LOG_OBJECT (self, "end"); return ret; /* special conditions */ out_flushing: { const gchar *error_msg = NULL; if (gomx->omx_error) { error_msg = "Error from OpenMAX component"; } else if (gomx->omx_state != OMX_StateExecuting && gomx->omx_state != OMX_StatePause) { error_msg = "OpenMAX component in wrong state"; } if (error_msg) { GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), ("%s", error_msg)); ret = GST_FLOW_ERROR; } gst_buffer_unref (buf); goto leave; } }
static gboolean gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object, GstClockTime duration) { GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object); GstBuffer *buf = NULL; IPin *pPin = NULL; HRESULT hres = S_FALSE; AM_MEDIA_TYPE *pMediaType = NULL; GstMapInfo info; if (!buffer || size == 0 || !src) { return FALSE; } /* create a new buffer assign to it the clock time as timestamp */ buf = gst_buffer_new_and_alloc (size); gst_buffer_set_size(buf, size); GstClock *clock = gst_element_get_clock (GST_ELEMENT (src)); GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock)); gst_object_unref (clock); GST_BUFFER_DURATION (buf) = duration; if (!gst_buffer_map(buf, &info, GST_MAP_WRITE)) { gst_buffer_unref(buf); GST_ERROR("Failed to map buffer"); return FALSE; } if (src->is_rgb) { /* FOR RGB directshow decoder will return bottom-up BITMAP * There is probably a way to get top-bottom video frames from * the decoder... */ gint line = 0; gint stride = size / src->height; for (; line < src->height; line++) { memcpy (info.data + (line * stride), buffer + (size - ((line + 1) * (stride))), stride); } } else { memcpy (info.data, buffer, size); } gst_buffer_unmap(buf, &info); GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (duration)); g_mutex_lock (&src->buffer_mutex); if (src->buffer != NULL) gst_buffer_unref (src->buffer); src->buffer = buf; g_cond_signal (&src->buffer_cond); g_mutex_unlock (&src->buffer_mutex); return TRUE; }
gint main (gint argc, gchar *argv[]) { gtk_init (&argc, &argv); gst_init (&argc, &argv); GstElement* pipeline = gst_pipeline_new ("pipeline"); //window that contains an area where the video is drawn GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL); gtk_widget_set_size_request (window, 640, 480); gtk_window_move (GTK_WINDOW (window), 300, 10); gtk_window_set_title (GTK_WINDOW (window), "glimagesink implement the gstxoverlay interface"); GdkGeometry geometry; geometry.min_width = 1; geometry.min_height = 1; geometry.max_width = -1; geometry.max_height = -1; gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE); //window to control the states GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL); geometry.min_width = 1; geometry.min_height = 1; geometry.max_width = -1; geometry.max_height = -1; gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE); gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE); gtk_window_move (GTK_WINDOW (window_control), 10, 10); GtkWidget* table = gtk_table_new (2, 1, TRUE); gtk_container_add (GTK_CONTAINER (window_control), table); //control state null GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL"); g_signal_connect (G_OBJECT (button_state_null), "clicked", G_CALLBACK (button_state_null_cb), pipeline); gtk_table_attach_defaults (GTK_TABLE (table), button_state_null, 0, 1, 0, 1); gtk_widget_show (button_state_null); //control state ready GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY"); g_signal_connect (G_OBJECT (button_state_ready), "clicked", G_CALLBACK (button_state_ready_cb), pipeline); gtk_table_attach_defaults (GTK_TABLE (table), button_state_ready, 0, 1, 1, 2); gtk_widget_show (button_state_ready); //control state paused GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED"); g_signal_connect (G_OBJECT (button_state_paused), "clicked", G_CALLBACK (button_state_paused_cb), pipeline); gtk_table_attach_defaults (GTK_TABLE (table), button_state_paused, 0, 1, 2, 3); gtk_widget_show (button_state_paused); //control state playing GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING"); g_signal_connect (G_OBJECT (button_state_playing), "clicked", G_CALLBACK (button_state_playing_cb), pipeline); gtk_table_attach_defaults (GTK_TABLE (table), button_state_playing, 0, 1, 3, 4); gtk_widget_show (button_state_playing); //change framerate GtkWidget* slider_fps = gtk_vscale_new_with_range (1, 30, 2); g_signal_connect (G_OBJECT (slider_fps), "format-value", G_CALLBACK (slider_fps_cb), pipeline); gtk_table_attach_defaults (GTK_TABLE (table), slider_fps, 1, 2, 0, 4); gtk_widget_show (slider_fps); gtk_widget_show (table); gtk_widget_show (window_control); //configure the pipeline g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline); GstElement* videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc"); GstElement* glupload = gst_element_factory_make ("glupload", "glupload"); GstElement* glfiltercube = gst_element_factory_make ("glfiltercube", "glfiltercube"); GstElement* glfilterlaplacian = gst_element_factory_make ("glfilterlaplacian", "glfilterlaplacian"); GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink"); GstCaps *caps = gst_caps_new_simple("video/x-raw-yuv", "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, "framerate", GST_TYPE_FRACTION, 25, 1, "format", GST_TYPE_FOURCC, 
GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'), NULL) ; gst_bin_add_many (GST_BIN (pipeline), videosrc, glupload, glfiltercube, glfilterlaplacian, videosink, NULL); gboolean link_ok = gst_element_link_filtered(videosrc, glupload, caps) ; gst_caps_unref(caps) ; if(!link_ok) { g_warning("Failed to link videosrc to glupload!\n") ; return -1; } if(!gst_element_link_many(glupload, glfiltercube, glfilterlaplacian, videosink, NULL)) { g_warning("Failed to link glupload to videosink!\n") ; return -1; } //area where the video is drawn GtkWidget* area = gtk_drawing_area_new(); gtk_container_add (GTK_CONTAINER (window), area); //set window id on this event GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area); gst_bus_add_signal_watch (bus); g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline); g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline); g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline); gst_object_unref (bus); //needed when being in GST_STATE_READY, GST_STATE_PAUSED //or resizing/obscuring the window g_signal_connect(area, "expose-event", G_CALLBACK(expose_cb), videosink); g_signal_connect (area, "realize", G_CALLBACK (area_realize_cb), pipeline); //start GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_print ("Failed to start up pipeline!\n"); return -1; } gtk_widget_show_all (window); gtk_main(); return 0; }
static void finalize_element(value v) { GstElement *e = Element_val(v); gst_object_unref(e); }
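/* A hedged sketch of how finalize_element is typically registered: the element
 * lives in an OCaml custom block whose payload is the GstElement pointer, with
 * Element_val(v) assumed to be (*(GstElement **) Data_custom_val (v)). The names
 * below are illustrative, not taken from the original bindings. */
#include <caml/custom.h>

static struct custom_operations element_ops = {
  "gstreamer.element",          /* identifier */
  finalize_element,             /* drops the GStreamer reference on GC */
  custom_compare_default,
  custom_hash_default,
  custom_serialize_default,
  custom_deserialize_default
};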
gint main (gint argc, gchar * argv[]) { LocalState state; GtkWidget *area, *combo, *w; const gchar *uri; #if defined (GDK_WINDOWING_X11) XInitThreads (); #endif gst_init (&argc, &argv); gtk_init (&argc, &argv); if (argc < 2) { g_print ("Usage: 3dvideo <uri-to-play>\n"); return 1; } uri = argv[1]; GstElement *pipeline = gst_element_factory_make ("playbin", NULL); GstBin *sinkbin = (GstBin *) gst_parse_bin_from_description ("glupload ! glcolorconvert ! glviewconvert name=viewconvert ! glimagesink name=sink", TRUE, NULL); #if USE_GLCONVERT_FOR_INPUT GstElement *glconvert = gst_bin_get_by_name (sinkbin, "viewconvert"); #endif GstElement *videosink = gst_bin_get_by_name (sinkbin, "sink"); /* Get defaults */ g_object_get (pipeline, "video-multiview-mode", &state.in_mode, "video-multiview-flags", &state.in_flags, NULL); gst_child_proxy_get (GST_CHILD_PROXY (videosink), "sink::output-multiview-mode", &state.out_mode, "sink::output-multiview-flags", &state.out_flags, NULL); detect_mode_from_uri (&state, uri); g_return_val_if_fail (pipeline != NULL, 1); g_return_val_if_fail (videosink != NULL, 1); g_object_set (G_OBJECT (pipeline), "video-sink", sinkbin, NULL); g_object_set (G_OBJECT (pipeline), "uri", uri, NULL); #if USE_GLCONVERT_FOR_INPUT g_object_set (G_OBJECT (glconvert), "input-mode-override", state.in_mode, NULL); g_object_set (G_OBJECT (glconvert), "input-flags-override", state.in_flags, NULL); #else g_object_set (G_OBJECT (pipeline), "video-multiview-mode", state.in_mode, NULL); g_object_set (G_OBJECT (pipeline), "video-multiview-flags", state.in_flags, NULL); #endif /* Connect to bus for signal handling */ GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), pipeline); g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), pipeline); g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline); gst_element_set_state (pipeline, GST_STATE_READY); area = gtk_drawing_area_new (); gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area, NULL); gst_object_unref (bus); /* Toplevel window */ GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_window_set_default_size (GTK_WINDOW (window), 800, 600); gtk_window_set_title (GTK_WINDOW (window), "Stereoscopic video demo"); GdkGeometry geometry; geometry.min_width = 1; geometry.min_height = 1; geometry.max_width = -1; geometry.max_height = -1; gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE); GtkWidget *vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2); gtk_container_add (GTK_CONTAINER (window), vbox); /* area where the video is drawn */ gtk_box_pack_start (GTK_BOX (vbox), area, TRUE, TRUE, 0); /* Buttons to control the pipeline state */ GtkWidget *table = gtk_grid_new (); gtk_container_add (GTK_CONTAINER (vbox), table); GtkWidget *button_state_ready = gtk_button_new_with_label ("Stop"); g_signal_connect (G_OBJECT (button_state_ready), "clicked", G_CALLBACK (button_state_ready_cb), pipeline); gtk_grid_attach (GTK_GRID (table), button_state_ready, 1, 0, 1, 1); gtk_widget_show (button_state_ready); //control state paused GtkWidget *button_state_paused = gtk_button_new_with_label ("Pause"); g_signal_connect (G_OBJECT (button_state_paused), "clicked", G_CALLBACK (button_state_paused_cb), pipeline); gtk_grid_attach (GTK_GRID (table), button_state_paused, 2, 0, 1, 1); gtk_widget_show (button_state_paused); //control state playing GtkWidget 
*button_state_playing = gtk_button_new_with_label ("Play"); g_signal_connect (G_OBJECT (button_state_playing), "clicked", G_CALLBACK (button_state_playing_cb), pipeline); gtk_grid_attach (GTK_GRID (table), button_state_playing, 3, 0, 1, 1); //gtk_widget_show (button_state_playing); w = gst_mview_widget_new (FALSE); combo = GST_MVIEW_WIDGET (w)->mode_selector; gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo), enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING, state.in_mode)); #if USE_GLCONVERT_FOR_INPUT g_signal_connect (G_OBJECT (combo), "changed", G_CALLBACK (set_mview_input_mode), glconvert); #else g_signal_connect (G_OBJECT (combo), "changed", G_CALLBACK (set_mview_input_mode), pipeline); #endif g_object_set (G_OBJECT (w), "flags", state.in_flags, NULL); #if USE_GLCONVERT_FOR_INPUT g_signal_connect (G_OBJECT (w), "notify::flags", G_CALLBACK (input_flags_changed), glconvert); #else g_signal_connect (G_OBJECT (w), "notify::flags", G_CALLBACK (input_flags_changed), pipeline); #endif gtk_container_add (GTK_CONTAINER (vbox), w); w = gst_mview_widget_new (TRUE); combo = GST_MVIEW_WIDGET (w)->mode_selector; gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo), enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_MODE, state.out_mode)); g_signal_connect (G_OBJECT (combo), "changed", G_CALLBACK (set_mview_output_mode), videosink); g_object_set (G_OBJECT (w), "flags", state.out_flags, NULL); g_signal_connect (G_OBJECT (w), "notify::flags", G_CALLBACK (output_flags_changed), videosink); g_signal_connect (G_OBJECT (w), "notify::downmix-mode", G_CALLBACK (downmix_method_changed), videosink); gtk_container_add (GTK_CONTAINER (vbox), w); //configure the pipeline g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb), pipeline); gtk_widget_realize (area); /* Redraw needed when paused or stopped (PAUSED or READY) */ g_signal_connect (area, "draw", G_CALLBACK (draw_cb), videosink); g_signal_connect(area, "configure-event", G_CALLBACK(resize_cb), videosink); gtk_widget_show_all (window); gst_element_set_state (pipeline, GST_STATE_PLAYING); gtk_main (); return 0; }
MainWindow::MainWindow() : MainWindowBase(0, "main window"), editPalette(palette()), previewPalette(palette()), previewstyle(0) { modified = true; desktopThemeName = tr("Desktop Settings (Default)"); QStringList gstyles = QStyleFactory::keys(); gstyles.sort(); gstylecombo->addItem(desktopThemeName); gstylecombo->setItemData(gstylecombo->findText(desktopThemeName), tr("Choose style and palette based on your desktop settings."), Qt::ToolTipRole); gstylecombo->insertStringList(gstyles); QSettings settings(QLatin1String("Trolltech")); settings.beginGroup(QLatin1String("Qt")); QString currentstyle = settings.value(QLatin1String("style")).toString(); if (currentstyle.isEmpty()) { gstylecombo->setCurrentItem(gstylecombo->findText(desktopThemeName)); currentstyle = QLatin1String(QApplication::style()->name()); } else { int index = gstylecombo->findText(currentstyle, Qt::MatchFixedString); if (index != -1) { gstylecombo->setCurrentItem(index); } else { // we give up gstylecombo->insertItem(QLatin1String("Unknown")); gstylecombo->setCurrentItem(gstylecombo->count() - 1); } } buttonMainColor->setColor(palette().color(QPalette::Active, QColorGroup::Button)); buttonMainColor2->setColor(palette().color(QPalette::Active, QColorGroup::Window)); connect(buttonMainColor, SIGNAL(colorChanged(QColor)), this, SLOT(buildPalette())); connect(buttonMainColor2, SIGNAL(colorChanged(QColor)), this, SLOT(buildPalette())); if (X11->desktopEnvironment == DE_KDE) colorConfig->hide(); else labelKDENote->hide(); QFontDatabase db; QStringList families = db.families(); familycombo->insertStringList(families); QStringList fs = families; QStringList fs2 = QFont::substitutions(); QStringList::Iterator fsit = fs2.begin(); while (fsit != fs2.end()) { if (! fs.contains(*fsit)) fs += *fsit; fsit++; } fs.sort(); familysubcombo->insertStringList(fs); choosesubcombo->insertStringList(families); Q3ValueList<int> sizes = db.standardSizes(); Q3ValueList<int>::Iterator it = sizes.begin(); while (it != sizes.end()) psizecombo->insertItem(QString::number(*it++)); dcispin->setValue(QApplication::doubleClickInterval()); cfispin->setValue(QApplication::cursorFlashTime()); wslspin->setValue(QApplication::wheelScrollLines()); // ############# // resolvelinks->setChecked(qt_resolve_symlinks); effectcheckbox->setChecked(QApplication::isEffectEnabled(Qt::UI_General)); effectbase->setEnabled(effectcheckbox->isChecked()); if (QApplication::isEffectEnabled(Qt::UI_FadeMenu)) menueffect->setCurrentItem(2); else if (QApplication::isEffectEnabled(Qt::UI_AnimateMenu)) menueffect->setCurrentItem(1); if (QApplication::isEffectEnabled(Qt::UI_AnimateCombo)) comboeffect->setCurrentItem(1); if (QApplication::isEffectEnabled(Qt::UI_FadeTooltip)) tooltipeffect->setCurrentItem(2); else if (QApplication::isEffectEnabled(Qt::UI_AnimateTooltip)) tooltipeffect->setCurrentItem(1); if ( QApplication::isEffectEnabled( Qt::UI_AnimateToolBox ) ) toolboxeffect->setCurrentItem( 1 ); QSize globalStrut = QApplication::globalStrut(); strutwidth->setValue(globalStrut.width()); strutheight->setValue(globalStrut.height()); // find the default family QStringList::Iterator sit = families.begin(); int i = 0, possible = -1; while (sit != families.end()) { if (*sit == QApplication::font().family()) break; if ((*sit).contains(QApplication::font().family())) possible = i; i++; sit++; } if (sit == families.end()) i = possible; if (i == -1) // no clue about the current font i = 0; familycombo->setCurrentItem(i); QStringList styles = db.styles(familycombo->currentText()); 
stylecombo->insertStringList(styles); QString stylestring = db.styleString(QApplication::font()); sit = styles.begin(); i = 0; possible = -1; while (sit != styles.end()) { if (*sit == stylestring) break; if ((*sit).contains(stylestring)) possible = i; i++; sit++; } if (sit == styles.end()) i = possible; if (i == -1) // no clue about the current font i = 0; stylecombo->setCurrentItem(i); i = 0; for (int psize = QApplication::font().pointSize(); i < psizecombo->count(); ++i) { const int sz = psizecombo->text(i).toInt(); if (sz == psize) { psizecombo->setCurrentItem(i); break; } else if(sz > psize) { psizecombo->insertItem(i, QString::number(psize)); psizecombo->setCurrentItem(i); break; } } QStringList subs = QFont::substitutes(familysubcombo->currentText()); sublistbox->clear(); sublistbox->insertStringList(subs); rtlExtensions->setChecked(settings.value(QLatin1String("useRtlExtensions"), false).toBool()); #ifdef Q_WS_X11 inputStyle->setCurrentText(settings.value(QLatin1String("XIMInputStyle"), trUtf8("On The Spot")).toString()); #else inputStyle->hide(); inputStyleLabel->hide(); #endif #if defined(Q_WS_X11) && !defined(QT_NO_XIM) QStringList inputMethods = QInputContextFactory::keys(); int inputMethodIndex = -1; QString defaultInputMethod = settings.value(QLatin1String("DefaultInputMethod"), QLatin1String("xim")).toString(); for (int i = inputMethods.size()-1; i >= 0; --i) { const QString &im = inputMethods.at(i); if (im.contains(QLatin1String("imsw"))) { inputMethods.removeAt(i); if (inputMethodIndex > i) --inputMethodIndex; } else if (im == defaultInputMethod) { inputMethodIndex = i; } } if (inputMethodIndex == -1 && !inputMethods.isEmpty()) inputMethodIndex = 0; inputMethod->addItems(inputMethods); inputMethod->setCurrentIndex(inputMethodIndex); #else inputMethod->hide(); inputMethodLabel->hide(); #endif fontembeddingcheckbox->setChecked(settings.value(QLatin1String("embedFonts"), true).toBool()); fontpaths = settings.value(QLatin1String("fontPath")).toStringList(); fontpathlistbox->insertStringList(fontpaths); audiosinkCombo->addItem(tr("Auto (default)"), QLatin1String("Auto")); audiosinkCombo->setItemData(audiosinkCombo->findText(tr("Auto (default)")), tr("Choose audio output automatically."), Qt::ToolTipRole); audiosinkCombo->addItem(tr("aRts"), QLatin1String("artssink")); audiosinkCombo->setItemData(audiosinkCombo->findText(tr("aRts")), tr("Experimental aRts support for GStreamer."), Qt::ToolTipRole); #ifndef QT_NO_GSTREAMER phononVersionLabel->setText(QLatin1String(Phonon::phononVersion())); if (gst_init_check(0, 0, 0)) { gchar *versionString = gst_version_string(); gstversionLabel->setText(QLatin1String(versionString)); g_free(versionString); GList* factoryList = gst_registry_get_feature_list(gst_registry_get_default (), GST_TYPE_ELEMENT_FACTORY); QString name, klass, description; for (GList* iter = g_list_first(factoryList) ; iter != NULL ; iter = g_list_next(iter)) { GstPluginFeature *feature = GST_PLUGIN_FEATURE(iter->data); klass = QLatin1String(gst_element_factory_get_klass(GST_ELEMENT_FACTORY(feature))); if (klass == QLatin1String("Sink/Audio")) { name = QLatin1String(GST_PLUGIN_FEATURE_NAME(feature)); if (name == QLatin1String("sfsink")) continue; //useless to output audio to file when you cannot set the file path else if (name == QLatin1String("autoaudiosink")) continue; //This is used implicitly from the auto setting GstElement *sink = gst_element_factory_make (qPrintable(name), NULL); if (sink) { description = QLatin1String(gst_element_factory_get_description 
(GST_ELEMENT_FACTORY(feature))); audiosinkCombo->addItem(name, name); audiosinkCombo->setItemData(audiosinkCombo->findText(name), description, Qt::ToolTipRole); gst_object_unref (sink); } } } g_list_free(factoryList); } #else tab4->setEnabled(false); phononLabel->setText(tr("Phonon GStreamer backend not available.")); #endif videomodeCombo->addItem(tr("Auto (default)"), QLatin1String("Auto")); videomodeCombo->setItemData(videomodeCombo->findText(tr("Auto (default)")), tr("Choose render method automatically"), Qt::ToolTipRole); #ifdef Q_WS_X11 videomodeCombo->addItem(tr("X11"), QLatin1String("X11")); videomodeCombo->setItemData(videomodeCombo->findText(tr("X11")), tr("Use X11 Overlays"), Qt::ToolTipRole); #endif #ifndef QT_NO_OPENGL videomodeCombo->addItem(tr("OpenGL"), QLatin1String("OpenGL")); videomodeCombo->setItemData(videomodeCombo->findText(tr("OpenGL")), tr("Use OpenGL if available"), Qt::ToolTipRole); #endif videomodeCombo->addItem(tr("Software"), QLatin1String("Software")); videomodeCombo->setItemData(videomodeCombo->findText(tr("Software")), tr("Use simple software rendering"), Qt::ToolTipRole); QString audioSink = settings.value(QLatin1String("audiosink"), QLatin1String("Auto")).toString(); QString videoMode = settings.value(QLatin1String("videomode"), QLatin1String("Auto")).toString(); audiosinkCombo->setCurrentItem(audiosinkCombo->findData(audioSink)); videomodeCombo->setCurrentItem(videomodeCombo->findData(videoMode)); settings.endGroup(); // Qt helpview->setText(tr(appearance_text)); setModified(false); updateStyleLayout(); }
static void output_loop (gpointer data) { GstPad *pad; GOmxCore *gomx; GOmxPort *out_port; GstOmxBaseFilter *self; GstFlowReturn ret = GST_FLOW_OK; pad = data; self = GST_OMX_BASE_FILTER (gst_pad_get_parent (pad)); gomx = self->gomx; GST_LOG_OBJECT (self, "begin"); /* do not bother if we have been setup to bail out */ if ((ret = g_atomic_int_get (&self->last_pad_push_return)) != GST_FLOW_OK) goto leave; if (!self->ready) { g_error ("not ready"); return; } out_port = self->out_port; if (G_LIKELY (out_port->enabled)) { OMX_BUFFERHEADERTYPE *omx_buffer = NULL; GST_LOG_OBJECT (self, "request buffer"); omx_buffer = g_omx_port_request_buffer (out_port); GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer); if (G_UNLIKELY (!omx_buffer)) { GST_WARNING_OBJECT (self, "null buffer: leaving"); ret = GST_FLOW_WRONG_STATE; goto leave; } log_buffer (self, omx_buffer); if (G_LIKELY (omx_buffer->nFilledLen > 0)) { GstBuffer *buf; #if 1 /** @todo remove this check */ if (G_LIKELY (self->in_port->enabled)) { GstCaps *caps = NULL; caps = gst_pad_get_negotiated_caps (self->srcpad); if (!caps) { /** @todo We shouldn't be doing this. */ GST_WARNING_OBJECT (self, "faking settings changed notification"); if (gomx->settings_changed_cb) gomx->settings_changed_cb (gomx); } else { GST_LOG_OBJECT (self, "caps already fixed: %" GST_PTR_FORMAT, caps); gst_caps_unref (caps); } } #endif /* buf is always null when the output buffer pointer isn't shared. */ buf = omx_buffer->pAppPrivate; /** @todo we need to move all the caps handling to one single * place, in the output loop probably. */ if (G_UNLIKELY (omx_buffer->nFlags & 0x80)) { GstCaps *caps = NULL; GstStructure *structure; GValue value = { 0 }; caps = gst_pad_get_negotiated_caps (self->srcpad); caps = gst_caps_make_writable (caps); structure = gst_caps_get_structure (caps, 0); g_value_init (&value, GST_TYPE_BUFFER); buf = gst_buffer_new_and_alloc (omx_buffer->nFilledLen); memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen); gst_value_set_buffer (&value, buf); gst_buffer_unref (buf); gst_structure_set_value (structure, "codec_data", &value); g_value_unset (&value); gst_pad_set_caps (self->srcpad, caps); } else if (buf && !(omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) { GST_BUFFER_SIZE (buf) = omx_buffer->nFilledLen; if (self->use_timestamps) { GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); } omx_buffer->pAppPrivate = NULL; omx_buffer->pBuffer = NULL; ret = push_buffer (self, buf); gst_buffer_unref (buf); } else { /* This is only meant for the first OpenMAX buffers, * which need to be pre-allocated. */ /* Also for the very last one. */ ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE, omx_buffer->nFilledLen, GST_PAD_CAPS (self->srcpad), &buf); if (G_LIKELY (buf)) { memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen); if (self->use_timestamps) { GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND, OMX_TICKS_PER_SECOND); } if (self->share_output_buffer) { GST_WARNING_OBJECT (self, "couldn't zero-copy"); /* If pAppPrivate is NULL, it means it was a dummy * allocation, free it. 
*/ if (!omx_buffer->pAppPrivate) { g_free (omx_buffer->pBuffer); omx_buffer->pBuffer = NULL; } } ret = push_buffer (self, buf); } else { GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %" G_GUINT32_FORMAT, omx_buffer->nFilledLen); } } } else { GST_WARNING_OBJECT (self, "empty buffer"); } if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) { GST_DEBUG_OBJECT (self, "got eos"); gst_pad_push_event (self->srcpad, gst_event_new_eos ()); ret = GST_FLOW_UNEXPECTED; goto leave; } if (self->share_output_buffer && !omx_buffer->pBuffer && omx_buffer->nOffset == 0) { GstBuffer *buf; GstFlowReturn result; GST_LOG_OBJECT (self, "allocate buffer"); result = gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE, omx_buffer->nAllocLen, GST_PAD_CAPS (self->srcpad), &buf); if (G_LIKELY (result == GST_FLOW_OK)) { gst_buffer_ref (buf); omx_buffer->pAppPrivate = buf; omx_buffer->pBuffer = GST_BUFFER_DATA (buf); omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf); } else { GST_WARNING_OBJECT (self, "could not pad allocate buffer, using malloc"); omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen); } } if (self->share_output_buffer && !omx_buffer->pBuffer) { GST_ERROR_OBJECT (self, "no input buffer to share"); } omx_buffer->nFilledLen = 0; GST_LOG_OBJECT (self, "release_buffer"); g_omx_port_release_buffer (out_port, omx_buffer); } leave: self->last_pad_push_return = ret; if (gomx->omx_error != OMX_ErrorNone) ret = GST_FLOW_ERROR; if (ret != GST_FLOW_OK) { GST_INFO_OBJECT (self, "pause task, reason: %s", gst_flow_get_name (ret)); gst_pad_pause_task (self->srcpad); } GST_LOG_OBJECT (self, "end"); gst_object_unref (self); }
//----------------------------------------------------------------------------- void tIMX51Video::RunInternal() { if( !m_RunningInternal ) { //qDebug() << "tHalVideoSr2::RunInternal creating elements"; //Note: freeing the pipeline will also free child elements (source, mirror, csc, sink ) if( m_pGstPipeline ) { gst_object_unref( GST_OBJECT( m_pGstPipeline ) ); } m_EosReceived = false; /* Create elements for pipeline */ m_pGstPipeline = gst_pipeline_new ("video capture"); m_pGstSource = gst_element_factory_make ("sr2_v4lsrc", "camera-source"); m_pGstSink = gst_element_factory_make ("fakesink", "video-output"); /* Need all elements of pipeline to be successfully created */ if (!m_pGstPipeline || !m_pGstSource || !m_pGstSink) { qDebug() << "tHalVideoSr2::runInternal One element could not be created. Exiting" << (int)m_pGstSource << (int)m_pGstSink; return; } //qDebug() << "tHalVideoSr2::RunInternal setting properties"; SetGeometryInternal(); SetChannelInternal(); SetVideoStandardInternal(); SetMirroredInternal(); SetHueInternal(); SetContrastInternal(); SetSaturationInternal(); SetBrightnessInternal(); /* Initialize elements with some custom defaults */ g_object_set(m_pGstSource, "preview", TRUE, NULL); int colourkey; colourkey = (m_ColourKey.red() << 16) | (m_ColourKey.green() << 8) | m_ColourKey.blue(); g_object_set(m_pGstSource, "color-key", colourkey, NULL); /* Set up the pipeline */ /* we add a message handler */ GstBus *bus; bus = gst_pipeline_get_bus (GST_PIPELINE (m_pGstPipeline)); if (!bus) { g_printerr ("gst_pipeline_get_bus failed\n"); return; } gst_bus_set_sync_handler (bus, (GstBusSyncHandler) global_bus_sync_handler, this); gst_object_unref (bus); /* we add all elements into the pipeline */ gst_bin_add_many (GST_BIN (m_pGstPipeline), m_pGstSource, m_pGstSink, NULL); /* we link the elements together */ if (!gst_element_link (m_pGstSource, m_pGstSink) ) { g_printerr ("gst_element_link failed\n"); return; } qDebug() << "tHalVideoSr2::RunInternal setting GST_STATE_PLAYING state"; // Use PAUSED state, because we are using the 'preview' function, which is direct to screen // the PLAYING state activates the frame dequeing into user-space, but this is uneccessary // (they just get dumped into the fakesink anyway) gst_element_set_state (m_pGstPipeline, GST_STATE_PLAYING); m_RunningInternal = true; m_TimerId = startTimer(1000); } //qDebug() << "tHalVideoSr2::RunInternal done"; }
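/* Hedged illustration, not part of the original sources: a minimal
 * GstBusSyncHandler of the kind installed above via gst_bus_set_sync_handler().
 * It shows the expected callback shape; the handler runs in the posting thread,
 * so it should only inspect the message and return quickly. GST_BUS_PASS leaves
 * the message available to any asynchronous bus watch. The function name is
 * illustrative only. */
#include <gst/gst.h>

static GstBusSyncReply
example_bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *debug = NULL;

    gst_message_parse_error (message, &err, &debug);
    g_printerr ("bus error from %s: %s\n",
        GST_OBJECT_NAME (GST_MESSAGE_SRC (message)), err->message);
    g_clear_error (&err);
    g_free (debug);
  }
  /* Pass every message on to the normal (asynchronous) handling. */
  return GST_BUS_PASS;
}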
static GstFlowReturn gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstY4mDec *y4mdec; int n_avail; GstFlowReturn flow_ret = GST_FLOW_OK; #define MAX_HEADER_LENGTH 80 char header[MAX_HEADER_LENGTH]; int i; int len; y4mdec = GST_Y4M_DEC (parent); GST_DEBUG_OBJECT (y4mdec, "chain"); if (GST_BUFFER_IS_DISCONT (buffer)) { GST_DEBUG ("got discont"); gst_adapter_clear (y4mdec->adapter); } gst_adapter_push (y4mdec->adapter, buffer); n_avail = gst_adapter_available (y4mdec->adapter); if (!y4mdec->have_header) { gboolean ret; GstCaps *caps; GstQuery *query; if (n_avail < MAX_HEADER_LENGTH) return GST_FLOW_OK; gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH); header[MAX_HEADER_LENGTH - 1] = 0; for (i = 0; i < MAX_HEADER_LENGTH; i++) { if (header[i] == 0x0a) header[i] = 0; } ret = gst_y4m_dec_parse_header (y4mdec, header); if (!ret) { GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE, ("Failed to parse YUV4MPEG header"), (NULL)); return GST_FLOW_ERROR; } y4mdec->header_size = strlen (header) + 1; gst_adapter_flush (y4mdec->adapter, y4mdec->header_size); caps = gst_video_info_to_caps (&y4mdec->info); ret = gst_pad_set_caps (y4mdec->srcpad, caps); query = gst_query_new_allocation (caps, FALSE); y4mdec->video_meta = FALSE; if (y4mdec->pool) { gst_buffer_pool_set_active (y4mdec->pool, FALSE); gst_object_unref (y4mdec->pool); } y4mdec->pool = NULL; if (gst_pad_peer_query (y4mdec->srcpad, query)) { y4mdec->video_meta = gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL); /* We only need a pool if we need to do stride conversion for downstream */ if (!y4mdec->video_meta && memcmp (&y4mdec->info, &y4mdec->out_info, sizeof (y4mdec->info)) != 0) { GstBufferPool *pool = NULL; GstAllocator *allocator = NULL; GstAllocationParams params; GstStructure *config; guint size, min, max; if (gst_query_get_n_allocation_params (query) > 0) { gst_query_parse_nth_allocation_param (query, 0, &allocator, &params); } else { allocator = NULL; gst_allocation_params_init (&params); } if (gst_query_get_n_allocation_pools (query) > 0) { gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max); size = MAX (size, y4mdec->out_info.size); } else { pool = NULL; size = y4mdec->out_info.size; min = max = 0; } if (pool == NULL) { pool = gst_video_buffer_pool_new (); } config = gst_buffer_pool_get_config (pool); gst_buffer_pool_config_set_params (config, caps, size, min, max); gst_buffer_pool_config_set_allocator (config, allocator, &params); gst_buffer_pool_set_config (pool, config); if (allocator) gst_object_unref (allocator); y4mdec->pool = pool; } } else if (memcmp (&y4mdec->info, &y4mdec->out_info, sizeof (y4mdec->info)) != 0) { GstBufferPool *pool; GstStructure *config; /* No pool, create our own if we need to do stride conversion */ pool = gst_video_buffer_pool_new (); config = gst_buffer_pool_get_config (pool); gst_buffer_pool_config_set_params (config, caps, y4mdec->out_info.size, 0, 0); gst_buffer_pool_set_config (pool, config); y4mdec->pool = pool; } if (y4mdec->pool) { gst_buffer_pool_set_active (y4mdec->pool, TRUE); } gst_query_unref (query); gst_caps_unref (caps); if (!ret) { GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad"); return GST_FLOW_ERROR; } y4mdec->have_header = TRUE; } if (y4mdec->have_new_segment) { GstEvent *event; GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec, y4mdec->segment.start); GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec, y4mdec->segment.stop); GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
y4mdec->segment.time); GstSegment seg; gst_segment_init (&seg, GST_FORMAT_TIME); seg.start = start; seg.stop = stop; seg.time = time; event = gst_event_new_segment (&seg); gst_pad_push_event (y4mdec->srcpad, event); //gst_event_unref (event); y4mdec->have_new_segment = FALSE; y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec, y4mdec->segment.time); GST_DEBUG ("new frame_index %d", y4mdec->frame_index); } while (1) { n_avail = gst_adapter_available (y4mdec->adapter); if (n_avail < MAX_HEADER_LENGTH) break; gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH); header[MAX_HEADER_LENGTH - 1] = 0; for (i = 0; i < MAX_HEADER_LENGTH; i++) { if (header[i] == 0x0a) header[i] = 0; } if (memcmp (header, "FRAME", 5) != 0) { GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE, ("Failed to parse YUV4MPEG frame"), (NULL)); flow_ret = GST_FLOW_ERROR; break; } len = strlen (header); if (n_avail < y4mdec->info.size + len + 1) { /* not enough data */ GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT, n_avail, y4mdec->info.size + len + 1); break; } gst_adapter_flush (y4mdec->adapter, len + 1); buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size); GST_BUFFER_TIMESTAMP (buffer) = gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index); GST_BUFFER_DURATION (buffer) = gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) - GST_BUFFER_TIMESTAMP (buffer); y4mdec->frame_index++; if (y4mdec->video_meta) { gst_buffer_add_video_meta_full (buffer, 0, y4mdec->info.finfo->format, y4mdec->info.width, y4mdec->info.height, y4mdec->info.finfo->n_planes, y4mdec->info.offset, y4mdec->info.stride); } else if (memcmp (&y4mdec->info, &y4mdec->out_info, sizeof (y4mdec->info)) != 0) { GstBuffer *outbuf; GstVideoFrame iframe, oframe; gint i, j; gint w, h, istride, ostride; guint8 *src, *dest; /* Allocate a new buffer and do stride conversion */ g_assert (y4mdec->pool != NULL); flow_ret = gst_buffer_pool_acquire_buffer (y4mdec->pool, &outbuf, NULL); if (flow_ret != GST_FLOW_OK) { gst_buffer_unref (buffer); break; } gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ); gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE); for (i = 0; i < 3; i++) { w = GST_VIDEO_FRAME_COMP_WIDTH (&iframe, i); h = GST_VIDEO_FRAME_COMP_HEIGHT (&iframe, i); istride = GST_VIDEO_FRAME_COMP_STRIDE (&iframe, i); ostride = GST_VIDEO_FRAME_COMP_STRIDE (&oframe, i); src = GST_VIDEO_FRAME_COMP_DATA (&iframe, i); dest = GST_VIDEO_FRAME_COMP_DATA (&oframe, i); for (j = 0; j < h; j++) { memcpy (dest, src, w); dest += ostride; src += istride; } } gst_video_frame_unmap (&iframe); gst_video_frame_unmap (&oframe); gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1); gst_buffer_unref (buffer); buffer = outbuf; } flow_ret = gst_pad_push (y4mdec->srcpad, buffer); if (flow_ret != GST_FLOW_OK) break; } GST_DEBUG ("returning %d", flow_ret); return flow_ret; }
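/* Hedged illustration, not from the y4mdec sources: the GstAdapter accumulation
 * pattern the chain function above is built around, reduced to its core. Buffers
 * are pushed into the adapter and complete, fixed-size frames are taken out and
 * pushed downstream. The function and the frame_size parameter are illustrative
 * only; assumes GStreamer 1.x. */
#include <gst/gst.h>
#include <gst/base/gstadapter.h>

static GstFlowReturn
example_push_whole_frames (GstAdapter * adapter, GstPad * srcpad,
    GstBuffer * inbuf, gsize frame_size)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* The adapter takes ownership of the incoming buffer. */
  gst_adapter_push (adapter, inbuf);

  /* Drain the adapter one complete frame at a time. */
  while (gst_adapter_available (adapter) >= frame_size) {
    GstBuffer *frame = gst_adapter_take_buffer (adapter, frame_size);

    ret = gst_pad_push (srcpad, frame);
    if (ret != GST_FLOW_OK)
      break;
  }
  return ret;
}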
void _receive_video_init_gstreamer(NiceAgent *magent, guint stream_id, CustomData *data) { GstElement *pipeline, *source, *capsfilter, *videoconvert, *h263p, *rtph263pdepay, *sink; GstBus *bus; GstMessage *msg; GstStateChangeReturn ret; GSource *bus_source; GST_INFO ("Pipeline initialization"); // TODO: figure out showing video source = gst_element_factory_make ("udpsrc", "source"); //videoconvert = gst_element_factory_make ("videoconvert", "convert"); //capsfilter = gst_element_factory_make ("capsfilter", "caps"); rtph263pdepay = gst_element_factory_make ("rtph263pdepay", "rtph263pdepay"); h263p = gst_element_factory_make ("avdec_h263p", "h263p"); sink = gst_element_factory_make ("autovideosink", "sink"); /* g_object_set (source, "agent", magent, NULL); g_object_set (source, "stream", stream_id, NULL); g_object_set (source, "component", 1, NULL); */ g_object_set (source, "address", "127.0.0.1", NULL); g_object_set (source, "port", 1234, NULL); g_object_set (source, "caps", gst_caps_from_string("application/x-rtp"), NULL); /* g_object_set (source, "caps", gst_caps_from_string( "application/x-rtp\,\ media\=\(string\)video\,\ " "clock-rate\=\(int\)90000\,\ " "encoding-name\=\(string\)H263-1998\,\ " "payload\=\(int\)96"), NULL); */ //g_object_set (sink, "sync", FALSE, NULL); pipeline = gst_pipeline_new ("Video receive pipeline"); if (!pipeline || !source || //!capsfilter || !h263p || !rtph263pdepay || !sink) { g_printerr ("Not all elements could be created.\n"); return; } // Build the pipeline gst_bin_add_many (GST_BIN (pipeline), source, //capsfilter, rtph263pdepay, h263p, sink, NULL); if (gst_element_link_many (source, //capsfilter, rtph263pdepay, h263p, sink, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return; } // TODO: this is just output dump pipeline /* source = gst_element_factory_make ("nicesrc", "source"); sink = gst_element_factory_make ("fakesink", "sink"); g_object_set (source, "agent", magent, NULL); g_object_set (source, "stream", stream_id, NULL); g_object_set (source, "component", 1, NULL); g_object_set (sink, "dump", 1, NULL); pipeline = gst_pipeline_new ("Video send pipeline"); if (!pipeline || !source || !sink) { g_printerr ("Not all elements could be created.\n"); return; } // Build the pipeline gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL); if (gst_element_link (source, sink) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return; } */ GST_INFO ("Pipeline created, registering on bus"); bus = gst_element_get_bus (pipeline); gst_bus_enable_sync_message_emission (bus); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::error", (GCallback) on_error, NULL); GST_INFO ("Registering pipeline on bus"); data->pipeline = pipeline; ret = gst_element_set_state(data->pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (pipeline); return; } }
static GstFlowReturn gst_gdp_depay_chain (GstPad * pad, GstBuffer * buffer) { GstGDPDepay *this; GstFlowReturn ret = GST_FLOW_OK; GstCaps *caps; GstBuffer *buf; GstEvent *event; guint available; this = GST_GDP_DEPAY (gst_pad_get_parent (pad)); /* On DISCONT, get rid of accumulated data. We assume a buffer after the * DISCONT contains (part of) a new valid header, if not we error because we * lost sync */ if (GST_BUFFER_IS_DISCONT (buffer)) { gst_adapter_clear (this->adapter); this->state = GST_GDP_DEPAY_STATE_HEADER; } gst_adapter_push (this->adapter, buffer); while (TRUE) { switch (this->state) { case GST_GDP_DEPAY_STATE_HEADER: { guint8 *header; /* collect a complete header, validate and store the header. Figure out * the payload length and switch to the PAYLOAD state */ available = gst_adapter_available (this->adapter); if (available < GST_DP_HEADER_LENGTH) goto done; GST_LOG_OBJECT (this, "reading GDP header from adapter"); header = gst_adapter_take (this->adapter, GST_DP_HEADER_LENGTH); if (!gst_dp_validate_header (GST_DP_HEADER_LENGTH, header)) { g_free (header); goto header_validate_error; } /* store types and payload length. Also store the header, which we need * to make the payload. */ this->payload_length = gst_dp_header_payload_length (header); this->payload_type = gst_dp_header_payload_type (header); /* free previous header and store new one. */ g_free (this->header); this->header = header; GST_LOG_OBJECT (this, "read GDP header, payload size %d, payload type %d, switching to state PAYLOAD", this->payload_length, this->payload_type); this->state = GST_GDP_DEPAY_STATE_PAYLOAD; break; } case GST_GDP_DEPAY_STATE_PAYLOAD: { /* in this state we wait for all the payload data to be available in the * adapter. Then we switch to the state where we actually process the * payload. 
*/ available = gst_adapter_available (this->adapter); if (available < this->payload_length) goto done; /* change state based on type */ if (this->payload_type == GST_DP_PAYLOAD_BUFFER) { GST_LOG_OBJECT (this, "switching to state BUFFER"); this->state = GST_GDP_DEPAY_STATE_BUFFER; } else if (this->payload_type == GST_DP_PAYLOAD_CAPS) { GST_LOG_OBJECT (this, "switching to state CAPS"); this->state = GST_GDP_DEPAY_STATE_CAPS; } else if (this->payload_type >= GST_DP_PAYLOAD_EVENT_NONE) { GST_LOG_OBJECT (this, "switching to state EVENT"); this->state = GST_GDP_DEPAY_STATE_EVENT; } else { goto wrong_type; } if (this->payload_length && (!gst_dp_validate_payload (GST_DP_HEADER_LENGTH, this->header, gst_adapter_peek (this->adapter, this->payload_length)))) { goto payload_validate_error; } break; } case GST_GDP_DEPAY_STATE_BUFFER: { /* if we receive a buffer without caps first, we error out */ if (!this->caps) goto no_caps; GST_LOG_OBJECT (this, "reading GDP buffer from adapter"); buf = gst_dp_buffer_from_header (GST_DP_HEADER_LENGTH, this->header); if (!buf) goto buffer_failed; /* now take the payload if there is any */ if (this->payload_length > 0) { guint8 *payload; payload = gst_adapter_take (this->adapter, this->payload_length); memcpy (GST_BUFFER_DATA (buf), payload, this->payload_length); g_free (payload); } /* set caps and push */ gst_buffer_set_caps (buf, this->caps); GST_LOG_OBJECT (this, "deserialized buffer %p, pushing, timestamp %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %" G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT ", size %d, flags 0x%x", buf, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf), GST_BUFFER_SIZE (buf), GST_BUFFER_FLAGS (buf)); ret = gst_pad_push (this->srcpad, buf); if (ret != GST_FLOW_OK) goto push_error; GST_LOG_OBJECT (this, "switching to state HEADER"); this->state = GST_GDP_DEPAY_STATE_HEADER; break; } case GST_GDP_DEPAY_STATE_CAPS: { guint8 *payload; /* take the payload of the caps */ GST_LOG_OBJECT (this, "reading GDP caps from adapter"); payload = gst_adapter_take (this->adapter, this->payload_length); caps = gst_dp_caps_from_packet (GST_DP_HEADER_LENGTH, this->header, payload); g_free (payload); if (!caps) goto caps_failed; GST_DEBUG_OBJECT (this, "deserialized caps %" GST_PTR_FORMAT, caps); gst_caps_replace (&(this->caps), caps); gst_pad_set_caps (this->srcpad, caps); /* drop the creation ref we still have */ gst_caps_unref (caps); GST_LOG_OBJECT (this, "switching to state HEADER"); this->state = GST_GDP_DEPAY_STATE_HEADER; break; } case GST_GDP_DEPAY_STATE_EVENT: { guint8 *payload; GST_LOG_OBJECT (this, "reading GDP event from adapter"); /* adapter doesn't like 0 length payload */ if (this->payload_length > 0) payload = gst_adapter_take (this->adapter, this->payload_length); else payload = NULL; event = gst_dp_event_from_packet (GST_DP_HEADER_LENGTH, this->header, payload); g_free (payload); if (!event) goto event_failed; GST_DEBUG_OBJECT (this, "deserialized event %p of type %s, pushing", event, gst_event_type_get_name (event->type)); gst_pad_push_event (this->srcpad, event); GST_LOG_OBJECT (this, "switching to state HEADER"); this->state = GST_GDP_DEPAY_STATE_HEADER; break; } } } done: gst_object_unref (this); return ret; /* ERRORS */ header_validate_error: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("GDP packet header does not validate")); ret = GST_FLOW_ERROR; goto done; } payload_validate_error: { GST_ELEMENT_ERROR (this, STREAM, 
DECODE, (NULL), ("GDP packet payload does not validate")); ret = GST_FLOW_ERROR; goto done; } wrong_type: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("GDP packet header is of wrong type")); ret = GST_FLOW_ERROR; goto done; } no_caps: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("Received a buffer without first receiving caps")); ret = GST_FLOW_NOT_NEGOTIATED; goto done; } buffer_failed: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("could not create buffer from GDP packet")); ret = GST_FLOW_ERROR; goto done; } push_error: { GST_WARNING_OBJECT (this, "pushing depayloaded buffer returned %d", ret); goto done; } caps_failed: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("could not create caps from GDP packet")); ret = GST_FLOW_ERROR; goto done; } event_failed: { GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL), ("could not create event from GDP packet")); ret = GST_FLOW_ERROR; goto done; } }
/////////////////////////////////////////////////////////////////////////////////////////////////// /// main() /// /// The main function. Creates the pipeline and makes it go. /////////////////////////////////////////////////////////////////////////////////////////////////// int main(int argc, char *argv[]) { // Initialize GStreamer gst_init (&argc, &argv); // Parse the target hostname/IP if (argc != 2) { g_printerr("Usage: %s [host or ip]\n", argv[0]); return -1; } const gchar* target = argv[1]; g_assert(target != NULL); // Parse the pipeline from the string above pipeline = gst_parse_launch(PIPELINE_STRING, NULL); if (pipeline == NULL) { g_printerr("Failed to create pipeline!\n"); return -1; } // Set the clients property of the UDP sink elements GstElement* element = gst_bin_get_by_name(GST_BIN(pipeline), "vsink"); g_assert(element != NULL); const gchar* clients_value = g_strdup_printf("%s:10000", target); g_object_set(element, "clients", clients_value, NULL); g_free(const_cast<gchar*>(clients_value)); gst_object_unref(element); element = gst_bin_get_by_name(GST_BIN(pipeline), "vcsink"); g_assert(element != NULL); clients_value = g_strdup_printf("%s:10001", target); g_object_set(element, "clients", clients_value, NULL); g_free(const_cast<gchar*>(clients_value)); gst_object_unref(element); element = gst_bin_get_by_name(GST_BIN(pipeline), "asink"); g_assert(element != NULL); clients_value = g_strdup_printf("%s:10002", target); g_object_set(element, "clients", clients_value, NULL); g_free(const_cast<gchar*>(clients_value)); gst_object_unref(element); element = gst_bin_get_by_name(GST_BIN(pipeline), "acsink"); g_assert(element != NULL); clients_value = g_strdup_printf("%s:10003", target); g_object_set(element, "clients", clients_value, NULL); g_free(const_cast<gchar*>(clients_value)); gst_object_unref(element); // Create a pipeline tracer for latency / jitter information PipelineTracer* pTracer = new PipelineTracer(pipeline); // Put the pipeline in the playing state GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr("Unable to set the pipeline to the playing state.\n"); gst_object_unref(pipeline); return -1; } // Dump to dot file (if GST_DEBUG_DUMP_DOT_DIR is set) to ${GST_DEBUG_DUMP_DOT_DIR}/.dot. // We wait until the pipeline is playing to make sure pads are linked. GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, argv[0]); // Assign the SIGINT handler to send EOS struct sigaction sigact; sigact.sa_handler = on_sig_int; sigemptyset(&sigact.sa_mask); sigact.sa_flags = 0; sigaction(SIGINT, &sigact, NULL); g_print("Playing... press Ctrl-C to terminate.\n"); // Wait until error or EOS GstBus* bus = gst_element_get_bus(pipeline); GstMessage* msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); // Parse message and print stuff about it. if (msg != NULL) { GError *err; gchar *debug_info; switch (GST_MESSAGE_TYPE(msg)) { case GST_MESSAGE_ERROR: gst_message_parse_error(msg, &err, &debug_info); g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr("Debugging information: %s\n", debug_info ? 
debug_info : "none"); g_clear_error(&err); g_free(debug_info); break; case GST_MESSAGE_EOS: g_print("End-Of-Stream reached.\n"); break; default: // We should not reach here because we only asked for ERRORs and EOS g_printerr("Unexpected message received.\n"); break; } // END switch(message type) gst_message_unref(msg); } // END if (message) // Free resources delete pTracer; gst_object_unref(bus); gst_element_set_state(pipeline, GST_STATE_NULL); gst_object_unref(pipeline); return 0; } // END main()
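/* Hedged illustration: on_sig_int() is installed above but defined elsewhere in
 * the original file. A handler of roughly this shape would inject EOS into the
 * pipeline so that the gst_bus_timed_pop_filtered() call returns with
 * GST_MESSAGE_EOS and the program shuts down cleanly. It assumes `pipeline` is
 * the global set in main(); a production handler might only set a flag instead
 * of calling GStreamer from signal context. */
static void
example_on_sig_int (int signum)
{
  (void) signum;
  if (pipeline != NULL)
    gst_element_send_event (pipeline, gst_event_new_eos ());
}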
bool GstEnginePipeline::Init() { // Here we create all the parts of the gstreamer pipeline - from the source // to the sink. The parts of the pipeline are split up into bins: // uri decode bin -> audio bin // The uri decode bin is a gstreamer builtin that automatically picks the // right type of source and decoder for the URI. // The audio bin gets created here and contains: // queue ! audioconvert ! <caps32> // ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee // rgvolume and rglimiter are only created when replaygain is enabled. // After the tee the pipeline splits. One split is converted to 16-bit int // samples for the scope, the other is kept as float32 and sent to the // speaker. // tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink // tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale // ! convert ! audiosink gst_segment_init(&last_decodebin_segment_, GST_FORMAT_TIME); // Audio bin audiobin_ = gst_bin_new("audiobin"); gst_bin_add(GST_BIN(pipeline_), audiobin_); // Create the sink if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false; if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") && !device_.toString().isEmpty()) { switch (device_.type()) { case QVariant::Int: g_object_set(G_OBJECT(audiosink_), "device", device_.toInt(), nullptr); break; case QVariant::String: g_object_set(G_OBJECT(audiosink_), "device", device_.toString().toUtf8().constData(), nullptr); break; #ifdef Q_OS_WIN32 case QVariant::ByteArray: { GUID guid = QUuid(device_.toByteArray()); g_object_set(G_OBJECT(audiosink_), "device", &guid, nullptr); break; } #endif // Q_OS_WIN32 default: qLog(Warning) << "Unknown device type" << device_; break; } } // Create all the other elements GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue, *convert; queue_ = engine_->CreateElement("queue2", audiobin_); audioconvert_ = engine_->CreateElement("audioconvert", audiobin_); tee = engine_->CreateElement("tee", audiobin_); probe_queue = engine_->CreateElement("queue", audiobin_); probe_converter = engine_->CreateElement("audioconvert", audiobin_); probe_sink = engine_->CreateElement("fakesink", audiobin_); audio_queue = engine_->CreateElement("queue", audiobin_); equalizer_preamp_ = engine_->CreateElement("volume", audiobin_); equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_); stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_); volume_ = engine_->CreateElement("volume", audiobin_); audioscale_ = engine_->CreateElement("audioresample", audiobin_); convert = engine_->CreateElement("audioconvert", audiobin_); if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter || !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ || !stereo_panorama_ || !volume_ || !audioscale_ || !convert) { return false; } // Create the replaygain elements if it's enabled. event_probe is the // audioconvert element we attach the probe to, which will change depending // on whether replaygain is enabled. convert_sink is the element after the // first audioconvert, which again will change. 
GstElement* event_probe = audioconvert_; GstElement* convert_sink = tee; if (rg_enabled_) { rgvolume_ = engine_->CreateElement("rgvolume", audiobin_); rglimiter_ = engine_->CreateElement("rglimiter", audiobin_); audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_); event_probe = audioconvert2_; convert_sink = rgvolume_; if (!rgvolume_ || !rglimiter_ || !audioconvert2_) { return false; } // Set replaygain settings g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr); g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr); g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_), nullptr); } // Create a pad on the outside of the audiobin and connect it to the pad of // the first element. GstPad* pad = gst_element_get_static_pad(queue_, "sink"); gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad)); gst_object_unref(pad); // Add a data probe on the src pad of the audioconvert element for our scope. // We do it here because we want pre-equalized and pre-volume samples // so that our visualizations are not affected by them. pad = gst_element_get_static_pad(event_probe, "src"); gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, &EventHandoffCallback, this, NULL); gst_object_unref(pad); // Configure the fakesink properly g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr); // Setting the equalizer bands: // // GStreamer's GstIirEqualizerNBands sets up shelve filters for the first and // last bands as corner cases. That was causing the "inverted slider" bug. // As a workaround, we create two dummy bands at both ends of the spectrum. // This causes the actual first and last adjustable bands to be // implemented using band-pass filters. g_object_set(G_OBJECT(equalizer_), "num-bands", 10 + 2, nullptr); // Dummy first band (bandwidth 0, cutting below 20Hz): GstObject* first_band = GST_OBJECT( gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), 0)); g_object_set(G_OBJECT(first_band), "freq", 20.0, "bandwidth", 0, "gain", 0.0f, nullptr); g_object_unref(G_OBJECT(first_band)); // Dummy last band (bandwidth 0, cutting over 20KHz): GstObject* last_band = GST_OBJECT(gst_child_proxy_get_child_by_index( GST_CHILD_PROXY(equalizer_), kEqBandCount + 1)); g_object_set(G_OBJECT(last_band), "freq", 20000.0, "bandwidth", 0, "gain", 0.0f, nullptr); g_object_unref(G_OBJECT(last_band)); int last_band_frequency = 0; for (int i = 0; i < kEqBandCount; ++i) { const int index_in_eq = i + 1; GstObject* band = GST_OBJECT(gst_child_proxy_get_child_by_index( GST_CHILD_PROXY(equalizer_), index_in_eq)); const float frequency = kEqBandFrequencies[i]; const float bandwidth = frequency - last_band_frequency; last_band_frequency = frequency; g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth, "gain", 0.0f, nullptr); g_object_unref(G_OBJECT(band)); } // Set the stereo balance. g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_, nullptr); // Set the buffer duration. We set this on this queue instead of the // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin // only affects network sources. // Disable the default buffer and byte limits, so we only buffer based on // time.
g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr); g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr); g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_, nullptr); g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr); if (buffer_duration_nanosec_ > 0) { g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr); } gst_element_link_many(queue_, audioconvert_, convert_sink, nullptr); // Link the elements with special caps // The scope path through the tee gets 16-bit ints. GstCaps* caps16 = gst_caps_new_simple("audio/x-raw", "format", G_TYPE_STRING, "S16LE", NULL); gst_element_link_filtered(probe_converter, probe_sink, caps16); gst_caps_unref(caps16); // Link the outputs of tee to the queues on each path. gst_pad_link(gst_element_get_request_pad(tee, "src_%u"), gst_element_get_static_pad(probe_queue, "sink")); gst_pad_link(gst_element_get_request_pad(tee, "src_%u"), gst_element_get_static_pad(audio_queue, "sink")); // Link replaygain elements if enabled. if (rg_enabled_) { gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr); } // Link everything else. gst_element_link(probe_queue, probe_converter); gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_, stereo_panorama_, volume_, audioscale_, convert, nullptr); // add caps for fixed sample rate and mono, but only if requested if (sample_rate_ != GstEngine::kAutoSampleRate && sample_rate_ > 0) { GstCaps* caps = gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, sample_rate_, nullptr); if (mono_playback_) { gst_caps_set_simple(caps, "channels", G_TYPE_INT, 1, nullptr); } gst_element_link_filtered(convert, audiosink_, caps); gst_caps_unref(caps); } else if (mono_playback_) { GstCaps* capsmono = gst_caps_new_simple("audio/x-raw", "channels", G_TYPE_INT, 1, nullptr); gst_element_link_filtered(convert, audiosink_, capsmono); gst_caps_unref(capsmono); } else { gst_element_link(convert, audiosink_); } // Add probes and handlers. gst_pad_add_probe(gst_element_get_static_pad(probe_converter, "src"), GST_PAD_PROBE_TYPE_BUFFER, HandoffCallback, this, nullptr); gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallbackSync, this, nullptr); bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this); MaybeLinkDecodeToAudio(); return true; }
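/* Hedged illustration, not part of GstEnginePipeline: the tee request-pad linking
 * done inline above, written out with explicit reference handling. A "src_%u" pad
 * is requested from the tee, linked to the branch's sink pad, and the local pad
 * references are dropped; the returned request pad should later be released with
 * gst_element_release_request_pad() when the branch is torn down. The function
 * name is illustrative only; assumes GStreamer 1.x. */
#include <gst/gst.h>

static GstPad *
example_link_tee_branch (GstElement * tee, GstElement * branch_queue)
{
  GstPad *tee_src = gst_element_get_request_pad (tee, "src_%u");
  GstPad *queue_sink = gst_element_get_static_pad (branch_queue, "sink");

  if (tee_src == NULL || queue_sink == NULL ||
      gst_pad_link (tee_src, queue_sink) != GST_PAD_LINK_OK) {
    if (tee_src != NULL) {
      gst_element_release_request_pad (tee, tee_src);
      gst_object_unref (tee_src);
    }
    tee_src = NULL;
  }
  if (queue_sink != NULL)
    gst_object_unref (queue_sink);

  /* Caller keeps tee_src for a later gst_element_release_request_pad() +
   * gst_object_unref() when unlinking the branch. */
  return tee_src;
}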
int main (int argc, char *argv[]) { GstElement *bin; GstElement *decodebin, *decconvert; GstElement *capsfilter, *equalizer, *spectrum, *sinkconvert, *sink; GstCaps *caps; GstBus *bus; GtkWidget *appwindow, *vbox, *hbox, *scale; int i, num_bands = NBANDS; GOptionEntry options[] = { {"bands", 'b', 0, G_OPTION_ARG_INT, &num_bands, "Number of bands", NULL}, {NULL} }; GOptionContext *ctx; GError *err = NULL; ctx = g_option_context_new ("- demo of audio equalizer"); g_option_context_add_main_entries (ctx, options, NULL); g_option_context_add_group (ctx, gst_init_get_option_group ()); g_option_context_add_group (ctx, gtk_get_option_group (TRUE)); if (!g_option_context_parse (ctx, &argc, &argv, &err)) { g_print ("Error initializing: %s\n", err->message); exit (1); } if (argc < 2) { g_print ("Usage: %s <uri to play>\n", argv[0]); g_print (" For optional arguments: --help\n"); exit (-1); } gst_init (&argc, &argv); gtk_init (&argc, &argv); bin = gst_pipeline_new ("bin"); /* Uri decoding */ decodebin = gst_element_factory_make ("uridecodebin", "decoder"); g_object_set (G_OBJECT (decodebin), "uri", argv[1], NULL); /* Force float32 samples */ decconvert = gst_element_factory_make ("audioconvert", "decconvert"); capsfilter = gst_element_factory_make ("capsfilter", "capsfilter"); caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "F32LE", NULL); g_object_set (capsfilter, "caps", caps, NULL); equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer"); g_object_set (G_OBJECT (equalizer), "num-bands", num_bands, NULL); spectrum = gst_element_factory_make ("spectrum", "spectrum"); g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80, "post-messages", TRUE, "interval", 500 * GST_MSECOND, NULL); sinkconvert = gst_element_factory_make ("audioconvert", "sinkconvert"); sink = gst_element_factory_make ("autoaudiosink", "sink"); gst_bin_add_many (GST_BIN (bin), decodebin, decconvert, capsfilter, equalizer, spectrum, sinkconvert, sink, NULL); if (!gst_element_link_many (decconvert, capsfilter, equalizer, spectrum, sinkconvert, sink, NULL)) { fprintf (stderr, "can't link elements\n"); exit (1); } /* Handle dynamic pads */ g_signal_connect (G_OBJECT (decodebin), "pad-added", G_CALLBACK (dynamic_link), gst_element_get_static_pad (decconvert, "sink")); bus = gst_element_get_bus (bin); gst_bus_add_watch (bus, message_handler, NULL); gst_object_unref (bus); appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_window_set_title (GTK_WINDOW (appwindow), "Equalizer Demo"); g_signal_connect (G_OBJECT (appwindow), "destroy", G_CALLBACK (on_window_destroy), NULL); vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 6); drawingarea = gtk_drawing_area_new (); gtk_widget_set_size_request (drawingarea, spect_bands, spect_height); g_signal_connect (G_OBJECT (drawingarea), "configure-event", G_CALLBACK (on_configure_event), (gpointer) spectrum); gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0); hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 20); for (i = 0; i < num_bands; i++) { GObject *band; gdouble freq; gdouble bw; gdouble gain; gchar *label; GtkWidget *frame, *scales_hbox; band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (equalizer), i); g_assert (band != NULL); g_object_get (band, "freq", &freq, NULL); g_object_get (band, "bandwidth", &bw, NULL); g_object_get (band, "gain", &gain, NULL); label = g_strdup_printf ("%d Hz", (int) (freq + 0.5)); frame = gtk_frame_new (label); g_free (label); scales_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 6); /* Create 
gain scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, -24.0, 12.0, 0.5); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), gain); gtk_widget_set_size_request (scale, 35, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_gain_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, FALSE, FALSE, 0); /* Create bandwidth scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, 0.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), bw); gtk_widget_set_size_request (scale, 45, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_bandwidth_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, TRUE, TRUE, 0); /* Create frequency scale */ scale = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL, 20.0, 20000.0, 5.0); gtk_scale_set_draw_value (GTK_SCALE (scale), TRUE); gtk_scale_set_value_pos (GTK_SCALE (scale), GTK_POS_TOP); gtk_range_set_value (GTK_RANGE (scale), freq); gtk_widget_set_size_request (scale, 45, 150); g_signal_connect (G_OBJECT (scale), "value-changed", G_CALLBACK (on_freq_changed), (gpointer) band); gtk_box_pack_start (GTK_BOX (scales_hbox), scale, TRUE, TRUE, 0); gtk_container_add (GTK_CONTAINER (frame), scales_hbox); gtk_box_pack_start (GTK_BOX (hbox), frame, TRUE, TRUE, 0); } gtk_box_pack_start (GTK_BOX (vbox), hbox, TRUE, TRUE, 0); gtk_container_add (GTK_CONTAINER (appwindow), vbox); gtk_widget_show_all (appwindow); gst_element_set_state (bin, GST_STATE_PLAYING); gtk_main (); gst_element_set_state (bin, GST_STATE_NULL); gst_object_unref (bin); return 0; }
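/* Hedged illustration: the "value-changed" handlers connected above
 * (on_gain_changed, on_bandwidth_changed, on_freq_changed) are defined elsewhere
 * in the demo and not shown here. A gain handler could look roughly like this:
 * the band object passed as user data is the GstChildProxy child fetched above,
 * and its "gain" property is a double in dB. The function name is illustrative. */
#include <gtk/gtk.h>
#include <glib-object.h>

static void
example_on_gain_changed (GtkRange * range, gpointer user_data)
{
  GObject *band = G_OBJECT (user_data);
  gdouble gain = gtk_range_get_value (range);

  g_object_set (band, "gain", gain, NULL);
}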
static void mux_pcm_audio (guint num_buffers, guint repeat) { GstElement *src, *sink, *flvmux, *conv, *pipeline; GstPad *sinkpad, *srcpad; gint counter; GST_LOG ("num_buffers = %u", num_buffers); pipeline = gst_pipeline_new ("pipeline"); fail_unless (pipeline != NULL, "Failed to create pipeline!"); /* kids, don't use a sync handler for this at home, really; we do because * we just want to abort and nothing else */ gst_bus_set_sync_handler (GST_ELEMENT_BUS (pipeline), error_cb, NULL); src = gst_element_factory_make ("audiotestsrc", "audiotestsrc"); fail_unless (src != NULL, "Failed to create 'audiotestsrc' element!"); g_object_set (src, "num-buffers", num_buffers, NULL); conv = gst_element_factory_make ("audioconvert", "audioconvert"); fail_unless (conv != NULL, "Failed to create 'audioconvert' element!"); flvmux = gst_element_factory_make ("flvmux", "flvmux"); fail_unless (flvmux != NULL, "Failed to create 'flvmux' element!"); sink = gst_element_factory_make ("fakesink", "fakesink"); fail_unless (sink != NULL, "Failed to create 'fakesink' element!"); g_object_set (sink, "signal-handoffs", TRUE, NULL); g_signal_connect (sink, "handoff", G_CALLBACK (handoff_cb), &counter); gst_bin_add_many (GST_BIN (pipeline), src, conv, flvmux, sink, NULL); fail_unless (gst_element_link (src, conv)); fail_unless (gst_element_link (flvmux, sink)); /* now link the elements */ sinkpad = gst_element_get_request_pad (flvmux, "audio"); fail_unless (sinkpad != NULL, "Could not get audio request pad"); srcpad = gst_element_get_static_pad (conv, "src"); fail_unless (srcpad != NULL, "Could not get audioconvert's source pad"); fail_unless_equals_int (gst_pad_link (srcpad, sinkpad), GST_PAD_LINK_OK); gst_object_unref (srcpad); gst_object_unref (sinkpad); do { GstStateChangeReturn state_ret; GstMessage *msg; GST_LOG ("repeat=%d", repeat); counter = 0; state_ret = gst_element_set_state (pipeline, GST_STATE_PAUSED); fail_unless (state_ret != GST_STATE_CHANGE_FAILURE); if (state_ret == GST_STATE_CHANGE_ASYNC) { GST_LOG ("waiting for pipeline to reach PAUSED state"); state_ret = gst_element_get_state (pipeline, NULL, NULL, -1); fail_unless_equals_int (state_ret, GST_STATE_CHANGE_SUCCESS); } GST_LOG ("PAUSED, let's do the rest of it"); state_ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); fail_unless (state_ret != GST_STATE_CHANGE_FAILURE); msg = gst_bus_poll (GST_ELEMENT_BUS (pipeline), GST_MESSAGE_EOS, -1); fail_unless (msg != NULL, "Expected EOS message on bus!"); GST_LOG ("EOS"); gst_message_unref (msg); /* should have some output */ fail_unless (counter > 2); fail_unless_equals_int (gst_element_set_state (pipeline, GST_STATE_NULL), GST_STATE_CHANGE_SUCCESS); /* repeat = test re-usability */ --repeat; } while (repeat > 0); gst_object_unref (pipeline); }
static void gst_insert_bin_do_change (GstInsertBin * self, GstPad * pad) { struct ChangeData *data; GST_OBJECT_LOCK (self); if (!is_right_direction_for_block (pad)) { GST_WARNING_OBJECT (self, "Block pad does not have the expected direction"); goto next; } while ((data = g_queue_pop_head (&self->priv->change_queue)) != NULL) { GstPad *peer = NULL; GstPad *other_peer = NULL; GST_OBJECT_UNLOCK (self); if (data->action == GST_INSERT_BIN_ACTION_ADD && !validate_element (self, data->element)) goto error; peer = gst_pad_get_peer (pad); if (peer == NULL) { GST_WARNING_OBJECT (self, "Blocked pad has no peer"); goto error; } if (data->action == GST_INSERT_BIN_ACTION_ADD) { GstPad *srcpad = NULL, *sinkpad = NULL; GstPad *peersrcpad, *peersinkpad; /* First let's make sure we have the right pad */ if (data->sibling) { GstElement *parent = NULL; GstPad *siblingpad; if ((gst_pad_get_direction (pad) == GST_PAD_SRC && data->direction == DIRECTION_BEFORE) || (gst_pad_get_direction (pad) == GST_PAD_SINK && data->direction == DIRECTION_AFTER)) siblingpad = peer; else siblingpad = pad; parent = gst_pad_get_parent_element (siblingpad); if (parent != NULL) gst_object_unref (parent); if (parent != data->sibling) goto retry; } else { GstObject *parent; GstPad *ghost; GstPad *proxypad; if (data->direction == DIRECTION_BEFORE) { ghost = self->priv->srcpad; if (gst_pad_get_direction (pad) == GST_PAD_SINK) proxypad = pad; else proxypad = peer; } else { ghost = self->priv->sinkpad; if (gst_pad_get_direction (pad) == GST_PAD_SINK) proxypad = peer; else proxypad = pad; } if (!GST_IS_PROXY_PAD (proxypad)) goto retry; parent = gst_pad_get_parent (proxypad); if (!parent) goto retry; gst_object_unref (parent); if (GST_PAD_CAST (parent) != ghost) goto retry; } if (gst_pad_get_direction (pad) == GST_PAD_SRC) { peersrcpad = pad; peersinkpad = peer; } else { peersrcpad = peer; peersinkpad = pad; } if (GST_IS_PROXY_PAD (peersrcpad)) { GstObject *parent = gst_pad_get_parent (peersrcpad); if (GST_PAD_CAST (parent) == self->priv->sinkpad) peersrcpad = NULL; if (parent) gst_object_unref (parent); } if (GST_IS_PROXY_PAD (peersinkpad)) { GstObject *parent = gst_pad_get_parent (peersinkpad); if (GST_PAD_CAST (parent) == self->priv->srcpad) peersinkpad = NULL; if (parent) gst_object_unref (parent); } if (peersinkpad && peersrcpad) { gst_pad_unlink (peersrcpad, peersinkpad); } else { if (!peersinkpad) gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad), NULL); if (!peersrcpad) gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad), NULL); } srcpad = get_single_pad (data->element, GST_PAD_SRC); sinkpad = get_single_pad (data->element, GST_PAD_SINK); if (srcpad == NULL || sinkpad == NULL) { GST_WARNING_OBJECT (self, "Can not get element src or sink pad"); goto error; } if (!gst_bin_add (GST_BIN (self), data->element)) { GST_WARNING_OBJECT (self, "Can not add element to bin"); goto error; } if (peersrcpad) { if (GST_PAD_LINK_FAILED (gst_pad_link (peersrcpad, sinkpad))) { GST_WARNING_OBJECT (self, "Can not link sibling's %s:%s pad" " to element's %s:%s pad", GST_DEBUG_PAD_NAME (peersrcpad), GST_DEBUG_PAD_NAME (sinkpad)); goto error; } } else { if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad), sinkpad)) { GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s", GST_DEBUG_PAD_NAME (sinkpad), GST_DEBUG_PAD_NAME (self->priv->sinkpad)); goto error; } } if (peersinkpad) { if (GST_PAD_LINK_FAILED (gst_pad_link (srcpad, peersinkpad))) { GST_WARNING_OBJECT (self, "Can not link element's %s:%s pad" " to 
sibling's %s:%s pad", GST_DEBUG_PAD_NAME (srcpad), GST_DEBUG_PAD_NAME (peersinkpad)); goto error; } } else { if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad), srcpad)) { GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s", GST_DEBUG_PAD_NAME (srcpad), GST_DEBUG_PAD_NAME (self->priv->srcpad)); goto error; } } gst_object_unref (srcpad); gst_object_unref (sinkpad); if (!gst_element_sync_state_with_parent (data->element)) { GST_WARNING_OBJECT (self, "Can not sync element's state with parent"); goto error; } } else { GstElement *parent = NULL; GstPad *other_pad; GstCaps *caps = NULL, *peercaps = NULL; gboolean can_intersect; gboolean success; parent = gst_pad_get_parent_element (peer); if (parent != NULL) gst_object_unref (parent); if (parent != data->element) goto retry; if (gst_pad_get_direction (peer) == GST_PAD_SRC) other_pad = get_single_pad (data->element, GST_PAD_SINK); else other_pad = get_single_pad (data->element, GST_PAD_SRC); if (!other_pad) { GST_WARNING_OBJECT (self, "Can not get element's other pad"); goto error; } other_peer = gst_pad_get_peer (other_pad); gst_object_unref (other_pad); if (!other_peer) { GST_WARNING_OBJECT (self, "Can not get element's other peer"); goto error; } /* Get the negotiated caps for the source pad peer, * because renegotiation while the pipeline is playing doesn't work * that fast. */ if (gst_pad_get_direction (pad) == GST_PAD_SRC) caps = gst_pad_get_current_caps (pad); else peercaps = gst_pad_get_current_caps (other_peer); if (!caps) caps = gst_pad_query_caps (pad, NULL); if (!peercaps) peercaps = gst_pad_query_caps (other_peer, NULL); can_intersect = gst_caps_can_intersect (caps, peercaps); gst_caps_unref (caps); gst_caps_unref (peercaps); if (!can_intersect) { GST_WARNING_OBJECT (self, "Pads are incompatible without the element"); goto error; } if (gst_pad_get_direction (other_peer) == GST_PAD_SRC && gst_pad_is_active (other_peer)) { gulong probe_id; probe_id = gst_pad_add_probe (other_peer, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, wait_and_drop_eos_cb, NULL, NULL); gst_pad_send_event (peer, gst_event_new_eos ()); gst_pad_remove_probe (other_peer, probe_id); } gst_element_set_locked_state (data->element, TRUE); gst_element_set_state (data->element, GST_STATE_NULL); if (!gst_bin_remove (GST_BIN (self), data->element)) { GST_WARNING_OBJECT (self, "Element removal rejected"); goto error; } gst_element_set_locked_state (data->element, FALSE); if (gst_pad_get_direction (pad) == GST_PAD_SRC) success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (pad, other_peer, GST_PAD_LINK_CHECK_HIERARCHY | GST_PAD_LINK_CHECK_TEMPLATE_CAPS)); else success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (other_peer, pad, GST_PAD_LINK_CHECK_HIERARCHY | GST_PAD_LINK_CHECK_TEMPLATE_CAPS)); gst_object_unref (other_peer); other_peer = NULL; if (!success) { GST_ERROR_OBJECT (self, "Could not re-link after the element's" " removal"); goto error; } } gst_insert_bin_change_data_complete (self, data, TRUE); gst_object_unref (peer); GST_OBJECT_LOCK (self); continue; done: if (other_peer != NULL) gst_object_unref (other_peer); if (peer != NULL) gst_object_unref (peer); break; retry: GST_OBJECT_LOCK (self); g_queue_push_head (&self->priv->change_queue, data); goto done; error: /* Handle error */ gst_insert_bin_change_data_complete (self, data, FALSE); GST_OBJECT_LOCK (self); goto done; } next: gst_insert_bin_block_pad_unlock (self); }
static GstCaps * gst_dshowvideodec_src_getcaps (GstPad * pad) { GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad); GstCaps *caps = NULL; if (!vdec->srccaps) vdec->srccaps = gst_caps_new_empty (); if (vdec->decfilter) { CComPtr<IPin> output_pin; CComPtr<IEnumMediaTypes> enum_mediatypes; HRESULT hres; ULONG fetched; output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT); if (!output_pin) { GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("failed getting output pin from the decoder"), (NULL)); goto beach; } hres = output_pin->EnumMediaTypes (&enum_mediatypes); if (hres == S_OK && enum_mediatypes) { AM_MEDIA_TYPE *mediatype = NULL; enum_mediatypes->Reset(); while (hres = enum_mediatypes->Next(1, &mediatype, &fetched), hres == S_OK) { VIDEOINFOHEADER *video_info; GstCaps *mediacaps = NULL; /* RGB24 */ if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_RGB24) && IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo)) { video_info = (VIDEOINFOHEADER *) mediatype->pbFormat; /* ffmpegcolorspace handles RGB24 in BIG_ENDIAN */ mediacaps = gst_caps_new_simple ("video/x-raw-rgb", "bpp", G_TYPE_INT, 24, "depth", G_TYPE_INT, 24, "width", G_TYPE_INT, video_info->bmiHeader.biWidth, "height", G_TYPE_INT, video_info->bmiHeader.biHeight, "framerate", GST_TYPE_FRACTION, (int) (10000000 / video_info->AvgTimePerFrame), 1, "endianness", G_TYPE_INT, G_BIG_ENDIAN, "red_mask", G_TYPE_INT, 255, "green_mask", G_TYPE_INT, 65280, "blue_mask", G_TYPE_INT, 16711680, NULL); if (mediacaps) { vdec->mediatypes = g_list_append (vdec->mediatypes, mediatype); gst_caps_append (vdec->srccaps, mediacaps); } else { DeleteMediaType (mediatype); } } else { DeleteMediaType (mediatype); } } } } if (vdec->srccaps) caps = gst_caps_ref (vdec->srccaps); beach: gst_object_unref (vdec); return caps; }