/* Instance init: create the cancellable used to abort blocking GIO
 * operations and render buffers without syncing to the clock. */
static void
gst_gio_base_sink_init (GstGioBaseSink * sink)
{
  sink->cancel = g_cancellable_new ();
  gst_base_sink_set_sync (GST_BASE_SINK (sink), FALSE);
}
// Build a custom pipeline from a gst-launch description; the description
// must contain an element named "sink" that we drive synchronously.
// Returns false when the description cannot be parsed.
bool ofGstUtils::setPipelineWithSink(string pipeline){
	bHavePixelsChanged = false;
	bIsCustomWithSink = true;
	gstData.loop = g_main_loop_new (NULL, FALSE);

	gchar* pipeline_string = g_strdup((pipeline).c_str());
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);
	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);
	g_free(pipeline_string);	// fix: the duplicated string was leaked on every call
	if(error!=NULL){
		ofLog(OF_LOG_ERROR,"couldnt create pipeline: " + string(error->message));
		g_error_free(error);	// fix: the GError was leaked on the failure path
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);

	return startPipeline();
}
/* Instance initialiser: reset every per-instance field to its idle
 * default.  No resources are opened here; fds stay at -1 until the
 * element starts. */
static void
gst_dvbvideosink_init (GstDVBVideoSink *self, GstDVBVideoSinkClass *gclass)
{
  /* codec / stream-header state */
  self->must_send_header = TRUE;
  self->h264_nal_len_size = 0;
  self->pesheader_buffer = NULL;
  self->codec_data = NULL;
  self->codec_type = CT_H264;

#ifdef PACK_UNPACKED_XVID_DIVX5_BITSTREAM
  self->must_pack_bitstream = FALSE;
  self->num_non_keyframes = 0;
  self->prev_frame = NULL;
#endif

  /* playback state flags */
  self->paused = FALSE;
  self->playing = FALSE;
  self->unlocking = FALSE;
  self->flushing = FALSE;

  /* timestamp bookkeeping */
  self->pts_written = FALSE;
  self->lastpts = 0;
  self->timestamp_offset = 0;

  /* resources: nothing opened yet */
  self->queue = NULL;
  self->fd = -1;
  self->unlockfd[0] = -1;
  self->unlockfd[1] = -1;
  self->saved_fallback_framerate[0] = 0;
  self->rate = 1.0;

  /* push data as fast as it arrives, keep async state changes enabled */
  gst_base_sink_set_sync (GST_BASE_SINK (self), FALSE);
  gst_base_sink_set_async_enabled (GST_BASE_SINK (self), TRUE);
}
/**
 * Initialize the new element instance
 *
 * We initialize instance data for our structure: defaults for the URI,
 * signing parameters, FIFO indices, and a pre-allocated buffer.
 * \todo shouldn't this code call the base class init also?
 *
 * \param me pointer to the new instance
 * \param gclass pointer to the class definition
 */
static void
gst_ccnxsink_init (Gstccnxsink * me, /*@unused@ */ GstccnxsinkClass * gclass)
{
  GST_DEBUG ("CCNxSink: instance init");
  /* render as fast as data arrives; no clock sync */
  gst_base_sink_set_sync (GST_BASE_SINK (me), FALSE);
  me->silent = FALSE;
  me->uri = g_strdup (CCNX_DEFAULT_URI);
  me->name = NULL;
  /* fix: keylocator and keystore were each assigned NULL twice */
  me->keylocator = NULL;
  me->keystore = NULL;
  me->ts = GST_CLOCK_TIME_NONE;
  me->temp = NULL;
  me->partial = NULL;
  me->lastPublish = NULL;
  me->signed_info = NULL;
  memcpy (&(me->sp), &CCNX_DEFAULT_SIGNING_PARAMS,
      sizeof (CCNX_DEFAULT_SIGNING_PARAMS));
  me->expire = CCNX_DEFAULT_EXPIRATION;
  me->segment = 0;
  me->fifo_head = 0;
  me->fifo_tail = 0;
  me->buf = gst_buffer_new_and_alloc (CCN_FIFO_BLOCK_SIZE);
  me->obuf = NULL;
}
/* Instance init: populate every property with its compile-time default
 * and mark the element as not yet connected to an icecast server. */
static void
gst_shout2send_init (GstShout2send * shout2send)
{
  /* no clock sync: stream as fast as the server accepts data */
  gst_base_sink_set_sync (GST_BASE_SINK (shout2send), FALSE);
  shout2send->timer = gst_poll_new_timer ();

  /* connection settings */
  shout2send->ip = g_strdup (DEFAULT_IP);
  shout2send->port = DEFAULT_PORT;
  shout2send->password = g_strdup (DEFAULT_PASSWORD);
  shout2send->username = g_strdup (DEFAULT_USERNAME);
  shout2send->mount = g_strdup (DEFAULT_MOUNT);
  shout2send->protocol = DEFAULT_PROTOCOL;

  /* stream metadata */
  shout2send->streamname = g_strdup (DEFAULT_STREAMNAME);
  shout2send->description = g_strdup (DEFAULT_DESCRIPTION);
  shout2send->genre = g_strdup (DEFAULT_GENRE);
  shout2send->url = g_strdup (DEFAULT_URL);
  shout2send->ispublic = DEFAULT_PUBLIC;
  shout2send->tags = gst_tag_list_new_empty ();

  /* runtime state */
  shout2send->conn = NULL;
  shout2send->audio_format = SHOUT_FORMAT_VORBIS;
  shout2send->connected = FALSE;
  shout2send->songmetadata = NULL;
  shout2send->songartist = NULL;
  shout2send->songtitle = NULL;
}
/* Instance init: disable clock sync and async state changes.
 * fix: removed the local 'bsink' variable, which was assigned but
 * never read. */
static void
gst_aml_asink_init (GstAmlAsink * amlasink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (amlasink), FALSE);
  gst_base_sink_set_async_enabled (GST_BASE_SINK (amlasink), FALSE);
}
/* Instance init: start from the property defaults with no location or
 * codec info attached yet. */
static void
gst_analyzer_sink_init (GstAnalyzerSink * analyzersink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (analyzersink), DEFAULT_SYNC);

  analyzersink->dump = DEFAULT_DUMP;
  analyzersink->num_buffers = DEFAULT_NUM_BUFFERS;
  analyzersink->frame_num = 0;
  analyzersink->location = NULL;
  analyzersink->codec_info = NULL;
}
/* Instance init: install the custom query handler on the sink pad,
 * disable clock sync, and create the cancellable used to abort
 * blocking GIO calls. */
static void
gst_gio_base_sink_init (GstGioBaseSink * sink, GstGioBaseSinkClass * gclass)
{
  GstPad *sinkpad = GST_BASE_SINK_PAD (sink);

  gst_pad_set_query_function (sinkpad,
      GST_DEBUG_FUNCPTR (gst_gio_base_sink_query));

  gst_base_sink_set_sync (GST_BASE_SINK (sink), FALSE);
  sink->cancel = g_cancellable_new ();
}
/* Instance init: default to writing on stdout (fd 1) with position
 * counters reset and clock sync off. */
static void
gst_fd_sink_init (GstFdSink * fdsink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (fdsink), FALSE);

  fdsink->fd = 1;
  fdsink->uri = g_strdup_printf ("fd://%d", fdsink->fd);
  fdsink->bytes_written = 0;
  fdsink->current_pos = 0;
}
/* Per-type init: wire the instance LADSPA state to its class descriptor
 * and apply the default buffer count and sync behaviour. */
static void
gst_ladspa_sink_type_init (GstLADSPASink * ladspa, LADSPA_Descriptor * desc)
{
  GstLADSPASinkClass *klass = GST_LADSPA_SINK_GET_CLASS (ladspa);

  gst_ladspa_init (&ladspa->ladspa, &klass->ladspa);
  ladspa->num_buffers = GST_LADSPA_SINK_DEFAULT_NUM_BUFFERS;
  gst_base_sink_set_sync (GST_BASE_SINK (ladspa),
      GST_LADSPA_SINK_DEFAULT_SYNC);
}
/* Instance init: every property starts at its declared default. */
static void
gst_fake_sink_init (GstFakeSink * fakesink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (fakesink), DEFAULT_SYNC);

  fakesink->silent = DEFAULT_SILENT;
  fakesink->dump = DEFAULT_DUMP;
  fakesink->last_message = g_strdup (DEFAULT_LAST_MESSAGE);
  fakesink->state_error = DEFAULT_STATE_ERROR;
  fakesink->signal_handoffs = DEFAULT_SIGNAL_HANDOFFS;
  fakesink->num_buffers = DEFAULT_NUM_BUFFERS;
}
/* Instance init: default filename template and counters, no clock sync
 * so files are written as fast as buffers arrive. */
static void
gst_multi_file_sink_init (GstMultiFileSink * multifilesink,
    GstMultiFileSinkClass * g_class)
{
  gst_base_sink_set_sync (GST_BASE_SINK (multifilesink), FALSE);

  multifilesink->filename = g_strdup (DEFAULT_LOCATION);
  multifilesink->index = DEFAULT_INDEX;
  multifilesink->post_messages = DEFAULT_POST_MESSAGES;
  multifilesink->next_segment = GST_CLOCK_TIME_NONE;
}
/* Instance init: no file open yet; buffering properties at their
 * defaults and clock sync disabled. */
static void
gst_file_sink_init (GstFileSink * filesink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (filesink), FALSE);

  filesink->filename = NULL;
  filesink->file = NULL;
  filesink->current_pos = 0;
  filesink->buffer_mode = DEFAULT_BUFFER_MODE;
  filesink->buffer_size = DEFAULT_BUFFER_SIZE;
  filesink->buffer = NULL;
  filesink->append = FALSE;
}
/* Instance init: install the custom query handler on the sink pad and
 * default to writing on stdout (fd 1) without clock sync. */
static void
gst_fd_sink_init (GstFdSink * fdsink, GstFdSinkClass * klass)
{
  gst_pad_set_query_function (GST_BASE_SINK_PAD (fdsink),
      GST_DEBUG_FUNCPTR (gst_fd_sink_query));

  fdsink->fd = 1;
  fdsink->uri = g_strdup_printf ("fd://%d", fdsink->fd);
  fdsink->bytes_written = 0;
  fdsink->current_pos = 0;

  gst_base_sink_set_sync (GST_BASE_SINK (fdsink), FALSE);
}
// Adopt an already-built pipeline and sink.  The sink (when present) is
// made synchronous, and we remember whether it is an appsink so frame
// grabbing can use the appsink API.
bool ofGstUtils::setPipelineWithSink(GstElement * pipeline, GstElement * sink, bool isStream_){
	gstPipeline = pipeline;
	gstSink     = sink;
	isStream    = isStream_;

	isAppSink = false;
	if(gstSink){
		gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
		const string factoryName = gst_plugin_feature_get_name(
				GST_PLUGIN_FEATURE(gst_element_get_factory(gstSink)));
		isAppSink = (factoryName == "appsink");
	}

	return startPipeline();
}
// Load a movie through playbin with an RGB appsink attached as the
// video sink.  Relative paths are turned into file:// URIs; anything
// already containing "://" is treated as a stream.
// fix: removed stray double semicolon on the return statement.
bool ofGstUtils::loadMovie(string name){
	bpp 				= 24;
	bLoaded      		= false;
	bPaused 			= true;
	speed 				= 1.0f;
	bHavePixelsChanged 	= false;
	if( name.find( "://",0 ) == string::npos){
		name 			= "file://"+ofToDataPath(name,true);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	gstData.loop		= g_main_loop_new (NULL, FALSE);

	gstPipeline = gst_element_factory_make("playbin","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	gstSink = gst_element_factory_make("appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, 24,
			//"depth", G_TYPE_INT, 24,
			/*"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,*/
			NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), false);
	g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);

	GstElement *audioSink = gst_element_factory_make("gconfaudiosink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

	return startPipeline();
}
/* Instance init: filename template, rotation limits and GOP handling
 * all start at their property defaults; sync to clock is disabled. */
static void
gst_multi_file_sink_init (GstMultiFileSink * multifilesink)
{
  gst_base_sink_set_sync (GST_BASE_SINK (multifilesink), FALSE);

  /* output naming and indexing */
  multifilesink->filename = g_strdup (DEFAULT_LOCATION);
  multifilesink->index = DEFAULT_INDEX;
  multifilesink->post_messages = DEFAULT_POST_MESSAGES;

  /* file-rotation limits */
  multifilesink->max_files = DEFAULT_MAX_FILES;
  multifilesink->max_file_size = DEFAULT_MAX_FILE_SIZE;
  multifilesink->max_file_duration = DEFAULT_MAX_FILE_DURATION;

  /* GOP aggregation state */
  multifilesink->aggregate_gops = DEFAULT_AGGREGATE_GOPS;
  multifilesink->gop_adapter = NULL;

  multifilesink->next_segment = GST_CLOCK_TIME_NONE;
  multifilesink->force_key_unit_count = -1;
}
/* Instance init: install the custom query handler on the sink pad;
 * no file is open yet and clock sync is disabled. */
static void
gst_file_sink_init (GstFileSink * filesink, GstFileSinkClass * g_class)
{
  gst_pad_set_query_function (GST_BASE_SINK_PAD (filesink),
      GST_DEBUG_FUNCPTR (gst_file_sink_query));

  filesink->filename = NULL;
  filesink->file = NULL;
  filesink->buffer_mode = DEFAULT_BUFFER_MODE;
  filesink->buffer_size = DEFAULT_BUFFER_SIZE;
  filesink->buffer = NULL;
  filesink->append = FALSE;

  gst_base_sink_set_sync (GST_BASE_SINK (filesink), FALSE);
}
// Build a custom pipeline from a gst-launch description that must
// contain an element named "sink".
// fix: parse errors from gst_parse_launch were silently ignored, which
// would dereference a NULL gstPipeline below; also free the duplicated
// pipeline string and the GError.
bool ofGstUtils::setPipelineWithSink(string pipeline){
	bHavePixelsChanged = false;
	bIsCustomWithSink = true;
	gstData.loop = g_main_loop_new (NULL, FALSE);

	gchar* pipeline_string = g_strdup((pipeline).c_str());
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);
	g_free(pipeline_string);
	if(error!=NULL){
		ofLog(OF_LOG_ERROR,"couldnt create pipeline: " + string(error->message));
		g_error_free(error);
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);

	return startPipeline();
}
// Start V4L2 camera capture: format a gst-launch string from the
// configured capture mode, parse it into a pipeline, attach the named
// appsink, and drive the pipeline to PLAYING.  No-op when the
// controller has no implementation object.
void CameraController::start_capture()
{
    if(m_impl)
    {
        // capture geometry and framerate come from the configured mode
        uint32_t w = m_impl->m_capture_mode.width, h = m_impl->m_capture_mode.height;
        uint32_t fps_nom = m_impl->m_capture_mode.framerate_nom,
                fps_denom = m_impl->m_capture_mode.framerate_denom;
        const char* app_sink_name = "kinski_appsink";

        // MJPEG from the device, decoded and converted to RGB for the appsink
        std::string pipeline_str =
                "v4l2src device=/dev/video%d ! "
                "image/jpeg, width=%d, height=%d, framerate=%d/%d !"
//                "video/x-raw, width=%d, height=%d !"
                "decodebin !"
                "videoconvert !"
                "appsink name=%s enable-last-sample=0 caps=\"video/x-raw,format=RGB\"";

        pipeline_str = crocore::format(pipeline_str, m_impl->m_device_id, w, h, fps_nom,
                                       fps_denom, app_sink_name);

        GError *error = nullptr;

        // construct a pipeline
        GstElement *pipeline = gst_parse_launch(pipeline_str.c_str(), &error);

        if(error)
        {
            LOG_ERROR << "could not construct pipeline: " << error->message;
            g_error_free(error);
        }
        else
        {
            // look up the appsink by name and sync it to the clock
            GstElement* sink = gst_bin_get_by_name(GST_BIN(pipeline), app_sink_name);
            gst_base_sink_set_sync(GST_BASE_SINK(sink), true);
            // NOTE(review): ownership of pipeline/sink appears to transfer to
            // m_gst_util here — confirm use_pipeline takes the references.
            m_impl->m_gst_util.use_pipeline(pipeline, sink);
            m_impl->m_gst_util.set_pipeline_state(GST_STATE_READY);
#if !defined(KINSKI_ARM)
            // pixel-unpack buffers for async GPU upload (desktop GL only)
            m_impl->m_buffer_front = gl::Buffer(GL_PIXEL_UNPACK_BUFFER, GL_STREAM_COPY);
            m_impl->m_buffer_back = gl::Buffer(GL_PIXEL_UNPACK_BUFFER, GL_STREAM_COPY);
#endif
            m_impl->m_gst_util.set_pipeline_state(GST_STATE_PLAYING);
        }
    }
}
// Build a pipeline from a description and append our own appsink named
// "sink" for frame grabbing.
// fix: parse errors from gst_parse_launch were silently ignored, which
// would dereference a NULL gstPipeline below; also free the duplicated
// pipeline string and the GError.
bool ofGstUtils::setPipeline(string pipeline, int bpp, bool isStream){
	this->bpp = bpp;
	bHavePixelsChanged = false;
	bIsStream = isStream;

	gstData.loop = g_main_loop_new (NULL, FALSE);

	gchar* pipeline_string = g_strdup((pipeline + " ! appsink name=sink ").c_str()); // caps=video/x-raw-rgb
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);
	g_free(pipeline_string);
	if(error!=NULL){
		ofLog(OF_LOG_ERROR,"couldnt create pipeline: " + string(error->message));
		g_error_free(error);
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);

	return startPipeline();
}
/* GObject property setter: forwards the sync/async booleans straight to
 * the underlying video sink element. */
static void
empathy_video_widget_set_property (GObject *object,
  guint property_id, const GValue *value, GParamSpec *pspec)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);

  switch (property_id)
    {
      case PROP_SYNC:
        gst_base_sink_set_sync (GST_BASE_SINK (priv->sink),
            g_value_get_boolean (value));
        break;
      case PROP_ASYNC:
        gst_base_sink_set_async_enabled (GST_BASE_SINK (priv->sink),
            g_value_get_boolean (value));
        break;
      default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
    }
}
// Build a pipeline from a description and append our own appsink named
// "sink" with caps derived from the requested bits-per-pixel.
// fix: the GError and the duplicated pipeline string were leaked.
bool ofGstUtils::setPipeline(string pipeline, int bpp, bool isStream, int w, int h){
	this->bpp = bpp;
	bHavePixelsChanged = false;
	bIsStream = isStream;

	gstData.loop = g_main_loop_new (NULL, FALSE);

	if(w!=-1 && h!=-1){
		width=w;
		height=h;
	}

	// pick appsink caps matching the requested bit depth
	string caps;
	if(bpp==8)
		caps="video/x-raw-gray, depth=8, bpp=8";
	else if(bpp==32)
		caps="video/x-raw-rgb, depth=32, bpp=32";
	else
		caps="video/x-raw-rgb, depth=24, bpp=24";

	gchar* pipeline_string = g_strdup((pipeline + " ! appsink name=sink caps=\"" + caps + "\"").c_str()); // caps=video/x-raw-rgb
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);
	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);
	g_free(pipeline_string);
	if(error!=NULL){
		ofLog(OF_LOG_ERROR,"couldnt create pipeline: " + string(error->message));
		g_error_free(error);
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);

	return startPipeline();
}
// ----------------------------------------------------------------------------
// Handle the "pad-added" message
//
// Called for each stream pad that uridecodebin exposes.  Audio pads get a
// per-stream context with an audioconvert + appsink chain attached; all
// other pads are ignored.  NOTE(review): the early-return error paths below
// leave the partially-built context in mStreams without tearing down any
// elements already created — presumably cleaned up with the pipeline; verify.
void GStreamerImportFileHandle::OnPadAdded(GstPad *pad)
{
   // Retrieve the stream caps...skip stream if unavailable
   GstCaps *caps = gst_pad_get_current_caps(pad);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve stream caps"));
      return;
   }

   // Get the caps structure...no need to release
   GstStructure *str = gst_caps_get_structure(caps, 0);
   if (!str)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve caps structure"));
      gst_caps_unref(caps);
      return;
   }

   // Only accept audio streams...no need to release
   const gchar *name = gst_structure_get_name(str);
   if (!g_strrstr(name, "audio"))
   {
      WARN(mPipeline, ("OnPadAdded: bypassing '%s' stream", name));
      gst_caps_unref(caps);
      return;
   }

   // Allocate a new stream context
   GStreamContext *c = g_new0(GStreamContext, 1);
   if (!c)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate stream context"));
      gst_caps_unref(caps);
      return;
   }

   // Set initial state
   c->mUse = true;

   // Always add it to the context list to keep the number of contexts
   // in sync with the number of streams
   g_mutex_lock(&mStreamsLock);
   g_ptr_array_add(mStreams, c);
   g_mutex_unlock(&mStreamsLock);

   // Need pointer to context during pad removal (pad-remove signal)
   SETCTX(pad, c);

   // Save the stream's start time and duration
   gst_pad_query_position(pad, GST_FORMAT_TIME, &c->mPosition);
   gst_pad_query_duration(pad, GST_FORMAT_TIME, &c->mDuration);

   // Retrieve the number of channels and validate
   gint channels = -1;
   gst_structure_get_int(str, "channels", &channels);
   if (channels <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: channel count is invalid %d", channels));
      gst_caps_unref(caps);
      return;
   }
   c->mNumChannels = channels;

   // Retrieve the sample rate and validate
   gint rate = -1;
   gst_structure_get_int(str, "rate", &rate);
   if (rate <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: sample rate is invalid %d", rate));
      gst_caps_unref(caps);
      return;
   }
   c->mSampleRate = (double) rate;

   // Remember the stream's media type (owned by the context)
   c->mType = g_strdup(name);
   if (c->mType == NULL)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate audio type"));
      gst_caps_unref(caps);
      return;
   }

   // Done with capabilities
   gst_caps_unref(caps);

   // Create audioconvert element
   c->mConv = gst_element_factory_make("audioconvert", NULL);
   if (!c->mConv)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create audioconvert element"));
      return;
   }

   // Create appsink element
   c->mSink = gst_element_factory_make("appsink", NULL);
   if (!c->mSink)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create appsink element"));
      return;
   }
   SETCTX(c->mSink, c);

   // Set the appsink callbacks and add the context pointer
   gst_app_sink_set_callbacks(GST_APP_SINK(c->mSink), &AppSinkCallbacks,
                              this, NULL);

   // Set the capabilities that we desire
   caps = gst_static_caps_get(&supportedCaps);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create static caps"));
      return;
   }
   gst_app_sink_set_caps(GST_APP_SINK(c->mSink), caps);
   gst_caps_unref(caps);

   // Do not sync to the clock...process as quickly as possible
   gst_base_sink_set_sync(GST_BASE_SINK(c->mSink), FALSE);

   // Don't drop buffers...allow queue to build unfettered
   gst_app_sink_set_drop(GST_APP_SINK(c->mSink), FALSE);

   // Add both elements to the pipeline
   gst_bin_add_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

   // Link them together
   if (!gst_element_link(c->mConv, c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: failed to link autioconvert and appsink"));
      return;
   }

   // Link the audiconvert sink pad to the src pad
   GstPadLinkReturn ret = GST_PAD_LINK_OK;
   GstPad *convsink = gst_element_get_static_pad(c->mConv, "sink");
   if (convsink)
   {
      ret = gst_pad_link(pad, convsink);
      gst_object_unref(convsink);
   }
   if (!convsink || ret != GST_PAD_LINK_OK)
   {
      WARN(mPipeline, ("OnPadAdded: failed to link uridecodebin to audioconvert - %d", ret));
      return;
   }

   // Synchronize audioconvert state with parent
   if (!gst_element_sync_state_with_parent(c->mConv))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync audioconvert state"));
      return;
   }

   // Synchronize appsink state with parent
   if (!gst_element_sync_state_with_parent(c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync appaink state"));
      return;
   }

   return;
}
/* Instance init: default to SHA-1 checksums and no clock sync. */
static void
gst_checksum_sink_init (GstChecksumSink * checksumsink)
{
  checksumsink->hash = G_CHECKSUM_SHA1;
  gst_base_sink_set_sync (GST_BASE_SINK (checksumsink), FALSE);
}
/* Instance init: render frames as they arrive, without clock sync. */
static void gst_vlc_video_sink_init( GstVlcVideoSink *p_vlc_video_sink )
{
    GstBaseSink *p_bsink = GST_BASE_SINK( p_vlc_video_sink );

    gst_base_sink_set_sync( p_bsink, FALSE );
}
// Load a movie through playbin2 with an RGB appsink attached as the
// video sink.  Paths without a scheme become file:// URIs; other
// schemes are treated as streams.  Optionally wraps the appsink in a
// queue-fronted bin so frame handling runs in its own thread.
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	// decide stream vs local file from the URI scheme
	if( name.find( "file://",0 ) != string::npos){
		bIsStream = false;
	}else if( name.find( "://",0 ) == string::npos){
		name = "file://"+ofToDataPath(name,true);
		bIsStream = false;
	}else{
		bIsStream = true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	ofGstUtils::startGstMainLoop();

	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb
	// NOTE(review): sync is enabled here despite the original comment
	// claiming "without sync to clock" — playback pacing relies on it,
	// with max-buffers/drop bounding the queue; confirm intended.
	GstElement * gstSink = gst_element_factory_make("appsink", "app_sink");
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
	gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink), 8);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
	gst_base_sink_set_max_lateness (GST_BASE_SINK(gstSink), -1);

	// map the requested oF pixel format onto caps mime/bpp
	int bpp;
	string mime;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		mime = "video/x-raw-gray";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		mime = "video/x-raw-rgb";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
	case OF_PIXELS_BGRA:
		mime = "video/x-raw-rgb";
		bpp = 32;
		break;
	default:
		mime = "video/x-raw-rgb";
		bpp=24;
		break;
	}
	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	if(threadAppSink){
		// wrap the appsink in a bin with a leaky-less queue so the
		// appsink callbacks run decoupled from the streaming thread
		GstElement * appQueue = gst_element_factory_make("queue","appsink_queue");
		g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL);
		GstElement* appBin = gst_bin_new("app_bin");
		gst_bin_add(GST_BIN(appBin), appQueue);
		// expose the queue's sink pad as the bin's ghost pad
		GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink");
		GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad);
		gst_object_unref(appQueuePad);
		gst_element_add_pad(appBin, ghostPad);
		gst_bin_add_many(GST_BIN(appBin), gstSink, NULL);
		gst_element_link_many(appQueue, gstSink, NULL);
		g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL);
	}else{
		g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);
	}
#ifdef TARGET_WIN32
	GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);
#endif
	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	// local files can allocate buffers now; streams wait for caps
	if(!bIsStream) return allocate(bpp);
	else return true;
}
// Load a movie through playbin2 with an RGB appsink (named "sink") as
// the video sink.  Paths without a scheme become file:// URIs; other
// schemes are treated as streams.
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	// decide stream vs local file from the URI scheme
	if( name.find( "://",0 ) == string::npos){
		name = "file://"+ofToDataPath(name,true);
		bIsStream = false;
	}else{
		bIsStream = true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	//GMainLoop* loop = g_main_loop_new (NULL, FALSE);

	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	GstElement * gstSink = gst_element_factory_make("appsink", "sink");

	// map the requested oF pixel format onto caps mime/bpp
	int bpp;
	string mime;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		mime = "video/x-raw-gray";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		mime = "video/x-raw-rgb";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
	case OF_PIXELS_BGRA:
		mime = "video/x-raw-rgb";
		bpp = 32;
		break;
	default:
		mime = "video/x-raw-rgb";
		bpp=24;
		break;
	}
	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
			"bpp", G_TYPE_INT, bpp,
			//"depth", G_TYPE_INT, 24,
			/*"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,*/
			NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);
	// process frames as fast as they decode; drop when the app lags
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), false);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
	g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);

	GstElement *audioSink = gst_element_factory_make("gconfaudiosink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	// local files can allocate buffers now; streams wait for caps
	if(!bIsStream) return allocate();
	else return true;
}
// Build the capture pipeline from the configured gst-launch string,
// append an appsink producing RGB8 or MONO8 frames, link it to the
// pipeline's unlinked source pad, and leave the pipeline PAUSED.
// Returns false on any parse/link/state failure.
bool GSCam::init_stream()
{
  if(!gst_is_initialized()) {
    // Initialize gstreamer pipeline
    ROS_DEBUG_STREAM( "Initializing gstreamer..." );
    gst_init(0,0);
  }

  ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() );

  GError *error = 0; // Assignment to zero is a gst requirement

  pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error);
  if (pipeline_ == NULL) {
    ROS_FATAL_STREAM( error->message );
    return false;
  }

  // Create RGB sink
  sink_ = gst_element_factory_make("appsink",NULL);
  GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ?
      gst_caps_new_simple("video/x-raw-rgb", NULL) :
      gst_caps_new_simple("video/x-raw-gray", NULL);
  gst_app_sink_set_caps(GST_APP_SINK(sink_), caps);
  gst_caps_unref(caps);

  // Set whether the sink should sync
  // Sometimes setting this to true can cause a large number of frames to be
  // dropped
  gst_base_sink_set_sync(
      GST_BASE_SINK(sink_),
      (sync_sink_) ? TRUE : FALSE);

  if(GST_IS_PIPELINE(pipeline_)) {
    // the launch string produced a full pipeline: find its dangling
    // source pad and link our sink onto it
    GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC);
    g_assert(outpad);

    GstElement *outelement = gst_pad_get_parent_element(outpad);
    g_assert(outelement);
    gst_object_unref(outpad);

    if(!gst_bin_add(GST_BIN(pipeline_), sink_)) {
      ROS_FATAL("gst_bin_add() failed");
      gst_object_unref(outelement);
      gst_object_unref(pipeline_);
      return false;
    }

    if(!gst_element_link(outelement, sink_)) {
      ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
      gst_object_unref(outelement);
      gst_object_unref(pipeline_);
      return false;
    }

    gst_object_unref(outelement);
  } else {
    // the launch string produced a single element: wrap it and our sink
    // in a fresh pipeline and link them directly
    GstElement* launchpipe = pipeline_;
    pipeline_ = gst_pipeline_new(NULL);
    g_assert(pipeline_);

    gst_object_unparent(GST_OBJECT(launchpipe));

    gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL);

    if(!gst_element_link(launchpipe, sink_)) {
      ROS_FATAL("GStreamer: cannot link launchpipe -> sink");
      gst_object_unref(pipeline_);
      return false;
    }
  }

  gst_element_set_state(pipeline_, GST_STATE_PAUSED);

  // block until the state change completes (or fails)
  if (gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
    ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration.");
    return false;
  } else {
    ROS_DEBUG_STREAM("Stream is PAUSED.");
  }

  // Create ROS camera interface
  camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1);

  return true;
}
bool ofGstUtils::initGrabber(int w, int h, int framerate){ bpp = 3; if(!camData.bInited) get_video_devices(camData); if(camData.webcam_devices.size()==0){ ofLog(OF_LOG_ERROR,"ofGstUtils: no devices found exiting without initializing"); return false; } ofGstVideoFormat & format = selectFormat(w, h, framerate); ofLog(OF_LOG_NOTICE,"ofGstUtils: selected format: " + ofToString(format.width) + "x" + ofToString(format.height) + " " + format.mimetype + " framerate: " + ofToString(format.choosen_framerate.numerator) + "/" + ofToString(format.choosen_framerate.denominator)); bIsCamera = true; bHavePixelsChanged = false; width = w; height = h; gstData.loop = g_main_loop_new (NULL, FALSE); const char * decodebin = ""; if(format.mimetype != "video/x-raw-yuv" && format.mimetype != "video/x-raw-rgb") decodebin = "decodebin !"; const char * scale = ""; if( format.mimetype != "video/x-raw-rgb" ) scale = "ffmpegcolorspace !"; if( w!=format.width || h!=format.height ) scale = "ffvideoscale method=2 !"; string format_str_pipeline = string("%s name=video_source device=%s ! ") + "%s,width=%d,height=%d,framerate=%d/%d ! " + "%s %s " + "video/x-raw-rgb, width=%d, height=%d, depth=24 ! appsink name=sink caps=video/x-raw-rgb"; gchar* pipeline_string =g_strdup_printf ( format_str_pipeline.c_str(), camData.webcam_devices[deviceID].gstreamer_src.c_str(), camData.webcam_devices[deviceID].video_device.c_str(), format.mimetype.c_str(), format.width, format.height, format.choosen_framerate.numerator, format.choosen_framerate.denominator, decodebin, scale, w,h); ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string); GError * error = NULL; gstPipeline = gst_parse_launch (pipeline_string, &error); gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink"); gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true); if(startPipeline()){ play(); return true; }else{ return false; } }