QTime Player::position() { if (pipeline) { gint64 value=0; #ifdef GST_API_VERSION_1 if(gst_element_query_position(pipeline, GST_FORMAT_TIME, &value)) { #else GstFormat fmt = GST_FORMAT_TIME; if(gst_element_query_position(pipeline, &fmt, &value)) { #endif m_position = static_cast<uint>( ( value / GST_MSECOND ) ); return QTime(0,0).addMSecs( m_position ); // nanosec -> msec } return QTime(0,0).addMSecs( m_position ); // nanosec -> msec } return QTime(0,0); } QTime Player::length() { gint64 value=0; if ( m_length == 0 && pipeline){ #ifdef GST_API_VERSION_1 if(gst_element_query_duration(pipeline, GST_FORMAT_TIME, &value)) { #else GstFormat fmt = GST_FORMAT_TIME; if(gst_element_query_duration(pipeline, &fmt, &value)) { #endif m_length = static_cast<uint>( ( value / GST_MSECOND )); } qDebug() << Q_FUNC_INFO <<": Can not get duration"; } return QTime(0,0).addMSecs( m_length ); // nanosec -> msec } double Player::volume() { gdouble vol = 0; GstElement *volume = gst_bin_get_by_name(GST_BIN(pipeline), "volume"); g_object_get (G_OBJECT(volume), "volume", &vol, NULL); gst_object_unref(volume); return vol; } void Player::setVolume(double v) { gdouble vol = 1.00 * v; //gdouble vol = 0.01 * v; GstElement *volume = gst_bin_get_by_name(GST_BIN(pipeline), "volume"); g_object_set (G_OBJECT(volume), "volume", vol, NULL); gst_object_unref(volume); }
// Total stream duration in milliseconds, or -1 when the decoder cannot
// report it.
qint64 SourceObject::GetTotalTime () const
{
	gint64 total = 0;
	auto fmt = GST_FORMAT_TIME;
#if GST_VERSION_MAJOR >= 1
	const gboolean ok = gst_element_query_duration (GST_ELEMENT (Dec_), fmt, &total);
#else
	const gboolean ok = gst_element_query_duration (GST_ELEMENT (Dec_), &fmt, &total);
#endif
	if (!ok)
		return -1;
	return total / GST_MSECOND;
}
// Milliseconds left to play: (duration - last known position), or -1 when
// the duration query fails.
qint64 SourceObject::GetRemainingTime () const
{
	gint64 total = 0;
	auto fmt = GST_FORMAT_TIME;
#if GST_VERSION_MAJOR >= 1
	const gboolean ok = gst_element_query_duration (GST_ELEMENT (Dec_), fmt, &total);
#else
	const gboolean ok = gst_element_query_duration (GST_ELEMENT (Dec_), &fmt, &total);
#endif
	return ok ? (total - LastCurrentTime_) / GST_MSECOND : -1;
}
// Queries the pipeline duration (nanoseconds) into the durationNanos member
// and returns it. On failure durationNanos keeps its previous value and a
// warning is logged.
ulong ofGstUtils::getDurationNanos(){
	GstFormat format = GST_FORMAT_TIME;
#if GST_VERSION_MAJOR==0
	// GStreamer 0.10 takes the format by pointer.
	if(!gst_element_query_duration(getPipeline(),&format,&durationNanos))
		gLogManager.log("GStreamer: cannot query time duration",ELL_WARNING);
#else
	if(!gst_element_query_duration(getPipeline(),format,&durationNanos))
		// BUGFIX: pass ELL_WARNING here too, matching the 0.10 branch.
		gLogManager.log("GStreamer: cannot query time duration",ELL_WARNING);
#endif
	return durationNanos;
}
// Refreshes durationNanos via a GStreamer time-duration query and returns
// it; the cached value is returned unchanged when the query fails.
int64_t ofGstUtils::getDurationNanos(){
	GstFormat fmt = GST_FORMAT_TIME;
#if GST_VERSION_MAJOR==0
	const bool queried = gst_element_query_duration(getPipeline(),&fmt,&durationNanos);
#else
	const bool queried = gst_element_query_duration(getPipeline(),fmt,&durationNanos);
#endif
	if(!queried){
		ofLogWarning("ofGstUtils") << "getDurationNanos(): couldn't query time duration";
	}
	return durationNanos;
}
/* Refreshes the live-preview progress bar.
 * Queries the preview pipeline's duration and position (nanoseconds),
 * caches both in milliseconds on ud->preview, and moves the progress
 * slider to the corresponding percentage. No-op unless a live preview is
 * actively playing and no seek is in flight. */
void
ghb_live_preview_progress(signal_user_data_t *ud)
{
#if defined(_ENABLE_GST)
    GstFormat fmt = GST_FORMAT_TIME;
    gint64 len = -1, pos = -1;

    /* Bail out when live preview is disabled, not playing, or mid-seek. */
    if (!ud->preview->live_enabled)
        return;
    if (ud->preview->state != PREVIEW_STATE_LIVE || ud->preview->seek_lock)
        return;

    ud->preview->progress_lock = TRUE;
#if GST_CHECK_VERSION(1, 0, 0)
    if (gst_element_query_duration(ud->preview->play, fmt, &len))
#else
    /* GStreamer 0.10 passes the format by pointer. */
    if (gst_element_query_duration(ud->preview->play, &fmt, &len))
#endif
    {
        if (len != -1 && fmt == GST_FORMAT_TIME)
        {
            ud->preview->len = len / GST_MSECOND; /* ns -> ms */
        }
    }
#if GST_CHECK_VERSION(1, 0, 0)
    if (gst_element_query_position(ud->preview->play, fmt, &pos))
#else
    if (gst_element_query_position(ud->preview->play, &fmt, &pos))
#endif
    {
        if (pos != -1 && fmt == GST_FORMAT_TIME)
        {
            ud->preview->pos = pos / GST_MSECOND; /* ns -> ms */
        }
    }
    if (ud->preview->len > 0)
    {
        GtkRange *progress;
        gdouble percent;

        percent = (gdouble)ud->preview->pos * 100 / ud->preview->len;
        progress = GTK_RANGE(GHB_WIDGET(ud->builder, "live_preview_progress"));
        gtk_range_set_value(progress, percent);
    }
    /* NOTE(review): progress_lock is presumably cleared by
     * unlock_progress_cb on the idle loop — confirm in that callback. */
    g_idle_add((GSourceFunc)unlock_progress_cb, ud);
#endif
}
/* Periodic UI refresh: queries the current position and the clip duration
 * from the pipeline and forwards both to the application layer in
 * milliseconds. Always returns TRUE so the timer keeps running. */
static gboolean
refresh_ui (CustomData *data)
{
  GstFormat time_fmt = GST_FORMAT_TIME;
  gint64 current = -1;
  gint64 pos;

  /* Nothing to report unless a working pipeline is at least PAUSED. */
  if (data == NULL || data->pipeline == NULL || data->state < GST_STATE_PAUSED)
    return TRUE;

  /* Lazily query the total duration the first time it is needed. */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->pipeline, &time_fmt, &data->duration)) {
      GST_WARNING ("Could not query current duration (normal for still pictures)");
      data->duration = 0;
    }
  }

  if (!gst_element_query_position (data->pipeline, &time_fmt, &pos)) {
    GST_WARNING ("Could not query current position (normal for still pictures)");
    pos = 0;
  }

  /* Java expects milliseconds while GStreamer reports nanoseconds. */
  set_current_ui_position (pos / GST_MSECOND, data->duration / GST_MSECOND, data);
  return TRUE;
}
/* Reports overall rendering progress in [0..1]: the fraction of finished
 * queue items plus the fractional progress (in bytes) of the item currently
 * being played. */
void
gst_nle_source_query_progress (GstNleSource * nlesrc, gfloat * progress)
{
  gint64 position = 0, duration = 0;
  gfloat tmp = 0.0;
  guint num_items = g_list_length (nlesrc->queue);

  /* BUGFIX: an empty queue previously caused a division by zero, producing
   * NaN/inf progress values. */
  if (num_items == 0) {
    *progress = 0.0;
    return;
  }

  *progress = (gfloat) nlesrc->index / (gfloat) num_items;

  if (nlesrc->source) {
    GstFormat format = GST_FORMAT_BYTES;

    /* The duration query is expensive / unstable, so cache its result. */
    if (nlesrc->cached_duration > 0)
      duration = nlesrc->cached_duration;
    else {
      gst_element_query_duration (nlesrc->source, &format, &duration);
      nlesrc->cached_duration = duration;
    }
    if (duration > 0) {
      gst_element_query_position (nlesrc->source, &format, &position);
      /* Scale the intra-item fraction down to one queue slot. */
      tmp = (gfloat) position / (gfloat) duration;
      tmp *= 1.0 / (gfloat) num_items;
    }
  }
  *progress += tmp;
  GST_LOG ("progress(%d): %" G_GINT64_FORMAT "/%" G_GINT64_FORMAT "(%g%%)",
      nlesrc->index, position, duration, *progress * 100);
}
/* Timer callback: formats the current playback position into the global
 * audio_info string. Returns TRUE so the timer keeps firing. */
static gboolean
cb_print_position (GstElement *pipeline)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 position, length;

  /* Duration is only queried when the position query succeeds. */
  if (gst_element_query_position (pipeline, &fmt, &position)
      && gst_element_query_duration (pipeline, &fmt, &length)) {
    char text[100];
    snprintf (text, sizeof (text), "%" GST_TIME_FORMAT "", GST_TIME_ARGS (position));
    audio_info = std::string (text);
  }

  /* call me again */
  return TRUE;
}
// Stream duration in nanoseconds; returns 0 when the query fails.
qint64 GstEnginePipeline::length() const {
  gint64 duration = 0;
  GstFormat format = GST_FORMAT_TIME;
  gst_element_query_duration(pipeline_, &format, &duration);
  return duration;
}
/* Periodic GUI refresh: sets the slider range to the clip duration (queried
 * lazily) and moves the slider to the current position, both in seconds.
 * Always returns TRUE so the timeout stays installed. */
static gboolean refresh_ui (CustomData *data) {
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 current = -1;

  /* We do not want to update anything unless we are in the PAUSED or PLAYING states */
  if (data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->playbin, fmt, &data->duration)) {
      g_printerr ("Could not query current duration.\n");
    } else {
      /* Set the range of the slider to the clip duration, in SECONDS */
      gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
    }
  }

  /* BUGFIX: "&current" had been mangled into "¤t" (a bad decode of the
   * HTML entity "&curren;"), which does not compile. */
  if (gst_element_query_position (data->playbin, fmt, &current)) {
    /* Block the "value-changed" signal, so the slider_cb function is not called
     * (which would trigger a seek the user has not requested) */
    g_signal_handler_block (data->slider, data->slider_update_signal_id);
    /* Set the position of the slider to the current pipeline position, in SECONDS */
    gtk_range_set_value (GTK_RANGE (data->slider), (gdouble)current / GST_SECOND);
    /* Re-enable the signal */
    g_signal_handler_unblock (data->slider, data->slider_update_signal_id);
  }
  return TRUE;
}
/* Prints "position / duration" in GST_TIME_FORMAT, substituting a dashed
 * placeholder for whichever query fails. Returns TRUE to stay scheduled. */
static gboolean
update_scale (GstElement * element)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 pos = -1;
  gint64 dur = -1;
  gchar pos_text[32], dur_text[32];

  if (gst_element_query_position (element, &fmt, &pos) && pos != -1)
    g_snprintf (pos_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
  else
    g_snprintf (pos_text, 32, "-:--:--.---------");

  if (gst_element_query_duration (element, &fmt, &dur) && dur != -1)
    g_snprintf (dur_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
  else
    g_snprintf (dur_text, 32, "-:--:--.---------");

  g_print ("%s / %s\n", pos_text, dur_text);
  return TRUE;
}
/* Updates the position slider and the position/duration labels from the
 * playbin's current position. The duration is queried lazily the first time
 * a position is available and shown in the duration label.
 * BUGFIX: "&current_value" had been mangled into "¤t_value" (a bad
 * decode of the HTML entity "&curren;"), which does not compile. */
static void
update_current_position_bar (GthMediaViewerPage *self)
{
	GstFormat format;
	gint64    current_value = 0;

	format = GST_FORMAT_TIME;
	if (gst_element_query_position (self->priv->playbin, format, &current_value)) {
		char *s;

		/* Lazily query the duration and publish it once known. */
		if (self->priv->duration <= 0) {
			gst_element_query_duration (self->priv->playbin, format, &self->priv->duration);
			s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (self->priv->duration));
			gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_duration")), s);
			g_free (s);
		}

		/* Move the adjustment without triggering a user-seek callback. */
		g_signal_handlers_block_by_func (GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);
		gtk_adjustment_set_value (GTK_ADJUSTMENT (GET_WIDGET ("position_adjustment")),
					  (self->priv->duration > 0) ? ((double) current_value / self->priv->duration) * 100.0 : 0.0);
		g_signal_handlers_unblock_by_func (GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);

		s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (current_value));
		gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_position")), s);
		g_free (s);
	}
}
/* Periodic status printer: shows "position / duration" on one console line.
 * Skipped while buffering. Returns TRUE to keep the timeout installed. */
static gboolean
play_timeout (gpointer user_data)
{
  GstPlay *play = user_data;
  gint64 position = -1, duration = -1;

  if (play->buffering)
    return TRUE;

  gst_element_query_position (play->playbin, GST_FORMAT_TIME, &position);
  gst_element_query_duration (play->playbin, GST_FORMAT_TIME, &duration);

  if (position >= 0 && duration > 0) {
    gchar dur_text[32], pos_text[32];

    /* FIXME: pretty print in nicer format */
    g_snprintf (pos_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (position));
    pos_text[9] = '\0';  /* truncate after tenths of a second */
    g_snprintf (dur_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
    dur_text[9] = '\0';
    g_print ("%s / %s\r", pos_text, dur_text);
  }
  return TRUE;
}
// Queries the pipeline duration into m_duration (nanoseconds) and returns it.
// BUGFIX: the query result was ignored, so a failure silently returned a
// stale value; now it is logged and the previous m_duration is kept.
qint64 QDeclarativeVideoEditor::getDuration()
{
    GstFormat format = GST_FORMAT_TIME;
    if (!gst_element_query_duration (GST_ELEMENT (m_pipeline), &format, &m_duration)) {
        qDebug() << "Duration query failed, keeping previous duration";
    }
    qDebug() << "Got duration :" << m_duration;
    return m_duration;
}
/* Periodic status line: "position / duration [Paused]". Does nothing while
 * buffering. Returns TRUE so the timeout keeps firing. */
static gboolean
play_timeout (gpointer user_data)
{
  GstPlay *play = user_data;
  gint64 position = -1, duration = -1;
  gchar state_text[64] = { 0, };

  if (play->buffering)
    return TRUE;

  gst_element_query_position (play->playbin, GST_FORMAT_TIME, &position);
  gst_element_query_duration (play->playbin, GST_FORMAT_TIME, &duration);

  /* Show "Paused" when paused, otherwise blank out the status column. */
  if (play->desired_state == GST_STATE_PAUSED)
    g_snprintf (state_text, sizeof (state_text), "Paused");
  else
    memset (state_text, ' ', sizeof (state_text) - 1);

  if (position >= 0 && duration > 0) {
    gchar dur_text[32], pos_text[32];

    /* FIXME: pretty print in nicer format */
    g_snprintf (pos_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (position));
    pos_text[9] = '\0';  /* truncate after tenths of a second */
    g_snprintf (dur_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
    dur_text[9] = '\0';
    g_print ("%s / %s %s\r", pos_text, dur_text, state_text);
  }
  return TRUE;
}
/* Seeks the pipeline to the given fraction [0..1] of its total duration
 * using a flushing seek; accuracy mode follows info->accurate. */
static void
seek_to (AppInfo * info, gdouble percent)
{
  GstSeekFlags flags;
  gint64 target, duration = -1;

  if (!gst_element_query_duration (info->pipe, GST_FORMAT_TIME, &duration)
      || duration <= 0) {
    g_printerr ("Could not query duration\n");
    return;
  }

  target = gst_gdouble_to_guint64 (gst_guint64_to_gdouble (duration) * percent);

  g_print ("Seeking to %" GST_TIME_FORMAT ", accurate: %d\n",
      GST_TIME_ARGS (target), info->accurate);

  /* Accurate seeks are exact but slower; key-unit seeks snap to keyframes. */
  flags = GST_SEEK_FLAG_FLUSH;
  flags |= info->accurate ? GST_SEEK_FLAG_ACCURATE : GST_SEEK_FLAG_KEY_UNIT;

  if (!gst_element_seek_simple (info->pipe, GST_FORMAT_TIME, flags, target)) {
    g_printerr ("Seek failed.\n");
    return;
  }
}
/* Periodic progress tick for the normalize job: converts the pipeline's
 * position/duration into an overall progress fraction across all tracks
 * (already-finished tracks count as whole units). Always returns
 * BRASERO_BURN_OK. */
static BraseroBurnResult
brasero_normalize_clock_tick (BraseroJob *job)
{
	/* BUGFIX: these gint64 values were initialized with the double
	 * literal 0.0 (implicit conversion); use integer literals. */
	gint64 position = 0;
	gint64 duration = 0;
	BraseroNormalizePrivate *priv;

	priv = BRASERO_NORMALIZE_PRIVATE (job);

	gst_element_query_duration (priv->pipeline, GST_FORMAT_TIME, &duration);
	gst_element_query_position (priv->pipeline, GST_FORMAT_TIME, &position);

	if (duration > 0) {
		GSList *tracks;
		gdouble progress;

		brasero_job_get_tracks (job, &tracks);
		progress = (gdouble) position / (gdouble) duration;

		if (tracks) {
			gdouble num_tracks;

			/* Fold the current track's fraction into the overall
			 * multi-track progress. */
			num_tracks = g_slist_length (tracks);
			progress = (gdouble) (num_tracks - 1.0 - (gdouble) g_slist_length (priv->tracks) + progress) / (gdouble) num_tracks;
			brasero_job_set_progress (job, progress);
		}
	}

	return BRASERO_BURN_OK;
}
/*
 * Start (or restart) playback. End-of-stream is detected by comparing the
 * current position with the stream length, rather than via a second bus or
 * a struct passed to bus_call — see the original discussion: at very high
 * playback speeds (e.g. *64) this comparison can miss the end, in which
 * case the stream must be rewound manually with the stop button.
 */
static void playIt (GtkWidget *widget, GstElement* pipeline)
{
    /* Rewind the stream if we are already at its end.
     * BUGFIX: the query results were ignored and both variables were
     * uninitialized, so a failed query compared garbage values. */
    gint64 streamPosition = 0, streamLength = 0;
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_position (pipeline, &format, &streamPosition)
        && gst_element_query_duration (pipeline, &format, &streamLength)
        && streamPosition == streamLength)
        stopIt (widget, pipeline);

    /* Set the stream to the playing state. */
    g_print ("Playing Video\n");
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
/* Determines the track duration in nanoseconds. For MP3 size pipelines the
 * position after reading the file to the end is the reliable value; any
 * other file type falls back to a plain duration query. Emits a job error
 * when no duration can be obtained. */
static gint64
brasero_transcode_get_duration (BraseroTranscode *transcode)
{
	BraseroTranscodePrivate *priv = BRASERO_TRANSCODE_PRIVATE (transcode);
	gint64 duration = -1;

	/* MP3-specific path: reading them till the end and querying the
	 * position is the most reliable way to measure an mp3. */
	if (priv->mp3_size_pipeline)
		gst_element_query_position (priv->pipeline, GST_FORMAT_TIME, &duration);

	/* Generic fallback for any sort of file. */
	if (duration == -1 || duration == 0)
		gst_element_query_duration (priv->pipeline, GST_FORMAT_TIME, &duration);

	BRASERO_JOB_LOG (transcode, "got duration %" GST_TIME_FORMAT, GST_TIME_ARGS (duration));

	if (duration == -1 || duration == 0)
		brasero_job_error (BRASERO_JOB (transcode),
				   g_error_new (BRASERO_BURN_ERROR,
						BRASERO_BURN_ERROR_GENERAL, "%s",
						_("Error while getting duration")));
	return duration;
}
// Opens a media source and prepares the pipeline for playback.
// fname: file:// or http:// URL (mms:// is rejected with a warning dialog).
// start/length: track window; multiplied by 1e9/75, i.e. presumably
// CD-frame units (75 frames per second) converted to nanoseconds —
// TODO confirm with callers.
// Returns false only for unsupported protocols.
bool PlayerGst::open(QUrl fname, long start, long length)
{
	// Reset the pipeline before wiring up a new source.
	sync_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
	if(fname.toString().toLower().startsWith("file://")/*fname.toLocalFile().size()*/) {
		//g_object_set (G_OBJECT (l_src), "location", (const char*)fname.toLocalFile().toLocal8Bit(), NULL);
		setLink(1, fname);
	} else if(fname.toString().toLower().startsWith("http://")) {
		//g_object_set (G_OBJECT (http_src), "location", (const char*)fname.toString().toLocal8Bit(), NULL);
		setLink(2, fname);
	} else if(fname.toString().toLower().startsWith("mms://")) {
		QMessageBox::warning(0, "Error", "The mms protocol not supported now");
		return false;
	}
	//g_object_set (player, "uri", (const char*)fname.toString().toLocal8Bit(), NULL);
	// Pause so the pipeline prerolls and can report a duration below.
	sync_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
	//gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
	Gstart = start;
	Gstart *= 1000000000 / 75;	// units -> nanoseconds (1e9 / 75)
	Glength = length;
	Glength *= 1000000000 / 75;
	gint64 all=0;
	GstFormat fmt = GST_FORMAT_TIME;
	//while(!gst_element_query_duration(pipeline, &fmt, &all)) {} // don't do that again, bad boy
	if(gst_element_query_duration(pipeline, &fmt, &all)) {
		// No explicit length given: play from Gstart to the end of stream.
		if(!Glength) Glength = all - Gstart;
	} else {
		// Duration unknown: drop the window and play the whole stream.
		Gstart = Glength = 0;
	}
	//QMessageBox::information(0, "", QString("start = %1, length = %2, stream = %3").arg(QString::number((qlonglong)Gstart), QString::number((qlonglong)Glength), QString::number((qlonglong)all)));
	return true;
}
// Returns the media duration in microseconds. The playbin is queried first
// (nanoseconds, converted with GST_TIME_AS_USECONDS); if the decoder has
// already decoded past that (possibly estimated) value, the decoder's
// duration wins.
int64_t GStreamerReader::QueryDuration()
{
  gint64 duration = 0;
  GstFormat format = GST_FORMAT_TIME;

  if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration)) {
    // Only trust the result if the query actually answered in TIME format.
    if (format == GST_FORMAT_TIME) {
      LOG(PR_LOG_DEBUG, ("pipeline duration %" GST_TIME_FORMAT,
            GST_TIME_ARGS (duration)));
      duration = GST_TIME_AS_USECONDS (duration); // ns -> us
    }
  }

  {
    // Hold the decoder monitor while reading the decoder's media duration.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    int64_t media_duration = mDecoder->GetMediaDuration();
    if (media_duration != -1 && media_duration > duration) {
      // We decoded more than the reported duration (which could be estimated)
      LOG(PR_LOG_DEBUG, ("decoded duration > estimated duration"));
      duration = media_duration;
    }
  }

  return duration;
}
// Returns the decoder's stream duration in nanoseconds, or 0 when the
// query fails.
// BUGFIX: `duration` was uninitialized and the query result ignored, so a
// failed query returned stack garbage.
long AudioAbstracter::getDurationNanoseconds ()
{
  GstFormat format = GST_FORMAT_TIME;
  gint64 duration = 0;
  if (!gst_element_query_duration (p->decoder, &format, &duration))
    duration = 0;
  return (long)duration;
}
void GstPipe::query_position_and_length() { gint64 pos; if (gst_element_query_position(pipeline_, GST_FORMAT_TIME, &pos) && gst_element_query_duration(pipeline_, GST_FORMAT_TIME , &length_)) { // g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r", // GST_TIME_ARGS (pos), GST_TIME_ARGS (length_)); } }
// retrieves the duration of the media file guint64 gst_binding_get_duration (gstPlay *play) { if (!isValid (play)) return 0; gint64 duration; if(gst_element_query_duration (play->element, GST_FORMAT_TIME, &duration)) return duration / GST_MSECOND; return 0; }
/* Returns the pipeline duration in nanoseconds, or 0 when the query fails.
 * BUGFIX: `dur` was uninitialized and the query result ignored, so a failed
 * query returned stack garbage. */
gint64 gst_play_get_duration()
{
	GstFormat fmt = GST_FORMAT_TIME;
	gint64 dur = 0;
	if (!gst_element_query_duration(GST_ELEMENT(gst_play->pipeline), &fmt, &dur))
		dur = 0;
	return dur;
}
double AudioPlayerGnu::duration() const { GstFormat format = GST_FORMAT_TIME; gint64 cur; gboolean result = gst_element_query_duration(player, &format, &cur); if(!result || format != GST_FORMAT_TIME) return GST_CLOCK_TIME_NONE; return cur; }
// Refreshes the durationNanos member via a GStreamer 0.10 duration query
// and returns it; logs a warning and keeps the old value on failure.
guint64 ofGstUtils::getDurationNanos(){
	GstFormat fmt = GST_FORMAT_TIME;
	const bool queried = gst_element_query_duration(getPipeline(),&fmt,&durationNanos);
	if(!queried){
		ofLog(OF_LOG_WARNING,"GStreamer: cannot query time duration");
	}
	return durationNanos;
}
void GstPlayer::queryDuration(){ gint64 len; if(gst_element_query_duration(pipeline,&formatTime,&len)) duration = len; int min = duration/GST_SECOND/60; int sec = duration/GST_SECOND - min*60; QLOG_TRACE() << this << "duration:" << min << "m" << sec << "s"; }
// Bus handler for asynchronous pipeline messages.
// EOS: flips the "started" property to "false" (off-thread via th_).
// STATE_CHANGED: on the first state change with a known duration, lazily
// creates the "track_position" property and a 500 ms task that polls the
// pipeline position; also services any pending seek request under
// seek_mutex_. Always lets the message through (GST_BUS_PASS).
GstBusSyncReply AVPlayer::bus_async(GstMessage* msg) {
  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS: {
      // Run off this thread; setting the property may re-enter the player.
      th_->run_async([this]() { pmanage<MPtr(&PContainer::set_str_str)>("started", "false"); });
      break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
      // position_id_ == 0 means the position property was not created yet.
      if (position_id_ == 0) {
        gst_element_query_duration(
            gst_pipeline_->get_pipeline(), GST_FORMAT_TIME, &track_duration_);
        if (track_duration_ != 0) {
          position_id_ = pmanage<MPtr(&PContainer::make_int)>(
              "track_position",
              // Setter: record the requested position and pause; the seek
              // itself is performed below on the next state change.
              [this](const int& pos) {
                std::lock_guard<std::mutex> lock(seek_mutex_);
                position_ = pos;
                seek_called_ = true;
                gst_pipeline_->play(false);
                return true;
              },
              [this]() { return position_; },
              "Track position",
              "Current position of the track",
              0,
              0,
              track_duration_ / GST_SECOND);  // bounds in seconds
          // Poll the pipeline position twice a second and notify observers.
          position_task_ = std::make_unique<PeriodicTask<>>(
              [this]() {
                gint64 position;
                gst_element_query_position(
                    gst_pipeline_->get_pipeline(), GST_FORMAT_TIME, &position);
                position_ = static_cast<int>(position / GST_SECOND);
                pmanage<MPtr(&PContainer::notify)>(position_id_);
              },
              std::chrono::milliseconds(500));
        }
      }
      // Perform a seek requested by the property setter, then resume.
      std::lock_guard<std::mutex> lock(seek_mutex_);
      if (seek_called_) {
        seek_called_ = false;
        gst_element_seek_simple(gst_pipeline_->get_pipeline(),
                                GST_FORMAT_TIME,
                                GST_SEEK_FLAG_FLUSH,
                                position_ * GST_SECOND);
        gst_pipeline_->play(true);
      }
      break;
    }
    default:
      break;
  }
  return GST_BUS_PASS;
}