QTime Player::position() { if (pipeline) { gint64 value=0; #ifdef GST_API_VERSION_1 if(gst_element_query_position(pipeline, GST_FORMAT_TIME, &value)) { #else GstFormat fmt = GST_FORMAT_TIME; if(gst_element_query_position(pipeline, &fmt, &value)) { #endif m_position = static_cast<uint>( ( value / GST_MSECOND ) ); return QTime(0,0).addMSecs( m_position ); // nanosec -> msec } return QTime(0,0).addMSecs( m_position ); // nanosec -> msec } return QTime(0,0); } QTime Player::length() { gint64 value=0; if ( m_length == 0 && pipeline){ #ifdef GST_API_VERSION_1 if(gst_element_query_duration(pipeline, GST_FORMAT_TIME, &value)) { #else GstFormat fmt = GST_FORMAT_TIME; if(gst_element_query_duration(pipeline, &fmt, &value)) { #endif m_length = static_cast<uint>( ( value / GST_MSECOND )); } qDebug() << Q_FUNC_INFO <<": Can not get duration"; } return QTime(0,0).addMSecs( m_length ); // nanosec -> msec } double Player::volume() { gdouble vol = 0; GstElement *volume = gst_bin_get_by_name(GST_BIN(pipeline), "volume"); g_object_get (G_OBJECT(volume), "volume", &vol, NULL); gst_object_unref(volume); return vol; } void Player::setVolume(double v) { gdouble vol = 1.00 * v; //gdouble vol = 0.01 * v; GstElement *volume = gst_bin_get_by_name(GST_BIN(pipeline), "volume"); g_object_set (G_OBJECT(volume), "volume", vol, NULL); gst_object_unref(volume); }
// Changes the playback rate of the pipeline. A rate of 0 is treated as a
// pause request; otherwise the current position is queried and a flushing
// seek restarts playback at the new rate.
// NOTE(review): forward playback seeks [pos, end); reverse playback plays
// the segment [0, pos].
void ofGstUtils::setSpeed(float _speed){
	GstFormat format = GST_FORMAT_TIME;
	GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_SKIP | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH);
	gint64 pos;

	// Speed 0 pauses instead of seeking.
	if(_speed==0){
		gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
		return;
	}

	// The 0.10 API takes the format by pointer; 1.x takes it by value.
#if GST_VERSION_MAJOR==0
	if(!gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos) || pos<0){
		//ofLog(OF_LOG_ERROR,"GStreamer: cannot query position");
		return;
	}
#else
	if(!gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos) || pos<0){
		//ofLog(OF_LOG_ERROR,"GStreamer: cannot query position");
		return;
	}
#endif

	speed = _speed;
	//pos = (float)gstData.lastFrame * (float)fps_d / (float)fps_n * GST_SECOND;
	if(!bPaused)
		gst_element_set_state (gstPipeline, GST_STATE_PLAYING);

	if(speed>0){
		// Forward: play from the current position to the end (-1).
		if(!gst_element_seek(GST_ELEMENT(gstPipeline),speed, format,
				flags,
				GST_SEEK_TYPE_SET,
				pos,
				GST_SEEK_TYPE_SET,
				-1)) {
			gLogManager.log("GStreamer: unable to change speed",ELL_WARNING);
		}
	}else{
		// Reverse: play the segment from the start up to the current position.
		if(!gst_element_seek(GST_ELEMENT(gstPipeline),speed, format,
				flags,
				GST_SEEK_TYPE_SET,
				0,
				GST_SEEK_TYPE_SET,
				pos)) {
			gLogManager.log("GStreamer: unable to change speed",ELL_WARNING);
		}
	}
	gLogManager.log("Gstreamer: speed change to "+core::StringConverter::toString(speed),ELL_INFO);
}
void ofGstUtils::setSpeed(float _speed){ GstFormat format = GST_FORMAT_TIME; GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_SKIP | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH); gint64 pos; if(_speed==0){ gst_element_set_state (gstPipeline, GST_STATE_PAUSED); return; } #if GST_VERSION_MAJOR==0 if(!gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos) || pos<0){ //ofLogError("ofGstUtils") << "setSpeed(): couldn't query position"; return; } #else if(!gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos) || pos<0){ //ofLogError("ofGstUtils") << "setSpeed(): couldn't query position"; return; } #endif speed = _speed; //pos = (float)gstData.lastFrame * (float)fps_d / (float)fps_n * GST_SECOND; if(!bPaused) gst_element_set_state (gstPipeline, GST_STATE_PLAYING); if(speed>0){ if(!gst_element_seek(GST_ELEMENT(gstPipeline),speed, format, flags, GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, -1)) { ofLogWarning("ofGstUtils") << "setSpeed(): unable to change speed"; } }else{ if(!gst_element_seek(GST_ELEMENT(gstPipeline),speed, format, flags, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos)) { ofLogWarning("ofGstUtils") << "setSpeed(): unable to change speed"; } } ofLogVerbose("ofGstUtils") << "setSpeed(): speed changed to " << speed; }
// Returns the playback position in milliseconds. While paused the cached
// LastCurrentTime_ is reused instead of re-querying the decoder.
qint64 SourceObject::GetCurrentTime ()
{
	if (GetState () != SourceState::Paused)
	{
		gint64 pos = 0;
		auto fmt = GST_FORMAT_TIME;
#if GST_VERSION_MAJOR >= 1
		gst_element_query_position (GST_ELEMENT (Dec_), fmt, &pos);
#else
		// The 0.10 API takes the format by pointer.
		gst_element_query_position (GST_ELEMENT (Dec_), &fmt, &pos);
#endif
		LastCurrentTime_ = pos;
	}
	return LastCurrentTime_ / GST_MSECOND;
}
/* Timer callback while a live preview plays: polls the pipeline for duration
 * and position (converted to milliseconds) and updates the progress slider.
 * No-op unless live preview is enabled, active, and not mid-seek. */
void
ghb_live_preview_progress(signal_user_data_t *ud)
{
#if defined(_ENABLE_GST)
    GstFormat fmt = GST_FORMAT_TIME;
    gint64 len = -1, pos = -1;

    if (!ud->preview->live_enabled)
        return;

    /* Skip while seeking so the slider does not fight the user. */
    if (ud->preview->state != PREVIEW_STATE_LIVE || ud->preview->seek_lock)
        return;

    ud->preview->progress_lock = TRUE;
#if GST_CHECK_VERSION(1, 0, 0)
    if (gst_element_query_duration(ud->preview->play, fmt, &len))
#else
    /* 0.10 API takes the format by pointer and may rewrite it. */
    if (gst_element_query_duration(ud->preview->play, &fmt, &len))
#endif
    {
        if (len != -1 && fmt == GST_FORMAT_TIME)
        {
            /* nanoseconds -> milliseconds */
            ud->preview->len = len / GST_MSECOND;
        }
    }
#if GST_CHECK_VERSION(1, 0, 0)
    if (gst_element_query_position(ud->preview->play, fmt, &pos))
#else
    if (gst_element_query_position(ud->preview->play, &fmt, &pos))
#endif
    {
        if (pos != -1 && fmt == GST_FORMAT_TIME)
        {
            ud->preview->pos = pos / GST_MSECOND;
        }
    }
    if (ud->preview->len > 0)
    {
        GtkRange *progress;
        gdouble percent;

        percent = (gdouble)ud->preview->pos * 100 / ud->preview->len;
        progress = GTK_RANGE(GHB_WIDGET(ud->builder, "live_preview_progress"));
        gtk_range_set_value(progress, percent);
    }
    /* progress_lock is released from the main loop by unlock_progress_cb. */
    g_idle_add((GSourceFunc)unlock_progress_cb, ud);
#endif
}
/* Timeline tick handler: refreshes the on-screen clock label with the
 * player's current position, formatted as HH:MM:SS. On query failure a
 * placeholder is shown instead of stale digits. */
void on_update_clock(ClutterTimeline *timeline, gint frame_num, gpointer data)
{
    HandCar *all = (HandCar *)data;
    gint64 pos = -1;

    if (!gst_element_query_position (all->player, &all->format, &pos)) {
        clutter_label_set_text (CLUTTER_LABEL(all->label_time),
                                "<span size='6000'>UNKNOWN</span>");
        return;
    }

    if (pos == -1)
        return;

    pos /= GST_SECOND;
    {
        gint h = pos / (60 * 60);
        gint m = (pos - (h * 60 * 60)) / 60;
        gint s = pos % 60;
        gchar *label = g_strdup_printf ("%02d:%02d:%02d", h, m, s);

        clutter_label_set_text (CLUTTER_LABEL(all->label_time), label);
        g_free (label);
    }
}
/* Periodic refresh: prints "position / duration" for the element, using a
 * dashed placeholder for whichever query fails. Always returns TRUE so the
 * timeout source keeps firing. */
static gboolean
update_scale (GstElement * element)
{
  GstFormat format = GST_FORMAT_TIME;
  gint64 position = -1;
  gint64 duration = -1;
  gchar pos_str[32];
  gchar dur_str[32];

  if (gst_element_query_position (element, &format, &position) && position != -1)
    g_snprintf (pos_str, sizeof (pos_str), "%" GST_TIME_FORMAT,
        GST_TIME_ARGS (position));
  else
    g_snprintf (pos_str, sizeof (pos_str), "-:--:--.---------");

  if (gst_element_query_duration (element, &format, &duration) && duration != -1)
    g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT,
        GST_TIME_ARGS (duration));
  else
    g_snprintf (dur_str, sizeof (dur_str), "-:--:--.---------");

  g_print ("%s / %s\n", pos_str, dur_str);

  return TRUE;
}
/* * PsychGSGetMovieTimeIndex() -- Return current playback time of movie. */ double PsychGSGetMovieTimeIndex(int moviehandle) { GstElement *theMovie = NULL; GstFormat fmt; gint64 pos_nsecs; if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) { PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided!"); } // Fetch references to objects we need: theMovie = movieRecordBANK[moviehandle].theMovie; if (theMovie == NULL) { PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle !!!"); } fmt = GST_FORMAT_TIME; if (!gst_element_query_position(theMovie, &fmt, &pos_nsecs)) { printf("PTB-WARNING: Could not query position in movie %i in seconds. Returning zero.\n", moviehandle); pos_nsecs = 0; } // Retrieve timeindex: return((double) pos_nsecs / (double) 1e9); }
/* Periodic tick during extraction: emits the PROGRESS signal with the number
 * of seconds of audio processed so far. Returning FALSE removes the timeout
 * source once the pipeline is no longer heading to PLAYING. */
static gboolean
tick_timeout_cb(SjExtractor *extractor)
{
  gint64 nanos;
  gint secs;
  GstState state, pending_state;

  g_return_val_if_fail (SJ_IS_EXTRACTOR (extractor), FALSE);

  /* Timeout 0: non-blocking peek at current and pending state. */
  gst_element_get_state (extractor->priv->pipeline, &state, &pending_state, 0);
  if (state != GST_STATE_PLAYING && pending_state != GST_STATE_PLAYING) {
    /* Pipeline stopped: drop the tick source. */
    extractor->priv->tick_id = 0;
    return FALSE;
  }

  if (!gst_element_query_position (extractor->priv->cdsrc, GST_FORMAT_TIME, &nanos)) {
    g_warning (_("Could not get current track position"));
    return TRUE;
  }

  secs = nanos / GST_SECOND;
  if (secs != extractor->priv->seconds) {
    /* NOTE(review): priv->seconds is not updated in this function, so this
     * emits on every tick once the position differs from it — confirm the
     * field is maintained elsewhere. */
    g_signal_emit (extractor, signals[PROGRESS], 0, secs);
  }

  return TRUE;
}
// Current pipeline position in nanoseconds. The query is only issued once
// the pipeline has been initialised; otherwise (or on failure) the cached
// last-known value is returned unchanged.
qint64 GstEnginePipeline::position() const {
  if (pipeline_is_initialised_) {
    gst_element_query_position(pipeline_, GST_FORMAT_TIME,
                               &last_known_position_ns_);
  }
  return last_known_position_ns_;
}
void PlayerGst::timerUpdate() { GstMessage* message; while(!usePlaybin && (message = gst_bus_pop(bus), message)) { switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_ERROR: { GError *err; gchar *debug; gst_message_parse_error (message, &err, &debug); QString str; str = "Error #"+QString::number(err->code)+" in module "+QString::number(err->domain)+"\n"+QString::fromUtf8(err->message); if(err->code == 6 && err->domain == 851) { str += "\nMay be you should to install gstreamer0.10-plugins-ugly or gstreamer0.10-plugins-bad"; } QMessageBox::warning(0, "Gstreamer error", str); g_error_free (err); g_free (debug); break; } case GST_MESSAGE_EOS: need_finish(); //QMessageBox::information(0, "", "EOS"); return; default: break; } } if(playing()) { gint64 p; GstFormat fmt = GST_FORMAT_TIME; gst_element_query_position(pipeline, &fmt, &p); emit position((double)(p - Gstart) / Glength); } }
/* Sets the subtitle URI on the playbin. Playbin cannot change "suburi"
 * mid-stream, so when the pipeline is at least PAUSED the current position
 * is saved (in bytes), the pipeline is dropped to READY, the new suburi is
 * applied, and playback resumes at the saved byte offset. */
P_INVOKE void
bp_set_subtitle_uri (BansheePlayer *player, const gchar *uri)
{
    g_return_if_fail (IS_BANSHEE_PLAYER (player));

    gint64 pos = -1;
    GstState state;
    gboolean paused = FALSE;

    // Gstreamer playbin do not support to set suburi during playback
    // so have to stop/play and seek
    gst_element_get_state (player->playbin, &state, NULL, 0);
    paused = (state == GST_STATE_PAUSED);
    if (state >= GST_STATE_PAUSED) {
        /* Byte offsets survive the READY round-trip, unlike running time. */
        gst_element_query_position (player->playbin, GST_FORMAT_BYTES, &pos);
        gst_element_set_state (player->playbin, GST_STATE_READY);
        // Force to wait asynch operation
        gst_element_get_state (player->playbin, &state, NULL, -1);
    }

    g_object_set (G_OBJECT (player->playbin), "suburi", uri, NULL);

    /* Restore the previous play/pause state. */
    gst_element_set_state (player->playbin, paused ? GST_STATE_PAUSED : GST_STATE_PLAYING);

    // Force to wait asynch operation
    gst_element_get_state (player->playbin, &state, NULL, -1);

    if (pos != -1) {
        /* Seek back to where playback was before the suburi change. */
        gst_element_seek_simple (player->playbin, GST_FORMAT_BYTES,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, pos);
    }
}
/* Reports overall progress across the queue of items: the fraction of items
 * already finished (index / item count) plus the byte-level fractional
 * progress within the current source, scaled to one item's share. */
void
gst_nle_source_query_progress (GstNleSource * nlesrc, gfloat * progress)
{
  gint64 position = 0, duration = 0;
  gfloat tmp = 0.0;
  guint num_items = g_list_length (nlesrc->queue);

  /* Completed items contribute index/num_items of the total. */
  *progress = (gfloat) nlesrc->index / (gfloat) num_items;
  if (nlesrc->source) {
    GstFormat format = GST_FORMAT_BYTES;

    /* The current item's duration is cached after the first query. */
    if (nlesrc->cached_duration > 0)
      duration = nlesrc->cached_duration;
    else {
      gst_element_query_duration (nlesrc->source, &format, &duration);
      nlesrc->cached_duration = duration;
    }

    if (duration > 0) {
      gst_element_query_position (nlesrc->source, &format, &position);
      /* Fraction of the current item, scaled to one slot of the total. */
      tmp = (gfloat) position / (gfloat) duration;
      tmp *= 1.0 / (gfloat) num_items;
    }
  }
  *progress += tmp;
  GST_LOG ("progress(%d): %" G_GINT64_FORMAT "/%" G_GINT64_FORMAT "(%g%%)",
      nlesrc->index, position, duration, *progress * 100);
}
/* Periodic callback: formats the pipeline's current position into the
 * audio_info string. Always returns TRUE to stay scheduled. */
static gboolean cb_print_position (GstElement *pipeline)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 pos;
  gint64 len;

  bool ok = gst_element_query_position (pipeline, &fmt, &pos)
         && gst_element_query_duration (pipeline, &fmt, &len);
  if (ok) {
    // g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
    //          GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
    char formatted[100];
    snprintf(formatted, sizeof(formatted), "%" GST_TIME_FORMAT "", GST_TIME_ARGS (pos));
    audio_info = std::string(formatted);
    //audio_current_second = GST_TIME_ARGS (pos);
    //audio_max_seconds = GST_TIME_ARGS (len);
    //std::cout<<audio_info<<std::endl;
  }

  /* call me again */
  return TRUE;
}
// Seeks playback by <position> milliseconds/seconds relative to the current
// position (units differ per backend below).
// NOTE(review): the 'absolute' flag is accepted but ignored by this
// implementation — confirm whether callers expect absolute seeks.
bool cPlayback::SetPosition(int position, bool absolute)
{
	if(playing == false)
		return false;

#if defined (ENABLE_LIBEPLAYER3)
	// eplayer expects a float offset in seconds.
	float pos = (position/1000.0);
	if(player && player->playback)
		player->playback->Command(player, PLAYBACK_SEEK, (void*)&pos);
#endif

#if defined (ENABLE_GSTREAMER)
	gint64 time_nanoseconds;
	gint64 pos;
	GstFormat fmt = GST_FORMAT_TIME;
	if(m_gst_playbin)
	{
		gst_element_query_position(m_gst_playbin, &fmt, &pos);
		// BUG FIX: the multiplication was previously done in plain int and
		// overflowed for any offset >= 3 units; widen to gint64 first.
		time_nanoseconds = pos + ((gint64)position * 1000000000);
		if(time_nanoseconds < 0)
			time_nanoseconds = 0;
		gst_element_seek(m_gst_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
			GST_SEEK_TYPE_SET, time_nanoseconds,
			GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
	}
#endif

	return true;
}
/* Verifies that a GST_FORMAT_TIME position query on the test audio synth
 * matches the duration of the first buffer it produced. The pipeline plays a
 * single buffer through a fakesink to EOS before the query is made. */
static void
test_position_query_time (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL);
  BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src");
  /* NOTE(review): 'bus' is never unreffed in this function — confirm the
   * leak is acceptable for a test or clean it up. */
  GstBus *bus = gst_element_get_bus (p);

  GST_INFO ("-- act --");
  gst_element_set_state (p, GST_STATE_PLAYING);
  /* Block until the single buffer has been played out (or an error). */
  gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  BufferFields *bf = get_buffer_info (e, 0);
  gint64 pos;
  gboolean res = gst_element_query_position ((GstElement *) e, GST_FORMAT_TIME, &pos);
  fail_unless (res, NULL);
  /* One buffer played => position equals that buffer's duration. */
  ck_assert_uint64_eq (bf->duration, pos);

  GST_INFO ("-- cleanup --");
  gst_element_set_state (p, GST_STATE_NULL);
  gst_object_unref (e);
  gst_object_unref (p);
  BT_TEST_END;
}
/* If we have pipeline and it is running, query the current position and clip
 * duration and inform the application. Always returns TRUE so the periodic
 * source stays installed. */
static gboolean refresh_ui (CustomData *data) {
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 position;

  /* We do not want to update anything unless we have a working pipeline in the PAUSED or PLAYING state */
  if (!data || !data->pipeline || data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->pipeline, &fmt, &data->duration)) {
      GST_WARNING ("Could not query current duration (normal for still pictures)");
      data->duration = 0;
    }
  }

  if (!gst_element_query_position (data->pipeline, &fmt, &position)) {
    GST_WARNING ("Could not query current position (normal for still pictures)");
    position = 0;
  }

  /* Java expects these values in milliseconds, and GStreamer provides nanoseconds */
  /* (removed the unused local 'current' that previously shadowed this role) */
  set_current_ui_position (position / GST_MSECOND, data->duration / GST_MSECOND, data);
  return TRUE;
}
// Raw pipeline position in nanoseconds; stays 0 when the query fails
// (the query's return value is deliberately not checked here).
qint64 GstEnginePipeline::position() const {
  gint64 ns = 0;
  GstFormat time_format = GST_FORMAT_TIME;

  gst_element_query_position(pipeline_, &time_format, &ns);
  return ns;
}
/* Current playback position in seconds. Falls back to the last cached
 * ev->position when there is no pipeline, the query fails, or the query
 * answers in an unexpected format. */
static double
em_pos_get(void *video)
{
   Emotion_Gstreamer_Video *ev;
   GstFormat fmt;
   gint64 val;
   gboolean ret;

   ev = video;
   fmt = GST_FORMAT_TIME;

   if (!ev->pipeline) return 0.0;

   ret = gst_element_query_position(ev->pipeline, &fmt, &val);
   if (!ret)
     return ev->position;

   if (fmt != GST_FORMAT_TIME)
     {
        /* BUG FIX: typo "requrested" in the log message. */
        ERR("requested position in time, but got %s instead.",
            gst_format_get_name(fmt));
        return ev->position;
     }

   /* nanoseconds -> seconds */
   ev->position = val / 1000000000.0;
   return ev->position;
}
/* This function is called periodically to refresh the GUI: updates the
 * slider range (once the duration is known) and its current value. */
static gboolean refresh_ui (CustomData *data) {
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 current = -1;

  /* We do not want to update anything unless we are in the PAUSED or PLAYING states */
  if (data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->playbin, fmt, &data->duration)) {
      g_printerr ("Could not query current duration.\n");
    } else {
      /* Set the range of the slider to the clip duration, in SECONDS */
      gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
    }
  }

  /* BUG FIX: "&current" had been mangled into the HTML entity "¤t",
   * which does not compile. */
  if (gst_element_query_position (data->playbin, fmt, &current)) {
    /* Block the "value-changed" signal, so the slider_cb function is not called
     * (which would trigger a seek the user has not requested) */
    g_signal_handler_block (data->slider, data->slider_update_signal_id);
    /* Set the position of the slider to the current pipeline position, in SECONDS */
    gtk_range_set_value (GTK_RANGE (data->slider), (gdouble)current / GST_SECOND);
    /* Re-enable the signal */
    g_signal_handler_unblock (data->slider, data->slider_update_signal_id);
  }
  return TRUE;
}
/* Periodic status printer: shows "position / duration", truncated to whole
 * seconds, overwriting the same terminal line with '\r'. Suspended while
 * buffering. Always returns TRUE so the timeout keeps firing. */
static gboolean
play_timeout (gpointer user_data)
{
  GstPlay *play = user_data;
  gint64 position = -1;
  gint64 duration = -1;

  if (play->buffering)
    return TRUE;

  gst_element_query_position (play->playbin, GST_FORMAT_TIME, &position);
  gst_element_query_duration (play->playbin, GST_FORMAT_TIME, &duration);

  if (position < 0 || duration <= 0)
    return TRUE;

  {
    gchar pos_text[32], dur_text[32];

    /* FIXME: pretty print in nicer format */
    g_snprintf (pos_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (position));
    pos_text[9] = '\0';         /* chop sub-second digits */
    g_snprintf (dur_text, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
    dur_text[9] = '\0';
    g_print ("%s / %s\r", pos_text, dur_text);
  }

  return TRUE;
}
/* Periodic progress callback: reports overall job progress as the fraction
 * of tracks already normalized plus the fractional progress within the
 * current track's pipeline. */
static BraseroBurnResult
brasero_normalize_clock_tick (BraseroJob *job)
{
	/* BUG FIX: both gint64 values were initialised with the double
	 * literal 0.0; use integer literals. */
	gint64 position = 0;
	gint64 duration = 0;
	BraseroNormalizePrivate *priv;

	priv = BRASERO_NORMALIZE_PRIVATE (job);

	gst_element_query_duration (priv->pipeline, GST_FORMAT_TIME, &duration);
	gst_element_query_position (priv->pipeline, GST_FORMAT_TIME, &position);

	if (duration > 0) {
		GSList *tracks;
		gdouble progress;

		brasero_job_get_tracks (job, &tracks);
		/* Progress within the current track. */
		progress = (gdouble) position / (gdouble) duration;

		if (tracks) {
			gdouble num_tracks;

			num_tracks = g_slist_length (tracks);
			/* Tracks still queued in priv->tracks are the remaining ones;
			 * scale the per-track progress into the whole-job range. */
			progress = (gdouble) (num_tracks - 1.0 - (gdouble) g_slist_length (priv->tracks) + progress) / (gdouble) num_tracks;
			brasero_job_set_progress (job, progress);
		}
	}

	return BRASERO_BURN_OK;
}
/* Periodic status line: prints "position / duration" plus "Paused" when the
 * desired state is paused, overwriting the same terminal line with '\r'.
 * Skipped while buffering. Always returns TRUE to stay scheduled. */
static gboolean
play_timeout (gpointer user_data)
{
  GstPlay *play = user_data;
  gint64 pos = -1, dur = -1;
  gchar status[64] = { 0, };

  if (play->buffering)
    return TRUE;

  gst_element_query_position (play->playbin, GST_FORMAT_TIME, &pos);
  gst_element_query_duration (play->playbin, GST_FORMAT_TIME, &dur);

  if (play->desired_state == GST_STATE_PAUSED)
    g_snprintf (status, sizeof (status), "Paused");
  else
    /* Blank the status field so a previous "Paused" gets overwritten. */
    memset (status, ' ', sizeof (status) - 1);

  if (pos >= 0 && dur > 0) {
    gchar dstr[32], pstr[32];

    /* FIXME: pretty print in nicer format */
    g_snprintf (pstr, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
    pstr[9] = '\0';    /* truncate to whole seconds */
    g_snprintf (dstr, 32, "%" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
    dstr[9] = '\0';
    g_print ("%s / %s %s\r", pstr, dstr, status);
  }

  return TRUE;
}
/* Determine the duration of the current track. For the mp3 size pipeline the
 * position after reading to the end is the reliable answer; otherwise (or as
 * a fallback) a plain duration query is used. Raises a job error when no
 * usable duration could be obtained. */
static gint64
brasero_transcode_get_duration (BraseroTranscode *transcode)
{
	BraseroTranscodePrivate *priv = BRASERO_TRANSCODE_PRIVATE (transcode);
	gint64 duration = -1;

	/* This part is specific to MP3s */
	if (priv->mp3_size_pipeline) {
		/* This is the most reliable way to get the duration for mp3
		 * read them till the end and get the position. */
		gst_element_query_position (priv->pipeline,
					    GST_FORMAT_TIME,
					    &duration);
	}

	/* This is for any sort of files */
	if (duration == -1 || duration == 0) {
		gst_element_query_duration (priv->pipeline,
					    GST_FORMAT_TIME,
					    &duration);
	}

	BRASERO_JOB_LOG (transcode, "got duration %" GST_TIME_FORMAT, GST_TIME_ARGS (duration));

	if (duration == -1 || duration == 0) {
		brasero_job_error (BRASERO_JOB (transcode),
				   g_error_new (BRASERO_BURN_ERROR,
						BRASERO_BURN_ERROR_GENERAL, "%s",
						_("Error while getting duration")));
	}

	return duration;
}
bool MediaImpl::_eos() const { if (_movieReady) { Q_ASSERT( _appsink0 ); if (_rate > 0) { gboolean videoEos; g_object_get (G_OBJECT (_appsink0), "eos", &videoEos, NULL); return (bool) (videoEos); } else { /* Obtain the current position, needed for the seek event */ gint64 position; if (!gst_element_query_position (_pipeline, GST_FORMAT_TIME, &position)) { g_printerr ("Unable to retrieve current position.\n"); return false; } return (position == 0); } } else return false; }
/*
 * sidenote: I find it completly stupid that you can detect the end of stream in
 * bus_call() with GST_MESSAGE_EOS, but cannot access and reset the state of the pipeline
 * throught the GMainLoop *loop. (If there is a way please, tell me about it)
 * So in order to detect the end of stream, you've got two options:
 * _ create a second bus
 * _ pass a struct to bus_call
 * _ get the stream length and compare it to the current position....
 * I use the third option here, but I'm only skirting the problem to
 * keep the code simple and readable.
 * known issue: if you set the speeed too high (eg:*64), it won't detect the end of
 * the stream correctly, so you'll need to rewind the stream manually by pressing the stop button
 */
static void playIt (GtkWidget *widget, GstElement* pipeline)
{
	// rewind stream if we're at the end of the stream
	gint64 streamPosition = -1, streamLength = -1;
	GstFormat format = GST_FORMAT_TIME;

	/* BUG FIX: both queries can fail; previously their return values were
	 * ignored and the two variables were left uninitialized, so the
	 * comparison below could read garbage. Only rewind when both queries
	 * succeeded and position equals length. */
	if (gst_element_query_position (pipeline, &format, &streamPosition) &&
	    gst_element_query_duration (pipeline, &format, &streamLength) &&
	    streamPosition == streamLength)
		stopIt(widget,pipeline);

	// setting the stream to the playing state
	g_print("Playing Video\n");
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
/* Refreshes the position slider and the position/duration labels from the
 * playbin's current position. The duration is queried lazily the first time
 * and cached in priv->duration. */
static void
update_current_position_bar (GthMediaViewerPage *self)
{
	GstFormat format;
        gint64    current_value = 0;

	format = GST_FORMAT_TIME;
	/* BUG FIX: "&current_value" had been mangled into the HTML entity
	 * sequence "¤t_value", which does not compile. */
	if (gst_element_query_position (self->priv->playbin, format, &current_value)) {
		char *s;

		if (self->priv->duration <= 0) {
			gst_element_query_duration (self->priv->playbin, format, &self->priv->duration);
			s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (self->priv->duration));
			gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_duration")), s);
			g_free (s);
		}

		/*
		g_print ("==> %" G_GINT64_FORMAT " / %" G_GINT64_FORMAT " (%0.3g)\n" , current_value, self->priv->duration, ((double) current_value / self->priv->duration) * 100.0);
		*/

		/* Block the handler so programmatic updates don't trigger a seek. */
		g_signal_handlers_block_by_func(GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);
		gtk_adjustment_set_value (GTK_ADJUSTMENT (GET_WIDGET ("position_adjustment")), (self->priv->duration > 0) ? ((double) current_value / self->priv->duration) * 100.0 : 0.0);
		g_signal_handlers_unblock_by_func(GET_WIDGET ("position_adjustment"), position_value_changed_cb, self);

		s = _g_format_duration_for_display (GST_TIME_AS_MSECONDS (current_value));
		gtk_label_set_text (GTK_LABEL (GET_WIDGET ("label_position")), s);
		g_free (s);
	}
}
/* Seeks the pipeline to the in-point of the current entry in the segment
 * list (a flushing, key-unit, segment seek). The out-point is currently left
 * open (-1). On success the segment index is advanced for the next call. */
static gboolean
do_seek (App * app)
{
  gint64 in_pos, out_pos;
  gfloat rate = 1.0;
  GstFormat fmt = GST_FORMAT_TIME;
  GstSeekFlags flags = 0;
  int ret;

  if (app->current_segment >= app->segment_count) {
    GST_WARNING ("seek segment not found!");
    return FALSE;
  }

  GST_INFO ("do_seek...");
  flags |= GST_SEEK_FLAG_FLUSH;
  // flags |= GST_SEEK_FLAG_ACCURATE;
  flags |= GST_SEEK_FLAG_KEY_UNIT;
  flags |= GST_SEEK_FLAG_SEGMENT;

  /* Logged only for diagnostics: the position before the seek. */
  gst_element_query_position ((app->pipeline), &fmt, &in_pos);
  GST_DEBUG ("do_seek::initial gst_element_query_position = %lld ms", in_pos / 1000000);

  /* Convert the segment's MPEG PTS to GStreamer time. */
  in_pos = MPEGTIME_TO_GSTTIME (app->seek_segments[app->current_segment].in_pts);
  GST_DEBUG ("do_seek::in_time for segment %i = %lld ms", app->current_segment, in_pos / 1000000);
  out_pos = -1; // MPEGTIME_TO_GSTTIME (app->seek_segments[app->current_segment].out_pts);
  GST_DEBUG ("do_seek::out_time for segment %i = %lld ms", app->current_segment, out_pos / 1000000);

  ret = gst_element_seek ((app->pipeline), rate,
      GST_FORMAT_TIME,
      flags,
      GST_SEEK_TYPE_SET,
      in_pos,
      GST_SEEK_TYPE_SET,
      out_pos);

  /* Diagnostic re-query after the seek request. */
  gst_element_query_position ((app->pipeline), &fmt, &in_pos);
  GST_DEBUG ("do_seek::seek command returned %i. new gst_element_query_position = %lld ms", ret, in_pos / 1000000);

  if (ret)
    app->current_segment++;

  return ret;
}
// Returns the decoder's current position in nanoseconds, or 0 when the
// position cannot be queried.
// NOTE(review): 'long' is 32-bit on some platforms, which truncates
// nanosecond positions beyond ~2.1 s — consider widening the return type.
long AudioAbstracter::getPositionNanoseconds ()
{
  GstFormat format = GST_FORMAT_TIME;
  gint64 position = 0;

  // BUG FIX: 'position' was uninitialized and returned as garbage when the
  // query failed; it is now zero-initialised and the result is checked.
  if (!gst_element_query_position (p->decoder, &format, &position))
    position = 0;

  return (long)position;
}
// retrieves the position of the media file guint64 gst_binding_get_position (gstPlay *play) { if (!isValid (play)) return 0; gint64 position; if(gst_element_query_position (play->element, GST_FORMAT_TIME, &position)) return position / GST_MSECOND; return 0; }