// Destructor: drains the encoder branches and tears down the GStreamer pipeline.
KUIRecord::~KUIRecord()
{
  // Send EOS to each encoder so downstream muxers can finalize their output.
  gst_element_send_event (screenEnc, gst_event_new_eos ());
  gst_element_send_event (camEnc, gst_event_new_eos ());
  gst_element_send_event (audioEnc, gst_event_new_eos());
  // NOTE(review): flush-stop without a preceding flush-start is unusual and
  // may discard the EOS sent above before it reaches the sinks — confirm
  // this ordering is intentional.
  gst_element_send_event(pipeline, gst_event_new_flush_stop());
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);  
}
/* Feed @src_data through a fresh avisubtitle element and verify the emitted
 * output buffer matches @dst_data byte-for-byte. Also verifies that seeking
 * fails before any data has been pushed and succeeds afterwards (producing
 * a second output buffer which is discarded here). */
static void
check_correct_buffer (guint8 * src_data, guint src_size, guint8 * dst_data,
    guint dst_size)
{
  /* NULL for GstAllocationParams (was a bare 0). */
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, src_size, NULL);
  GstBuffer *newBuffer;
  GstElement *avisubtitle = setup_avisubtitle ();
  GstEvent *event;

  fail_unless (g_list_length (buffers) == 0, "Buffers list needs to be empty");
  gst_buffer_fill (buffer, 0, src_data, src_size);
  fail_unless (gst_element_set_state (avisubtitle,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");
  ASSERT_BUFFER_REFCOUNT (buffer, "inbuffer", 1);
  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, 2 * GST_SECOND, GST_SEEK_TYPE_SET, 5 * GST_SECOND);
  fail_unless (gst_element_send_event (avisubtitle, event) == FALSE,
      "Seeking is not possible when there is no buffer yet");
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK,
      "not accepted a correct buffer");
  /* we gave away our reference to the buffer, don't assume anything */
  buffer = NULL;
  /* a new buffer is created in the list */
  fail_unless (g_list_length (buffers) == 1,
      "No new buffer in the buffers list");
  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, 2 * GST_SECOND, GST_SEEK_TYPE_SET, 5 * GST_SECOND);
  fail_unless (gst_element_send_event (avisubtitle, event) == TRUE,
      "seeking should be working now");
  fail_unless (g_list_length (buffers) == 2,
      "After seeking we need another buffer in the buffers");
  newBuffer = GST_BUFFER (buffers->data);
  buffers = g_list_remove (buffers, newBuffer);
  /* fixed misleading failure message: exactly one buffer must remain here */
  fail_unless (g_list_length (buffers) == 1,
      "Buffers list needs to contain exactly one buffer");
  /* use the correct format for gsize (was %d, which mismatches gsize) */
  fail_unless (gst_buffer_get_size (newBuffer) == dst_size,
      "size of the new buffer is wrong ( %" G_GSIZE_FORMAT " != %u)",
      gst_buffer_get_size (newBuffer), dst_size);
  fail_unless (gst_buffer_memcmp (newBuffer, 0, dst_data, dst_size) == 0,
      "data of the buffer is not correct");
  gst_buffer_unref (newBuffer);
  /* free the buffer from seeking */
  gst_buffer_unref (GST_BUFFER (buffers->data));
  buffers = g_list_remove (buffers, buffers->data);
  fail_unless (gst_element_set_state (avisubtitle,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS, "could not set to null");
  cleanup_avisubtitle (avisubtitle);
}
Exemple #3
0
/* Perform a flushing, accurate seek on @pipeline to @pos at @rate, with
 * trick-mode flags derived from @mode. On success the new rate and mode
 * are stored in the cur_rate / trick_mode globals. Returns TRUE if the
 * seek event was accepted, FALSE if the pipeline is not seekable or the
 * event was rejected. */
static gboolean
play_do_seek (GstElement * pipeline, gint64 pos, gdouble rate,
    GstPlayTrickMode mode)
{
  GstQuery *seeking_query;
  GstEvent *seek_event;
  gboolean can_seek = FALSE;
  GstSeekFlags flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE;

  /* First make sure the pipeline reports itself as seekable. */
  seeking_query = gst_query_new_seeking (GST_FORMAT_TIME);
  if (!gst_element_query (pipeline, seeking_query)) {
    gst_query_unref (seeking_query);
    return FALSE;
  }
  gst_query_parse_seeking (seeking_query, NULL, &can_seek, NULL, NULL);
  gst_query_unref (seeking_query);

  if (!can_seek)
    return FALSE;

  /* Translate the requested trick mode into seek flags. */
  switch (mode) {
    case GST_PLAY_TRICK_MODE_DEFAULT:
      flags |= GST_SEEK_FLAG_TRICKMODE;
      break;
    case GST_PLAY_TRICK_MODE_DEFAULT_NO_AUDIO:
      flags |= GST_SEEK_FLAG_TRICKMODE | GST_SEEK_FLAG_TRICKMODE_NO_AUDIO;
      break;
    case GST_PLAY_TRICK_MODE_KEY_UNITS:
      flags |= GST_SEEK_FLAG_TRICKMODE_KEY_UNITS;
      break;
    case GST_PLAY_TRICK_MODE_KEY_UNITS_NO_AUDIO:
      flags |=
          GST_SEEK_FLAG_TRICKMODE_KEY_UNITS | GST_SEEK_FLAG_TRICKMODE_NO_AUDIO;
      break;
    case GST_PLAY_TRICK_MODE_NONE:
    default:
      break;
  }

  if (rate >= 0) {
    /* Forward playback: start at @pos, run to the end of the stream. */
    seek_event = gst_event_new_seek (rate, GST_FORMAT_TIME, flags,
        GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE);
  } else {
    /* Reverse playback: play the [0, pos] range backwards. */
    seek_event = gst_event_new_seek (rate, GST_FORMAT_TIME, flags,
        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, pos);
  }

  if (!gst_element_send_event (pipeline, seek_event))
    return FALSE;

  cur_rate = rate;
  trick_mode = mode;
  return TRUE;
}
// Shut down playback: drain the pipeline with an EOS (waiting up to 5s for
// the bus handler to acknowledge it), stop playback, then tear down the
// pipeline and drop all references.
void ofGstUtils::close(){
	if(bPlaying){
		// Only wait for EOS when actively playing a local (non-stream) movie
		// that hasn't finished yet; otherwise tear down immediately.
		if(!bIsMovieDone && !bPaused && !isStream){
			eosMutex.lock();
			closing = true;
			gst_element_send_event(gstPipeline,gst_event_new_eos());
			try{
				// The EOS bus handler is expected to signal eosCondition;
				// give it at most 5 seconds.
				eosCondition.wait(eosMutex,5000);
			}catch(const Poco::TimeoutException & e){
				ofLogWarning("ofGstUtils") << "didn't received EOS in 5s, closing pipeline anyway";
			}
			eosMutex.unlock();
			closing = false;
		}
	}
	stop();

	if(bLoaded){
		gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);
		// Block (up to 2s) until the transition to NULL actually completes.
		gst_element_get_state(gstPipeline,NULL,NULL,2*GST_SECOND);

		if(busWatchID!=0) g_source_remove(busWatchID);

		gst_object_unref(gstPipeline);
		gstPipeline = NULL;
		gstSink = NULL;
	}

	bLoaded = false;
}
Exemple #5
0
// Rewind the movie to position 0 (preserving the current playback rate), or
// reload it from _uri when the stream does not support seeking.
void MediaImpl::resetMovie()
{
    // XXX: There used to be an issue that when we reached EOS (_eos() == true) we could not seek anymore.
    if (_seekEnabled)
    {
        qDebug() << "Seeking at position 0.";
        GstEvent* seek_event;

        if (_rate > 0) {
            // Forward playback: seek to 0, leave the stop position untouched.
            seek_event = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                             GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, 0);
        } else {
            // Reverse playback: play the whole [0, end] segment backwards.
            seek_event = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                             GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_END, 0);
        }
        /* Send the event */
        // NOTE(review): the seek is sent to the appsink rather than the
        // pipeline; seek events travel upstream from there — confirm this
        // target is intentional.
        gst_element_send_event (_appsink0, seek_event);
//    gst_element_seek_simple (_pipeline, GST_FORMAT_TIME,
//                             (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0);
//    this->_currentFrameSample = NULL;
        _setMovieReady(true);
    }
    else
    {
        // Just reload movie.
        qDebug() << "Reloading the movie" << _seekEnabled;
        loadMovie(_uri);
    }
}
/* Look up the current track's ReplayGain peak/gain tags (defaulting to 0.0
 * when absent) and push them downstream as a tag event so the rgvolume
 * element can apply them. */
static void
brasero_transcode_send_volume_event (BraseroTranscode *transcode)
{
	BraseroTranscodePrivate *priv;
	gdouble track_peak = 0.0;
	gdouble track_gain = 0.0;
	GstTagList *tag_list;
	BraseroTrack *track;
	GstEvent *event;
	GValue *value;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	brasero_job_get_current_track (BRASERO_JOB (transcode), &track);

	BRASERO_JOB_LOG (transcode, "Sending audio levels tags");
	/* value is only read when the lookup succeeds; ownership of the GValue
	 * stays with the track (not freed here — TODO confirm against the
	 * brasero_track_tag_lookup contract). */
	if (brasero_track_tag_lookup (track, BRASERO_TRACK_PEAK_VALUE, &value) == BRASERO_BURN_OK)
		track_peak = g_value_get_double (value);

	if (brasero_track_tag_lookup (track, BRASERO_TRACK_GAIN_VALUE, &value) == BRASERO_BURN_OK)
		track_gain = g_value_get_double (value);

	/* it's possible we fail */
	tag_list = gst_tag_list_new (GST_TAG_TRACK_GAIN, track_gain,
				     GST_TAG_TRACK_PEAK, track_peak,
				     NULL);

	/* NOTE: that event is going downstream */
	event = gst_event_new_tag (tag_list);
	if (!gst_element_send_event (priv->convert, event))
		BRASERO_JOB_LOG (transcode, "Couldn't send tags to rgvolume");

	BRASERO_JOB_LOG (transcode, "Set volume level %lf %lf", track_gain, track_peak);
}
Exemple #7
0
void gstreamer_next_frame() {
	GstElement *video_sink = get_video_sink();
	if (!video_sink)
		return;
	gst_element_send_event (video_sink,
		gst_event_new_step (GST_FORMAT_BUFFERS, 1, playback_rate, TRUE, FALSE));
}
/* Bus handler for the play-twice test: on the first SEGMENT_DONE, cycle the
 * bin READY -> PAUSED, re-send the pre-built seek event and resume PLAYING;
 * on the second SEGMENT_DONE, quit the main loop. Any other message type
 * aborts the test. */
static void
test_play_twice_message_received (GstBus * bus, GstMessage * message,
    GstElement * bin)
{
  gboolean res;
  GstStateChangeReturn state_res;

  GST_INFO ("bus message from \"%" GST_PTR_FORMAT "\": %" GST_PTR_FORMAT,
      GST_MESSAGE_SRC (message), message);

  switch (message->type) {
    case GST_MESSAGE_SEGMENT_DONE:
      play_count++;
      if (play_count == 1) {
        state_res = gst_element_set_state (bin, GST_STATE_READY);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);

        /* prepare playing again */
        set_state_and_wait (bin, GST_STATE_PAUSED);

        /* ref the shared event: gst_element_send_event() consumes its
         * argument and play_seek_event must stay usable. */
        res = gst_element_send_event (bin, gst_event_ref (play_seek_event));
        fail_unless (res == TRUE, NULL);

        state_res = gst_element_set_state (bin, GST_STATE_PLAYING);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);
      } else {
        g_main_loop_quit (main_loop);
      }
      break;
    default:
      g_assert_not_reached ();
      break;
  }
}
// Shut down playback (C++11 variant): drain the pipeline with an EOS,
// waiting up to 5s on a condition variable for the bus handler to confirm,
// then stop playback and tear down the pipeline.
void ofGstUtils::close(){
	if(bPlaying){
		// Only wait for EOS when actively playing a local (non-stream) movie
		// that hasn't finished yet.
		if(!bIsMovieDone && !bPaused && !isStream){
			std::unique_lock<std::mutex> lck(eosMutex);
			closing = true;
			gst_element_send_event(gstPipeline,gst_event_new_eos());
			// NOTE(review): wait_for without a predicate — a spurious wakeup
			// would be treated as "EOS received"; consider the predicate
			// overload keyed on the closing flag.
			if(eosCondition.wait_for(lck,std::chrono::milliseconds(5000))==std::cv_status::timeout){
				ofLogWarning("ofGstUtils") << "didn't received EOS in 5s, closing pipeline anyway";
			}
			closing = false;
		}
	}
	stop();

	if(bLoaded){
		gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);
		// Block (up to 2s) until the transition to NULL actually completes.
		gst_element_get_state(gstPipeline,NULL,NULL,2*GST_SECOND);

		if(busWatchID!=0) g_source_remove(busWatchID);

		gst_object_unref(gstPipeline);
		gstPipeline = NULL;
		gstSink = NULL;
	}

	bLoaded = false;
}
Exemple #10
0
/* GstIterator foreach helper: forward a new reference of @seek_event to the
 * element wrapped in @item (each recipient consumes its own ref). */
static void
_send_seek_event (const GValue * item, gpointer seek_event)
{
  GstElement *element = g_value_get_object (item);
  GstEvent *event_copy = gst_event_ref (seek_event);

  gst_element_send_event (element, event_copy);
}
// Stop the RTP server: send EOS into every app source so each pipeline
// branch can drain, close the underlying pipeline, then reset all
// per-session state back to its defaults.
void ofxGstRTPServer::close(){
	// One EOS per appsrc so every branch (depth, RGB video, OSC, audio)
	// finalizes cleanly before the pipeline is closed.
	if(appSrcDepth){
		gst_element_send_event(appSrcDepth,gst_event_new_eos());
	}
	if(appSrcVideoRGB){
		gst_element_send_event(appSrcVideoRGB,gst_event_new_eos());
	}
	if(appSrcOsc){
		gst_element_send_event(appSrcOsc,gst_event_new_eos());
	}
	if(gst.getGstElementByName("audiocapture")){
		gst_element_send_event(gst.getGstElementByName("audiocapture"),gst_event_new_eos());
	}
	gst.close();
	// Drop raw element pointers; their ownership stayed with the pipeline.
	vRTPsink = NULL;
	vRTPCsink = NULL;
	vRTPCsrc = NULL;
	vEncoder = NULL;
	dEncoder = NULL;
	aEncoder = NULL;
	appSrcVideoRGB = NULL;
	appSrcDepth = NULL;
	appSrcOsc = NULL;
	bufferPool = NULL;
	bufferPoolDepth = NULL;
	// Reset per-stream frame counters and timestamps.
	fps = 0;
	prevTimestamp = 0;
	numFrame = 0;
	prevTimestampDepth = 0;
	numFrameDepth = 0;
	prevTimestampOsc = 0;
	numFrameOsc = 0;
	width = 0;
	height = 0;
	lastSessionNumber = 0;
#if ENABLE_NAT_TRANSVERSAL
	videoStream.reset();
	depthStream.reset();
	oscStream.reset();
	audioStream.reset();
#endif
	firstVideoFrame = true;
	firstOscFrame = true;
	firstDepthFrame = true;

	ofRemoveListener(ofEvents().update,this,&ofxGstRTPServer::update);
}
/* Launch the seek for the current test phase (backward playback, fast
 * forward, or fast backward), tagging the event with a fresh seqnum so
 * later events/messages can be matched back to this seek. Always returns
 * FALSE (one-shot GSource). */
static gboolean
seek_mode_testing (InsanityTest * test)
{
  gboolean res;
  GstEvent *event;
  GstSeekFlags flags = GST_SEEK_FLAG_FLUSH;
  GstSeekType stop_type = GST_SEEK_TYPE_NONE;

  /* Reset global seek props */
  glob_seek_first_buf_ts = GST_CLOCK_TIME_NONE;
  glob_seek_stop_ts = GST_CLOCK_TIME_NONE;
  glob_seek_segment_seektime = 0;

  /* Set seeking arguments */
  switch (glob_in_progress) {
    case TEST_BACKWARD_PLAYBACK:
      glob_seek_rate = -1;
      glob_seek_stop_ts = glob_duration;
      stop_type = GST_SEEK_TYPE_SET;
      break;
    case TEST_FAST_FORWARD:
      glob_seek_rate = 2;
      glob_seek_stop_ts = glob_duration / 2;
      break;
    case TEST_FAST_BACKWARD:
      glob_seek_rate = -2;
      glob_seek_stop_ts = glob_duration;
      stop_type = GST_SEEK_TYPE_SET;
      break;
    default:
      return FALSE;
  }

  glob_seek_got_segment = FALSE;
  event = gst_event_new_seek (glob_seek_rate, GST_FORMAT_TIME,
      flags, GST_SEEK_TYPE_SET, glob_seek_segment_seektime,
      stop_type, glob_seek_stop_ts);

  /* We didn't find any event/message with the seqnum we previously set */
  if (glob_seqnum != 0 && glob_seqnum_found == FALSE)
    glob_wrong_seqnum = TRUE;

  /* Stamp the seek with a new seqnum so downstream events/messages caused
   * by it can be identified. */
  glob_seqnum_found = FALSE;
  glob_seqnum = gst_util_seqnum_next ();
  gst_event_set_seqnum (event, glob_seqnum);
  res = gst_element_send_event (glob_pipeline, event);
  global_last_seek = g_get_monotonic_time ();

  if (!res) {
    validate_current_test (test, FALSE, "Could not send seek event");
    glob_seek_rate = 0;
    glob_seqnum = 0;

    /* ... Next test */
    next_test (test);
  }

  return FALSE;
}
Exemple #13
0
//-----------------------------------------------------------------------------
// Stop video capture: wait for any pending state change, then push an EOS
// through the pipeline (polling up to ~5s for it to round-trip) before
// dropping the pipeline to NULL and releasing it. The EOS round-trip is a
// workaround for hardware buffers shared between v4lsrc and ipu_csc.
void tIMX51Video::StopInternal()
{
    if( m_pGstPipeline )
    {
        //qDebug() << "tHalVideoSr2::StopInternal: gst_element_get_state";
        // Wait up to 3s for any in-flight state change to settle first.
        gst_element_get_state( m_pGstPipeline, NULL, NULL, 3000000000ULL );
    }

    if( m_RunningInternal )
    {
        killTimer(m_TimerId);
        m_RunningInternal = false;

        // Send an event down the gstreamer pipeline to stop the data being processed.
        // When the app has received this message, then we know all plugins have got the message.
        // (Needed because ipu_csc is using hw buffers mmap'd inside v4lsrc plugin. Since src
        // plugin stopped before csc, then this can cause problems)
        if( !EosReceived() )
        {
            qDebug() << "tHalVideoSr2::StopInternal: send EOS";
            // Poll in 1ms steps; 5000 iterations ~= 5 second timeout.
            int timeout = 5000;
            gst_element_send_event( m_pGstPipeline, gst_event_new_eos() );
            while(!EosReceived())
            {
                usleep(1000);
                timeout--;
                if(timeout == 0)
                {
                    qDebug() << "tHalVideoSr2::StopInternal: timeout waiting for EOS";
                    break;
                }
            }
            m_EosReceived = false;
        }
        else
            qDebug() << "tHalVideoSr2::StopInternal: EOS already received?";

        /*qDebug() << "tHalVideoSr2::StopInternal: ->PAUSED";
        gst_element_set_state( m_pGstPipeline, GST_STATE_PAUSED );
        gst_element_get_state( m_pGstPipeline, NULL, NULL, 3000000000ULL );
        while (g_main_context_iteration (NULL, FALSE)); // not sure the purpose of this - copied from 

        qDebug() << "tHalVideoSr2::StopInternal: ->READY";
        gst_element_set_state( m_pGstPipeline, GST_STATE_READY );
        gst_element_get_state( m_pGstPipeline, NULL, NULL, 3000000000ULL );*/

        qDebug() << "tHalVideoSr2::StopInternal: setting GST_STATE_NULL state";
        gst_element_set_state( m_pGstPipeline, GST_STATE_NULL );
        // Block (up to 3s) until the NULL transition completes before unref.
        gst_element_get_state( m_pGstPipeline, NULL, NULL, 3000000000ULL );

        //qDebug() << "tHalVideoSr2::StopInternal: gst_object_unref";  
        gst_object_unref( GST_OBJECT( m_pGstPipeline ) );    
        m_pGstPipeline = 0;
    }
    qDebug() << "tHalVideoSr2::StopInternal: done";  
}
Exemple #14
0
/* RTP session timeout handler: log which case fired (if @user_data names
 * one), push EOS into the global pipeline and stop the main loop. */
static void timeout_callback(GstElement* element, guint session, guint ssrc, gpointer user_data)
{
    if (user_data != NULL)
        printf( "GST: Sending EOS TO THE pipeline in case:%s !!!!\n",(char *)user_data);
    else
        printf( "GST: Sending EOS TO THE pipeline !!!!\n");

    gst_element_send_event(pipeline, gst_event_new_eos());
    g_main_loop_quit (loop);
}
/* Periodic GSource callback: wait until playback reaches glob_wait_time
 * (returning TRUE to be called again, and flagging glob_play_in_time when
 * the wait exceeds its budget), then issue the next percentage-based seek
 * from seek_targets and validate the "seek" checklist item. Returns FALSE
 * once the seek has been attempted. */
static gboolean
wait_and_do_seek (gpointer data)
{
  InsanityTest *test = data;
  GstEvent *event;
  gboolean res;

  if (GST_CLOCK_TIME_IS_VALID (glob_wait_time)) {
    GstClockTime cur = hls_test_get_position (test);

    /* Remember when we started waiting so we can enforce a time budget. */
    if (glob_first_wait == 0)
      glob_first_wait = g_get_monotonic_time ();

    if (cur < glob_wait_time) {
      guint64 diff = g_get_monotonic_time () - glob_first_wait;

      if (diff > glob_playback_time * G_USEC_PER_SEC + PLAY_TIMEOUT)
        glob_play_in_time = FALSE;
      else
        return TRUE;
    }
  }

  glob_first_wait = 0;

  LOG ("Seeking at %i\n", seek_targets[glob_seek_nb].perc);

  /* If duration did not become known yet, we cannot test */
  if (!GST_CLOCK_TIME_IS_VALID (glob_duration)) {
    insanity_test_validate_checklist_item (test, "duration-known", FALSE, NULL);
    insanity_test_done (test);
    return FALSE;
  }

  /* Convert the percentage target into an absolute stream time. */
  glob_target = gst_util_uint64_scale (glob_duration,
      seek_targets[glob_seek_nb].perc, 100);

  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, glob_target, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);

  glob_validate_on_playing = "seek";
  res = gst_element_send_event (glob_pipeline, event);
  if (!res) {
    glob_validate_on_playing = NULL;
    insanity_test_validate_checklist_item (test, "seek", FALSE,
        "Failed to send seek event");
    return FALSE;
  }
  seek_targets[glob_seek_nb].seeked = TRUE;
  /* Block until the seek finishes (or SEEK_TIMEOUT elapses). */
  gst_element_get_state (glob_pipeline, NULL, NULL, SEEK_TIMEOUT);
  insanity_test_validate_checklist_item (test, "seek", TRUE, NULL);

  return FALSE;
}
/* Thread-pool worker: perform gst_element_send_event() on behalf of the
 * requesting thread, record the result in the shared ThreadData and signal
 * completion via notify(). */
static void
send_event_in_thread(gpointer data, G_GNUC_UNUSED gpointer user_data)
{
    ThreadData *thread_data = (ThreadData *)data;
    SendEventData *event_data = &(thread_data->data.send_event_data);
    gboolean sent;

    sent = gst_element_send_event(thread_data->element, event_data->event);
    event_data->result = sent;
    notify(thread_data);
}
Exemple #17
0
/* GLib timeout callback: ask the element in @data to shut down by sending
 * EOS. Returns FALSE so the source fires only once. */
static gboolean
on_timeout (gpointer data)
{
  GstEvent *eos = gst_event_new_eos ();

  /* gst_element_send_event() takes ownership of the event whether or not
   * delivery succeeds, so it must NOT be unreffed on failure (the previous
   * code double-unreffed it here). */
  if (!gst_element_send_event (GST_ELEMENT (data), eos))
    GST_ERROR ("failed to send end of stream event");

  return FALSE;
}
Exemple #18
0
// Apply the current _rate to playback by issuing a flushing, accurate seek
// from (or to) the current position through the video sink. Bails out when
// there is no pipeline, seeking is disabled, or the position query fails.
void  VideoImpl::_updateRate()
{
  // Check different things.
  if (_pipeline == NULL)
  {
    qWarning() << "Cannot set rate: no pipeline!" << endl;
    return;
  }

  if (!_seekEnabled)
  {
    qWarning() << "Cannot set rate: seek not working" << endl;
    return;
  }

  if (!_isMovieReady())
  {
    // NOTE(review): no early return here — the seek proceeds even when the
    // movie is not ready; confirm this is intentional.
    qWarning() << "Movie is not yet ready to play, cannot seek yet." << endl;
  }

  // Obtain the current position, needed for the seek event.
  gint64 position;
  if (!gst_element_query_position (_pipeline, GST_FORMAT_TIME, &position)) {
    qWarning() << "Unable to retrieve current position." << endl;
    return;
  }

  // Create the seek event.
  GstEvent *seekEvent;
  if (_rate > 0.0) {
    // Rate is positive (playing the video in normal direction)
    // Set new rate as a first argument. Provide position 0 so that we go to 0:00
    seekEvent = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0); // Go to 0:00
  } else {
    // Rate is negative
    // Set new rate as a first arguemnt. Provide the position we were already at.
    seekEvent = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
  }

  // If we have not done so, obtain the sink through which we will send the seek events.
  if (_appsink0 == NULL) {
    g_object_get (_pipeline, "video-sink", &_appsink0, NULL);
  }

  // Send the event.
  if (!gst_element_send_event (_appsink0, seekEvent)) {
    qWarning() << "Cannot perform seek event" << endl;
  }

  qDebug() << "Current rate: " << _rate << "." << endl;
}
/* Idle callback: poll SDL for quit requests (window close or the ESC key).
 * When one arrives, push EOS into the pipeline and remove this source
 * (return FALSE); otherwise keep polling (return TRUE). */
static gboolean
update_sdl_scene (gpointer data)
{
  GstElement *pipeline = (GstElement *) data;
  SDL_Event event;

  while (SDL_PollEvent (&event)) {
    gboolean quit_requested = (event.type == SDL_QUIT)
        || (event.type == SDL_KEYDOWN && event.key.keysym.sym == SDLK_ESCAPE);

    if (quit_requested) {
      gst_element_send_event (GST_ELEMENT (pipeline), gst_event_new_eos ());
      return FALSE;
    }
  }

  return TRUE;
}
/* Request a key frame by sending a custom upstream GstForceKeyUnit event
 * (with all-headers=TRUE) through @valve. */
static void
kms_recorder_end_point_send_force_key_unit_event (GstElement * valve)
{
  GstStructure *structure;
  GstEvent *event;

  GST_DEBUG ("Sending key ");
  structure = gst_structure_new ("GstForceKeyUnit",
      "all-headers", G_TYPE_BOOLEAN, TRUE, NULL);
  event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, structure);
  gst_element_send_event (valve, event);
}
Exemple #21
0
// Seek the pipeline to @pos (milliseconds). The seek also re-applies
// m_tempoRate, which is the only way GStreamer changes playback tempo.
void MediaPlayer::seekTo(qint64 pos)
{
    GstSeekFlags flags = (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
    GstEvent * event = gst_event_new_seek( m_tempoRate, GST_FORMAT_TIME, flags,
                                           GST_SEEK_TYPE_SET, pos * GST_MSECOND,
                                           GST_SEEK_TYPE_NONE, 0 );

    gst_element_send_event( m_gst_pipeline, event );
}
Exemple #22
0
/* Flush-seek the video sink to @position (keyframe-aligned), playing from
 * there to the end of the stream. No-op when no pipeline exists. */
static void
pipeline_seek (APP_STATE_T * state, gint64 position)
{
  GstEvent *seek;

  if (!state->pipeline)
    return;

  seek = gst_event_new_seek (1.0,
      GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
      GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE);
  if (!gst_element_send_event (state->vsink, seek)) {
    g_print ("seek failed\n");
  }
}
/*
 *  PsychGSSetMovieTimeIndex()  -- Set current playback time of movie, perform active seek if needed.
 *
 *  timeindex is interpreted as a frame index when indexIsFrames is true,
 *  otherwise as seconds. Blocks up to 30s for the seek to complete and
 *  returns the playback time that was current before the seek.
 */
double PsychGSSetMovieTimeIndex(int moviehandle, double timeindex, psych_bool indexIsFrames)
{
    GstElement		*theMovie;
    double		oldtime;
    long		targetIndex;
    GstEvent            *event;
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;    
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle !!!");
    }
    
    // Retrieve current timeindex:
    oldtime = PsychGSGetMovieTimeIndex(moviehandle);

    // TODO NOTE: We could use GST_SEEK_FLAG_SKIP to allow framedropping on fast forward/reverse playback...

    // Index based or target time based seeking?
    if (indexIsFrames) {
	// Index based seeking:		
	// TODO FIXME: This doesn't work (well) at all! Something's wrong here...
	// Seek to given targetIndex:
	targetIndex = (long) (timeindex + 0.5);

	// Simple seek, frame buffer (index) oriented, with pipeline flush and accurate seek,
	// i.e., not locked to keyframes, but frame-accurate: GST_FORMAT_DEFAULT?
	// gst_element_seek_simple(theMovie, GST_FORMAT_BUFFERS, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, targetIndex);
	event = gst_event_new_seek(1.0, GST_FORMAT_BUFFERS, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
				   GST_SEEK_TYPE_SET, targetIndex, GST_SEEK_TYPE_END, 0);
	gst_element_send_event(theMovie, event);
    }
    else {
	// Time based seeking:
	// Set new timeindex as time in seconds:

	// Simple seek, time-oriented, with pipeline flush and accurate seek,
	// i.e., not locked to keyframes, but frame-accurate:
	gst_element_seek_simple(theMovie, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, (gint64) (timeindex * (double) 1e9));
    }

    // Block until seek completed, failed or timeout of 30 seconds reached:
    gst_element_get_state(theMovie, NULL, NULL, (GstClockTime) (30 * 1e9));

    // Return old time value of previous position:
    return(oldtime);
}
// Force the depth-stream encoder to emit a key frame at the current running
// time by injecting a downstream force-key-unit event through appSrcDepth.
void ofxGstRTPServer::emitDepthKeyFrame(){
	// gst_pipeline_get_clock() already returns a new reference (transfer
	// full); the previous extra gst_object_ref() with a single unref leaked
	// one clock reference per call.
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	GstClockTime time = gst_clock_get_time (clock);
	// Running time = absolute clock time minus the pipeline's base time.
	GstClockTime now =  time - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);
	GstEvent * keyFrameEvent = gst_video_event_new_downstream_force_key_unit(now,
															 time,
															 now,
															 TRUE,
															 0);
	gst_element_send_event(appSrcDepth,keyFrameEvent);

}
/* One-shot GSource callback: ask the global pipeline to finish by sending
 * EOS, block until EOS or an error is posted on the bus, then quit @data's
 * main loop. Returns FALSE to remove the source. */
static gboolean close_pipeline(gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;
    GstMessage *msg;

    gst_element_send_event(pipeline, gst_event_new_eos());
    msg = gst_bus_timed_pop_filtered(GST_ELEMENT_BUS(pipeline), GST_CLOCK_TIME_NONE, GST_MESSAGE_EOS | GST_MESSAGE_ERROR);

    /* gst_bus_timed_pop_filtered() can return NULL (e.g. when the bus is
     * set to flushing); unreffing NULL would trigger a GLib warning. */
    if (msg != NULL)
        gst_message_unref(msg);

    g_main_loop_quit(loop);

    return FALSE;
}
/* Begin shutting down the recorder's active pipeline by sending EOS; the
 * pipeline's bus watch receives the resulting EOS message (after the last
 * frame is written) and performs the final cleanup. */
static void
recorder_close_pipeline (ShellRecorder *recorder)
{
  if (recorder->current_pipeline == NULL)
    return;

  gst_element_send_event (recorder->current_pipeline->pipeline,
      gst_event_new_eos());
  recorder->current_pipeline = NULL;
}
/* Send a non-flushing 40ms step to @bin at a rate that oscillates
 * sinusoidally around 1.2 as the global @period phase advances. */
static void
do_step (GstElement * bin)
{
  gdouble step_rate;

  /* Rate uses the phase BEFORE it is advanced, matching the cadence of
   * repeated calls. */
  step_rate = sin (period);
  period += M_PI / 150;
  step_rate += 1.2;

  gst_element_send_event (bin,
      gst_event_new_step (GST_FORMAT_TIME, 40 * GST_MSECOND, step_rate, FALSE,
          FALSE));
}
Exemple #28
0
// Apply the current _rate to playback by issuing a flushing, accurate seek
// from (or to) the current position via the video sink. Bails out when no
// pipeline exists, seeking is disabled, or the position query fails.
void  MediaImpl::_updateRate()
{
    if (_pipeline == NULL)
    {
        qDebug() << "Cannot set rate: no pipeline!" << endl;
        return;
    }

    if (!_seekEnabled)
    {
        qDebug() << "Cannot set rate: seek not working" << endl;
        return;
    }

    if (!_isMovieReady())
    {
        // NOTE(review): no early return — the seek proceeds even when the
        // movie is not ready; confirm this is intentional.
        qDebug() << "Movie is not yet ready to play, cannot seek yet." << endl;
    }

    gint64 position;
    GstEvent *seekEvent;

    /* Obtain the current position, needed for the seek event */
    if (!gst_element_query_position (_pipeline, GST_FORMAT_TIME, &position)) {
        g_printerr ("Unable to retrieve current position.\n");
        return;
    }

    /* Create the seek event */
    if (_rate > 0) {
        // Forward: continue from the current position at the new rate.
        seekEvent = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0);
    } else {
        // Backward: play the [0, position] range in reverse.
        seekEvent = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
    }

    if (_appsink0 == NULL) {
        /* If we have not done so, obtain the sink through which we will send the seek events */
        g_object_get (_pipeline, "video-sink", &_appsink0, NULL);
    }

    /* Send the event */
    gst_element_send_event (_appsink0, seekEvent);

    g_print ("Current rate: %g\n", _rate);
}
/* Restrict the next download to the byte range [@range_start, @range_end]
 * by flush-seeking the source element in bytes format. A full-range
 * request (start 0, open end) needs no seek and succeeds immediately.
 * Returns TRUE on success. */
static gboolean
gst_uri_downloader_set_range (GstUriDownloader * downloader,
    gint64 range_start, gint64 range_end)
{
  gboolean range_requested;

  g_return_val_if_fail (range_start >= 0, FALSE);
  g_return_val_if_fail (range_end >= -1, FALSE);

  range_requested = (range_start != 0) || (range_end >= 0);
  if (!range_requested)
    return TRUE;

  return gst_element_send_event (downloader->priv->urisrc,
      gst_event_new_seek (1.0, GST_FORMAT_BYTES, GST_SEEK_FLAG_FLUSH,
          GST_SEEK_TYPE_SET, range_start, GST_SEEK_TYPE_SET, range_end));
}
/* One-shot GSource callback: step @bin forward (flushing) by a duration
 * that oscillates sinusoidally between roughly 10ms and 210ms as the
 * global @period phase advances. Returns FALSE to remove the source. */
static gboolean
do_step (GstElement * bin)
{
  gdouble step_length;

  /* Duration uses the phase BEFORE it is advanced. */
  step_length = sin (period);
  period += G_PI / 40;
  step_length += 1.1;
  step_length *= 100 * GST_MSECOND;

  gst_element_send_event (bin,
      gst_event_new_step (GST_FORMAT_TIME, step_length, 1.0, TRUE, FALSE));

  return FALSE;
}