Example #1
static gboolean
gst_hls_demux_get_next_fragment (GstHLSDemux * demux)
{
  GstBuffer *buf;
  guint avail;
  const gchar *next_fragment_uri;
  GstClockTime duration;
  GstClockTime timestamp;
  gboolean discont;

  if (!gst_m3u8_client_get_next_fragment (demux->client, &discont,
          &next_fragment_uri, &duration, &timestamp)) {
    GST_INFO_OBJECT (demux, "This playlist doesn't contain more fragments");
    demux->end_of_playlist = TRUE;
    gst_task_start (demux->task);
    return FALSE;
  }

  GST_INFO_OBJECT (demux, "Fetching next fragment %s", next_fragment_uri);

  if (!gst_hls_demux_fetch_location (demux, next_fragment_uri)) {
    /* FIXME: The gst_m3u8_get_next_fragment call increments the sequence
       number, but another thread might call get_next_fragment too; this
       decrement will then not re-download the failed fragment and might
       instead duplicate the download of a fragment that already succeeded
       (see the sketch after this function).
     */
    g_atomic_int_add (&demux->client->sequence, -1);
    return FALSE;
  }

  avail = gst_adapter_available (demux->download);
  buf = gst_adapter_take_buffer (demux->download, avail);
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;

  /* We actually need to do this every time we switch bitrate */
  if (G_UNLIKELY (demux->do_typefind)) {
    GstCaps *caps = gst_type_find_helper_for_buffer (NULL, buf, NULL);

    if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) {
      gst_caps_replace (&demux->input_caps, caps);
      /* gst_pad_set_caps (demux->srcpad, demux->input_caps); */
      GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT,
          demux->input_caps);
      demux->do_typefind = FALSE;
    } else
      gst_caps_unref (caps);
  }
  gst_buffer_set_caps (buf, demux->input_caps);

  if (discont) {
    GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  g_queue_push_tail (demux->queue, buf);
  gst_task_start (demux->task);
  gst_adapter_clear (demux->download);
  return TRUE;
}
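The FIXME in the example above describes a race when rolling the sequence counter back after a failed download. As a hedged illustration only (not the element's actual fix), one option is to undo the increment with a compare-and-swap so a concurrent get_next_fragment call is never double-counted; the helper name, its extra parameter and the assumption that client->sequence is a gint touched only through GLib atomics are hypothetical here.

/* Hypothetical sketch, not part of the original element: roll the sequence
 * back only if no other thread has advanced it since our own increment. */
static void
gst_hls_demux_rollback_sequence (GstHLSDemux * demux, gint seq_after_incr)
{
  /* Step back one fragment only when the counter still holds the value we
   * observed right after incrementing it; otherwise leave it alone so we
   * neither skip the failed fragment nor re-download a successful one. */
  g_atomic_int_compare_and_exchange (&demux->client->sequence,
      seq_after_incr, seq_after_incr - 1);
}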
Example #2
void test_lock_start()
{
  GstTask *t;
  gboolean ret;
  //xmlfile = "test_lock_start";
  std_log(LOG_FILENAME_LINE, "Test Started test_lock_start");

  t = gst_task_create (task_func, NULL);
  fail_if (t == NULL);
  TEST_ASSERT_FAIL
  gst_task_set_lock (t, &task_mutex);
  task_cond = g_cond_new ();
  task_lock = g_mutex_new ();
  g_mutex_lock (task_lock);
  GST_DEBUG ("starting");
  ret = gst_task_start (t);
  fail_unless (ret == TRUE);
  TEST_ASSERT_FAIL
  /* wait for it to spin up */
  GST_DEBUG ("waiting");
  g_cond_wait (task_cond, task_lock);
  GST_DEBUG ("done waiting");
  g_mutex_unlock (task_lock);
  /* cannot set mutex now */
  ASSERT_WARNING (gst_task_set_lock (t, &task_mutex));  //b failing
  GST_DEBUG ("joining");
  ret = gst_task_join (t);
  fail_unless (ret == TRUE);
  TEST_ASSERT_FAIL
  gst_object_unref (t);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #3
static gboolean
gst_hls_demux_sink_event (GstPad * pad, GstEvent * event)
{
  GstHLSDemux *demux = GST_HLS_DEMUX (gst_pad_get_parent (pad));
  GstQuery *query;
  gboolean ret;
  gchar *uri;


  switch (event->type) {
    case GST_EVENT_EOS:{
      gchar *playlist;

      if (demux->playlist == NULL) {
        GST_WARNING_OBJECT (demux, "Received EOS without a playlist.");
        break;
      }

      GST_DEBUG_OBJECT (demux,
          "Got EOS on the sink pad: main playlist fetched");

      query = gst_query_new_uri ();
      ret = gst_pad_peer_query (demux->sinkpad, query);
      if (ret) {
        gst_query_parse_uri (query, &uri);
        gst_hls_demux_set_location (demux, uri);
        g_free (uri);
      }
      gst_query_unref (query);

      playlist = gst_hls_src_buf_to_utf8_playlist ((gchar *)
          GST_BUFFER_DATA (demux->playlist), GST_BUFFER_SIZE (demux->playlist));
      gst_buffer_unref (demux->playlist);
      if (playlist == NULL) {
        GST_WARNING_OBJECT (demux, "Error validating first playlist.");
      } else if (!gst_m3u8_client_update (demux->client, playlist)) {
        /* In most cases, this will happen if we set a wrong url in the
         * source element and we have received the 404 HTML response instead of
         * the playlist */
        GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."), NULL);
        return FALSE;
      }

      if (!ret && gst_m3u8_client_is_live (demux->client)) {
        GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
            ("Failed querying the playlist uri, "
                "required for live sources."), NULL);
        return FALSE;
      }

      gst_task_start (demux->task);
      gst_event_unref (event);
      return TRUE;
    }
    default:
      break;
  }

  return gst_pad_event_default (pad, event);
}
Example #4
void test_no_lock()
{
  GstTask *t;
  gboolean ret;
  //xmlfile = "test_no_lock";
  std_log(LOG_FILENAME_LINE, "Test Started test_no_lock");

  t = gst_task_create (task_func, NULL);
  fail_if (t == NULL);
  TEST_ASSERT_FAIL

  /* stop should be possible without lock */
  gst_task_stop (t);
  /* pause should give a warning */
  ASSERT_WARNING (ret = gst_task_pause (t));  //b failing
  fail_unless (ret == FALSE);
  TEST_ASSERT_FAIL
  /* start should give a warning */
  ASSERT_WARNING (ret = gst_task_start (t));
  fail_unless (ret == FALSE);
  TEST_ASSERT_FAIL
  /* stop should be possible without lock */
  gst_task_stop (t);
  gst_object_unref (t);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #5
static void
start_eos_task (GstAmlVdec * amlvdec)
{
  if (!amlvdec->eos_task) {
    amlvdec->eos_task =
        gst_task_new ((GstTaskFunction) gst_amlvdec_polling_eos, amlvdec, NULL);
    gst_task_set_lock (amlvdec->eos_task, &amlvdec->eos_lock);
  }
  gst_task_start (amlvdec->eos_task);
}
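start_eos_task above lazily creates the polling task and (re)starts it. As a hedged counterpart that is not taken from the original element, the teardown would typically stop, join and unref the task; the sketch assumes the same eos_task field and that gst_amlvdec_polling_eos returns once the task leaves the started state.

/* Hypothetical teardown sketch; field names follow the example above but the
 * body is not from the original element. */
static void
stop_eos_task (GstAmlVdec * amlvdec)
{
  if (!amlvdec->eos_task)
    return;
  gst_task_stop (amlvdec->eos_task);    /* ask the loop function to finish */
  gst_task_join (amlvdec->eos_task);    /* wait for the task thread to exit */
  gst_object_unref (amlvdec->eos_task);
  amlvdec->eos_task = NULL;
}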
Example #6
void
gst_curl_multi_context_ref (GstCurlMultiContext * thiz)
{
  g_mutex_lock (&thiz->mutex);
  if (thiz->refcount == 0) {
    /* Set up various in-task properties */

    /* set up curl */
    thiz->multi_handle = curl_multi_init ();

    curl_multi_setopt (thiz->multi_handle,
                       CURLMOPT_PIPELINING, 1);
#ifdef CURLMOPT_MAX_HOST_CONNECTIONS
    curl_multi_setopt (thiz->multi_handle,
                       CURLMOPT_MAX_HOST_CONNECTIONS, 1);
#endif

    /* Start the thread */
#if GST_CHECK_VERSION(1,0,0)
    thiz->task = gst_task_new (
            (GstTaskFunction) gst_curl_multi_context_loop,
            (gpointer) thiz, NULL);
#else
    thiz->task = gst_task_create (
            (GstTaskFunction) gst_curl_multi_context_loop,
            (gpointer) thiz);
#endif
    gst_task_set_lock (thiz->task,
                       &thiz->task_rec_mutex);
    if (gst_task_start (thiz->task) == FALSE) {
      /*
       * This is a pretty critical failure and is not recoverable, so commit
       * sudoku and run away.
       */
      GST_ERROR ("Couldn't start curl_multi task! Aborting.");
      abort ();
    }
    GST_INFO ("Curl multi loop has been correctly initialised!");
  }
  thiz->refcount++;
  g_mutex_unlock (&thiz->mutex);
}
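gst_curl_multi_context_ref starts the task when the first reference appears, so the matching unref would normally stop and join it when the last reference is dropped. The sketch below is a hedged guess at that shape rather than the library's actual implementation; it assumes the loop function exits once the task is stopped and that no new reference races with the final unref.

/* Hypothetical sketch only; the real gst_curl_multi_context_unref may differ. */
void
gst_curl_multi_context_unref (GstCurlMultiContext * thiz)
{
  gboolean last;

  g_mutex_lock (&thiz->mutex);
  thiz->refcount--;
  last = (thiz->refcount == 0);
  if (last)
    gst_task_stop (thiz->task);         /* request the loop to finish */
  g_mutex_unlock (&thiz->mutex);

  if (last) {
    /* Join outside the mutex so the loop function can still take it while
     * winding down; assumes no concurrent ref is taken at this point. */
    gst_task_join (thiz->task);
    gst_object_unref (thiz->task);
    thiz->task = NULL;
    curl_multi_cleanup (thiz->multi_handle);
    thiz->multi_handle = NULL;
  }
}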
Example #7
static GstStateChangeReturn
dvb_base_bin_change_state (GstElement * element, GstStateChange transition)
{
  DvbBaseBin *dvbbasebin;
  GstStateChangeReturn ret;

  dvbbasebin = GST_DVB_BASE_BIN (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      if (dvbbasebin->tsparse == NULL) {
        GST_ELEMENT_ERROR (dvbbasebin, CORE, MISSING_PLUGIN, (NULL),
            ("No 'tsparse' element, check your GStreamer installation."));
        return GST_STATE_CHANGE_FAILURE;
      }
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_poll_set_flushing (dvbbasebin->poll, FALSE);
      g_rec_mutex_lock (&dvbbasebin->lock);
      gst_task_start (dvbbasebin->task);
      g_rec_mutex_unlock (&dvbbasebin->lock);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_poll_set_flushing (dvbbasebin->poll, TRUE);
      g_rec_mutex_lock (&dvbbasebin->lock);
      gst_task_stop (dvbbasebin->task);
      g_rec_mutex_unlock (&dvbbasebin->lock);
      dvb_base_bin_reset (dvbbasebin);
      break;
    default:
      break;
  }

  return ret;
}
Example #8
/**
 * Tell the GST sink element it is time to prepare to do work
 *
 * This is one of the last functions GStreamer will call when the pipeline
 * is being put together. It is the last place the element has a chance to
 * allocate resources and, in our case, to start up our background task for
 * network connectivity.
 * After this function returns, we are ready to start processing data from the pipeline.
 *
 * We allocate some of the last-minute buffers and set up a connection to the network;
 * this is used primarily by the background task, but we need access to it for name initialization.
 *
 * Next we initialize our fifo queue and start up the background task.
 *
 * Lastly we return control to GStreamer to begin processing information.
 *
 * \param bsink		pointer to the context sink element
 * \return true if the initialization went well and we are ready to process data, false otherwise
 */
static gboolean
gst_ccnxsink_start (GstBaseSink * bsink)
{
  Gstccnxsink *me;

  gboolean b_ret = FALSE;

  me = GST_CCNXSINK (bsink);
  me->temp = ccn_charbuf_create ();
  me->lastPublish = ccn_charbuf_create ();

  GST_DEBUG ("CCNxSink: starting, getting connections");

  GST_DEBUG ("CCNxSink: step 1, %s", me->uri);

  /* setup the connection to ccnx */
  setup_ccn (me);
  GST_DEBUG ("CCNxSink: ccn is setup");

  /* setup and start the background task */
  eventTask = gst_task_create (ccn_event_thread, me);
  if (NULL == eventTask) {
    GST_ELEMENT_ERROR (me, RESOURCE, READ, (NULL),
        ("creating event thread failed"));
    return FALSE;
  }
  me->fifo_cond = g_cond_new ();
  me->fifo_lock = g_mutex_new ();
  gst_task_set_lock (eventTask, &task_mutex);
  eventCond = g_cond_new ();
  eventLock = g_mutex_new ();
  b_ret = gst_task_start (eventTask);
  if (FALSE == b_ret) {
    GST_ELEMENT_ERROR (me, RESOURCE, READ, (NULL),
        ("starting event thread failed"));
    return FALSE;
  }
  GST_DEBUG ("CCNxSink: event thread started");

  return TRUE;
}
Example #9
GstAlsaMixer *
gst_alsa_mixer_new (const char *device, GstAlsaMixerDirection dir)
{
  GstAlsaMixer *ret = NULL;

  g_return_val_if_fail (device != NULL, NULL);

  ret = g_new0 (GstAlsaMixer, 1);

  if (pipe (ret->pfd) == -1)
    goto error;

  ret->rec_mutex = g_new (GStaticRecMutex, 1);
  g_static_rec_mutex_init (ret->rec_mutex);

  ret->task_mutex = g_new (GStaticRecMutex, 1);
  g_static_rec_mutex_init (ret->task_mutex);

  ret->task = gst_task_create (task_monitor_alsa, ret);
  gst_task_set_lock (ret->task, ret->task_mutex);

  ret->device = g_strdup (device);
  ret->dir = dir;

  if (!gst_alsa_mixer_open (ret))
    goto error;

  if (gst_task_start (ret->task) == FALSE) {
    GST_WARNING ("Could not start alsamixer task");
  }

  return ret;

  /* ERRORS */
error:
  {
    gst_alsa_mixer_free (ret);
    return NULL;
  }
}
Example #10
static gboolean
gst_swfdec_sink_event (GstPad * pad, GstEvent * event)
{
  GstSwfdec *swfdec;
  gboolean ret;

  swfdec = GST_SWFDEC (GST_PAD_PARENT (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      swfdec_decoder_eof (swfdec->decoder);
      gst_task_start (swfdec->task);
      gst_event_unref (event);
      ret = TRUE;
      break;
    default:
      ret = gst_pad_event_default (pad, event);
      break;
  }

  return ret;
}
Example #11
static GstStateChangeReturn
gst_qt_moov_recover_change_state (GstElement * element,
    GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      qtmr->task = gst_task_create (gst_qt_moov_recover_run, qtmr);
      qtmr->task_mutex = g_new (GStaticRecMutex, 1);
      g_static_rec_mutex_init (qtmr->task_mutex);
      gst_task_set_lock (qtmr->task, qtmr->task_mutex);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      gst_task_start (qtmr->task);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      gst_task_stop (qtmr->task);
      gst_task_join (qtmr->task);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      g_assert (gst_task_get_state (qtmr->task) == GST_TASK_STOPPED);
      gst_object_unref (qtmr->task);
      qtmr->task = NULL;
      g_static_rec_mutex_free (qtmr->task_mutex);
      break;
    default:
      break;
  }
  return ret;
}
Example #12
static GstStateChangeReturn
gst_hls_demux_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstHLSDemux *demux = GST_HLS_DEMUX (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_hls_demux_reset (demux, FALSE);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      /* Start the streaming loop in paused only if we already received
         the main playlist. It might have been stopped if we were in PAUSED
         state and we filled our queue with enough cached fragments
       */
      if (gst_m3u8_client_get_uri (demux->client)[0] != '\0')
        gst_task_start (demux->updates_task);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      gst_task_stop (demux->updates_task);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      demux->cancelled = TRUE;
      gst_hls_demux_stop (demux);
      gst_task_join (demux->stream_task);
      gst_hls_demux_reset (demux, FALSE);
      break;
    default:
      break;
  }
  return ret;
}
Example #13
static gboolean
gst_decklink_src_start (GstElement * element)
{
    GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
    IDeckLinkIterator *iterator;
    DeckLinkCaptureDelegate *delegate;
    //IDeckLinkDisplayModeIterator *mode_iterator;
    //IDeckLinkDisplayMode *mode;
    BMDAudioSampleType sample_depth;
    int channels;
    HRESULT ret;
    const GstDecklinkMode *mode;
    IDeckLinkConfiguration *config;
    BMDVideoConnection conn;
    BMDAudioConnection aconn;
    int i;

    GST_DEBUG_OBJECT (decklinksrc, "start");

    iterator = CreateDeckLinkIteratorInstance ();
    if (iterator == NULL) {
        GST_ERROR ("no driver");
        return FALSE;
    }

    ret = iterator->Next (&decklinksrc->decklink);
    if (ret != S_OK) {
        GST_ERROR ("no card");
        return FALSE;
    }
    for (i = 0; i < decklinksrc->subdevice; i++) {
        ret = iterator->Next (&decklinksrc->decklink);
        if (ret != S_OK) {
            GST_ERROR ("no card");
            return FALSE;
        }
    }

    ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkInput,
            (void **) &decklinksrc->input);
    if (ret != S_OK) {
        GST_ERROR ("query interface failed");
        return FALSE;
    }

    delegate = new DeckLinkCaptureDelegate ();
    delegate->priv = decklinksrc;
    decklinksrc->input->SetCallback (delegate);

    ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkConfiguration,
            (void **) &config);
    if (ret != S_OK) {
        GST_ERROR ("query interface failed");
        return FALSE;
    }

    switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
        conn = bmdVideoConnectionSDI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_HDMI:
        conn = bmdVideoConnectionHDMI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
        conn = bmdVideoConnectionOpticalSDI;
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
        conn = bmdVideoConnectionComponent;
        aconn = bmdAudioConnectionAnalog;
        break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
        conn = bmdVideoConnectionComposite;
        aconn = bmdAudioConnectionAnalog;
        break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
        conn = bmdVideoConnectionSVideo;
        aconn = bmdAudioConnectionAnalog;
        break;
    }

    ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
    if (ret != S_OK) {
        GST_ERROR ("set configuration (input source)");
        return FALSE;
    }

    if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
        ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
                              bmdAnalogVideoFlagCompositeSetup75);
        if (ret != S_OK) {
            GST_ERROR ("set configuration (composite setup)");
            return FALSE;
        }
    }

    switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
        aconn = bmdAudioConnectionEmbedded;
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
        aconn = bmdAudioConnectionAESEBU;
        break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
        aconn = bmdAudioConnectionAnalog;
        break;
    }
    ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
    if (ret != S_OK) {
        GST_ERROR ("set configuration (audio input connection)");
        return FALSE;
    }
#if 0
    ret = decklinksrc->input->GetDisplayModeIterator (&mode_iterator);
    if (ret != S_OK) {
        GST_ERROR ("failed to get display mode iterator");
        return FALSE;
    }

    i = 0;
    while (mode_iterator->Next (&mode) == S_OK) {
        const char *mode_name;

        mode->GetName (&mode_name);

        GST_DEBUG ("%d: mode name: %s", i, mode_name);

        mode->Release ();
        i++;
    }
#endif

    mode = gst_decklink_get_mode (decklinksrc->mode);

    ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
    if (ret != S_OK) {
        GST_ERROR ("enable video input failed");
        return FALSE;
    }

    sample_depth = bmdAudioSampleType16bitInteger;
    channels = 2;
    ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
            sample_depth, channels);
    if (ret != S_OK) {
        GST_ERROR ("enable video input failed");
        return FALSE;
    }

    ret = decklinksrc->input->StartStreams ();
    if (ret != S_OK) {
        GST_ERROR ("start streams failed");
        return FALSE;
    }

    g_static_rec_mutex_lock (&decklinksrc->task_mutex);
    gst_task_start (decklinksrc->task);
    g_static_rec_mutex_unlock (&decklinksrc->task_mutex);

    return TRUE;
}
Example #14
/* FIXME: post error messages for the misc. failures */
static gboolean
gst_decklink_src_start (GstElement * element)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element);
  DeckLinkCaptureDelegate *delegate;
  BMDAudioSampleType sample_depth;
  int channels;
  HRESULT ret;
  const GstDecklinkMode *mode;
  IDeckLinkConfiguration *config;
  BMDVideoConnection conn;
  BMDAudioConnection aconn;

  GST_DEBUG_OBJECT (decklinksrc, "start");

  decklinksrc->decklink = gst_decklink_get_nth_device (decklinksrc->device);
  if (decklinksrc->decklink == NULL) {
    return FALSE;
  }

  decklinksrc->input = gst_decklink_get_nth_input (decklinksrc->device);

  delegate = new DeckLinkCaptureDelegate ();
  delegate->priv = decklinksrc;
  ret = decklinksrc->input->SetCallback (delegate);
  if (ret != S_OK) {
    GST_ERROR ("set callback failed (input source)");
    return FALSE;
  }

  decklinksrc->config = gst_decklink_get_nth_config (decklinksrc->device);
  config = decklinksrc->config;

  switch (decklinksrc->connection) {
    default:
    case GST_DECKLINK_CONNECTION_SDI:
      conn = bmdVideoConnectionSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_HDMI:
      conn = bmdVideoConnectionHDMI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
      conn = bmdVideoConnectionOpticalSDI;
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_CONNECTION_COMPONENT:
      conn = bmdVideoConnectionComponent;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_COMPOSITE:
      conn = bmdVideoConnectionComposite;
      aconn = bmdAudioConnectionAnalog;
      break;
    case GST_DECKLINK_CONNECTION_SVIDEO:
      conn = bmdVideoConnectionSVideo;
      aconn = bmdAudioConnectionAnalog;
      break;
  }

  ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (input source)");
    return FALSE;
  }

  if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
    ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
        bmdAnalogVideoFlagCompositeSetup75);
    if (ret != S_OK) {
      GST_ERROR ("set configuration (composite setup)");
      return FALSE;
    }
  }

  switch (decklinksrc->audio_connection) {
    default:
    case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
      /* set above */
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
      aconn = bmdAudioConnectionEmbedded;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
      aconn = bmdAudioConnectionAESEBU;
      break;
    case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
      aconn = bmdAudioConnectionAnalog;
      break;
  }
  ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
  if (ret != S_OK) {
    GST_ERROR ("set configuration (audio input connection)");
    return FALSE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  sample_depth = bmdAudioSampleType16bitInteger;
  channels = 2;
  ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
      sample_depth, channels);
  if (ret != S_OK) {
    GST_ERROR ("enable video input failed");
    return FALSE;
  }

  ret = decklinksrc->input->StartStreams ();
  if (ret != S_OK) {
    GST_ERROR ("start streams failed");
    return FALSE;
  }

  g_rec_mutex_lock (&decklinksrc->task_mutex);
  gst_task_start (decklinksrc->task);
  g_rec_mutex_unlock (&decklinksrc->task_mutex);

  return TRUE;
}
Example #15
static GstStateChangeReturn
gst_merger_change_state (GstElement * element, GstStateChange transition)
{
  GstMerger *merger = GST_MERGER (element);
  GstStateChangeReturn ret;

  GST_INFO_OBJECT (merger, "Transition %d", transition);

  switch (transition) {

    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    {
      gboolean res;

      GST_INFO_OBJECT (merger, "Stopping Task");
      merger->stop = 1;
      g_cond_signal (&merger->task_cond);
      GST_INFO_OBJECT (merger, "Stopping Task.");
      res = gst_task_stop (merger->task);
      GST_INFO_OBJECT (merger, "Stopping Task.. %d", res);

      GST_INFO_OBJECT (merger, "Task state %d", merger->task->state);
      res = gst_task_join (merger->task);
      GST_INFO_OBJECT (merger, "Stopping Task... %d", res);
    }
      break;

    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
    {
      gboolean res;
      GST_INFO_OBJECT (merger, "Starting Task");
      merger->stop = 0;
      res = gst_task_start (merger->task);
      GST_INFO_OBJECT (merger, "Starting Task. %d", res);
    }
      break;

    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_segment_init (&merger->s_segment, GST_FORMAT_UNDEFINED);
      g_queue_init (&merger->bufs_l);
      g_queue_init (&merger->bufs_r);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      GstBuffer *buf;
      while ((buf = g_queue_pop_head (&merger->bufs_l)))
        gst_buffer_unref (buf);
      while ((buf = g_queue_pop_head (&merger->bufs_r)))
        gst_buffer_unref (buf);
      break;
    }
    default:
      break;
  }
  return ret;
}
Example #16
GstFlowReturn
gst_swfdec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  int ret;
  GstSwfdec *swfdec = GST_SWFDEC (GST_PAD_PARENT (pad));

  g_static_rec_mutex_lock (&swfdec->mutex);
  GST_DEBUG_OBJECT (swfdec, "about to call swfdec_decoder_parse");
  ret = swfdec_decoder_parse (swfdec->decoder);
  if (ret == SWF_NEEDBITS) {
    guint buf_size;
    GstBuffer *prev_buffer;

    GST_DEBUG_OBJECT (swfdec, "SWF_NEEDBITS, feeding data to swfdec-decoder");
    buf_size = gst_adapter_available (swfdec->adapter);
    if (buf_size) {
      prev_buffer = gst_buffer_new_and_alloc (buf_size);
      memcpy (GST_BUFFER_DATA (prev_buffer), gst_adapter_peek (swfdec->adapter,
              buf_size), buf_size);
      gst_adapter_flush (swfdec->adapter, buf_size);

      swfdec_decoder_add_buffer (swfdec->decoder,
          gst_swfdec_buffer_to_swf (prev_buffer));
    }

    swfdec_decoder_add_buffer (swfdec->decoder,
        gst_swfdec_buffer_to_swf (buffer));

  } else if (ret == SWF_CHANGE) {

    GstCaps *caps;
    double rate;

    GstTagList *taglist;

    GST_DEBUG_OBJECT (swfdec, "SWF_CHANGE");
    gst_adapter_push (swfdec->adapter, buffer);

    swfdec_decoder_get_image_size (swfdec->decoder,
        &swfdec->width, &swfdec->height);
    swfdec_decoder_get_rate (swfdec->decoder, &rate);
    swfdec->interval = GST_SECOND / rate;

    swfdec->frame_rate_n = (int) (rate * 256.0);
    swfdec->frame_rate_d = 256;

    caps = gst_caps_copy (gst_pad_get_pad_template_caps (swfdec->videopad));
    gst_caps_set_simple (caps,
        "framerate", GST_TYPE_FRACTION, swfdec->frame_rate_n,
        swfdec->frame_rate_d, "height", G_TYPE_INT, swfdec->height, "width",
        G_TYPE_INT, swfdec->width, NULL);
    if (gst_pad_set_caps (swfdec->videopad, caps)) {
      /* good */
    } else {
      gst_caps_unref (caps);
      GST_ELEMENT_ERROR (swfdec, CORE, NEGOTIATION, (NULL), (NULL));
      res = GST_FLOW_ERROR;
      goto done;
    }
    gst_caps_unref (caps);

    caps = gst_caps_copy (gst_pad_get_pad_template_caps (swfdec->audiopad));
    if (gst_pad_set_caps (swfdec->audiopad, caps)) {
      swfdec->have_format = TRUE;
    } else {
      gst_caps_unref (caps);
      GST_ELEMENT_ERROR (swfdec, CORE, NEGOTIATION, (NULL), (NULL));
      res = GST_FLOW_ERROR;
      goto done;
    }

    gst_caps_unref (caps);


    taglist = gst_tag_list_new ();
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
        GST_TAG_ENCODER_VERSION, swfdec_decoder_get_version (swfdec->decoder),
        NULL);
    gst_element_found_tags (GST_ELEMENT (swfdec), taglist);
  } else if (ret == SWF_EOF) {
    GST_DEBUG_OBJECT (swfdec, "SWF_EOF");
    gst_swfdec_render (swfdec, ret);
    gst_task_start (swfdec->task);
  }

done:
  g_static_rec_mutex_unlock (&swfdec->mutex);
  return res;

}
Example #17
static gboolean
gst_hls_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstHLSDemux *demux;

  demux = GST_HLS_DEMUX (parent);

  switch (event->type) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      GList *walk;
      GstClockTime position, current_pos, target_pos;
      gint current_sequence;
      GstM3U8MediaFile *file;

      GST_INFO_OBJECT (demux, "Received GST_EVENT_SEEK");

      if (gst_m3u8_client_is_live (demux->client)) {
        GST_WARNING_OBJECT (demux, "Received seek event for live stream");
        return FALSE;
      }

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
          &stop_type, &stop);

      if (format != GST_FORMAT_TIME)
        return FALSE;

      GST_DEBUG_OBJECT (demux, "seek event, rate: %f start: %" GST_TIME_FORMAT
          " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
          GST_TIME_ARGS (stop));

      GST_M3U8_CLIENT_LOCK (demux->client);
      file = GST_M3U8_MEDIA_FILE (demux->client->current->files->data);
      current_sequence = file->sequence;
      current_pos = 0;
      target_pos = (GstClockTime) start;
      for (walk = demux->client->current->files; walk; walk = walk->next) {
        file = walk->data;

        current_sequence = file->sequence;
        if (current_pos <= target_pos
            && target_pos < current_pos + file->duration) {
          break;
        }
        current_pos += file->duration;
      }
      GST_M3U8_CLIENT_UNLOCK (demux->client);

      if (walk == NULL) {
        GST_WARNING_OBJECT (demux, "Could not find seeked fragment");
        return FALSE;
      }

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_DEBUG_OBJECT (demux, "sending flush start");
        gst_pad_push_event (demux->srcpad, gst_event_new_flush_start ());
      }

      demux->cancelled = TRUE;
      gst_task_pause (demux->stream_task);
      gst_uri_downloader_cancel (demux->downloader);
      gst_task_stop (demux->updates_task);
      gst_task_pause (demux->stream_task);

      /* wait for streaming to finish */
      g_rec_mutex_lock (&demux->stream_lock);

      demux->need_cache = TRUE;
      while (!g_queue_is_empty (demux->queue)) {
        GstFragment *fragment = g_queue_pop_head (demux->queue);
        g_object_unref (fragment);
      }
      g_queue_clear (demux->queue);

      GST_M3U8_CLIENT_LOCK (demux->client);
      GST_DEBUG_OBJECT (demux, "seeking to sequence %d", current_sequence);
      demux->client->sequence = current_sequence;
      gst_m3u8_client_get_current_position (demux->client, &position);
      demux->position_shift = start - position;
      demux->need_segment = TRUE;
      GST_M3U8_CLIENT_UNLOCK (demux->client);


      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_DEBUG_OBJECT (demux, "sending flush stop");
        gst_pad_push_event (demux->srcpad, gst_event_new_flush_stop (TRUE));
      }

      demux->cancelled = FALSE;
      gst_task_start (demux->stream_task);
      g_rec_mutex_unlock (&demux->stream_lock);

      return TRUE;
    }
    default:
      break;
  }

  return gst_pad_event_default (pad, parent, event);
}
Example #18
/**
 * Tell the GST source element it is time to prepare to do work
 *
 * This is one of the last functions GStreamer will call when the pipeline
 * is being put together. It is the last place the element has a chance to
 * allocate resources and, in our case, to start up our background task for
 * network connectivity.
 * After this function returns, we are ready to start processing data from the pipeline.
 *
 * We allocate some of the last-minute buffers and set up a connection to the network;
 * this is used primarily by the background task, but we need access to it for name initialization.
 *
 * Next we initialize our fifo queue and start up the background task.
 *
 * Lastly we return control to GStreamer to begin processing information.
 *
 * \param bsrc		pointer to the context source element
 * \return true if the initialization went well and we are ready to process data, false otherwise
 */
static gboolean
gst_ccnxsrc_start (GstBaseSrc * bsrc)
{
  Gstccnxsrc *src;
  CcnxInterestState *istate;

  struct ccn_charbuf *p_name = NULL;
  uintmax_t *p_seg = NULL;
  gint i_ret = 0;
  gboolean b_ret = FALSE;

  src = GST_CCNXSRC (bsrc);
  GST_DEBUG ("starting, getting connections");

  /* setup the connection to ccnx */
  if ((src->ccn = ccn_create ()) == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("ccn_create failed"));
    return FALSE;
  }
  if (-1 == ccn_connect (src->ccn, ccndHost ())) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("ccn_connect failed to %s",
            ccndHost ()));
    return FALSE;
  }
  loadKey (src->ccn, &src->sp);

  /* A closure is what defines what to do when an inbound interest or data arrives */
  if ((src->ccn_closure = calloc (1, sizeof (struct ccn_closure))) == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("closure alloc failed"));
    return FALSE;
  }

  /* We setup the closure to keep our context [src] and to call the incoming_content() function */
  src->ccn_closure->data = src;
  src->ccn_closure->p = incoming_content;

  /* Allocate buffers and construct the name from the uri the user gave us */
  GST_INFO ("step 1");
  if ((p_name = ccn_charbuf_create ()) == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("p_name alloc failed"));
    return FALSE;
  }
  if ((src->p_name = ccn_charbuf_create ()) == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("src->p_name alloc failed"));
    return FALSE;
  }
  if ((i_ret = ccn_name_from_uri (p_name, src->uri)) < 0) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("name from uri failed for \"%s\"", src->uri));
    return FALSE;
  }

  /* Find out what the latest one of these is called, and keep it in our context */
  ccn_charbuf_append (src->p_name, p_name->buf, p_name->length);
  i_ret = ccn_resolve_version (src->ccn, src->p_name, CCN_V_HIGHEST,
      CCN_VERSION_TIMEOUT);

  GST_INFO ("step 20 - name so far...");
  // hDump(src->p_name->buf, src->p_name->length);
  src->i_seg = 0;
  if (i_ret == 0) {             /* name is versioned, so get the meta data to obtain the length */
    p_seg = get_segment (src->ccn, src->p_name, CCN_HEADER_TIMEOUT);
    if (p_seg != NULL) {
      src->i_seg = *p_seg;
      GST_INFO ("step 25 - next seg: %d", src->i_seg);
      free (p_seg);
    }
  }
  ccn_charbuf_destroy (&p_name);

  /* Even though the recent segment published is likely to be >> 0, we still need to ask for segment 0 */
  /* because it seems to contain valuable stream information. Attempts to skip segment 0 resulted in no */
  /* proper rendering of the stream on my screen during testing */
  i_ret = request_segment (src, 0);
  if (i_ret < 0) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("interest sending failed"));
    return FALSE;
  }
  src->post_seg = 0;
  istate = allocInterestState (src);
  if (!istate) {                // This should not happen, but maybe
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("trouble allocating interest state structure"));
    return FALSE;
  }
  istate->seg = 0;
  istate->state = OInterest_waiting;

  /* Now start up the background work which will fetch all the rest of the R/T segments */
  eventTask = gst_task_create (ccn_event_thread, src);
  if (NULL == eventTask) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("creating event thread failed"));
    return FALSE;
  }
  src->fifo_cond = g_cond_new ();
  src->fifo_lock = g_mutex_new ();
  gst_task_set_lock (eventTask, &task_mutex);
  eventCond = g_cond_new ();
  eventLock = g_mutex_new ();
  b_ret = gst_task_start (eventTask);
  if (FALSE == b_ret) {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("starting event thread failed"));
    return FALSE;
  }
  GST_DEBUG ("event thread started");

  /* Done! */
  return TRUE;
}
Example #19
static gboolean
gst_ss_demux_handle_src_event (GstPad * pad, GstEvent * event)
{
  GstSSDemux *demux = GST_SS_DEMUX (gst_pad_get_parent (pad));

  switch (event->type) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      gint i = 0;
      GstSSDemuxStream *stream = NULL;

      GST_INFO_OBJECT (demux, "Received GST_EVENT_SEEK");

      // TODO: should be able to seek in DVR window
      if (GST_SSM_PARSE_IS_LIVE_PRESENTATION (demux->parser)) {
        GST_WARNING_OBJECT (demux, "Received seek event for live stream");
        return FALSE;
      }

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
          &stop_type, &stop);

      if (format != GST_FORMAT_TIME) {
        GST_WARNING_OBJECT (demux, "Only time format is supported in seek");
        return FALSE;
      }

      GST_DEBUG_OBJECT (demux, "seek event, rate: %f start: %" GST_TIME_FORMAT
          " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
          GST_TIME_ARGS (stop));


      for( i = 0; i < SS_STREAM_NUM; i++) {
        if ((stream = demux->streams[i])) {
          g_cond_signal (stream->cond);
          gst_task_stop (stream->stream_task);
        }
      }

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_INFO_OBJECT (demux, "sending flush start");

        for( i = 0; i < SS_STREAM_NUM; i++) {
          if ((stream = demux->streams[i])) {
            gst_pad_push_event (stream->pad, gst_event_new_flush_start ());
          }
        }
      }

      gst_ssm_parse_seek_manifest (demux->parser, start);

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_INFO_OBJECT (demux, "sending flush stop");
        for( i = 0; i < SS_STREAM_NUM; i++) {
          if ((stream = demux->streams[i])) {
            gst_pad_push_event (stream->pad, gst_event_new_flush_stop ());
            GST_LOG_OBJECT (stream->pad, "Starting pad TASK again...\n");
            stream->sent_ns = FALSE;
            stream->frag_cnt = 0; /*resetting to start buffering on SEEK */
            gst_task_start (stream->stream_task);
          }
        }
      }

      return TRUE;
    }
    default:
      break;
  }

  return gst_pad_event_default (pad, event);
}
Example #20
static void
gst_hls_demux_stream_loop (GstHLSDemux * demux)
{
  GstFragment *fragment;
  GstBuffer *buf;
  GstFlowReturn ret;
  GstCaps *bufcaps, *srccaps = NULL;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It tries first to
   * cache the first fragments and then it waits until it has more data in the
   * queue. This task is woken up when we push a new fragment to the queue or
   * when we reached the end of the playlist  */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread (only if on playing) */
    if (GST_STATE (demux) == GST_STATE_PLAYING)
      gst_task_start (demux->updates_task);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto pause_task;
  }

  fragment = g_queue_pop_head (demux->queue);
  buf = gst_fragment_get_buffer (fragment);

  /* Figure out if we need to create/switch pads */
  if (G_LIKELY (demux->srcpad))
    srccaps = gst_pad_get_current_caps (demux->srcpad);
  bufcaps = gst_fragment_get_caps (fragment);
  if (G_UNLIKELY (!srccaps || !gst_caps_is_equal_fixed (bufcaps, srccaps)
          || demux->need_segment)) {
    switch_pads (demux, bufcaps);
    demux->need_segment = TRUE;
  }
  gst_caps_unref (bufcaps);
  if (G_LIKELY (srccaps))
    gst_caps_unref (srccaps);
  g_object_unref (fragment);

  if (demux->need_segment) {
    GstSegment segment;
    GstClockTime start = GST_BUFFER_PTS (buf);

    start += demux->position_shift;
    /* And send a newsegment */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (start));
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = start;
    segment.time = start;
    gst_pad_push_event (demux->srcpad, gst_event_new_segment (&segment));
    demux->need_segment = FALSE;
    demux->position_shift = 0;
  }

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error_pushing;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->stream_task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error_pushing:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "Error pushing buffer: %s... stopping task",
        gst_flow_get_name (ret));
    gst_hls_demux_stop (demux);
    return;
  }

pause_task:
  {
    gst_task_pause (demux->stream_task);
    return;
  }
}
Example #21
static gboolean
gst_ss_demux_sink_event (GstPad * pad, GstEvent * event)
{
  GstSSDemux *demux = GST_SS_DEMUX (gst_pad_get_parent (pad));
  GstQuery *query;
  gboolean ret;
  gchar *uri;

  switch (event->type) {
    case GST_EVENT_EOS: {
      int i = 0;
      if (demux->manifest == NULL) {
        GST_WARNING_OBJECT (demux, "Received EOS without a manifest.");
        break;
      }

      GST_DEBUG_OBJECT (demux, "Got EOS on the sink pad: mainifest file fetched");

      query = gst_query_new_uri ();
      ret = gst_pad_peer_query (demux->sinkpad, query);
      if (ret) {
        gst_query_parse_uri (query, &uri);
        demux->parser = gst_ssm_parse_new (uri);
        g_free (uri);
      } else {
        GST_ERROR_OBJECT (demux, "failed to query URI from upstream");
        return FALSE;
      }
      gst_query_unref (query);

      GST_LOG_OBJECT (demux, "data = %p & size = %d", GST_BUFFER_DATA(demux->manifest), GST_BUFFER_SIZE(demux->manifest));
      if (!gst_ssm_parse_manifest (demux->parser, (char *)GST_BUFFER_DATA(demux->manifest), GST_BUFFER_SIZE(demux->manifest))) {
        /* In most cases, this will happen if we set a wrong url in the
         * source element and we have received the 404 HTML response instead of
         * the playlist */
        GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."),
            (NULL));
        return FALSE;
      }

      for( i = 0; i < SS_STREAM_NUM; i++) {
        if (gst_ssm_parse_check_stream (demux->parser, i)) {
          GstSSDemuxStream *stream = g_new0 (GstSSDemuxStream, 1);

          // Add pad emission of the stream
          gst_ss_demux_stream_init (demux, stream, i);
          g_static_rec_mutex_init (&stream->stream_lock);
          stream->stream_task = gst_task_create ((GstTaskFunction) gst_ss_demux_stream_loop, demux);
          gst_task_set_lock (stream->stream_task, &stream->stream_lock);
          demux->streams[i] = stream;
          g_print ("Starting stream - %d task loop...\n", i);
          gst_task_start (stream->stream_task);
        }
      }

      gst_event_unref (event);
      return TRUE;
    }
    case GST_EVENT_NEWSEGMENT:
      /* Swallow newsegments, we'll push our own */
      gst_event_unref (event);
      return TRUE;
    default:
      break;
  }

  return gst_pad_event_default (pad, event);
}
Example #22
static gboolean
gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean caching)
{
  GstFragment *download;
  const gchar *next_fragment_uri;
  GstClockTime duration;
  GstClockTime timestamp;
  GstBuffer *buf;
  gboolean discont;

  if (!gst_m3u8_client_get_next_fragment (demux->client, &discont,
          &next_fragment_uri, &duration, &timestamp)) {
    GST_INFO_OBJECT (demux, "This playlist doesn't contain more fragments");
    demux->end_of_playlist = TRUE;
    gst_task_start (demux->stream_task);
    return FALSE;
  }

  GST_INFO_OBJECT (demux, "Fetching next fragment %s", next_fragment_uri);

  download = gst_uri_downloader_fetch_uri (demux->downloader,
      next_fragment_uri);

  if (download == NULL)
    goto error;

  buf = gst_fragment_get_buffer (download);
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_PTS (buf) = timestamp;

  /* We actually need to do this every time we switch bitrate */
  if (G_UNLIKELY (demux->do_typefind)) {
    GstCaps *caps = gst_fragment_get_caps (download);

    if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) {
      gst_caps_replace (&demux->input_caps, caps);
      /* gst_pad_set_caps (demux->srcpad, demux->input_caps); */
      GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT,
          demux->input_caps);
      demux->do_typefind = FALSE;
    }
    gst_caps_unref (caps);
  } else {
    gst_fragment_set_caps (download, demux->input_caps);
  }

  if (discont) {
    GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  g_queue_push_tail (demux->queue, download);
  if (!caching) {
    GST_TASK_SIGNAL (demux->updates_task);
    gst_task_start (demux->stream_task);
  }
  return TRUE;

error:
  {
    gst_hls_demux_stop (demux);
    return FALSE;
  }
}
Example #23
static gboolean
gst_hls_demux_src_event (GstPad * pad, GstEvent * event)
{
  GstHLSDemux *demux;

  demux = GST_HLS_DEMUX (gst_pad_get_element_private (pad));

  switch (event->type) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      GList *walk;
      gint current_pos;
      gint current_sequence;
      gint target_second;
      GstM3U8MediaFile *file;

      GST_INFO_OBJECT (demux, "Received GST_EVENT_SEEK");

      if (gst_m3u8_client_is_live (demux->client)) {
        GST_WARNING_OBJECT (demux, "Received seek event for live stream");
        return FALSE;
      }

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
          &stop_type, &stop);

      if (format != GST_FORMAT_TIME)
        return FALSE;

      GST_DEBUG_OBJECT (demux, "seek event, rate: %f start: %" GST_TIME_FORMAT
          " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
          GST_TIME_ARGS (stop));

      file = GST_M3U8_MEDIA_FILE (demux->client->current->files->data);
      current_sequence = file->sequence;
      current_pos = 0;
      target_second = start / GST_SECOND;
      GST_DEBUG_OBJECT (demux, "Target seek to %d", target_second);
      for (walk = demux->client->current->files; walk; walk = walk->next) {
        file = walk->data;

        current_sequence = file->sequence;
        if (current_pos <= target_second
            && target_second < current_pos + file->duration) {
          break;
        }
        current_pos += file->duration;
      }

      if (walk == NULL) {
        GST_WARNING_OBJECT (demux, "Could not find seeked fragment");
        return FALSE;
      }

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_DEBUG_OBJECT (demux, "sending flush start");
        gst_pad_push_event (demux->srcpad, gst_event_new_flush_start ());
      }

      demux->cancelled = TRUE;
      gst_task_pause (demux->task);
      g_mutex_lock (demux->fetcher_lock);
      gst_hls_demux_stop_fetcher (demux, TRUE);
      g_mutex_unlock (demux->fetcher_lock);
      g_mutex_lock (demux->thread_lock);
      g_cond_signal (demux->thread_cond);
      g_mutex_unlock (demux->thread_lock);
      gst_task_pause (demux->task);

      /* wait for streaming to finish */
      g_static_rec_mutex_lock (&demux->task_lock);

      demux->need_cache = TRUE;
      while (!g_queue_is_empty (demux->queue)) {
        GstBuffer *buf = g_queue_pop_head (demux->queue);
        gst_buffer_unref (buf);
      }

      GST_DEBUG_OBJECT (demux, "seeking to sequence %d", current_sequence);
      demux->client->sequence = current_sequence;
      demux->position = start;
      demux->need_segment = TRUE;

      if (flags & GST_SEEK_FLAG_FLUSH) {
        GST_DEBUG_OBJECT (demux, "sending flush stop");
        gst_pad_push_event (demux->srcpad, gst_event_new_flush_stop ());
      }

      demux->cancelled = FALSE;
      gst_task_start (demux->task);
      g_static_rec_mutex_unlock (&demux->task_lock);

      return TRUE;
    }
    default:
      break;
  }

  return gst_pad_event_default (pad, event);
}
Example #24
static GstStateChangeReturn
gst_swfdec_change_state (GstElement * element, GstStateChange transition)
{
  GstSwfdec *swfdec = GST_SWFDEC (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
    {
      gst_adapter_clear (swfdec->adapter);
      /*
         gst_swfdec_vo_open (swfdec);
         swfdec_decoder_new (swfdec->decoder, swfdec->accel, swfdec->vo);

         swfdec->decoder->is_sequence_needed = 1;
         swfdec->decoder->frame_rate_code = 0;
       */
      swfdec->timestamp = 0;
      swfdec->closed = FALSE;

      /* reset the initial video state */
      swfdec->have_format = FALSE;
      swfdec->format = -1;
      swfdec->width = -1;
      swfdec->height = -1;
      swfdec->first = TRUE;
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      gst_task_start (swfdec->task);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      gst_task_pause (swfdec->task);
      gst_task_join (swfdec->task);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* if we are not closed by an EOS event do so now, this can send a few frames but
       * we are prepared to not really send them (see above) */
      if (!swfdec->closed) {
        /*swf_close (swfdec->decoder); */
        swfdec->closed = TRUE;
      }
      /* gst_swfdec_vo_destroy (swfdec); */
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_task_stop (swfdec->task);
      gst_task_join (swfdec->task);
      break;
    default:
      break;
  }

  return ret;

}