Example #1
gboolean GStreamerReader::SeekData(GstAppSrc* aSrc, guint64 aOffset)
{
  aOffset += mDataOffset;

  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  int64_t resourceLength = mResource.GetLength();

  if (gst_app_src_get_size(mSource) == -1) {
    /* It's possible that we didn't know the length when we initialized mSource
     * but maybe we do now
     */
    gst_app_src_set_size(mSource, GetDataLength());
  }

  nsresult rv = NS_ERROR_FAILURE;
  if (aOffset < static_cast<guint64>(resourceLength)) {
    rv = mResource.Seek(SEEK_SET, aOffset);
  }

  return NS_SUCCEEDED(rv);
}
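
For reference, a seek_data handler like the one above can also be wired to a bare appsrc outside of Gecko. A minimal sketch, assuming a POSIX FILE*-backed source; MyCtx, my_seek_data and their fields are hypothetical names:

#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

typedef struct {
  FILE   *file;    /* backing store the buffers are read from */
  gint64  length;  /* total size in bytes, or -1 if unknown */
} MyCtx;

/* seek_data: appsrc asks us to continue producing data from `offset`.
 * Return TRUE only if the source could actually be repositioned. */
static gboolean
my_seek_data (GstAppSrc *src, guint64 offset, gpointer user_data)
{
  MyCtx *ctx = (MyCtx *) user_data;

  /* Late size update, mirroring the example above: if appsrc still
   * reports -1 but the length is known by now, advertise it. */
  if (gst_app_src_get_size (src) == -1 && ctx->length != -1)
    gst_app_src_set_size (src, ctx->length);

  if (ctx->length != -1 && offset >= (guint64) ctx->length)
    return FALSE;

  return fseeko (ctx->file, (off_t) offset, SEEK_SET) == 0;
}

The handler is installed with gst_app_src_set_callbacks(), as Examples #2 and #3 below show.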
Example #2
void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
{
  mSource = GST_APP_SRC(aSource);
  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
  MediaResource* resource = mDecoder->GetResource();

  /* do a short read to trigger a network request so that GetLength() below
   * returns something meaningful and not -1
   */
  char buf[512];
  unsigned int size = 0;
  resource->Read(buf, sizeof(buf), &size);
  resource->Seek(SEEK_SET, 0);

  /* now we should have a length */
  int64_t resourceLength = GetDataLength();
  gst_app_src_set_size(mSource, resourceLength);
  if (resource->IsDataCachedToEndOfResource(0) ||
      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
    /* let the demuxer work in pull mode for local files (or very short files)
     * so that we get optimal seeking accuracy/performance
     */
    LOG(PR_LOG_DEBUG, "configuring random access, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else {
    /* make the demuxer work in push mode so that seeking is kept to a minimum
     */
    LOG(PR_LOG_DEBUG, "configuring push mode, len %lld", resourceLength);
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
  }

  // Set the source MIME type to stop typefind trying every. single. format.
  GstCaps *caps =
    GStreamerFormatHelper::ConvertFormatsToCaps(mDecoder->GetResource()->GetContentType().get(),
                                                nullptr);

  gst_app_src_set_caps(aSource, caps);
  gst_caps_unref(caps);
}
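
The size/stream-type decision above translates directly to a standalone appsrc: when the total length is known and the data is local, random-access (pull) mode gives the demuxer the best seeking accuracy, otherwise push mode keeps seeking to a minimum. A minimal sketch; the configure_appsrc_mode name and the is_local flag are hypothetical:

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

static void
configure_appsrc_mode (GstAppSrc *appsrc, gint64 length, gboolean is_local)
{
  /* Advertise the total stream size; -1 means "unknown". */
  gst_app_src_set_size (appsrc, length);

  if (is_local && length != -1) {
    /* Downstream may pull arbitrary ranges: best seeking accuracy. */
    gst_app_src_set_stream_type (appsrc, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else if (length != -1) {
    /* Push mode, but seek_data requests are still honoured. */
    gst_app_src_set_stream_type (appsrc, GST_APP_STREAM_TYPE_SEEKABLE);
  } else {
    /* Purely sequential/live stream: no seeking at all. */
    gst_app_src_set_stream_type (appsrc, GST_APP_STREAM_TYPE_STREAM);
  }
}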
Example #3
bool QGstAppSrc::setup(GstElement* appsrc)
{
    if (m_setup || m_stream == 0 || appsrc == 0)
        return false;

    m_appSrc = GST_APP_SRC(appsrc);
    m_callbacks.need_data   = &QGstAppSrc::on_need_data;
    m_callbacks.enough_data = &QGstAppSrc::on_enough_data;
    m_callbacks.seek_data   = &QGstAppSrc::on_seek_data;
    gst_app_src_set_callbacks(m_appSrc, (GstAppSrcCallbacks*)&m_callbacks, this, (GDestroyNotify)&QGstAppSrc::destroy_notify);

    g_object_get(G_OBJECT(m_appSrc), "max-bytes", &m_maxBytes, NULL);

    if (m_sequential)
        m_streamType = GST_APP_STREAM_TYPE_STREAM;
    else
        m_streamType = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
    gst_app_src_set_stream_type(m_appSrc, m_streamType);
    gst_app_src_set_size(m_appSrc, (m_sequential) ? -1 : m_stream->size());

    return m_setup = true;
}
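
The need_data callback registered above is the producing half of the pattern. A minimal sketch of one in plain GStreamer C, assuming a hypothetical read_bytes() helper that fills a buffer from the underlying stream and returns the number of bytes read (0 at end of stream):

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

/* Hypothetical helper: reads up to `size` bytes, returns bytes read (0 = EOS). */
extern gsize read_bytes (gpointer user_data, guint8 *dst, gsize size);

/* need_data: appsrc wants roughly `length` more bytes queued. */
static void
on_need_data (GstAppSrc *src, guint length, gpointer user_data)
{
  GstBuffer *buffer;
  GstMapInfo map;
  gsize got;

  if (length == 0 || length == (guint) -1)
    length = 4096;                       /* appsrc may not suggest a size */

  buffer = gst_buffer_new_allocate (NULL, length, NULL);
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  got = read_bytes (user_data, map.data, length);
  gst_buffer_unmap (buffer, &map);

  if (got == 0) {
    gst_buffer_unref (buffer);
    gst_app_src_end_of_stream (src);     /* no more data to produce */
    return;
  }

  gst_buffer_set_size (buffer, got);     /* shrink to what was actually read */
  gst_app_src_push_buffer (src, buffer); /* takes ownership of the buffer */
}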
Example #4
gboolean GStreamerReader::SeekData(GstAppSrc* aSrc, guint64 aOffset)
{
  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
  MediaResource* resource = mDecoder->GetResource();
  int64_t resourceLength = resource->GetLength();

  if (gst_app_src_get_size(mSource) == -1) {
    /* It's possible that we didn't know the length when we initialized mSource
     * but maybe we do now
     */
    gst_app_src_set_size(mSource, resourceLength);
  }

  nsresult rv = NS_ERROR_FAILURE;
  if (aOffset < static_cast<guint64>(resourceLength)) {
    rv = resource->Seek(SEEK_SET, aOffset);
  }

  if (NS_FAILED(rv)) {
    LOG(PR_LOG_ERROR, ("seek at %lu failed", aOffset));
  }

  return NS_SUCCEEDED(rv);
}
Example #5
/*!
 * \brief CvVideoWriter_GStreamer::open
 * \param filename filename to output to
 * \param fourcc desired codec fourcc
 * \param fps desired framerate
 * \param frameSize the size of the expected frames
 * \param is_color color or grayscale
 * \return success
 *
 * We support two modes of operation: either the user passes a filename and a fourcc
 * code, or a manual pipeline description like in CvVideoCapture_Gstreamer.
 * In the latter case, we just push frames into the appsrc with appropriate caps.
 * In the former case, we try to deduce the correct container from the filename,
 * and the correct encoder from the fourcc code.
 *
 * If the file extension is not recognized, an AVI container is used.
 *
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    __BEGIN__;

    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //remap old fourcc ids that no longer exist. These were used in previous
        //opencv versions, but do not even exist in gstreamer any more
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from the
    // encodebin pipeline to prevent early EOS event handling.
    // We always fetch BGR or grayscale frames, so the combiner->splitter
    // edge in the graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    __END__;

    return true;
}
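
From the caller's side, the two modes documented above are selected purely by what is passed as the filename. A minimal usage sketch against the OpenCV 2.x-era API this backend belongs to; the pipeline string and output paths are illustrative only:

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::Mat frame(480, 640, CV_8UC3, cv::Scalar::all(0));

    // Mode 1: filename + fourcc. The container is deduced from the
    // extension, the encoder from the fourcc code.
    cv::VideoWriter byFourcc("out.avi", CV_FOURCC('M', 'P', 'G', '1'),
                             25.0, frame.size(), /*is_color=*/true);

    // Mode 2: manual gst-launch style description containing an appsrc.
    // The fourcc is ignored once the string parses as a pipeline.
    cv::VideoWriter byPipeline("appsrc ! videoconvert ! vp8enc ! webmmux "
                               "! filesink location=out.webm",
                               0, 25.0, frame.size(), true);

    for (int i = 0; i < 100 && byFourcc.isOpened(); ++i)
        byFourcc << frame;

    return 0;
}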
Example #6
    void* RendStreamer::run()
    {

        cout << "rendstreamer started " << endl;
        gchar * description=(gchar *) g_malloc(1000);
        g_sprintf(description,
//"webmmux name=stream appsrc name=mysrc ! video/x-raw,format=RGB,width=%d,height=%d,framerate=20/1 ! videoconvert ! vp8enc !  stream.",
"webmmux name=stream appsrc name=mysrc ! video/x-raw,format=RGB,width=%d,height=%d,framerate=5/1 ! videoconvert ! vp8enc  !  stream.",  
                this->pconf->width,this->pconf->height);
        gint argc=0;
        gst_init (&argc, NULL);

        GError *err = NULL;
        GstElement * bin = gst_parse_launch (description, &err);

        g_free(description);

          if ( (!bin) || (err)) {
            g_print ("invalid pipeline: %s\n", err->message);
            g_clear_error (&err);
            exit(-2);
          }

          multisocketsink = gst_element_factory_make ("multisocketsink", NULL);


 printf("parsed %d, %d\n",this->pconf->width,this->pconf->height);
          /* setup appsrc */
          GstElement *appsrc = gst_bin_get_by_name (GST_BIN (bin), "mysrc");
          if (!appsrc) {
            g_print ("no element with name \"appsrc\" found\n");
            gst_object_unref (bin);
            exit(-3);
          }
          /*
          g_object_set (G_OBJECT (appsrc), "caps",
                gst_caps_new_simple ("video/x-raw",
                             "format", G_TYPE_STRING, "RGB",
                             "width", G_TYPE_INT, this->width,
                             "height", G_TYPE_INT, this->height,
                             "framerate", GST_TYPE_FRACTION, 1, 2,
                             NULL), NULL);


    */

     gst_app_src_set_size (GST_APP_SRC (appsrc), (gint64) -1); // total stream size is not known
     gst_app_src_set_stream_type(GST_APP_SRC (appsrc),GST_APP_STREAM_TYPE_STREAM);
     g_object_set (G_OBJECT (appsrc),"format", GST_FORMAT_TIME, NULL);


            /*setup muxer*/
            GstElement *stream = gst_bin_get_by_name (GST_BIN (bin), "stream");
              if (!stream) {
                g_print ("no element with name \"stream\" found\n");
                gst_object_unref (bin);
                exit(-3);
              }

              GstPad *srcpad = gst_element_get_static_pad (stream, "src");
              if (!srcpad) {
                g_print ("no \"src\" pad in element \"stream\" found\n");
                gst_object_unref (stream);
                gst_object_unref (bin);
                exit(-4);
              }

              GstPad *ghostpad = gst_ghost_pad_new ("src", srcpad);
              gst_element_add_pad (GST_ELEMENT (bin), ghostpad);
              gst_object_unref (srcpad);


         /* add to pipeline */
          pipeline = gst_pipeline_new ("pipeline");
          gst_bin_add_many (GST_BIN (pipeline), bin,multisocketsink, NULL);

          /* link with multi socket */
          GstPad *sinkpad = gst_element_get_static_pad (multisocketsink, "sink");
          gst_pad_link (ghostpad, sinkpad);
          gst_object_unref (sinkpad);

          /*get the bus */
          GstBus *bus = gst_element_get_bus (pipeline);
          gst_bus_add_signal_watch (bus);
          g_signal_connect (bus, "message", G_CALLBACK (on_message), NULL);
          gst_object_unref (bus);

            /*call backs */
          g_signal_connect (multisocketsink, "client-socket-removed",G_CALLBACK (on_client_socket_removed), NULL);
          g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), this);

          /* setup main loop */
          loop = g_main_loop_new (NULL, FALSE);
          if (gst_element_set_state (pipeline,GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) {
                    gst_object_unref (pipeline);
                    g_main_loop_unref (loop);
                    g_print ("Failed to set pipeline to ready\n");
                    exit(-5);
          }

          /*setup server*/
          service = g_socket_service_new ();
          g_socket_listener_add_inet_port (G_SOCKET_LISTENER (service), this->pconf->port, NULL,NULL);
          g_signal_connect (service, "incoming", G_CALLBACK (on_new_connection), NULL);
          g_socket_service_start (service);
          g_print ("Listening on http://127.0.0.1:%u/\n",this->pconf->port);

        /* start main loop */
        g_main_loop_run (loop);
        cout << "F**k " << endl;
        return NULL;
    }
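
The cb_need_data handler connected above is not part of this snippet. A minimal sketch of what it might look like, written in the same file as run(), assuming the appsrc runs in GST_FORMAT_TIME as configured, 5 fps as in the caps string, and a hypothetical fill_rgb_frame() member that produces one RGB frame of pconf->width x pconf->height:

    static void cb_need_data(GstElement *appsrc, guint unused_size, gpointer user_data)
    {
        RendStreamer *self = (RendStreamer *) user_data;
        static GstClockTime timestamp = 0;

        const gsize size = self->pconf->width * self->pconf->height * 3; /* RGB */
        GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);

        GstMapInfo map;
        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
        self->fill_rgb_frame (map.data);          /* hypothetical frame producer */
        gst_buffer_unmap (buffer, &map);

        /* The caps declare framerate=5/1, so each buffer covers 1/5 s
         * of running time in GST_FORMAT_TIME. */
        GST_BUFFER_PTS (buffer)      = timestamp;
        GST_BUFFER_DURATION (buffer) = GST_SECOND / 5;
        timestamp += GST_BUFFER_DURATION (buffer);

        GstFlowReturn ret;
        g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref (buffer);  /* the push-buffer signal does not take ownership */
    }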