Example #1
// Clear everything on sight.
void qtractorFiles::clear (void)
{
	m_pAudioListView->clear();
	m_pMidiListView->clear();

	setPlayState(false);
}
Example #2
//*********************************************************
// Initialize Play
void JamAndShoot::initializePlay(const VisionData& vision, RobocupStrategyData* rsd)  
{
  timer->resetTimer();
  setPlayState(NORMAL_PLAY);
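  // Register this play's action function for every robot role.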
  rsd->setActionFunction(BLOCKER, getActionFunction(BLOCKER) );
  rsd->setActionFunction(DEFENDER, getActionFunction(DEFENDER) );
  rsd->setActionFunction(AGGRESSOR, getActionFunction(AGGRESSOR) );
  rsd->setActionFunction(CREATOR, getActionFunction(CREATOR) );
  rsd->setActionFunction(SPECIAL_OP_DEFENDER, getActionFunction(SPECIAL_OP_DEFENDER) );
  rsd->setActionFunction(SPECIAL_OP_AGGRESSOR, getActionFunction(SPECIAL_OP_AGGRESSOR) );
  rsd->setActionFunction(SPECIAL_OP_CREATOR, getActionFunction(SPECIAL_OP_CREATOR) );
}
Example #3
void MediaImpl::unloadMovie()
{
    // Reset variables.
    _terminate = false;
    _seekEnabled = false;

    // Un-ready.
    _setMovieReady(false);
    setPlayState(false);

    // Free allocated resources.
    freeResources();
}
Example #4
// Audition/pre-listening player slot.
void qtractorFiles::playSlot ( bool bOn )
{
	if (m_iPlayUpdate > 0)
		return;

	setPlayState(bOn);

	if (bOn) {
		qtractorFileListView *pFileListView = currentFileListView();
		if (pFileListView)
			pFileListView->activateItem();
	}
}
Example #5
bool MpvHandler::event(QEvent *event)
{
    if(event->type() == QEvent::User)
    {
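        // Drain the mpv event queue: with a timeout of 0, mpv_wait_event()
        // returns an event with id MPV_EVENT_NONE once the queue is empty.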
        while(mpv)
        {
            mpv_event *mpvEvent = mpv_wait_event(mpv, 0);
            if(mpvEvent == nullptr ||
               mpvEvent->event_id == MPV_EVENT_NONE)
            {
                break;
            }
            HandleErrorCode(mpvEvent->error);
            switch (mpvEvent->event_id)
            {
            case MPV_EVENT_PROPERTY_CHANGE:
            {
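                // prop->data is only meaningful when prop->format matches the
                // format requested when the property was observed (presumably
                // via mpv_observe_property elsewhere in this class).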
                mpv_event_property *prop = (mpv_event_property*)mpvEvent->data;
                if(QString(prop->name) == "playback-time") // playback-time does the same thing as time-pos but works for streaming media
                {
                    if(prop->format == MPV_FORMAT_DOUBLE)
                    {
                        setTime((int)*(double*)prop->data);
                        lastTime = time;
                    }
                }
                else if(QString(prop->name) == "volume")
                {
                    if(prop->format == MPV_FORMAT_DOUBLE)
                        setVolume((int)*(double*)prop->data);
                }
                else if(QString(prop->name) == "sid")
                {
                    if(prop->format == MPV_FORMAT_INT64)
                        setSid(*(int*)prop->data);
                }
                else if(QString(prop->name) == "aid")
                {
                    if(prop->format == MPV_FORMAT_INT64)
                        setAid(*(int*)prop->data);
                }
                else if(QString(prop->name) == "sub-visibility")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                        setSubtitleVisibility((bool)*(unsigned*)prop->data);
                }
                else if(QString(prop->name) == "mute")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                        setMute((bool)*(unsigned*)prop->data);
                }
                else if(QString(prop->name) == "core-idle")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                    {
                        if(*(int*)prop->data != 0 && playState == Mpv::Playing)
                            ShowText(tr("Buffering..."), 0);
                        else
                            ShowText(QString(), 0);
                    }
                }
                else if(QString(prop->name) == "paused-for-cache")
                {
                    if(prop->format == MPV_FORMAT_FLAG)
                    {
                        if(*(int*)prop->data != 0 && playState == Mpv::Playing)
                            ShowText(tr("Your network is slow or stuck, please wait a bit"), 0);
                        else
                            ShowText(QString(), 0);
                    }
                }
                break;
            }
            case MPV_EVENT_IDLE:
                fileInfo.length = 0;
                setTime(0);
                setPlayState(Mpv::Idle);
                break;
            // these two look like they're reversed, but they aren't: the event names are misleading.
            case MPV_EVENT_START_FILE:
                setPlayState(Mpv::Loaded);
                break;
            case MPV_EVENT_FILE_LOADED:
                setPlayState(Mpv::Started);
                LoadFileInfo();
                SetProperties();
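                // no break: fall through so a freshly loaded file enters the Playing state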
            case MPV_EVENT_UNPAUSE:
                setPlayState(Mpv::Playing);
                break;
            case MPV_EVENT_PAUSE:
                setPlayState(Mpv::Paused);
                ShowText(QString(), 0);
                break;
            case MPV_EVENT_END_FILE:
                if(playState == Mpv::Loaded)
                    ShowText(tr("File couldn't be opened"));
                setPlayState(Mpv::Stopped);
                break;
            case MPV_EVENT_SHUTDOWN:
                QCoreApplication::quit();
                break;
            case MPV_EVENT_LOG_MESSAGE:
            {
                mpv_event_log_message *message = static_cast<mpv_event_log_message*>(mpvEvent->data);
                if(message != nullptr)
                    emit messageSignal(message->text);
                break;
            }
            default: // unhandled events
                break;
            }
        }
        return true;
    }
    return QObject::event(event);
}
Example #6
bool MediaImpl::loadMovie(QString filename)
{
    // Keep the byte array alive: toUtf8() returns a temporary whose data
    // pointer would otherwise dangle past this statement.
    QByteArray filetestpath_ba = filename.toUtf8();
    const gchar* filetestpath = (const gchar*) filetestpath_ba.constData();
    if (FALSE == g_file_test(filetestpath, G_FILE_TEST_EXISTS))
    {
        std::cout << "File " << filetestpath << " does not exist" << std::endl;
        return false;
    }
    _uri = filename;

    qDebug() << "Opening movie: " << filename << ".";

    // Free previously allocated structures
    unloadMovie();

    // Initialize GStreamer.
    GstElement *capsfilter0 = NULL;
    GstElement *videoscale0 = NULL;

    // Create the elements.
    if (_isSharedMemorySource)
    {
        _shmsrc0 = gst_element_factory_make ("shmsrc", "shmsrc0");
        _gdpdepay0 = gst_element_factory_make ("gdpdepay", "gdpdepay0");
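        // Poll every 500 ms via the gstPollShmsrc callback (presumably to watch
        // the shared-memory socket for a writer).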
        _pollSource = g_timeout_source_new (500);
        g_source_set_callback (_pollSource,
                               gstPollShmsrc,
                               this,
                               NULL);
        g_source_attach (_pollSource, g_main_context_default());
        g_source_unref (_pollSource);
    }
    else {
        _uridecodebin0 = gst_element_factory_make ("uridecodebin", "uridecodebin0");
    }
    _queue0 = gst_element_factory_make ("queue", "queue0");
    _videoconvert0 = gst_element_factory_make ("videoconvert", "videoconvert0");
    videoscale0 = gst_element_factory_make ("videoscale", "videoscale0");
    capsfilter0 = gst_element_factory_make ("capsfilter", "capsfilter0");
    _appsink0 = gst_element_factory_make ("appsink", "appsink0");
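    // Video branch as linked below: queue ! videoconvert ! capsfilter ! videoscale ! appsink;
    // the source (shmsrc ! gdpdepay, or uridecodebin) is attached to the queue later.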

    // Prepare handler data.
    _padHandlerData.videoToConnect = _queue0;
    _padHandlerData.videoSink = _appsink0;
    _padHandlerData.videoIsConnected = false;

    _audioqueue0 = gst_element_factory_make ("queue", "audioqueue0");
    _audioconvert0 = gst_element_factory_make ("audioconvert", "audioconvert0");
    _audioresample0 = gst_element_factory_make ("audioresample", "audioresample0");
    _audiovolume0 = gst_element_factory_make ("volume", "audiovolume0");
    _audiosink0 = gst_element_factory_make ("autoaudiosink", "audiosink0");

    _padHandlerData.audioToConnect = _audioqueue0;

    // Create the empty pipeline.
    _pipeline = gst_pipeline_new ( "video-source-pipeline" );

    if (!_pipeline ||
            !_queue0 || !_videoconvert0 || ! videoscale0 || ! capsfilter0 ||
            !_appsink0 || !_audioqueue0 || !_audioconvert0 || !_audioresample0 ||
            !_audiovolume0 || !_audiosink0)
    {
        g_printerr ("Not all elements could be created.\n");

        if (! _pipeline) g_printerr("_pipeline");
        if (! _queue0) g_printerr("_queue0");
        if (! _videoconvert0) g_printerr("_videoconvert0");
        if (! videoscale0) g_printerr("videoscale0");
        if (! capsfilter0) g_printerr("capsfilter0");
        if (! _appsink0) g_printerr("_appsink0");
        if (! _audioqueue0) g_printerr("_audioqueue0");
        if (! _audioconvert0) g_printerr("_audioconvert0");
        if (! _audioresample0) g_printerr("_audioresample0");
        if (! _audiovolume0) g_printerr("_audiovolume0");
        if (! _audiosink0) g_printerr("_audiosink0");

        unloadMovie();
        return false; // bool function: -1 would convert to true
    }

    if (_isSharedMemorySource)
    {
        if (! _shmsrc0 || ! _gdpdepay0)
        {
            g_printerr ("Not all elements could be created.\n");
            if (! _shmsrc0) g_printerr("_shmsrc0");
            if (! _gdpdepay0) g_printerr("_gdpdepay0");
            unloadMovie();
            return false;
        }
    }
    else
    {
        if (! _uridecodebin0)
        {
            g_printerr ("Not all elements could be created.\n");
            if (! _uridecodebin0) g_printerr("_uridecodebin0");
            unloadMovie();
            return false;
        }
    }

    // Build the pipeline. Note that we are NOT linking the source at this
    // point. We will do it later.
    gst_bin_add_many (GST_BIN (_pipeline),
                      _isSharedMemorySource ? _shmsrc0 : _uridecodebin0, _queue0,
                      _videoconvert0, videoscale0, capsfilter0, _appsink0,
//    _audioqueue0, _audioconvert0, _audioresample0, _audiovolume0, _audiosink0,
                      NULL);

    // special case for shmsrc
    if (_isSharedMemorySource)
    {
        gst_bin_add (GST_BIN(_pipeline), _gdpdepay0);
        if (! gst_element_link_many (_shmsrc0, _gdpdepay0, _queue0, NULL))
        {
            g_printerr ("Could not link shmsrc, deserializer and video queue.\n");
        }
    }
    // link uridecodebin -> queue will be performed by callback

    if (! gst_element_link_many (_queue0, _videoconvert0, capsfilter0, videoscale0, _appsink0, NULL))
    {
        g_printerr ("Could not link video queue, colorspace converter, caps filter, scaler and app sink.\n");
        unloadMovie();
        return false;
    }

//  if (! gst_element_link_many (_audioqueue0, _audioconvert0, _audioresample0,
//        _audiovolume0, _audiosink0, NULL))
//  {
//    g_printerr ("Could not link audio queue, converter, resampler and audio sink.\n");
//    unloadMovie();
//    return false;
//  }

    // Process URI.
    QByteArray ba = filename.toLocal8Bit();
    gchar *filename_tmp = g_strdup((gchar*) filename.toUtf8().constData());
    gchar* uri = NULL;
    if (! _isSharedMemorySource && ! gst_uri_is_valid(filename_tmp))
    {
        // Try to convert filename to URI.
        GError* error = NULL;
        qDebug() << "Calling gst_filename_to_uri : " << filename_tmp;
        uri = gst_filename_to_uri(filename_tmp, &error);
        if (error)
        {
            qDebug() << "Filename to URI error: " << error->message;
            g_error_free(error);
            g_free(filename_tmp);
            freeResources();
            return false;
        }
    }
    else if (! _isSharedMemorySource)
    {
        // Already a valid URI: keep our own copy, since uri is g_free()'d below.
        uri = g_strdup(filename_tmp);
    }
    g_free(filename_tmp);

    if (_isSharedMemorySource)
    {
        // For shmsrc the "URI" is really the socket path; copy it so that
        // the g_free(uri) below does not free memory owned by the QByteArray.
        uri = g_strdup(ba.data());
    }

    // Set URI to be played.
    qDebug() << "URI for uridecodebin: " << uri;
    // FIXME: sometimes it's just the path to the directory that is given, not the file itself.

    // Connect to the pad-added signal
    if (! _isSharedMemorySource)
    {
        g_signal_connect (_uridecodebin0, "pad-added", G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);
        g_object_set (_uridecodebin0, "uri", uri, NULL);
    }
    else
    {
        //qDebug() << "LIVE mode" << uri;
        g_object_set (_shmsrc0, "socket-path", uri, NULL);
        g_object_set (_shmsrc0, "is-live", TRUE, NULL);
        _padHandlerData.videoIsConnected = true;
    }
    g_free(uri);

    // Configure audio appsink.
    // TODO: change from mono to stereo
//  gchar* audioCapsText = g_strdup_printf ("audio/x-raw-float,channels=1,rate=%d,signed=(boolean)true,width=%d,depth=%d,endianness=BYTE_ORDER",
//                                          Engine::signalInfo().sampleRate(), (int)(sizeof(Signal_T)*8), (int)(sizeof(Signal_T)*8) );
//  GstCaps* audioCaps = gst_caps_from_string (audioCapsText);
//  g_object_set (_audioSink, "emit-signals", TRUE,
//                            "caps", audioCaps,
////                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
////                            "drop", TRUE,         // ... other buffers are dropped
//                            NULL);
//  g_signal_connect (_audioSink, "new-buffer", G_CALLBACK (VideoImpl::gstNewAudioBufferCallback), &_newAudioBufferHandlerData);
//  gst_caps_unref (audioCaps);
//  g_free (audioCapsText);


    // Configure video appsink.
    GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
    g_object_set (capsfilter0, "caps", videoCaps, NULL);
    g_object_set (_appsink0, "emit-signals", TRUE,
                  "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
                  "drop", TRUE,         // ... other buffers are dropped
                  "sync", TRUE,
                  NULL);
    g_signal_connect (_appsink0, "new-sample", G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
    gst_caps_unref (videoCaps);

    g_object_set (_audiovolume0, "mute", false, NULL);
    g_object_set (_audiovolume0, "volume", 0.0, NULL);

    // Listen to the bus.
    _bus = gst_element_get_bus (_pipeline);

    // Start playing.
    if (! _isSharedMemorySource && ! setPlayState(true))
    {
        return false;
    }

    return true;
}
Example #7
void MediaImpl::internalPostPlay()
{
  // Pause playback.
  setPlayState(false);
}
Example #8
void MediaImpl::internalPrePlay()
{
  // Start/resume playback.
  setPlayState(true);
}
Example #9
bool MediaImpl::loadMovie(QString filename)
{
  _uri = filename;

  qDebug() << "Opening movie: " << filename << ".";
  this->_frame = NULL;

  // Free previously allocated structures
  unloadMovie();

  //_firstFrameTime=_formatContext->start_time;

  // Initialize GStreamer.
  gst_init (NULL, NULL);
  GstElement *capsFilter = NULL;
  GstElement *videoScale = NULL;

  // Create the elements.
  _source =          gst_element_factory_make ("uridecodebin", "source");

//  _audioQueue =      gst_element_factory_make ("queue", "aqueue");
//  _audioConvert =    gst_element_factory_make ("audioconvert", "aconvert");
//  _audioResample =   gst_element_factory_make ("audioresample", "aresample");
//  _audioSink =       gst_element_factory_make ("appsink", "asink");
//
  _videoQueue =      gst_element_factory_make ("queue", "vqueue");
  _videoColorSpace = gst_element_factory_make ("videoconvert", "vcolorspace");
  videoScale = gst_element_factory_make ("videoscale", "videoscale0");
  capsFilter = gst_element_factory_make ("capsfilter", "capsfilter0");
  _videoSink =       gst_element_factory_make ("appsink", "vsink");
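  // Video branch as linked below: queue ! videoconvert ! capsfilter ! videoscale ! appsink.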

  // Prepare handler data.
//  _padHandlerData.audioToConnect   = _audioQueue;
  _padHandlerData.videoToConnect   = _videoQueue;
  _padHandlerData.videoSink        = _videoSink;
  //_padHandlerData.audioIsConnected = false;
  _padHandlerData.videoIsConnected = false;

//  _newAudioBufferHandlerData.audioSink          = _audioSink;
//  _newAudioBufferHandlerData.audioBufferAdapter = _audioBufferAdapter;

  // Create the empty pipeline.
  _pipeline = gst_pipeline_new ( "video-source-pipeline" );

  if (!_pipeline || !_source ||
//      !_audioQueue || !_audioConvert || !_audioResample || !_audioSink ||
      !_videoQueue || !_videoColorSpace || ! videoScale || ! capsFilter || ! _videoSink)
  {
    g_printerr ("Not all elements could be created.\n");
    unloadMovie();
    return false; // bool function: -1 would convert to true
  }

  // Build the pipeline. Note that we are NOT linking the source at this
  // point. We will do it later.
  gst_bin_add_many (GST_BIN (_pipeline), _source,
//                    _audioQueue, _audioConvert, _audioResample, _audioSink,
                    _videoQueue, _videoColorSpace, videoScale, capsFilter, _videoSink, NULL);

//  if (!gst_element_link_many(_audioQueue, _audioConvert, _audioResample, _audioSink, NULL)) {
//    g_printerr ("Audio elements could not be linked.\n");
//    unloadMovie();
//    return false;
//  }

  if (!gst_element_link_many (_videoQueue, _videoColorSpace, capsFilter, videoScale, _videoSink, NULL)) {
    g_printerr ("Video elements could not be linked.\n");
    unloadMovie();
    return false;
  }

  // Process URI.
  QByteArray uriBytes = filename.toUtf8(); // keep the bytes alive while we use the pointer
  gchar* uri = (gchar*) uriBytes.constData();
  if (!gst_uri_is_valid(uri))
  {
    // Try to convert filename to URI.
    GError* error = NULL;
    uri = gst_filename_to_uri(uri, &error);
    if (error) {
      qDebug() << "Filename to URI error: " << error->message;
      g_error_free(error); // uri is NULL on failure, nothing else to free
      freeResources();
      return false;
    }
  }

  // Set URI to be played.
  qDebug() << "URI for uridecodebin: " << uri;
  // FIXME: sometimes it's just the path to the directory that is given, not the file itself.
  g_object_set (_source, "uri", uri, NULL);
  // Connect to the pad-added signal
  g_signal_connect (_source, "pad-added", G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);

  // Configure audio appsink.
  // TODO: change from mono to stereo
//  gchar* audioCapsText = g_strdup_printf ("audio/x-raw-float,channels=1,rate=%d,signed=(boolean)true,width=%d,depth=%d,endianness=BYTE_ORDER",
//                                          Engine::signalInfo().sampleRate(), (int)(sizeof(Signal_T)*8), (int)(sizeof(Signal_T)*8) );
//  GstCaps* audioCaps = gst_caps_from_string (audioCapsText);
//  g_object_set (_audioSink, "emit-signals", TRUE,
//                            "caps", audioCaps,
////                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
////                            "drop", TRUE,         // ... other buffers are dropped
//                            NULL);
//  g_signal_connect (_audioSink, "new-buffer", G_CALLBACK (VideoImpl::gstNewAudioBufferCallback), &_newAudioBufferHandlerData);
//  gst_caps_unref (audioCaps);
//  g_free (audioCapsText);

  // Configure video appsink.
//  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw-rgb");
  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
  g_object_set (capsFilter, "caps", videoCaps, NULL);
  g_object_set (_videoSink, "emit-signals", TRUE,
                            "max-buffers", 1,     // only one buffer (the last) is maintained in the queue
                            "drop", TRUE,         // ... other buffers are dropped
                            NULL);
  g_signal_connect (_videoSink, "new-sample", G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
  gst_caps_unref (videoCaps);

  // Listen to the bus.
  _bus = gst_element_get_bus (_pipeline);

  // Start playing.
  if (!setPlayState(true))
    return false;

  qDebug() << "Pipeline started.";

  //_movieReady = true;
  return true;
}