Пример #1
0
X11Renderer::X11Renderer(VideoWidget *videoWidget)
        : AbstractRenderer(videoWidget)
{
    // Overlay widget the X11 video sink renders into.
    m_renderWidget = new OverlayWidget(videoWidget, this);
    videoWidget->backend()->logMessage("Creating X11 overlay renderer");

    // Fill the video widget background with black so uncovered areas
    // don't show through while no frame is being rendered.
    QPalette blackPalette;
    blackPalette.setColor(QPalette::Background, Qt::black);
    m_videoWidget->setPalette(blackPalette);
    m_videoWidget->setAutoFillBackground(true);

    m_renderWidget->setMouseTracking(true);

    // Create the sink, apply the widget's current aspect ratio,
    // then attach the overlay to it.
    m_videoSink = createVideoSink();
    aspectRatioChanged(videoWidget->aspectRatio());
    setOverlay();
}
Пример #2
0
// Builds the GStreamer playback pipeline for the current media source and
// leaves it in the PAUSED state. Runs on the loader thread. On any failure
// reportError() is invoked and the function returns early.
void MediaPlayer::threadLoadMedia()
{
    m_duration = -1;
    m_errorsDetected = false;

    // Initialize gstreamer if not initialized yet
    if ( !gst_is_initialized() )
    {
        //qputenv( "GST_DEBUG", "*:4" );
#ifdef WIN32
        // On Windows the plugins ship next to the executable; point GStreamer at them.
        QString env = QString("GST_PLUGIN_PATH=%1\\gstreamer\\") .arg( QApplication::applicationDirPath() );
        env.replace( "/", "\\" );
        _putenv( qPrintable(env) );

        Logger::debug( "GstMediaPlayer: setting %s", qPrintable( env ) );
#endif
        gst_init( nullptr, nullptr );
    }

    // Create the empty pipeline (this must be done first)
    m_gst_pipeline = gst_pipeline_new ("karaokepipeline");

    if ( !m_gst_pipeline )
    {
        reportError( "Pipeline could not be created." );
        return;
    }

    // Create the pipeline bus
    m_gst_bus = gst_element_get_bus( m_gst_pipeline );

    if ( !m_gst_bus )
    {
        reportError( "Pipeline bus could not be created." );
        return;
    }

    // Set the handler for the bus; messages are dispatched synchronously
    // from the streaming threads into cb_busMessageDispatcher.
    gst_bus_set_sync_handler( m_gst_bus, cb_busMessageDispatcher, this, 0 );

    // Create our media source, which could be either QIODevice/appsrc or a file
    // this also creates a decoder
    setupSource();

    // Those are mandatory
    if ( !m_gst_pipeline || !m_gst_source || !m_gst_decoder )
    {
        reportError( "Not all elements could be created." );
        return;
    }

    // Link and set up source and decoder if they are not the same object.
    if ( m_gst_source != m_gst_decoder )
    {
        if ( !gst_element_link( m_gst_source, m_gst_decoder ) )
        {
            reportError( "Cannot link source and decoder." );
            return;
        }
    }

    // Decoder pads appear dynamically once the stream type is known;
    // cb_pad_added links them into the audio/video branches below.
    g_signal_connect( m_gst_decoder, "pad-added", G_CALLBACK (cb_pad_added), this );

    // Pre-create video elements if we need them
    if ( (m_loadOptions & MediaPlayer::LoadVideoStream) != 0 )
    {
        m_gst_video_colorconv = createElement( "videoconvert", "videoconvert" );
        m_gst_video_sink = createVideoSink();

        if ( !m_gst_video_colorconv || !m_gst_video_sink )
        {
            reportError( "Not all elements could be created." );
            return;
        }

        // Link the color converter and video sink
        if ( !gst_element_link( m_gst_video_colorconv, m_gst_video_sink ) )
        {
            // FIX: message typo ("Cannor") corrected.
            reportError( "Cannot link video elements." );
            return;
        }
    }

    // Pre-create audio elements if we need them
    if ( (m_loadOptions & MediaPlayer::LoadAudioStream) != 0 )
    {
        // Load the pitch plugin if it is available
        m_pitchPlugin = pPluginManager->loadPitchChanger();

        // Create the audio elements, and add them to the bin
        m_gst_audioconverter = createElement ("audioconvert", "convert");
        m_gst_audio_volume = createElement("volume", "volume");
        m_gst_audiosink = createElement ("autoaudiosink", "sink");

        // Those are mandatory
        if ( !m_gst_audioconverter || !m_gst_audiosink || !m_gst_audio_volume )
        {
            reportError( "Not all elements could be created." );
            return;
        }

        // This one is optional, although it seems to be present everywhere
        m_gst_audio_tempo = createElement( "scaletempo", "tempo", false );

        // If we have the pitch changer
        if ( m_pitchPlugin && m_pitchPlugin->init() )
            m_gst_audio_pitchadjust = createElement( m_pitchPlugin->elementName(), "audiopitchchanger", false );
        else
            m_gst_audio_pitchadjust = 0;

        // Start linking: converter [-> pitch -> converter2] -> volume [-> tempo] -> sink
        bool linksucceed = true;
        GstElement * last = m_gst_audioconverter;

        if ( m_gst_audio_pitchadjust )
        {
            m_gst_audioconverter2 = createElement ("audioconvert", "convert2");

            // FIX: this element was linked without a null check, unlike
            // every other mandatory element above.
            if ( !m_gst_audioconverter2 )
            {
                reportError( "Not all elements could be created." );
                return;
            }

            linksucceed = gst_element_link_many( m_gst_audioconverter, m_gst_audio_pitchadjust, m_gst_audioconverter2, NULL );
            last = m_gst_audioconverter2;
        }

        // Now link in volume.
        // FIX: the unconditional assignment here used to overwrite (and thus
        // silently swallow) a failure of gst_element_link_many() above.
        if ( linksucceed )
        {
            linksucceed = gst_element_link( last, m_gst_audio_volume );
            last = m_gst_audio_volume;
        }

        // Now link in tempo if it is available
        if ( linksucceed && m_gst_audio_tempo )
        {
            linksucceed = gst_element_link( last, m_gst_audio_tempo );
            last = m_gst_audio_tempo;
        }

        // And finally the audio sink
        if ( linksucceed )
            linksucceed = gst_element_link( last, m_gst_audiosink );

        if ( !linksucceed )
        {
            reportError( "Audio elements could not be linked." );
            return;
        }
    }

    // Pre-roll the pipeline; actual playback starts later.
    setPipelineState( GST_STATE_PAUSED );
}