Example no. 1
void YVPlayer::playCurrent() {
    const char *mmrname = NULL;
    const char *ctxtname = "onebible";
    const char *audioout = NULL;
    const char *inputtype = "track";
    char inputurl[PATH_MAX];
    int rc;
    mmr_connection_t *connection = NULL;

    rc = snprintf(inputurl, PATH_MAX, "file://%s/%s", getenv("HOME"), m_t.toLatin1().data());

    mode_t mode = S_IRUSR | S_IXUSR;
    int audio_oid; // output ID
    strm_dict_t *aoparams = NULL; // output parameters

    audioout = DEFAULT_AUDIO_OUT;

    // TODO: Better error reporting
    if ( ( connection = mmr_connect( mmrname ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "mmr_connect: %s", strerror(errno));
    } else if ( ( m_ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "%s: %s", ctxtname, strerror(errno));
    } else if ( audioout && ( audio_oid = mmr_output_attach( m_ctxt, audioout, "audio" ) ) < 0 ) {
        snprintf(msg, MSG_SIZE, "mmr_output_attach (audio): %s", strerror(errno));
    } else if ( aoparams && mmr_output_parameters( m_ctxt, audio_oid, aoparams ) ) {
        snprintf(msg, MSG_SIZE, "mmr_output_parameters (audio): %s", strerror(errno));
    } else if ( mmr_input_attach( m_ctxt, inputurl, inputtype ) < 0 ) {
        snprintf(msg, MSG_SIZE, "mmr_input_attach: %s", strerror(errno));
    } else if ( mmr_play( m_ctxt ) < 0 ) {
        snprintf(msg, MSG_SIZE, "mmr_play: %s", strerror(errno));
    } else {
        // Playback started successfully.
    }

    mmrenderer_request_events(ctxtname, 0, 0); // register for mm-renderer events through BPS
    return;
}
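This snippet leans on a few identifiers declared elsewhere in its source file. A minimal sketch of what those declarations might look like, assuming DEFAULT_AUDIO_OUT maps to the same "audio:default" sink the later examples use:

// Hypothetical supporting declarations for Example no. 1 (not part of the original snippet).
#define DEFAULT_AUDIO_OUT "audio:default"   // assumed: the default audio sink used by the other examples
#define MSG_SIZE 256                        // assumed size of the shared error-message buffer
static char msg[MSG_SIZE];                  // filled by the error branches in playCurrent()

// Assumed YVPlayer members referenced above:
//   mmr_context_t *m_ctxt;   // mm-renderer context, kept for later stop/seek calls
//   QString        m_t;      // track path relative to $HOME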
Example no. 2
void BbVideoWindowControl::attachDisplay(mmr_context_t *context)
{
    if (m_videoId != -1) {
        qDebug() << "BbVideoWindowControl: Video output already attached!";
        return;
    }

    if (!context) {
        qDebug() << "BbVideoWindowControl: No media player context!";
        return;
    }

    if (!m_widget) {
        qDebug() << "BbVideoWindowControl: No video widget!";
        return;
    }

    QPlatformNativeInterface * const nativeInterface = QApplication::platformNativeInterface();
    if (!nativeInterface) {
        qDebug() << "BbVideoWindowControl: Unable to get platform native interface. Qt too old?";
        return;
    }

    const char * const groupNameData = static_cast<const char *>(
        nativeInterface->nativeResourceForWidget("windowGroup", m_widget));
    if (!groupNameData) {
        qDebug() << "BbVideoWindowControl: Unable to find window group for widget" << m_widget;
        return;
    }

    const QString groupName = QString::fromAscii(groupNameData);
    m_windowName = QString("BbVideoWindowControl_%1_%2").arg(winIdCounter++)
                                                        .arg(QCoreApplication::applicationPid());
    // Start with an invisible window. If it would be visible right away, it would be at the wrong
    // position, and we can only change the position once we get the window handle.
    const QString videoDeviceUrl =
            QString("screen:?winid=%1&wingrp=%2&initflags=invisible&nodstviewport=1").arg(m_windowName).arg(groupName);

    m_videoId = mmr_output_attach(context, videoDeviceUrl.toAscii(), "video");
    if (m_videoId == -1) {
        qDebug() << mmErrorMessage("mmr_output_attach() for video failed", context);
        return;
    }

    m_context = context;
    updateVideoPosition();
    updateHue();
    updateContrast();
    updateBrightness();
    updateSaturation();
}
Example no. 3
void MmRendererMediaPlayerControl::attach()
{
    // Should only be called in detached state
    Q_ASSERT(m_audioId == -1 && !m_inputAttached);

    if (m_media.isNull() || !m_context) {
        setMediaStatus(QMediaPlayer::NoMedia);
        return;
    }

    if (m_videoRendererControl)
        m_videoRendererControl->attachDisplay(m_context);

    if (m_videoWindowControl)
        m_videoWindowControl->attachDisplay(m_context);

    const QByteArray defaultAudioDevice = qgetenv("QQNX_RENDERER_DEFAULT_AUDIO_SINK");
    m_audioId = mmr_output_attach(m_context, defaultAudioDevice.isEmpty() ? "audio:default" : defaultAudioDevice.constData(), "audio");
    if (m_audioId == -1) {
        emitMmError("mmr_output_attach() for audio failed");
        return;
    }

    const QByteArray resourcePath = resourcePathForUrl(m_media.canonicalUrl());
    if (resourcePath.isEmpty()) {
        detach();
        return;
    }

    if (mmr_input_attach(m_context, resourcePath.constData(), "track") != 0) {
        emitMmError(QStringLiteral("mmr_input_attach() failed for ") + QString(resourcePath));
        setMediaStatus(QMediaPlayer::InvalidMedia);
        detach();
        return;
    }

    // For whatever reason, the mmrenderer sends out a MMR_STOPPED event when calling
    // mmr_input_attach() above. Ignore it, as otherwise we'll trigger stopping right after we
    // started.
    m_stopEventsToIgnore++;

    m_inputAttached = true;
    setMediaStatus(QMediaPlayer::LoadedMedia);

    // mm-renderer has buffer properties "status" and "level"
    // QMediaPlayer's buffer status maps to mm-renderer's buffer level
    m_bufferLevel = 0;
    emit bufferStatusChanged(m_bufferLevel);
}
Example no. 4
	//
	// background audio (using mmrenderer)
	//
    void SimpleAudioEngine::preloadBackgroundMusic(const char* pszFilePath)
	{
    	if (!s_isBackgroundInitialized)
    	{
    		const char 		*mmrname = NULL;
    		const char 		*ctxtname = "mmrplayer";
    		char 			 cwd[PATH_MAX];
    		mode_t 			 mode = S_IRUSR | S_IXUSR;

    		getcwd(cwd, PATH_MAX);
    		string path = "file://";
    		path += cwd;
    		path += "/";
    		path += pszFilePath;

    		s_mmrConnection = mmr_connect(mmrname);
    		if (!s_mmrConnection)
    		{
    			perror("mmr_connect");
    			s_hasMMRError = true;
    			return;
    		}

    		s_mmrContext = mmr_context_create(s_mmrConnection, ctxtname, 0, mode);
    		if (!s_mmrContext)
    		{
    			perror(ctxtname);
    			s_hasMMRError = true;
    			return;
    		}

    		if ((s_audioOid = mmr_output_attach(s_mmrContext, "audio:default", "audio")) < 0)
    		{
    			mmrerror(s_mmrContext, "audio:default");
    			return;
    		}

    		if (mmr_input_attach(s_mmrContext, path.data(), "autolist") < 0)
    		{
    			fprintf(stderr, "unable to load %s\n", path.data());
    			mmrerror(s_mmrContext, path.data());
    			return;
    		}

    		s_currentBackgroundStr 	  = pszFilePath;
			s_isBackgroundInitialized = true;
			setBackgroundVolume(s_volume);
    	}
	}
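Several of these snippets call an mmrerror() helper that is never shown. A plausible sketch, assuming it wraps mm-renderer's mmr_error_info() query; the s_hasMMRError flag mirrors the one used by SimpleAudioEngine above:

#include <mm/renderer.h>
#include <stdio.h>

static bool s_hasMMRError = false;   // mirrors the SimpleAudioEngine flag above

// Hedged sketch of the mmrerror() helper used by this and several later examples.
static void mmrerror(mmr_context_t *ctxt, const char *msg)
{
    const mmr_error_info_t *err = mmr_error_info(ctxt);       // last error recorded on this context
    unsigned errcode = err ? err->error_code : (unsigned)-1;  // (unsigned)-1 if no error info is available
    fprintf(stderr, "%s: mm-renderer error %u\n", msg, errcode);
    s_hasMMRError = true;
}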
Example no. 5
int ZaMp3::playloop(const char * url)
{
	const char *mmrname = NULL;
	const char *ctxtname = "testplayer";
	const char *audioout = NULL;
	const char *inputtype = "autolist";
	//int     final_return_code = EXIT_FAILURE;
	mode_t mode = S_IRUSR | S_IXUSR;
	int audio_oid; // output ID
	strm_dict_t *aoparams = NULL; // output parameters

	mmr_connection_t *connection;
	mmr_context_t *ctxt;

	audioout = DEFAULT_AUDIO_OUT;

	strm_dict_t *aiparams = strm_dict_new();
	aiparams = strm_dict_set(aiparams, "repeat", "all");


	getcwd(cwd, PATH_MAX);
	rc = snprintf(inputurl, PATH_MAX, "file://%s%s", cwd, url);

	if ( ( connection = mmr_connect( mmrname ) ) == NULL ) {
		perror( "mmr_connect" );
	} else if ( ( ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
		perror( ctxtname );
	} else if ( audioout && ( audio_oid = mmr_output_attach( ctxt, audioout, "audio" ) ) < 0 ) {
		mmrerror( ctxt, audioout );
	} else if ( aoparams && mmr_output_parameters( ctxt, audio_oid, aoparams ) ) {
		mmrerror( ctxt, "output parameters (audio)" );
	} else if ( mmr_input_attach( ctxt, inputurl, inputtype ) < 0 ) {
		mmrerror( ctxt, inputurl );
	} else if ( aiparams && mmr_input_parameters( ctxt, aiparams ) ) {  // NEW
		mmrerror( ctxt, "input parameters (audio)" );  // NEW
	} else if ( mmr_play( ctxt ) < 0 ) {
		mmrerror( ctxt, "mmr_play" );
	}
	return 0;
}
Example no. 6
int ZaMp3::play(const char * url)
{
	const char *mmrname = NULL;
	const char *audioout = NULL;
	const char *inputtype = "track";
	//int  final_return_code = EXIT_FAILURE;
	mode_t mode = S_IRUSR | S_IXUSR;
	int audio_oid; // output ID
	strm_dict_t *aoparams = NULL; // output parameters

	if (!ctxt)
	{
		audioout = DEFAULT_AUDIO_OUT;
		getcwd(cwd, PATH_MAX);
		rc = snprintf(inputurl, PATH_MAX, "file://%s%s", cwd, url);
		if (( connection = mmr_connect( mmrname ) ) == NULL )
			perror( "mmr_connect" );
		else if ( ( ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
			perror( "mmr_create" );
			perror( ctxtname );
		} else if ( audioout && ( audio_oid = mmr_output_attach( ctxt, audioout, "audio" ) ) < 0 ) {
			mmrerror( ctxt, audioout );
		} else if ( aoparams && mmr_output_parameters( ctxt, audio_oid, aoparams ) ) {
			mmrerror( ctxt, "output parameters (audio)" );
		} else if ( mmr_input_attach( ctxt, inputurl, inputtype ) < 0 ) {
			mmrerror( ctxt, inputurl );
		} else if ( mmr_play( ctxt ) < 0 ) {
			mmrerror( ctxt, "mmr_play" );
		}
	}
	else
	{
		mmr_stop(ctxt);
		//mmr_input_attach( ctxt, inputurl, inputtype );
		mmr_play( ctxt );
	}

	return 0;
}
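Both ZaMp3 methods rely on state that is declared elsewhere in the class: playloop() shadows connection and ctxt with locals, while play() reuses them (plus ctxtname, cwd, inputurl and rc) across calls. A sketch of the assumed declarations:

// Assumed ZaMp3 members referenced by playloop() and play() (not shown above).
class ZaMp3 {
    // ...
    char              cwd[PATH_MAX];       // working directory, filled by getcwd()
    char              inputurl[PATH_MAX];  // "file://<cwd><url>" built with snprintf()
    int               rc;                  // snprintf() return code
    const char       *ctxtname = "testplayer";
    mmr_connection_t *connection = NULL;   // kept so play() can reuse the connection
    mmr_context_t    *ctxt = NULL;         // non-NULL once play() has created a context
};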
Example no. 7
void BbMediaPlayerControl::attach()
{
    if (m_media.isNull() || !m_context) {
        setMediaStatus(QMediaPlayer::NoMedia);
        return;
    }

    if (m_videoControl)
        m_videoControl->attachDisplay(m_context);

    m_audioId = mmr_output_attach(m_context, "audio:default", "audio");
    if (m_audioId == -1) {
        emitMmError("mmr_output_attach() for audio failed");
        return;
    }

    const QString resourcePath = resourcePathForUrl(m_media.canonicalUrl());
    if (resourcePath.isEmpty()) {
        detach();
        return;
    }

    if (mmr_input_attach(m_context, QFile::encodeName(resourcePath), "track") != 0) {
        emitMmError(QString("mmr_input_attach() for %1 failed").arg(resourcePath));
        setMediaStatus(QMediaPlayer::InvalidMedia);
        detach();
        return;
    }

    // For whatever reason, the mmrenderer sends out a MMR_STOPPED event when calling
    // mmr_input_attach() above. Ignore it, as otherwise we'll trigger stopping right after we
    // started.
    m_stopEventsToIgnore++;

    m_inputAttached = true;
    setMediaStatus(QMediaPlayer::LoadedMedia);
    m_bufferStatus = 0;
    emit bufferStatusChanged(m_bufferStatus);
}
Example no. 8
void MmRendererPlayerVideoRendererControl::attachDisplay(mmr_context_t *context)
{
    if (m_videoId != -1) {
        qWarning() << "MmRendererPlayerVideoRendererControl: Video output already attached!";
        return;
    }

    if (!context) {
        qWarning() << "MmRendererPlayerVideoRendererControl: No media player context!";
        return;
    }

    const QByteArray windowGroupId = m_windowGrabber->windowGroupId();
    if (windowGroupId.isEmpty()) {
        qWarning() << "MmRendererPlayerVideoRendererControl: Unable to find window group";
        return;
    }

    const QString windowName = QStringLiteral("MmRendererPlayerVideoRendererControl_%1_%2")
                                             .arg(winIdCounter++)
                                             .arg(QCoreApplication::applicationPid());

    m_windowGrabber->setWindowId(windowName.toLatin1());

    // Start with an invisible window, because we just want to grab the frames from it.
    const QString videoDeviceUrl = QStringLiteral("screen:?winid=%1&wingrp=%2&initflags=invisible&nodstviewport=1")
                                                 .arg(windowName)
                                                 .arg(QString::fromLatin1(windowGroupId));

    m_videoId = mmr_output_attach(context, videoDeviceUrl.toLatin1(), "video");
    if (m_videoId == -1) {
        qWarning() << "mmr_output_attach() for video failed";
        return;
    }

    m_context = context;
}
Example no. 9
int main(int argc, char *argv[])
{
    int rc;
    int exit_application = 0;

    // Screen variables
    screen_context_t    screen_context = 0;
    screen_window_t     screen_window = 0;

    int screen_size[2] = {0,0};

    // Renderer variables
    mmr_connection_t*     mmr_connection = 0;
    mmr_context_t*        mmr_context = 0;
    strm_dict_t*          dict = NULL;

    // I/O variables
    int                    video_device_output_id = -1;
    int                    audio_device_output_id = -1;

    bps_initialize();

    /*
     * Create the window used for video output.
     */
    if (screen_create_context(&screen_context, SCREEN_APPLICATION_CONTEXT) != 0) {
        return EXIT_FAILURE;
    }

    if (screen_create_window(&screen_window, screen_context) != 0) {
        screen_destroy_context(screen_context);
        return EXIT_FAILURE;
    }

    if (screen_create_window_group(screen_window, window_group_name) != 0) {
        return EXIT_FAILURE;
    }

    int format = SCREEN_FORMAT_RGBA8888;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_FORMAT, &format) != 0) {
        return EXIT_FAILURE;
    }

    int usage = SCREEN_USAGE_NATIVE;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_USAGE, &usage) != 0) {
        return EXIT_FAILURE;
    }


    if (screen_create_window_buffers(screen_window, 1) != 0) {
        return EXIT_FAILURE;
    }

    /*
     * Configure mm-renderer.
     */
    mmr_connection = mmr_connect(NULL);
    if (mmr_connection == NULL) {
        return EXIT_FAILURE;
    }

    mmr_context = mmr_context_create(mmr_connection, video_context_name, 0, S_IRWXU|S_IRWXG|S_IRWXO);
    if (mmr_context == NULL) {
        return EXIT_FAILURE;
    }

    /*
     * Configure video and audio output.
     */
    video_device_output_id = mmr_output_attach(mmr_context, video_device_url, "video");
    if (video_device_output_id == -1) {
        return EXIT_FAILURE;
    }

    audio_device_output_id = mmr_output_attach(mmr_context, audio_device_url, "audio");
    if (audio_device_output_id == -1) {
        return EXIT_FAILURE;
    }

    // Get the render buffer
    screen_buffer_t temp_buffer[1];
    if (screen_get_window_property_pv( screen_window, SCREEN_PROPERTY_RENDER_BUFFERS, (void**)temp_buffer) != 0) {
        return EXIT_FAILURE;
    }

    // Fill the buffer with a solid color (black)
    int fill_attributes[3] = {SCREEN_BLIT_COLOR, 0x0, SCREEN_BLIT_END};
    if (screen_fill(screen_context, temp_buffer[0], fill_attributes) != 0) {
        return EXIT_FAILURE;
    }

    // Make the window visible
    if (screen_get_window_property_iv(screen_window, SCREEN_PROPERTY_SIZE, screen_size) != 0) {
        return EXIT_FAILURE;
    }

    int temp_rectangle[4] = {0, 0, screen_size[0], screen_size[1]};
    if (screen_post_window(screen_window, temp_buffer[0], 1, temp_rectangle, 0) != 0) {
        return EXIT_FAILURE;
    }

    // Prevent the backlight from going off
    int idle_mode = SCREEN_IDLE_MODE_KEEP_AWAKE;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_IDLE_MODE, &idle_mode) != 0) {
        return EXIT_FAILURE;
    }

    // Build up the path where our bundled resource is.
    char cwd[PATH_MAX];
    char media_file[PATH_MAX];
    getcwd(cwd,PATH_MAX);

    rc = snprintf(media_file, PATH_MAX, "file://%s/app/native/pb_sample.mp4", cwd);
    if ((rc == -1) || (rc >= PATH_MAX)) {
        return EXIT_FAILURE;
    }

    /*
     * Start the playback.
     */
    if (mmr_input_attach(mmr_context, media_file, "track") != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_play(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    /* Do some work to make the aspect ratio correct.
     */
    dict = calculate_rect(screen_size[0], screen_size[1]);
    if (NULL == dict) {
        return EXIT_FAILURE;
    }

    if (mmr_output_parameters(mmr_context, video_device_output_id, dict) != 0) {
        return EXIT_FAILURE;
    }

    /* Note that we allocated memory for the dictionary, but the call to
     * mmr_output_parameters() deallocates that memory even on failure.
     */
    dict = NULL;

    screen_request_events(screen_context);
    navigator_request_events(0);

    /*
     * Handle keyboard events and stop playback upon user request.
     */
    for (;;) {
        bps_event_t *event = NULL;
        if (bps_get_event(&event, -1) != BPS_SUCCESS) {
            return EXIT_FAILURE;
        }

        if (event) {

            if (bps_event_get_domain(event) == navigator_get_domain() &&
                bps_event_get_code(event) == NAVIGATOR_EXIT) {

                exit_application = 1;
            }

            if (exit_application) {
                break;
            }
        }
    }

    screen_stop_events(screen_context);

    if (mmr_stop(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, audio_device_output_id) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, video_device_output_id) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_context_destroy(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    mmr_context = 0;
    video_device_output_id = -1;
    audio_device_output_id = -1;

    mmr_disconnect(mmr_connection);
    mmr_connection = 0;

    bps_shutdown();

    if (screen_destroy_window(screen_window) != 0) {
        return EXIT_FAILURE;
    }

    if (screen_destroy_context(screen_context) != 0) {
        return EXIT_FAILURE;
    }

    screen_context = 0;
    screen_window = 0;

    return EXIT_SUCCESS;
}
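Examples no. 9 and no. 11 both call a calculate_rect() helper that builds the dictionary handed to mmr_output_parameters() for the video output. A simplified, fullscreen-only sketch follows; the video_dest_* keys are the mm-renderer video output parameters used by the BlackBerry samples, and the rest of the details are assumptions:

#include <stdio.h>
#include <mm/renderer.h>   // strm_dict_* is assumed to be visible through this (or the dictionary header)

// Hedged sketch of calculate_rect(): cover the whole window. A real implementation
// would letterbox the destination rectangle to match the source aspect ratio.
static strm_dict_t *calculate_rect(int width, int height)
{
    char buf[16];
    strm_dict_t *dict = strm_dict_new();
    if (dict == NULL)
        return NULL;

    dict = strm_dict_set(dict, "video_dest_x", "0");
    if (dict) dict = strm_dict_set(dict, "video_dest_y", "0");
    snprintf(buf, sizeof(buf), "%d", width);
    if (dict) dict = strm_dict_set(dict, "video_dest_w", buf);
    snprintf(buf, sizeof(buf), "%d", height);
    if (dict) dict = strm_dict_set(dict, "video_dest_h", buf);

    return dict;   // mmr_output_parameters() consumes this dictionary, even on failure
}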
Example no. 10
int main( int argc, char **argv ) {
    const char *mmrname = NULL;
    const char *ctxtname = "testplayer";
    const char *audioout = NULL;
    const char *inputtype = "track";
    char cwd[PATH_MAX];
    char inputurl[PATH_MAX];
    int rc;
    int final_return_code = EXIT_FAILURE;
    int exit_application = 0;
    mmr_connection_t *connection = NULL;
    mmr_context_t *ctxt = NULL;

    /*
     * Before we can listen for events from the BlackBerry(R) 10 OS platform
     * services, we need to initialize the BPS infrastructure
     */
    bps_initialize();

    if (setup_screen() != EXIT_SUCCESS) {
        printf("Unable to set up the screen. Exiting.");
        return 0;
    }

    /*
     * Once the BPS infrastructure has been initialized we can register for
     * events from the various BlackBerry(R) 10 OS platform services. The
     * Navigator service manages and delivers application life cycle and
     * visibility events.
     * For this sample, we request Navigator events so we can track when
     * the system is terminating the application (NAVIGATOR_EXIT event). 
     * This allows us to clean up application resources.
     */
    navigator_request_events(0);
    dialog_request_events(0);

    /*
     * Create and display the dialog.
     */
    create_dialog();

    getcwd(cwd, PATH_MAX);
    rc = snprintf(inputurl, PATH_MAX, "file://%s%s", cwd, WAV_RELATIVE_PATH);
    if (rc > PATH_MAX - 1)
    {
          show_dialog_message("File name and path too long");
          goto fail;
    }

    mode_t mode = S_IRUSR | S_IXUSR;
    int audio_oid; // output ID
    strm_dict_t *aoparams = NULL; // output parameters

    audioout = DEFAULT_AUDIO_OUT;

    if ( ( connection = mmr_connect( mmrname ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "mmr_connect: %s", strerror(errno));
        show_dialog_message(msg);
    } else if ( ( ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "%s: %s", ctxtname, strerror(errno));
        show_dialog_message(msg);
    } else if ( audioout && ( audio_oid = mmr_output_attach( ctxt, audioout, "audio" ) ) < 0 ) {
        mmrerror( ctxt, audioout );
    } else if ( aoparams && mmr_output_parameters( ctxt, audio_oid, aoparams ) ) {
        mmrerror( ctxt, "output parameters (audio)" );
    } else if ( mmr_input_attach( ctxt, inputurl, inputtype ) < 0 ) {
        mmrerror( ctxt, inputurl );
    } else if ( mmr_play( ctxt ) < 0 ) {
        mmrerror( ctxt, "mmr_play" );
    } else if (BPS_SUCCESS != bps_add_sigevent_handler( &mmr_sigevent, mmr_sigevent_handler, ctxt ) ) { 
        snprintf( msg, MSG_SIZE, "bps_add_sigevent_handler() failure %s", strerror( errno ) );
        show_dialog_message( msg );
    } else if ( drain_and_arm_mmr_events ( ctxt ) ) {
        snprintf( msg, MSG_SIZE, "drain_and_arm_mmr_events() failure %s", strerror( errno ) );
        show_dialog_message( msg );
    } else {
        show_dialog_message( "Playing Audio\n" );
        final_return_code = EXIT_SUCCESS;
    }

fail:
    /*
     * Process Navigator events until we receive a NAVIGATOR_EXIT event.
     */
    while (!exit_application) {
        /*
         * Using a negative timeout (-1) in the call to bps_get_event(...)
         * ensures that we don't busy wait by blocking until an event is
         * available.
         */
        bps_event_t *event = NULL;
        bps_get_event(&event, -1);

        if (event) {
            /*
             * If it is a NAVIGATOR_EXIT event then set the exit_application
             * flag so the application will stop processing events, clean up
             * and exit
             */
            if (bps_event_get_domain(event) == navigator_get_domain()) {
                if (NAVIGATOR_EXIT == bps_event_get_code(event)) {
                    if (final_return_code == EXIT_SUCCESS) {
                        mmr_stop( ctxt );             // Not really necessary -- mmr_input_detach() would take care of this
                        mmr_input_detach( ctxt );     // Not really necessary -- mmr_context_destroy()  would take care of this
                        mmr_context_destroy( ctxt );  // Not really necessary -- mmr_disconnect() would take care of this
                        mmr_disconnect( connection ); // Not really necessary -- exiting would take care of this
                    }
                    exit_application = 1;
                }
            } else if (bps_event_get_domain(event) == dialog_get_domain()) {
                if (DIALOG_RESPONSE == bps_event_get_code(event)) {
                    handle_dialog_response_events(event);
                }
            }
        }
    }

    /*
     * Destroy the dialog, if it exists and cleanup screen resources.
     */
    destroy_dialog();
    cleanup_screen();
    /*
     * Clean up the BPS infrastructure and exit
     */
    bps_shutdown();

    return final_return_code;

}
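Example no. 10 takes the event-driven route: it registers a sigevent handler with BPS and then drains and arms the mm-renderer event queue through helpers that are not shown. A heavily simplified sketch of drain_and_arm_mmr_events(), assuming the mm/renderer/events.h API (mmr_event_get()/mmr_event_arm()); a real handler would also dispatch each drained event:

#include <sys/siginfo.h>            // struct sigevent (QNX)
#include <mm/renderer/events.h>     // mmr_event_get() / mmr_event_arm()

static struct sigevent mmr_sigevent;   // the sigevent registered with bps_add_sigevent_handler()

// Hedged sketch: consume any events already queued on the context, then arm
// mmr_sigevent so BPS wakes mmr_sigevent_handler() when a new event arrives.
static int drain_and_arm_mmr_events(mmr_context_t *ctxt)
{
    for (;;) {
        const mmr_event_t *event = mmr_event_get(ctxt);
        if (event == NULL)
            return -1;                      // failed to read the event queue
        if (event->type == MMR_EVENT_NONE) {
            // Queue drained; a return value > 0 would mean another event is already pending.
            return mmr_event_arm(ctxt, &mmr_sigevent) < 0 ? -1 : 0;
        }
        // A full implementation would handle the state/error/status event here.
    }
}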
Example no. 11
int main(int argc, char *argv[])
{
    int rc;

    // Renderer variables
    mmr_connection_t*     mmr_connection = 0;
    mmr_context_t*        mmr_context = 0;
    strm_dict_t*          dict = NULL;

    // I/O variables
    int                    video_device_output_id = -1;
    int                    audio_device_output_id = -1;

    // Position of the play and stop button.
    static int ctrl_x = 0;
    static int ctrl_y = 0;

    EGLint surface_width;
    EGLint surface_height;

    srand(time(0));
    app_id = rand();

    // I/O devices
    static char *audio_device_url    = "audio:default";
    static char video_device_url[PATH_MAX];
    rc = snprintf(video_device_url, PATH_MAX, "screen:?winid=videosamplewindowgroup_%d&wingrp=videosamplewindowgroup_%d", app_id, app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "URL too long\n");
    }

    // Name of video context...with a random number appended.
    static char video_context_name[PATH_MAX];
    rc = snprintf(video_context_name, PATH_MAX, "samplevideocontextname_%d", app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "Video context name too long\n");
    }

    // Window group name...with the same random number appended.
    static char window_group_name[PATH_MAX];
    rc = snprintf(window_group_name, PATH_MAX, "videosamplewindowgroup_%d", app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "Video context name too long\n");
    }

    // Video file bundled with our app
    static const char *video_file_relative_path = "app/native/pb_sample.mp4";


    bps_initialize();

    // Create the Screen Context.
    if (screen_create_context(&g_screen_ctx, SCREEN_APPLICATION_CONTEXT) != 0) {
        fprintf(stderr, "screen_create_context failed\n");
        return EXIT_FAILURE;
    }

    // Create the window and initialize EGL for GL_ES_1 rendering
    rc = initialize_egl_window(g_screen_ctx, window_group_name);
    if (rc != EXIT_SUCCESS) {
        fprintf(stderr, "initialize_egl_window failed\n");
        return EXIT_FAILURE;
    }

    // Query width and height of the window surface created by utility code
    eglQuerySurface(g_egl_disp, g_egl_surf, EGL_WIDTH, &surface_width);
    eglQuerySurface(g_egl_disp, g_egl_surf, EGL_HEIGHT, &surface_height);
    EGLint err = eglGetError();
    if (err != EGL_SUCCESS) {
        fprintf(stderr, "Unable to query EGL surface dimensions\n");
        return EXIT_FAILURE;
    }

    // Initialize GL for 2D rendering
    glViewport(0, 0, (int)surface_width, (int) surface_height);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();

    glOrthof(0.0f, 1.0f, 0.0f, 1.0f, -1.0f, 1.0f);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    // Set world coordinates to coincide with screen pixels
    glScalef(1.0f / (float)surface_width, 1.0f / (float)surface_height, 1.0f);

    // Calculate the location and vertices of the controls
    ctrl_x = (float)surface_width  / 2 - ctrl_w / 2;
    ctrl_y = (float)surface_height / 2 - ctrl_h / 2;

    g_triangle_vertices[0] = ctrl_x;
    g_triangle_vertices[1] = ctrl_y;

    g_triangle_vertices[2] = ctrl_x;
    g_triangle_vertices[3] = ctrl_y + ctrl_h;

    g_triangle_vertices[4] = ctrl_x + ctrl_w;
    g_triangle_vertices[5] = ctrl_y + ctrl_h / 2;

    g_square_vertices[0] = ctrl_x;
    g_square_vertices[1] = ctrl_y;
    g_square_vertices[2] = ctrl_x;
    g_square_vertices[3] = ctrl_y + ctrl_h;
    g_square_vertices[4] = ctrl_x + ctrl_w;
    g_square_vertices[5] = ctrl_y + ctrl_h;
    g_square_vertices[6] = ctrl_x + ctrl_w;
    g_square_vertices[7] = ctrl_y;
    g_square_vertices[8] = ctrl_x;
    g_square_vertices[9] = ctrl_y;

    // Configure mm-renderer.
    mmr_connection = mmr_connect(NULL);
    if (mmr_connection == NULL) {
        fprintf(stderr, "mmr_connect failed\n");
        return EXIT_FAILURE;
    }

    mmr_context = mmr_context_create(mmr_connection, video_context_name, 0, S_IRWXU|S_IRWXG|S_IRWXO);
    if (mmr_context == NULL) {
        fprintf(stderr, "mmr_context_create failed\n");
        return EXIT_FAILURE;
    }

    // Configure video and audio output.
    video_device_output_id = mmr_output_attach(mmr_context, video_device_url, "video");
    if (video_device_output_id == -1) {
        fprintf(stderr, "mmr_output_attach(video) failed\n");
        return EXIT_FAILURE;
    }

    audio_device_output_id = mmr_output_attach(mmr_context, audio_device_url, "audio");
    if (audio_device_output_id == -1) {
        fprintf(stderr, "mmr_output_attach(audio) failed\n");
        return EXIT_FAILURE;
    }

    // render 'paused'
    render(true);


    // Build up the path where our bundled resource is.
    char cwd[PATH_MAX];
    char media_file[PATH_MAX];
    getcwd(cwd,PATH_MAX);

    rc = snprintf(media_file, PATH_MAX, "file://%s/%s", cwd, video_file_relative_path);
    if ((rc == -1) || (rc >= PATH_MAX)) {
        fprintf(stderr, "snprintf(media_file) failed\n");
        return EXIT_FAILURE;
    }

    // Attach the input media.
    if (mmr_input_attach(mmr_context, media_file, "track") != 0) {
        fprintf(stderr, "mmr_input_attach(track) failed\n");
        return EXIT_FAILURE;
    }


    int video_speed = 0;

    // Set the speed to 0 to pause the video initially
    if (mmr_speed_set(mmr_context, video_speed) != 0) {
        fprintf(stderr, "mmr_set_speed(0) failed\n");
        return EXIT_FAILURE;
    }

    // Change to the play state, although speed is zero
    if (mmr_play(mmr_context) != 0) {
        fprintf(stderr, "mmr_play failed\n");
        return EXIT_FAILURE;
    }

    /* Do some work to make the aspect ratio correct.
     */
    dict = calculate_rect(surface_width, surface_height);
    if (NULL == dict) {
        fprintf(stderr, "calculate_rect failed\n");
        return EXIT_FAILURE;
    }

    if (mmr_output_parameters(mmr_context, video_device_output_id, dict) != 0) {
        fprintf(stderr, "mmr_output_parameters failed\n");
        return EXIT_FAILURE;
    }

    /* Note that we allocated memory for the dictionary, but the call to 
     * mmr_output_parameters() deallocates that memory even on failure.
     */
    dict = NULL;

    screen_request_events(g_screen_ctx);
    navigator_request_events(0);

    screen_window_t video_window = (screen_window_t)0;
    bool app_window_above = true;
    int screen_val;
    int exit_value = EXIT_SUCCESS;

    // Handle keyboard events and stop playback upon user request.
    for (;;) {
        bps_event_t *event = NULL;
        if (bps_get_event(&event, 0) != BPS_SUCCESS) {
            return EXIT_FAILURE;
        }
        if (event) {
            if (bps_event_get_domain(event) == navigator_get_domain()) {
                if (bps_event_get_code(event) == NAVIGATOR_EXIT) {
                    break;
                } else if(NAVIGATOR_SWIPE_DOWN == bps_event_get_code(event)) {
                    if ((screen_window_t)0 != video_window) {

                        app_window_above = !app_window_above;
                        if (app_window_above) {
                            screen_val = 1;
                        } else {
                            screen_val = -1;
                        }
                        if (screen_set_window_property_iv(video_window, SCREEN_PROPERTY_ZORDER, &screen_val) != 0) {
                            fprintf(stderr, "screen_set_window_property(ZORDER) failed\n");
                            exit_value = EXIT_FAILURE;
                            break;
                        }

                        screen_val = 1;
                        if (screen_set_window_property_iv(video_window, SCREEN_PROPERTY_VISIBLE, &screen_val) != 0) {
                            fprintf(stderr, "screen_set_window_property(VISIBLE) failed\n");
                            exit_value = EXIT_FAILURE;
                            break;
                        }

                        rc = screen_flush_context(g_screen_ctx, SCREEN_WAIT_IDLE);
                        if (rc != 0) {
                            fprintf (stderr, "Warning: Failed to flush\n");
                        }
                    }
                }
            } else if (bps_event_get_domain(event) == screen_get_domain()) {
                screen_event_t screen_event = screen_event_get_event(event);
                int event_type;
                screen_get_event_property_iv(screen_event, SCREEN_PROPERTY_TYPE, &event_type);

                if (event_type == SCREEN_EVENT_CREATE && (video_window == (screen_window_t)0)) {
                    char id[256];

                    rc = screen_get_event_property_pv(screen_event, SCREEN_PROPERTY_WINDOW, (void**)&video_window);
                    if (rc != 0) {
                        fprintf(stderr, "screen_get_event_property(WINDOW) failed\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                    fprintf(stderr, "video_window%d\n",(int)video_window);

                    rc = screen_get_window_property_cv(video_window, SCREEN_PROPERTY_ID_STRING, 256, id);
                    if (rc != 0) {
                        fprintf(stderr, "screen_get_window_property(ID) failed\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                    fprintf(stderr, "window ID is %s\n", id);

                    if (strncmp(id, window_group_name, strlen(window_group_name)) != 0) {
                        fprintf(stderr, "window ID mismatch\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                } else if(event_type == SCREEN_EVENT_MTOUCH_TOUCH) {
                    if (video_speed == 0) {
                        video_speed = 1000;
                        render(false);
                    } else {
                        video_speed = 0;
                        render(true);
                    }

                    if (mmr_speed_set(mmr_context, video_speed) != 0) {
                        fprintf(stderr, "mmr_speed_set(%d) failed\n", video_speed);
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                }
            }
        }
    }

    screen_stop_events(g_screen_ctx);

    if (mmr_stop(mmr_context) != 0) {
        fprintf(stderr, "mmr_stop failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, audio_device_output_id) != 0) {
        fprintf(stderr, "mmr_output_detach(audio) failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, video_device_output_id) != 0) {
        fprintf(stderr, "mmr_output_detach(video) failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_context_destroy(mmr_context) != 0) {
        fprintf(stderr, "mmr_context_destroy failed\n");
        exit_value = EXIT_FAILURE;
    }

    mmr_context = 0;
    video_device_output_id = -1;
    audio_device_output_id = -1;

    mmr_disconnect(mmr_connection);
    mmr_connection = 0;

    bps_shutdown();

    if (screen_destroy_window(g_screen_win) != 0) {
        fprintf(stderr, "screen_destroy_window failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (screen_destroy_context(g_screen_ctx) != 0) {
        fprintf(stderr, "screen_destroy_context failed\n");
        exit_value = EXIT_FAILURE;
    }

    g_screen_ctx = 0;
    g_screen_win = 0;

    return exit_value;
}