Example #1
bool YVPlayer::nativeEventFilter(void *message) {
    bps_event_t * const event = static_cast<bps_event_t *>(message);

    unsigned int code = bps_event_get_code(event);

    switch (code) {

    case MMRENDERER_STATUS_UPDATE :
        m_pos = QString(mmrenderer_event_get_position(event)).toInt();
        break;
    case MMRENDERER_STATE_CHANGE :
        if (mmrenderer_event_get_state(event) == MMR_STOPPED) {
            if (m_playing && m_ctxt) {
                m_pos = -1;
                mmr_stop(m_ctxt);
                mmr_context_destroy(m_ctxt);
                m_ctxt = 0;
            }
            m_playing = false;
            emit playingChanged();
        }
        break;
    }

    if (s_previousEventFilter)
        return s_previousEventFilter(message);

    return false;
}
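
The filter above converts the renderer's position string with QString::toInt(). Since mmrenderer_event_get_position() reports the position as a decimal string (typically milliseconds for track inputs), a 32-bit int can overflow on long media. Below is a hedged sketch of a 64-bit conversion; parsePositionMs() is a hypothetical helper, not part of the BPS API.

// Hedged sketch: convert a BPS mm-renderer position string to a 64-bit value.
// parsePositionMs() is hypothetical; -1 mirrors the "unknown" marker used by
// the filter above.
#include <bps/mmrenderer.h>
#include <QString>

static qint64 parsePositionMs(bps_event_t *event)
{
    const char *pos = mmrenderer_event_get_position(event);
    if (!pos)
        return -1;                       // no position reported yet

    bool ok = false;
    const qint64 ms = QString::fromLatin1(pos).toLongLong(&ok);
    return ok ? ms : -1;
}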
Example #2
int ZaMp3::stop()
{
	if (ctxt)
		return mmr_stop(ctxt);
	else
		return 0;
}
Example #3
void YVPlayer::setPlaying(bool p) {
    m_playing = p;
    emit playingChanged();
    if (!p) {
        if (m_reply) {
            m_reply->deleteLater();
            m_reply = 0;
        } else if (m_ctxt) {
            mmr_stop(m_ctxt);
        }
        return;
    }

    if (!m_t.size()) {
        m_reply = m_nm->get(QNetworkRequest(QUrl::fromPercentEncoding(m_url.toLatin1())));
    } else {
        if (m_ctxt) {
            if (m_pos == -1) m_pos = 0;
            mmr_seek(m_ctxt, QString::number(m_pos).toLatin1().data());
            mmr_play(m_ctxt);
        } else {
            playCurrent();
        }
    }
}
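
setPlaying() falls back to a playCurrent() helper that is not shown in this example. The sketch below is a rough, hedged guess at what such a helper might do, built only from the connect/create/attach/play pattern seen in the other examples; the s_connection static, the context name "yvplayer", and the use of m_t as the resolved track URL are all assumptions, not the original code.

// Hedged sketch of a playCurrent() helper (hypothetical, not the original).
// Assumes m_t holds the resolved track URL and s_connection is a shared
// mmr_connection_t* owned elsewhere; needs <mm/renderer.h> and <sys/stat.h>.
void YVPlayer::playCurrent() {
    if (!s_connection)
        s_connection = mmr_connect(NULL);        // default mm-renderer service
    if (!s_connection)
        return;

    m_ctxt = mmr_context_create(s_connection, "yvplayer", 0, S_IRUSR | S_IXUSR);
    if (!m_ctxt)
        return;

    if (mmr_output_attach(m_ctxt, "audio:default", "audio") < 0
            || mmr_input_attach(m_ctxt, m_t.toLatin1().constData(), "track") < 0
            || mmr_play(m_ctxt) < 0) {
        mmr_context_destroy(m_ctxt);             // tear down on any failure
        m_ctxt = 0;
    }
}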
Example #4
    static void stopBackground(bool bReleaseData)
    {
    	s_playStatus = STOPPED;

		if (s_mmrContext)
			mmr_stop(s_mmrContext);

		if (bReleaseData)
		{
			if (s_mmrContext)
			{
				mmr_input_detach(s_mmrContext);
				mmr_context_destroy(s_mmrContext);
			}

			if (s_mmrConnection)
				mmr_disconnect(s_mmrConnection);

			if (s_repeatDictionary)
				strm_dict_destroy(s_repeatDictionary);

			if (s_volumeDictionary)
				strm_dict_destroy(s_volumeDictionary);

			s_mmrContext = 0;
			s_mmrConnection = 0;
			s_repeatDictionary = 0;
			s_volumeDictionary = 0;
			s_hasMMRError = false;
			s_currentBackgroundStr = "";
			s_isBackgroundInitialized = false;
		}
    }
Example #5
void MmRendererMediaPlayerControl::stopInternal(StopCommand stopCommand)
{
    setPosition(0);

    if (m_state != QMediaPlayer::StoppedState) {

        if (stopCommand == StopMmRenderer) {
            ++m_stopEventsToIgnore;
            mmr_stop(m_context);
        }

        setState(QMediaPlayer::StoppedState);
    }
}
Example #6
void YVPlayer::setPath(QString url) {
    if (m_ctxt) {
        m_pos = -1;
        mmr_stop(m_ctxt);
        mmr_context_destroy(m_ctxt);
        m_ctxt = 0;
    }
    if (m_reply) {
        m_reply->deleteLater();
        m_reply = 0;
    }
    m_t = "";
    m_url = url;
    emit playabilityChanged();
    setPlaying(false);
}
Example #7
void BbMediaPlayerControl::stopInternal(StopCommand stopCommand)
{
    if (m_state != QMediaPlayer::StoppedState) {

        if (stopCommand == StopMmRenderer) {
            ++m_stopEventsToIgnore;
            mmr_stop(m_context);
        }

        setState(QMediaPlayer::StoppedState);
    }

    if (m_position != 0) {
        m_position = 0;
        emit positionChanged(0);
    }
}
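
Both stopInternal() variants increment m_stopEventsToIgnore before calling mmr_stop(), so that the STOPPED state change the renderer emits in response is not treated as an end-of-media stop. The sketch below is a hedged illustration of how that counter might be consumed when the state-change event arrives; the handler name is hypothetical and the event access mirrors example #1, so this is not the actual plugin code.

// Hedged sketch: only report stops that the control did not request itself.
void BbMediaPlayerControl::handleMmStateChange(bps_event_t *event)
{
    if (mmrenderer_event_get_state(event) != MMR_STOPPED)
        return;

    if (m_stopEventsToIgnore > 0) {
        --m_stopEventsToIgnore;                  // stop caused by our own mmr_stop()
        return;
    }

    setState(QMediaPlayer::StoppedState);        // renderer stopped on its own
}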
Example #8
int ZaMp3::play(const char * url)
{
	const char *mmrname = NULL;
	const char *audioout = NULL;
	const char *inputtype = "track";
	//int  final_return_code = EXIT_FAILURE;
	mode_t mode = S_IRUSR | S_IXUSR;
	int audio_oid; // output ID
	strm_dict_t *aoparams = NULL; // output parameters

	if (!ctxt)
	{
		audioout = DEFAULT_AUDIO_OUT;
		getcwd(cwd, PATH_MAX);
		rc = snprintf(inputurl, PATH_MAX, "file://%s%s", cwd, url);
		if (( connection = mmr_connect( mmrname ) ) == NULL )
			perror( "mmr_connect" );
		else if ( ( ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
			perror( "mmr_create" );
			perror( ctxtname );
		} else if ( audioout && ( audio_oid = mmr_output_attach( ctxt, audioout, "audio" ) ) < 0 ) {
			mmrerror( ctxt, audioout );
		} else if ( aoparams && mmr_output_parameters( ctxt, audio_oid, aoparams ) ) {
			mmrerror( ctxt, "output parameters (audio)" );
		} else if ( mmr_input_attach( ctxt, inputurl, inputtype ) < 0 ) {
			mmrerror( ctxt, inputurl );
		} else if ( mmr_play( ctxt ) < 0 ) {
			mmrerror( ctxt, "mmr_play" );
		}
	}
	else
	{
		// The input stays attached across mmr_stop(), so mmr_play() can start it
		// again without a second mmr_input_attach() call.
		mmr_stop(ctxt);
		//mmr_input_attach( ctxt, inputurl, inputtype );
		mmr_play( ctxt );
	}

	return 0;
}
Example #9
int main(int argc, char *argv[])
{
    int rc;
    int exit_application = 0;

    // Screen variables
    screen_context_t    screen_context = 0;
    screen_window_t     screen_window = 0;

    int screen_size[2] = {0,0};

    // Renderer variables
    mmr_connection_t*     mmr_connection = 0;
    mmr_context_t*        mmr_context = 0;
    strm_dict_t*          dict = NULL;

    // I/O variables
    int                    video_device_output_id = -1;
    int                    audio_device_output_id = -1;

    bps_initialize();

    /*
     * Create the window used for video output.
     */
    if (screen_create_context(&screen_context, SCREEN_APPLICATION_CONTEXT) != 0) {
        return EXIT_FAILURE;
    }

    if (screen_create_window(&screen_window, screen_context) != 0) {
        screen_destroy_context(screen_context);
        return EXIT_FAILURE;
    }

    if (screen_create_window_group(screen_window, window_group_name) != 0) {
        return EXIT_FAILURE;
    }

    int format = SCREEN_FORMAT_RGBA8888;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_FORMAT, &format) != 0) {
        return EXIT_FAILURE;
    }

    int usage = SCREEN_USAGE_NATIVE;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_USAGE, &usage) != 0) {
        return EXIT_FAILURE;
    }


    if (screen_create_window_buffers(screen_window, 1) != 0) {
        return EXIT_FAILURE;
    }

    /*
     * Configure mm-renderer.
     */
    mmr_connection = mmr_connect(NULL);
    if (mmr_connection == NULL) {
        return EXIT_FAILURE;
    }

    mmr_context = mmr_context_create(mmr_connection, video_context_name, 0, S_IRWXU|S_IRWXG|S_IRWXO);
    if (mmr_context == NULL) {
        return EXIT_FAILURE;
    }

    /*
     * Configure video and audio output.
     */
    video_device_output_id = mmr_output_attach(mmr_context, video_device_url, "video");
    if (video_device_output_id == -1) {
        return EXIT_FAILURE;
    }

    audio_device_output_id = mmr_output_attach(mmr_context, audio_device_url, "audio");
    if (audio_device_output_id == -1) {
        return EXIT_FAILURE;
    }

    // Get the render buffer
    screen_buffer_t temp_buffer[1];
    if (screen_get_window_property_pv( screen_window, SCREEN_PROPERTY_RENDER_BUFFERS, (void**)temp_buffer) != 0) {
        return EXIT_FAILURE;
    }

    // Fill the buffer with a solid color (black)
    int fill_attributes[3] = {SCREEN_BLIT_COLOR, 0x0, SCREEN_BLIT_END};
    if (screen_fill(screen_context, temp_buffer[0], fill_attributes) != 0) {
        return EXIT_FAILURE;
    }

    // Make the window visible
    if (screen_get_window_property_iv(screen_window, SCREEN_PROPERTY_SIZE, screen_size) != 0) {
        return EXIT_FAILURE;
    }

    int temp_rectangle[4] = {0, 0, screen_size[0], screen_size[1]};
    if (screen_post_window(screen_window, temp_buffer[0], 1, temp_rectangle, 0) != 0) {
        return EXIT_FAILURE;
    }

    // Prevent the backlight from going off
    int idle_mode = SCREEN_IDLE_MODE_KEEP_AWAKE;
    if (screen_set_window_property_iv(screen_window, SCREEN_PROPERTY_IDLE_MODE, &idle_mode) != 0) {
        return EXIT_FAILURE;
    }

    // Build up the path where our bundled resource is.
    char cwd[PATH_MAX];
    char media_file[PATH_MAX];
    getcwd(cwd,PATH_MAX);

    rc = snprintf(media_file, PATH_MAX, "file://%s/app/native/pb_sample.mp4", cwd);
    if ((rc == -1) || (rc >= PATH_MAX)) {
        return EXIT_FAILURE;
    }

    /*
     * Start the playback.
     */
    if (mmr_input_attach(mmr_context, media_file, "track") != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_play(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    /* Do some work to make the aspect ratio correct.
     */
    dict = calculate_rect(screen_size[0], screen_size[1]);
    if (NULL == dict) {
        return EXIT_FAILURE;
    }

    if (mmr_output_parameters(mmr_context, video_device_output_id, dict) != 0) {
        return EXIT_FAILURE;
    }

     /* Note that we allocated memory for the dictionary, but the call to 
      * mmr_output_parameters() deallocates that memory even on failure.
      */  
    dict = NULL;

    screen_request_events(screen_context);
    navigator_request_events(0);

    /*
     * Handle keyboard events and stop playback upon user request.
     */
    for (;;) {
        bps_event_t *event = NULL;
        if (bps_get_event(&event, -1) != BPS_SUCCESS) {
            return EXIT_FAILURE;
        }

        if (event) {

            if (bps_event_get_domain(event) == navigator_get_domain() &&
                bps_event_get_code(event) == NAVIGATOR_EXIT) {

                exit_application = 1;
            }

            if (exit_application) {
                break;
            }
        }
    }

    screen_stop_events(screen_context);

    if (mmr_stop(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, audio_device_output_id) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, video_device_output_id) != 0) {
        return EXIT_FAILURE;
    }

    if (mmr_context_destroy(mmr_context) != 0) {
        return EXIT_FAILURE;
    }

    mmr_context = 0;
    video_device_output_id = -1;
    audio_device_output_id = -1;

    mmr_disconnect(mmr_connection);
    mmr_connection = 0;

    bps_shutdown();

    if (screen_destroy_window(screen_window) != 0) {
        return EXIT_FAILURE;
    }

    if (screen_destroy_context(screen_context) != 0) {
        return EXIT_FAILURE;
    }

    screen_context = 0;
    screen_window = 0;

    return EXIT_SUCCESS;
}
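
This example (and #11 below) relies on a calculate_rect() helper that builds the output-parameter dictionary for the video window, but its definition is not included in the listing. The sketch below is a hedged stand-in using the strm_dict API: it simply maps the video onto the whole window, and the video_dest_* parameter names and the header path are assumptions rather than verified against the sample.

/* Hedged sketch of a calculate_rect() helper (not the sample's actual code).
 * It fills the whole window instead of computing an aspect-correct rectangle.
 * strm_dict_set() consumes the handle it is given and returns a new one,
 * or NULL on failure. */
#include <stdio.h>
#include <sys/strm.h>

static strm_dict_t *calculate_rect(int width, int height)
{
    char value[32];
    strm_dict_t *dict = strm_dict_new();
    if (dict == NULL)
        return NULL;

    if ((dict = strm_dict_set(dict, "video_dest_x", "0")) == NULL)
        return NULL;
    if ((dict = strm_dict_set(dict, "video_dest_y", "0")) == NULL)
        return NULL;

    snprintf(value, sizeof(value), "%d", width);
    if ((dict = strm_dict_set(dict, "video_dest_w", value)) == NULL)
        return NULL;

    snprintf(value, sizeof(value), "%d", height);
    if ((dict = strm_dict_set(dict, "video_dest_h", value)) == NULL)
        return NULL;

    return dict;
}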
Example #10
int main( int argc, char **argv ) {
    const char *mmrname = NULL;
    const char *ctxtname = "testplayer";
    const char *audioout = NULL;
    const char *inputtype = "track";
    char cwd[PATH_MAX];
    char inputurl[PATH_MAX];
    int rc;
    int final_return_code = EXIT_FAILURE;
    int exit_application = 0;
    mmr_connection_t *connection = NULL;
    mmr_context_t *ctxt = NULL;

    /*
     * Before we can listen for events from the BlackBerry(R) 10 OS platform
     * services, we need to initialize the BPS infrastructure
     */
    bps_initialize();

    if (setup_screen() != EXIT_SUCCESS) {
        printf("Unable to set up the screen. Exiting.");
        return 0;
    }

    /*
     * Once the BPS infrastructure has been initialized we can register for
     * events from the various BlackBerry(R) 10 OS platform services. The
     * Navigator service manages and delivers application life cycle and
     * visibility events.
     * For this sample, we request Navigator events so we can track when
     * the system is terminating the application (NAVIGATOR_EXIT event). 
     * This allows us to clean up application resources.
     */
    navigator_request_events(0);
    dialog_request_events(0);

    /*
     * Create and display the dialog.
     */
    create_dialog();

    getcwd(cwd, PATH_MAX);
    rc = snprintf(inputurl, PATH_MAX, "file://%s%s", cwd, WAV_RELATIVE_PATH);
    if (rc > PATH_MAX - 1)
    {
          show_dialog_message("File name and path too long");
          goto fail;
    }

    mode_t mode = S_IRUSR | S_IXUSR;
    int audio_oid; // output ID
    strm_dict_t *aoparams = NULL; // output parameters

    audioout = DEFAULT_AUDIO_OUT;

    if ( ( connection = mmr_connect( mmrname ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "mmr_connect: %s", strerror(errno));
        show_dialog_message(msg);
    } else if ( ( ctxt = mmr_context_create( connection, ctxtname, 0, mode ) ) == NULL ) {
        snprintf(msg, MSG_SIZE, "%s: %s", ctxtname, strerror(errno));
        show_dialog_message(msg);
    } else if ( audioout && ( audio_oid = mmr_output_attach( ctxt, audioout, "audio" ) ) < 0 ) {
        mmrerror( ctxt, audioout );
    } else if ( aoparams && mmr_output_parameters( ctxt, audio_oid, aoparams ) ) {
        mmrerror( ctxt, "output parameters (audio)" );
    } else if ( mmr_input_attach( ctxt, inputurl, inputtype ) < 0 ) {
        mmrerror( ctxt, inputurl );
    } else if ( mmr_play( ctxt ) < 0 ) {
        mmrerror( ctxt, "mmr_play" );
    } else if (BPS_SUCCESS != bps_add_sigevent_handler( &mmr_sigevent, mmr_sigevent_handler, ctxt ) ) { 
        snprintf( msg, MSG_SIZE, "bps_add_sigevent_handler() failure %s", strerror( errno ) );
        show_dialog_message( msg );
    } else if ( drain_and_arm_mmr_events ( ctxt ) ) {
        snprintf( msg, MSG_SIZE, "drain_and_arm_mmr_events() failure %s", strerror( errno ) );
        show_dialog_message( msg );
    } else {
        show_dialog_message( "Playing Audio\n" );
        final_return_code = EXIT_SUCCESS;
    }

fail:
    /*
     * Process Navigator events until we receive a NAVIGATOR_EXIT event.
     */
    while (!exit_application) {
        /*
         * Using a negative timeout (-1) in the call to bps_get_event(...)
         * ensures that we don't busy wait by blocking until an event is
         * available.
         */
        bps_event_t *event = NULL;
        bps_get_event(&event, -1);

        if (event) {
            /*
             * If it is a NAVIGATOR_EXIT event then set the exit_application
             * flag so the application will stop processing events, clean up
             * and exit
             */
            if (bps_event_get_domain(event) == navigator_get_domain()) {
                if (NAVIGATOR_EXIT == bps_event_get_code(event)) {
                    if (final_return_code == EXIT_SUCCESS) {
                        mmr_stop( ctxt );             // Not really necessary -- mmr_input_detach() would take care of this
                        mmr_input_detach( ctxt );     // Not really necessary -- mmr_context_destroy()  would take care of this
                        mmr_context_destroy( ctxt );  // Not really necessary -- mmr_disconnect() would take care of this
                        mmr_disconnect( connection ); // Not really necessary -- exiting would take care of this
                    }
                    exit_application = 1;
                }
            } else if (bps_event_get_domain(event) == dialog_get_domain()) {
                if (DIALOG_RESPONSE == bps_event_get_code(event)) {
                    handle_dialog_response_events(event);
                }
            }
        }
    }

    /*
     * Destroy the dialog, if it exists and cleanup screen resources.
     */
    destroy_dialog();
    cleanup_screen();
    /*
     * Clean up the BPS infrastructure and exit
     */
    bps_shutdown();

    return final_return_code;

}
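
Examples #8 and #10 call an mmrerror() helper whose definition is not part of either listing. The sketch below is a minimal, hedged guess at such a helper in the style of example #10: it assumes the msg buffer, MSG_SIZE, and show_dialog_message() from that example, and assumes mmr_error_info() returns a record with an error_code field.

/* Hedged sketch of the mmrerror() helper used above (not the original code).
 * msg, MSG_SIZE and show_dialog_message() are the globals from example #10. */
static void mmrerror(mmr_context_t *ctxt, const char *errmsg)
{
    const mmr_error_info_t *err = mmr_error_info(ctxt);
    unsigned errcode = err ? err->error_code : 0;

    snprintf(msg, MSG_SIZE, "%s: error %u", errmsg, errcode);
    show_dialog_message(msg);
}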
Example #11
int main(int argc, char *argv[])
{
    int rc;

    // Renderer variables
    mmr_connection_t*     mmr_connection = 0;
    mmr_context_t*        mmr_context = 0;
    strm_dict_t*          dict = NULL;

    // I/O variables
    int                    video_device_output_id = -1;
    int                    audio_device_output_id = -1;

    // Position of the play and stop button.
    static int ctrl_x = 0;
    static int ctrl_y = 0;

    EGLint surface_width;
    EGLint surface_height;

    srand(time(0));
    app_id = rand();

    // I/O devices
    static char *audio_device_url    = "audio:default";
    static char video_device_url[PATH_MAX];
    rc = snprintf(video_device_url, PATH_MAX, "screen:?winid=videosamplewindowgroup_%d&wingrp=videosamplewindowgroup_%d", app_id, app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "URL too long\n");
    }

    // Name of video context...with a random number appended.
    static char video_context_name[PATH_MAX];
    rc = snprintf(video_context_name, PATH_MAX, "samplevideocontextname_%d", app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "Video context name too long\n");
    }

    // Window group name...with the same random number appended.
    static char window_group_name[PATH_MAX];
    rc = snprintf(window_group_name, PATH_MAX, "videosamplewindowgroup_%d", app_id);
    if (rc >= PATH_MAX) {
        fprintf(stderr, "Video context name too long\n");
    }

    // Video file bundled with our app
    static const char *video_file_relative_path = "app/native/pb_sample.mp4";


    bps_initialize();

    // Create the Screen Context.
    if (screen_create_context(&g_screen_ctx, SCREEN_APPLICATION_CONTEXT) != 0) {
        fprintf(stderr, "screen_create_context failed\n");
        return EXIT_FAILURE;
    }

    // Create the window and initialize EGL for GL_ES_1 rendering
    rc = initialize_egl_window(g_screen_ctx, window_group_name);
    if (rc != EXIT_SUCCESS) {
        fprintf(stderr, "initialize_egl_window failed\n");
        return EXIT_FAILURE;
    }

    // Query width and height of the window surface created by utility code
    eglQuerySurface(g_egl_disp, g_egl_surf, EGL_WIDTH, &surface_width);
    eglQuerySurface(g_egl_disp, g_egl_surf, EGL_HEIGHT, &surface_height);
    EGLint err = eglGetError();
    if (err != EGL_SUCCESS) {
        fprintf(stderr, "Unable to query EGL surface dimensions\n");
        return EXIT_FAILURE;
    }

    // Initialize GL for 2D rendering
    glViewport(0, 0, (int)surface_width, (int) surface_height);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();

    glOrthof(0.0f, 1.0f, 0.0f, 1.0f, -1.0f, 1.0f);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    // Set world coordinates to coincide with screen pixels
    glScalef(1.0f / (float)surface_width, 1.0f / (float)surface_height, 1.0f);

    // Calculate the location and vertices of the controls
    ctrl_x = (float)surface_width  / 2 - ctrl_w / 2;
    ctrl_y = (float)surface_height / 2 - ctrl_h / 2;

    g_triangle_vertices[0] = ctrl_x;
    g_triangle_vertices[1] = ctrl_y;

    g_triangle_vertices[2] = ctrl_x;
    g_triangle_vertices[3] = ctrl_y + ctrl_h;

    g_triangle_vertices[4] = ctrl_x + ctrl_w;
    g_triangle_vertices[5] = ctrl_y + ctrl_h / 2;

    g_square_vertices[0] = ctrl_x;
    g_square_vertices[1] = ctrl_y;
    g_square_vertices[2] = ctrl_x;
    g_square_vertices[3] = ctrl_y + ctrl_h;
    g_square_vertices[4] = ctrl_x + ctrl_w;
    g_square_vertices[5] = ctrl_y + ctrl_h;
    g_square_vertices[6] = ctrl_x + ctrl_w;
    g_square_vertices[7] = ctrl_y;
    g_square_vertices[8] = ctrl_x;
    g_square_vertices[9] = ctrl_y;

    // Configure mm-renderer.
    mmr_connection = mmr_connect(NULL);
    if (mmr_connection == NULL) {
        fprintf(stderr, "mmr_connect failed\n");
        return EXIT_FAILURE;
    }

    mmr_context = mmr_context_create(mmr_connection, video_context_name, 0, S_IRWXU|S_IRWXG|S_IRWXO);
    if (mmr_context == NULL) {
        fprintf(stderr, "mmr_context_create failed\n");
        return EXIT_FAILURE;
    }

    // Configure video and audio output.
    video_device_output_id = mmr_output_attach(mmr_context, video_device_url, "video");
    if (video_device_output_id == -1) {
        fprintf(stderr, "mmr_output_attach(video) failed\n");
        return EXIT_FAILURE;
    }

    audio_device_output_id = mmr_output_attach(mmr_context, audio_device_url, "audio");
    if (audio_device_output_id == -1) {
        fprintf(stderr, "mmr_output_attach(audio) failed\n");
        return EXIT_FAILURE;
    }

    // render 'paused'
    render(true);


    // Build up the path where our bundled resource is.
    char cwd[PATH_MAX];
    char media_file[PATH_MAX];
    getcwd(cwd,PATH_MAX);

    rc = snprintf(media_file, PATH_MAX, "file://%s/%s", cwd, video_file_relative_path);
    if ((rc == -1) || (rc >= PATH_MAX)) {
        fprintf(stderr, "snprintf(media_file) failed\n");
        return EXIT_FAILURE;
    }

    // Attach the input media.
    if (mmr_input_attach(mmr_context, media_file, "track") != 0) {
        fprintf(stderr, "mmr_input_attach(track) failed\n");
        return EXIT_FAILURE;
    }


    int video_speed = 0;

    // Set the speed to 0 to pause the video initially
    if (mmr_speed_set(mmr_context, video_speed) != 0) {
        fprintf(stderr, "mmr_set_speed(0) failed\n");
        return EXIT_FAILURE;
    }

    // Change to the play state, although speed is zero
    if (mmr_play(mmr_context) != 0) {
        fprintf(stderr, "mmr_play failed\n");
        return EXIT_FAILURE;
    }

    /* Do some work to make the aspect ratio correct.
     */
    dict = calculate_rect(surface_width, surface_height);
    if (NULL == dict) {
        fprintf(stderr, "calculate_rect failed\n");
        return EXIT_FAILURE;
    }

    if (mmr_output_parameters(mmr_context, video_device_output_id, dict) != 0) {
        fprintf(stderr, "mmr_output_parameters failed\n");
        return EXIT_FAILURE;
    }

    /* Note that we allocated memory for the dictionary, but the call to 
     * mmr_output_parameters() deallocates that memory even on failure.
     */
    dict = NULL;

    screen_request_events(g_screen_ctx);
    navigator_request_events(0);

    screen_window_t video_window = (screen_window_t)0;
    bool app_window_above = true;
    int screen_val;
    int exit_value = EXIT_SUCCESS;

    // Handle keyboard events and stop playback upon user request.
    for (;;) {
        bps_event_t *event = NULL;
        if (bps_get_event(&event, 0) != BPS_SUCCESS) {
            return EXIT_FAILURE;
        }
        if (event) {
            if (bps_event_get_domain(event) == navigator_get_domain()) {
                if (bps_event_get_code(event) == NAVIGATOR_EXIT) {
                    break;
                } else if(NAVIGATOR_SWIPE_DOWN == bps_event_get_code(event)) {
                    if ((screen_window_t)0 != video_window) {

                        app_window_above = !app_window_above;
                        if (app_window_above) {
                            screen_val = 1;
                        } else {
                            screen_val = -1;
                        }
                        if (screen_set_window_property_iv(video_window, SCREEN_PROPERTY_ZORDER, &screen_val) != 0) {
                            fprintf(stderr, "screen_set_window_property(ZORDER) failed\n");
                            exit_value = EXIT_FAILURE;
                            break;
                        }

                        screen_val = 1;
                        if (screen_set_window_property_iv(video_window, SCREEN_PROPERTY_VISIBLE, &screen_val) != 0) {
                            fprintf(stderr, "screen_set_window_property(VISIBLE) failed\n");
                            exit_value = EXIT_FAILURE;
                            break;
                        }

                        rc = screen_flush_context(g_screen_ctx, SCREEN_WAIT_IDLE);
                        if (rc != 0) {
                            fprintf (stderr, "Warning: Failed to flush\n");
                        }
                    }
                }
            } else if (bps_event_get_domain(event) == screen_get_domain()) {
                screen_event_t screen_event = screen_event_get_event(event);
                int event_type;
                screen_get_event_property_iv(screen_event, SCREEN_PROPERTY_TYPE, &event_type);

                if (event_type == SCREEN_EVENT_CREATE && (video_window == (screen_window_t)0)) {
                    char id[256];

                    rc = screen_get_event_property_pv(screen_event, SCREEN_PROPERTY_WINDOW, (void**)&video_window);
                    if (rc != 0) {
                        fprintf(stderr, "screen_get_event_property(WINDOW) failed\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                    fprintf(stderr, "video_window%d\n",(int)video_window);

                    rc = screen_get_window_property_cv(video_window, SCREEN_PROPERTY_ID_STRING, 256, id);
                    if (rc != 0) {
                        fprintf(stderr, "screen_get_window_property(ID) failed\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                    fprintf(stderr, "window ID is %s\n", id);

                    if (strncmp(id, window_group_name, strlen(window_group_name)) != 0) {
                        fprintf(stderr, "window ID mismatch\n");
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                } else if(event_type == SCREEN_EVENT_MTOUCH_TOUCH) {
                    if (video_speed == 0) {
                        video_speed = 1000;
                        render(false);
                    } else {
                        video_speed = 0;
                        render(true);
                    }

                    if (mmr_speed_set(mmr_context, video_speed) != 0) {
                        fprintf(stderr, "mmr_speed_set(%d) failed\n", video_speed);
                        exit_value = EXIT_FAILURE;
                        break;
                    }
                }
            }
        }
    }

    screen_stop_events(g_screen_ctx);

    if (mmr_stop(mmr_context) != 0) {
        fprintf(stderr, "mmr_stop failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, audio_device_output_id) != 0) {
        fprintf(stderr, "mmr_output_detach(audio) failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_output_detach(mmr_context, video_device_output_id) != 0) {
        fprintf(stderr, "mmr_output_detach(video) failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (mmr_context_destroy(mmr_context) != 0) {
        fprintf(stderr, "mmr_context_destroy failed\n");
        exit_value = EXIT_FAILURE;
    }

    mmr_context = 0;
    video_device_output_id = -1;
    audio_device_output_id = -1;

    mmr_disconnect(mmr_connection);
    mmr_connection = 0;

    bps_shutdown();

    if (screen_destroy_window(g_screen_win) != 0) {
        fprintf(stderr, "screen_destroy_window failed\n");
        exit_value = EXIT_FAILURE;
    }

    if (screen_destroy_context(g_screen_ctx) != 0) {
        fprintf(stderr, "screen_destroy_context failed\n");
        exit_value = EXIT_FAILURE;
    }

    g_screen_ctx = 0;
    g_screen_win = 0;

    return exit_value;
}