Example #1
/**
**  Load a sample, trying each compiled-in decoder in turn.
**
**  @param name   Filename of the sample.
**  @param flag   Audio flags.
**
**  @return       The loaded sample, or NULL if no decoder accepted the file.
*/
static CSample *LoadSample(const char *name, enum _play_audio_flags_ flag)
{
	CSample *sampleWav = LoadWav(name, flag);

	if (sampleWav) {
		return sampleWav;
	}
#ifdef USE_VORBIS
	CSample *sampleVorbis = LoadVorbis(name, flag);
	if (sampleVorbis) {
		return sampleVorbis;
	}
#endif
#ifdef USE_MIKMOD
	CSample *sampleMikMod = LoadMikMod(name, flag);
	if (sampleMikMod) {
		return sampleMikMod;
	}
#endif
#ifdef USE_FLUIDSYNTH
	CSample *sampleFluidSynth = LoadFluidSynth(name, flag);
	if (sampleFluidSynth) {
		return sampleFluidSynth;
	}
#endif
	return NULL;
}
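
A minimal usage sketch for the loader above; the wrapper name below is an assumption for illustration, while PlayAudioStream is the flag already used by the other examples in this listing.

static CSample *LoadSampleOrWarn(const char *name)
{
	// Hypothetical caller: try every compiled-in decoder via LoadSample()
	// and report which file could not be decoded by any of them.
	CSample *sample = LoadSample(name, PlayAudioStream);
	if (sample == NULL) {
		fprintf(stderr, "No decoder accepted '%s'\n", name);
	}
	return sample;
}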
Example #2
	SPOggFile::SPOggFile( SPString path )
	{
		// Start in a "not loaded" state, remember the path,
		// then open the file and load the Vorbis data.
		isLoaded = false;
		songLength = 0;
		this->path = path;
		Open(path);
		LoadVorbis();
	}
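
For context, the constructor above implies roughly the following class shape; everything beyond the members and calls visible in the snippet (the type of songLength, the signatures of Open and LoadVorbis) is an assumption, not taken from the original code.

	// Sketch of the class outline implied by the constructor; member types
	// and method signatures are assumptions for illustration only.
	class SPOggFile
	{
	public:
		SPOggFile(SPString path);

	private:
		void Open(SPString path);  // open the file at 'path' (assumed signature)
		void LoadVorbis();         // load the Vorbis data (assumed signature)

		bool isLoaded;             // true once the file has been loaded (assumed)
		long songLength;           // length of the song (type assumed)
		SPString path;             // path the file was opened from
	};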
Example #3
/**
**  Play a video file.
**
**  @param name   Filename of movie file.
**
**  @return       Non-zero if file isn't a supported movie.
*/
int PlayMovie(const std::string &name)
{
    OggData data;
    CFile f;
    SDL_Rect rect;
    SDL_Overlay *yuv_overlay;
    CSample *sample;
    const EventCallback *old_callbacks;
    EventCallback callbacks;
    unsigned int start_ticks;
    int need_data;
    int diff;
    char buffer[PATH_MAX];

    LibraryFileName(name.c_str(), buffer, sizeof(buffer));

    if (f.open(buffer, CL_OPEN_READ) == -1) {
        fprintf(stderr, "Can't open file `%s'\n", name.c_str());
        return -1;
    }

    memset(&data, 0, sizeof(data));
    if (OggInit(&f, &data) || !data.video) {
        OggFree(&data);
        f.close();
        return -1;
    }

    data.File = &f;

    // Frames wider than 4:3 are scaled to the full screen width, everything
    // else to the full screen height, centered on the other axis.
    if (data.tinfo.frame_width * 300 / 4 > data.tinfo.frame_height * 100) {
        rect.w = Video.Width;
        rect.h = Video.Width * data.tinfo.frame_height / data.tinfo.frame_width;
        rect.x = 0;
        rect.y = (Video.Height - rect.h) / 2;
    } else {
        rect.w = Video.Height * data.tinfo.frame_width / data.tinfo.frame_height;
        rect.h = Video.Height;
        rect.x = (Video.Width - rect.w) / 2;
        rect.y = 0;
    }

    yuv_overlay = SDL_CreateYUVOverlay(data.tinfo.frame_width,
                                       data.tinfo.frame_height, SDL_YV12_OVERLAY, TheScreen);

    if (yuv_overlay == NULL) {
        fprintf(stderr, "SDL_CreateYUVOverlay: %s\n", SDL_GetError());
        OggFree(&data);
        f.close();
        return 0;
    }

    StopMusic();
    if ((sample = LoadVorbis(buffer, PlayAudioStream))) {
        if ((sample->Channels != 1 && sample->Channels != 2) ||
                sample->SampleSize != 16) {
            fprintf(stderr, "Unsupported sound format in movie\n");
            delete sample;
            SDL_FreeYUVOverlay(yuv_overlay);
            OggFree(&data);
            f.close();
            return 0;
        }
        PlayMusic(sample);
    }

    callbacks.ButtonPressed = MovieCallbackButtonPressed;
    callbacks.ButtonReleased = MovieCallbackButtonReleased;
    callbacks.MouseMoved = MovieCallbackMouseMove;
    callbacks.MouseExit = MovieCallbackMouseExit;
    callbacks.KeyPressed = MovieCallbackKeyPressed;
    callbacks.KeyReleased = MovieCallbackKeyReleased;
    callbacks.KeyRepeated = MovieCallbackKeyRepeated;
    callbacks.NetworkEvent = NetworkEvent;

    old_callbacks = GetCallbacks();
    SetCallbacks(&callbacks);

    Invalidate();
    RealizeVideoMemory();

    MovieStop = false;
    start_ticks = SDL_GetTicks();
    need_data = 1;
    while (!MovieStop) {
        if (need_data) {
            if (TheoraProcessData(&data)) {
                break;
            }
            need_data = 0;
        }

        // How far (in ms) wall-clock playback has run past the current
        // frame's presentation time.
        diff = SDL_GetTicks() - start_ticks - static_cast<int>(
                   theora_granule_time(&data.tstate, data.tstate.granulepos) * 1000);

        if (diff > 100) {
            // too far behind, skip some frames
            need_data = 1;
            continue;
        }
        if (diff > 0) {
            // the frame is due: display it and decode the next one
            OutputTheora(&data, yuv_overlay, &rect);
            need_data = 1;
        }

        WaitEventsOneFrame();
    }

    StopMusic();
    SDL_FreeYUVOverlay(yuv_overlay);

    OggFree(&data);
    f.close();

    SetCallbacks(old_callbacks);

    return 0;
}
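
The pacing logic inside the playback loop boils down to a single decision on the clock difference. A standalone sketch of just that decision follows; the enum and function name are invented here for illustration and are not part of the original code.

enum FrameAction { FrameWait, FrameShow, FrameSkip };

// diff is the wall-clock time elapsed since playback started minus the
// presentation time of the decoded frame, both in milliseconds.
static FrameAction DecideFrameAction(int diff)
{
    if (diff > 100) {
        return FrameSkip;   // more than 100 ms behind: drop this frame and decode the next
    }
    if (diff > 0) {
        return FrameShow;   // the frame is due: display it and decode the next
    }
    return FrameWait;       // the frame is early: wait and check again next iteration
}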
Example #4
/**
**  Play a video file.
**
**  @param name   Filename of movie file.
**
**  @return       Non-zero if file isn't a supported movie.
*/
int PlayMovie(const std::string &name)
{
	int videoWidth, videoHeight;
#if defined(USE_OPENGL) || defined(USE_GLES)
	videoWidth  = Video.ViewportWidth;
	videoHeight = Video.ViewportHeight;
#else
	videoWidth  = Video.Width;
	videoHeight = Video.Height;
#endif

	const std::string filename = LibraryFileName(name.c_str());

	CFile f;
	if (f.open(filename.c_str(), CL_OPEN_READ) == -1) {
		fprintf(stderr, "Can't open file '%s'\n", name.c_str());
		return 0;
	}

	OggData data;
	memset(&data, 0, sizeof(data));
	if (OggInit(&f, &data) || !data.video) {
		OggFree(&data);
		f.close();
		return -1;
	}

	data.File = &f;
	SDL_Rect rect;

	if (data.tinfo.frame_width * 300 / 4 > data.tinfo.frame_height * 100) {
		rect.w = videoWidth;
		rect.h = videoWidth * data.tinfo.frame_height / data.tinfo.frame_width;
		rect.x = 0;
		rect.y = (videoHeight - rect.h) / 2;
	} else {
		rect.w = videoHeight * data.tinfo.frame_width / data.tinfo.frame_height;
		rect.h = videoHeight;
		rect.x = (videoWidth - rect.w) / 2;
		rect.y = 0;
	}

#ifdef USE_OPENGL
	// When SDL_OPENGL is used, SDL_CreateYUVOverlay cannot be called, so temporarily turn OpenGL off.
	// With GLES this workaround is not needed.
	if (UseOpenGL) {
		SDL_SetVideoMode(Video.ViewportWidth, Video.ViewportHeight, Video.Depth, SDL_GetVideoSurface()->flags & ~SDL_OPENGL);
	}
#endif

	SDL_FillRect(SDL_GetVideoSurface(), NULL, 0);
	Video.ClearScreen();
	SDL_Overlay *yuv_overlay = SDL_CreateYUVOverlay(data.tinfo.frame_width, data.tinfo.frame_height, SDL_YV12_OVERLAY, TheScreen);

	if (yuv_overlay == NULL) {
		fprintf(stderr, "SDL_CreateYUVOverlay: %s\n", SDL_GetError());
		OggFree(&data);
		f.close();
		return 0;
	}

	StopMusic();
	CSample *sample = LoadVorbis(filename.c_str(), PlayAudioStream);
	if (sample) {
		if ((sample->Channels != 1 && sample->Channels != 2) || sample->SampleSize != 16) {
			fprintf(stderr, "Unsupported sound format in movie\n");
			delete sample;
			SDL_FreeYUVOverlay(yuv_overlay);
			OggFree(&data);
			f.close();
			return 0;
		}
		PlayMusic(sample);
	}

	EventCallback callbacks;

	callbacks.ButtonPressed = MovieCallbackButtonPressed;
	callbacks.ButtonReleased = MovieCallbackButtonReleased;
	callbacks.MouseMoved = MovieCallbackMouseMove;
	callbacks.MouseExit = MovieCallbackMouseExit;
	callbacks.KeyPressed = MovieCallbackKeyPressed;
	callbacks.KeyReleased = MovieCallbackKeyReleased;
	callbacks.KeyRepeated = MovieCallbackKeyRepeated;
	callbacks.NetworkEvent = NetworkEvent;

	const EventCallback *old_callbacks = GetCallbacks();
	SetCallbacks(&callbacks);

	Invalidate();
	RealizeVideoMemory();

	MovieStop = false;
	const unsigned int start_ticks = SDL_GetTicks();
	bool need_data = true;
	while (!MovieStop) {
		if (need_data) {
			if (TheoraProcessData(&data)) {
				break;
			}
			need_data = false;
		}

		const int diff = SDL_GetTicks() - start_ticks
						 - static_cast<int>(theora_granule_time(&data.tstate, data.tstate.granulepos) * 1000);

		if (diff > 100) {
			// too far behind, skip some frames
			need_data = true;
			continue;
		}
		if (diff > 0) {
			OutputTheora(&data, yuv_overlay, &rect);
			need_data = true;
		}

		WaitEventsOneFrame();
	}

	StopMusic();
	SDL_FreeYUVOverlay(yuv_overlay);

	OggFree(&data);
	f.close();

#ifdef USE_OPENGL
	if (UseOpenGL) {
		SDL_SetVideoMode(Video.ViewportWidth, Video.ViewportHeight, Video.Depth, SDL_GetVideoSurface()->flags | SDL_OPENGL);
		ReloadOpenGL();
	}
#endif

	SetCallbacks(old_callbacks);

	return 0;
}
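
Both versions of PlayMovie compute the destination rectangle the same way: frames wider than 4:3 are fitted to the screen width and centered vertically, anything else is fitted to the screen height and centered horizontally. Below is a standalone sketch of that shared computation; the function name is an assumption, not part of the original code.

static SDL_Rect ComputeMovieRect(int frameWidth, int frameHeight,
								 int screenWidth, int screenHeight)
{
	SDL_Rect rect;

	if (frameWidth * 300 / 4 > frameHeight * 100) {  // frameWidth / frameHeight > 4 / 3
		// Wider than 4:3: fill the screen width and center vertically.
		rect.w = screenWidth;
		rect.h = screenWidth * frameHeight / frameWidth;
		rect.x = 0;
		rect.y = (screenHeight - rect.h) / 2;
	} else {
		// 4:3 or narrower: fill the screen height and center horizontally.
		rect.w = screenHeight * frameWidth / frameHeight;
		rect.h = screenHeight;
		rect.x = (screenWidth - rect.w) / 2;
		rect.y = 0;
	}
	return rect;
}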