Code Example #1
static int
DSOUND_OpenDevice(_THIS, void *handle, const char *devname, int iscapture)
{
    HRESULT result;
    SDL_bool valid_format = SDL_FALSE;
    SDL_bool tried_format = SDL_FALSE;
    SDL_AudioFormat test_format = SDL_FirstAudioFormat(this->spec.format);
    LPGUID guid = (LPGUID) handle;

    /* Initialize all variables that we clean on shutdown */
    this->hidden = (struct SDL_PrivateAudioData *)
        SDL_malloc((sizeof *this->hidden));
    if (this->hidden == NULL) {
        return SDL_OutOfMemory();
    }
    SDL_memset(this->hidden, 0, (sizeof *this->hidden));

    /* Open the audio device */
    result = pDirectSoundCreate8(guid, &this->hidden->sound, NULL);
    if (result != DS_OK) {
        DSOUND_CloseDevice(this);
        return SetDSerror("DirectSoundCreate", result);
    }

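    /* Walk SDL's priority-ordered format list; the first format for which
       CreateSecondary() can build a DirectSound buffer wins. */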
    while ((!valid_format) && (test_format)) {
        switch (test_format) {
        case AUDIO_U8:
        case AUDIO_S16:
        case AUDIO_S32:
        case AUDIO_F32:
            tried_format = SDL_TRUE;
            this->spec.format = test_format;
            this->hidden->num_buffers = CreateSecondary(this, NULL);
            if (this->hidden->num_buffers > 0) {
                valid_format = SDL_TRUE;
            }
            break;
        }
        test_format = SDL_NextAudioFormat();
    }

    if (!valid_format) {
        DSOUND_CloseDevice(this);
        if (tried_format) {
            return -1;  /* CreateSecondary() should have called SDL_SetError(). */
        }
        return SDL_SetError("DirectSound: Unsupported audio format");
    }

    /* The buffer will auto-start playing in DSOUND_WaitDevice() */
    this->hidden->mixlen = this->spec.size;

    return 0;                   /* good to go. */
}
Code Example #2
static void win32_display_frame( vo_driver_t * vo_driver, vo_frame_t * vo_frame )
{
	win32_driver_t * win32_driver = ( win32_driver_t * ) vo_driver;
	win32_frame_t * win32_frame = ( win32_frame_t * ) vo_frame;
	int offset;
	int size;

	// if the required width, height or format has changed
	// then recreate the secondary buffer

	if( ( win32_driver->req_format	!= win32_frame->format	) ||
		( win32_driver->width		!= win32_frame->width	) ||
		( win32_driver->height		!= win32_frame->height	) )
	{
		CreateSecondary( win32_driver, win32_frame->width, win32_frame->height, win32_frame->format );
	}

	// determine desired ratio

	win32_driver->ratio = win32_frame->ratio;

	// lock our surface to update its contents

	win32_driver->contents = Lock( win32_driver->secondary );

	// surface unavailable, skip frame render

	if( !win32_driver->contents )
	{
		vo_frame->free( vo_frame );
		return;
	}

	// if our actual frame format is the native screen
	// pixel format, we need to convert it

	if( win32_driver->act_format == IMGFMT_NATIVE )
	{
		// use the software color conversion functions
		// to rebuild the frame in our native screen
		// pixel format ... this is slow

		if( win32_driver->req_format == XINE_IMGFMT_YV12 )
		{
			// convert from yv12 to native
			// screen pixel format

#if NEW_YUV
			win32_driver->yuv2rgb->configure( win32_driver->yuv2rgb,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width, win32_driver->width/2,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width * win32_driver->bytespp );
#else
			yuv2rgb_setup( win32_driver->yuv2rgb,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width, win32_driver->width/2,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width * win32_driver->bytespp );

#endif

			win32_driver->yuv2rgb->yuv2rgb_fun( win32_driver->yuv2rgb,
										win32_driver->contents,
										win32_frame->vo_frame.base[0],
										win32_frame->vo_frame.base[1],
										win32_frame->vo_frame.base[2] );
		}

		if( win32_driver->req_format == XINE_IMGFMT_YUY2 )
		{
			// convert from yuy2 to native
			// screen pixel format
#if NEW_YUV
			win32_driver->yuv2rgb->configure( win32_driver->yuv2rgb,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width, win32_driver->width/2,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width * win32_driver->bytespp );
#else

			yuv2rgb_setup( win32_driver->yuv2rgb,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width, win32_driver->width/2,
						   win32_driver->width, win32_driver->height,
						   win32_driver->width * win32_driver->bytespp );

#endif
			win32_driver->yuv2rgb->yuy22rgb_fun( win32_driver->yuv2rgb,
										win32_driver->contents,
										win32_frame->vo_frame.base[0] );
		}

#if RGB_SUPPORT
		if( win32_driver->req_format == IMGFMT_RGB )
		{
			// convert from 24 bit rgb to native
			// screen pixel format

			// TODO : rgb2rgb conversion
		}
#endif
	}
	else
	{
		// the actual format is identical to our
		// stream format. we just need to copy it

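		// note: YV12 stores the V plane ahead of the U plane, so the copy
		// below writes base[2] (V) before base[1] (U)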
		switch( win32_frame->format )
		{
			case XINE_IMGFMT_YV12:
			{
				vo_frame_t *frame;
				uint8_t *img;

				frame = vo_frame;
				img = (uint8_t *)win32_driver->contents;

				offset = 0;
				size = frame->pitches[0] * frame->height;
				memcpy( img + offset, frame->base[0], size );

				offset += size;
				size = frame->pitches[2] * frame->height / 2;
				memcpy( img + offset, frame->base[2], size );

				offset += size;
				size = frame->pitches[1] * frame->height / 2;
				memcpy( img + offset, frame->base[1], size );
			}
			break;

			case XINE_IMGFMT_YUY2:
			default:
				memcpy( win32_driver->contents, win32_frame->vo_frame.base[0],
						win32_frame->vo_frame.pitches[0] * win32_frame->vo_frame.height * 2 );
				break;
		}
	}

	// unlock the surface 

	Unlock( win32_driver->secondary );

	// scale, clip and display our frame

	DisplayFrame( win32_driver );

	// tag our frame as displayed
	if( ( win32_driver->current != NULL ) && ( (vo_frame_t *)win32_driver->current != vo_frame ) ) {
		vo_frame->free( &win32_driver->current->vo_frame );
	}
	win32_driver->current = (win32_frame_t *)vo_frame;
}
Code Example #3
static int
DSOUND_OpenDevice(_THIS, const char *devname, int iscapture)
{
    HRESULT result;
    WAVEFORMATEX waveformat;
    int valid_format = 0;
    SDL_AudioFormat test_format = SDL_FirstAudioFormat(this->spec.format);
    FindDevGUIDData devguid;
    LPGUID guid = NULL;

    if (devname != NULL) {
        devguid.found = 0;
        devguid.devname = devname;
        if (iscapture)
            pDirectSoundCaptureEnumerateW(FindDevGUID, &devguid);
        else
            pDirectSoundEnumerateW(FindDevGUID, &devguid);

        if (!devguid.found) {
            SDL_SetError("DirectSound: Requested device not found");
            return 0;
        }
        guid = &devguid.guid;
    }

    /* Initialize all variables that we clean on shutdown */
    this->hidden = (struct SDL_PrivateAudioData *)
        SDL_malloc((sizeof *this->hidden));
    if (this->hidden == NULL) {
        SDL_OutOfMemory();
        return 0;
    }
    SDL_memset(this->hidden, 0, (sizeof *this->hidden));

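    /* Pick the first 8-, 16- or 32-bit integer format from SDL's priority
       list; the WAVEFORMATEX below is filled in from that choice. */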
    while ((!valid_format) && (test_format)) {
        switch (test_format) {
        case AUDIO_U8:
        case AUDIO_S16:
        case AUDIO_S32:
            this->spec.format = test_format;
            valid_format = 1;
            break;
        }
        test_format = SDL_NextAudioFormat();
    }

    if (!valid_format) {
        DSOUND_CloseDevice(this);
        SDL_SetError("DirectSound: Unsupported audio format");
        return 0;
    }

    SDL_memset(&waveformat, 0, sizeof(waveformat));
    waveformat.wFormatTag = WAVE_FORMAT_PCM;
    waveformat.wBitsPerSample = SDL_AUDIO_BITSIZE(this->spec.format);
    waveformat.nChannels = this->spec.channels;
    waveformat.nSamplesPerSec = this->spec.freq;
    waveformat.nBlockAlign =
        waveformat.nChannels * (waveformat.wBitsPerSample / 8);
    waveformat.nAvgBytesPerSec =
        waveformat.nSamplesPerSec * waveformat.nBlockAlign;

    /* Update the fragment size as size in bytes */
    SDL_CalculateAudioSpec(&this->spec);

    /* Open the audio device */
    result = pDirectSoundCreate8(guid, &this->hidden->sound, NULL);
    if (result != DS_OK) {
        DSOUND_CloseDevice(this);
        SetDSerror("DirectSoundCreate", result);
        return 0;
    }

    /* Create the audio buffer to which we write */
    this->hidden->num_buffers = CreateSecondary(this, NULL, &waveformat);
    if (this->hidden->num_buffers < 0) {
        DSOUND_CloseDevice(this);
        return 0;
    }

    /* The buffer will auto-start playing in DSOUND_WaitDevice() */
    this->hidden->mixlen = this->spec.size;

    return 1;                   /* good to go. */
}
Code Example #4
File: SDL_directsound.c  Project: 0-wiz-0/mame
static int
DSOUND_OpenDevice(_THIS, void *handle, const char *devname, int iscapture)
{
    const DWORD numchunks = 8;
    HRESULT result;
    SDL_bool valid_format = SDL_FALSE;
    SDL_bool tried_format = SDL_FALSE;
    SDL_AudioFormat test_format = SDL_FirstAudioFormat(this->spec.format);
    LPGUID guid = (LPGUID) handle;
    DWORD bufsize;

    /* Initialize all variables that we clean on shutdown */
    this->hidden = (struct SDL_PrivateAudioData *)
        SDL_malloc((sizeof *this->hidden));
    if (this->hidden == NULL) {
        return SDL_OutOfMemory();
    }
    SDL_zerop(this->hidden);

    /* Open the audio device */
    if (iscapture) {
        result = pDirectSoundCaptureCreate8(guid, &this->hidden->capture, NULL);
        if (result != DS_OK) {
            return SetDSerror("DirectSoundCaptureCreate8", result);
        }
    } else {
        result = pDirectSoundCreate8(guid, &this->hidden->sound, NULL);
        if (result != DS_OK) {
            return SetDSerror("DirectSoundCreate8", result);
        }
        result = IDirectSound_SetCooperativeLevel(this->hidden->sound,
                                                  GetDesktopWindow(),
                                                  DSSCL_NORMAL);
        if (result != DS_OK) {
            return SetDSerror("DirectSound SetCooperativeLevel", result);
        }
    }

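    /* For each candidate format: recompute the fragment size, check that the
       total buffer (numchunks fragments) fits between DSBSIZE_MIN and
       DSBSIZE_MAX, then try to create the capture or playback buffer. */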
    while ((!valid_format) && (test_format)) {
        switch (test_format) {
        case AUDIO_U8:
        case AUDIO_S16:
        case AUDIO_S32:
        case AUDIO_F32:
            tried_format = SDL_TRUE;

            this->spec.format = test_format;

            /* Update the fragment size as size in bytes */
            SDL_CalculateAudioSpec(&this->spec);

            bufsize = numchunks * this->spec.size;
            if ((bufsize < DSBSIZE_MIN) || (bufsize > DSBSIZE_MAX)) {
                SDL_SetError("Sound buffer size must be between %d and %d",
                             (DSBSIZE_MIN < numchunks) ? 1 : DSBSIZE_MIN / numchunks,
                             DSBSIZE_MAX / numchunks);
            } else {
                int rc;
                WAVEFORMATEX wfmt;
                SDL_zero(wfmt);
                if (SDL_AUDIO_ISFLOAT(this->spec.format)) {
                    wfmt.wFormatTag = WAVE_FORMAT_IEEE_FLOAT;
                } else {
                    wfmt.wFormatTag = WAVE_FORMAT_PCM;
                }

                wfmt.wBitsPerSample = SDL_AUDIO_BITSIZE(this->spec.format);
                wfmt.nChannels = this->spec.channels;
                wfmt.nSamplesPerSec = this->spec.freq;
                wfmt.nBlockAlign = wfmt.nChannels * (wfmt.wBitsPerSample / 8);
                wfmt.nAvgBytesPerSec = wfmt.nSamplesPerSec * wfmt.nBlockAlign;

                rc = iscapture ? CreateCaptureBuffer(this, bufsize, &wfmt) : CreateSecondary(this, bufsize, &wfmt);
                if (rc == 0) {
                    this->hidden->num_buffers = numchunks;
                    valid_format = SDL_TRUE;
                }
            }
            break;
        }
        test_format = SDL_NextAudioFormat();
    }

    if (!valid_format) {
        if (tried_format) {
            return -1;  /* CreateSecondary() should have called SDL_SetError(). */
        }
        return SDL_SetError("DirectSound: Unsupported audio format");
    }

    /* Playback buffers will auto-start playing in DSOUND_WaitDevice() */

    return 0;                   /* good to go. */
}
Code Example #5
File: video_out_directx.c  Project: Caught/openpliPC
static void win32_display_frame( vo_driver_t * vo_driver, vo_frame_t * vo_frame )
{
  win32_driver_t  *win32_driver = ( win32_driver_t * ) vo_driver;
  win32_frame_t   *win32_frame  = ( win32_frame_t * ) vo_frame;


  /* if the required width, height or format has changed
   * then recreate the secondary buffer */

  if( ( win32_driver->req_format	!= win32_frame->format	) ||
      ( win32_driver->width		!= win32_frame->width	) ||
      ( win32_driver->height		!= win32_frame->height	) )
    {
      CreateSecondary( win32_driver, win32_frame->width, win32_frame->height, win32_frame->format );
    }

  /* determine desired ratio */

  win32_driver->ratio = win32_frame->ratio;

  /* lock our surface to update its contents */

  win32_driver->contents = Lock( win32_driver, win32_driver->secondary );

  /* surface unavailable, skip frame render */

  if( !win32_driver->contents )
    {
      vo_frame->free( vo_frame );
      return;
    }

  /* if our actual frame format is the native screen
   * pixel format, we need to convert it */

  if( win32_driver->act_format == IMGFMT_NATIVE )
    {
      /* use the software color conversion functions
       * to rebuild the frame in our native screen
       * pixel format ... this is slow */

      if( win32_driver->req_format == XINE_IMGFMT_YV12 )
	{
	  /* convert from yv12 to native
	   * screen pixel format */

#if NEW_YUV
	  win32_driver->yuv2rgb->configure( win32_driver->yuv2rgb,
					    win32_driver->width, win32_driver->height,
					    win32_frame->vo_frame.pitches[0], win32_frame->vo_frame.pitches[1],
					    win32_driver->width, win32_driver->height,
					    win32_driver->width * win32_driver->bytespp);
#else
	  yuv2rgb_setup( win32_driver->yuv2rgb,
			 win32_driver->width, win32_driver->height,
			 win32_frame->vo_frame.pitches[0], win32_frame->vo_frame.pitches[1],
			 win32_driver->width, win32_driver->height,
			 win32_driver->width * win32_driver->bytespp );

#endif

	  win32_driver->yuv2rgb->yuv2rgb_fun( win32_driver->yuv2rgb,
					      win32_driver->contents,
					      win32_frame->vo_frame.base[0],
					      win32_frame->vo_frame.base[1],
					      win32_frame->vo_frame.base[2] );
	}

      if( win32_driver->req_format == XINE_IMGFMT_YUY2 )
	{
	  /* convert from yuy2 to native
	   * screen pixel format */
#if NEW_YUV
	  win32_driver->yuv2rgb->configure( win32_driver->yuv2rgb,
					    win32_driver->width, win32_driver->height,
					    win32_frame->vo_frame.pitches[0], win32_frame->vo_frame.pitches[0] / 2,
					    win32_driver->width, win32_driver->height,
					    win32_driver->width * win32_driver->bytespp );
#else

	  yuv2rgb_setup( win32_driver->yuv2rgb,
			 win32_driver->width, win32_driver->height,
			 win32_frame->vo_frame.pitches[0], win32_frame->vo_frame.pitches[0] / 2,
			 win32_driver->width, win32_driver->height,
			 win32_driver->width * win32_driver->bytespp );

#endif
	  win32_driver->yuv2rgb->yuy22rgb_fun( win32_driver->yuv2rgb,
					       win32_driver->contents,
					       win32_frame->vo_frame.base[0] );
	}

#if RGB_SUPPORT
      if( win32_driver->req_format == IMGFMT_RGB )
	{
	  /* convert from 24 bit rgb to native
	   * screen pixel format */

	  /* TODO : rgb2rgb conversion */
	}
#endif
    }
  else
    {
      /* the actual format is identical to our
       * stream format. we just need to copy it */

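      /* copy the Y, V and U planes line by line, stepping by the frame's
       * pitch on the source side and by the DirectDraw surface pitch
       * (ddsd.lPitch, halved for the chroma planes) on the destination side */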
      int line;
      uint8_t *src;
      vo_frame_t *frame = vo_frame;
      uint8_t *dst = (uint8_t *)win32_driver->contents;

      switch( win32_frame->format )
        {
        case XINE_IMGFMT_YV12:
          src = frame->base[0];
          for( line = 0; line < frame->height; line++ ) {
            xine_fast_memcpy( dst, src, frame->width );
            src += vo_frame->pitches[0];
            dst += win32_driver->ddsd.lPitch;
          }

          src = frame->base[2];
          for( line = 0; line < frame->height / 2; line++ ) {
            xine_fast_memcpy( dst, src, frame->width / 2 );
            src += vo_frame->pitches[2];
            dst += win32_driver->ddsd.lPitch / 2;
          }

          src = frame->base[1];
          for( line = 0; line < frame->height / 2; line++ ) {
            xine_fast_memcpy( dst, src, frame->width / 2 );
            src += vo_frame->pitches[1];
            dst += win32_driver->ddsd.lPitch / 2;
          }
          break;

        case XINE_IMGFMT_YUY2:
        default:
          src = frame->base[0];
          for( line = 0; line < frame->height; line++ ) {
            xine_fast_memcpy( dst, src, frame->width * 2 );
            src += vo_frame->pitches[0];
            dst += win32_driver->ddsd.lPitch;
          }
          break;
        }
  }

  /* unlock the surface  */

  Unlock( win32_driver->secondary );

  /* scale, clip and display our frame */

  DisplayFrame( win32_driver );

  /* tag our frame as displayed */
  if((win32_driver->current != NULL) && ((vo_frame_t *)win32_driver->current != vo_frame)) {
    vo_frame->free(&win32_driver->current->vo_frame);
  }
  win32_driver->current = (win32_frame_t *)vo_frame;

}
Code Example #6
File: SDL_dx5audio.c  Project: bohwaz/ozex
static int DX5_OpenAudio(_THIS, SDL_AudioSpec *spec)
{
	HRESULT      result;
	WAVEFORMATEX waveformat;

	/* Set basic WAVE format parameters */
	memset(&waveformat, 0, sizeof(waveformat));
	waveformat.wFormatTag = WAVE_FORMAT_PCM;

	/* Determine the audio parameters from the AudioSpec */
	switch ( spec->format & 0xFF ) {
		case 8:
			/* Unsigned 8 bit audio data */
			spec->format = AUDIO_U8;
			silence = 0x80;
			waveformat.wBitsPerSample = 8;
			break;
		case 16:
			/* Signed 16 bit audio data */
			spec->format = AUDIO_S16;
			silence = 0x00;
			waveformat.wBitsPerSample = 16;
			break;
		default:
			SDL_SetError("Unsupported audio format");
			return(-1);
	}
	waveformat.nChannels = spec->channels;
	waveformat.nSamplesPerSec = spec->freq;
	waveformat.nBlockAlign =
		waveformat.nChannels * (waveformat.wBitsPerSample/8);
	waveformat.nAvgBytesPerSec = 
		waveformat.nSamplesPerSec * waveformat.nBlockAlign;

	/* Update the fragment size as size in bytes */
	SDL_CalculateAudioSpec(spec);

	/* Open the audio device */
	result = DSoundCreate(NULL, &sound, NULL);
	if ( result != DS_OK ) {
		SetDSerror("DirectSoundCreate", result);
		return(-1);
	}

	/* Create the audio buffer to which we write */
	NUM_BUFFERS = -1;
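	/* Try the primary buffer first (when compiled in and a window exists),
	   then fall back to a secondary buffer if that fails. */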
#ifdef USE_PRIMARY_BUFFER
	if ( mainwin ) {
		NUM_BUFFERS = CreatePrimary(sound, mainwin, &mixbuf,
						&waveformat, spec->size);
	}
#endif /* USE_PRIMARY_BUFFER */
	if ( NUM_BUFFERS < 0 ) {
		NUM_BUFFERS = CreateSecondary(sound, mainwin, &mixbuf,
						&waveformat, spec->size);
		if ( NUM_BUFFERS < 0 ) {
			return(-1);
		}
#ifdef DEBUG_SOUND
		fprintf(stderr, "Using secondary audio buffer\n");
#endif
	}
#ifdef DEBUG_SOUND
	else
		fprintf(stderr, "Using primary audio buffer\n");
#endif

	/* The buffer will auto-start playing in DX5_WaitAudio() */
	playing = 0;
	mixlen = spec->size;

#ifdef USE_POSITION_NOTIFY
	/* See if we can use DirectX 6 event notification */
	if ( CreateAudioEvent(this) == 0 ) {
		this->WaitAudio = DX6_WaitAudio_EventWait;
	} else {
		this->WaitAudio = DX5_WaitAudio_BusyWait;
	}
#endif
	return(0);
}