Example #1
static int
MME_OpenDevice(_THIS, const char *devname, int iscapture)
{
    int valid_format = 0;
    MMRESULT result;
    Uint8 *mixbuf = NULL;
    SDL_AudioFormat test_format;
    int i;

    /* Initialize all variables that we clean on shutdown */
    this->hidden = (struct SDL_PrivateAudioData *)
        SDL_malloc((sizeof *this->hidden));
    if (this->hidden == NULL) {
        SDL_OutOfMemory();
        return 0;
    }
    SDL_memset(this->hidden, 0, (sizeof *this->hidden));

    /* Set basic WAVE format parameters */
    this->hidden->shm = mmeAllocMem(sizeof(*this->hidden->shm));
    if (this->hidden->shm == NULL) {
        MME_CloseDevice(this);
        SDL_OutOfMemory();
        return 0;
    }

    SDL_memset(this->hidden->shm, '\0', sizeof(*this->hidden->shm));
    this->hidden->shm->sound = 0;
    this->hidden->shm->wFmt.wf.wFormatTag = WAVE_FORMAT_PCM;

    /* Determine the audio parameters from the AudioSpec */
    /* Try for a closest match on audio format */
    for (test_format = SDL_FirstAudioFormat(this->spec.format);
         !valid_format && test_format;) {
        valid_format = 1;
        switch (test_format) {
        case AUDIO_U8:
        case AUDIO_S16:
        case AUDIO_S32:
            break;
        default:
            valid_format = 0;
            test_format = SDL_NextAudioFormat();
        }
    }

    if (!valid_format) {
        MME_CloseDevice(this);
        SDL_SetError("Unsupported audio format");
        return 0;
    }

    this->spec.format = test_format;
    this->hidden->shm->wFmt.wBitsPerSample = SDL_AUDIO_BITSIZE(test_format);

    /* !!! FIXME: Can this handle more than stereo? */
    this->hidden->shm->wFmt.wf.nChannels = this->spec.channels;
    this->hidden->shm->wFmt.wf.nSamplesPerSec = this->spec.freq;
    this->hidden->shm->wFmt.wf.nBlockAlign =
        this->hidden->shm->wFmt.wf.nChannels *
        this->hidden->shm->wFmt.wBitsPerSample / 8;
    this->hidden->shm->wFmt.wf.nAvgBytesPerSec =
        this->hidden->shm->wFmt.wf.nSamplesPerSec *
        this->hidden->shm->wFmt.wf.nBlockAlign;

    /* Check the buffer size -- minimum of 1/4 second (word aligned) */
    if (this->spec.samples < (this->spec.freq / 4))
        this->spec.samples = ((this->spec.freq / 4) + 3) & ~3;

    /* Update the fragment size as size in bytes */
    SDL_CalculateAudioSpec(&this->spec);

    /* Open the audio device */
    result = waveOutOpen(&(this->hidden->shm->sound),
                         WAVE_MAPPER,
                         &(this->hidden->shm->wFmt.wf),
                         MME_Callback,
                         NULL, (CALLBACK_FUNCTION | WAVE_OPEN_SHAREABLE));
    if (result != MMSYSERR_NOERROR) {
        MME_CloseDevice(this);
        SetMMerror("waveOutOpen()", result);
        return 0;
    }

    /* Create the sound buffers */
    mixbuf = (Uint8 *) mmeAllocBuffer(NUM_BUFFERS * (this->spec.size));
    if (mixbuf == NULL) {
        MME_CloseDevice(this);
        SDL_OutOfMemory();
        return 0;
    }
    this->hidden->mixbuf = mixbuf;

    for (i = 0; i < NUM_BUFFERS; i++) {
        this->hidden->shm->wHdr[i].lpData = &mixbuf[i * (this->spec.size)];
        this->hidden->shm->wHdr[i].dwBufferLength = this->spec.size;
        this->hidden->shm->wHdr[i].dwFlags = 0;
        this->hidden->shm->wHdr[i].dwUser = i;
        this->hidden->shm->wHdr[i].dwLoops = 0; /* loop control counter */
        this->hidden->shm->wHdr[i].lpNext = NULL;       /* reserved for driver */
        this->hidden->shm->wHdr[i].reserved = 0;
        inUse[i] = FALSE;
    }
    this->hidden->next_buffer = 0;

    return 1;
}
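
The examples on this page only show the allocation side. As a point of reference, here is a minimal sketch of the matching cleanup path, assuming the mmeFreeBuffer/mmeFreeMem entry points that appear in the other examples; it is not the project's actual MME_CloseDevice.

/* Hypothetical cleanup counterpart: release everything MME_OpenDevice
   allocated, in reverse order of allocation.  Sketch only -- the real
   MME_CloseDevice in the driver may do more. */
static void
MME_CloseDevice_sketch(_THIS)
{
    if (this->hidden != NULL) {
        if (this->hidden->mixbuf != NULL) {
            mmeFreeBuffer(this->hidden->mixbuf);        /* shared sound buffers */
            this->hidden->mixbuf = NULL;
        }
        if (this->hidden->shm != NULL) {
            if (this->hidden->shm->sound) {
                waveOutClose(this->hidden->shm->sound); /* device handle */
            }
            mmeFreeMem(this->hidden->shm);              /* shared WAVE control block */
            this->hidden->shm = NULL;
        }
        SDL_free(this->hidden);
        this->hidden = NULL;
    }
}
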
Example #2
static int MME_OpenAudio(_THIS, SDL_AudioSpec *spec)
{
    MMRESULT result;
    int i;

    mixbuf = NULL;

    /* Set basic WAVE format parameters */
    shm = mmeAllocMem(sizeof(*shm));
    if ( shm == NULL ) {
	SDL_SetError("Out of memory: shm");
	return(-1);
    }
    shm->sound = 0;
    shm->wFmt.wf.wFormatTag = WAVE_FORMAT_PCM;

    /* Determine the audio parameters from the AudioSpec */
    switch ( spec->format & 0xFF ) {
	case 8:
	    /* Unsigned 8 bit audio data */
	    spec->format = AUDIO_U8;
	    shm->wFmt.wBitsPerSample = 8;
	    break;
	case 16:
	    /* Signed 16 bit audio data */
	    spec->format = AUDIO_S16;
	    shm->wFmt.wBitsPerSample = 16;
	    break;
	default:
	    SDL_SetError("Unsupported audio format");
	    return(-1);
    }

    shm->wFmt.wf.nChannels = spec->channels;
    shm->wFmt.wf.nSamplesPerSec = spec->freq;
    shm->wFmt.wf.nBlockAlign =
	shm->wFmt.wf.nChannels * shm->wFmt.wBitsPerSample / 8;
    shm->wFmt.wf.nAvgBytesPerSec =
	shm->wFmt.wf.nSamplesPerSec * shm->wFmt.wf.nBlockAlign;

    /* Check the buffer size -- minimum of 1/4 second (word aligned) */
    if ( spec->samples < (spec->freq/4) )
	spec->samples = ((spec->freq/4)+3)&~3;

    /* Update the fragment size as size in bytes */
    SDL_CalculateAudioSpec(spec);

    /* Open the audio device */
    result = waveOutOpen(&(shm->sound),
			 WAVE_MAPPER,
			 &(shm->wFmt.wf),
			 MME_CALLBACK,
			 NULL,
			 (CALLBACK_FUNCTION|WAVE_OPEN_SHAREABLE));
    if ( result != MMSYSERR_NOERROR ) {
	    SetMMerror("waveOutOpen()", result);
	    return(-1);
    }

    /* Create the sound buffers */
    mixbuf = (Uint8 *)mmeAllocBuffer(NUM_BUFFERS * (spec->size));
    if ( mixbuf == NULL ) {
	SDL_SetError("Out of memory: mixbuf");
	return(-1);
    }

    for (i = 0; i < NUM_BUFFERS; i++) {
	shm->wHdr[i].lpData         = &mixbuf[i * (spec->size)];
	shm->wHdr[i].dwBufferLength = spec->size;
	shm->wHdr[i].dwFlags        = 0;
	shm->wHdr[i].dwUser         = i;
	shm->wHdr[i].dwLoops        = 0;       /* loop control counter */
	shm->wHdr[i].lpNext         = NULL;    /* reserved for driver */
	shm->wHdr[i].reserved       = 0;
	inUse[i] = FALSE;
    }
    next_buffer = 0;
    return 0;
}
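
For context, a rough sketch of how the wHdr ring prepared above would typically be queued to the device; the helper name is hypothetical, and the mmeProcessCallbacks() drain is an assumption based on the other MME examples here rather than code from this driver.

/* Hypothetical playback step: submit the next shared buffer and let the
   MME server deliver completion callbacks.  Uses the globals set up in
   MME_OpenAudio() above (shm, inUse, next_buffer). */
static void MME_PlayAudio_sketch(void)
{
	MMRESULT result;

	inUse[next_buffer] = TRUE;
	result = waveOutWrite(shm->sound, &(shm->wHdr[next_buffer]),
			      sizeof(WAVEHDR));
	if ( result != MMSYSERR_NOERROR ) {
		SetMMerror("waveOutWrite()", result);
		return;
	}
	/* Assumption: the callback passed to waveOutOpen() clears inUse[]
	   when a buffer completes; this just dispatches pending callbacks. */
	mmeProcessCallbacks();
	next_buffer = (next_buffer + 1) % NUM_BUFFERS;
}
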
Example #3
void MmeGrabber::startup()
{
    MMRESULT s;
    int size;
    
    /* fprintf(stderr, "vic: Called MmeGrabber::startup\n"); */
    
    if (running_)
      return;
    
    if(port_) {
	s=videoSetPortNum(handle_,port_);
	if (mme_error(s, "couldn't configure mme"))
	  goto bail;
    }

    if(standard_) {
	s=videoSetStandard(handle_,standard_);
	if (mme_error(s, "couldn't configure mme"))
	  goto bail;
    }

    s = videoConfigure(handle_, DVM_FORMAT,
		       VIDEO_CONFIGURE_GET|VIDEO_CONFIGURE_MIN,
		       0, bmh, sizeof(*bmh), 0, 0);
    size = bmh->b.bmi.biSizeImage;  /* XXX ridiculously large */
    s = videoConfigure(handle_, DVM_FORMAT, VIDEO_CONFIGURE_SET,
		       0, bmh, sizeof(*bmh), 0, 0);
    if (mme_error(s, "couldn't configure mme"))
      goto bail;
	
#ifdef notdef
	size = 320 * 240 * 3;
#endif
	vh_->lpData = (LPSTR)mmeAllocBuffer(size);
	if (vh_->lpData == 0) {
		fprintf(stderr, "vic: couldn't allocate mme frame memory\n");
		goto bail;
	}
	vh_->dwBufferLength = size;

	s = videoStreamInit(handle_, 1000000 /* 10fps - ignored */,
			    mme_callback,
			    0, CALLBACK_FUNCTION);
	if (mme_error(s, "couldn't initialize mme stream\n"))
		return;
	s = videoStreamPrepareHeader(handle_, vh_, sizeof(*vh_));
	if (mme_error(s, "couldn't prepare mme video hdr"))
		return;
	s = videoStreamStart(handle_);
	if (mme_error(s, "couldn't start mme video stream\n"))
		return;
	running_ = 1;
	frameclock_ = gettimeofday_usecs();

	outstanding_ = 0;
	last_grab_ = 0;

	grab();
	return;

bail:
	mmeFreeMem(bmh);
	bmh = 0;
	videoClose(handle_);
	vh_ = 0;
	valid_ = 0;
}
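
For completeness, a small C-style sketch of how the prepared frame header is typically handed back to the driver between grabs; the helper is hypothetical, and videoStreamAddBuffer plus the LPVIDEOHDR type are assumed from the VfW-style interface this MME library mirrors, not taken from vic's actual grab().

/* Hypothetical re-queue helper: give the prepared header back to the
   driver so the next frame is delivered into vh->lpData and reported
   through mme_callback.  Assumes a VfW-style videoStreamAddBuffer(). */
static int mme_requeue_frame(HVIDEO handle, LPVIDEOHDR vh)
{
	MMRESULT s = videoStreamAddBuffer(handle, vh, sizeof(*vh));
	if (s != 0) {	/* 0 is the success code for these entry points */
		fprintf(stderr, "vic: couldn't queue mme video buffer\n");
		return -1;
	}
	return 0;
}
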
Example #4
File: sound.c Project: bernds/UAE
int init_sound (void)
{
    int rate;
    int dspbits;
    int channels;
    MMRESULT	  status;
    LPPCMWAVEFORMAT waveformat;

    if (currprefs.sound_maxbsiz < 128 || currprefs.sound_maxbsiz > 16384) {
	fprintf(stderr, "Sound buffer size %d out of range.\n", currprefs.sound_maxbsiz);
	currprefs.sound_maxbsiz = 8192;
    }

    sndbufsize = currprefs.sound_maxbsiz;

    dspbits = currprefs.sound_bits;

    rate = currprefs.sound_freq;

    channels = 1;

    if((waveformat = (LPPCMWAVEFORMAT)
	mmeAllocMem(sizeof(PCMWAVEFORMAT))) == NULL ) {
	fprintf(stderr, "Failed to allocate PCMWAVEFORMAT struct\n");
	return 0;
    }
    waveformat->wf.nSamplesPerSec = rate;
    waveformat->wf.nChannels = channels;
    waveformat->wBitsPerSample = dspbits;
    waveformat->wf.wFormatTag = WAVE_FORMAT_PCM;

    bytes_per_sample = waveformat->wf.nChannels *
	(waveformat->wBitsPerSample/8);
    waveformat->wf.nBlockAlign = bytes_per_sample;
    waveformat->wf.nAvgBytesPerSec = bytes_per_sample *
	waveformat->wf.nSamplesPerSec;

    /* Open the audio device with desired rate/format */
    status = waveOutOpen( &mme_handle,
			  WAVE_MAPPER,
			  (LPWAVEFORMAT)waveformat,
			  (void (*)())mme_callback,
			  (unsigned int)NULL,
			  CALLBACK_FUNCTION | WAVE_OPEN_SHAREABLE );
    mmeFreeMem(waveformat);

    if(status != MMSYSERR_NOERROR) {
	fprintf(stderr, "waveOutOpen failed - status = %d\n", status);
	return 0;
    }

    /* Allocate wave header for use in write */
    if((WaveHeader = (LPWAVEHDR)
	mmeAllocMem(sizeof(WAVEHDR))) == NULL ) {
	fprintf(stderr, "Failed to allocate WAVEHDR struct\n");
	return 0;
    }
    /* Allocate shared audio buffer for communicating with audio device */
    if ((mme_audiobuf = (LPSTR)
	 mmeAllocBuffer(sndbufsize*SOUND_NUMBUF*bytes_per_sample*2)) == NULL) {
	fprintf(stderr, "Failed to allocate shared audio buffer\n");
	mmeFreeMem(WaveHeader);
	return 0;
    }
    sndbuffer = mme_audiobuf;
    obtained_freq = rate;

    if (dspbits == 16) {
	init_sound_table16 ();
	sample_handler = sample16_handler;
    } else {
	init_sound_table8 ();
	sample_handler = sample8_handler;
    }
    sound_available = 1;
    printf ("Sound driver found and configured for %d bits at %d Hz, buffer is %d bytes\n", dspbits, rate, sndbufsize);
    mme_sndbufpt = sndbufpt = sndbuffer;
    return 1;
}
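
As a usage note, a rough sketch of how the WaveHeader and shared buffer allocated above are typically handed to the device on each flush; the helper name is hypothetical, and the callback-drain call is an assumption based on the other MME examples on this page, not UAE's actual code.

/* Hypothetical flush step: describe one filled chunk of the shared audio
   buffer with the preallocated WaveHeader and submit it for playback.
   sndbufsize is a byte count, per the message printed in init_sound(). */
static void flush_sound_buffer_sketch (void)
{
    MMRESULT status;

    WaveHeader->lpData = (LPSTR) sndbuffer;
    WaveHeader->dwBufferLength = sndbufsize;
    WaveHeader->dwFlags = 0;

    status = waveOutWrite (mme_handle, WaveHeader, sizeof(WAVEHDR));
    if (status != MMSYSERR_NOERROR)
	fprintf (stderr, "waveOutWrite failed - status = %d\n", status);

    /* Assumption: the registered mme_callback recycles the buffer;
       mmeProcessCallbacks() runs any pending completion callbacks. */
    mmeProcessCallbacks ();
}
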