Beispiel #1
0
int app_OpenSound(int samples_per_sync, int sample_rate) {
    /*
     * Open the AudioQueue output stream (signed 16-bit packed PCM,
     * 2 channels interleaved, 44.1 kHz), prime its buffers, and start
     * playback.  Always returns 0, even on AudioQueue errors (they are
     * only logged).
     *
     * NOTE(review): samples_per_sync and sample_rate are currently
     * ignored -- the output rate is hard-coded to 44100 Hz.  Confirm
     * callers do not expect other rates before wiring them through.
     */
    Float64 sampleRate = 44100.0;
    int i;

    LOGDEBUG("app_SoundOpen()");
    
    app_MuteSound();
    
    /* Honour the user's mute preference: leave the queue closed. */
    if(preferences.muted)
    {
    	return 0;
    }

    soundInit = 0;

    in.mDataFormat.mSampleRate = sampleRate;
    in.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    in.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
                                | kAudioFormatFlagIsPacked;
    in.mDataFormat.mBytesPerPacket = 4;
    in.mDataFormat.mFramesPerPacket = 1;
    /*
     * BUGFIX: this was 2, but a packed interleaved frame of 2 channels
     * of 16-bit samples is 4 bytes (channels * bits / 8).  4 also makes
     * mBytesPerPacket (4) == mFramesPerPacket (1) * mBytesPerFrame.
     */
    in.mDataFormat.mBytesPerFrame = 4;
    in.mDataFormat.mChannelsPerFrame = 2;
    in.mDataFormat.mBitsPerChannel = 16;

    /* Pre-buffer before we turn on audio */
    UInt32 err;
    err = AudioQueueNewOutput(&in.mDataFormat,
                      AQBufferCallback,
                      &in,          /* user data handed back to the callback */
                      NULL,         /* NULL run loop: AQ uses its own thread */
                      kCFRunLoopCommonModes,
                      0,
                      &in.queue);
   if (err) {
     LOGDEBUG("AudioQueueNewOutput err %d\n", err);
   }

   /* 512 frames per buffer; the byte size follows from the frame size. */
   in.frameCount = 512;
   UInt32 bufferBytes = in.frameCount * in.mDataFormat.mBytesPerFrame;

   for (i=0; i<AUDIO_BUFFERS; i++) {
      err = AudioQueueAllocateBuffer(in.queue, bufferBytes, &in.mBuffers[i]);
      if (err) {
	LOGDEBUG("AudioQueueAllocateBuffer[%d] err %d\n",i, err);
      }
      /* "Prime" by calling the callback once per buffer so the queue
         starts with valid, already-enqueued data. */
      AQBufferCallback (&in, in.queue, in.mBuffers[i]);
   }

   soundInit = 1;
   LOGDEBUG("app_QueueSample.AudioQueueStart");
   err = AudioQueueStart(in.queue, NULL);
   if (err) {
      LOGDEBUG("AudioQueueStart err %d\n", err);
   }

    return 0;
}
Beispiel #2
0
/*
 * Open the AudioQueue output stream at 22.05 kHz (mono or stereo
 * depending on the global isStereo flag), enqueue AUDIO_BUFFERS
 * buffers, and start playback.  buffersize is only recorded in
 * soundBufferSize; the queue buffers themselves are fixed at
 * AUDIO_BUFFER_SIZE bytes.  Always returns 0.
 */
int app_OpenSound(int buffersize) {
    Float64 sampleRate = 22050.0;
    int i;
    UInt32 bufferBytes;
	
	soundBufferSize = buffersize;
	
    app_MuteSound();
	
    soundInit = 0;
	
    /* Signed 16-bit packed linear PCM.  Frame layout depends on isStereo;
       either way one packet is 4 bytes (stereo: 1 frame of 4 bytes,
       mono: 2 frames of 2 bytes). */
    in.mDataFormat.mSampleRate = sampleRate;
    in.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    in.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
	| kAudioFormatFlagIsPacked;
    in.mDataFormat.mBytesPerPacket    =   4;
    in.mDataFormat.mFramesPerPacket   =   isStereo ? 1 : 2;
    in.mDataFormat.mBytesPerFrame     =   isStereo ? 4 : 2;
    in.mDataFormat.mChannelsPerFrame  =   isStereo ? 2 : 1;
    in.mDataFormat.mBitsPerChannel    =   16;
	
	
    /* Pre-buffer before we turn on audio */
    UInt32 err;
    /* NOTE(review): the user-data argument is NULL here, so
       AQBufferCallback's first parameter will be NULL at runtime --
       presumably the callback reads globals instead; confirm, since a
       sibling app_OpenSound variant passes &in. */
    err = AudioQueueNewOutput(&in.mDataFormat,
							  AQBufferCallback,
							  NULL,
							  NULL,
							  kCFRunLoopCommonModes,
							  0,
							  &in.queue);
	
	bufferBytes = AUDIO_BUFFER_SIZE;
	
	/* NOTE(review): err is assigned below but never checked. */
	for (i=0; i<AUDIO_BUFFERS; i++) 
	{
		err = AudioQueueAllocateBuffer(in.queue, bufferBytes, &in.mBuffers[i]);
		/* "Prime" by calling the callback once per buffer */
		//AQBufferCallback (&in, in.queue, in.mBuffers[i]);
		/* NOTE(review): the buffers are enqueued with their contents
		   uninitialized; the first AUDIO_BUFFERS buffers may play
		   garbage instead of silence -- verify whether this is audible. */
		in.mBuffers[i]->mAudioDataByteSize = AUDIO_BUFFER_SIZE; //samples_per_frame * 2; //inData->mDataFormat.mBytesPerFrame; //(inData->frameCount * 4 < (sndOutLen) ? inData->frameCount * 4 : (sndOutLen));
		AudioQueueEnqueueBuffer(in.queue, in.mBuffers[i], 0, NULL);
	}
	
	soundInit = 1;
	err = AudioQueueStart(in.queue, NULL);
	
	return 0;
}
Beispiel #3
0
/*
 * Open the AudioQueue output stream (signed 16-bit packed mono PCM at
 * 44.1 kHz), enqueue IPHONE_AUDIO_BUFFERS buffers, and start playback.
 * Returns 0 immediately (without opening the queue) when sound is
 * disabled in the config; otherwise always returns 0 -- AudioQueue
 * errors are stored in err but not acted upon.
 */
int app_OpenSound()
{
  Float64 sampleRate = 44100.0;
  int i;
  UInt32 bufferBytes;
  UInt32 err;  /* BUGFIX: was the misspelled type "Uint32" */
    
  app_MuteSound();
  
  soundInit = 0;
  
  if(!config.enable_sound)
    return 0;

  /* Mono 16-bit packed PCM: 2 bytes per frame, 2 frames (4 bytes) per
     packet. */
  in.mDataFormat.mSampleRate = sampleRate;
  in.mDataFormat.mFormatID = kAudioFormatLinearPCM;
  in.mDataFormat.mFormatFlags =
   kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
  in.mDataFormat.mBytesPerPacket = 4;
  in.mDataFormat.mFramesPerPacket = 2;
  in.mDataFormat.mBytesPerFrame = 2;
  in.mDataFormat.mChannelsPerFrame = 1;
  in.mDataFormat.mBitsPerChannel = 16;
    
  // Pre-buffer before we turn on audio.  The callback is serviced on the
  // current thread's run loop (default mode).
  err = AudioQueueNewOutput(&in.mDataFormat, AQBufferCallback,
   NULL, NULL, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode,
   0, &in.queue);
  
  bufferBytes = IPHONE_AUDIO_BUFFER_SIZE;
  
  for(i = 0; i < IPHONE_AUDIO_BUFFERS; i++)
  {
    err = AudioQueueAllocateBuffer(in.queue, bufferBytes, &in.mBuffers[i]);
    in.mBuffers[i]->mAudioDataByteSize = IPHONE_AUDIO_BUFFER_SIZE;
    // "Prime" by calling the callback once per buffer
    AudioQueueEnqueueBuffer(in.queue, in.mBuffers[i], 0, NULL);
  }
  
  soundInit = 1;
  err = AudioQueueStart(in.queue, NULL);
  
  return 0;
}
Beispiel #4
0
/*
 * Initialise the GP2X/MAME sound path: pick a default soundcard if none
 * was chosen, handle the "silence" card, decide stereo vs. mono, reset
 * the stream cache, publish the sample rate, set the startup volume,
 * and cycle mute off.  Always returns 0.
 */
int msdos_init_sound(void)
{
	/* No card chosen yet -> default to the first one. */
	if (soundcard == -1)
	{
		soundcard = 1;
	}

	/* Card 0 means "silence": record that and bail out early. */
	if (soundcard == 0)
	{
		Machine->sample_rate = 0;
		return 0;
	}

	/* Stereo output only when both requested and supported by the driver. */
	gp2x_sound_stereo =
		(usestereo && (Machine->drv->sound_attributes & SOUND_SUPPORTS_STEREO)) ? 1 : 0;

	/* Start from an empty stream cache. */
	stream_cache_data = 0;
	stream_cache_len = 0;
	stream_cache_stereo = 0;

	/* Fixed 44.1 kHz output; mirror it into the Machine structure so the
	   core generates samples at the actual hardware rate. */
	gp2x_sound_rate = 44100;
	Machine->sample_rate = gp2x_sound_rate;

	logerror("set stereo: %d\n", gp2x_sound_stereo);
	logerror("set sample rate: %d\n", Machine->sample_rate);

	/* Apply the startup volume, then cycle mute off. */
	osd_set_mastervolume(attenuation);
	app_MuteSound();
	app_DemuteSound();

	return 0;
}
/* Stop the emulator: silence audio first, then clear the run flag so
   the main emulation loop exits. */
void app_Halt(void) {
	app_MuteSound();
    __emulation_run = 0;  /* NOTE(review): plain store -- if another thread polls this flag, confirm whether it needs to be atomic/volatile */
}
Beispiel #6
0
/* Mute hook used by the sound thread: delegates to app_MuteSound(),
   compiled out entirely in NOSOUND builds. */
void gp2x_sound_thread_mute(void)
{
#ifndef NOSOUND
	app_MuteSound();
#endif
}
Beispiel #7
0
/* Stop audio output for the GP2X port by muting it.
 * FIX: declared with (void) -- in a C definition, empty parentheses
 * mean "unspecified arguments", not "no arguments"; (void) gives the
 * compiler a proper prototype.  Callers are unaffected. */
void gp2x_stop_sound(void)
{
	app_MuteSound();
}