Ejemplo n.º 1
0
/*
 * Open and start the output audio queue.
 *
 * samples_per_sync / sample_rate: accepted for API compatibility but not
 * used here — the queue is hard-wired to 44.1 kHz (see sampleRate below).
 * Returns 0 in all cases (errors are only logged).
 *
 * Side effects: mutes any previous sound, honors preferences.muted,
 * and toggles the file-global soundInit flag around queue setup.
 */
int app_OpenSound(int samples_per_sync, int sample_rate) {
    Float64 sampleRate = 44100.0;
    int i;

    LOGDEBUG("app_SoundOpen()");

    app_MuteSound();

    if(preferences.muted)
    {
        return 0;
    }

    soundInit = 0;

    /* 16-bit signed, packed, interleaved stereo linear PCM. */
    in.mDataFormat.mSampleRate = sampleRate;
    in.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    in.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
                                | kAudioFormatFlagIsPacked;
    in.mDataFormat.mBytesPerPacket = 4;
    in.mDataFormat.mFramesPerPacket = 1;
    /* BUGFIX: 2 channels x 16 bits = 4 bytes per frame. The previous value
       of 2 contradicted mBytesPerPacket = 4 with one frame per packet,
       which makes the ASBD internally inconsistent. */
    in.mDataFormat.mBytesPerFrame = 4;
    in.mDataFormat.mChannelsPerFrame = 2;
    in.mDataFormat.mBitsPerChannel = 16;

    /* Pre-buffer before we turn on audio */
    UInt32 err;
    err = AudioQueueNewOutput(&in.mDataFormat,
                      AQBufferCallback,
                      &in,
                      NULL,
                      kCFRunLoopCommonModes,
                      0,
                      &in.queue);
    if (err) {
        LOGDEBUG("AudioQueueNewOutput err %d\n", err);
    }

    in.frameCount = 512 * 1; //512; //(1024 * (16)) / 4;
    UInt32 bufferBytes = in.frameCount * in.mDataFormat.mBytesPerFrame;

    for (i=0; i<AUDIO_BUFFERS; i++) {
        err = AudioQueueAllocateBuffer(in.queue, bufferBytes, &in.mBuffers[i]);
        if (err) {
            LOGDEBUG("AudioQueueAllocateBuffer[%d] err %d\n",i, err);
        }
        /* "Prime" by calling the callback once per buffer */
        AQBufferCallback (&in, in.queue, in.mBuffers[i]);
    }

    soundInit = 1;
    LOGDEBUG("app_QueueSample.AudioQueueStart");
    err = AudioQueueStart(in.queue, NULL);
    /* BUGFIX: the start status was computed and silently dropped; at least
       log it like the other queue calls above. */
    if (err) {
        LOGDEBUG("AudioQueueStart err %d\n", err);
    }

    return 0;
}
Ejemplo n.º 2
0
int playbuffer(void *pcmbuffer, unsigned long len) {
    AQCallbackStruct aqc;
    UInt32 err, bufferSize;
    int i;
    
    aqc.mDataFormat.mSampleRate = SAMPLE_RATE;
    aqc.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    aqc.mDataFormat.mFormatFlags =
    kLinearPCMFormatFlagIsSignedInteger
    | kAudioFormatFlagIsPacked;
    aqc.mDataFormat.mBytesPerPacket = 4;
    aqc.mDataFormat.mFramesPerPacket = 1;
    aqc.mDataFormat.mBytesPerFrame = 4;
    aqc.mDataFormat.mChannelsPerFrame = 2;
    aqc.mDataFormat.mBitsPerChannel = 16;
    aqc.frameCount = FRAME_COUNT;
    aqc.sampleLen = len / BYTES_PER_SAMPLE;
    aqc.playPtr = 0;
    aqc.pcmBuffer = (sampleFrame *)pcmbuffer;
    
    err = AudioQueueNewOutput(&aqc.mDataFormat,
                              AQBufferCallback,
                              &aqc,
                              NULL,
                              kCFRunLoopCommonModes,
                              0,
                              &aqc.queue);
    if (err)
        return err;
    
    aqc.frameCount = FRAME_COUNT;
    bufferSize = aqc.frameCount * aqc.mDataFormat.mBytesPerFrame;
    
    for (i=0; i<AUDIO_BUFFERS; i++) {
        err = AudioQueueAllocateBuffer(aqc.queue, bufferSize,
                                       &aqc.mBuffers[i]);
        if (err)
            return err;
        AQBufferCallback(&aqc, aqc.queue, aqc.mBuffers[i]);
    }
    
    err = AudioQueueStart(aqc.queue, NULL);
    if (err)
        return err;
    struct timeval tv = {1.0, 0};
    while(aqc.playPtr < aqc.sampleLen) { select(0, NULL, NULL, NULL, &tv); }
    sleep(1);
    return 0;
}
bool AudioQueueStreamOut::Open(const char *FileName)
{
    delete [] mInfo.mPacketDescs;
    mInfo.mPacketDescs = NULL;
    m_totalFrames = 0;
    mInfo.m_SeekToPacket = -1;
 	try {
   
        CFURLRef sndFile = CFURLCreateFromFileSystemRepresentation (NULL, (const UInt8 *)FileName, strlen(FileName), false);
        if (!sndFile) return false;
            
        OSStatus result = AudioFileOpenURL (sndFile, 0x1/*fsRdPerm*/, 0/*inFileTypeHint*/, &mInfo.mAudioFile);
        CFRelease (sndFile);
                                
        UInt32 size = sizeof(mInfo.mDataFormat);
        XThrowIfError(AudioFileGetProperty(mInfo.mAudioFile, 
                                    kAudioFilePropertyDataFormat, &size, &mInfo.mDataFormat), "couldn't get file's data format");
        
        printf ("File format: "); mInfo.mDataFormat.Print();

        XThrowIfError(AudioQueueNewOutput(&mInfo.mDataFormat, AudioQueueStreamOut::AQBufferCallback, this, 
                                    NULL, kCFRunLoopCommonModes, 0, &mInfo.mQueue), "AudioQueueNew failed");

        UInt32 bufferByteSize;
        
        // we need to calculate how many packets we read at a time, and how big a buffer we need
        // we base this on the size of the packets in the file and an approximate duration for each buffer
        {
            bool isFormatVBR = (mInfo.mDataFormat.mBytesPerPacket == 0 || mInfo.mDataFormat.mFramesPerPacket == 0);
            
            // first check to see what the max size of a packet is - if it is bigger
            // than our allocation default size, that needs to become larger
            UInt32 maxPacketSize;
            size = sizeof(maxPacketSize);
            XThrowIfError(AudioFileGetProperty(mInfo.mAudioFile, 
                                    kAudioFilePropertyPacketSizeUpperBound, &size, &maxPacketSize), "couldn't get file's max packet size");
            
            // adjust buffer size to represent about a half second of audio based on this format
            CalculateBytesForTime (mInfo.mDataFormat, maxPacketSize, 0.5/*seconds*/, &bufferByteSize, &mInfo.mNumPacketsToRead);
            
            if (isFormatVBR)
                mInfo.mPacketDescs = new AudioStreamPacketDescription [mInfo.mNumPacketsToRead];
            else
                mInfo.mPacketDescs = NULL; // we don't provide packet descriptions for constant bit rate formats (like linear PCM)
                
            printf ("Buffer Byte Size: %d, Num Packets to Read: %d\n", (int)bufferByteSize, (int)mInfo.mNumPacketsToRead);
        }

        // (2) If the file has a cookie, we should get it and set it on the AQ
        size = sizeof(UInt32);
        result = AudioFileGetPropertyInfo (mInfo.mAudioFile, kAudioFilePropertyMagicCookieData, &size, NULL);

        if (!result && size) {
            char* cookie = new char [size];		
            XThrowIfError (AudioFileGetProperty (mInfo.mAudioFile, kAudioFilePropertyMagicCookieData, &size, cookie), "get cookie from file");
            XThrowIfError (AudioQueueSetProperty(mInfo.mQueue, kAudioQueueProperty_MagicCookie, cookie, size), "set cookie on queue");
            delete [] cookie;
        }

            // prime the queue with some data before starting
        mInfo.mDone = false;
        mInfo.mCurrentPacket = 0;
        for (UInt32 i = 0; i < sizeof(mInfo.mBuffers)/sizeof(mInfo.mBuffers[0]); ++i) {
            XThrowIfError(AudioQueueAllocateBuffer(mInfo.mQueue, bufferByteSize, &mInfo.mBuffers[i]), "AudioQueueAllocateBuffer failed");

            AQBufferCallback (this, mInfo.mQueue, mInfo.mBuffers[i]);
            
            if (mInfo.mDone) break;
        }	
        return IMUSIKStreamOutDefault::Create(NULL);
	}
	catch (CAXException e) {
		char buf[256];
		fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
	}
    
    return false;
}