Example #1
0
  void
  MMAudioDevice::update() {
    ADR_GUARD("MMAudioDevice::update");

    // if a buffer is done playing, add it to the queue again
    for (int i = 0; i < BUFFER_COUNT; ++i) {
      WAVEHDR& wh = m_buffers[i];
      if (wh.dwFlags & WHDR_DONE) {

        // unprepare
        MMRESULT result = waveOutUnprepareHeader(m_device, &wh, sizeof(wh));
        if (result != MMSYSERR_NOERROR) {
          ADR_LOG("waveOutUnprepareHeader failed");
        }

        // fill with new samples
        read(BUFFER_LENGTH / 4, wh.lpData);
        wh.dwFlags = 0;

        // prepare
        result = waveOutPrepareHeader(m_device, &wh, sizeof(wh));
        if (result != MMSYSERR_NOERROR) {
          ADR_LOG("waveOutPrepareHeader failed");
        }

        // write
        result = waveOutWrite(m_device, &wh, sizeof(wh));
        if (result != MMSYSERR_NOERROR) {
          ADR_LOG("waveOutWrite failed");
        }
      }
    }
    Sleep(10);
  }
Example #2
0
  void
  ALAudioDevice::update() {
    ADR_GUARD("ALAudioDevice::update");

    // how much data can we write?
    const int filled = alGetFilled(m_port);
    int can_write = 5000 - filled;  // empty portion of the buffer

    // write 1024 frames at a time
    static const int BUFFER_SIZE = 1024;
    u8 buffer[BUFFER_SIZE * 4];
    while (can_write > 0) {
      int transfer_count = std::min(can_write, BUFFER_SIZE);

      ADR_LOG("reading");

      read(transfer_count, buffer);

      ADR_LOG("writing");

      alWriteFrames(m_port, buffer, transfer_count);
      can_write -= transfer_count;
    }

    usleep(50000);  // 50 milliseconds
  }
Example #3
0
  MMAudioDevice::MMAudioDevice(HWAVEOUT device, int rate)
    : MixerDevice(rate)
  {
    ADR_GUARD("MMAudioDevice::MMAudioDevice");

    m_device = device;
    m_current_buffer = 0;

    // Prime every buffer with audio data and queue it on the device.
    for (int idx = 0; idx < BUFFER_COUNT; ++idx) {
      WAVEHDR& header = m_buffers[idx];
      memset(&header, 0, sizeof(header));
      // Each header points at its slice of the shared sample storage.
      header.lpData         = (char*)m_samples + idx * BUFFER_LENGTH;
      header.dwBufferLength = BUFFER_LENGTH;

      // Fill the slice with the first batch of samples.
      read(BUFFER_LENGTH / 4, header.lpData);

      MMRESULT res = waveOutPrepareHeader(m_device, &header, sizeof(header));
      if (res != MMSYSERR_NOERROR) {
        ADR_LOG("waveOutPrepareHeader failed");
      }

      res = waveOutWrite(m_device, &header, sizeof(header));
      if (res != MMSYSERR_NOERROR) {
        ADR_LOG("waveOutWrite failed");
      }
    }
  }
Example #4
0
  bool
  AIFFInputStream::findSoundChunk() {
    ADR_GUARD("AIFFInputStream::findSoundChunk");

    // Seek to just after the 12-byte IFF header ("FORM" + size + form type).
    m_file->seek(12, File::BEGIN);

    // Walk the chunk list looking for the sound data ("SSND") chunk.
    while (true) {
      u8 chunk_header[8];
      if (m_file->read(chunk_header, 8) != 8) {
        ADR_LOG("Couldn't read SSND chunk header");
        return false;
      }
      u32 chunk_length = read32_be(chunk_header + 4);

      // if we found a data chunk, excellent!
      if (memcmp(chunk_header, "SSND", 4) == 0) {
        ADR_LOG("Found sound chunk");

        // The SSND chunk body starts with offset and blockSize fields.
        u8 chunk_contents[8];
        if (m_file->read(chunk_contents, 8) != 8) {
          ADR_LOG("Couldn't read SSND chunk contents");
          return false;
        }
        // Only offset == 0 and blockSize == 0 are supported.
        if (read32_be(chunk_contents + 0) != 0 ||
            read32_be(chunk_contents + 4) != 0)
        {
          ADR_LOG("Block-aligned AIFF files not supported!");
          return false;
        }

        // calculate the frame size so we can truncate the data chunk
        int frame_size = m_channel_count * GetSampleSize(m_sample_format);
        if (frame_size <= 0) {
          // Guard against division by zero on a malformed common chunk.
          ADR_LOG("Invalid frame size");
          return false;
        }

        m_data_chunk_location  = m_file->tell();
        // chunk_length includes the 8 bytes of offset/blockSize we read.
        m_data_chunk_length    = (chunk_length - 8) / frame_size;
        m_frames_left_in_chunk = m_data_chunk_length;
        return true;

      } else {

        ADR_IF_DEBUG {
          const u8* ci = chunk_header;
          char str[80];
          // snprintf: bounded even if the format string ever grows.
          snprintf(str, sizeof(str),
                   "Skipping: %d bytes in chunk '%c%c%c%c'",
                   (int)chunk_length, ci[0], ci[1], ci[2], ci[3]);
          ADR_LOG(str);
        }

        // Skip the rest of the chunk.  IFF chunks are word-aligned: an
        // odd-length chunk is followed by a pad byte that is NOT counted
        // in the chunk length, so include it in the skip or every chunk
        // after an odd one is misread.
        if (!skipBytes(chunk_length + (chunk_length % 2))) {
          // oops, end of stream
          return false;
        }

      }
    }
  }
Example #5
0
 void AbstractDevice::eventThread(void* arg) {
   ADR_GUARD("AbstractDevice::eventThread[static]");
   ADR_LOG(arg ? "arg is valid" : "arg is not valid");
   
   AbstractDevice* This = static_cast<AbstractDevice*>(arg);
   This->eventThread();
 }
Example #6
0
 void
 AIFFInputStream::reset() {
   // Rewind playback to the start of the data chunk and restore the
   // remaining-frame counter.
   m_frames_left_in_chunk = m_data_chunk_length;
   if (!m_file->seek(m_data_chunk_location, File::BEGIN)) {
     // Log message now states the failure explicitly (the old text read
     // like a trace, not an error).
     ADR_LOG("Seek failed in AIFFInputStream::reset");
   }
 }
Example #7
0
  AbstractDevice::AbstractDevice() {
    m_thread_exists = false;
    m_thread_should_die = false;

    // Launch the device's background event thread, passing 'this' as the
    // thread argument (third argument presumably a priority — see
    // AI_CreateThread).
    if (!AI_CreateThread(eventThread, this, 2)) {
      ADR_LOG("THREAD CREATION FAILED");
    }
  }
Example #8
0
  OutputStream*
  DSAudioDevice::openStream(SampleSource* source) {
    // Creates a streaming DirectSound secondary buffer that will be fed
    // from 'source'.  Returns 0 if 'source' is null or buffer creation
    // fails; on success returns a new DSOutputStream, which is also
    // recorded in m_open_streams.
    if (!source) {
      return 0;
    }

    ADR_GUARD("DSAudioDevice::openStream");

    int channel_count, sample_rate;
    SampleFormat sample_format;
    source->getFormat(channel_count, sample_rate, sample_format);

    // Bytes per frame: one sample per channel.
    const int frame_size = channel_count * GetSampleSize(sample_format);

    // calculate an ideal buffer size
    // (buffer_length is in frames; m_buffer_length presumably in
    // milliseconds — TODO confirm against its declaration)
    const int buffer_length = sample_rate * m_buffer_length / 1000;

    // define the wave format
    WAVEFORMATEX wfx;
    memset(&wfx, 0, sizeof(wfx));
    wfx.wFormatTag      = WAVE_FORMAT_PCM;
    wfx.nChannels       = channel_count;
    wfx.nSamplesPerSec  = sample_rate;
    wfx.nAvgBytesPerSec = sample_rate * frame_size;
    wfx.nBlockAlign     = frame_size;
    wfx.wBitsPerSample  = GetSampleSize(sample_format) * 8;
    wfx.cbSize          = sizeof(wfx);

    // Describe the buffer: accurate play cursor plus pan/volume/frequency
    // control so the stream can expose those knobs.
    DSBUFFERDESC dsbd;
    memset(&dsbd, 0, sizeof(dsbd));
    dsbd.dwSize        = sizeof(dsbd);
    dsbd.dwFlags       = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_CTRLPAN |
                         DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLFREQUENCY;
    if (m_global_focus) {
      // Keep playing even when the application loses focus.
      dsbd.dwFlags |= DSBCAPS_GLOBALFOCUS;
    }
    dsbd.dwBufferBytes = frame_size * buffer_length;
    dsbd.lpwfxFormat   = &wfx;

    // create the DirectSound buffer
    IDirectSoundBuffer* buffer;
    HRESULT result = m_direct_sound->CreateSoundBuffer(&dsbd, &buffer, NULL);
    if (FAILED(result) || !buffer) {
      return 0;
    }

    ADR_LOG("CreateSoundBuffer succeeded");

    // now create the output stream
    // (the stream takes over the buffer — no Release here)
    DSOutputStream* stream = new DSOutputStream(
      this, buffer, buffer_length, source);

    // add it the list of streams and return
    SYNCHRONIZED(this);
    m_open_streams.push_back(stream);
    return stream;
  }
Example #9
0
 CondVar::CondVar() {
   ADR_GUARD("CondVar::CondVar");
   m_impl = new Impl;
   // Auto-reset, initially unsignaled, unnamed event object.
   m_impl->event = CreateEvent(0, FALSE, FALSE, 0);
   if (m_impl->event == 0) {
     ADR_LOG("CreateEvent() failed");
     // Without the event there is no working condition variable; bail.
     abort();
   }
 }
Example #10
0
  DSOutputBuffer::DSOutputBuffer(
    DSAudioDevice* device,
    IDirectSoundBuffer* buffer,
    int length,
    int frame_size)
  {
    // Wraps an existing DirectSound buffer ('length' frames of
    // 'frame_size' bytes each), records its base frequency, and — when
    // the buffer supports IDirectSoundNotify — arranges for m_stop_event
    // to be signaled when playback stops.
    ADR_GUARD("DSOutputBuffer::DSOutputBuffer");

    m_device     = device; // keep the device alive while the buffer exists
    m_buffer     = buffer;
    m_length     = length;
    m_frame_size = frame_size;

    // Remember the buffer's current frequency as the baseline.
    // NOTE(review): GetFrequency's HRESULT is ignored — 'frequency' would
    // be uninitialized on failure; confirm the call cannot fail here.
    DWORD frequency;
    m_buffer->GetFrequency(&frequency);
    m_base_frequency = frequency;

    // Default playback state: one-shot, full volume, centered.
    m_repeating  = false;
    m_volume     = 1;
    m_pan        = 0;

    m_stop_event = 0;

    // Set up the stop notification if we can.
    // (COM sets m_notify to null if QueryInterface fails.)
    HRESULT rv = m_buffer->QueryInterface(
        IID_IDirectSoundNotify, (void**)&m_notify);
    if (SUCCEEDED(rv) && m_notify) {
      // Auto-reset, initially unsignaled event for the stop notification.
      m_stop_event = CreateEvent(0, FALSE, FALSE, 0);
      if (!m_stop_event) {
        ADR_LOG("DSOutputBuffer stop event creation failed.  Not firing stop events.");
      } else {
        // DSBPN_OFFSETSTOP: notify when the buffer stops playing.
        DSBPOSITIONNOTIFY n;
        n.dwOffset = DSBPN_OFFSETSTOP;
        n.hEventNotify = m_stop_event;
        m_notify->SetNotificationPositions(1, &n);
      }
    }
  }
Example #11
0
  OutputStream*
  DSAudioDevice::openBuffer(
    void* samples, int frame_count,
    int channel_count, int sample_rate, SampleFormat sample_format)
  {
    // Creates a static (fully preloaded) DirectSound buffer containing
    // 'frame_count' frames copied from 'samples'.  Returns 0 on buffer
    // creation or lock failure; on success returns a new DSOutputBuffer,
    // which is also recorded in m_open_buffers.
    ADR_GUARD("DSAudioDevice::openBuffer");

    // Bytes per frame: one sample per channel.
    const int frame_size = channel_count * GetSampleSize(sample_format);

    WAVEFORMATEX wfx;
    memset(&wfx, 0, sizeof(wfx));
    wfx.wFormatTag      = WAVE_FORMAT_PCM;
    wfx.nChannels       = channel_count;
    wfx.nSamplesPerSec  = sample_rate;
    wfx.nAvgBytesPerSec = sample_rate * frame_size;
    wfx.nBlockAlign     = frame_size;
    wfx.wBitsPerSample  = GetSampleSize(sample_format) * 8;
    wfx.cbSize          = sizeof(wfx);

    // Static buffer with pan/volume/frequency control and stop
    // notification support (used by DSOutputBuffer's stop event).
    DSBUFFERDESC dsbd;
    memset(&dsbd, 0, sizeof(dsbd));
    dsbd.dwSize  = sizeof(dsbd);
    dsbd.dwFlags = DSBCAPS_GETCURRENTPOSITION2 | DSBCAPS_CTRLPAN |
                   DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLFREQUENCY |
                   DSBCAPS_STATIC | DSBCAPS_CTRLPOSITIONNOTIFY;
    if (m_global_focus) {
      // Keep playing even when the application loses focus.
      dsbd.dwFlags |= DSBCAPS_GLOBALFOCUS;
    }

    // Enforce a minimum buffer length; the tail beyond frame_count is
    // zero-filled below.
    const int buffer_frame_count = std::max(m_min_buffer_length, frame_count);
    const int buffer_size = buffer_frame_count * frame_size;
    dsbd.dwBufferBytes = buffer_size;
    dsbd.lpwfxFormat   = &wfx;

    // create the DS buffer
    IDirectSoundBuffer* buffer;
    HRESULT result = m_direct_sound->CreateSoundBuffer(
      &dsbd, &buffer, NULL);
    if (FAILED(result) || !buffer) {
      return 0;
    }

    // Debug-only sanity check: report the size DirectSound actually gave us.
    ADR_IF_DEBUG {
      DSBCAPS caps;
      caps.dwSize = sizeof(caps);
      result = buffer->GetCaps(&caps);
      if (FAILED(result)) {
        buffer->Release();
        return 0;
      } else {
        std::ostringstream ss;
        ss << "actual buffer size: " << caps.dwBufferBytes << std::endl
           << "buffer_size: " << buffer_size;
        ADR_LOG(ss.str().c_str());
      }
    }

    // Lock the whole buffer so we can copy the samples in.
    void* data;
    DWORD data_size;
    result = buffer->Lock(0, buffer_size, &data, &data_size, 0, 0, 0);
    if (FAILED(result)) {
      buffer->Release();
      return 0;
    }

    ADR_IF_DEBUG {
      std::ostringstream ss;
      ss << "buffer size: " << buffer_size << std::endl
         << "data size:   " << data_size << std::endl
         << "frame count: " << frame_count;
      ADR_LOG(ss.str().c_str());
    }

    // Copy the caller's frames, then zero the padding tail.
    const int actual_size = frame_count * frame_size;
    memcpy(data, samples, actual_size);
    memset((u8*)data + actual_size, 0, buffer_size - actual_size);

    buffer->Unlock(data, data_size, 0, 0);

    // The DSOutputBuffer takes over the DS buffer — no Release here.
    DSOutputBuffer* b = new DSOutputBuffer(
      this, buffer, buffer_frame_count, frame_size);
    SYNCHRONIZED(this);
    m_open_buffers.push_back(b);
    return b;
  }
Example #12
0
  bool
  AIFFInputStream::findCommonChunk() {
    ADR_GUARD("AIFFInputStream::findCommonChunk");

    // Seek to just after the 12-byte IFF header ("FORM" + size + form type).
    m_file->seek(12, File::BEGIN);

    // Walk the chunk list looking for the common ("COMM") chunk.
    for (;;) {
      u8 chunk_header[8];
      if (m_file->read(chunk_header, 8) != 8) {
        return false;
      }
      u32 chunk_length = read32_be(chunk_header + 4);

      // if we found a format chunk, excellent!
      if (memcmp(chunk_header, "COMM", 4) == 0 && chunk_length >= 18) {
        ADR_LOG("Found common chunk");

        // read common chunk
        u8 chunk[18];
        if (m_file->read(chunk, 18) != 18) {
          return false;
        }

        chunk_length -= 18;

        // parse the memory into useful information
        u16 channel_count   = read16_be(chunk + 0);
        //u32 frame_count     = read32_be(chunk + 2);
        u16 bits_per_sample = read16_be(chunk + 6);
        // AIFF stores the rate as an 80-bit extended float; readLD_be
        // presumably decodes it — confirm against its definition.
        u32 sample_rate     = readLD_be(chunk + 8);

        // we only support mono and stereo, 8-bit or 16-bit
        // (also reject a zero channel count, which would be malformed)
        if (channel_count < 1 || channel_count > 2 ||
            !isValidSampleSize(bits_per_sample)) {
          ADR_LOG("Invalid AIFF");
          return false;
        }

        // Skip the remainder of the chunk, plus the IFF pad byte that
        // follows an odd-length chunk (it is not counted in the chunk
        // length; subtracting the even 18 above preserves the parity of
        // the original length).
        if (!skipBytes(chunk_length + (chunk_length % 2))) {
          ADR_LOG("failed skipping rest of common chunk");
          return false;
        }

        // figure out the sample format
        if (bits_per_sample == 8) {
          m_sample_format = SF_U8;
        } else if (bits_per_sample == 16) {
          m_sample_format = SF_S16;
        } else {
          return false;
        }

        // store the other important attributes
        m_channel_count = channel_count;
        m_sample_rate   = sample_rate;
        return true;

      } else {

        // Skip this chunk plus its pad byte if the length is odd —
        // otherwise every chunk after an odd-length one is misread.
        if (!skipBytes(chunk_length + (chunk_length % 2))) {
          // oops, end of stream
          return false;
        }

      }
    }
  }