Example #1
static bool set_format(struct ao *ao, pa_format_info *format)
{
    ao->format = af_fmt_from_planar(ao->format);

    format->encoding = map_digital_format(ao->format);
    if (format->encoding == PA_ENCODING_PCM) {
        const struct format_map *fmt_map = format_maps;

        while (fmt_map->mp_format != ao->format) {
            if (fmt_map->mp_format == AF_FORMAT_UNKNOWN) {
                MP_VERBOSE(ao, "Unsupported format, using default\n");
                fmt_map = format_maps;
                break;
            }
            fmt_map++;
        }
        ao->format = fmt_map->mp_format;

        pa_format_info_set_sample_format(format, fmt_map->pa_format);
    }

    struct pa_channel_map map;

    if (!select_chmap(ao, &map))
        return false;

    pa_format_info_set_rate(format, ao->samplerate);
    pa_format_info_set_channels(format, ao->channels.num);
    pa_format_info_set_channel_map(format, &map);

    return ao->samplerate < PA_RATE_MAX && pa_format_info_valid(format);
}
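For reference, a minimal sketch of the pattern shared by the examples in this section: allocate a pa_format_info, fill in encoding, sample format, rate and channel count, validate it, and pass it to pa_stream_new_extended(). The context ctx, the stream name and the chosen format values are placeholder assumptions, not taken from any of the projects quoted here.

#include <pulse/pulseaudio.h>

/* Sketch only: build a PCM pa_format_info and open an extended stream.
 * Assumes `ctx` is an already-connected pa_context; error handling and
 * the stream callbacks are omitted. */
static pa_stream *open_pcm_stream_sketch(pa_context *ctx)
{
    pa_format_info *formats[1];
    pa_stream *s = NULL;

    formats[0] = pa_format_info_new();
    formats[0]->encoding = PA_ENCODING_PCM;
    pa_format_info_set_sample_format(formats[0], PA_SAMPLE_S16LE);
    pa_format_info_set_rate(formats[0], 48000);
    pa_format_info_set_channels(formats[0], 2);

    if (pa_format_info_valid(formats[0]))
        s = pa_stream_new_extended(ctx, "format-info sketch", formats, 1, NULL);

    /* The stream keeps its own copy of the format list, so the caller can
     * free the pa_format_info afterwards (examples #3, #6 and #7 do the same). */
    pa_format_info_free(formats[0]);
    return s;
}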
Example #2
gboolean
gst_pulse_fill_format_info (GstAudioRingBufferSpec * spec, pa_format_info ** f,
    guint * channels)
{
  pa_format_info *format;
  pa_sample_format_t sf = PA_SAMPLE_INVALID;
  GstAudioInfo *ainfo = &spec->info;

  format = pa_format_info_new ();

  if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW
      && GST_AUDIO_INFO_WIDTH (ainfo) == 8) {
    format->encoding = PA_ENCODING_PCM;
    sf = PA_SAMPLE_ULAW;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW
      && GST_AUDIO_INFO_WIDTH (ainfo) == 8) {
    format->encoding = PA_ENCODING_PCM;
    sf = PA_SAMPLE_ALAW;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) {
    format->encoding = PA_ENCODING_PCM;
    if (!gstaudioformat_to_pasampleformat (GST_AUDIO_INFO_FORMAT (ainfo), &sf))
      goto fail;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3) {
    format->encoding = PA_ENCODING_AC3_IEC61937;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3) {
    format->encoding = PA_ENCODING_EAC3_IEC61937;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS) {
    format->encoding = PA_ENCODING_DTS_IEC61937;
  } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG) {
    format->encoding = PA_ENCODING_MPEG_IEC61937;
  } else {
    goto fail;
  }

  if (format->encoding == PA_ENCODING_PCM) {
    pa_format_info_set_sample_format (format, sf);
    pa_format_info_set_channels (format, GST_AUDIO_INFO_CHANNELS (ainfo));
  }

  pa_format_info_set_rate (format, GST_AUDIO_INFO_RATE (ainfo));

  if (!pa_format_info_valid (format))
    goto fail;

  *f = format;
  *channels = GST_AUDIO_INFO_CHANNELS (ainfo);

  return TRUE;

fail:
  if (format)
    pa_format_info_free (format);
  return FALSE;
}
Example #3
/* This is called whenever the context status changes */
static void context_state_callback(pa_context *c, void *userdata) {
    fail_unless(c != NULL);

    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_CONNECTING:
        case PA_CONTEXT_AUTHORIZING:
        case PA_CONTEXT_SETTING_NAME:
            break;

        case PA_CONTEXT_READY: {

            int i;
            fprintf(stderr, "Connection established.\n");

            for (i = 0; i < NSTREAMS; i++) {
                char name[64];
                pa_format_info *formats[1];

                formats[0] = pa_format_info_new();
                formats[0]->encoding = PA_ENCODING_PCM;
                pa_format_info_set_sample_format(formats[0], PA_SAMPLE_FLOAT32);
                pa_format_info_set_rate(formats[0], SAMPLE_HZ);
                pa_format_info_set_channels(formats[0], 1);

                fprintf(stderr, "Creating stream %i\n", i);

                snprintf(name, sizeof(name), "stream #%i", i);

                streams[i] = pa_stream_new_extended(c, name, formats, 1, NULL);
                fail_unless(streams[i] != NULL);
                pa_stream_set_state_callback(streams[i], stream_state_callback, (void*) (long) i);
                pa_stream_connect_playback(streams[i], NULL, &buffer_attr, PA_STREAM_START_CORKED, NULL, i == 0 ? NULL : streams[0]);

                pa_format_info_free(formats[0]);
            }

            break;
        }

        case PA_CONTEXT_TERMINATED:
            mainloop_api->quit(mainloop_api, 0);
            break;

        case PA_CONTEXT_FAILED:
        default:
            fprintf(stderr, "Context error: %s\n", pa_strerror(pa_context_errno(c)));
            fail();
    }
}
Example #4
pa_format_info* pa_format_info_from_sample_spec(pa_sample_spec *ss, pa_channel_map *map) {
    char cm[PA_CHANNEL_MAP_SNPRINT_MAX];
    pa_format_info *f;

    pa_assert(ss && pa_sample_spec_valid(ss));
    pa_assert(!map || pa_channel_map_valid(map));

    f = pa_format_info_new();
    f->encoding = PA_ENCODING_PCM;

    pa_format_info_set_sample_format(f, ss->format);
    pa_format_info_set_rate(f, ss->rate);
    pa_format_info_set_channels(f, ss->channels);

    if (map) {
        pa_channel_map_snprint(cm, sizeof(cm), map);
        pa_format_info_set_prop_string(f, PA_PROP_FORMAT_CHANNEL_MAP, cm);
    }

    return f;
}
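A hedged usage sketch of the helper above (the sample spec values and variable names are illustrative assumptions, not taken from PulseAudio itself):

/* Sketch only: turn an ordinary pa_sample_spec into a pa_format_info. */
pa_sample_spec ss;
pa_channel_map map;
pa_format_info *f;

ss.format = PA_SAMPLE_FLOAT32NE;
ss.rate = 44100;
ss.channels = 2;
pa_channel_map_init_stereo(&map);

f = pa_format_info_from_sample_spec(&ss, &map);
/* ... pass f (for example as a one-element array) to pa_stream_new_extended() ... */
pa_format_info_free(f);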
Example #5
static void context_state_cb(pa_context *c, void *userdata) {
    struct userdata *u = userdata;
    pa_assert(u);

    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_UNCONNECTED:
        case PA_CONTEXT_CONNECTING:
        case PA_CONTEXT_AUTHORIZING:
        case PA_CONTEXT_SETTING_NAME:
            break;
        case PA_CONTEXT_READY: {
            pa_proplist *proplist;
            pa_buffer_attr bufferattr;
            pa_usec_t requested_latency;
            char *username = pa_get_user_name_malloc();
            char *hostname = pa_get_host_name_malloc();
            /* TODO: old tunnel put here the remote sink_name into stream name e.g. 'Null Output for lynxis@lazus' */
            char *stream_name = pa_sprintf_malloc(_("Tunnel for %s@%s"), username, hostname);
            pa_xfree(hostname);
            pa_xfree(username);

            pa_log_debug("Connection successful. Creating stream.");
            pa_assert(!u->stream);

            proplist = tunnel_new_proplist(u);
            
            if (u->transcode.encoding != -1) {
                unsigned int n_formats = 1;
                pa_format_info *formats[1];

                formats[0] = pa_format_info_new();
                formats[0]->encoding = u->transcode.encoding;
                pa_format_info_set_sample_format(formats[0], u->sink->sample_spec.format);
                pa_format_info_set_rate(formats[0], u->sink->sample_spec.rate);
                pa_format_info_set_channels(formats[0], u->sink->sample_spec.channels);
                pa_format_info_set_channel_map(formats[0], &u->sink->channel_map);
                pa_transcode_set_format_info(&u->transcode, formats[0]);

                u->stream = pa_stream_new_extended(u->context, stream_name, formats, n_formats, proplist);
            } else {
                u->stream = pa_stream_new_with_proplist(u->context,
                                                        stream_name,
                                                        &u->sink->sample_spec,
                                                        &u->sink->channel_map,
                                                        proplist);
            }
            pa_proplist_free(proplist);
            pa_xfree(stream_name);

            if (!u->stream) {
                pa_log_error("Could not create a stream.");
                u->thread_mainloop_api->quit(u->thread_mainloop_api, TUNNEL_THREAD_FAILED_MAINLOOP);
                return;
            }

            requested_latency = pa_sink_get_requested_latency_within_thread(u->sink);
            if (requested_latency == (pa_usec_t) -1)
                requested_latency = u->sink->thread_info.max_latency;

            reset_bufferattr(&bufferattr);
            bufferattr.tlength = pa_usec_to_bytes(requested_latency, &u->sink->sample_spec);

            pa_stream_set_state_callback(u->stream, stream_state_cb, userdata);
            pa_stream_set_buffer_attr_callback(u->stream, stream_changed_buffer_attr_cb, userdata);
            if (pa_stream_connect_playback(u->stream,
                                           u->remote_sink_name,
                                           &bufferattr,
                                           PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_DONT_MOVE | PA_STREAM_START_CORKED | PA_STREAM_AUTO_TIMING_UPDATE,
                                           NULL,
                                           NULL) < 0) {
                pa_log_error("Could not connect stream.");
                u->thread_mainloop_api->quit(u->thread_mainloop_api, TUNNEL_THREAD_FAILED_MAINLOOP);
            }
            u->connected = true;
            break;
        }
        case PA_CONTEXT_FAILED:
            pa_log_debug("Context failed: %s.", pa_strerror(pa_context_errno(u->context)));
            u->connected = false;
            u->thread_mainloop_api->quit(u->thread_mainloop_api, TUNNEL_THREAD_FAILED_MAINLOOP);
            break;
        case PA_CONTEXT_TERMINATED:
            pa_log_debug("Context terminated.");
            u->connected = false;
            u->thread_mainloop_api->quit(u->thread_mainloop_api, TUNNEL_THREAD_FAILED_MAINLOOP);
            break;
    }
}
Example #6
CPulseAEStream::CPulseAEStream(pa_context *context, pa_threaded_mainloop *mainLoop, enum AEDataFormat format, unsigned int sampleRate, CAEChannelInfo channelLayout, unsigned int options) : m_fader(this)
{
  ASSERT(channelLayout.Count());
  m_Destroyed = false;
  m_Initialized = false;
  m_Paused = false;

  m_Stream = NULL;
  m_Context = context;
  m_MainLoop = mainLoop;

  m_format = format;
  m_sampleRate = sampleRate;
  m_channelLayout = channelLayout;
  m_options = options;

  m_DrainOperation = NULL;
  m_slave = NULL;

  pa_threaded_mainloop_lock(m_MainLoop);

  m_SampleSpec.channels = channelLayout.Count();
  m_SampleSpec.rate = m_sampleRate;

  switch (m_format)
  {
    case AE_FMT_U8    : m_SampleSpec.format = PA_SAMPLE_U8; break;
    case AE_FMT_S16NE : m_SampleSpec.format = PA_SAMPLE_S16NE; break;
    case AE_FMT_S16LE : m_SampleSpec.format = PA_SAMPLE_S16LE; break;
    case AE_FMT_S16BE : m_SampleSpec.format = PA_SAMPLE_S16BE; break;
    case AE_FMT_S24NE3: m_SampleSpec.format = PA_SAMPLE_S24NE; break;
    case AE_FMT_S24NE4: m_SampleSpec.format = PA_SAMPLE_S24_32NE; break;
    case AE_FMT_S32NE : m_SampleSpec.format = PA_SAMPLE_S32NE; break;
    case AE_FMT_S32LE : m_SampleSpec.format = PA_SAMPLE_S32LE; break;
    case AE_FMT_S32BE : m_SampleSpec.format = PA_SAMPLE_S32BE; break;
    case AE_FMT_FLOAT : m_SampleSpec.format = PA_SAMPLE_FLOAT32NE; break;
#if PA_CHECK_VERSION(1,0,0)
    case AE_FMT_DTS   :
    case AE_FMT_EAC3  :
    case AE_FMT_AC3   : m_SampleSpec.format = PA_SAMPLE_S16NE; break;
#endif

    default:
      CLog::Log(LOGERROR, "PulseAudio: Invalid format %i", format);
      pa_threaded_mainloop_unlock(m_MainLoop);
      m_format = AE_FMT_INVALID;
      return;
  }

  if (!pa_sample_spec_valid(&m_SampleSpec))
  {
    CLog::Log(LOGERROR, "PulseAudio: Invalid sample spec");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Destroy();
    return /*false*/;
  }

  m_frameSize = pa_frame_size(&m_SampleSpec);

  struct pa_channel_map map;
  map.channels = m_channelLayout.Count();

  for (unsigned int ch = 0; ch < m_channelLayout.Count(); ++ch)
    switch(m_channelLayout[ch])
    {
      case AE_CH_NULL: break;
      case AE_CH_MAX : break;
      case AE_CH_RAW : break;
      case AE_CH_FL  : map.map[ch] = PA_CHANNEL_POSITION_FRONT_LEFT           ; break;
      case AE_CH_FR  : map.map[ch] = PA_CHANNEL_POSITION_FRONT_RIGHT          ; break;
      case AE_CH_FC  : map.map[ch] = PA_CHANNEL_POSITION_FRONT_CENTER         ; break;
      case AE_CH_BC  : map.map[ch] = PA_CHANNEL_POSITION_REAR_CENTER          ; break;
      case AE_CH_BL  : map.map[ch] = PA_CHANNEL_POSITION_REAR_LEFT            ; break;
      case AE_CH_BR  : map.map[ch] = PA_CHANNEL_POSITION_REAR_RIGHT           ; break;
      case AE_CH_LFE : map.map[ch] = PA_CHANNEL_POSITION_LFE                  ; break;
      case AE_CH_FLOC: map.map[ch] = PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER ; break;
      case AE_CH_FROC: map.map[ch] = PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER; break;
      case AE_CH_SL  : map.map[ch] = PA_CHANNEL_POSITION_SIDE_LEFT            ; break;
      case AE_CH_SR  : map.map[ch] = PA_CHANNEL_POSITION_SIDE_RIGHT           ; break;
      case AE_CH_TC  : map.map[ch] = PA_CHANNEL_POSITION_TOP_CENTER           ; break;
      case AE_CH_TFL : map.map[ch] = PA_CHANNEL_POSITION_TOP_FRONT_LEFT       ; break;
      case AE_CH_TFR : map.map[ch] = PA_CHANNEL_POSITION_TOP_FRONT_RIGHT      ; break;
      case AE_CH_TFC : map.map[ch] = PA_CHANNEL_POSITION_TOP_FRONT_CENTER     ; break;
      case AE_CH_TBL : map.map[ch] = PA_CHANNEL_POSITION_TOP_REAR_LEFT        ; break;
      case AE_CH_TBR : map.map[ch] = PA_CHANNEL_POSITION_TOP_REAR_RIGHT       ; break;
      case AE_CH_TBC : map.map[ch] = PA_CHANNEL_POSITION_TOP_REAR_CENTER      ; break;
      default: break;
    }

  m_MaxVolume     = CAEFactory::GetEngine()->GetVolume();
  m_Volume        = 1.0f;
  pa_volume_t paVolume = pa_sw_volume_from_linear((double)(m_Volume * m_MaxVolume));
  pa_cvolume_set(&m_ChVolume, m_SampleSpec.channels, paVolume);

#if PA_CHECK_VERSION(1,0,0)
  pa_format_info *info[1];
  info[0] = pa_format_info_new();
  switch(m_format)
  {
    case AE_FMT_DTS : info[0]->encoding = PA_ENCODING_DTS_IEC61937 ; break;
    case AE_FMT_EAC3: info[0]->encoding = PA_ENCODING_EAC3_IEC61937; break;
    case AE_FMT_AC3 : info[0]->encoding = PA_ENCODING_AC3_IEC61937 ; break;
    default:          info[0]->encoding = PA_ENCODING_PCM          ; break;
  }
  pa_format_info_set_rate         (info[0], m_SampleSpec.rate);
  pa_format_info_set_channels     (info[0], m_SampleSpec.channels);
  pa_format_info_set_channel_map  (info[0], &map);
  pa_format_info_set_sample_format(info[0], m_SampleSpec.format);
  m_Stream = pa_stream_new_extended(m_Context, "audio stream", info, 1, NULL);
  pa_format_info_free(info[0]);
#else
  m_Stream = pa_stream_new(m_Context, "audio stream", &m_SampleSpec, &map);
#endif

  if (m_Stream == NULL)
  {
    CLog::Log(LOGERROR, "PulseAudio: Could not create a stream");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Destroy();
    return /*false*/;
  }

  pa_stream_set_state_callback(m_Stream, CPulseAEStream::StreamStateCallback, this);
  pa_stream_set_write_callback(m_Stream, CPulseAEStream::StreamRequestCallback, this);
  pa_stream_set_latency_update_callback(m_Stream, CPulseAEStream::StreamLatencyUpdateCallback, this);
  pa_stream_set_underflow_callback(m_Stream, CPulseAEStream::StreamUnderflowCallback, this);

  int flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE;
  if (options & AESTREAM_FORCE_RESAMPLE)
    flags |= PA_STREAM_VARIABLE_RATE;

  if (pa_stream_connect_playback(m_Stream, NULL, NULL, (pa_stream_flags)flags, &m_ChVolume, NULL) < 0)
  {
    CLog::Log(LOGERROR, "PulseAudio: Failed to connect stream to output");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Destroy();
    return /*false*/;
  }

  /* Wait until the stream is ready */
  do
  {
    pa_threaded_mainloop_wait(m_MainLoop);
    CLog::Log(LOGDEBUG, "PulseAudio: Stream %s", StreamStateToString(pa_stream_get_state(m_Stream)));
  }
  while (pa_stream_get_state(m_Stream) != PA_STREAM_READY && pa_stream_get_state(m_Stream) != PA_STREAM_FAILED);

  if (pa_stream_get_state(m_Stream) == PA_STREAM_FAILED)
  {
    CLog::Log(LOGERROR, "PulseAudio: Waited for the stream but it failed");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Destroy();
    return /*false*/;
  }

  m_cacheSize = pa_stream_writable_size(m_Stream);

  pa_threaded_mainloop_unlock(m_MainLoop);

  m_Initialized = true;

  CLog::Log(LOGINFO, "PulseAEStream::Initialized");
  CLog::Log(LOGINFO, "  Sample Rate   : %d", m_sampleRate);
  CLog::Log(LOGINFO, "  Sample Format : %s", CAEUtil::DataFormatToStr(m_format));
  CLog::Log(LOGINFO, "  Channel Count : %d", m_channelLayout.Count());
  CLog::Log(LOGINFO, "  Channel Layout: %s", ((std::string)m_channelLayout).c_str());
  CLog::Log(LOGINFO, "  Frame Size    : %d", m_frameSize);
  CLog::Log(LOGINFO, "  Cache Size    : %d", m_cacheSize);

  Resume();

  return /*true*/;
}
Example #7
bool CAESinkPULSE::Initialize(AEAudioFormat &format, std::string &device)
{
  {
    CSingleLock lock(m_sec);
    m_IsAllocated = false;
  }
  m_passthrough = false;
  m_BytesPerSecond = 0;
  m_BufferSize = 0;
  m_filled_bytes = 0;
  m_lastPackageStamp = 0;
  m_Channels = 0;
  m_Stream = NULL;
  m_Context = NULL;
  m_periodSize = 0;

  if (!SetupContext(NULL, &m_Context, &m_MainLoop))
  {
    CLog::Log(LOGNOTICE, "PulseAudio might not be running. Context was not created.");
    Deinitialize();
    return false;
  }

  pa_threaded_mainloop_lock(m_MainLoop);

  struct pa_channel_map map;
  pa_channel_map_init(&map);

  // PULSE cannot cope with e.g. planar formats, so we fall back to FLOAT
  // when we receive an invalid pulse format
  if (AEFormatToPulseFormat(format.m_dataFormat) == PA_SAMPLE_INVALID)
  {
    CLog::Log(LOGDEBUG, "PULSE does not support format: %s - will fallback to AE_FMT_FLOAT", CAEUtil::DataFormatToStr(format.m_dataFormat));
    format.m_dataFormat = AE_FMT_FLOAT;
  }

  m_passthrough = AE_IS_RAW(format.m_dataFormat);

  if(m_passthrough)
  {
    map.channels = 2;
    format.m_channelLayout = AE_CH_LAYOUT_2_0;
  }
  else
  {
    map = AEChannelMapToPAChannel(format.m_channelLayout);
    // if count has changed we need to fit the AE Map
    if(map.channels != format.m_channelLayout.Count())
      format.m_channelLayout = PAChannelToAEChannelMap(map);
  }
  m_Channels = format.m_channelLayout.Count();

  // store information about current sink
  SinkInfoStruct sinkStruct;
  sinkStruct.mainloop = m_MainLoop;
  sinkStruct.device_found = false;

  // get real sample rate of the device we want to open - to avoid resampling
  bool isDefaultDevice = (device == "Default");
  WaitForOperation(pa_context_get_sink_info_by_name(m_Context, isDefaultDevice ? NULL : device.c_str(), SinkInfoCallback, &sinkStruct), m_MainLoop, "Get Sink Info");
  // only check if the device is existing - don't alter the sample rate
  if (!sinkStruct.device_found)
  {
    CLog::Log(LOGERROR, "PulseAudio: Sink %s not found", device.c_str());
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  // Pulse can resample everything between 1 hz and 192000 hz
  // Make sure we are in the range that we originally added
  format.m_sampleRate = std::max(5512U, std::min(format.m_sampleRate, 192000U));

  pa_format_info *info[1];
  info[0] = pa_format_info_new();
  info[0]->encoding = AEFormatToPulseEncoding(format.m_dataFormat);
  if(!m_passthrough)
  {
    pa_format_info_set_sample_format(info[0], AEFormatToPulseFormat(format.m_dataFormat));
    pa_format_info_set_channel_map(info[0], &map);
  }
  pa_format_info_set_channels(info[0], m_Channels);

  // PA requires m_encodedRate in order to do EAC3
  unsigned int samplerate = format.m_sampleRate;
  if (m_passthrough && (AEFormatToPulseEncoding(format.m_dataFormat) == PA_ENCODING_EAC3_IEC61937))
  {
    // this is only used internally for PA to use EAC3
    samplerate = format.m_encodedRate;
  }

  pa_format_info_set_rate(info[0], samplerate);

  if (!pa_format_info_valid(info[0]))
  {
    CLog::Log(LOGERROR, "PulseAudio: Invalid format info");
    pa_format_info_free(info[0]);
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  pa_sample_spec spec;
  #if PA_CHECK_VERSION(2,0,0)
    pa_format_info_to_sample_spec(info[0], &spec, NULL);
  #else
    spec.rate = (AEFormatToPulseEncoding(format.m_dataFormat) == PA_ENCODING_EAC3_IEC61937) ? 4 * samplerate : samplerate;
    spec.format = AEFormatToPulseFormat(format.m_dataFormat);
    spec.channels = m_Channels;
  #endif
  if (!pa_sample_spec_valid(&spec))
  {
    CLog::Log(LOGERROR, "PulseAudio: Invalid sample spec");
    pa_format_info_free(info[0]);
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  m_BytesPerSecond = pa_bytes_per_second(&spec);
  unsigned int frameSize = pa_frame_size(&spec);

  m_Stream = pa_stream_new_extended(m_Context, "kodi audio stream", info, 1, NULL);
  pa_format_info_free(info[0]);

  if (m_Stream == NULL)
  {
    CLog::Log(LOGERROR, "PulseAudio: Could not create a stream");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  pa_stream_set_state_callback(m_Stream, StreamStateCallback, m_MainLoop);
  pa_stream_set_write_callback(m_Stream, StreamRequestCallback, m_MainLoop);
  pa_stream_set_latency_update_callback(m_Stream, StreamLatencyUpdateCallback, m_MainLoop);

  // default buffer construction
  // align with AE's max buffer
  unsigned int latency = m_BytesPerSecond / 2.5; // 400 ms
  unsigned int process_time = latency / 4; // 100 ms
  if(sinkStruct.isHWDevice)
  {
    // on hw devices buffers can be further reduced
    // 200ms max latency
    // 50ms min packet size
    latency = m_BytesPerSecond / 5;
    process_time = latency / 4;
  }

  pa_buffer_attr buffer_attr;
  buffer_attr.fragsize = latency;
  buffer_attr.maxlength = (uint32_t) -1;
  buffer_attr.minreq = process_time;
  buffer_attr.prebuf = (uint32_t) -1;
  buffer_attr.tlength = latency;

  if (pa_stream_connect_playback(m_Stream, isDefaultDevice ? NULL : device.c_str(), &buffer_attr, ((pa_stream_flags)(PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_ADJUST_LATENCY)), NULL, NULL) < 0)
  {
    CLog::Log(LOGERROR, "PulseAudio: Failed to connect stream to output");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  /* Wait until the stream is ready */
  do
  {
    pa_threaded_mainloop_wait(m_MainLoop);
    CLog::Log(LOGDEBUG, "PulseAudio: Stream %s", StreamStateToString(pa_stream_get_state(m_Stream)));
  }
  while (pa_stream_get_state(m_Stream) != PA_STREAM_READY && pa_stream_get_state(m_Stream) != PA_STREAM_FAILED);

  if (pa_stream_get_state(m_Stream) == PA_STREAM_FAILED)
  {
    CLog::Log(LOGERROR, "PulseAudio: Waited for the stream but it failed");
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }

  const pa_buffer_attr *a;

  if (!(a = pa_stream_get_buffer_attr(m_Stream)))
  {
    CLog::Log(LOGERROR, "PulseAudio: %s", pa_strerror(pa_context_errno(m_Context)));
    pa_threaded_mainloop_unlock(m_MainLoop);
    Deinitialize();
    return false;
  }
  else
  {
    unsigned int packetSize = a->minreq;
    m_BufferSize = a->tlength;
    m_periodSize = a->minreq;

    format.m_frames = packetSize / frameSize;
  }

  {
    CSingleLock lock(m_sec);
    // Register Callback for Sink changes
    pa_context_set_subscribe_callback(m_Context, SinkChangedCallback, this);
    const pa_subscription_mask_t mask = PA_SUBSCRIPTION_MASK_SINK;
    pa_operation *op = pa_context_subscribe(m_Context, mask, NULL, this);
    if (op != NULL)
      pa_operation_unref(op);

    // Register Callback for Sink Info changes - this handles volume
    pa_context_set_subscribe_callback(m_Context, SinkInputInfoChangedCallback, this);
    const pa_subscription_mask_t mask_input = PA_SUBSCRIPTION_MASK_SINK_INPUT;
    pa_operation* op_sinfo = pa_context_subscribe(m_Context, mask_input, NULL, this);
    if (op_sinfo != NULL)
      pa_operation_unref(op_sinfo);
  }

  pa_threaded_mainloop_unlock(m_MainLoop);
  
  format.m_frameSize = frameSize;
  format.m_frameSamples = format.m_frames * format.m_channelLayout.Count();
  m_format = format;
  format.m_dataFormat = m_passthrough ? AE_FMT_S16NE : format.m_dataFormat;

  CLog::Log(LOGNOTICE, "PulseAudio: Opened device %s in %s mode with Buffersize %u ms",
                      device.c_str(), m_passthrough ? "passthrough" : "pcm",
                      (unsigned int) ((m_BufferSize / (float) m_BytesPerSecond) * 1000));

  // Cork stream will resume when adding first package
  Pause(true);
  {
    CSingleLock lock(m_sec);
    m_IsAllocated = true;
  }

  return true;
}
Example #8
static int
pulseaudio_check_passthru(audio_decoder_t *ad, int codec_id)
{
  decoder_t *d = (decoder_t *)ad;

  int e;

  switch(codec_id) {
  case AV_CODEC_ID_DTS:
    e = PA_ENCODING_DTS_IEC61937;
    break;
  case AV_CODEC_ID_AC3:
    e = PA_ENCODING_AC3_IEC61937;
    break;
  case AV_CODEC_ID_EAC3:
    e = PA_ENCODING_EAC3_IEC61937;
    break;
#if 0
  case AV_CODEC_ID_MP1:
  case AV_CODEC_ID_MP2:
  case AV_CODEC_ID_MP3:
    e = PA_ENCODING_MPEG_IEC61937;
    break;
#endif
  default:
    return 0;
  }

  pa_threaded_mainloop_lock(mainloop);

  if(pulseaudio_make_context_ready()) {
    pa_threaded_mainloop_unlock(mainloop);
    return 0;
  }

  if(d->s) {
    pa_stream_disconnect(d->s);
    pa_stream_unref(d->s);
  }

  pa_format_info *vec[1];

  vec[0] = pa_format_info_new();

  vec[0]->encoding = e;
  pa_format_info_set_rate(vec[0], 48000);
  pa_format_info_set_channels(vec[0], 2);

  d->s = pa_stream_new_extended(ctx, "Showtime playback", vec, 1, NULL);
  int flags = 0;

  pa_stream_set_state_callback(d->s, stream_state_callback, d);
  pa_stream_set_write_callback(d->s, stream_write_callback, d);

  flags |= PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_INTERPOLATE_TIMING | 
    PA_STREAM_NOT_MONOTONIC;

  pa_stream_connect_playback(d->s, NULL, NULL, flags, NULL, NULL);

  while(1) {
    switch(pa_stream_get_state(d->s)) {
    case PA_STREAM_UNCONNECTED:
    case PA_STREAM_CREATING:
      pa_threaded_mainloop_wait(mainloop);
      continue;

    case PA_STREAM_READY:
      pa_threaded_mainloop_unlock(mainloop);
      ad->ad_tile_size = 512;
      d->ss = *pa_stream_get_sample_spec(d->s);
      return 1;

    case PA_STREAM_TERMINATED:
    case PA_STREAM_FAILED:
      pa_threaded_mainloop_unlock(mainloop);
      return 0;
    }
  }
}
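Once a stream created with pa_stream_new_extended() reaches PA_STREAM_READY, the format the server actually negotiated can be inspected. A minimal sketch, assuming a ready stream s; pa_stream_get_format_info() and pa_format_info_snprint() are standard libpulse calls, though none of the examples above happen to use them:

/* Sketch only: log the format the server accepted for a ready stream. */
const pa_format_info *negotiated = pa_stream_get_format_info(s);
if (negotiated) {
    char buf[PA_FORMAT_INFO_SNPRINT_MAX];
    fprintf(stderr, "Negotiated format: %s\n",
            pa_format_info_snprint(buf, sizeof(buf), negotiated));
}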