Example #1
/* Build a newly allocated, human-readable summary of a PulseAudio context.
 * The caller must free the returned string with g_free(). */
char* context_info_str(pa_context* c)
{
    return g_strdup_printf(
            "Server String: %s\n"
            "Library Protocol Version: %u\n"
            "Server Protocol Version: %u\n"
            "Is Local: %s\n"
            "Client Index: %u\n"
            "Tile Size: %zu",
            pa_context_get_server(c),
            pa_context_get_protocol_version(c),
            pa_context_get_server_protocol_version(c),
            pa_context_is_local(c) ? "yes" : "no",
            pa_context_get_index(c),
            pa_context_get_tile_size(c, NULL));
}
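
Assuming the surrounding setup is in place, a minimal sketch of how context_info_str() might be consumed: a context state callback (the name context_state_cb and the registration via pa_context_set_state_callback() are assumptions, not part of the example above) prints the summary once the context reaches PA_CONTEXT_READY and frees the string afterwards.

#include <glib.h>
#include <pulse/pulseaudio.h>

/* Hypothetical callback; assumed to be registered elsewhere with
 * pa_context_set_state_callback(c, context_state_cb, NULL). */
static void context_state_cb(pa_context *c, void *userdata)
{
  if(pa_context_get_state(c) == PA_CONTEXT_READY) {
    char *info = context_info_str(c);   /* allocated by g_strdup_printf() */
    g_print("%s\n", info);
    g_free(info);
  }
}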
Example #2
/* Reconfigure the PulseAudio playback stream to match the decoder's current
 * sample format, sample rate and channel layout.  Returns 0 on success and
 * a non-zero value if the context or stream could not be set up. */
static int
pulseaudio_audio_reconfig(audio_decoder_t *ad)
{
  decoder_t *d = (decoder_t *)ad;
  int i;

  pa_threaded_mainloop_lock(mainloop);

  if(pulseaudio_make_context_ready()) {
    pa_threaded_mainloop_unlock(mainloop);
    return -1;
  }

  /* Tear down any stream created by a previous configuration */
  if(d->s) {
    pa_stream_disconnect(d->s);
    pa_stream_unref(d->s);
  }

  pa_channel_map map;


  ad->ad_out_sample_rate = ad->ad_in_sample_rate;
  d->ss.rate = ad->ad_in_sample_rate;
  
  switch(ad->ad_in_sample_format) {
  case AV_SAMPLE_FMT_S32:
  case AV_SAMPLE_FMT_S32P:
    ad->ad_out_sample_format = AV_SAMPLE_FMT_S32;
    d->ss.format = PA_SAMPLE_S32NE;
    d->framesize = sizeof(int32_t);
    break;

  case AV_SAMPLE_FMT_S16:
  case AV_SAMPLE_FMT_S16P:
    ad->ad_out_sample_format = AV_SAMPLE_FMT_S16;
    d->ss.format = PA_SAMPLE_S16NE;
    d->framesize = sizeof(int16_t);
    break;

  default:
    ad->ad_out_sample_format = AV_SAMPLE_FMT_FLT;
    d->ss.format = PA_SAMPLE_FLOAT32NE;
    d->framesize = sizeof(float);
    break;
  }

  switch(ad->ad_in_channel_layout) {
  case AV_CH_LAYOUT_MONO:
    d->ss.channels = 1;
    ad->ad_out_channel_layout = AV_CH_LAYOUT_MONO;
    pa_channel_map_init_mono(&map);
    break;


  case AV_CH_LAYOUT_STEREO:
    d->ss.channels = 2;
    ad->ad_out_channel_layout = AV_CH_LAYOUT_STEREO;
    pa_channel_map_init_stereo(&map);
    break;

  default:
    /* Build the channel map from the individual AV channel-mask bits */
    ad->ad_out_channel_layout = 0;
    pa_channel_map_init(&map);
    for(i = 0; i < sizeof(av2pa_map) / sizeof(av2pa_map[0]); i++) {
      if(ad->ad_in_channel_layout & av2pa_map[i].avmask) {
	ad->ad_out_channel_layout |= av2pa_map[i].avmask;
	map.map[map.channels++] = av2pa_map[i].papos;
      }
    }
    d->ss.channels = map.channels;
    break;
  }

  d->framesize *= d->ss.channels;

  ad->ad_tile_size = pa_context_get_tile_size(ctx, &d->ss) / d->framesize;

  char buf[100];
  char buf2[PA_CHANNEL_MAP_SNPRINT_MAX];
  TRACE(TRACE_DEBUG, "PA", "Created stream %s [%s] (tilesize=%d)",
	pa_sample_spec_snprint(buf, sizeof(buf), &d->ss),
	pa_channel_map_snprint(buf2, sizeof(buf2), &map),
	ad->ad_tile_size);

#if PA_API_VERSION >= 12
  pa_proplist *pl = pa_proplist_new();
  media_pipe_t *mp = ad->ad_mp;
  if(mp->mp_flags & MP_VIDEO)
    pa_proplist_sets(pl, PA_PROP_MEDIA_ROLE, "video");
  else
    pa_proplist_sets(pl, PA_PROP_MEDIA_ROLE, "music");

  d->s = pa_stream_new_with_proplist(ctx, "Showtime playback", 
				     &d->ss, &map, pl);
  pa_proplist_free(pl);

#else
  d->s = pa_stream_new(ctx, "Showtime playback", &d->ss, &map);
#endif
 
  int flags = 0;

  pa_stream_set_state_callback(d->s, stream_state_callback, d);
  pa_stream_set_write_callback(d->s, stream_write_callback, d);

  flags |= PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_INTERPOLATE_TIMING;

  pa_stream_connect_playback(d->s, NULL, NULL, flags, NULL, NULL);

  while(1) {
    switch(pa_stream_get_state(d->s)) {
    case PA_STREAM_UNCONNECTED:
    case PA_STREAM_CREATING:
      pa_threaded_mainloop_wait(mainloop);
      continue;

    case PA_STREAM_READY:
      pa_threaded_mainloop_unlock(mainloop);
      return 0;

    case PA_STREAM_TERMINATED:
    case PA_STREAM_FAILED:
      pa_threaded_mainloop_unlock(mainloop);
      return 1;
    }
  }
}
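
Example #2 iterates over an av2pa_map table that is not shown in the listing. A plausible sketch of its shape, pairing libavutil channel-mask bits with PulseAudio channel positions, is given below; the exact entries and ordering in the original project may differ.

#include <stdint.h>
#include <libavutil/channel_layout.h>
#include <pulse/channelmap.h>

/* Assumed shape of the av2pa_map table referenced in Example #2 */
static const struct {
  uint64_t avmask;               /* AV_CH_* bit tested against the input layout */
  pa_channel_position_t papos;   /* matching PulseAudio channel position */
} av2pa_map[] = {
  { AV_CH_FRONT_LEFT,    PA_CHANNEL_POSITION_FRONT_LEFT   },
  { AV_CH_FRONT_RIGHT,   PA_CHANNEL_POSITION_FRONT_RIGHT  },
  { AV_CH_FRONT_CENTER,  PA_CHANNEL_POSITION_FRONT_CENTER },
  { AV_CH_LOW_FREQUENCY, PA_CHANNEL_POSITION_LFE          },
  { AV_CH_BACK_LEFT,     PA_CHANNEL_POSITION_REAR_LEFT    },
  { AV_CH_BACK_RIGHT,    PA_CHANNEL_POSITION_REAR_RIGHT   },
};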