Example #1
int main(int argc, char *argv[]) {
    pa_mainloop* m = NULL;
    int i, ret = 0;

    for (i = 0; i < SAMPLE_HZ; i++)
        data[i] = (float) sin(((double) i/SAMPLE_HZ)*2*M_PI*SINE_HZ)/2;

    for (i = 0; i < NSTREAMS; i++)
        streams[i] = NULL;

    /* Set up a new main loop */
    m = pa_mainloop_new();
    assert(m);

    mainloop_api = pa_mainloop_get_api(m);

    context = pa_context_new(mainloop_api, argv[0]);
    assert(context);

    pa_context_set_state_callback(context, context_state_callback, NULL);

    /* Connect the context */
    if (pa_context_connect(context, NULL, 0, NULL) < 0) {
        fprintf(stderr, "pa_context_connect() failed.\n");
        goto quit;
    }

    if (pa_mainloop_run(m, &ret) < 0)
        fprintf(stderr, "pa_mainloop_run() failed.\n");

quit:
    pa_context_unref(context);

    for (i = 0; i < NSTREAMS; i++)
        if (streams[i])
            pa_stream_unref(streams[i]);

    pa_mainloop_free(m);

    return ret;
}
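Example #1 registers a context_state_callback that is not shown above. The sketch below is a minimal, assumed version: it reuses the globals from main() (context, streams, mainloop_api, NSTREAMS, SAMPLE_HZ) and a hypothetical stream_write_callback that would feed the precomputed sine buffer.

static void context_state_callback(pa_context *c, void *userdata) {
    int i;

    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_READY: {
            /* Assumed sample spec matching the float sine buffer filled in main() */
            static const pa_sample_spec ss = {
                .format = PA_SAMPLE_FLOAT32NE,
                .rate = SAMPLE_HZ,
                .channels = 1
            };

            /* Create and connect one playback stream per slot */
            for (i = 0; i < NSTREAMS; i++) {
                char name[64];
                snprintf(name, sizeof(name), "stream #%i", i);

                streams[i] = pa_stream_new(c, name, &ss, NULL);
                assert(streams[i]);

                pa_stream_set_write_callback(streams[i], stream_write_callback, NULL);
                pa_stream_connect_playback(streams[i], NULL, NULL, 0, NULL, NULL);
            }
            break;
        }

        case PA_CONTEXT_FAILED:
        case PA_CONTEXT_TERMINATED:
            fprintf(stderr, "Context failed or terminated.\n");
            mainloop_api->quit(mainloop_api, 1);
            break;

        default:
            break;
    }
}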
Example #2
/*
 * Create a new pulse audio main loop and connect to the server
 *
 * Returns a negative value on error
 */
static int pulse_connect(struct pulse_data *data)
{
	data->mainloop = pa_mainloop_new();
	if (!data->mainloop) {
		blog(LOG_ERROR, "pulse-input: Unable to create main loop");
		return -1;
	}

	data->context = pa_context_new_with_proplist(
		pa_mainloop_get_api(data->mainloop), "OBS Studio", data->props);
	if (!data->context) {
		blog(LOG_ERROR, "pulse-input: Unable to create context");
		return -1;
	}

	int status = pa_context_connect(
		data->context, NULL, PA_CONTEXT_NOAUTOSPAWN, NULL);
	if (status < 0) {
		blog(LOG_ERROR, "pulse-input: Unable to connect! Status: %d",
		     status);
		return -1;
	}

	// wait until connected
	for (;;) {
		pulse_iterate(data);
		pa_context_state_t state = pa_context_get_state(data->context);
		if (state == PA_CONTEXT_READY) {
			blog(LOG_DEBUG, "pulse-input: Context ready");
			break;
		}
		if (!PA_CONTEXT_IS_GOOD(state)) {
			blog(LOG_ERROR, "pulse-input: Context connect failed");
			return -1;
		}
	}

	return 0;
}
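pulse_iterate() is referenced but not defined in this snippet. One plausible implementation, given the plain pa_mainloop created above (the real OBS helper differs in detail), is a single blocking mainloop iteration:

static int pulse_iterate(struct pulse_data *data)
{
	/* Run one blocking iteration so pending callbacks, including the
	 * context state changes polled for above, get dispatched. */
	if (pa_mainloop_iterate(data->mainloop, 1, NULL) < 0) {
		blog(LOG_ERROR, "pulse-input: Mainloop iteration failed");
		return -1;
	}
	return 0;
}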
Example #3
audio_class_t *
audio_driver_init(void)
{
  TRACE(TRACE_DEBUG, "PA", "Headerversion: %s, library: %s",
	pa_get_headers_version(), pa_get_library_version());

  mainloop = pa_threaded_mainloop_new();
  api = pa_threaded_mainloop_get_api(mainloop);

  pa_threaded_mainloop_lock(mainloop);
  pa_threaded_mainloop_start(mainloop);

#if PA_API_VERSION >= 12
  pa_proplist *pl = pa_proplist_new();

  pa_proplist_sets(pl, PA_PROP_APPLICATION_ID, "com.lonelycoder.hts.showtime");
  pa_proplist_sets(pl, PA_PROP_APPLICATION_NAME, "Showtime");
  
  /* Create a new connection context */
  ctx = pa_context_new_with_proplist(api, "Showtime", pl);
  pa_proplist_free(pl);
#else
  ctx = pa_context_new(api, "Showtime");
#endif

  pa_context_set_state_callback(ctx, context_state_callback, NULL);

  /* Connect the context */
  if(pa_context_connect(ctx, NULL, 0, NULL) < 0) {
    TRACE(TRACE_ERROR, "PA", "pa_context_connect() failed: %s",
	  pa_strerror(pa_context_errno(ctx)));
    pa_threaded_mainloop_unlock(mainloop);
    return NULL;
  }

  pa_threaded_mainloop_unlock(mainloop);

  return &pulseaudio_audio_class;
}
Example #4
void executeRequest( StateData* stateData )
{
	// Create mainloop
	pa_mainloop* mainLoop = pa_mainloop_new();
	// Create mainloop API
	pa_mainloop_api* mainLoopApi = pa_mainloop_get_api( mainLoop );
	// Create context
	pa_context* context = pa_context_new( mainLoopApi, "cinder-requests" );
	// Set context state callback
	stateData->mainLoopApi = mainLoopApi;
	pa_context_set_state_callback( context, processContextState, static_cast<void*>( stateData ) );
	// Connect context
	if( pa_context_connect( context, nullptr, PA_CONTEXT_NOFLAGS, nullptr ) >= 0 ) {
		// Run mainloop
		int result = 0;
		if( pa_mainloop_run( mainLoop, &result ) < 0 ) {
			// Handle error
		}
	}
	pa_context_unref( context );
	pa_mainloop_free( mainLoop );
}
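processContextState() is not part of this snippet. A rough, hypothetical sketch is shown below: on failure it uses the mainloop API stored in StateData to stop pa_mainloop_run(), so executeRequest() can clean up; the actual request handling on PA_CONTEXT_READY is omitted.

void processContextState( pa_context* context, void* userData )
{
	StateData* stateData = static_cast<StateData*>( userData );
	switch( pa_context_get_state( context ) ) {
		case PA_CONTEXT_READY:
			// Kick off the actual requests here (omitted)
			break;
		case PA_CONTEXT_FAILED:
		case PA_CONTEXT_TERMINATED:
			// Stop pa_mainloop_run() so executeRequest() can unref and free
			stateData->mainLoopApi->quit( stateData->mainLoopApi, -1 );
			break;
		default:
			break;
	}
}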
Example #5
double get_db()
{
    pa_glib_mainloop *pa_ml = NULL;
    pa_mainloop_api *pa_mlapi = NULL;
    //pa_operation *pa_op = NULL;
    pa_context *pa_ctx = NULL;

    GMainContext *mctx = NULL;

    pa_ml = pa_glib_mainloop_new(g_main_context_default());
    pa_mlapi = pa_glib_mainloop_get_api(pa_ml);
    pa_ctx = pa_context_new(pa_mlapi, "deepin");

    pa_context_connect(pa_ctx, NULL, 0, NULL);
    pa_context_set_state_callback(pa_ctx, pa_state_cb, NULL);

    mctx = g_main_context_default();
    mainloop = g_main_loop_new(mctx, FALSE);
    g_main_loop_run(mainloop);
    
    return ret;
}
Example #6
int pulse_init(struct pulseaudio_t *pulse) {
    enum pa_context_state state = PA_CONTEXT_CONNECTING;

    pulse->mainloop = pa_mainloop_new();
    pulse->cxt = pa_context_new(pa_mainloop_get_api(pulse->mainloop), "pavol");
    pulse->default_sink = NULL;
    pulse->muted = 0;
    pulse->volume = 0;

    pa_context_set_state_callback(pulse->cxt, pulse_connect_state_cb, &state);
    pa_context_connect(pulse->cxt, NULL, PA_CONTEXT_NOFLAGS, NULL);
    while (state != PA_CONTEXT_READY && state != PA_CONTEXT_FAILED)
        pa_mainloop_iterate(pulse->mainloop, 1, NULL);

    if (state != PA_CONTEXT_READY) {
        fprintf(stderr, "failed to connect to pulse daemon: %s\n",
                pa_strerror(pa_context_errno(pulse->cxt)));
        return 1;
    }

    return 0;
}
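The pulse_connect_state_cb registered above is not shown. A minimal, assumed version just mirrors the current context state into the local variable that the pa_mainloop_iterate() loop polls:

static void pulse_connect_state_cb(pa_context *cxt, void *userdata) {
    enum pa_context_state *state = userdata;
    *state = pa_context_get_state(cxt);
}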
Example #7
SwfdecPlayback *
swfdec_playback_open (SwfdecPlayer *player, GMainContext *context)
{
  SwfdecPlayback *sound;
  const GList *walk;
  pa_mainloop_api *pa_api;

  g_return_val_if_fail (SWFDEC_IS_PLAYER (player), NULL);
  g_return_val_if_fail (context != NULL, NULL);

  sound = g_new0 (SwfdecPlayback, 1);
  sound->player = player;
  g_signal_connect (player, "advance", G_CALLBACK (advance_before), sound);
  g_signal_connect (player, "audio-added", G_CALLBACK (audio_added), sound);
  g_signal_connect (player, "audio-removed", G_CALLBACK (audio_removed), sound);

  /* Create our mainloop attachment to glib.  XXX: I hope this means we don't
   * have to run the main loop using pa functions.
   */
  sound->pa_mainloop = pa_glib_mainloop_new (context);
  pa_api = pa_glib_mainloop_get_api (sound->pa_mainloop);

  sound->pa = pa_context_new (pa_api, "swfdec");

  pa_context_set_state_callback (sound->pa, context_state_callback, sound);
  pa_context_connect (sound->pa,
		      NULL, /* default server */
		      0, /* default flags */
		      NULL /* spawning api */
		      );

  for (walk = swfdec_player_get_audio (player); walk; walk = walk->next) {
    swfdec_playback_stream_open (sound, walk->data);
  }
  g_main_context_ref (context);
  sound->context = context;
  return sound;
}
Example #8
int context_create(){
    if(context != NULL){
        return 0;
    }
    connected = false;
    failed = false;

    threaded_main_loop = pa_threaded_mainloop_new();
    pa_mainloop_api *main_loop_api = pa_threaded_mainloop_get_api(threaded_main_loop);
    context =  pa_context_new(main_loop_api, "vumeter");

    pa_context_set_state_callback(context, context_state_callback, NULL);

    if(pa_context_connect(context, NULL, (pa_context_flags_t) PA_CONTEXT_NOAUTOSPAWN, NULL)){
        qDebug("Error connecting to server...");
        pa_threaded_mainloop_stop(threaded_main_loop);
        pa_threaded_mainloop_free(threaded_main_loop);
        return -1;
    }

    pa_threaded_mainloop_start(threaded_main_loop);

    pa_threaded_mainloop_lock(threaded_main_loop);
    while ((! connected) && (! failed)) {
        qDebug("Waiting until connection is established...");
        pa_threaded_mainloop_wait(threaded_main_loop);
        qDebug("Received signal...");
    }
    pa_threaded_mainloop_unlock(threaded_main_loop);

    if(failed){
        pa_context_unref(context);
        context = NULL;
        return -2;
    }

    return 0;
}
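The context_state_callback used here is not included in the snippet. A minimal sketch, assuming the same globals (connected, failed, threaded_main_loop): it flips the flags and signals the threaded mainloop so the wait loop above wakes up.

static void context_state_callback(pa_context *c, void *userdata) {
    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_READY:
            connected = true;
            pa_threaded_mainloop_signal(threaded_main_loop, 0);
            break;
        case PA_CONTEXT_FAILED:
        case PA_CONTEXT_TERMINATED:
            failed = true;
            pa_threaded_mainloop_signal(threaded_main_loop, 0);
            break;
        default:
            break;
    }
}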
Example #9
int main(int argc, char *argv[]) {

    // Define our pulse audio loop and connection variables
    pa_mainloop *pa_ml;
    pa_mainloop_api *pa_mlapi;

    // Create a mainloop API and connection to the default server
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(pa_ml);
    context = pa_context_new(pa_mlapi, "Device list");

    // This function connects to the pulse server
    pa_context_connect(context, NULL, 0, NULL);

    // This function defines a callback so the server will tell us its state.
    pa_context_set_state_callback(context, context_state_cb, NULL);

    if (pa_mainloop_run(pa_ml, &ret) < 0) {
        printf("pa_mainloop_run() failed.\n");
        exit(1);
    }
    printf("func = %s , LINE = %d \n", __func__, __LINE__);
}
Example #10
int
cubeb_init(cubeb ** context, char const * context_name)
{
  cubeb * ctx;

  ctx = calloc(1, sizeof(*ctx));

  ctx->mainloop = pa_threaded_mainloop_new();
  ctx->context = pa_context_new(pa_threaded_mainloop_get_api(ctx->mainloop), context_name);

  pa_context_set_state_callback(ctx->context, context_state_callback, ctx->mainloop);
  pa_threaded_mainloop_start(ctx->mainloop);

  pa_threaded_mainloop_lock(ctx->mainloop);
  pa_context_connect(ctx->context, NULL, 0, NULL);
  pa_threaded_mainloop_unlock(ctx->mainloop);

  state_wait(ctx, PA_CONTEXT_READY);

  *context = ctx;

  return CUBEB_OK;
}
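state_wait() and context_state_callback are not shown above. A rough sketch of state_wait() under the usual pattern (the state callback is assumed to call pa_threaded_mainloop_signal() on every state change):

static int state_wait(cubeb * ctx, pa_context_state_t target_state)
{
  pa_threaded_mainloop_lock(ctx->mainloop);
  for (;;) {
    pa_context_state_t state = pa_context_get_state(ctx->context);
    if (!PA_CONTEXT_IS_GOOD(state)) {
      /* connection failed or was terminated */
      pa_threaded_mainloop_unlock(ctx->mainloop);
      return 0;
    }
    if (state == target_state)
      break;
    pa_threaded_mainloop_wait(ctx->mainloop);
  }
  pa_threaded_mainloop_unlock(ctx->mainloop);
  return 1;
}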
Example #11
static gboolean enumerate_audio_source_devices(GClosure *callback)
{
    AudioListContext *context;

    context = g_slice_new0(AudioListContext);

    context->callback = callback;

    context->mainloop = pa_glib_mainloop_new(_owr_get_main_context());

    if (!context->mainloop) {
        g_warning("PulseAudio: failed to create glib mainloop");
        goto cleanup;
    }

    context->pa_context = pa_context_new(pa_glib_mainloop_get_api(context->mainloop), "Owr");

    if (!context->pa_context) {
        g_warning("PulseAudio: failed to create context");
        goto cleanup_mainloop;
    }

    pa_context_set_state_callback(context->pa_context,
        (pa_context_notify_cb_t) on_pulse_context_state_change, context);
    pa_context_connect(context->pa_context, NULL, 0, NULL);

done:
    return FALSE;

cleanup_mainloop:
    pa_glib_mainloop_free(context->mainloop);

cleanup:
    finish_pa_list(context);

    goto done;
}
Example #12
File: te.c Project: ITikhonov/tem
void audio_init() {
	pa_threaded_mainloop *pa_ml=pa_threaded_mainloop_new();
	pa_mainloop_api *pa_mlapi=pa_threaded_mainloop_get_api(pa_ml);
	pa_context *pa_ctx=pa_context_new(pa_mlapi, "te");
	pa_context_connect(pa_ctx, NULL, 0, NULL);
	int pa_ready = 0;
	pa_context_set_state_callback(pa_ctx, pa_state_cb, &pa_ready);

	pa_threaded_mainloop_start(pa_ml);
	while(pa_ready==0) { ; }	/* busy-wait until pa_state_cb() flips pa_ready (1 = ready, 2 = failed) */

	printf("audio ready\n");

	if (pa_ready == 2) {
		pa_context_disconnect(pa_ctx);
		pa_context_unref(pa_ctx);
		pa_threaded_mainloop_free(pa_ml);
		return;	/* connection failed: don't touch the freed context below */
	}

	pa_sample_spec ss;
	ss.rate=96000;
	ss.channels=1;
	ss.format=PA_SAMPLE_S24_32LE;
	ps=pa_stream_new(pa_ctx,"Playback",&ss,NULL);
	pa_stream_set_write_callback(ps,audio_request_cb,NULL);
	pa_stream_set_underflow_callback(ps,audio_underflow_cb,NULL);

	pa_buffer_attr bufattr;
	bufattr.fragsize = (uint32_t)-1;
	bufattr.maxlength = pa_usec_to_bytes(20000,&ss);
	bufattr.minreq = pa_usec_to_bytes(0,&ss);
	bufattr.prebuf = 0;
	bufattr.tlength = pa_usec_to_bytes(20000,&ss);

	pa_stream_connect_playback(ps,NULL,&bufattr,
		PA_STREAM_INTERPOLATE_TIMING|PA_STREAM_ADJUST_LATENCY|PA_STREAM_AUTO_TIMING_UPDATE|PA_STREAM_START_CORKED,NULL,NULL);
}
Example #13
static void connect_server(struct ausrv *ausrv)
{
    pa_mainloop_api *api    = pa_glib_mainloop_get_api(ausrv->mainloop);
    char            *server = ausrv->server;

    cancel_timer(ausrv);


    if (server != NULL && !strcmp(ausrv->server, DEFAULT_SERVER))
        server = NULL;
    
    
    /*
     * Note: It is not possible to reconnect a context if it ever gets
     *     disconnected. If we have a context here, get rid of it and
     *     allocate a new one.
     */
    if (ausrv->context != NULL) {
        pa_context_set_state_callback(ausrv->context, NULL, NULL);
        pa_context_set_subscribe_callback(ausrv->context, NULL, NULL);
        pa_context_unref(ausrv->context);
        ausrv->context = NULL;
    }
    
    if ((ausrv->context = pa_context_new(api, pa_client_name)) == NULL) {
        LOG_ERROR("%s(): pa_context_new() failed, exiting", __FUNCTION__);
        exit(1);
    }
    
    pa_context_set_state_callback(ausrv->context, context_callback, ausrv);
    pa_context_set_subscribe_callback(ausrv->context, event_callback, ausrv);


    LOG_INFO("Trying to connect to %s...", server ? server : DEFAULT_SERVER);
    pa_context_connect(ausrv->context, server, PA_CONTEXT_NOAUTOSPAWN, NULL);
}
Example #14
PulseAudioSystem::PulseAudioSystem() {
	pasInput = pasOutput = pasSpeaker = NULL;
	bSourceDone=bSinkDone=bServerDone = false;
	iDelayCache = 0;
	bPositionalCache = false;
	bAttenuating = false;
	iRemainingOperations = 0;
	bPulseIsGood = false;
	iSinkId = -1;

	pam = pa_threaded_mainloop_new();
	pa_mainloop_api *api = pa_threaded_mainloop_get_api(pam);

	pa_proplist *proplist;

	proplist = pa_proplist_new();
	pa_proplist_sets(proplist, PA_PROP_APPLICATION_NAME, "Mumble");
	pa_proplist_sets(proplist, PA_PROP_APPLICATION_ID, "net.sourceforge.mumble.mumble");
	pa_proplist_sets(proplist, PA_PROP_APPLICATION_ICON_NAME, "mumble");
	pa_proplist_sets(proplist, PA_PROP_MEDIA_ROLE, "game");

	pacContext = pa_context_new_with_proplist(api, NULL, proplist);
	pa_proplist_free(proplist);

	pa_context_set_subscribe_callback(pacContext, subscribe_callback, this);

	pa_context_set_state_callback(pacContext, context_state_callback, this);
	pa_context_connect(pacContext, NULL, PA_CONTEXT_NOAUTOSPAWN, NULL);

	pade = api->defer_new(api, defer_event_callback, this);
	api->defer_enable(pade, false);

	pa_threaded_mainloop_start(pam);

	bRunning = true;
}
Example #15
int
sa_stream_create_pcm(
  sa_stream_t      ** _s,
  const char        * client_name,
  sa_mode_t           mode,
  sa_pcm_format_t     format,
  unsigned  int       rate,
  unsigned  int       n_channels
) {
  sa_stream_t   * s = 0;
  char *server = NULL;

  /*
   * Make sure we return a NULL stream pointer on failure.
   */
  if (_s == NULL) {
    return SA_ERROR_INVALID;
  }
  *_s = NULL;

  if (mode != SA_MODE_WRONLY) {
    return SA_ERROR_NOT_SUPPORTED;
  }
  if (format != SA_PCM_FORMAT_S16_LE) {
    return SA_ERROR_NOT_SUPPORTED;
  }

  /*
   * Allocate the instance and required resources.
   */
  if ((s = malloc(sizeof(sa_stream_t))) == NULL) {
    return SA_ERROR_OOM;
  }
  if ((s->bl_head = new_buffer()) == NULL) {
    free(s);
    return SA_ERROR_OOM;
  }

  if (pthread_mutex_init(&s->mutex, NULL) != 0) {
    free(s->bl_head);
    free(s);
    return SA_ERROR_SYSTEM;
  }

  s->stream        = NULL;
  s->m             = NULL;
  s->thread_id     = 0;
  s->playing       = 0;
  s->bytes_written = 0;

  s->bl_tail       = s->bl_head;
  s->n_bufs        = 1;

  s->sample_spec.format = PA_SAMPLE_S16LE;
  s->sample_spec.channels = n_channels;
  s->sample_spec.rate = rate;

  strcpy(s->client_name, client_name);

  /* Set up a new main loop */
  s->m = pa_threaded_mainloop_new();
  pa_threaded_mainloop_start(s->m);

  pa_threaded_mainloop_lock(s->m);

  /* Create a new connection context */
  if (!(s->context = pa_context_new(pa_threaded_mainloop_get_api(s->m), "OggPlay"))) {
    fprintf(stderr, "pa_context_new() failed.\n");
    goto unlock_and_fail;
  }
  pa_context_set_state_callback(s->context, context_state_callback, s);

  pa_context_connect(s->context, server, 0, NULL);

  /* Wait until the context is ready */
  pa_threaded_mainloop_wait(s->m);
  if (pa_context_get_state(s->context) != PA_CONTEXT_READY) {
    fprintf(stderr, "creating Pulseaudio Context failed\n");
    goto unlock_and_fail;
  }
  pa_threaded_mainloop_unlock(s->m);

  *_s = s;
  return SA_SUCCESS;

unlock_and_fail:
  pa_threaded_mainloop_unlock(s->m);
  free(s);
  return SA_ERROR_OOM;
}
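The context_state_callback that receives `s` as userdata is not shown. A minimal assumed version only needs to wake the pa_threaded_mainloop_wait() call above once the context settles:

static void context_state_callback(pa_context *c, void *userdata) {
  sa_stream_t *s = userdata;

  switch (pa_context_get_state(c)) {
    case PA_CONTEXT_READY:
    case PA_CONTEXT_TERMINATED:
    case PA_CONTEXT_FAILED:
      pa_threaded_mainloop_signal(s->m, 0);
      break;
    default:
      break;
  }
}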
Example #16
int main(int argc, char *argv[]) {
    pa_mainloop* m = NULL;
    int ret = 1, c;
    char *bn, *server = NULL;
    pa_time_event *time_event = NULL;
    const char *filename = NULL;

    static const struct option long_options[] = {
        {"record",       0, NULL, 'r'},
        {"playback",     0, NULL, 'p'},
        {"device",       1, NULL, 'd'},
        {"server",       1, NULL, 's'},
        {"client-name",  1, NULL, 'n'},
        {"stream-name",  1, NULL, ARG_STREAM_NAME},
        {"version",      0, NULL, ARG_VERSION},
        {"help",         0, NULL, 'h'},
        {"verbose",      0, NULL, 'v'},
        {"volume",       1, NULL, ARG_VOLUME},
        {"rate",         1, NULL, ARG_SAMPLERATE},
        {"format",       1, NULL, ARG_SAMPLEFORMAT},
        {"channels",     1, NULL, ARG_CHANNELS},
        {"channel-map",  1, NULL, ARG_CHANNELMAP},
        {"fix-format",   0, NULL, ARG_FIX_FORMAT},
        {"fix-rate",     0, NULL, ARG_FIX_RATE},
        {"fix-channels", 0, NULL, ARG_FIX_CHANNELS},
        {"no-remap",     0, NULL, ARG_NO_REMAP},
        {"no-remix",     0, NULL, ARG_NO_REMIX},
        {"latency",      1, NULL, ARG_LATENCY},
        {"process-time", 1, NULL, ARG_PROCESS_TIME},
        {"property",     1, NULL, ARG_PROPERTY},
        {"raw",          0, NULL, ARG_RAW},
        {"file-format",  2, NULL, ARG_FILE_FORMAT},
        {"list-file-formats", 0, NULL, ARG_LIST_FILE_FORMATS},
        {"latency-msec", 1, NULL, ARG_LATENCY_MSEC},
        {"process-time-msec", 1, NULL, ARG_PROCESS_TIME_MSEC},
        {NULL,           0, NULL, 0}
    };

    setlocale(LC_ALL, "");
    bindtextdomain(GETTEXT_PACKAGE, PULSE_LOCALEDIR);

    bn = pa_path_get_filename(argv[0]);

    if (strstr(bn, "play")) {
        mode = PLAYBACK;
        raw = FALSE;
    } else if (strstr(bn, "record")) {
        mode = RECORD;
        raw = FALSE;
    } else if (strstr(bn, "cat")) {
        mode = PLAYBACK;
        raw = TRUE;
    } else if (strstr(bn, "rec") || strstr(bn, "mon")) {
        mode = RECORD;
        raw = TRUE;
    }

    proplist = pa_proplist_new();

    while ((c = getopt_long(argc, argv, "rpd:s:n:hv", long_options, NULL)) != -1) {

        switch (c) {
            case 'h' :
                help(bn);
                ret = 0;
                goto quit;

            case ARG_VERSION:
                printf(_("pacat %s\n"
                         "Compiled with libpulse %s\n"
                         "Linked with libpulse %s\n"),
                       PACKAGE_VERSION,
                       pa_get_headers_version(),
                       pa_get_library_version());
                ret = 0;
                goto quit;

            case 'r':
                mode = RECORD;
                break;

            case 'p':
                mode = PLAYBACK;
                break;

            case 'd':
                pa_xfree(device);
                device = pa_xstrdup(optarg);
                break;

            case 's':
                pa_xfree(server);
                server = pa_xstrdup(optarg);
                break;

            case 'n': {
                char *t;

                if (!(t = pa_locale_to_utf8(optarg)) ||
                    pa_proplist_sets(proplist, PA_PROP_APPLICATION_NAME, t) < 0) {

                    pa_log(_("Invalid client name '%s'"), t ? t : optarg);
                    pa_xfree(t);
                    goto quit;
                }

                pa_xfree(t);
                break;
            }

            case ARG_STREAM_NAME: {
                char *t;

                if (!(t = pa_locale_to_utf8(optarg)) ||
                    pa_proplist_sets(proplist, PA_PROP_MEDIA_NAME, t) < 0) {

                    pa_log(_("Invalid stream name '%s'"), t ? t : optarg);
                    pa_xfree(t);
                    goto quit;
                }

                pa_xfree(t);
                break;
            }

            case 'v':
                verbose = 1;
                break;

            case ARG_VOLUME: {
                int v = atoi(optarg);
                volume = v < 0 ? 0U : (pa_volume_t) v;
                volume_is_set = TRUE;
                break;
            }

            case ARG_CHANNELS:
                sample_spec.channels = (uint8_t) atoi(optarg);
                sample_spec_set = TRUE;
                break;

            case ARG_SAMPLEFORMAT:
                sample_spec.format = pa_parse_sample_format(optarg);
                sample_spec_set = TRUE;
                break;

            case ARG_SAMPLERATE:
                sample_spec.rate = (uint32_t) atoi(optarg);
                sample_spec_set = TRUE;
                break;

            case ARG_CHANNELMAP:
                if (!pa_channel_map_parse(&channel_map, optarg)) {
                    pa_log(_("Invalid channel map '%s'"), optarg);
                    goto quit;
                }

                channel_map_set = TRUE;
                break;

            case ARG_FIX_CHANNELS:
                flags |= PA_STREAM_FIX_CHANNELS;
                break;

            case ARG_FIX_RATE:
                flags |= PA_STREAM_FIX_RATE;
                break;

            case ARG_FIX_FORMAT:
                flags |= PA_STREAM_FIX_FORMAT;
                break;

            case ARG_NO_REMIX:
                flags |= PA_STREAM_NO_REMIX_CHANNELS;
                break;

            case ARG_NO_REMAP:
                flags |= PA_STREAM_NO_REMAP_CHANNELS;
                break;

            case ARG_LATENCY:
                if (((latency = (size_t) atoi(optarg))) <= 0) {
                    pa_log(_("Invalid latency specification '%s'"), optarg);
                    goto quit;
                }
                break;

            case ARG_PROCESS_TIME:
                if (((process_time = (size_t) atoi(optarg))) <= 0) {
                    pa_log(_("Invalid process time specification '%s'"), optarg);
                    goto quit;
                }
                break;

            case ARG_LATENCY_MSEC:
                if (((latency_msec = (int32_t) atoi(optarg))) <= 0) {
                    pa_log(_("Invalid latency specification '%s'"), optarg);
                    goto quit;
                }
                break;

            case ARG_PROCESS_TIME_MSEC:
                if (((process_time_msec = (int32_t) atoi(optarg))) <= 0) {
                    pa_log(_("Invalid process time specification '%s'"), optarg);
                    goto quit;
                }
                break;

            case ARG_PROPERTY: {
                char *t;

                if (!(t = pa_locale_to_utf8(optarg)) ||
                    pa_proplist_setp(proplist, t) < 0) {

                    pa_xfree(t);
                    pa_log(_("Invalid property '%s'"), optarg);
                    goto quit;
                }

                pa_xfree(t);
                break;
            }

            case ARG_RAW:
                raw = TRUE;
                break;

            case ARG_FILE_FORMAT:
                raw = FALSE;

                if (optarg) {
                    if ((file_format = pa_sndfile_format_from_string(optarg)) < 0) {
                        pa_log(_("Unknown file format %s."), optarg);
                        goto quit;
                    }
                }

                raw = FALSE;
                break;

            case ARG_LIST_FILE_FORMATS:
                pa_sndfile_dump_formats();
                ret = 0;
                goto quit;

            default:
                goto quit;
        }
    }

    if (!pa_sample_spec_valid(&sample_spec)) {
        pa_log(_("Invalid sample specification"));
        goto quit;
    }

    if (optind+1 == argc) {
        int fd;

        filename = argv[optind];

        if ((fd = open(argv[optind], mode == PLAYBACK ? O_RDONLY : O_WRONLY|O_TRUNC|O_CREAT, 0666)) < 0) {
            pa_log(_("open(): %s"), strerror(errno));
            goto quit;
        }

        if (dup2(fd, mode == PLAYBACK ? STDIN_FILENO : STDOUT_FILENO) < 0) {
            pa_log(_("dup2(): %s"), strerror(errno));
            goto quit;
        }

        pa_close(fd);

    } else if (optind+1 <= argc) {
        pa_log(_("Too many arguments."));
        goto quit;
    }

    if (!raw) {
        SF_INFO sfi;
        pa_zero(sfi);

        if (mode == RECORD) {
            /* This might patch up the sample spec */
            if (pa_sndfile_write_sample_spec(&sfi, &sample_spec) < 0) {
                pa_log(_("Failed to generate sample specification for file."));
                goto quit;
            }

            /* Transparently upgrade classic .wav to wavex for multichannel audio */
            if (file_format <= 0) {
                if ((sample_spec.channels == 2 && (!channel_map_set || (channel_map.map[0] == PA_CHANNEL_POSITION_LEFT &&
                                                                        channel_map.map[1] == PA_CHANNEL_POSITION_RIGHT))) ||
                    (sample_spec.channels == 1 && (!channel_map_set || (channel_map.map[0] == PA_CHANNEL_POSITION_MONO))))
                    file_format = SF_FORMAT_WAV;
                else
                    file_format = SF_FORMAT_WAVEX;
            }

            sfi.format |= file_format;
        }

        if (!(sndfile = sf_open_fd(mode == RECORD ? STDOUT_FILENO : STDIN_FILENO,
                                   mode == RECORD ? SFM_WRITE : SFM_READ,
                                   &sfi, 0))) {
            pa_log(_("Failed to open audio file."));
            goto quit;
        }

        if (mode == PLAYBACK) {
            if (sample_spec_set)
                pa_log(_("Warning: specified sample specification will be overwritten with specification from file."));

            if (pa_sndfile_read_sample_spec(sndfile, &sample_spec) < 0) {
                pa_log(_("Failed to determine sample specification from file."));
                goto quit;
            }
            sample_spec_set = TRUE;

            if (!channel_map_set) {
                /* Allow the user to overwrite the channel map on the command line */
                if (pa_sndfile_read_channel_map(sndfile, &channel_map) < 0) {
                    if (sample_spec.channels > 2)
                        pa_log(_("Warning: Failed to determine channel map from file."));
                } else
                    channel_map_set = TRUE;
            }
        }
    }

    if (!channel_map_set)
        pa_channel_map_init_extend(&channel_map, sample_spec.channels, PA_CHANNEL_MAP_DEFAULT);

    if (!pa_channel_map_compatible(&channel_map, &sample_spec)) {
        pa_log(_("Channel map doesn't match sample specification"));
        goto quit;
    }

    if (!raw) {
        pa_proplist *sfp;

        if (mode == PLAYBACK)
            readf_function = pa_sndfile_readf_function(&sample_spec);
        else {
            if (pa_sndfile_write_channel_map(sndfile, &channel_map) < 0)
                pa_log(_("Warning: failed to write channel map to file."));

            writef_function = pa_sndfile_writef_function(&sample_spec);
        }

        /* Fill in libsndfile prop list data */
        sfp = pa_proplist_new();
        pa_sndfile_init_proplist(sndfile, sfp);
        pa_proplist_update(proplist, PA_UPDATE_MERGE, sfp);
        pa_proplist_free(sfp);
    }

    if (verbose) {
        char tss[PA_SAMPLE_SPEC_SNPRINT_MAX], tcm[PA_CHANNEL_MAP_SNPRINT_MAX];

        pa_log(_("Opening a %s stream with sample specification '%s' and channel map '%s'."),
                mode == RECORD ? _("recording") : _("playback"),
                pa_sample_spec_snprint(tss, sizeof(tss), &sample_spec),
                pa_channel_map_snprint(tcm, sizeof(tcm), &channel_map));
    }

    /* Fill in client name if none was set */
    if (!pa_proplist_contains(proplist, PA_PROP_APPLICATION_NAME)) {
        char *t;

        if ((t = pa_locale_to_utf8(bn))) {
            pa_proplist_sets(proplist, PA_PROP_APPLICATION_NAME, t);
            pa_xfree(t);
        }
    }

    /* Fill in media name if none was set */
    if (!pa_proplist_contains(proplist, PA_PROP_MEDIA_NAME)) {
        const char *t;

        if ((t = filename) ||
            (t = pa_proplist_gets(proplist, PA_PROP_APPLICATION_NAME)))
            pa_proplist_sets(proplist, PA_PROP_MEDIA_NAME, t);
    }

    /* Set up a new main loop */
    if (!(m = pa_mainloop_new())) {
        pa_log(_("pa_mainloop_new() failed."));
        goto quit;
    }

    mainloop_api = pa_mainloop_get_api(m);

    pa_assert_se(pa_signal_init(mainloop_api) == 0);
    pa_signal_new(SIGINT, exit_signal_callback, NULL);
    pa_signal_new(SIGTERM, exit_signal_callback, NULL);
#ifdef SIGUSR1
    pa_signal_new(SIGUSR1, sigusr1_signal_callback, NULL);
#endif
    pa_disable_sigpipe();

    if (raw) {
        if (!(stdio_event = mainloop_api->io_new(mainloop_api,
                                                 mode == PLAYBACK ? STDIN_FILENO : STDOUT_FILENO,
                                                 mode == PLAYBACK ? PA_IO_EVENT_INPUT : PA_IO_EVENT_OUTPUT,
                                                 mode == PLAYBACK ? stdin_callback : stdout_callback, NULL))) {
            pa_log(_("io_new() failed."));
            goto quit;
        }
    }

    /* Create a new connection context */
    if (!(context = pa_context_new_with_proplist(mainloop_api, NULL, proplist))) {
        pa_log(_("pa_context_new() failed."));
        goto quit;
    }

    pa_context_set_state_callback(context, context_state_callback, NULL);

    /* Connect the context */
    if (pa_context_connect(context, server, 0, NULL) < 0) {
        pa_log(_("pa_context_connect() failed: %s"), pa_strerror(pa_context_errno(context)));
        goto quit;
    }

    if (verbose) {
        if (!(time_event = pa_context_rttime_new(context, pa_rtclock_now() + TIME_EVENT_USEC, time_event_callback, NULL))) {
            pa_log(_("pa_context_rttime_new() failed."));
            goto quit;
        }
    }

    /* Run the main loop */
    if (pa_mainloop_run(m, &ret) < 0) {
        pa_log(_("pa_mainloop_run() failed."));
        goto quit;
    }

quit:
    if (stream)
        pa_stream_unref(stream);

    if (context)
        pa_context_unref(context);

    if (stdio_event) {
        pa_assert(mainloop_api);
        mainloop_api->io_free(stdio_event);
    }

    if (time_event) {
        pa_assert(mainloop_api);
        mainloop_api->time_free(time_event);
    }

    if (m) {
        pa_signal_done();
        pa_mainloop_free(m);
    }

    pa_xfree(buffer);

    pa_xfree(server);
    pa_xfree(device);

    if (sndfile)
        sf_close(sndfile);

    if (proplist)
        pa_proplist_free(proplist);

    return ret;
}
Example #17
static int init(struct ao *ao, char *params)
{
    struct pa_sample_spec ss;
    struct pa_channel_map map;
    char *devarg = NULL;
    char *host = NULL;
    char *sink = NULL;
    const char *version = pa_get_library_version();

    struct priv *priv = talloc_zero(ao, struct priv);
    ao->priv = priv;

    if (params) {
        devarg = strdup(params);
        sink = strchr(devarg, ':');
        if (sink)
            *sink++ = 0;
        if (devarg[0])
            host = devarg;
    }

    priv->broken_pause = false;
    /* not sure which versions are affected, assume 0.9.11* to 0.9.14*
     * known bad: 0.9.14, 0.9.13
     * known good: 0.9.9, 0.9.10, 0.9.15
     * To test: pause, wait ca. 5 seconds, framestep and see if MPlayer
     * hangs somewhen. */
    if (strncmp(version, "0.9.1", 5) == 0 && version[5] >= '1'
        && version[5] <= '4') {
        mp_msg(MSGT_AO, MSGL_WARN,
               "[pulse] working around probably broken pause functionality,\n"
               "        see http://www.pulseaudio.org/ticket/440\n");
        priv->broken_pause = true;
    }

    ss.channels = ao->channels;
    ss.rate = ao->samplerate;

    const struct format_map *fmt_map = format_maps;
    while (fmt_map->mp_format != ao->format) {
        if (fmt_map->mp_format == AF_FORMAT_UNKNOWN) {
            mp_msg(MSGT_AO, MSGL_V,
                   "AO: [pulse] Unsupported format, using default\n");
            fmt_map = format_maps;
            break;
        }
        fmt_map++;
    }
    ao->format = fmt_map->mp_format;
    ss.format = fmt_map->pa_format;

    if (!pa_sample_spec_valid(&ss)) {
        mp_msg(MSGT_AO, MSGL_ERR, "AO: [pulse] Invalid sample spec\n");
        goto fail;
    }

    pa_channel_map_init_auto(&map, ss.channels, PA_CHANNEL_MAP_ALSA);
    ao->bps = pa_bytes_per_second(&ss);

    if (!(priv->mainloop = pa_threaded_mainloop_new())) {
        mp_msg(MSGT_AO, MSGL_ERR, "AO: [pulse] Failed to allocate main loop\n");
        goto fail;
    }

    if (!(priv->context = pa_context_new(pa_threaded_mainloop_get_api(
                                 priv->mainloop), PULSE_CLIENT_NAME))) {
        mp_msg(MSGT_AO, MSGL_ERR, "AO: [pulse] Failed to allocate context\n");
        goto fail;
    }

    pa_context_set_state_callback(priv->context, context_state_cb, ao);

    if (pa_context_connect(priv->context, host, 0, NULL) < 0)
        goto fail;

    pa_threaded_mainloop_lock(priv->mainloop);

    if (pa_threaded_mainloop_start(priv->mainloop) < 0)
        goto unlock_and_fail;

    /* Wait until the context is ready */
    pa_threaded_mainloop_wait(priv->mainloop);

    if (pa_context_get_state(priv->context) != PA_CONTEXT_READY)
        goto unlock_and_fail;

    if (!(priv->stream = pa_stream_new(priv->context, "audio stream", &ss,
                                       &map)))
        goto unlock_and_fail;

    pa_stream_set_state_callback(priv->stream, stream_state_cb, ao);
    pa_stream_set_write_callback(priv->stream, stream_request_cb, ao);
    pa_stream_set_latency_update_callback(priv->stream,
                                          stream_latency_update_cb, ao);
    pa_buffer_attr bufattr = {
        .maxlength = -1,
        .tlength = pa_usec_to_bytes(1000000, &ss),
        .prebuf = -1,
        .minreq = -1,
        .fragsize = -1,
    };
    if (pa_stream_connect_playback(priv->stream, sink, &bufattr,
                                   PA_STREAM_INTERPOLATE_TIMING
                                   | PA_STREAM_AUTO_TIMING_UPDATE, NULL,
                                   NULL) < 0)
        goto unlock_and_fail;

    /* Wait until the stream is ready */
    pa_threaded_mainloop_wait(priv->mainloop);

    if (pa_stream_get_state(priv->stream) != PA_STREAM_READY)
        goto unlock_and_fail;

    pa_threaded_mainloop_unlock(priv->mainloop);

    free(devarg);
    return 0;

unlock_and_fail:

    if (priv->mainloop)
        pa_threaded_mainloop_unlock(priv->mainloop);

fail:
    if (priv->context)
        GENERIC_ERR_MSG(priv->context, "Init failed");
    free(devarg);
    uninit(ao, true);
    return -1;
}

static void cork(struct ao *ao, bool pause)
{
    struct priv *priv = ao->priv;
    pa_threaded_mainloop_lock(priv->mainloop);
    priv->retval = 0;
    if (!waitop(priv, pa_stream_cork(priv->stream, pause, success_cb, ao)) ||
        !priv->retval)
        GENERIC_ERR_MSG(priv->context, "pa_stream_cork() failed");
}

// Play the specified data to the pulseaudio server
static int play(struct ao *ao, void *data, int len, int flags)
{
    struct priv *priv = ao->priv;
    /* For some reason Pulseaudio behaves worse if this is done after
     * the write - rapidly repeated seeks result in bogus increasing
     * reported latency. */
    if (priv->did_reset)
        cork(ao, false);
    pa_threaded_mainloop_lock(priv->mainloop);
    if (pa_stream_write(priv->stream, data, len, NULL, 0,
                        PA_SEEK_RELATIVE) < 0) {
        GENERIC_ERR_MSG(priv->context, "pa_stream_write() failed");
        len = -1;
    }
    if (priv->did_reset) {
        priv->did_reset = false;
        if (!waitop(priv, pa_stream_update_timing_info(priv->stream,
                                                       success_cb, ao))
            || !priv->retval)
            GENERIC_ERR_MSG(priv->context, "pa_stream_UPP() failed");
    } else
        pa_threaded_mainloop_unlock(priv->mainloop);
    return len;
}

// Reset the audio stream, i.e. flush the playback buffer on the server side
static void reset(struct ao *ao)
{
    // pa_stream_flush() works badly if not corked
    cork(ao, true);
    struct priv *priv = ao->priv;
    pa_threaded_mainloop_lock(priv->mainloop);
    priv->retval = 0;
    if (!waitop(priv, pa_stream_flush(priv->stream, success_cb, ao)) ||
        !priv->retval)
        GENERIC_ERR_MSG(priv->context, "pa_stream_flush() failed");
    priv->did_reset = true;
}
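cork(), play() and reset() all go through a waitop() helper that is not shown. A sketch following the usual mplayer/mpv pattern (assumed): it drains the operation while waiting on the threaded mainloop and then unlocks, which is why the callers above only lock.

// Wait for the operation to finish, unref it and unlock the mainloop.
// Returns true if the operation completed.
static bool waitop(struct priv *priv, pa_operation *op)
{
    if (!op) {
        pa_threaded_mainloop_unlock(priv->mainloop);
        return false;
    }
    pa_operation_state_t state = pa_operation_get_state(op);
    while (state == PA_OPERATION_RUNNING) {
        pa_threaded_mainloop_wait(priv->mainloop);
        state = pa_operation_get_state(op);
    }
    pa_operation_unref(op);
    pa_threaded_mainloop_unlock(priv->mainloop);
    return state == PA_OPERATION_DONE;
}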
Example #18
/*! Starts up audio streaming to the host. */
void HostAudio::open()
{
	m_mainloop = pa_threaded_mainloop_new();
	if (!m_mainloop) {
		qDebug("Could not acquire PulseAudio main loop");
		return;
	}
	m_api = pa_threaded_mainloop_get_api(m_mainloop);
	m_context = pa_context_new(m_api, "emumaster");
	if (!m_context) {
		qDebug("Could not acquire PulseAudio device context");
		return;
	}
	pa_context_set_state_callback(m_context, contextStreamCallback, this);

#if defined(MEEGO_EDITION_HARMATTAN)
	if (pa_context_connect(m_context, 0, PA_CONTEXT_NOFLAGS, 0) < 0) {
#elif defined(Q_WS_MAEMO_5)
	if (pa_context_connect(m_context, 0, (pa_context_flags_t)0, 0) < 0) {
#endif
		int error = pa_context_errno(m_context);
		qDebug("Could not connect to PulseAudio server: %s", pa_strerror(error));
		return;
	}
	pa_threaded_mainloop_lock(m_mainloop);
	if (pa_threaded_mainloop_start(m_mainloop) < 0) {
		qDebug("Could not start mainloop");
		return;
	}

	waitForStreamReady();

	pa_sample_spec fmt;
	fmt.channels = 2;
	fmt.format = PA_SAMPLE_S16LE;
	fmt.rate = 44100;

	pa_buffer_attr buffer_attributes;
	buffer_attributes.tlength = pa_bytes_per_second(&fmt) / 5;
	buffer_attributes.maxlength = buffer_attributes.tlength * 3;
	buffer_attributes.minreq = buffer_attributes.tlength / 3;
	buffer_attributes.prebuf = buffer_attributes.tlength;

	m_stream = pa_stream_new(m_context, "emumaster", &fmt, 0);
	if (!m_stream) {
		int error = pa_context_errno(m_context);
		qDebug("Could not acquire new PulseAudio stream: %s", pa_strerror(error));
		return;
	}
	pa_stream_flags_t flags = (pa_stream_flags_t)(PA_STREAM_ADJUST_LATENCY | PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE);
//	pa_stream_flags_t flags = (pa_stream_flags_t) (PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_EARLY_REQUESTS);
	if (pa_stream_connect_playback(m_stream, 0, &buffer_attributes, flags, 0, 0) < 0) {
		m_stream = 0;
		int error = pa_context_errno(m_context);
		qDebug("Could not connect for playback: %s", pa_strerror(error));
		return;
	}

	waitForStreamReady();

	pa_threaded_mainloop_unlock(m_mainloop);
}

/*! Stops audio streaming. */
void HostAudio::close()
{
	if (m_mainloop)
		pa_threaded_mainloop_stop(m_mainloop);
	if (m_stream) {
		pa_stream_unref(m_stream);
		m_stream = 0;
	}
	if (m_context) {
		pa_context_disconnect(m_context);
		pa_context_unref(m_context);
		m_context = 0;
	}
	if (m_mainloop) {
		pa_threaded_mainloop_free(m_mainloop);
		m_mainloop = 0;
	}
}

/*! Streams a frame of audio from emulated system to the host. */
void HostAudio::sendFrame()
{
	if (!m_stream)
		return;
	pa_threaded_mainloop_lock(m_mainloop);
	void *data;
#if defined(MEEGO_EDITION_HARMATTAN)
	size_t size = -1;
	pa_stream_begin_write(m_stream, &data, &size);
#elif defined(Q_WS_MAEMO_5)
	size_t size = 4096;
	static char buf[4096];
	data = buf;
#endif
	size = qMin(size, pa_stream_writable_size(m_stream));
	if (size)
		size = m_emu->fillAudioBuffer(reinterpret_cast<char *>(data), size);
	if (size)
		pa_stream_write(m_stream, data, size, 0, 0, PA_SEEK_RELATIVE);
#if defined(MEEGO_EDITION_HARMATTAN)
	else
		pa_stream_cancel_write(m_stream);
#endif
	pa_threaded_mainloop_unlock(m_mainloop);
}

/*! \internal */
void HostAudio::waitForStreamReady()
{
	pa_context_state_t context_state = pa_context_get_state(m_context);
	while (context_state != PA_CONTEXT_READY) {
		context_state = pa_context_get_state(m_context);
		if (!PA_CONTEXT_IS_GOOD(context_state)) {
			int error = pa_context_errno(m_context);
			qDebug("Context state is not good: %s", pa_strerror(error));
			return;
		} else if (context_state == PA_CONTEXT_READY) {
			break;
		} else {
			//qDebug("PulseAudio context state is %d", context_state);
		}
		pa_threaded_mainloop_wait(m_mainloop);
	}
}
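contextStreamCallback() is not part of this listing. A hypothetical sketch (assuming the callback can reach m_mainloop, e.g. via a friend declaration): every state change just wakes the thread blocked in pa_threaded_mainloop_wait() so waitForStreamReady() re-checks the state.

static void contextStreamCallback(pa_context *context, void *userdata)
{
	Q_UNUSED(context);
	HostAudio *audio = static_cast<HostAudio *>(userdata);
	pa_threaded_mainloop_signal(audio->m_mainloop, 0);
}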
Example #19
int PulseAudioDriver::setup(bool capture, bool playback, const QString& )
{
	PENTER;
	
	sample_spec.rate = frame_rate;
	sample_spec.channels = 2;
	sample_spec.format = PA_SAMPLE_FLOAT32NE;
	
	assert(pa_sample_spec_valid(&sample_spec));
	
	if (channel_map_set && channel_map.channels != sample_spec.channels) {
		fprintf(stderr, "Channel map doesn't match file.\n");
		return -1;
	}
	
	/* Set up a new main loop */
	if (!(mainloop = pa_mainloop_new())) {
		fprintf(stderr, "pa_mainloop_new() failed.\n");
		return -1;
	}

	mainloop_api = pa_mainloop_get_api(mainloop);

	int r = pa_signal_init(mainloop_api);
	assert(r == 0);

	/* Create a new connection context */
	if (!(context = pa_context_new(mainloop_api, "Traverso"))) {
		fprintf(stderr, "pa_context_new() failed.\n");
		return -1;
	}

	pa_context_set_state_callback(context, context_state_callback, this);

	/* Connect the context */
	pa_context_connect(context, "", (pa_context_flags_t)0, NULL);

	int ret;
	/* Run the main loop */
// 	if (pa_mainloop_run(mainloop, &ret) < 0) {
// 		fprintf(stderr, "pa_mainloop_run() failed.\n");
// 		return -1;
// 	}


	AudioChannel* audiochannel;
	int port_flags = 0;
	char buf[32];
	
	// TODO use the found maxchannel count for the playback stream, instead of assuming 2 !!
	for (int chn = 0; chn < 2; chn++) {

		snprintf (buf, sizeof(buf) - 1, "playback_%d", chn+1);

		audiochannel = device->register_playback_channel(buf, "32 bit float audio", port_flags, frames_per_cycle, chn);
		audiochannel->set_latency( frames_per_cycle + capture_frame_latency );
		playbackChannels.append(audiochannel);
	}

	// TODO use the found maxchannel count for the capture stream, instead of assuming 0 !!
	for (int chn = 0; chn < 2; chn++) {

		snprintf (buf, sizeof(buf) - 1, "capture_%d", chn+1);

		audiochannel = device->register_capture_channel(buf, "32 bit float audio", port_flags, frames_per_cycle, chn);
		audiochannel->set_latency( frames_per_cycle + capture_frame_latency );
		captureChannels.append(audiochannel);
	}

	return 1;
}
Example #20
struct pulse_conn_t *pulse_conn_open(const char *name, amp_audio_f func, void *arg, const struct pulse_conf_t *conf)
{
	int err;
	unsigned int i;
	pthread_attr_t attr;
	struct sched_param param;
	struct pulse_conn_t *conn;

	conn = malloc(sizeof(struct pulse_conn_t));
	conn->quit = 0;
	conn->width = (conf->in > conf->out) ? conf->in : conf->out;
	conn->lat = conf->rate * (conf->lat / 1000.0f);
	conn->conf = *conf;
	conn->func = func;
	conn->arg = arg;
	conn->rd = 0;
	conn->wr = conn->lat;
	conn->startup = 2;
	conn->reset[0] = conn->reset[1] = 1;
	conn->nrd = conn->nwr = conf->rate;

	//printf("width: %u\n", conn->width);

	conn->buf = malloc(conn->width * sizeof(void *));
	for(i = 0; i < conn->width; i++)
		memset(conn->buf[i] = malloc(2 * conn->lat * sizeof(float)), 0x00, 2 * conn->lat * sizeof(float));

	if(pipe(conn->pipe) < 0)
		fprintf(stderr, "Cannot create pipe."), exit(1);

	conn->loop = pa_mainloop_new();
	conn->api = pa_mainloop_get_api(conn->loop);
	pa_mainloop_set_poll_func(conn->loop, conn_poll, conn);

	conn->context = pa_context_new(conn->api, name);
	pa_context_set_state_callback(conn->context, conn_state, conn);
	pa_context_connect(conn->context, NULL, 0, NULL);

	err = pthread_attr_init(&attr);
	if(err != 0)
		fprintf(stderr, "Failed to initialize thread attributes. %s.", strerror(err)), exit(1);

	err = pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);
	if(err != 0)
		fprintf(stderr, "Failed to set schedular inheritance. %s.", strerror(err)), exit(1);

	err = pthread_attr_setschedpolicy(&attr, SCHED_FIFO);
	if(err != 0)
		fprintf(stderr, "Failed to set schedular policy. %s.", strerror(err)), exit(1);

	param.sched_priority = 99;
	err = pthread_attr_setschedparam(&attr, &param);
	if(err != 0)
		fprintf(stderr, "Failed to set schedular parameter. %s.", strerror(err)), exit(1);

	err = pthread_create(&conn->thread, &attr, conn_thread, conn);
	if(err != 0) {
		err = pthread_create(&conn->thread, NULL, conn_thread, conn);
		if(err != 0)
			fprintf(stderr, "Failed to start thread. %s.", strerror(err)), exit(1);
	}

	pthread_attr_destroy(&attr);
	if(err != 0)
		fprintf(stderr, "Failed to destroy thread attributes. %s.", strerror(err)), exit(1);

	return conn;
}
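conn_state(), conn_poll() and conn_thread() are not shown. As a hypothetical sketch, the thread entry point only needs to drive the plain mainloop created above until the connection is torn down:

static void *conn_thread(void *arg)
{
	struct pulse_conn_t *conn = arg;
	int ret;

	/* Dispatch PulseAudio events until pa_mainloop_quit() is called,
	 * e.g. from the state callback once conn->quit is set. */
	pa_mainloop_run(conn->loop, &ret);

	return NULL;
}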
Example #21
int pa_get_devicelist(pa_devicelist_t *input, pa_devicelist_t *output) {
    // Define our pulse audio loop and connection variables
    pa_mainloop *pa_ml;
    pa_mainloop_api *pa_mlapi;
    pa_operation *pa_op;
    pa_context *pa_ctx;

    // We'll need these state variables to keep track of our requests
    int state = 0;
    int pa_ready = 0;

    // Initialize our device lists
    memset(input, 0, sizeof(pa_devicelist_t) * 16);
    memset(output, 0, sizeof(pa_devicelist_t) * 16);

    // Create a mainloop API and connection to the default server
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(pa_ml);
    pa_ctx = pa_context_new(pa_mlapi, "test");

    // This function connects to the pulse server
    pa_context_connect(pa_ctx, NULL, 0, NULL);

    // This function defines a callback so the server will tell us its state.
    // Our callback will wait for the state to be ready.  The callback will
    // modify the variable to 1 so we know when we have a connection and it's
    // ready.
    // If there's an error, the callback will set pa_ready to 2
    pa_context_set_state_callback(pa_ctx, pa_state_cb, &pa_ready);

    // Now we'll enter an infinite loop and keep iterating until we receive
    // the data we asked for, or until an error occurs
    for (;;) {
        // We can't do anything until PA is ready, so just iterate the mainloop
        // and continue
        if (pa_ready == 0) {
            pa_mainloop_iterate(pa_ml, 1, NULL);
            continue;
        }
        // We couldn't get a connection to the server, so exit out
        if (pa_ready == 2) {
            pa_context_disconnect(pa_ctx);
            pa_context_unref(pa_ctx);
            pa_mainloop_free(pa_ml);
            return -1;
        }
        // At this point, we're connected to the server and ready to make
        // requests
        switch (state) {
            // State 0: we haven't done anything yet
            case 0:
                // This sends an operation to the server.  pa_sinklist_info is
                // our callback function and a pointer to our devicelist will
                // be passed to the callback The operation ID is stored in the
                // pa_op variable
                pa_op = pa_context_get_sink_info_list(pa_ctx,
                        pa_sinklist_cb,
                        output
                        );

                // Update state for next iteration through the loop
                state++;
                break;
            
            case 1:
                // Now we wait for our operation to complete.  When it's
                // complete our pa_output_devicelist is filled out, and we move
                // along to the next state
                if (pa_operation_get_state(pa_op) == PA_OPERATION_DONE) {
                    pa_operation_unref(pa_op);

                    // Now we perform another operation to get the source
                    // (input device) list just like before.  This time we pass
                    // a pointer to our input structure
                    pa_op = pa_context_get_source_info_list(pa_ctx,
                            pa_sourcelist_cb,
                            input
                            );
                    // Update the state so we know what to do next
                    state++;
                }
                break;
            case 2:
                if (pa_operation_get_state(pa_op) == PA_OPERATION_DONE) {
                    // Now we're done, clean up and disconnect and return
                    pa_operation_unref(pa_op);
                    pa_context_disconnect(pa_ctx);
                    pa_context_unref(pa_ctx);
                    pa_mainloop_free(pa_ml);
                    return 0;
                }
                break;
            default:
                // We should never see this state
                fprintf(stderr, "in state %d\n", state);
                return -1;
        }
        // Iterate the main loop and go again.  The second argument is whether
        // or not the iteration should block until something is ready to be
        // done.  Set it to zero for non-blocking.
        pa_mainloop_iterate(pa_ml, 1, NULL);
    }
}
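The pa_state_cb referenced above comes from the same example; a minimal sketch of it is below. It flips pa_ready to 1 once the connection is usable, or to 2 on failure, which is exactly what the loop above polls for.

static void pa_state_cb(pa_context *c, void *userdata) {
    int *pa_ready = userdata;

    switch (pa_context_get_state(c)) {
        // These cases require no action while we wait
        case PA_CONTEXT_UNCONNECTED:
        case PA_CONTEXT_CONNECTING:
        case PA_CONTEXT_AUTHORIZING:
        case PA_CONTEXT_SETTING_NAME:
        default:
            break;
        case PA_CONTEXT_FAILED:
        case PA_CONTEXT_TERMINATED:
            *pa_ready = 2;
            break;
        case PA_CONTEXT_READY:
            *pa_ready = 1;
            break;
    }
}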
Example #22
/* The implementation of create_output_device_name_list() is based on the
 * example code in
 * http://www.ypass.net/blog/2009/10/
 *    pulseaudio-an-async-example-to-get-device-lists/
 */
mbx_error_code mbx_create_output_device_name_list(char ***dev_names, size_t *n_devs)
{
    pa_mainloop *pa_ml = pa_mainloop_new();
    pa_mainloop_api *pa_mlapi = pa_mainloop_get_api(pa_ml);
    pa_context *pa_ctx = pa_context_new(pa_mlapi, "music box (listing output "
        "devices)");
    pa_operation *pa_op;
    pa_context_state_t pa_context_state = PA_CONTEXT_UNCONNECTED;
    int do_iterate = 1;
    int error = 0;

    pa_context_connect(pa_ctx, NULL, 0, NULL);
    /* The state callback will update the state when we are connected to the
     * PulseAudio server, or if an error occurs. */
    pa_context_set_state_callback(pa_ctx, pa_context_state_cb,
        &pa_context_state);

    while ( do_iterate ) {
        switch ( pa_context_state ) {
            case PA_CONTEXT_UNCONNECTED:
            case PA_CONTEXT_CONNECTING:
            case PA_CONTEXT_AUTHORIZING:
            case PA_CONTEXT_SETTING_NAME:
                pa_mainloop_iterate(pa_ml, 1, NULL); // we must wait.
                break;
            case PA_CONTEXT_READY:
                do_iterate = 0;
                break;
            case PA_CONTEXT_FAILED:
                mbx_log_error(MBX_LOG_AUDIO_OUTPUT, "Connection to PulseAudio server failed: "
                    "%s", pa_strerror(pa_context_errno(pa_ctx)));
                error = 1;
                break;
            case PA_CONTEXT_TERMINATED:
                mbx_log_error(MBX_LOG_AUDIO_OUTPUT, "Connection to PulseAudio server "
                    "terminated unexpectedly.");
                error = 1;
                break;
            default:
                mbx_log_error(MBX_LOG_AUDIO_OUTPUT, "The PulseAudio context has an unexpected "
                    "state: %d", pa_context_state);
                error = 1;
                break;
        }
        if ( error ) {
            do_iterate = 0;
        }
    }
    if ( ! error ) {
        struct list_of_strings result = { NULL, 0, 0 };
        pa_op = pa_context_get_sink_info_list(pa_ctx, pa_sinklist_cb, &result);
        while ( pa_operation_get_state(pa_op) == PA_OPERATION_RUNNING ) {
            pa_mainloop_iterate(pa_ml, 1, NULL); // wait.
        }
        pa_operation_unref(pa_op);
        *dev_names = result.strings;
        *n_devs = result.n_strings;
    }
    pa_context_disconnect(pa_ctx);
    pa_context_unref(pa_ctx);
    pa_mainloop_free(pa_ml);
    return error ? MBX_PULSEAUDIO_ERROR : MBX_SUCCESS;
}
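pa_context_state_cb is not reproduced here; a minimal assumed version simply copies the current state into the variable that the switch statement above examines:

static void pa_context_state_cb(pa_context *context, void *userdata)
{
    pa_context_state_t *state = userdata;
    *state = pa_context_get_state(context);
}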
Example #23
static int pulse_open()
{
  ENTER(__FUNCTION__);
    pa_sample_spec ss;
    pa_operation *o = NULL;
    int success;
    int ret = PULSE_ERROR;

    assert(!mainloop);
    assert(!context);
    assert(!stream);
    assert(!connected);

    pthread_mutex_init( &pulse_mutex, (const pthread_mutexattr_t *)NULL);

    ss.format = ESPEAK_FORMAT;
    ss.rate = wave_samplerate;
    ss.channels = ESPEAK_CHANNEL;

    if (!pa_sample_spec_valid(&ss))
      return PULSE_ERROR;	/* invalid sample spec */

    SHOW_TIME("pa_threaded_mainloop_new (call)");
    if (!(mainloop = pa_threaded_mainloop_new())) {
      SHOW("Failed to allocate main loop\n","");
        goto fail;
    }

    pa_threaded_mainloop_lock(mainloop);

    SHOW_TIME("pa_context_new (call)");
    if (!(context = pa_context_new(pa_threaded_mainloop_get_api(mainloop), "eSpeak"))) {
      SHOW("Failed to allocate context\n","");
      goto unlock_and_fail;
    }

    pa_context_set_state_callback(context, context_state_cb, NULL);
    pa_context_set_subscribe_callback(context, subscribe_cb, NULL);

    SHOW_TIME("pa_context_connect (call)");
    if (pa_context_connect(context, NULL, (pa_context_flags_t)0, NULL) < 0) {
        SHOW("Failed to connect to server: %s", pa_strerror(pa_context_errno(context)));
	ret = PULSE_NO_CONNECTION;
        goto unlock_and_fail;
    }

    SHOW_TIME("pa_threaded_mainloop_start (call)");
    if (pa_threaded_mainloop_start(mainloop) < 0) {
      SHOW("Failed to start main loop","");
        goto unlock_and_fail;
    }

    /* Wait until the context is ready */
    SHOW_TIME("pa_threaded_mainloop_wait");
    pa_threaded_mainloop_wait(mainloop);

    if (pa_context_get_state(context) != PA_CONTEXT_READY) {
        SHOW("Failed to connect to server: %s", pa_strerror(pa_context_errno(context)));
	ret = PULSE_NO_CONNECTION;
 	if (mainloop)
 	  pa_threaded_mainloop_stop(mainloop);
        goto unlock_and_fail;
    }

    SHOW_TIME("pa_stream_new");
    if (!(stream = pa_stream_new(context, "unknown", &ss, NULL))) {
        SHOW("Failed to create stream: %s", pa_strerror(pa_context_errno(context)));
        goto unlock_and_fail;
    }

    pa_stream_set_state_callback(stream, stream_state_cb, NULL);
    pa_stream_set_write_callback(stream, stream_request_cb, NULL);
    pa_stream_set_latency_update_callback(stream, stream_latency_update_cb, NULL);

    pa_buffer_attr a_attr;

    a_attr.maxlength = MAXLENGTH;
    a_attr.tlength = TLENGTH;
    a_attr.prebuf = PREBUF;
    a_attr.minreq = MINREQ;
    a_attr.fragsize = 0;

    SHOW_TIME("pa_connect_playback");
    if (pa_stream_connect_playback(stream, NULL, &a_attr, (pa_stream_flags_t)(PA_STREAM_INTERPOLATE_TIMING|PA_STREAM_AUTO_TIMING_UPDATE), NULL, NULL) < 0) {
        SHOW("Failed to connect stream: %s", pa_strerror(pa_context_errno(context)));
        goto unlock_and_fail;
    }

    /* Wait until the stream is ready */
    SHOW_TIME("pa_threaded_mainloop_wait");
    pa_threaded_mainloop_wait(mainloop);

    if (pa_stream_get_state(stream) != PA_STREAM_READY) {
        SHOW("Failed to connect stream: %s", pa_strerror(pa_context_errno(context)));
        goto unlock_and_fail;
    }

    /* Now subscribe to events */
    SHOW_TIME("pa_context_subscribe");
    if (!(o = pa_context_subscribe(context, PA_SUBSCRIPTION_MASK_SINK_INPUT, context_success_cb, &success))) {
        SHOW("pa_context_subscribe() failed: %s", pa_strerror(pa_context_errno(context)));
        goto unlock_and_fail;
    }
    
    success = 0;
    SHOW_TIME("pa_threaded_mainloop_wait");
    while (pa_operation_get_state(o) != PA_OPERATION_DONE) {
        CHECK_DEAD_GOTO(fail, 1);
        pa_threaded_mainloop_wait(mainloop);
    }

    pa_operation_unref(o);

    if (!success) {
        SHOW("pa_context_subscribe() failed: %s", pa_strerror(pa_context_errno(context)));
        goto unlock_and_fail;
    }

    do_trigger = 0;
    written = 0;
    time_offset_msec = 0;
    just_flushed = 0;
    connected = 1;
    
    pa_threaded_mainloop_unlock(mainloop);
    SHOW_TIME("pulse_open (ret true)");
   
    return PULSE_OK;

unlock_and_fail:

    if (o)
        pa_operation_unref(o);
    
    pa_threaded_mainloop_unlock(mainloop);
    
fail:

    //    pulse_close();

  if (ret == PULSE_NO_CONNECTION) {
    if (context) {
      SHOW_TIME("pa_context_disconnect (call)");
      pa_context_disconnect(context);
      pa_context_unref(context);
      context = NULL;
    }
  
    if (mainloop) {
      SHOW_TIME("pa_threaded_mainloop_free (call)");
      pa_threaded_mainloop_free(mainloop);
      mainloop = NULL;
    }  
  } 
  else {
    pulse_close();
  }

  SHOW_TIME("pulse_open (ret false)");
  
  return ret;

}
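context_state_cb and stream_state_cb are not shown in this excerpt. With a threaded mainloop they typically just wake the waiting thread; a minimal sketch of the context callback, assuming the same global mainloop:

static void context_state_cb(pa_context *c, void *userdata) {
    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_READY:
        case PA_CONTEXT_TERMINATED:
        case PA_CONTEXT_FAILED:
            pa_threaded_mainloop_signal(mainloop, 0);
            break;
        default:
            break;
    }
}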
Example #24
MediaRecorder::MediaRecorder(const char * outfile,int width, int height)
{
    audiofailed = false;
    /* INIT SOUND RECORDING */

    debug_samples_out = fopen("audiosamples.s16","wb");
    audio_samples_written = 0;
    pa_context* pactx;
    pa_mainloop * m = pa_mainloop_new();
    m_api = pa_mainloop_get_api(m);
    pactx = pa_context_new(m_api,"Rec1");
    if ( pa_context_connect(pactx,NULL,(pa_context_flags_t)0,NULL) < 0 )
        printf("Cannot connect to pulseaudio\n");
    int ret;
    pa_context_set_state_callback(pactx, context_state_callback, this);
    pa_mainloop_run(m,&ret);
    std::cout << "Use source: " << monitorsources[defaultsink] << std::endl;

    static const pa_sample_spec ss = {
        .format = PA_SAMPLE_S16LE,
        .rate = 44100,
        .channels = 2
    };
    pa_context_disconnect(pactx);

    int error;
    s = pa_simple_new(NULL,"GLCAP Record",PA_STREAM_RECORD,monitorsources[defaultsink].c_str(), "record", &ss, NULL,NULL , &error);
    if ( !s )
    {
        printf("Cannot create pa_simple\n");
    }

    run = true;
    ready = false;
    firstframe = true;
    this->width = width;
    this->height = height;
    pthread_mutex_init(&encode_mutex,NULL);
    pthread_mutex_init(&sound_buffer_lock,NULL);
    pthread_cond_init(&encode_cond,NULL);
    pthread_create(&encode_thread,NULL,(void*(*)(void*))&MediaRecorder::EncodingThread,this);


    av_log_set_level(AV_LOG_DEBUG);
    outCtx = avformat_alloc_context();

    outCtx->oformat = av_guess_format(NULL, outfile, NULL);
    snprintf(outCtx->filename, sizeof(outCtx->filename), "%s", outfile);
    codec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
    acodec = avcodec_find_encoder(AV_CODEC_ID_MP2);
    ctx = avcodec_alloc_context3(codec);
    actx = avcodec_alloc_context3(acodec);
    avcodec_get_context_defaults3(actx,acodec);
    avcodec_get_context_defaults3(ctx,codec);
    ctx->width = width;
    ctx->height = height;
    ctx->bit_rate = 6000*1000;
    
    std::cout << ctx->time_base.den << " " << ctx->time_base.num << std::endl;
    
    ctx->time_base.den = TIMEBASE;
    ctx->time_base.num = 1;

    ctx->thread_count = 4;
    ctx->qmin = 2;
    ctx->qmax = 31;
    ctx->b_sensitivity = 100;
    ctx->gop_size = 1;
    ctx->me_method = 1;
    ctx->global_quality = 100;
    ctx->lowres = 0;
    
    ctx->bit_rate_tolerance = 200000;
    actx->sample_fmt = AV_SAMPLE_FMT_S16;
    actx->sample_rate = 44100;
    actx->channels = 2;
    actx->time_base.den = 44100;
    actx->time_base.num = 1;
    actx->bit_rate = 128000;
    actx->frame_size = 8192;
    actx->channel_layout = 3;
    /*
    ctx->compression_level = 0;
    ctx->trellis = 0;
    ctx->gop_size = 1;  // emit one intra frame every ten frames
    ctx->me_pre_cmp = 0;
    ctx->me_cmp = 0;
    ctx->me_sub_cmp = 0;
    ctx->mb_cmp = 2;
    ctx->pre_dia_size = 0;
    ctx->dia_size = 1;

    ctx->quantizer_noise_shaping = 0; // qns=0
    ctx->noise_reduction = 0; // nr=0
    ctx->mb_decision = 0; // mbd=0 ("realtime" encoding)

    ctx->flags &= ~CODEC_FLAG_QPEL;
    ctx->flags &= ~CODEC_FLAG_4MV;
    ctx->trellis = 0;
    ctx->flags &= ~CODEC_FLAG_CBP_RD;
    ctx->flags &= ~CODEC_FLAG_QP_RD;
    ctx->flags &= ~CODEC_FLAG_MV0;
    */
    //ctx->s
    ctx->pix_fmt = PIX_FMT_YUV420P;



    if (avcodec_open2(ctx, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
    }
    if (avcodec_open2(actx, acodec, NULL) < 0) {
        fprintf(stderr, "Could not open audio codec\n");
        audiofailed = true;
    }
    printf("frame_size: %d\n",actx->frame_size);
    pthread_create(&record_sound_thread,NULL,(void*(*)(void*))&MediaRecorder::RecordingThread,this);
    AVStream* vs = av_new_stream(outCtx,0); /* video stream */
    vs->codec = ctx;
    vs->r_frame_rate.den = TIMEBASE;
    vs->r_frame_rate.num = 1;
    if (!audiofailed)
    {
        AVStream* as = av_new_stream(outCtx,1); /* audio stream */
        as->codec = actx;
        as->r_frame_rate.den = 44100;
        as->r_frame_rate.num = 1;
    }
    picture = alloc_picture(PIX_FMT_YUV420P, ctx->width, ctx->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }
    tmp_picture = NULL;

    tmp_picture = alloc_picture(PIX_FMT_RGBA, ctx->width, ctx->height);
    if (!tmp_picture) {
        fprintf(stderr, "Could not allocate temporary picture\n");
        exit(1);
    }

    img_convert_ctx = sws_getContext(ctx->width, ctx->height,
                                     PIX_FMT_RGBA,
                                     ctx->width, ctx->height,
                                     PIX_FMT_YUV420P,
                                     SWS_FAST_BILINEAR , NULL, NULL, NULL);
    if (img_convert_ctx == NULL) {
        fprintf(stderr,
                "Cannot initialize the conversion context\n");
        exit(1);
    }
    av_dump_format(outCtx, 0, outfile, 1);
    avio_open2(&outCtx->pb, outfile, AVIO_FLAG_WRITE,NULL,NULL);
    avformat_write_header(outCtx,NULL);
}

MediaRecorder::~MediaRecorder()
{
    run = false;
    ready = false;
    pthread_cond_broadcast(&encode_cond);
    printf("Joining thread..\n");
    pthread_join(encode_thread,NULL);
    printf("Joining recording thread..\n");
    pthread_join(record_sound_thread,NULL);
    printf("Done\n");
    av_write_trailer(outCtx);
    av_free(picture);
    avformat_free_context(outCtx);
    pa_simple_free(s);
    fclose(debug_samples_out);
}
int fcount = 0;
void MediaRecorder::AppendFrame(float time, int width, int height, char* data)
{
    if ( !ready )
        return ;
    printf("AppendFrame\n");
    this->time = getcurrenttime2();
    if ( firstframe )
    {
        starttime = getcurrenttime2();
        firstframe = false;
    }
    this->height = height;
    this->width = width;
    m_data = data;
    ready = false;
    pthread_cond_broadcast(&encode_cond);
    
    /*int i = 0;
     unsigned int numpixels = width * height;
     unsigned int ui = numpixels;
     unsigned int vi = numpixels + numpixels / 4;
     for ( int j = 0; j < height; j++ )
     {
         for ( int k = 0; k < width; k++ )
         {
             int sR = data[i*4+0];
             int sG = data[i*4+1];
             int sB = data[i*4+2];
             picture->data[0][i] = ( (66*sR + 129*sG + 25*sB + 128) >> 8) + 16;
             if (0 == j%2 && 0 == k%2)
             {
                 picture->data[0][ui++] = ( (-38*sR - 74*sG + 112*sB + 128) >> 8) + 128;
                 picture->data[0][vi++] = ( (112*sR - 94*sG - 18*sB + 128) >> 8) + 128;
             }
             i++;

         }
     }*/


    // printf("End flip %f\n",(float)getcurrenttime2());

    //memcpy(tmp_picture->data[0],data,width*height*4);

}

void MediaRecorder::EncodingThread()
{
    while ( run )
    {
        printf("Encode thread ready\n");
        ready = true;
        pthread_cond_wait(&encode_cond,&encode_mutex);
        
        if (!run)
        {
            printf("Encoding finished\n");
            break;
        }
        for ( int y = 0; y < height; y++ )
        {
            /*for ( int x = 0; x < width; x++ )
            {*/
            char r,g,b;
            int oldindex = (y*width);
            int newindex = ((height-1-y)*width);
            memcpy(&tmp_picture->data[0][(newindex)*4],&m_data[oldindex*4],width*4);
            /* r = data[oldindex*4+0];
            g = data[oldindex*4+1];
            b = data[oldindex*4+2];
            tmp_picture->data[0][(newindex)*4+0] = r;
            tmp_picture->data[0][(newindex)*4+1] = g;
            tmp_picture->data[0][(newindex)*4+2] = b; */
            // }

        }
        sws_scale(img_convert_ctx,tmp_picture->data,tmp_picture->linesize,0,height,picture->data,picture->linesize);


        AVPacket p;

        av_init_packet(&p);
        p.data = NULL;
        p.size = 0;
        picture->pts = int64_t((time-starttime)*TIMEBASE);
        
        uint64_t vpts = picture->pts;
     
        // picture->pts = time*30.0;
        int got_frame;
        printf("%p %p\n",ctx, picture);
        if(avcodec_encode_video2(ctx, &p, picture, &got_frame) < 0) return;
        if(got_frame)
        {

            // outContainer is "mp4"

            p.pts = vpts;
            p.dts = AV_NOPTS_VALUE;
            av_write_frame(outCtx, &p);

            av_free_packet(&p);
        }
        //sleep(1);
        printf("End enc frame %f, pts %lld\n",(float)getcurrenttime2(),picture->pts);
        
       
        AVFrame * aframe = avcodec_alloc_frame();

        
        bool unlocked = false;
        
        while ( sound_buffers.size() > 0 )
        {
            uint64_t apts = audio_samples_written;
           /* if ( apts > vpts )
                break;*/
            pthread_mutex_lock(&sound_buffer_lock);
            short * buf = sound_buffers.front();
            sound_buffers.pop_front();
            pthread_mutex_unlock(&sound_buffer_lock);
             if (!audiofailed )
            {
                
                
                unlocked = true;
                aframe->nb_samples = actx->frame_size;
                aframe->channel_layout = actx->channel_layout;
                aframe->format = AV_SAMPLE_FMT_S16;
                aframe->channels = actx->channels;
                avcodec_fill_audio_frame(aframe,actx->channels,AV_SAMPLE_FMT_S16,(uint8_t*)buf,actx->frame_size*2*2,0);
              //  avcodec_fill_audio_frame(aframe,actx->channels,actx->sample_fmt,(char*)buf,actx->frame_size*2,0);

                printf("sound_buffers.size() = %zu\n",sound_buffers.size());
                av_init_packet(&p);
                p.data = NULL;
                p.size = 0;

                
                avcodec_encode_audio2(actx,&p,aframe,&got_frame);
                if ( got_frame )
                {
                    p.stream_index = 1;
                    p.flags |= AV_PKT_FLAG_KEY;
                    
                    

                    av_write_frame(outCtx,&p);
                    av_free_packet(&p);
                }
                audio_samples_written += actx->frame_size;//samples/2 each channel
            }
            //printf("Consumed 1024 samples\n");
            delete[] buf;
            
        }
/*        if ( !unlocked )
            pthread_mutex_unlock(&sound_buffer_lock);*/
        avcodec_free_frame(&aframe);
        
    }
}

bool MediaRecorder::isReady()
{
    return ready;
}
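
The RecordingThread started in the constructor is not shown above. A minimal sketch under the same assumptions (the run/s/sound_buffers/sound_buffer_lock members used in the constructor and in EncodingThread, blocks of actx->frame_size samples per channel) could look like the following; it is illustrative, not the original implementation:

void MediaRecorder::RecordingThread()
{
    /* 8192 samples per channel * 2 channels, matching the
     * actx->frame_size*2*2 bytes consumed per buffer in EncodingThread() */
    const size_t nsamples = 8192 * 2;
    while (run)
    {
        short *buf = new short[nsamples];
        int error = 0;
        if (pa_simple_read(s, buf, nsamples * sizeof(short), &error) < 0)
        {
            printf("pa_simple_read() failed: %s\n", pa_strerror(error));
            delete[] buf;
            break;
        }
        pthread_mutex_lock(&sound_buffer_lock);
        sound_buffers.push_back(buf);   /* consumed (and freed) by EncodingThread() */
        pthread_mutex_unlock(&sound_buffer_lock);
    }
}
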
Example #25
0
int set_active_port(pa_devicelist_t device, pa_portlist_t port) {
    pa_mainloop *pa_ml;
    pa_mainloop_api *pa_mlapi;
    pa_operation *pa_op;
    pa_context *pa_ctx;
    
    int pa_ready = 0;
    int state = 0;
    
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(pa_ml);
    pa_ctx = pa_context_new(pa_mlapi, "test");
    pa_context_connect(pa_ctx, NULL, 0, NULL);
    pa_context_set_state_callback(pa_ctx, pa_state_cb, &pa_ready);
    
    pa_device_port_t dev_port_set;
    dev_port_set.device = device;
    dev_port_set.port = port;
    
    pa_clientlist_t clientlist[30];
    
    int i = 0;
    
    for (;;) {
        // We can't do anything until PA is ready, so just iterate the mainloop
        // and continue
        if (pa_ready == 0) {
            pa_mainloop_iterate(pa_ml, 1, NULL);
            continue;
        }
        // We couldn't get a connection to the server, so exit out
        if (pa_ready == 2) {
            pa_context_disconnect(pa_ctx);
            pa_context_unref(pa_ctx);
            pa_mainloop_free(pa_ml);
            return -1;
        }
        
        switch(state) {
            case 0:
                // Set source or sink
                switch(device.type) {
                    case JOAPA_SOURCE:
                        pa_op = pa_context_set_source_port_by_index(
                                pa_ctx, 
                                device.index, 
                                port.name, 
                                set_active_port_cb, 
                                &dev_port_set);
                        break;
                    case JOAPA_SINK:
                        pa_op = pa_context_set_sink_port_by_index(
                                pa_ctx, 
                                device.index, 
                                port.name, 
                                set_active_port_cb, 
                                &dev_port_set);
                        break;
                }
                state++;
                break;
            case 1:
                // get clients using a source or sink
                if (pa_operation_get_state(pa_op) == PA_OPERATION_DONE) {
                    pa_operation_unref(pa_op);
                    switch(device.type) {
                        case JOAPA_SOURCE:
                            pa_op = pa_context_get_source_output_info_list(pa_ctx, pa_source_info_cb, &clientlist);
                            break;
                        case JOAPA_SINK:
                            pa_op = pa_context_get_sink_input_info_list(pa_ctx, pa_sink_info_cb, &clientlist);
                            break;
                    }
                    state++;
                }
                break;
            case 2:
                // move the clients to the new source or sink
                if (pa_operation_get_state(pa_op) == PA_OPERATION_DONE) {
                    pa_operation_unref(pa_op);
                    
                    if(!clientlist[i].initialized) {
                        pa_context_disconnect(pa_ctx);
                        pa_context_unref(pa_ctx);
                        pa_mainloop_free(pa_ml);
                        return 0;
                    }
                    //printf("CLIENT: %d\n", clientlist[i].index);
                    
                    switch(device.type) {
                        case JOAPA_SOURCE:
                            pa_op = pa_context_move_source_output_by_index(
                                    pa_ctx,
                                    clientlist[i].index, 
                                    device.index, 
                                    set_active_port_cb, NULL);
                            break;
                        case JOAPA_SINK:
                            pa_op = pa_context_move_sink_input_by_index(
                                    pa_ctx, 
                                    clientlist[i].index, 
                                    device.index, 
                                    set_active_port_cb, NULL);
                            break;
                    }
                    
                    i++;
                }
                break;
            default:
                fprintf(stderr, "in state %d\n", state);
                return -1;
        }
        pa_mainloop_iterate(pa_ml, 1, NULL);       
    }

}
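
The pa_state_cb registered above is defined elsewhere; here is a sketch of the conventional version, matching the way pa_ready is polled in the loop (0 = still connecting, 1 = ready, 2 = failed or terminated):

/* Illustrative sketch of the state callback assumed by set_active_port(). */
void pa_state_cb(pa_context *c, void *userdata) {
    int *pa_ready = (int *) userdata;

    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_FAILED:
        case PA_CONTEXT_TERMINATED:
            *pa_ready = 2;
            break;
        case PA_CONTEXT_READY:
            *pa_ready = 1;
            break;
        default:
            break; /* still connecting, authorizing or setting the client name */
    }
}
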
Example #26
0
/* common */
static void *qpa_audio_init(Audiodev *dev)
{
    paaudio *g;
    AudiodevPaOptions *popts = &dev->u.pa;
    const char *server;

    if (!popts->has_server) {
        char pidfile[64];
        char *runtime;
        struct stat st;

        runtime = getenv("XDG_RUNTIME_DIR");
        if (!runtime) {
            return NULL;
        }
        snprintf(pidfile, sizeof(pidfile), "%s/pulse/pid", runtime);
        if (stat(pidfile, &st) != 0) {
            return NULL;
        }
    }

    assert(dev->driver == AUDIODEV_DRIVER_PA);

    g = g_malloc(sizeof(paaudio));
    server = popts->has_server ? popts->server : NULL;

    g->dev = dev;
    g->mainloop = NULL;
    g->context = NULL;

    g->mainloop = pa_threaded_mainloop_new ();
    if (!g->mainloop) {
        goto fail;
    }

    g->context = pa_context_new (pa_threaded_mainloop_get_api (g->mainloop),
                                 server);
    if (!g->context) {
        goto fail;
    }

    pa_context_set_state_callback (g->context, context_state_cb, g);

    if (pa_context_connect(g->context, server, 0, NULL) < 0) {
        qpa_logerr (pa_context_errno (g->context),
                    "pa_context_connect() failed\n");
        goto fail;
    }

    pa_threaded_mainloop_lock (g->mainloop);

    if (pa_threaded_mainloop_start (g->mainloop) < 0) {
        goto unlock_and_fail;
    }

    for (;;) {
        pa_context_state_t state;

        state = pa_context_get_state (g->context);

        if (state == PA_CONTEXT_READY) {
            break;
        }

        if (!PA_CONTEXT_IS_GOOD (state)) {
            qpa_logerr (pa_context_errno (g->context),
                        "Wrong context state\n");
            goto unlock_and_fail;
        }

        /* Wait until the context is ready */
        pa_threaded_mainloop_wait (g->mainloop);
    }

    pa_threaded_mainloop_unlock (g->mainloop);

    return g;

unlock_and_fail:
    pa_threaded_mainloop_unlock (g->mainloop);
fail:
    AUD_log (AUDIO_CAP, "Failed to initialize PA context");
    qpa_audio_fini(g);
    return NULL;
}
Example #27
0
static void *pulse_init(const char *device, unsigned rate, unsigned latency)
{
   pa_sample_spec spec;
   pa_t *pa;
   pa_buffer_attr buffer_attr = {0};
   const pa_buffer_attr *server_attr = NULL;

   memset(&spec, 0, sizeof(spec));
   
   pa = (pa_t*)calloc(1, sizeof(*pa));
   if (!pa)
      goto error;

   pa->mainloop = pa_threaded_mainloop_new();
   if (!pa->mainloop)
      goto error;

   pa->context = pa_context_new(pa_threaded_mainloop_get_api(pa->mainloop), "RetroArch");
   if (!pa->context)
      goto error;

   pa_context_set_state_callback(pa->context, context_state_cb, pa);

   if (pa_context_connect(pa->context, device, PA_CONTEXT_NOFLAGS, NULL) < 0)
      goto error;

   pa_threaded_mainloop_lock(pa->mainloop);
   if (pa_threaded_mainloop_start(pa->mainloop) < 0)
      goto unlock_error;

   pa_threaded_mainloop_wait(pa->mainloop);

   if (pa_context_get_state(pa->context) != PA_CONTEXT_READY)
      goto unlock_error;

   spec.format = is_little_endian() ? PA_SAMPLE_FLOAT32LE : PA_SAMPLE_FLOAT32BE;
   spec.channels = 2;
   spec.rate = rate;

   pa->stream = pa_stream_new(pa->context, "audio", &spec, NULL);
   if (!pa->stream)
      goto unlock_error;

   pa_stream_set_state_callback(pa->stream, stream_state_cb, pa);
   pa_stream_set_write_callback(pa->stream, stream_request_cb, pa);
   pa_stream_set_latency_update_callback(pa->stream, stream_latency_update_cb, pa);
   pa_stream_set_underflow_callback(pa->stream, underrun_update_cb, pa);
   pa_stream_set_buffer_attr_callback(pa->stream, buffer_attr_cb, pa);

   buffer_attr.maxlength = -1;
   buffer_attr.tlength = pa_usec_to_bytes(latency * PA_USEC_PER_MSEC, &spec);
   buffer_attr.prebuf = -1;
   buffer_attr.minreq = -1;
   buffer_attr.fragsize = -1;

   if (pa_stream_connect_playback(pa->stream, NULL, &buffer_attr, PA_STREAM_ADJUST_LATENCY, NULL, NULL) < 0)
      goto unlock_error;

   pa_threaded_mainloop_wait(pa->mainloop);

   if (pa_stream_get_state(pa->stream) != PA_STREAM_READY)
      goto unlock_error;

   server_attr = pa_stream_get_buffer_attr(pa->stream);
   if (server_attr)
   {
      pa->buffer_size = server_attr->tlength;
      RARCH_LOG("[PulseAudio]: Requested %u bytes buffer, got %u.\n",
            (unsigned)buffer_attr.tlength,
            (unsigned)pa->buffer_size);
   }
   else
      pa->buffer_size = buffer_attr.tlength;

   pa_threaded_mainloop_unlock(pa->mainloop);

   return pa;

unlock_error:
   pa_threaded_mainloop_unlock(pa->mainloop);
error:
   pulse_free(pa); 
   return NULL;
}
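
Once pulse_init() returns, data is typically pushed to the stream with pa_stream_write() while holding the threaded-mainloop lock. The following is a hedged sketch of such a write path (not the original implementation); it assumes stream_request_cb signals the mainloop when the server requests more data, and uses the pa_t fields referenced above:

static size_t pulse_write_sketch(pa_t *pa, const void *buf, size_t size)
{
   const uint8_t *p = (const uint8_t*)buf;
   size_t written = 0;

   pa_threaded_mainloop_lock(pa->mainloop);
   while (written < size)
   {
      size_t writable = pa_stream_writable_size(pa->stream);
      if (writable == (size_t)-1)
         break;                                   /* stream is in a failed state */
      if (writable == 0)
      {
         /* wait for stream_request_cb to signal that space is available */
         pa_threaded_mainloop_wait(pa->mainloop);
         continue;
      }
      if (writable > size - written)
         writable = size - written;

      pa_stream_write(pa->stream, p + written, writable, NULL, 0, PA_SEEK_RELATIVE);
      written += writable;
   }
   pa_threaded_mainloop_unlock(pa->mainloop);

   return written;
}
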
Example #28
0
void QPulseAudioThread::run()
{

    const char * error = "FOO";
    int ret = 1, r, c;
    const char *bn = "projectM";
    pa_operation * operation ;
    sample_spec.format = PA_SAMPLE_FLOAT32LE;
    sample_spec.rate = 44100;
    sample_spec.channels = 2;
    pa_context_flags_t flags = ( pa_context_flags_t ) 0;

    verbose = 0;

    if ( !pa_sample_spec_valid ( &sample_spec ) ) {
        fprintf ( stderr, "Invalid sample specification\n" );
        goto quit;
    }

    if ( channel_map_set && channel_map.channels != sample_spec.channels ) {
        fprintf ( stderr, "Channel map doesn't match sample specification\n" );
        goto quit;
    }

    if ( verbose ) {
        char t[PA_SAMPLE_SPEC_SNPRINT_MAX];
        pa_sample_spec_snprint ( t, sizeof ( t ), &sample_spec );
        fprintf ( stderr, "Opening a %s stream with sample specification '%s'.\n", "recording" , t );
    }


    if ( !client_name )
        client_name = pa_xstrdup ( bn );

    //printf("client name:%s", client_name);
    if ( !stream_name )
        stream_name = pa_xstrdup ( client_name );

    /* Set up a new main loop */
    if ( ! ( mainloop = pa_threaded_mainloop_new() ) ) {
        fprintf ( stderr, "pa_mainloop_new() failed.\n" );
        goto quit;
    }

    mainloop_api = pa_threaded_mainloop_get_api ( mainloop );

    r = pa_signal_init ( mainloop_api );
    assert ( r == 0 );
    pa_signal_new ( SIGINT, exit_signal_callback, NULL );
    pa_signal_new ( SIGTERM, exit_signal_callback, NULL );

#ifdef SIGUSR1
    pa_signal_new ( SIGUSR1, sigusr1_signal_callback, NULL );
#endif
#ifdef SIGPIPE
    signal ( SIGPIPE, SIG_IGN );
#endif
    /*
    	if ( ! ( stdio_event = mainloop_api->io_new ( mainloop_api,
    	                       STDOUT_FILENO,
    	                       PA_IO_EVENT_OUTPUT,
    	                       stdout_callback, s_qprojectM_MainWindowPtr ) ) )
    	{
    		fprintf ( stderr, "io_new() failed.\n" );
    		goto quit;
    	}
    */

    /* Create a new connection context */
    if ( ! ( context = pa_context_new ( mainloop_api, client_name ) ) ) {
        fprintf ( stderr, "pa_context_new() failed.\n" );
        goto quit;
    }

    pa_context_set_state_callback ( context, context_state_callback, &s_sourceList );
    pa_context_connect ( context, server, flags, NULL );

    if ( verbose ) {
        struct timeval tv;

        pa_gettimeofday ( &tv );
        pa_timeval_add ( &tv, TIME_EVENT_USEC );

        if ( ! ( time_event = mainloop_api->time_new ( mainloop_api, &tv, time_event_callback, NULL ) ) ) {
            fprintf ( stderr, "time_new() failed.\n" );
            goto quit;
        }
    }


    /* Run the main loop */
    if ( pa_threaded_mainloop_start ( mainloop ) < 0 ) {
        fprintf ( stderr, "pa_mainloop_run() failed.\n" );
        goto quit;
    }
quit:
    emit(threadCleanedUp());
    return ;
}
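
The record stream that eventually feeds PCM to projectM is created from the context/source callbacks, which are not part of this excerpt. For a capture client like this, the read callback conventionally drains the server-side buffer with pa_stream_peek()/pa_stream_drop(); the function below is a purely illustrative sketch, not the original code:

static void stream_read_callback ( pa_stream *s, size_t length, void *userdata )
{
    const void *data;
    (void) userdata;

    if ( pa_stream_peek ( s, &data, &length ) < 0 ) {
        fprintf ( stderr, "pa_stream_peek() failed.\n" );
        return;
    }
    if ( length == 0 )
        return;                 /* nothing buffered yet */

    if ( data != NULL ) {
        /* hand the interleaved float samples to the consumer here */
    }
    pa_stream_drop ( s );       /* also discards holes (data == NULL) */
}
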
Example #29
0
/* Initialization */
std::string RageSoundDriver_PulseAudio::Init()
{
	int error = 0;

	LOG->Trace("Pulse: pa_threaded_mainloop_new()...");
	m_PulseMainLoop = pa_threaded_mainloop_new();
	if(m_PulseMainLoop == nullptr)
	{
		return "pa_threaded_mainloop_new() failed!";
	}

#ifdef PA_PROP_APPLICATION_NAME /* proplist available only since 0.9.11 */
	pa_proplist *plist = pa_proplist_new();
	pa_proplist_sets(plist, PA_PROP_APPLICATION_NAME, PACKAGE_NAME);
	pa_proplist_sets(plist, PA_PROP_APPLICATION_VERSION, PACKAGE_VERSION);
	pa_proplist_sets(plist, PA_PROP_MEDIA_ROLE, "game");

	LOG->Trace("Pulse: pa_context_new_with_proplist()...");

	m_PulseCtx = pa_context_new_with_proplist(
			pa_threaded_mainloop_get_api(m_PulseMainLoop),
			"StepMania", plist);
	pa_proplist_free(plist);

	if(m_PulseCtx == nullptr)
	{
		return "pa_context_new_with_proplist() failed!";
	}
#else
	LOG->Trace("Pulse: pa_context_new()...");
	m_PulseCtx = pa_context_new(
			pa_threaded_mainloop_get_api(m_PulseMainLoop),
			"Stepmania");
	if(m_PulseCtx == nullptr)
	{
		return "pa_context_new() failed!";
	}
#endif

	pa_context_set_state_callback(m_PulseCtx, StaticCtxStateCb, this);

	LOG->Trace("Pulse: pa_context_connect()...");
	error = pa_context_connect(m_PulseCtx, nullptr, (pa_context_flags_t)0, nullptr);

	if(error < 0)
	{
		return fmt::sprintf("pa_contect_connect(): %s",
			pa_strerror(pa_context_errno(m_PulseCtx)));
	}

	LOG->Trace("Pulse: pa_threaded_mainloop_start()...");
	error = pa_threaded_mainloop_start(m_PulseMainLoop);
	if(error < 0)
	{
		return fmt::sprintf("pa_threaded_mainloop_start() returned %i", error);
	}

	/* Create the decode thread; it will be needed for Mix(), which we
	 * will use as soon as a stream is ready. */
	StartDecodeThread();

	/* Wait for the pulseaudio stream to be ready before returning.
	 * An error may occur; if it does, m_Error becomes non-nullptr. */
	m_Sem.Wait();

	if(m_Error == nullptr)
	{
		return "";
	}
	else
	{
		return m_Error;
	}
}
Example #30
0
/**
 * Initializes the PulseAudio main loop and connects to the PulseAudio server.
 * @return a PulseAudio context on success, or NULL on error
 */
pa_context *vlc_pa_connect (vlc_object_t *obj, pa_threaded_mainloop **mlp)
{
    msg_Dbg (obj, "using library version %s", pa_get_library_version ());
    msg_Dbg (obj, " (compiled with version %s, protocol %u)",
             pa_get_headers_version (), PA_PROTOCOL_VERSION);

    /* Initialize main loop */
    pa_threaded_mainloop *mainloop = pa_threaded_mainloop_new ();
    if (unlikely(mainloop == NULL))
        return NULL;

    if (pa_threaded_mainloop_start (mainloop) < 0)
    {
        pa_threaded_mainloop_free (mainloop);
        return NULL;
    }

    /* Fill in context (client) properties */
    char *ua = var_InheritString (obj, "user-agent");
    pa_proplist *props = pa_proplist_new ();
    if (likely(props != NULL))
    {
        pa_proplist_sets (props, PA_PROP_APPLICATION_NAME, ua);
        pa_proplist_sets (props, PA_PROP_APPLICATION_ID, "org.VideoLAN.VLC");
        pa_proplist_sets (props, PA_PROP_APPLICATION_VERSION, PACKAGE_VERSION);
        pa_proplist_sets (props, PA_PROP_APPLICATION_ICON_NAME, PACKAGE_NAME);
        //pa_proplist_sets (props, PA_PROP_APPLICATION_LANGUAGE, _("C"));
        pa_proplist_sets (props, PA_PROP_APPLICATION_LANGUAGE,
                          setlocale (LC_MESSAGES, NULL));

        pa_proplist_setf (props, PA_PROP_APPLICATION_PROCESS_ID, "%lu",
                          (unsigned long) getpid ());
        //pa_proplist_sets (props, PA_PROP_APPLICATION_PROCESS_BINARY,
        //                  PACKAGE_NAME);

        for (size_t max = sysconf (_SC_GETPW_R_SIZE_MAX), len = max % 1024 + 1024;
             len < max; len += 1024)
        {
            struct passwd pwbuf, *pw;
            char buf[len];

            if (getpwuid_r (getuid (), &pwbuf, buf, sizeof (buf), &pw) == 0
             && pw != NULL)
                pa_proplist_sets (props, PA_PROP_APPLICATION_PROCESS_USER,
                                  pw->pw_name);
        }

        for (size_t max = sysconf (_SC_HOST_NAME_MAX), len = max % 1024 + 1024;
             len < max; len += 1024)
        {
            char hostname[len];

            if (gethostname (hostname, sizeof (hostname)) == 0)
                pa_proplist_sets (props, PA_PROP_APPLICATION_PROCESS_HOST,
                                  hostname);
        }

        const char *session = getenv ("XDG_SESSION_COOKIE");
        if (session != NULL)
        {
            pa_proplist_setf (props, PA_PROP_APPLICATION_PROCESS_MACHINE_ID,
                              "%.32s", session); /* XXX: is this valid? */
            pa_proplist_sets (props, PA_PROP_APPLICATION_PROCESS_SESSION_ID,
                              session);
        }
    }

    /* Connect to PulseAudio daemon */
    pa_context *ctx;
    pa_mainloop_api *api;

    pa_threaded_mainloop_lock (mainloop);
    api = pa_threaded_mainloop_get_api (mainloop);
    ctx = pa_context_new_with_proplist (api, ua, props);
    free (ua);
    if (props != NULL)
        pa_proplist_free (props);
    if (unlikely(ctx == NULL))
        goto fail;

    pa_context_set_state_callback (ctx, context_state_cb, mainloop);
    if (pa_context_connect (ctx, NULL, 0, NULL) < 0
     || context_wait (ctx, mainloop))
    {
        vlc_pa_error (obj, "PulseAudio server connection failure", ctx);
        pa_context_unref (ctx);
        goto fail;
    }
    msg_Dbg (obj, "connected %s to %s as client #%"PRIu32,
             pa_context_is_local (ctx) ? "locally" : "remotely",
             pa_context_get_server (ctx), pa_context_get_index (ctx));
    msg_Dbg (obj, "using protocol %"PRIu32", server protocol %"PRIu32,
             pa_context_get_protocol_version (ctx),
             pa_context_get_server_protocol_version (ctx));

    pa_threaded_mainloop_unlock (mainloop);
    *mlp = mainloop;
    return ctx;

fail:
    pa_threaded_mainloop_unlock (mainloop);
    pa_threaded_mainloop_stop (mainloop);
    pa_threaded_mainloop_free (mainloop);
    return NULL;
}
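
The context_wait() helper used above is not shown; here is a sketch of the conventional implementation, assuming context_state_cb() signals the mainloop on every state change (the caller holds the mainloop lock):

static int context_wait (pa_context *ctx, pa_threaded_mainloop *mainloop)
{
    for (;;)
    {
        pa_context_state_t state = pa_context_get_state (ctx);

        if (state == PA_CONTEXT_READY)
            return 0;
        if (!PA_CONTEXT_IS_GOOD (state))
            return -1;
        /* wait for context_state_cb() to signal a state change */
        pa_threaded_mainloop_wait (mainloop);
    }
}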