static void pulse_read_preprocess(MSFilter *f){
	PulseReadState *s=(PulseReadState *)f->data;
	int err;
	pa_sample_spec pss;
	pa_buffer_attr attr;

	if (context==NULL) return;
	
	pss.format=PA_SAMPLE_S16LE;
	pss.channels=s->channels;
	pss.rate=s->rate;
	
	attr.maxlength=-1;
	attr.tlength=-1;
	attr.prebuf=-1;
	attr.minreq=-1;
	attr.fragsize=s->fragsize=latency_req*(float)s->channels*(float)s->rate*2;
	
	s->stream=pa_stream_new(context,"phone",&pss,NULL);
	if (s->stream==NULL){
		ms_error("pa_stream_new() failed: %s",pa_strerror(pa_context_errno(context)));
		return;
	}
	pa_threaded_mainloop_lock(pa_loop);
	err=pa_stream_connect_record(s->stream,NULL,&attr, PA_STREAM_ADJUST_LATENCY);
	pa_threaded_mainloop_unlock(pa_loop);
	if (err!=0){
		ms_error("pa_stream_connect_record() failed");
	}
}
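The example above sizes fragsize by hand: latency_req (defined elsewhere in the original file and apparently a duration in seconds) multiplied by channel count, rate and 2 bytes per S16 sample. A minimal, purely illustrative sketch of the same conversion using the standard pa_usec_to_bytes() helper; the function below is not part of the original code.

#include <pulse/pulseaudio.h>

/* Illustrative helper: convert a requested latency in seconds into a
 * fragment size in bytes for the given sample spec. */
static uint32_t fragsize_for_latency(const pa_sample_spec *pss, double latency_sec)
{
	/* pa_usec_to_bytes() folds in rate, channel count and sample width,
	 * which is what the manual "* channels * rate * 2" above does for S16LE. */
	return (uint32_t) pa_usec_to_bytes((pa_usec_t) (latency_sec * PA_USEC_PER_SEC), pss);
}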
Example #2
JNIEXPORT void JNICALL
Java_com_harrcharr_pulse_Stream_connectRecord(
		JNIEnv *jenv, jobject jstream, jstring jdev) {
	pa_buffer_attr attr;
	pa_stream *stream = get_stream_ptr(jenv, jstream);

	memset(&attr, 0, sizeof(attr));
	attr.fragsize = sizeof(float);
	attr.maxlength = (uint32_t) -1;

    const char *dev;
    dev = (*jenv)->GetStringUTFChars(jenv, jdev, NULL);
    if (dev == NULL) {
        return; /* OutOfMemoryError already thrown */
    }

	if (pa_stream_connect_record(stream, dev, &attr,
			(pa_stream_flags_t) (PA_STREAM_DONT_INHIBIT_AUTO_SUSPEND|
					PA_STREAM_DONT_MOVE|PA_STREAM_PEAK_DETECT|
					PA_STREAM_ADJUST_LATENCY)) < 0) {
		LOGE("Failed to connect to stream");
		// Throw an exception to java
	}

    (*jenv)->ReleaseStringUTFChars(jenv, jdev, dev);
}
Example #3
/**
 * Start recording
 *
 * We request the default format used by pulse here because the data will be
 * converted and possibly re-sampled by obs anyway.
 *
 * For now we request a buffer length of 25ms although pulse seems to ignore
 * this setting for monitor streams. For "real" input streams this should work
 * fine though.
 */
static int_fast32_t pulse_start_recording(struct pulse_data *data)
{
	if (pulse_get_server_info(pulse_server_info, (void *) data) < 0) {
		blog(LOG_ERROR, "Unable to get server info !");
		return -1;
	}

	if (pulse_get_source_info(pulse_source_info, data->device,
			(void *) data) < 0) {
		blog(LOG_ERROR, "Unable to get source info !");
		return -1;
	}

	pa_sample_spec spec;
	spec.format   = data->format;
	spec.rate     = data->samples_per_sec;
	spec.channels = data->channels;

	if (!pa_sample_spec_valid(&spec)) {
		blog(LOG_ERROR, "Sample spec is not valid");
		return -1;
	}

	data->speakers = pulse_channels_to_obs_speakers(spec.channels);
	data->bytes_per_frame = pa_frame_size(&spec);

	data->stream = pulse_stream_new(obs_source_get_name(data->source),
		&spec, NULL);
	if (!data->stream) {
		blog(LOG_ERROR, "Unable to create stream");
		return -1;
	}

	pulse_lock();
	pa_stream_set_read_callback(data->stream, pulse_stream_read,
		(void *) data);
	pulse_unlock();

	pa_buffer_attr attr;
	attr.fragsize  = pa_usec_to_bytes(25000, &spec);
	attr.maxlength = (uint32_t) -1;
	attr.minreq    = (uint32_t) -1;
	attr.prebuf    = (uint32_t) -1;
	attr.tlength   = (uint32_t) -1;

	pa_stream_flags_t flags = PA_STREAM_ADJUST_LATENCY;

	pulse_lock();
	int_fast32_t ret = pa_stream_connect_record(data->stream, data->device,
		&attr, flags);
	pulse_unlock();
	if (ret < 0) {
		pulse_stop_recording(data);
		blog(LOG_ERROR, "Unable to connect to stream");
		return -1;
	}

	blog(LOG_INFO, "Started recording from '%s'", data->device);
	return 0;
}
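The example above registers pulse_stream_read as the read callback, but that function is not part of this snippet. Below is a generic, hypothetical sketch of what a record-stream read callback typically does with pa_stream_peek()/pa_stream_drop(); consume_audio() is a placeholder for whatever the application does with the captured frames, not an OBS function.

#include <pulse/pulseaudio.h>

/* Hypothetical consumer, provided elsewhere by the application. */
void consume_audio(void *userdata, const void *frames, size_t nbytes);

/* Illustrative read callback; not the actual pulse_stream_read used above. */
static void example_stream_read(pa_stream *stream, size_t nbytes, void *userdata)
{
	const void *frames;

	while (pa_stream_readable_size(stream) > 0) {
		if (pa_stream_peek(stream, &frames, &nbytes) < 0)
			return;
		if (frames)
			consume_audio(userdata, frames, nbytes);
		/* frames == NULL with nbytes > 0 marks a hole; it must be dropped too. */
		if (nbytes > 0)
			pa_stream_drop(stream);
	}
}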
Example #4
/*
 * Initialize PulseAudio
 */
static int init_pa(recorder_context_t *rctx)
{
    pa_mainloop_api *pa_mlapi;
    pa_proplist *ctx_properties = pa_proplist_new();
    pa_proplist *stream_properties = pa_proplist_new();
    int retval = 0;

    rctx->pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(rctx->pa_ml);
    rctx->pa_ctx = pa_context_new_with_proplist(pa_mlapi, 
                       "NoApp recorder", ctx_properties);
    pa_context_connect(rctx->pa_ctx, NULL, 0, NULL);

    rctx->pa_ready = 0;
    pa_context_set_state_callback(rctx->pa_ctx, pa_state_cb, rctx);
    while (rctx->pa_ready == 0){
        pa_mainloop_iterate(rctx->pa_ml, 1, NULL);
    }
    if (rctx->pa_ready == 2){
        retval = -1;
        goto exit;
    }

    rctx->recording_stream = pa_stream_new_with_proplist(rctx->pa_ctx,
                                 "NoApp recorder", &rctx->pa_ss, 
                                 NULL, stream_properties);
    retval = pa_stream_connect_record(rctx->recording_stream, NULL, NULL, 0);
    if (retval < 0){
        Log(LOG_ERR, "pa_stream_connect_record failed\n");
        goto exit;
    }

exit:
    return retval;
}
Example #5
static int instream_start_pa(SoundIoPrivate *si, SoundIoInStreamPrivate *is) {
    SoundIoInStream *instream = &is->pub;
    SoundIoInStreamPulseAudio *ispa = &is->backend_data.pulseaudio;
    SoundIoPulseAudio *sipa = &si->backend_data.pulseaudio;
    pa_threaded_mainloop_lock(sipa->main_loop);

    pa_stream_flags_t flags = PA_STREAM_AUTO_TIMING_UPDATE;
    if (instream->software_latency > 0.0)
        flags = (pa_stream_flags_t) (flags|PA_STREAM_ADJUST_LATENCY);

    int err = pa_stream_connect_record(ispa->stream,
            instream->device->id,
            &ispa->buffer_attr, flags);
    if (err) {
        pa_threaded_mainloop_unlock(sipa->main_loop);
        return SoundIoErrorOpeningDevice;
    }

    while (!ispa->stream_ready)
        pa_threaded_mainloop_wait(sipa->main_loop);

    pa_operation *update_timing_info_op = pa_stream_update_timing_info(ispa->stream, timing_update_callback, si);
    if ((err = perform_operation(si, update_timing_info_op))) {
        pa_threaded_mainloop_unlock(sipa->main_loop);
        return err;
    }


    pa_threaded_mainloop_unlock(sipa->main_loop);
    return 0;
}
Example #6
static int instream_start_pa(struct SoundIoPrivate *si, struct SoundIoInStreamPrivate *is) {
    struct SoundIoInStream *instream = &is->pub;
    struct SoundIoInStreamPulseAudio *ispa = &is->backend_data.pulseaudio;
    struct SoundIoPulseAudio *sipa = &si->backend_data.pulseaudio;
    pa_threaded_mainloop_lock(sipa->main_loop);

    pa_stream_flags_t flags = (pa_stream_flags_t)(PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_INTERPOLATE_TIMING);

    int err = pa_stream_connect_record(ispa->stream,
            instream->device->id,
            &ispa->buffer_attr, flags);
    if (err) {
        pa_threaded_mainloop_unlock(sipa->main_loop);
        return SoundIoErrorOpeningDevice;
    }

    while (!SOUNDIO_ATOMIC_LOAD(ispa->stream_ready))
        pa_threaded_mainloop_wait(sipa->main_loop);

    pa_operation *update_timing_info_op = pa_stream_update_timing_info(ispa->stream, timing_update_callback, si);
    if ((err = perform_operation(si, update_timing_info_op))) {
        pa_threaded_mainloop_unlock(sipa->main_loop);
        return err;
    }


    pa_threaded_mainloop_unlock(sipa->main_loop);
    return 0;
}
Example #7
/**
 * Pulseaudio context state callback
 */
static void
context_state_callback (pa_context * c,
			void *userdata)
{
  GNUNET_assert (c);

  switch (pa_context_get_state (c))
  {
  case PA_CONTEXT_CONNECTING:
  case PA_CONTEXT_AUTHORIZING:
  case PA_CONTEXT_SETTING_NAME:
    break;
  case PA_CONTEXT_READY:
  {
    int r;
    pa_buffer_attr na;

    GNUNET_assert (!stream_in);
    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
		_("Connection established.\n"));
    if (! (stream_in =
	   pa_stream_new (c, "GNUNET_VoIP recorder", &sample_spec, NULL)))
    {
      GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
		  _("pa_stream_new() failed: %s\n"),
		  pa_strerror (pa_context_errno (c)));
      goto fail;
    }
    pa_stream_set_state_callback (stream_in, &stream_state_callback, NULL);
    pa_stream_set_read_callback (stream_in, &stream_read_callback, NULL);
    memset (&na, 0, sizeof (na));
    na.maxlength = UINT32_MAX;
    na.fragsize = pcm_length;
    if ((r = pa_stream_connect_record (stream_in, NULL, &na,
				       PA_STREAM_ADJUST_LATENCY)) < 0)
    {
      GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
		  _("pa_stream_connect_record() failed: %s\n"),
		  pa_strerror (pa_context_errno (c)));
      goto fail;
    }

    break;
  }
  case PA_CONTEXT_TERMINATED:
    quit (0);
    break;
  case PA_CONTEXT_FAILED:
  default:
    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
		_("Connection failure: %s\n"),
		pa_strerror (pa_context_errno (c)));
    goto fail;
  }
  return;

fail:
  quit (1);
}
Example #8
static pa_stream *qpa_simple_new (
        paaudio *g,
        const char *name,
        pa_stream_direction_t dir,
        const char *dev,
        const pa_sample_spec *ss,
        const pa_channel_map *map,
        const pa_buffer_attr *attr,
        int *rerror)
{
    int r;
    pa_stream *stream;

    pa_threaded_mainloop_lock (g->mainloop);

    stream = pa_stream_new (g->context, name, ss, map);
    if (!stream) {
        goto fail;
    }

    pa_stream_set_state_callback (stream, stream_state_cb, g);
    pa_stream_set_read_callback (stream, stream_request_cb, g);
    pa_stream_set_write_callback (stream, stream_request_cb, g);

    if (dir == PA_STREAM_PLAYBACK) {
        r = pa_stream_connect_playback (stream, dev, attr,
                                        PA_STREAM_INTERPOLATE_TIMING
#ifdef PA_STREAM_ADJUST_LATENCY
                                        |PA_STREAM_ADJUST_LATENCY
#endif
                                        |PA_STREAM_AUTO_TIMING_UPDATE, NULL, NULL);
    } else {
        r = pa_stream_connect_record (stream, dev, attr,
                                      PA_STREAM_INTERPOLATE_TIMING
#ifdef PA_STREAM_ADJUST_LATENCY
                                      |PA_STREAM_ADJUST_LATENCY
#endif
                                      |PA_STREAM_AUTO_TIMING_UPDATE);
    }

    if (r < 0) {
      goto fail;
    }

    pa_threaded_mainloop_unlock (g->mainloop);

    return stream;

fail:
    pa_threaded_mainloop_unlock (g->mainloop);

    if (stream) {
        pa_stream_unref (stream);
    }

    *rerror = pa_context_errno (g->context);

    return NULL;
}
Example #9
/*
 * start recording
 */
static int_fast32_t pulse_start_recording(struct pulse_data *data)
{
	if (pulse_get_server_info(pulse_server_info, (void *) data) < 0) {
		blog(LOG_ERROR, "pulse-input: Unable to get server info !");
		return -1;
	}

	pa_sample_spec spec;
	spec.format   = data->format;
	spec.rate     = data->samples_per_sec;
	spec.channels = data->channels;

	if (!pa_sample_spec_valid(&spec)) {
		blog(LOG_ERROR, "pulse-input: Sample spec is not valid");
		return -1;
	}

	data->bytes_per_frame = pa_frame_size(&spec);
	blog(LOG_DEBUG, "pulse-input: %u bytes per frame",
	     (unsigned int) data->bytes_per_frame);

	data->stream = pulse_stream_new(obs_source_getname(data->source),
		&spec, NULL);
	if (!data->stream) {
		blog(LOG_ERROR, "pulse-input: Unable to create stream");
		return -1;
	}

	pulse_lock();
	pa_stream_set_read_callback(data->stream, pulse_stream_read,
		(void *) data);
	pulse_unlock();

	pa_buffer_attr attr;
	attr.fragsize  = get_buffer_size(data, 250);
	attr.maxlength = (uint32_t) -1;
	attr.minreq    = (uint32_t) -1;
	attr.prebuf    = (uint32_t) -1;
	attr.tlength   = (uint32_t) -1;

	pa_stream_flags_t flags =
		PA_STREAM_INTERPOLATE_TIMING
		| PA_STREAM_AUTO_TIMING_UPDATE
		| PA_STREAM_ADJUST_LATENCY;

	pulse_lock();
	int_fast32_t ret = pa_stream_connect_record(data->stream, data->device,
		&attr, flags);
	pulse_unlock();
	if (ret < 0) {
		blog(LOG_ERROR, "pulse-input: Unable to connect to stream");
		return -1;
	}

	blog(LOG_DEBUG, "pulse-input: Recording started");
	return 0;
}
Example #10
static int m_pa_stream_connect(pa_context *pa_ctx)
{
    if (pa_context_get_server_protocol_version (pa_ctx) < 13) {
            return -1;
    }
    printf("server version: %d\n", pa_context_get_server_protocol_version(pa_ctx));
    if (s) {
        pa_stream_disconnect(s);
        pa_stream_unref(s);
    }

    pa_proplist  *proplist;

    pa_buffer_attr attr;
    pa_sample_spec ss;

    int res;
    //char dev_name[40];

    // pa_sample_spec
    ss.channels = 1;
    ss.format = PA_SAMPLE_FLOAT32;
    ss.rate = 25;

    // pa_buffer_attr
    memset(&attr, 0, sizeof(attr));
    attr.fragsize = sizeof(float);
    attr.maxlength = (uint32_t) -1;

    // pa_proplist
    proplist = pa_proplist_new ();
    pa_proplist_sets (proplist, PA_PROP_APPLICATION_ID, "Deepin Sound Settings");

    // create new stream
    if (!(s = pa_stream_new_with_proplist(pa_ctx, "Deepin Sound Settings", &ss, NULL, proplist))) {
        fprintf(stderr, "pa_stream_new error\n");
        return -2;
    }
    pa_proplist_free(proplist);

    pa_stream_set_read_callback(s, on_monitor_read_callback, NULL);
    pa_stream_set_suspended_callback(s, on_monitor_suspended_callback, NULL);

    res = pa_stream_connect_record(s, NULL, &attr, 
                                   (pa_stream_flags_t) (PA_STREAM_DONT_MOVE
                                                        |PA_STREAM_PEAK_DETECT
                                                        |PA_STREAM_ADJUST_LATENCY));
    
    if (res < 0) {
        fprintf(stderr, "Failed to connect monitoring stream\n");
        return -3;
    }
    return 0;
}
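This monitor stream refuses to run against servers older than protocol 13, most likely because per-sink-input peak monitoring relies on pa_stream_set_monitor_stream(), which needs that protocol level (a commented-out call to it appears in a later example on this page). A hedged sketch of that variant follows; the function and sink_input_idx parameter are illustrative, with the index assumed to come from a sink-input info callback.

#include <string.h>
#include <pulse/pulseaudio.h>

/* Illustrative only: connect a peak-detect record stream that follows one
 * specific sink input rather than a whole source. */
static int connect_peak_monitor(pa_stream *s, uint32_t sink_input_idx)
{
    pa_buffer_attr attr;

    memset(&attr, 0, sizeof(attr));
    attr.fragsize = sizeof(float);      /* one peak value per fragment */
    attr.maxlength = (uint32_t) -1;

    /* Must be called before pa_stream_connect_record(). */
    if (pa_stream_set_monitor_stream(s, sink_input_idx) < 0)
        return -1;

    return pa_stream_connect_record(s, NULL, &attr,
                                    (pa_stream_flags_t) (PA_STREAM_DONT_MOVE
                                                         |PA_STREAM_PEAK_DETECT
                                                         |PA_STREAM_ADJUST_LATENCY));
}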
Example #11
/*
 * Create a new pulse audio stream and connect to it
 *
 * Return a negative value on error
 */
static int pulse_connect_stream(struct pulse_data *data)
{
	pa_sample_spec spec;
	spec.format = data->format;
	spec.rate = data->samples_per_sec;
	spec.channels = get_audio_channels(data->speakers);

	if (!pa_sample_spec_valid(&spec)) {
		blog(LOG_ERROR, "pulse-input: Sample spec is not valid");
		return -1;
	}

	data->bytes_per_frame = pa_frame_size(&spec);
	blog(LOG_DEBUG, "pulse-input: %u bytes per frame",
	     (unsigned int) data->bytes_per_frame);

	pa_buffer_attr attr;
	attr.fragsize = get_buffer_size(data, 250);
	attr.maxlength = (uint32_t) -1;
	attr.minreq = (uint32_t) -1;
	attr.prebuf = (uint32_t) -1;
	attr.tlength = (uint32_t) -1;

	data->stream = pa_stream_new_with_proplist(data->context,
		obs_source_getname(data->source), &spec, NULL, data->props);
	if (!data->stream) {
		blog(LOG_ERROR, "pulse-input: Unable to create stream");
		return -1;
	}
	pa_stream_flags_t flags =
		PA_STREAM_INTERPOLATE_TIMING
		| PA_STREAM_AUTO_TIMING_UPDATE
		| PA_STREAM_ADJUST_LATENCY;
	if (pa_stream_connect_record(data->stream, NULL, &attr, flags) < 0) {
		blog(LOG_ERROR, "pulse-input: Unable to connect to stream");
		return -1;
	}

	for (;;) {
		pulse_iterate(data);
		pa_stream_state_t state = pa_stream_get_state(data->stream);
		if (state == PA_STREAM_READY) {
			blog(LOG_DEBUG, "pulse-input: Stream ready");
			break;
		}
		if (!PA_STREAM_IS_GOOD(state)) {
			blog(LOG_ERROR, "pulse-input: Stream connect failed");
			return -1;
		}
	}

	return 0;
}
Example #12
void QPulseAudioThread::connectHelper (SourceContainer::const_iterator pos)
{
    Q_ASSERT(stream);
    pa_stream_flags_t flags = ( pa_stream_flags_t ) 0;
//	qDebug() << "start2 ";
    assert (pos != s_sourceList.end());
    qDebug() << "connectHelper: " << *pos;
    int r;
    if ( ( ( r = pa_stream_connect_record (stream, (*pos).toStdString().c_str(), NULL, flags ) ) ) < 0 ) {
        fprintf ( stderr, "pa_stream_connect_record() failed: %s\n", pa_strerror ( pa_context_errno ( context ) ) );
    }


}
Example #13
AudioStream::AudioStream(pa_context *c, pa_threaded_mainloop *m, const char *desc, int type, int smplrate, std::string& deviceName)
    : audiostream_(0), mainloop_(m)
{
    static const pa_channel_map channel_map = {
        1,
        { PA_CHANNEL_POSITION_MONO },
    };

    pa_sample_spec sample_spec = {
        PA_SAMPLE_S16LE, // PA_SAMPLE_FLOAT32LE,
        smplrate,
        1
    };

    assert(pa_sample_spec_valid(&sample_spec));
    assert(pa_channel_map_valid(&channel_map));

    audiostream_ = pa_stream_new(c, desc, &sample_spec, &channel_map);

    if (!audiostream_) {
        ERROR("%s: pa_stream_new() failed : %s" , desc, pa_strerror(pa_context_errno(c)));
        throw std::runtime_error("Could not create stream\n");
    }

    pa_buffer_attr attributes;
    attributes.maxlength = pa_usec_to_bytes(160 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.tlength = pa_usec_to_bytes(80 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.prebuf = 0;
    attributes.fragsize = pa_usec_to_bytes(80 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.minreq = (uint32_t) -1;

    pa_threaded_mainloop_lock(mainloop_);

    if (type == PLAYBACK_STREAM || type == RINGTONE_STREAM)
        pa_stream_connect_playback(audiostream_, deviceName == "" ? NULL : deviceName.c_str(), &attributes,
		(pa_stream_flags_t)(PA_STREAM_ADJUST_LATENCY|PA_STREAM_AUTO_TIMING_UPDATE), NULL, NULL);
    else if (type == CAPTURE_STREAM)
        pa_stream_connect_record(audiostream_, deviceName == "" ? NULL : deviceName.c_str(), &attributes,
		(pa_stream_flags_t)(PA_STREAM_ADJUST_LATENCY|PA_STREAM_AUTO_TIMING_UPDATE));

    pa_threaded_mainloop_unlock(mainloop_);

    pa_stream_set_state_callback(audiostream_, stream_state_callback, NULL);
}
Example #14
static pa_stream *connect_record_stream(const char *device_name,
    pa_threaded_mainloop *loop, pa_context *context,
    pa_stream_flags_t flags, pa_buffer_attr *attr, pa_sample_spec *spec,
    pa_channel_map *chanmap)
{
    pa_stream_state_t state;
    pa_stream *stream;

    stream = pa_stream_new_with_proplist(context, "Capture Stream", spec, chanmap, prop_filter);
    if(!stream)
    {
        ERR("pa_stream_new_with_proplist() failed: %s\n", pa_strerror(pa_context_errno(context)));
        return NULL;
    }

    pa_stream_set_state_callback(stream, stream_state_callback, loop);

    if(pa_stream_connect_record(stream, device_name, attr, flags) < 0)
    {
        ERR("Stream did not connect: %s\n", pa_strerror(pa_context_errno(context)));
        pa_stream_unref(stream);
        return NULL;
    }

    while((state=pa_stream_get_state(stream)) != PA_STREAM_READY)
    {
        if(!PA_STREAM_IS_GOOD(state))
        {
            ERR("Stream did not get ready: %s\n", pa_strerror(pa_context_errno(context)));
            pa_stream_unref(stream);
            return NULL;
        }

        pa_threaded_mainloop_wait(loop);
    }
    pa_stream_set_state_callback(stream, NULL, NULL);

    return stream;
}
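The loop above blocks in pa_threaded_mainloop_wait() until the stream leaves a transient state, which only works if the stream's state callback wakes the mainloop. The project's own stream_state_callback is not shown here; a minimal sketch of what such a callback has to do, assuming the userdata is the pa_threaded_mainloop pointer passed at registration time:

#include <pulse/pulseaudio.h>

/* Illustrative state callback: wake up whoever is blocked in
 * pa_threaded_mainloop_wait() so it can re-check pa_stream_get_state(). */
static void example_stream_state_callback(pa_stream *stream, void *userdata)
{
    pa_threaded_mainloop *loop = userdata;
    (void) stream;
    pa_threaded_mainloop_signal(loop, 0);
}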
Example #15
JNIEXPORT jint JNICALL
Java_org_jitsi_impl_neomedia_pulseaudio_PA_stream_1connect_1record
    (JNIEnv *env, jclass clazz, jlong s, jstring dev, jlong attr, jint flags)
{
    const char *devChars
        = dev ? (*env)->GetStringUTFChars(env, dev, NULL) : NULL;
    jint ret;

    if ((*env)->ExceptionCheck(env))
        ret = -1;
    else
    {
        ret
            = pa_stream_connect_record(
                    (pa_stream *) (intptr_t) s,
                    devChars,
                    (const pa_buffer_attr *) (intptr_t) attr,
                    (pa_stream_flags_t) flags);
        (*env)->ReleaseStringUTFChars(env, dev, devChars);
    }
    return ret;
}
Example #16
static void __context_get_sink_info_callback(pa_context* context,
        const pa_sink_info* info, int is_last, void* data) {

    guac_client* client = (guac_client*) data;
    pa_stream* stream;
    pa_sample_spec spec;
    pa_buffer_attr attr;

    /* Stop if end of list reached */
    if (is_last)
        return;

    guac_client_log_info(client, "Starting streaming from \"%s\"",
            info->description);

    /* Set format */
    spec.format   = PA_SAMPLE_S16LE;
    spec.rate     = GUAC_VNC_AUDIO_RATE;
    spec.channels = GUAC_VNC_AUDIO_CHANNELS;

    /* Only fragsize and maxlength matter for a record stream, but initialize
       the remaining fields so no uninitialized values are passed. */
    attr.maxlength = -1;
    attr.fragsize  = GUAC_VNC_AUDIO_FRAGMENT_SIZE;
    attr.tlength   = -1;
    attr.prebuf    = -1;
    attr.minreq    = -1;

    /* Create stream */
    stream = pa_stream_new(context, "Guacamole Audio", &spec, NULL);

    /* Set stream callbacks */
    pa_stream_set_state_callback(stream, __stream_state_callback, client);
    pa_stream_set_read_callback(stream, __stream_read_callback, client);

    /* Start stream */
    pa_stream_connect_record(stream, info->monitor_source_name, &attr,
                PA_STREAM_DONT_INHIBIT_AUTO_SUSPEND
              | PA_STREAM_ADJUST_LATENCY);

}
Example #17
static void audin_pulse_open(IAudinDevice* device, AudinReceive receive, void* user_data)
{
    pa_stream_state_t state;
    pa_buffer_attr buffer_attr = { 0 };
    AudinPulseDevice* pulse = (AudinPulseDevice*) device;

    if (!pulse->context)
        return;
    if (!pulse->sample_spec.rate || pulse->stream)
        return;

    DEBUG_DVC("");

    pulse->receive = receive;
    pulse->user_data = user_data;

    pa_threaded_mainloop_lock(pulse->mainloop);
    pulse->stream = pa_stream_new(pulse->context, "freerdp_audin",
                                  &pulse->sample_spec, NULL);
    if (!pulse->stream)
    {
        pa_threaded_mainloop_unlock(pulse->mainloop);
        DEBUG_DVC("pa_stream_new failed (%d)",
                  pa_context_errno(pulse->context));
        return;
    }
    pulse->bytes_per_frame = pa_frame_size(&pulse->sample_spec);
    pa_stream_set_state_callback(pulse->stream,
                                 audin_pulse_stream_state_callback, pulse);
    pa_stream_set_read_callback(pulse->stream,
                                audin_pulse_stream_request_callback, pulse);
    buffer_attr.maxlength = (uint32_t) -1;
    buffer_attr.tlength = (uint32_t) -1;
    buffer_attr.prebuf = (uint32_t) -1;
    buffer_attr.minreq = (uint32_t) -1;
    /* 500ms latency */
    buffer_attr.fragsize = pa_usec_to_bytes(500000, &pulse->sample_spec);
    if (pa_stream_connect_record(pulse->stream,
                                 pulse->device_name[0] ? pulse->device_name : NULL,
                                 &buffer_attr, PA_STREAM_ADJUST_LATENCY) < 0)
    {
        pa_threaded_mainloop_unlock(pulse->mainloop);
        DEBUG_WARN("pa_stream_connect_playback failed (%d)",
                   pa_context_errno(pulse->context));
        return;
    }

    for (;;)
    {
        state = pa_stream_get_state(pulse->stream);
        if (state == PA_STREAM_READY)
            break;
        if (!PA_STREAM_IS_GOOD(state))
        {
            DEBUG_WARN("bad stream state (%d)",
                       pa_context_errno(pulse->context));
            break;
        }
        pa_threaded_mainloop_wait(pulse->mainloop);
    }
    pa_threaded_mainloop_unlock(pulse->mainloop);
    if (state == PA_STREAM_READY)
    {
        memset(&pulse->adpcm, 0, sizeof(ADPCM));
        pulse->buffer = xzalloc(pulse->bytes_per_frame * pulse->frames_per_packet);
        pulse->buffer_frames = 0;
        DEBUG_DVC("connected");
    }
    else
    {
        audin_pulse_close(device);
    }
}
Example #18
void PulseAudioSystem::eventCallback(pa_mainloop_api *api, pa_defer_event *) {
	api->defer_enable(pade, false);

	if (! bSourceDone || ! bSinkDone || ! bServerDone)
		return;

	AudioInputPtr ai = g.ai;
	AudioOutputPtr ao = g.ao;
	AudioInput *raw_ai = ai.get();
	AudioOutput *raw_ao = ao.get();
	PulseAudioInput *pai = dynamic_cast<PulseAudioInput *>(raw_ai);
	PulseAudioOutput *pao = dynamic_cast<PulseAudioOutput *>(raw_ao);

	if (raw_ao) {
		QString odev = outputDevice();
		pa_stream_state ost = pasOutput ? pa_stream_get_state(pasOutput) : PA_STREAM_TERMINATED;
		bool do_stop = false;
		bool do_start = false;

		if (! pao && (ost == PA_STREAM_READY)) {
			do_stop = true;
		} else if (pao) {
			switch (ost) {
				case PA_STREAM_TERMINATED: {
						if (pasOutput)
							pa_stream_unref(pasOutput);

						pa_sample_spec pss = qhSpecMap.value(odev);
						pa_channel_map pcm = qhChanMap.value(odev);
						if ((pss.format != PA_SAMPLE_FLOAT32NE) && (pss.format != PA_SAMPLE_S16NE))
							pss.format = PA_SAMPLE_FLOAT32NE;
						if (pss.rate == 0)
							pss.rate = SAMPLE_RATE;
						if ((pss.channels == 0) || (! g.s.doPositionalAudio()))
							pss.channels = 1;

						pasOutput = pa_stream_new(pacContext, mumble_sink_input, &pss, (pss.channels == 1) ? NULL : &pcm);
						pa_stream_set_state_callback(pasOutput, stream_callback, this);
						pa_stream_set_write_callback(pasOutput, write_callback, this);
					}
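				// fall through: the newly created stream still needs to be connected below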
				case PA_STREAM_UNCONNECTED:
					do_start = true;
					break;
				case PA_STREAM_READY: {
						if (g.s.iOutputDelay != iDelayCache) {
							do_stop = true;
						} else if (g.s.doPositionalAudio() != bPositionalCache) {
							do_stop = true;
						} else if (odev != qsOutputCache) {
							do_stop = true;
						}
						break;
					}
				default:
					break;
			}
		}
		if (do_stop) {
			qWarning("PulseAudio: Stopping output");
			pa_stream_disconnect(pasOutput);
			iSinkId = -1;
		} else if (do_start) {
			qWarning("PulseAudio: Starting output: %s", qPrintable(odev));
			pa_buffer_attr buff;
			const pa_sample_spec *pss = pa_stream_get_sample_spec(pasOutput);
			const size_t sampleSize = (pss->format == PA_SAMPLE_FLOAT32NE) ? sizeof(float) : sizeof(short);
			const unsigned int iBlockLen = ((pao->iFrameSize * pss->rate) / SAMPLE_RATE) * pss->channels * static_cast<unsigned int>(sampleSize);
			buff.tlength = iBlockLen * (g.s.iOutputDelay+1);
			buff.minreq = iBlockLen;
			buff.maxlength = -1;
			buff.prebuf = -1;
			buff.fragsize = iBlockLen;

			iDelayCache = g.s.iOutputDelay;
			bPositionalCache = g.s.doPositionalAudio();
			qsOutputCache = odev;

			pa_stream_connect_playback(pasOutput, qPrintable(odev), &buff, PA_STREAM_ADJUST_LATENCY, NULL, NULL);
			pa_context_get_sink_info_by_name(pacContext, qPrintable(odev), sink_info_callback, this);
		}
	}

	if (raw_ai) {
		QString idev = inputDevice();
		pa_stream_state ist = pasInput ? pa_stream_get_state(pasInput) : PA_STREAM_TERMINATED;
		bool do_stop = false;
		bool do_start = false;

		if (! pai && (ist == PA_STREAM_READY)) {
			do_stop = true;
		} else if (pai) {
			switch (ist) {
				case PA_STREAM_TERMINATED: {
						if (pasInput)
							pa_stream_unref(pasInput);

						pa_sample_spec pss = qhSpecMap.value(idev);
						if ((pss.format != PA_SAMPLE_FLOAT32NE) && (pss.format != PA_SAMPLE_S16NE))
							pss.format = PA_SAMPLE_FLOAT32NE;
						if (pss.rate == 0)
							pss.rate = SAMPLE_RATE;
						pss.channels = 1;

						pasInput = pa_stream_new(pacContext, "Microphone", &pss, NULL);
						pa_stream_set_state_callback(pasInput, stream_callback, this);
						pa_stream_set_read_callback(pasInput, read_callback, this);
					}
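				// fall through: the newly created stream still needs to be connected below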
				case PA_STREAM_UNCONNECTED:
					do_start = true;
					break;
				case PA_STREAM_READY: {
						if (idev != qsInputCache) {
							do_stop = true;
						}
						break;
					}
				default:
					break;
			}
		}
		if (do_stop) {
			qWarning("PulseAudio: Stopping input");
			pa_stream_disconnect(pasInput);
		} else if (do_start) {
			qWarning("PulseAudio: Starting input %s",qPrintable(idev));
			pa_buffer_attr buff;
			const pa_sample_spec *pss = pa_stream_get_sample_spec(pasInput);
			const size_t sampleSize = (pss->format == PA_SAMPLE_FLOAT32NE) ? sizeof(float) : sizeof(short);
			const unsigned int iBlockLen = ((pai->iFrameSize * pss->rate) / SAMPLE_RATE) * pss->channels * static_cast<unsigned int>(sampleSize);
			buff.tlength = iBlockLen;
			buff.minreq = iBlockLen;
			buff.maxlength = -1;
			buff.prebuf = -1;
			buff.fragsize = iBlockLen;

			qsInputCache = idev;

			pa_stream_connect_record(pasInput, qPrintable(idev), &buff, PA_STREAM_ADJUST_LATENCY);
		}
	}

	if (raw_ai) {
		QString odev = outputDevice();
		QString edev = qhEchoMap.value(odev);
		pa_stream_state est = pasSpeaker ? pa_stream_get_state(pasSpeaker) : PA_STREAM_TERMINATED;
		bool do_stop = false;
		bool do_start = false;

		if ((! pai || ! g.s.doEcho()) && (est == PA_STREAM_READY)) {
			do_stop = true;
		} else if (pai && g.s.doEcho()) {
			switch (est) {
				case PA_STREAM_TERMINATED: {
						if (pasSpeaker)
							pa_stream_unref(pasSpeaker);

						pa_sample_spec pss = qhSpecMap.value(edev);
						pa_channel_map pcm = qhChanMap.value(edev);
						if ((pss.format != PA_SAMPLE_FLOAT32NE) && (pss.format != PA_SAMPLE_S16NE))
							pss.format = PA_SAMPLE_FLOAT32NE;
						if (pss.rate == 0)
							pss.rate = SAMPLE_RATE;
						if ((pss.channels == 0) || (! g.s.bEchoMulti))
							pss.channels = 1;

						pasSpeaker = pa_stream_new(pacContext, mumble_echo, &pss, (pss.channels == 1) ? NULL : &pcm);
						pa_stream_set_state_callback(pasSpeaker, stream_callback, this);
						pa_stream_set_read_callback(pasSpeaker, read_callback, this);
					}
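				// fall through: the newly created stream still needs to be connected below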
				case PA_STREAM_UNCONNECTED:
					do_start = true;
					break;
				case PA_STREAM_READY: {
						if (g.s.bEchoMulti != bEchoMultiCache) {
							do_stop = true;
						} else if (edev != qsEchoCache) {
							do_stop = true;
						}
						break;
					}
				default:
					break;
			}
		}
		if (do_stop) {
			qWarning("PulseAudio: Stopping echo");
			pa_stream_disconnect(pasSpeaker);
		} else if (do_start) {
			qWarning("PulseAudio: Starting echo: %s", qPrintable(edev));
			pa_buffer_attr buff;
			const pa_sample_spec *pss = pa_stream_get_sample_spec(pasSpeaker);
			const size_t sampleSize = (pss->format == PA_SAMPLE_FLOAT32NE) ? sizeof(float) : sizeof(short);
			const unsigned int iBlockLen = ((pai->iFrameSize * pss->rate) / SAMPLE_RATE) * pss->channels * static_cast<unsigned int>(sampleSize);
			buff.tlength = iBlockLen;
			buff.minreq = iBlockLen;
			buff.maxlength = -1;
			buff.prebuf = -1;
			buff.fragsize = iBlockLen;

			bEchoMultiCache = g.s.bEchoMulti;
			qsEchoCache = edev;

			pa_stream_connect_record(pasSpeaker, qPrintable(edev), &buff, PA_STREAM_ADJUST_LATENCY);
		}
	}
}
Example #19
/* This is called whenever the context status changes */
static void context_state_callback(pa_context *c, void *userdata) {
    pa_assert(c);

    switch (pa_context_get_state(c)) {
        case PA_CONTEXT_CONNECTING:
        case PA_CONTEXT_AUTHORIZING:
        case PA_CONTEXT_SETTING_NAME:
            break;

        case PA_CONTEXT_READY: {
            pa_buffer_attr buffer_attr;

            pa_assert(c);
            pa_assert(!stream);

            if (verbose)
                pa_log(_("Connection established.%s"), CLEAR_LINE);

            if (!(stream = pa_stream_new_with_proplist(c, NULL, &sample_spec, &channel_map, proplist))) {
                pa_log(_("pa_stream_new() failed: %s"), pa_strerror(pa_context_errno(c)));
                goto fail;
            }

            pa_stream_set_state_callback(stream, stream_state_callback, NULL);
            pa_stream_set_write_callback(stream, stream_write_callback, NULL);
            pa_stream_set_read_callback(stream, stream_read_callback, NULL);
            pa_stream_set_suspended_callback(stream, stream_suspended_callback, NULL);
            pa_stream_set_moved_callback(stream, stream_moved_callback, NULL);
            pa_stream_set_underflow_callback(stream, stream_underflow_callback, NULL);
            pa_stream_set_overflow_callback(stream, stream_overflow_callback, NULL);
            pa_stream_set_started_callback(stream, stream_started_callback, NULL);
            pa_stream_set_event_callback(stream, stream_event_callback, NULL);
            pa_stream_set_buffer_attr_callback(stream, stream_buffer_attr_callback, NULL);

            pa_zero(buffer_attr);
            buffer_attr.maxlength = (uint32_t) -1;
            buffer_attr.prebuf = (uint32_t) -1;

            if (latency_msec > 0) {
                buffer_attr.fragsize = buffer_attr.tlength = pa_usec_to_bytes(latency_msec * PA_USEC_PER_MSEC, &sample_spec);
                flags |= PA_STREAM_ADJUST_LATENCY;
            } else if (latency > 0) {
                buffer_attr.fragsize = buffer_attr.tlength = (uint32_t) latency;
                flags |= PA_STREAM_ADJUST_LATENCY;
            } else
                buffer_attr.fragsize = buffer_attr.tlength = (uint32_t) -1;

            if (process_time_msec > 0) {
                buffer_attr.minreq = pa_usec_to_bytes(process_time_msec * PA_USEC_PER_MSEC, &sample_spec);
            } else if (process_time > 0)
                buffer_attr.minreq = (uint32_t) process_time;
            else
                buffer_attr.minreq = (uint32_t) -1;

            if (mode == PLAYBACK) {
                pa_cvolume cv;
                if (pa_stream_connect_playback(stream, device, &buffer_attr, flags, volume_is_set ? pa_cvolume_set(&cv, sample_spec.channels, volume) : NULL, NULL) < 0) {
                    pa_log(_("pa_stream_connect_playback() failed: %s"), pa_strerror(pa_context_errno(c)));
                    goto fail;
                }

            } else {
                if (pa_stream_connect_record(stream, device, latency > 0 ? &buffer_attr : NULL, flags) < 0) {
                    pa_log(_("pa_stream_connect_record() failed: %s"), pa_strerror(pa_context_errno(c)));
                    goto fail;
                }
            }

            break;
        }

        case PA_CONTEXT_TERMINATED:
            quit(0);
            break;

        case PA_CONTEXT_FAILED:
        default:
            pa_log(_("Connection failure: %s"), pa_strerror(pa_context_errno(c)));
            goto fail;
    }

    return;

fail:
    quit(1);

}
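With PA_STREAM_ADJUST_LATENCY the server is free to grant different buffer metrics than the ones requested above. A hedged sketch, not part of the original program, of reading the negotiated attributes back once the stream has reached PA_STREAM_READY:

#include <stdio.h>
#include <pulse/pulseaudio.h>

/* Illustrative only: print the buffer attributes the server actually granted. */
static void print_negotiated_attrs(pa_stream *s)
{
    const pa_buffer_attr *a = pa_stream_get_buffer_attr(s);

    if (a)
        fprintf(stderr, "granted fragsize=%u maxlength=%u\n",
                a->fragsize, a->maxlength);
}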
Example #20
/*
 * Iterate the main loop while recording is on.
 * This function runs under its own thread, called by audio_pulse_start
 * args:
 *   data - pointer to user data (audio context)
 *
 * asserts:
 *   data is not null
 *
 * returns: pointer to error code
 */
static void *pulse_read_audio(void *data)
{
    audio_context_t *audio_ctx = (audio_context_t *) data;
	/*assertions*/
	assert(audio_ctx != NULL);

    if(verbosity > 0)
		printf("AUDIO: (pulseaudio) read thread started\n");
    pa_mainloop *pa_ml;
    pa_mainloop_api *pa_mlapi;
    pa_buffer_attr bufattr;
    pa_sample_spec ss;
    pa_stream_flags_t flags = 0;
    int r;
    int pa_ready = 0;

    /* Create a mainloop API and connection to the default server */
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(pa_ml);
    pa_ctx = pa_context_new(pa_mlapi, "guvcview Pulse API");

    if(pa_context_connect(pa_ctx, NULL, 0, NULL) < 0)
    {
		fprintf(stderr,"AUDIO: PULSE - unable to connect to server: pa_context_connect failed\n");
		finish(pa_ctx, pa_ml);
		return ((void *) -1);
	}

    /*
	 * This function defines a callback so the server will tell us its state.
     * Our callback will wait for the state to be ready.  The callback will
     * modify the variable to 1 so we know when we have a connection and it's
     * ready.
     * If there's an error, the callback will set pa_ready to 2
	 */
    pa_context_set_state_callback(pa_ctx, pa_state_cb, &pa_ready);

    /*
     * This function defines a time event callback (called every TIME_EVENT_USEC)
     */
    //pa_context_rttime_new(pa_ctx, pa_rtclock_now() + TIME_EVENT_USEC, time_event_callback, NULL);

    /*
	 * We can't do anything until PA is ready, so just iterate the mainloop
     * and continue
	 */
    while (pa_ready == 0)
    {
        pa_mainloop_iterate(pa_ml, 1, NULL);
    }
    if (pa_ready == 2)
    {
        finish(pa_ctx, pa_ml);
        return ((void *) -1);
    }

	/* set the sample spec (frame rate, channels and format) */
    ss.rate = audio_ctx->samprate;
    ss.channels = audio_ctx->channels;
    ss.format = PA_SAMPLE_FLOAT32LE; /*for PCM -> PA_SAMPLE_S16LE*/

    recordstream = pa_stream_new(pa_ctx, "Record", &ss, NULL);
    if (!recordstream)
    {
        fprintf(stderr, "AUDIO: (pulseaudio) pa_stream_new failed (chan:%d rate:%d)\n",
            ss.channels, ss.rate);
        finish(pa_ctx, pa_ml);
        return ((void *) -1);
    }

    /* define the callbacks */
    pa_stream_set_read_callback(recordstream, stream_request_cb, (void *) audio_ctx);

	// Set properties of the record buffer
    pa_zero(bufattr);
    /* optimal value for all is (uint32_t)-1   ~= 2 sec */
    bufattr.maxlength = (uint32_t) -1;
    bufattr.prebuf = (uint32_t) -1;
    bufattr.minreq = (uint32_t) -1;

    if (audio_ctx->latency > 0)
    {
      bufattr.fragsize = bufattr.tlength = pa_usec_to_bytes((audio_ctx->latency * 1000) * PA_USEC_PER_MSEC, &ss);
      flags |= PA_STREAM_ADJUST_LATENCY;
    }
    else
      bufattr.fragsize = bufattr.tlength = (uint32_t) -1;

	flags |= PA_STREAM_INTERPOLATE_TIMING;
    flags |= PA_STREAM_AUTO_TIMING_UPDATE;

    char * dev = audio_ctx->list_devices[audio_ctx->device].name;
    if(verbosity > 0)
		printf("AUDIO: (pulseaudio) connecting to device %s\n\t (channels %d rate %d)\n",
			dev, ss.channels, ss.rate);
    r = pa_stream_connect_record(recordstream, dev, &bufattr, flags);
    if (r < 0)
    {
        fprintf(stderr, "AUDIO: (pulseaudio) skip latency adjustment\n");
        /*
         * Old pulse audio servers don't like the ADJUST_LATENCY flag,
		 * so retry without that
		 */
        r = pa_stream_connect_record(recordstream, dev, &bufattr,
                                     PA_STREAM_INTERPOLATE_TIMING|
                                     PA_STREAM_AUTO_TIMING_UPDATE);
    }
    if (r < 0)
    {
        fprintf(stderr, "AUDIO: (pulseaudio) pa_stream_connect_record failed\n");
        finish(pa_ctx, pa_ml);
        return ((void *) -1);
    }

    get_latency(recordstream);

    /*
     * Iterate the main loop while streaming.  The second argument is whether
     * or not the iteration should block until something is ready to be
     * done.  Set it to zero for non-blocking.
     */
    while (audio_ctx->stream_flag == AUDIO_STRM_ON)
    {
        pa_mainloop_iterate(pa_ml, 1, NULL);
    }

	if(verbosity > 0)
		printf("AUDIO: (pulseaudio) stream terminated(%i)\n", audio_ctx->stream_flag);

    pa_stream_disconnect (recordstream);
    pa_stream_unref (recordstream);
    finish(pa_ctx, pa_ml);
    return ((void *) 0);
}
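The pa_state_cb registered above is described in the comment but not shown in this snippet. A sketch of what it plausibly looks like, given that the userdata points at the pa_ready flag and the comment says it is set to 1 when the connection is ready and to 2 on error:

#include <pulse/pulseaudio.h>

/* Illustrative version of the context state callback described above. */
static void example_pa_state_cb(pa_context *ctx, void *userdata)
{
    int *pa_ready = userdata;

    switch (pa_context_get_state(ctx)) {
    case PA_CONTEXT_READY:
        *pa_ready = 1;
        break;
    case PA_CONTEXT_FAILED:
    case PA_CONTEXT_TERMINATED:
        *pa_ready = 2;
        break;
    default:
        break;
    }
}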
Example #21
static void conn_state(pa_context *context, void *arg)
{
	int err;
	struct pulse_conn_t *conn = arg;
	pa_context_state_t state = pa_context_get_state(context);

	switch(state) {
	case PA_CONTEXT_UNCONNECTED:
	case PA_CONTEXT_CONNECTING: break;
	case PA_CONTEXT_AUTHORIZING: break;
	case PA_CONTEXT_SETTING_NAME: break;

	case PA_CONTEXT_FAILED:
	case PA_CONTEXT_TERMINATED:
		break;

	case PA_CONTEXT_READY:
		{
			pa_sample_spec spec;
			pa_buffer_attr attr;

			spec.rate = conn->conf.rate;
			spec.format = PA_SAMPLE_FLOAT32NE;

			if(conn->conf.in > 0) {
				spec.channels = conn->conf.in;
				conn->record = pa_stream_new(conn->context, "Record", &spec, NULL);
				pa_stream_set_read_callback(conn->record, conn_record, conn);
				pa_stream_set_overflow_callback(conn->record, conn_overflow, conn);
			}
			else
				conn->record = NULL;

			if(conn->conf.out > 0) {
				spec.channels = conn->conf.out;
				conn->playback = pa_stream_new(conn->context, "Playback", &spec, NULL);
				pa_stream_set_write_callback(conn->playback, conn_playback, conn);
				pa_stream_set_underflow_callback(conn->playback, conn_underflow, conn);
			}
			else
				conn->playback = NULL;

			attr.fragsize = sizeof(float) * conn->lat;
			attr.maxlength = sizeof(float) * conn->lat;
			attr.minreq = 0;
			attr.prebuf = 0;
			attr.tlength = sizeof(float) * conn->lat;

			if(conn->record != NULL) {
				err = pa_stream_connect_record(conn->record, NULL, &attr, PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_ADJUST_LATENCY | PA_STREAM_AUTO_TIMING_UPDATE);
				if(err < 0)
					fprintf(stderr, "Failed to connect to recorder."), exit(1);
			}

			if(conn->playback != NULL) {
				err = pa_stream_connect_playback(conn->playback, NULL, &attr, PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_ADJUST_LATENCY | PA_STREAM_AUTO_TIMING_UPDATE, NULL, NULL);
				if(err < 0)
					fprintf(stderr, "Failed to connect to playback."), exit(1);
			}
		}

		break;
	}
}
Example #22
krad_pulse_t *kradpulse_create(krad_audio_t *kradaudio) {


    krad_pulse_t *kradpulse;

    if ((kradpulse = calloc (1, sizeof (krad_pulse_t))) == NULL) {
        fprintf(stderr, "mem alloc fail\n");
        exit (1);
    }

    kradpulse->kradaudio = kradaudio;


    kradpulse->samples[0] = malloc(24 * 8192);
    kradpulse->samples[1] = malloc(24 * 8192);
    kradpulse->interleaved_samples = malloc(48 * 8192);

    kradpulse->capture_samples[0] = malloc(24 * 8192);
    kradpulse->capture_samples[1] = malloc(24 * 8192);
    kradpulse->capture_interleaved_samples = malloc(48 * 8192);

    kradpulse->latency = 20000; // start latency in micro seconds
    kradpulse->underflows = 0;
    kradpulse->pa_ready = 0;
    kradpulse->retval = 0;

    // Create a mainloop API and connection to the default server
    kradpulse->pa_ml = pa_mainloop_new();
    kradpulse->pa_mlapi = pa_mainloop_get_api(kradpulse->pa_ml);
    kradpulse->pa_ctx = pa_context_new(kradpulse->pa_mlapi, kradpulse->kradaudio->name);

    pa_context_connect(kradpulse->pa_ctx, NULL, 0, NULL);

    // This function defines a callback so the server will tell us its state.
    // Our callback will wait for the state to be ready.  The callback will
    // modify the variable to 1 so we know when we have a connection and it's
    // ready.
    // If there's an error, the callback will set pa_ready to 2
    pa_context_set_state_callback(kradpulse->pa_ctx, kradpulse_state_cb, kradpulse);

    // We can't do anything until PA is ready, so just iterate the mainloop
    // and continue
    while (kradpulse->pa_ready == 0) {
        pa_mainloop_iterate(kradpulse->pa_ml, 1, NULL);
    }

    if (kradpulse->pa_ready == 2) {
        kradpulse->retval = -1;
        printf("pulseaudio fail\n");
        exit(1);
    }

    kradpulse->ss.rate = 44100;
    kradpulse->ss.channels = 2;
    kradpulse->ss.format = PA_SAMPLE_FLOAT32LE;

    kradpulse->bufattr.fragsize = (uint32_t)-1;
    kradpulse->bufattr.maxlength = pa_usec_to_bytes(kradpulse->latency, &kradpulse->ss);
    kradpulse->bufattr.minreq = pa_usec_to_bytes(0, &kradpulse->ss);
    kradpulse->bufattr.prebuf = (uint32_t)-1;
    kradpulse->bufattr.tlength = pa_usec_to_bytes(kradpulse->latency, &kradpulse->ss);

    if ((kradaudio->direction == KOUTPUT) || (kradaudio->direction == KDUPLEX)) {
        kradpulse->playstream = pa_stream_new(kradpulse->pa_ctx, "Playback", &kradpulse->ss, NULL);

        if (!kradpulse->playstream) {
            printf("playback pa_stream_new failed\n");
            exit(1);
        }

        pa_stream_set_write_callback(kradpulse->playstream, kradpulse_playback_cb, kradpulse);
        pa_stream_set_underflow_callback(kradpulse->playstream, kradpulse_stream_underflow_cb, kradpulse);

        kradpulse->r = pa_stream_connect_playback(kradpulse->playstream, NULL, &kradpulse->bufattr,
                       PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_ADJUST_LATENCY | PA_STREAM_AUTO_TIMING_UPDATE, NULL, NULL);

        if (kradpulse->r < 0) {
            printf("pa_stream_connect_playback failed\n");
            kradpulse->retval = -1;
            printf("pulseaudio fail\n");
            exit(1);
        }

    }

    if ((kradaudio->direction == KINPUT) || (kradaudio->direction == KDUPLEX)) {
        kradpulse->capturestream = pa_stream_new(kradpulse->pa_ctx, "Capture", &kradpulse->ss, NULL);

        if (!kradpulse->capturestream) {
            printf("capture pa_stream_new failed\n");
            exit(1);
        }


        pa_stream_set_read_callback(kradpulse->capturestream, kradpulse_capture_cb, kradpulse);
        pa_stream_set_underflow_callback(kradpulse->capturestream, kradpulse_stream_underflow_cb, kradpulse);

        kradpulse->r = pa_stream_connect_record(kradpulse->capturestream, NULL, &kradpulse->bufattr, PA_STREAM_NOFLAGS );

        if (kradpulse->r < 0) {
            printf("pa_stream_connect_capture failed\n");
            kradpulse->retval = -1;
            printf("pulseaudio fail\n");
            exit(1);
        }

    }


    kradaudio->sample_rate = kradpulse->ss.rate;


    pthread_create( &kradpulse->loop_thread, NULL, kradpulse_loop_thread, kradpulse);

    return kradpulse;

}
Example #23
static gboolean
gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
  pa_buffer_attr wanted;
  const pa_buffer_attr *actual;
  GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
  pa_stream_flags_t flags;
  pa_operation *o;
  GstAudioClock *clock;

  pa_threaded_mainloop_lock (pulsesrc->mainloop);

  {
    GstAudioRingBufferSpec s = *spec;
    const pa_channel_map *m;

    m = pa_stream_get_channel_map (pulsesrc->stream);
    gst_pulse_channel_map_to_gst (m, &s);
    gst_audio_ring_buffer_set_channel_positions (GST_AUDIO_BASE_SRC
        (pulsesrc)->ringbuffer, s.info.position);
  }

  /* enable event notifications */
  GST_LOG_OBJECT (pulsesrc, "subscribing to context events");
  if (!(o = pa_context_subscribe (pulsesrc->context,
              PA_SUBSCRIPTION_MASK_SOURCE_OUTPUT, NULL, NULL))) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
        ("pa_context_subscribe() failed: %s",
            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
    goto unlock_and_fail;
  }

  pa_operation_unref (o);

  wanted.maxlength = -1;
  wanted.tlength = -1;
  wanted.prebuf = 0;
  wanted.minreq = -1;
  wanted.fragsize = spec->segsize;

  GST_INFO_OBJECT (pulsesrc, "maxlength: %d", wanted.maxlength);
  GST_INFO_OBJECT (pulsesrc, "tlength:   %d", wanted.tlength);
  GST_INFO_OBJECT (pulsesrc, "prebuf:    %d", wanted.prebuf);
  GST_INFO_OBJECT (pulsesrc, "minreq:    %d", wanted.minreq);
  GST_INFO_OBJECT (pulsesrc, "fragsize:  %d", wanted.fragsize);

  flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
      PA_STREAM_NOT_MONOTONIC | PA_STREAM_ADJUST_LATENCY |
      PA_STREAM_START_CORKED;

  if (pulsesrc->mute_set && pulsesrc->mute)
    flags |= PA_STREAM_START_MUTED;

  if (pa_stream_connect_record (pulsesrc->stream, pulsesrc->device, &wanted,
          flags) < 0) {
    goto connect_failed;
  }

  /* our clock will now start from 0 again */
  clock = GST_AUDIO_CLOCK (GST_AUDIO_BASE_SRC (pulsesrc)->clock);
  gst_audio_clock_reset (clock, 0);

  pulsesrc->corked = TRUE;

  for (;;) {
    pa_stream_state_t state;

    state = pa_stream_get_state (pulsesrc->stream);

    if (!PA_STREAM_IS_GOOD (state))
      goto stream_is_bad;

    if (state == PA_STREAM_READY)
      break;

    /* Wait until the stream is ready */
    pa_threaded_mainloop_wait (pulsesrc->mainloop);
  }
  pulsesrc->stream_connected = TRUE;

  /* store the source output index so it can be accessed via a property */
  pulsesrc->source_output_idx = pa_stream_get_index (pulsesrc->stream);
  g_object_notify (G_OBJECT (pulsesrc), "source-output-index");

  if (pulsesrc->volume_set) {
    gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
    pulsesrc->volume_set = FALSE;
  }

  /* get the actual buffering properties now */
  actual = pa_stream_get_buffer_attr (pulsesrc->stream);

  GST_INFO_OBJECT (pulsesrc, "maxlength: %d", actual->maxlength);
  GST_INFO_OBJECT (pulsesrc, "tlength:   %d (wanted: %d)",
      actual->tlength, wanted.tlength);
  GST_INFO_OBJECT (pulsesrc, "prebuf:    %d", actual->prebuf);
  GST_INFO_OBJECT (pulsesrc, "minreq:    %d (wanted %d)", actual->minreq,
      wanted.minreq);
  GST_INFO_OBJECT (pulsesrc, "fragsize:  %d (wanted %d)",
      actual->fragsize, wanted.fragsize);

  if (actual->fragsize >= wanted.fragsize) {
    spec->segsize = actual->fragsize;
  } else {
    spec->segsize = actual->fragsize * (wanted.fragsize / actual->fragsize);
  }
  spec->segtotal = actual->maxlength / spec->segsize;

  if (!pulsesrc->paused) {
    GST_DEBUG_OBJECT (pulsesrc, "uncorking because we are playing");
    gst_pulsesrc_set_corked (pulsesrc, FALSE, FALSE);
  }
  pa_threaded_mainloop_unlock (pulsesrc->mainloop);

  return TRUE;

  /* ERRORS */
connect_failed:
  {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
        ("Failed to connect stream: %s",
            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
    goto unlock_and_fail;
  }
stream_is_bad:
  {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
        ("Failed to connect stream: %s",
            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
    goto unlock_and_fail;
  }
unlock_and_fail:
  {
    gst_pulsesrc_destroy_stream (pulsesrc);

    pa_threaded_mainloop_unlock (pulsesrc->mainloop);
    return FALSE;
  }
}
Example #24
static ALCenum pulse_open_capture(ALCdevice *device, const ALCchar *device_name) //{{{
{
    char *pulse_name = NULL;
    pulse_data *data;
    pa_stream_flags_t flags = 0;
    pa_stream_state_t state;
    pa_channel_map chanmap;

    if(!allCaptureDevNameMap)
        probe_devices(AL_TRUE);

    if(!device_name)
        device_name = pulse_device;
    else if(strcmp(device_name, pulse_device) != 0)
    {
        ALuint i;

        for(i = 0;i < numCaptureDevNames;i++)
        {
            if(strcmp(device_name, allCaptureDevNameMap[i].name) == 0)
            {
                pulse_name = allCaptureDevNameMap[i].device_name;
                break;
            }
        }
        if(i == numCaptureDevNames)
            return ALC_INVALID_VALUE;
    }

    if(pulse_open(device, device_name) == ALC_FALSE)
        return ALC_INVALID_VALUE;

    data = device->ExtraData;
    pa_threaded_mainloop_lock(data->loop);

    data->samples = device->UpdateSize * device->NumUpdates;
    data->frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType);
    data->samples = maxu(data->samples, 100 * device->Frequency / 1000);

    if(!(data->ring = CreateRingBuffer(data->frame_size, data->samples)))
    {
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    data->attr.minreq = -1;
    data->attr.prebuf = -1;
    data->attr.maxlength = data->samples * data->frame_size;
    data->attr.tlength = -1;
    data->attr.fragsize = minu(data->samples, 50*device->Frequency/1000) *
                          data->frame_size;

    data->spec.rate = device->Frequency;
    data->spec.channels = ChannelsFromDevFmt(device->FmtChans);

    switch(device->FmtType)
    {
        case DevFmtUByte:
            data->spec.format = PA_SAMPLE_U8;
            break;
        case DevFmtShort:
            data->spec.format = PA_SAMPLE_S16NE;
            break;
        case DevFmtFloat:
            data->spec.format = PA_SAMPLE_FLOAT32NE;
            break;
        case DevFmtByte:
        case DevFmtUShort:
            ERR("Capture format type %#x capture not supported on PulseAudio\n", device->FmtType);
            pa_threaded_mainloop_unlock(data->loop);
            goto fail;
    }

    if(pa_sample_spec_valid(&data->spec) == 0)
    {
        ERR("Invalid sample format\n");
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    if(!pa_channel_map_init_auto(&chanmap, data->spec.channels, PA_CHANNEL_MAP_WAVEEX))
    {
        ERR("Couldn't build map for channel count (%d)!\n", data->spec.channels);
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    data->stream = pa_stream_new(data->context, "Capture Stream", &data->spec, &chanmap);
    if(!data->stream)
    {
        ERR("pa_stream_new() failed: %s\n",
            pa_strerror(pa_context_errno(data->context)));

        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    pa_stream_set_state_callback(data->stream, stream_state_callback, data->loop);

    flags |= PA_STREAM_START_CORKED|PA_STREAM_ADJUST_LATENCY;
    if(pa_stream_connect_record(data->stream, pulse_name, &data->attr, flags) < 0)
    {
        ERR("Stream did not connect: %s\n",
            pa_strerror(pa_context_errno(data->context)));

        pa_stream_unref(data->stream);
        data->stream = NULL;

        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    while((state=pa_stream_get_state(data->stream)) != PA_STREAM_READY)
    {
        if(!PA_STREAM_IS_GOOD(state))
        {
            ERR("Stream did not get ready: %s\n",
                pa_strerror(pa_context_errno(data->context)));

            pa_stream_unref(data->stream);
            data->stream = NULL;

            pa_threaded_mainloop_unlock(data->loop);
            goto fail;
        }

        pa_threaded_mainloop_wait(data->loop);
    }
    pa_stream_set_state_callback(data->stream, stream_state_callback2, device);

    pa_threaded_mainloop_unlock(data->loop);
    return ALC_NO_ERROR;

fail:
    pulse_close(device);
    return ALC_INVALID_VALUE;
} //}}}
Example #25
int main(int argc, const char *argv[])
{
    pa_mainloop *pa_ml = NULL;
    pa_mainloop_api *pa_mlapi = NULL;
    pa_operation *pa_op = NULL;
    pa_context *pa_ctx = NULL;

    int pa_ready = 0;
    int state = 0;
    
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api(pa_ml);
    pa_ctx = pa_context_new(pa_mlapi, "deepin");

    pa_context_connect(pa_ctx, NULL, 0, NULL);
    pa_context_set_state_callback(pa_ctx, pa_state_cb, &pa_ready);

    for (;;) {
        if (0 == pa_ready) {
            pa_mainloop_iterate(pa_ml, 1, NULL);
            continue;
        }
        if (2 == pa_ready) {
            pa_context_disconnect(pa_ctx);
            pa_context_unref(pa_ctx);
            pa_mainloop_free(pa_ml);
            return -1;
        }
        switch (state) {
            case 0:
                if (pa_context_get_server_protocol_version (pa_ctx) < 13) {
                        return -1;
                }
                printf("server version: %d\n", pa_context_get_server_protocol_version(pa_ctx));

                pa_stream *s = NULL;
                pa_proplist   *proplist;

                pa_buffer_attr attr;
                pa_sample_spec ss;

                int res;
                char dev_name[40];

                // pa_sample_spec
                ss.channels = 1;
                ss.format = PA_SAMPLE_FLOAT32;
                ss.rate = 25;

                // pa_buffer_attr
                memset(&attr, 0, sizeof(attr));
                attr.fragsize = sizeof(float);
                attr.maxlength = (uint32_t) -1;


                // pa_proplist
                proplist = pa_proplist_new ();
                pa_proplist_sets (proplist, PA_PROP_APPLICATION_ID, "deepin.sound");

                // create new stream
                if (!(s = pa_stream_new_with_proplist(pa_ctx, "Peak detect", &ss, NULL, proplist))) {
                    fprintf(stderr, "pa_stream_new error\n");
                    return -2;
                }
                pa_proplist_free(proplist);

                /*pa_stream_set_monitor_stream(s, 26);*/

                pa_stream_set_read_callback(s, on_monitor_read_callback, NULL);
                pa_stream_set_suspended_callback(s, on_monitor_suspended_callback, NULL);
                res = pa_stream_connect_record(s, NULL, &attr, 
                                               (pa_stream_flags_t) (PA_STREAM_DONT_MOVE
                                                                    |PA_STREAM_PEAK_DETECT
                                                                    |PA_STREAM_ADJUST_LATENCY));
                
                if (res < 0) {
                    fprintf(stderr, "Failed to connect monitoring stream\n");
                    return -3;
                }
                state++;
                break;
            case 1:
                usleep(100);
                break;
            case 2:
                return 0;
                break;
            default:
                return -1;
        }
        pa_mainloop_iterate(pa_ml, 1, NULL);
    }

    return 0;
}
Example #26
static gboolean
gst_pulsesrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
{
  pa_buffer_attr wanted;
  const pa_buffer_attr *actual;
  GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);

  pa_threaded_mainloop_lock (pulsesrc->mainloop);

  wanted.maxlength = -1;
  wanted.tlength = -1;
  wanted.prebuf = 0;
  wanted.minreq = -1;
  wanted.fragsize = spec->segsize;

  GST_INFO_OBJECT (pulsesrc, "maxlength: %d", wanted.maxlength);
  GST_INFO_OBJECT (pulsesrc, "tlength:   %d", wanted.tlength);
  GST_INFO_OBJECT (pulsesrc, "prebuf:    %d", wanted.prebuf);
  GST_INFO_OBJECT (pulsesrc, "minreq:    %d", wanted.minreq);
  GST_INFO_OBJECT (pulsesrc, "fragsize:  %d", wanted.fragsize);

  if (pa_stream_connect_record (pulsesrc->stream, pulsesrc->device, &wanted,
          PA_STREAM_INTERPOLATE_TIMING |
          PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_NOT_MONOTONOUS |
#ifdef HAVE_PULSE_0_9_11
          PA_STREAM_ADJUST_LATENCY |
#endif
          PA_STREAM_START_CORKED) < 0) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
        ("Failed to connect stream: %s",
            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
    goto unlock_and_fail;
  }

  pulsesrc->corked = TRUE;

  for (;;) {
    pa_stream_state_t state;

    state = pa_stream_get_state (pulsesrc->stream);

    if (!PA_STREAM_IS_GOOD (state)) {
      GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
          ("Failed to connect stream: %s",
              pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
      goto unlock_and_fail;
    }

    if (state == PA_STREAM_READY)
      break;

    /* Wait until the stream is ready */
    pa_threaded_mainloop_wait (pulsesrc->mainloop);
  }

  /* get the actual buffering properties now */
  actual = pa_stream_get_buffer_attr (pulsesrc->stream);

  GST_INFO_OBJECT (pulsesrc, "maxlength: %d", actual->maxlength);
  GST_INFO_OBJECT (pulsesrc, "tlength:   %d (wanted: %d)",
      actual->tlength, wanted.tlength);
  GST_INFO_OBJECT (pulsesrc, "prebuf:    %d", actual->prebuf);
  GST_INFO_OBJECT (pulsesrc, "minreq:    %d (wanted %d)", actual->minreq,
      wanted.minreq);
  GST_INFO_OBJECT (pulsesrc, "fragsize:  %d (wanted %d)",
      actual->fragsize, wanted.fragsize);

  if (actual->fragsize >= wanted.fragsize) {
    spec->segsize = actual->fragsize;
  } else {
    spec->segsize = actual->fragsize * (wanted.fragsize / actual->fragsize);
  }
  spec->segtotal = actual->maxlength / spec->segsize;

  pa_threaded_mainloop_unlock (pulsesrc->mainloop);

  return TRUE;

unlock_and_fail:
  {
    gst_pulsesrc_destroy_stream (pulsesrc);

    pa_threaded_mainloop_unlock (pulsesrc->mainloop);
    return FALSE;
  }
}
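The stream above is connected with PA_STREAM_START_CORKED and pulsesrc->corked is set, so capture only begins once the element later uncorks it. That start step is not shown here; a minimal, hypothetical uncork helper (to be called with the threaded mainloop locked) could look like this:

#include <pulse/pulseaudio.h>

/* Hedged sketch, not the GstPulseSrc source: resume a record stream that
 * was connected with PA_STREAM_START_CORKED. Caller holds the mainloop lock. */
static void uncork_record_stream(pa_stream *stream)
{
    pa_operation *o = pa_stream_cork(stream, 0 /* 0 = uncork/resume */, NULL, NULL);
    if (o != NULL)
        pa_operation_unref(o);
}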
Exemple #27
0
pa_simple* pa_simple_new(
        const char *server,
        const char *name,
        pa_stream_direction_t dir,
        const char *dev,
        const char *stream_name,
        const pa_sample_spec *ss,
        const pa_channel_map *map,
        const pa_buffer_attr *attr,
        int *rerror) {

    pa_simple *p;
    int error = PA_ERR_INTERNAL, r;

    CHECK_VALIDITY_RETURN_ANY(rerror, !server || *server, PA_ERR_INVALID, NULL);
    CHECK_VALIDITY_RETURN_ANY(rerror, dir == PA_STREAM_PLAYBACK || dir == PA_STREAM_RECORD, PA_ERR_INVALID, NULL);
    CHECK_VALIDITY_RETURN_ANY(rerror, !dev || *dev, PA_ERR_INVALID, NULL);
    CHECK_VALIDITY_RETURN_ANY(rerror, ss && pa_sample_spec_valid(ss), PA_ERR_INVALID, NULL);
    CHECK_VALIDITY_RETURN_ANY(rerror, !map || (pa_channel_map_valid(map) && map->channels == ss->channels), PA_ERR_INVALID, NULL);

    p = pa_xnew0(pa_simple, 1);
    p->direction = dir;

    if (!(p->mainloop = pa_threaded_mainloop_new()))
        goto fail;

    if (!(p->context = pa_context_new(pa_threaded_mainloop_get_api(p->mainloop), name)))
        goto fail;

    pa_context_set_state_callback(p->context, context_state_cb, p);

    if (pa_context_connect(p->context, server, 0, NULL) < 0) {
        error = pa_context_errno(p->context);
        goto fail;
    }

    pa_threaded_mainloop_lock(p->mainloop);

    if (pa_threaded_mainloop_start(p->mainloop) < 0)
        goto unlock_and_fail;

    for (;;) {
        pa_context_state_t state;

        state = pa_context_get_state(p->context);

        if (state == PA_CONTEXT_READY)
            break;

        if (!PA_CONTEXT_IS_GOOD(state)) {
            error = pa_context_errno(p->context);
            goto unlock_and_fail;
        }

        /* Wait until the context is ready */
        pa_threaded_mainloop_wait(p->mainloop);
    }

    if (!(p->stream = pa_stream_new(p->context, stream_name, ss, map))) {
        error = pa_context_errno(p->context);
        goto unlock_and_fail;
    }

    pa_stream_set_state_callback(p->stream, stream_state_cb, p);
    pa_stream_set_read_callback(p->stream, stream_request_cb, p);
    pa_stream_set_write_callback(p->stream, stream_request_cb, p);
    pa_stream_set_latency_update_callback(p->stream, stream_latency_update_cb, p);

    if (dir == PA_STREAM_PLAYBACK)
        r = pa_stream_connect_playback(p->stream, dev, attr,
                                       PA_STREAM_INTERPOLATE_TIMING
                                       |PA_STREAM_ADJUST_LATENCY
                                       |PA_STREAM_AUTO_TIMING_UPDATE, NULL, NULL);
    else
        r = pa_stream_connect_record(p->stream, dev, attr,
                                     PA_STREAM_INTERPOLATE_TIMING
                                     |PA_STREAM_ADJUST_LATENCY
                                     |PA_STREAM_AUTO_TIMING_UPDATE);

    if (r < 0) {
        error = pa_context_errno(p->context);
        goto unlock_and_fail;
    }

    for (;;) {
        pa_stream_state_t state;

        state = pa_stream_get_state(p->stream);

        if (state == PA_STREAM_READY)
            break;

        if (!PA_STREAM_IS_GOOD(state)) {
            error = pa_context_errno(p->context);
            goto unlock_and_fail;
        }

        /* Wait until the stream is ready */
        pa_threaded_mainloop_wait(p->mainloop);
    }

    pa_threaded_mainloop_unlock(p->mainloop);

    return p;

unlock_and_fail:
    pa_threaded_mainloop_unlock(p->mainloop);

fail:
    if (rerror)
        *rerror = error;
    pa_simple_free(p);
    return NULL;
}
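For comparison with the asynchronous examples above, here is a hedged usage sketch of this simple API on the recording side (application name, stream name, sample spec and buffer size are illustrative, not taken from any project in this collection):

#include <pulse/simple.h>
#include <pulse/error.h>
#include <stdint.h>
#include <stdio.h>

/* Hedged sketch: capture one buffer from the default source via pa_simple. */
int capture_one_buffer(void)
{
    static const pa_sample_spec ss = {
        .format   = PA_SAMPLE_S16LE,
        .rate     = 44100,
        .channels = 2
    };
    uint8_t buf[4096];
    int error = 0;

    pa_simple *s = pa_simple_new(NULL,          /* default server */
                                 "example-app", /* application name (illustrative) */
                                 PA_STREAM_RECORD,
                                 NULL,          /* default source */
                                 "record",      /* stream name */
                                 &ss, NULL, NULL, &error);
    if (s == NULL) {
        fprintf(stderr, "pa_simple_new() failed: %s\n", pa_strerror(error));
        return -1;
    }

    if (pa_simple_read(s, buf, sizeof(buf), &error) < 0)
        fprintf(stderr, "pa_simple_read() failed: %s\n", pa_strerror(error));

    pa_simple_free(s);
    return 0;
}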
bool QPulseAudioInput::open()
{
    if (m_opened)
        return true;

    QPulseAudioEngine *pulseEngine = QPulseAudioEngine::instance();

    if (!pulseEngine->context() || pa_context_get_state(pulseEngine->context()) != PA_CONTEXT_READY) {
        setError(QAudio::FatalError);
        setState(QAudio::StoppedState);
        return false;
    }

    pa_sample_spec spec = QPulseAudioInternal::audioFormatToSampleSpec(m_format);

    if (!pa_sample_spec_valid(&spec)) {
        setError(QAudio::OpenError);
        setState(QAudio::StoppedState);
        return false;
    }

    m_spec = spec;

#ifdef DEBUG_PULSE
//    QTime now(QTime::currentTime());
//    qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
#endif

    if (m_streamName.isNull())
        m_streamName = QString(QLatin1String("QtmPulseStream-%1-%2")).arg(::getpid()).arg(quintptr(this)).toUtf8();

#ifdef DEBUG_PULSE
        qDebug() << "Format: " << QPulseAudioInternal::sampleFormatToQString(spec.format);
        qDebug() << "Rate: " << spec.rate;
        qDebug() << "Channels: " << spec.channels;
        qDebug() << "Frame size: " << pa_frame_size(&spec);
#endif

    pulseEngine->lock();
    pa_channel_map channel_map;

    pa_channel_map_init_extend(&channel_map, spec.channels, PA_CHANNEL_MAP_DEFAULT);

    if (!pa_channel_map_compatible(&channel_map, &spec))
        qWarning() << "Channel map doesn't match sample specification!";

    m_stream = pa_stream_new(pulseEngine->context(), m_streamName.constData(), &spec, &channel_map);

    pa_stream_set_state_callback(m_stream, inputStreamStateCallback, this);
    pa_stream_set_read_callback(m_stream, inputStreamReadCallback, this);

    pa_stream_set_underflow_callback(m_stream, inputStreamUnderflowCallback, this);
    pa_stream_set_overflow_callback(m_stream, inputStreamOverflowCallback, this);

    m_periodSize = pa_usec_to_bytes(PeriodTimeMs*1000, &spec);

    int flags = 0;
    pa_buffer_attr buffer_attr;
    buffer_attr.maxlength = (uint32_t) -1;
    buffer_attr.prebuf = (uint32_t) -1;
    buffer_attr.tlength = (uint32_t) -1;
    buffer_attr.minreq = (uint32_t) -1;
    flags |= PA_STREAM_ADJUST_LATENCY;

    if (m_bufferSize > 0)
        buffer_attr.fragsize = (uint32_t) m_bufferSize;
    else
        buffer_attr.fragsize = (uint32_t) m_periodSize;

    if (pa_stream_connect_record(m_stream, m_device.data(), &buffer_attr, (pa_stream_flags_t)flags) < 0) {
        qWarning() << "pa_stream_connect_record() failed!";
        pa_stream_unref(m_stream);
        m_stream = 0;
        pulseEngine->unlock();
        setError(QAudio::OpenError);
        setState(QAudio::StoppedState);
        return false;
    }

    while (pa_stream_get_state(m_stream) != PA_STREAM_READY)
        pa_threaded_mainloop_wait(pulseEngine->mainloop());

    const pa_buffer_attr *actualBufferAttr = pa_stream_get_buffer_attr(m_stream);
    m_periodSize = actualBufferAttr->fragsize;
    m_periodTime = pa_bytes_to_usec(m_periodSize, &spec) / 1000;
    if (actualBufferAttr->tlength != (uint32_t)-1)
        m_bufferSize = actualBufferAttr->tlength;

    pulseEngine->unlock();

    connect(pulseEngine, &QPulseAudioEngine::contextFailed, this, &QPulseAudioInput::onPulseContextFailed);

    m_opened = true;
    m_timer->start(m_periodTime);

    m_clockStamp.restart();
    m_timeStamp.restart();
    m_elapsedTimeOffset = 0;
    m_totalTimeValue = 0;

    return true;
}
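The fragsize requested above is derived from a period time via pa_usec_to_bytes(). A small stand-alone illustration of that conversion (the 50 ms period and the sample spec below are assumed values, not the ones used by the Qt code):

#include <pulse/pulseaudio.h>
#include <stdio.h>

/* 50 ms of S16LE stereo at 44100 Hz: 44100 frames/s * 0.050 s * 4 bytes/frame
 * = 8820 bytes, which would then be requested as buffer_attr.fragsize. */
int main(void)
{
    const pa_sample_spec spec = {
        .format   = PA_SAMPLE_S16LE,
        .rate     = 44100,
        .channels = 2
    };
    size_t fragsize = pa_usec_to_bytes(50 * PA_USEC_PER_MSEC, &spec);

    printf("fragsize for a 50 ms period: %zu bytes\n", fragsize); /* 8820 */
    return 0;
}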
AudioStream::AudioStream(pa_context *c,
                         pa_threaded_mainloop *m,
                         const char *desc,
                         int type,
                         unsigned samplrate,
                         const PaDeviceInfos* infos,
                         bool ec)
    : audiostream_(0), mainloop_(m)
{
    const pa_channel_map channel_map = infos->channel_map;

    pa_sample_spec sample_spec = {
        PA_SAMPLE_S16LE, // PA_SAMPLE_FLOAT32LE,
        samplrate,
        channel_map.channels
    };

    RING_DBG("%s: trying to create stream with device %s (%dHz, %d channels)", desc, infos->name.c_str(), samplrate, channel_map.channels);

    assert(pa_sample_spec_valid(&sample_spec));
    assert(pa_channel_map_valid(&channel_map));

    std::unique_ptr<pa_proplist, decltype(pa_proplist_free)&> pl (pa_proplist_new(), pa_proplist_free);
    pa_proplist_sets(pl.get(), PA_PROP_FILTER_WANT, "echo-cancel");

    audiostream_ = pa_stream_new_with_proplist(c, desc, &sample_spec, &channel_map, ec ? pl.get() : nullptr);
    if (!audiostream_) {
        RING_ERR("%s: pa_stream_new() failed : %s" , desc, pa_strerror(pa_context_errno(c)));
        throw std::runtime_error("Could not create stream\n");
    }

    pa_buffer_attr attributes;
    attributes.maxlength = pa_usec_to_bytes(160 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.tlength = pa_usec_to_bytes(80 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.prebuf = 0;
    attributes.fragsize = pa_usec_to_bytes(80 * PA_USEC_PER_MSEC, &sample_spec);
    attributes.minreq = (uint32_t) -1;

    {
        PulseMainLoopLock lock(mainloop_);
        const pa_stream_flags_t flags = static_cast<pa_stream_flags_t>(PA_STREAM_ADJUST_LATENCY | PA_STREAM_AUTO_TIMING_UPDATE);

        if (type == PLAYBACK_STREAM || type == RINGTONE_STREAM) {
            pa_stream_connect_playback(audiostream_,
                    infos->name.empty() ? NULL : infos->name.c_str(),
                    &attributes,
                    flags,
                    NULL, NULL);
        } else if (type == CAPTURE_STREAM) {
            pa_stream_connect_record(audiostream_,
                    infos->name.empty() ? NULL : infos->name.c_str(),
                    &attributes,
                    flags);
        }
    }

    pa_stream_set_state_callback(audiostream_, [](pa_stream* s, void* user_data){
        static_cast<AudioStream*>(user_data)->stateChanged(s);
    }, this);
    pa_stream_set_moved_callback(audiostream_, [](pa_stream* s, void* user_data){
        static_cast<AudioStream*>(user_data)->moved(s);
    }, this);
}

static int drvHostPulseAudioOpen(bool fIn, const char *pszName,
                                 pa_sample_spec *pSampleSpec, pa_buffer_attr *pBufAttr,
                                 pa_stream **ppStream)
{
    AssertPtrReturn(pszName, VERR_INVALID_POINTER);
    AssertPtrReturn(pSampleSpec, VERR_INVALID_POINTER);
    AssertPtrReturn(pBufAttr, VERR_INVALID_POINTER);
    AssertPtrReturn(ppStream, VERR_INVALID_POINTER);

    if (!pa_sample_spec_valid(pSampleSpec))
    {
        LogRel(("PulseAudio: Unsupported sample specification for stream \"%s\"\n",
                pszName));
        return VERR_NOT_SUPPORTED;
    }

    int rc = VINF_SUCCESS;

    pa_stream *pStream = NULL;
    uint32_t   flags = PA_STREAM_NOFLAGS;

    LogFunc(("Opening \"%s\", rate=%dHz, channels=%d, format=%s\n",
             pszName, pSampleSpec->rate, pSampleSpec->channels,
             pa_sample_format_to_string(pSampleSpec->format)));

    pa_threaded_mainloop_lock(g_pMainLoop);

    do
    {
        if (!(pStream = pa_stream_new(g_pContext, pszName, pSampleSpec,
                                      NULL /* pa_channel_map */)))
        {
            LogRel(("PulseAudio: Could not create stream \"%s\"\n", pszName));
            rc = VERR_NO_MEMORY;
            break;
        }

        pa_stream_set_state_callback(pStream, drvHostPulseAudioCbStreamState, NULL);

#if PA_API_VERSION >= 12
        /* XXX */
        flags |= PA_STREAM_ADJUST_LATENCY;
#endif

#if 0
        /* Not applicable as we don't use pa_stream_get_latency() and pa_stream_get_time(). */
        flags |= PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE;
#endif
        /* No input/output right away after the stream was started. */
        flags |= PA_STREAM_START_CORKED;

        if (fIn)
        {
            LogFunc(("Input stream attributes: maxlength=%d fragsize=%d\n",
                     pBufAttr->maxlength, pBufAttr->fragsize));

            if (pa_stream_connect_record(pStream, /*dev=*/NULL, pBufAttr, (pa_stream_flags_t)flags) < 0)
            {
                LogRel(("PulseAudio: Could not connect input stream \"%s\": %s\n",
                        pszName, pa_strerror(pa_context_errno(g_pContext))));
                rc = VERR_AUDIO_BACKEND_INIT_FAILED;
                break;
            }
        }
        else
        {
            LogFunc(("Output buffer attributes: maxlength=%d tlength=%d prebuf=%d minreq=%d\n",
                     pBufAttr->maxlength, pBufAttr->tlength, pBufAttr->prebuf, pBufAttr->minreq));

            if (pa_stream_connect_playback(pStream, /*dev=*/NULL, pBufAttr, (pa_stream_flags_t)flags,
                                           /*cvolume=*/NULL, /*sync_stream=*/NULL) < 0)
            {
                LogRel(("PulseAudio: Could not connect playback stream \"%s\": %s\n",
                        pszName, pa_strerror(pa_context_errno(g_pContext))));
                rc = VERR_AUDIO_BACKEND_INIT_FAILED;
                break;
            }
        }

        /* Wait until the stream is ready. */
        for (;;)
        {
            if (!g_fAbortMainLoop)
                pa_threaded_mainloop_wait(g_pMainLoop);
            g_fAbortMainLoop = false;

            pa_stream_state_t sstate = pa_stream_get_state(pStream);
            if (sstate == PA_STREAM_READY)
                break;
            else if (   sstate == PA_STREAM_FAILED
                     || sstate == PA_STREAM_TERMINATED)
            {
                LogRel(("PulseAudio: Failed to initialize stream \"%s\" (state %ld)\n",
                        pszName, sstate));
                rc = VERR_AUDIO_BACKEND_INIT_FAILED;
                break;
            }
        }

        if (RT_FAILURE(rc))
            break;

        const pa_buffer_attr *pBufAttrObtained = pa_stream_get_buffer_attr(pStream);
        AssertPtr(pBufAttrObtained);
        memcpy(pBufAttr, pBufAttrObtained, sizeof(pa_buffer_attr));

        if (fIn)
            LogFunc(("Obtained record buffer attributes: maxlength=%RU32, fragsize=%RU32\n",
                     pBufAttr->maxlength, pBufAttr->fragsize));
        else
            LogFunc(("Obtained playback buffer attributes: maxlength=%d, tlength=%d, prebuf=%d, minreq=%d\n",
                     pBufAttr->maxlength, pBufAttr->tlength, pBufAttr->prebuf, pBufAttr->minreq));

    }
    while (0);

    if (   RT_FAILURE(rc)
        && pStream)
        pa_stream_disconnect(pStream);

    pa_threaded_mainloop_unlock(g_pMainLoop);

    if (RT_FAILURE(rc))
    {
        if (pStream)
            pa_stream_unref(pStream);
    }
    else
        *ppStream = pStream;

    LogFlowFuncLeaveRC(rc);
    return rc;
}
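The drvHostPulseAudioCbStreamState callback registered above is not included in this excerpt; the wait loop only works if every stream state change signals the threaded main loop. A minimal sketch under that assumption (the real VirtualBox handler also manages g_fAbortMainLoop and related state):

#include <pulse/pulseaudio.h>

extern pa_threaded_mainloop *g_pMainLoop; /* defined elsewhere in this driver */

/* Hedged sketch, not the VirtualBox source: wake pa_threaded_mainloop_wait()
 * whenever the stream changes state so the connect loop can re-check it. */
static void drvHostPulseAudioCbStreamState(pa_stream *pStream, void *pvUser)
{
    (void)pStream;
    (void)pvUser;
    pa_threaded_mainloop_signal(g_pMainLoop, 0 /* don't wait for accept */);
}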