Example #1
static void webrtc_aec_postprocess(MSFilter *f)
{
	WebRTCAECState *s = (WebRTCAECState *) f->data;

	ms_bufferizer_flush(&s->delayed_ref);
	ms_bufferizer_flush(&s->echo);
	ms_bufferizer_flush(&s->ref);
	if (s->aecmInst != NULL) {
		WebRtcAecm_Free(s->aecmInst);
		s->aecmInst = NULL;
	}
}
Example #2
static void speex_ec_postprocess(MSFilter *f){
	SpeexECState *s=(SpeexECState*)f->data;

	ms_bufferizer_flush (&s->delayed_ref);
	ms_bufferizer_flush (&s->echo);
	ms_bufferizer_flush (&s->ref);
	if (s->ecstate!=NULL){
		speex_echo_state_destroy(s->ecstate);
		s->ecstate=NULL;
	}
	if (s->den!=NULL){
		speex_preprocess_state_destroy(s->den);
		s->den=NULL;
	}
}
Example #3
static OSStatus writeRenderProc(void *inRefCon, 
						 AudioUnitRenderActionFlags *inActionFlags,
						 const AudioTimeStamp *inTimeStamp, 
						 UInt32 inBusNumber,
						 UInt32 inNumFrames, 
						 AudioBufferList *ioData)
{
	AUWrite *d=(AUWrite*)inRefCon;
	int read;

	if (ioData->mNumberBuffers!=1) ms_warning("writeRenderProc: %"UINT32_PRINTF" buffers",ioData->mNumberBuffers);
	ms_mutex_lock(&d->common.mutex);
	read=ms_bufferizer_read(d->buffer,ioData->mBuffers[0].mData,ioData->mBuffers[0].mDataByteSize);
	if (ms_bufferizer_get_avail(d->buffer) >10*inNumFrames*2) {
		ms_message("we are late, bufferizer size is [%i] bytes and frame size is [%"UINT32_PRINTF"] bytes"
					,(int)ms_bufferizer_get_avail(d->buffer)
					,inNumFrames*2);
		ms_bufferizer_flush(d->buffer);
	}

	ms_mutex_unlock(&d->common.mutex);
	if (read==0){
		ms_debug("Silence inserted in audio output unit (%"UINT32_PRINTF" bytes)",ioData->mBuffers[0].mDataByteSize);
		memset(ioData->mBuffers[0].mData,0,ioData->mBuffers[0].mDataByteSize);
	}
	return 0;
}
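The render callback above is only invoked once it has been registered on the output AudioUnit. The snippet below is a minimal sketch of that wiring using the standard CoreAudio AURenderCallbackStruct / AudioUnitSetProperty calls; the register_write_callback() wrapper and the io_unit parameter are assumed names for illustration, not code from the project above.

#include <AudioUnit/AudioUnit.h>

/* Sketch: hook writeRenderProc up as the render callback of the output
 * AudioUnit, so CoreAudio pulls data out of the bufferizer on its own
 * thread. "io_unit" and the wrapper name are illustrative assumptions. */
static OSStatus register_write_callback(AudioUnit io_unit, AUWrite *d) {
	AURenderCallbackStruct cb;
	cb.inputProc = writeRenderProc;   /* the callback defined above        */
	cb.inputProcRefCon = d;           /* handed back to it as inRefCon     */
	return AudioUnitSetProperty(io_unit,
		kAudioUnitProperty_SetRenderCallback,
		kAudioUnitScope_Input,
		0,                            /* output element (bus 0)            */
		&cb, sizeof(cb));
}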
Example #4
static void au_configure_write(AUData *d) {
	d->write_started=TRUE;
	au_configure(d);
	ms_mutex_lock(&d->mutex);
	ms_bufferizer_flush(d->bufferizer);
	ms_mutex_unlock(&d->mutex);	
}
Example #5
	~msandroid_sound_write_data() {
		ms_bufferizer_flush(bufferizer);
		ms_bufferizer_destroy(bufferizer);
		ms_cond_destroy(&cond);
		if (audio_track_class!=0){
			JNIEnv *env = ms_get_jni_env();
			env->DeleteGlobalRef(audio_track_class);
		}
	}
Example #6
static void au_unconfigure_write(AUData *d){
	if(d->write_started == TRUE) {
		d->write_started=FALSE;
		AudioUnitElement outputBus = 0;
		AudioUnitReset(d->io_unit,
			kAudioUnitScope_Global,
			outputBus);		
	}
	ms_message("[%i] frames of silence inserted for [%i] ms.",d->n_lost_frame,(d->n_lost_frame*1000)/d->rate);
	au_unconfigure(d);
	ms_mutex_lock(&d->mutex);
	ms_bufferizer_flush(d->bufferizer);
	ms_mutex_unlock(&d->mutex);
}
Example #7
    ~msandroid_sound_write_data() {
        ms_bufferizer_flush(bufferizer);
        ms_bufferizer_destroy(bufferizer);
        ms_cond_destroy(&cond);
        if (audio_track_class!=0) {
            //JNIEnv *env = ms_get_jni_env();

            JNIEnv *env = NULL;
            JavaVM *jvm = ms_get_jvm();
            if (jvm->AttachCurrentThread(&env, NULL)!=0) {
                ms_fatal("AttachCurrentThread() failed !");
            }
            env->DeleteGlobalRef(audio_track_class);
            jvm->DetachCurrentThread();
        }
    }
Example #8
static OSStatus au_render_cb (
							  void                        *inRefCon,
							  AudioUnitRenderActionFlags  *ioActionFlags,
							  const AudioTimeStamp        *inTimeStamp,
							  UInt32                      inBusNumber,
							  UInt32                      inNumberFrames,
							  AudioBufferList             *ioData
) {
	ms_debug("render cb");
	AUData *d=(AUData*)inRefCon;
	
	if (d->write_started == TRUE) {
		ioData->mBuffers[0].mDataByteSize=inNumberFrames*d->bits/8;
		ioData->mNumberBuffers=1;
		
		ms_mutex_lock(&d->mutex);
		if(ms_bufferizer_get_avail(d->bufferizer) >= inNumberFrames*d->bits/8) {
			ms_bufferizer_read(d->bufferizer, ioData->mBuffers[0].mData, inNumberFrames*d->bits/8);

			if (ms_bufferizer_get_avail(d->bufferizer) >10*inNumberFrames*d->bits/8) {
				ms_debug("we are late, bufferizer size is %i bytes and frame size is %i bytes",(int)ms_bufferizer_get_avail(d->bufferizer),inNumberFrames*d->bits/8);
				ms_bufferizer_flush(d->bufferizer);
			}
			ms_mutex_unlock(&d->mutex);
			
		} else {
			
			ms_mutex_unlock(&d->mutex);
			memset(ioData->mBuffers[0].mData, 0,ioData->mBuffers[0].mDataByteSize);
			ms_debug("nothing to write, inserting silence; bufferizer size is %i bytes, frame size is %i bytes, mDataByteSize %i"
					 ,(int)ms_bufferizer_get_avail(d->bufferizer)
					 ,inNumberFrames*d->bits/8
					 ,ioData->mBuffers[0].mDataByteSize);
			d->n_lost_frame+=inNumberFrames;
		}
	}
	if (!d->is_ringer) { // no need to read in ringer mode
		AudioBufferList readAudioBufferList;
		readAudioBufferList.mBuffers[0].mDataByteSize=inNumberFrames*d->bits/8; 
		readAudioBufferList.mNumberBuffers=1;
		readAudioBufferList.mBuffers[0].mData=NULL;
		readAudioBufferList.mBuffers[0].mNumberChannels=d->nchannels;
		AudioUnitElement inputBus = 1;
		au_read_cb(d, ioActionFlags, inTimeStamp, inputBus, inNumberFrames, &readAudioBufferList);
	}
	return 0;
}
Example #9
static void* msandroid_write_cb(msandroid_sound_write_data* d) {
	jbyteArray 		write_buff;
	jmethodID 		write_id=0;
	jmethodID play_id=0;
	int min_size=-1;
	int count;
	int max_size=sndwrite_flush_threshold*(float)d->rate*(float)d->nchannels*2.0;
	int check_point_size=3*(float)d->rate*(float)d->nchannels*2.0; /*3 seconds*/
	int nwrites=0;

	set_high_prio();
	int buff_size = d->write_chunk_size;
	JNIEnv *jni_env = ms_get_jni_env();

	// int write  (byte[] audioData, int offsetInBytes, int sizeInBytes)
	write_id = jni_env->GetMethodID(d->audio_track_class,"write", "([BII)I");
	if(write_id==0) {
		ms_error("cannot find AudioTrack.write() method");
		goto end;
	}
	play_id = jni_env->GetMethodID(d->audio_track_class,"play", "()V");
	if(play_id==0) {
		ms_error("cannot find AudioTrack.play() method");
		goto end;
	}
	write_buff = jni_env->NewByteArray(buff_size);
	uint8_t tmpBuff[buff_size];

	//start playing
	jni_env->CallVoidMethod(d->audio_track,play_id);

	ms_mutex_lock(&d->mutex);
	ms_bufferizer_flush(d->bufferizer);
	ms_mutex_unlock(&d->mutex);

	while(d->started) {
		int bufferizer_size;

		ms_mutex_lock(&d->mutex);
		min_size=-1;
		count=0;
		while((bufferizer_size = ms_bufferizer_get_avail(d->bufferizer)) >= d->write_chunk_size) {
			if (min_size==-1) min_size=bufferizer_size;
			else if (bufferizer_size<min_size) min_size=bufferizer_size;

			ms_bufferizer_read(d->bufferizer, tmpBuff, d->write_chunk_size);
			ms_mutex_unlock(&d->mutex);
			jni_env->SetByteArrayRegion(write_buff,0,d->write_chunk_size,(jbyte*)tmpBuff);
			int result = jni_env->CallIntMethod(d->audio_track,write_id,write_buff,0,d->write_chunk_size);
			d->writtenBytes+=result;
			if (result <= 0) {
				ms_error("write operation has failed [%i]",result);
			}
			nwrites++;
			ms_mutex_lock(&d->mutex);
			count+=d->write_chunk_size;
			if (count>check_point_size){
				if (min_size > max_size) {
					ms_warning("we are late, flushing %i bytes",min_size);
					ms_bufferizer_skip_bytes(d->bufferizer,min_size);
				}
				count=0;
			}
		}
		if (d->started) {
			d->sleeping=true;
			ms_cond_wait(&d->cond,&d->mutex);
			d->sleeping=false;
		}
		ms_mutex_unlock(&d->mutex);
	}


	goto end;
	end: {
		ms_thread_exit(NULL);
		return NULL;
	}
}
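msandroid_write_cb only drains the bufferizer; the data arrives from the filter's process() callback running in the ticker thread. The sketch below shows what that counterpart typically looks like. It reuses the field names visible in the snippets above (bufferizer, mutex, cond, sleeping) and standard mediastreamer2 calls (ms_queue_get, ms_bufferizer_put, ms_cond_signal), but the body is illustrative, not the project's actual code.

/* Illustrative counterpart to msandroid_write_cb: the MSFilter process()
 * callback queues incoming packets into the bufferizer and wakes the
 * writer thread blocked in ms_cond_wait(). This is a sketch, not the
 * project's code; field names follow the snippets above. */
static void msandroid_sound_write_process(MSFilter *f) {
	msandroid_sound_write_data *d = (msandroid_sound_write_data *) f->data;
	mblk_t *m;

	ms_mutex_lock(&d->mutex);
	while ((m = ms_queue_get(f->inputs[0])) != NULL) {
		ms_bufferizer_put(d->bufferizer, m);   /* hand the packet to the writer thread */
	}
	if (d->sleeping) ms_cond_signal(&d->cond); /* wake the writer if it is waiting      */
	ms_mutex_unlock(&d->mutex);
}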
Example #10
static void enc_postprocess(MSFilter *f) {
    EncState *s = (EncState*) f->data;
    Encoder_Interface_exit(s->enc);
    s->enc = NULL;
    ms_bufferizer_flush(s->mb);
}
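For context, ms_bufferizer_flush only drops the queued data; the bufferizer itself is created earlier and released later. Below is a minimal sketch of that surrounding lifecycle, assuming the standard mediastreamer2 calls (ms_new0, ms_bufferizer_new, ms_bufferizer_destroy, ms_free); my_enc_init and my_enc_uninit are illustrative names, not the project's actual callbacks.

#include <mediastreamer2/msfilter.h>

/* Sketch of the allocation/teardown that brackets the postprocess flush
 * shown in Example #10. Only the MSBufferizer and ms_new0/ms_free calls
 * are assumed mediastreamer2 API; the callback names are illustrative. */
static void my_enc_init(MSFilter *f) {
	EncState *s = ms_new0(EncState, 1);
	s->mb = ms_bufferizer_new();       /* created once, reused for the whole session */
	f->data = s;
}

static void my_enc_uninit(MSFilter *f) {
	EncState *s = (EncState *) f->data;
	ms_bufferizer_destroy(s->mb);      /* frees what enc_postprocess only flushed    */
	ms_free(s);
	f->data = NULL;
}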