Example #1
int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
  if (!is_component_enabled()) {
    return apm_->kNoError;
  }

  if (!apm_->was_stream_delay_set()) {
    return apm_->kStreamParameterNotSetError;
  }

  if (drift_compensation_enabled_ && !was_stream_drift_set_) {
    return apm_->kStreamParameterNotSetError;
  }

  assert(audio->samples_per_split_channel() <= 160);
  assert(audio->num_channels() == apm_->num_output_channels());

  int err = apm_->kNoError;

  // The ordering convention must be followed to pass to the correct AEC.
  size_t handle_index = 0;
  stream_has_echo_ = false;
  for (int i = 0; i < audio->num_channels(); i++) {
    for (int j = 0; j < apm_->num_reverse_channels(); j++) {
      Handle* my_handle = handle(handle_index);
      err = WebRtcAec_Process(
          my_handle,
          audio->low_pass_split_data(i),
          audio->high_pass_split_data(i),
          audio->low_pass_split_data(i),
          audio->high_pass_split_data(i),
          static_cast<WebRtc_Word16>(audio->samples_per_split_channel()),
          apm_->stream_delay_ms(),
          stream_drift_samples_);

      if (err != apm_->kNoError) {
        err = GetHandleError(my_handle);
        // TODO(ajm): Figure out how to return warnings properly.
        if (err != apm_->kBadStreamParameterWarning) {
          return err;
        }
      }

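      // Ask the AEC whether it detected echo in this block.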
      WebRtc_Word16 status = 0;
      err = WebRtcAec_get_echo_status(my_handle, &status);
      if (err != apm_->kNoError) {
        return GetHandleError(my_handle);
      }

      if (status == 1) {
        stream_has_echo_ = true;
      }

      handle_index++;
    }
  }

  was_stream_drift_set_ = false;
  return apm_->kNoError;
}
Example #2
int filter_audio(Filter_Audio *f_a, int16_t *data, unsigned int samples)
{

    if (!f_a) {
        return -1;
    }

    unsigned int nsx_samples = f_a->fs / 100;
    if (!samples || (samples % nsx_samples) != 0) {
        return -1;
    }

    _Bool resample = 0;
    unsigned int resampled_samples = 0;
    if (f_a->fs != 16000) {
        samples = (samples / nsx_samples) * 160;
        nsx_samples = 160;
        resample = 1;
    }

    unsigned int temp_samples = samples;
    unsigned int smp = f_a->fs / 100;
    int novoice = 1;

    int16_t *d_l = (int16_t *)STACK_ALLOC(nsx_samples * (2 * sizeof(int16_t) + 2 * sizeof(float)) + smp * sizeof(float));
    int16_t *temp = d_l + nsx_samples;
    float *d_f_l = (float *)(temp + nsx_samples);
    float *d_f_h = d_f_l + nsx_samples;
    float *d_f_u = d_f_h + nsx_samples;

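    // Process the audio one 10 ms frame at a time (160 samples after resampling to 16 kHz).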
    while (temp_samples) {
        int16_t *d_h = NULL;
        memset(temp, 0, nsx_samples*sizeof(int16_t));
        if (resample) {
            d_h = temp;
            downsample_audio(f_a, d_l, d_h, data + resampled_samples, smp);
        } else {
            memcpy(d_l, data + (samples - temp_samples), nsx_samples * sizeof(int16_t));
        }

        if (f_a->vad_enabled) {
            if (WebRtcVad_Process(f_a->Vad_handle, 16000, d_l, nsx_samples) == 1) {
                novoice = 0;
            }
        } else {
            novoice = 0;
        }

        if (f_a->gain_enabled) {
            int32_t inMicLevel = 128, outMicLevel;

            if (WebRtcAgc_VirtualMic(f_a->gain_control, d_l, d_h, nsx_samples, inMicLevel, &outMicLevel) == -1)
                return -1;
        }

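        // Convert to float (S16 range); the AEC call below uses WebRTC's float API.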
        S16ToFloatS16(d_l, nsx_samples, d_f_l);
        memset(d_f_h, 0, nsx_samples*sizeof(float));

        if (resample) {
            S16ToFloatS16(d_h, nsx_samples, d_f_h);
        }

        if (f_a->echo_enabled) {
            if (WebRtcAec_Process(f_a->echo_cancellation, d_f_l, d_f_h, d_f_l, d_f_h, nsx_samples, f_a->msInSndCardBuf, 0) == -1) {
                return -1;
            }

            if (resample) {
                FloatS16ToS16(d_f_h, nsx_samples, d_h);
            }
            FloatS16ToS16(d_f_l, nsx_samples, d_l);
        }

        if (f_a->noise_enabled) {
            if (WebRtcNsx_Process(f_a->noise_sup_x, d_l, d_h, d_l, d_h) == -1) {
                return -1;
            }
        }

        if (f_a->gain_enabled) {
            int32_t inMicLevel = 128, outMicLevel;
            uint8_t saturationWarning;

            if (WebRtcAgc_Process(f_a->gain_control, d_l, d_h, nsx_samples, d_l, d_h, inMicLevel, &outMicLevel, 0, &saturationWarning) == -1) {
                return -1;
            }
        }

        if (resample) {
            upsample_audio(f_a, data + resampled_samples, smp, d_l, d_h, nsx_samples);
            S16ToFloat(data + resampled_samples, smp, d_f_u);
            run_filter_zam(&f_a->hpfa, d_f_u, smp);
            run_filter_zam(&f_a->hpfb, d_f_u, smp);

            if (f_a->lowpass_enabled) {
                run_filter_zam(&f_a->lpfa, d_f_u, smp);
                run_filter_zam(&f_a->lpfb, d_f_u, smp);
            }

            run_saturator_zam(d_f_u, smp);
            FloatToS16(d_f_u, smp, data + resampled_samples);
            resampled_samples += smp;
        } else {
            S16ToFloat(d_l, nsx_samples, d_f_l);

            run_filter_zam(&f_a->hpfa, d_f_l, nsx_samples);
            run_filter_zam(&f_a->hpfb, d_f_l, nsx_samples);

            if (f_a->lowpass_enabled) {
                run_filter_zam(&f_a->lpfa, d_f_l, nsx_samples);
                run_filter_zam(&f_a->lpfb, d_f_l, nsx_samples);
            }

            run_saturator_zam(d_f_l, nsx_samples);

            FloatToS16(d_f_l, nsx_samples, d_l);
            memcpy(data + (samples - temp_samples), d_l, nsx_samples * sizeof(int16_t));
        }

        temp_samples -= nsx_samples;
    }

    return !novoice;
}
Example #3
int KotiAEC_process(const int16_t* farend, const int16_t* nearend, int16_t* out)
{
    int ret = -1, i = 0, frame_size = 0;
    switch(aec_core_used)
    {
#ifdef WEBRTC_AEC_CORE_ENABLED
    case WEBRTC_AEC:
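        // Full WebRTC AEC path: buffer the far-end reference, cancel echo,
        // then run noise suppression and AGC on the cancelled output.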
        if(farend)
            WebRtcAec_BufferFarend(webrtc_aec_pty.webrtc_aec, farend, webrtc_aec_pty.frame_size);
        if(!WebRtcAec_Process(webrtc_aec_pty.webrtc_aec, nearend, NULL, out, NULL, webrtc_aec_pty.frame_size,
                                   webrtc_aec_pty.sndcard_delay_ms, 0))
        {
            ret = 0;
        }

        if(webrtc_aec_pty.webrtc_ns)
        {
            WebRtcNsx_Process((NsxHandle*)webrtc_aec_pty.webrtc_ns, out, NULL, out, NULL);
            if(webrtc_aec_pty.frame_size == 160)
                WebRtcNsx_Process((NsxHandle*)webrtc_aec_pty.webrtc_ns, out+80, NULL, out+80, NULL);
        }

        if(webrtc_aec_pty.webrtc_agc)
        {
            int32_t out_c; uint8_t warn_status;
            WebRtcAgc_Process(webrtc_aec_pty.webrtc_agc, out, NULL, webrtc_aec_pty.frame_size, out, NULL, 32,
                              &out_c, 1, &warn_status);
        }

//        if(webrtc_aec_pty.webrtc_ns)
//        {
//            WebRtcNsx_Process((NsxHandle*)webrtc_aec_pty.webrtc_ns, out, NULL, out, NULL);
//            if(webrtc_aec_pty.frame_size == 160)
//                WebRtcNsx_Process((NsxHandle*)webrtc_aec_pty.webrtc_ns, out+80, NULL, out+80, NULL);
//        }

        frame_size = webrtc_aec_pty.frame_size;
        break;
    case WEBRTC_AECM:
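        // AECM (mobile) path: noise suppression and AGC run on a copy of the
        // near-end signal first; the echo canceller then processes the result.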
/*
        if(farend)
            WebRtcAecm_BufferFarend(webrtc_aecm_pty.webrtc_aec, farend, webrtc_aecm_pty.frame_size);
        if(!WebRtcAecm_Process(webrtc_aecm_pty.webrtc_aec, nearend, NULL, out, webrtc_aecm_pty.frame_size,
                              webrtc_aecm_pty.sndcard_delay_ms))
        {
            ret = 0;
        }
*/

        memcpy(proc_tmp_buf, nearend, webrtc_aecm_pty.frame_size * sizeof(int16_t));
        if(webrtc_aecm_pty.webrtc_ns)
        {
            WebRtcNsx_Process((NsxHandle*)webrtc_aecm_pty.webrtc_ns, proc_tmp_buf, NULL, proc_tmp_buf, NULL);
            if(webrtc_aecm_pty.frame_size == 160)
                WebRtcNsx_Process((NsxHandle*)webrtc_aecm_pty.webrtc_ns, proc_tmp_buf+80, NULL, proc_tmp_buf+80, NULL);
        }

        if(webrtc_aecm_pty.webrtc_agc)
        {
            int32_t out_c; uint8_t warn_status;
            WebRtcAgc_Process(webrtc_aecm_pty.webrtc_agc, proc_tmp_buf, NULL, webrtc_aecm_pty.frame_size, proc_tmp_buf, NULL, 32,
                              &out_c, 1, &warn_status);
        }

        // AEC
        if(farend)
            WebRtcAecm_BufferFarend(webrtc_aecm_pty.webrtc_aec, farend, webrtc_aecm_pty.frame_size);
        if(!WebRtcAecm_Process(webrtc_aecm_pty.webrtc_aec, proc_tmp_buf, NULL, out, webrtc_aecm_pty.frame_size,
                              webrtc_aecm_pty.sndcard_delay_ms))
        {
            ret = 0;
        }

        frame_size = webrtc_aecm_pty.frame_size;
        break;
#endif
    case SPEEX_AEC:
    default:
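        // Speex fallback: echo cancellation followed by the Speex preprocessor on the output.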
#ifdef OLD_SPEEX_AEC
        speex_echo_cancel((SpeexEchoState*)speex_aec_pty.speex_echo_state, nearend, farend, out, speex_aec_pty.nosie);
        if(speex_preprocess((SpeexPreprocessState*)speex_aec_pty.speex_preprocess_state, out, speex_aec_pty.nosie) == 1)
            ret = 0;
#else
        if(farend)
            speex_echo_cancellation((SpeexEchoState*)speex_aec_pty.speex_echo_state, nearend, farend, out);
        else
            speex_echo_capture((SpeexEchoState*)speex_aec_pty.speex_echo_state, nearend, out);
//        speex_preprocess_estimate_update((SpeexPreprocessState*)speex_aec_pty.speex_preprocess_state, out);
        if(speex_preprocess_run((SpeexPreprocessState*)speex_aec_pty.speex_preprocess_state, out) == 1)
            ret = 0;
#endif

        frame_size = speex_aec_pty.frame_size;
        break;
    }

    // Amplify the output signal if requested.
    if(output_sound_amplification != 1.0f && output_sound_amplification > 0)
    {
        for(; i<frame_size; ++i)
            out[i] = out[i]*output_sound_amplification;
    }

    return ret;
}
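
All three examples call WebRtcAec_Process from inside a larger capture pipeline. For reference, below is a minimal per-frame sketch of the same two calls used in Example #3 (WebRtcAec_BufferFarend followed by WebRtcAec_Process). It assumes the legacy int16_t-based C API and an aec instance already created and initialized elsewhere with WebRtcAec_Create()/WebRtcAec_Init(); header locations and exact signatures vary between WebRTC revisions, and cancel_echo_frame() is an illustrative helper, not part of WebRTC.

/* Sketch only: assumes the legacy WebRTC AEC C API used in Example #3
 * (int16_t buffers, 0 == success). Signatures and the header path differ
 * between WebRTC revisions; `aec` must have been created and initialized
 * elsewhere with WebRtcAec_Create()/WebRtcAec_Init(). */
#include <stdint.h>
#include "echo_cancellation.h"   /* location depends on the WebRTC checkout */

#define FRAME_SIZE 160           /* 10 ms at 16 kHz */

/* cancel_echo_frame() is an illustrative helper, not part of WebRTC. */
static int cancel_echo_frame(void *aec,
                             const int16_t *farend,   /* signal sent to the speaker */
                             const int16_t *nearend,  /* signal captured by the mic */
                             int16_t *out,
                             int16_t delay_ms)        /* sound-card buffer delay */
{
    /* Feed the far-end (render) frame the canceller should look for... */
    if (WebRtcAec_BufferFarend(aec, farend, FRAME_SIZE) != 0)
        return -1;

    /* ...then remove its echo from the near-end (capture) frame.
     * NULL high-band pointers and skew 0, as in Example #3. */
    if (WebRtcAec_Process(aec, nearend, NULL, out, NULL,
                          FRAME_SIZE, delay_ms, 0) != 0)
        return -1;

    return 0;
}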