/*
 * Let the Echo Canceller know that a frame has been played to the speaker.
 */
PJ_DEF(pj_status_t) pjmedia_echo_playback( pjmedia_echo_state *echo,
					   pj_int16_t *play_frm )
{
    /* An EC backend that provides its own playback hook gets the frame
     * handed over directly; nothing else to do here.
     */
    if (echo->op->ec_playback)
	return (*echo->op->ec_playback)(echo->state, play_frm);

    /* The played frame must be kept so that echo_capture() can use it as
     * the reference signal; the delay buffer stores it and also absorbs
     * any clock drift between mic & speaker.
     *
     * Ticket #830:
     * pjmedia_delay_buf_put() may modify the frame it is given, and such
     * modified frames may not be smooth (e.g. when two or more consecutive
     * pjmedia_delay_buf_get() happened before the next put). Therefore we
     * feed the delay buffer a copy of the playing frame rather than the
     * original, at the cost of the EC seeing slightly 'different' reference
     * frames than what the speaker actually played.
     */
    pjmedia_copy_samples(echo->frm_buf, play_frm, echo->samples_per_frame);
    pjmedia_delay_buf_put(echo->delay_buf, echo->frm_buf);

    /* Once the latency buffer is fully primed there is nothing more to do. */
    if (echo->lat_ready)
	return PJ_SUCCESS;

    /* Still building up latency: try to claim a free slot for this frame. */
    if (pj_list_empty(&echo->lat_free)) {
	/* No free slot left means the latency buffering is now complete. */
	echo->lat_ready = PJ_TRUE;
	PJ_LOG(5,(echo->obj_name, "Latency bufferring complete"));
	return PJ_SUCCESS;
    }

    {
	struct frame *slot = echo->lat_free.prev;

	pj_list_erase(slot);

	/* Move one frame from the delay buffer into the latency buffer. */
	pjmedia_delay_buf_get(echo->delay_buf, echo->frm_buf);
	pjmedia_copy_samples(slot->buf, echo->frm_buf,
			     echo->samples_per_frame);
	pj_list_push_back(&echo->lat_buf, slot);
    }

    return PJ_SUCCESS;
}
/*
 * Let the Echo Canceller know that a frame has been played to the speaker.
 */
pj_status_t pjs_echo_canceller::playback(pj_int16_t *play_frm, unsigned size)
{
    /* Reject frames whose size does not match the configured frame size. */
    if (size != samples_per_frame) {
	PJ_LOG(1, (THIS_FILE, "WRONG SIZE ON PLAYBACK %d != %d",
		   size, samples_per_frame));
	return -1;
    }

    PPJ_WaitAndLock wl(*lock);

    /* The played frame must be stored because echo_capture() uses it as
     * the reference frame; the delay buffer holds it and compensates for
     * clock drift between mic & speaker.
     *
     * Ticket #830:
     * pjmedia_delay_buf_put() may modify the input frame, and modified
     * frames may not be smooth (e.g. when two or more consecutive
     * pjmedia_delay_buf_get() occurred before the next put). So the delay
     * buffer is fed a copy of the playing frame instead of the original,
     * which means the EC works on slightly 'different' reference frames
     * than what the speaker actually played.
     */
    pjmedia_copy_samples(frm_buf, play_frm, samples_per_frame);
    pjmedia_delay_buf_put(delay_buf, frm_buf);

    /* Latency buffer already primed: done. */
    if (lat_ready)
	return PJ_SUCCESS;

    /* Not enough latency built up yet; stash this frame in the latency
     * buffer list.
     */
    if (pj_list_empty(&lat_free)) {
	/* Free list exhausted: latency buffering is complete. */
	lat_ready = PJ_TRUE;
	PJ_LOG(4, (THIS_FILE, "Latency bufferring complete"));
	return PJ_SUCCESS;
    }

    struct frame *slot = lat_free.prev;
    pj_list_erase(slot);

    /* Move one frame from the delay buffer into the latency buffer. */
    pjmedia_delay_buf_get(delay_buf, frm_buf);
    pjmedia_copy_samples(slot->buf, frm_buf, samples_per_frame);
    pj_list_push_back(&lat_buf, slot);

    return PJ_SUCCESS;
}
/* Sound capture callback: push the captured frame into the delay buffer
 * and count the capture callbacks.
 *
 * Fix: `size` was marked with PJ_UNUSED_ARG() but is actually read in the
 * size-mismatch check below; the false "unused" marker was removed.
 */
static pj_status_t rec_cb(void *user_data, pj_uint32_t timestamp,
			  void *input, unsigned size)
{
    PJ_UNUSED_ARG(user_data);
    PJ_UNUSED_ARG(timestamp);

    pjmedia_delay_buf_put(delaybuf, (pj_int16_t*)input);

    /* Flag unexpected capture sizes (expected: SAMPLES_PER_FRAME 16-bit
     * samples, i.e. SAMPLES_PER_FRAME*2 bytes). */
    if (size != SAMPLES_PER_FRAME*2) {
	PJ_LOG(3, (THIS_FILE, "Size captured = %u", size));
    }

    ++rec_cnt;
    return PJ_SUCCESS;
}
/* Sound capture callback: for PCM frames, push the samples into the delay
 * buffer (logging unexpected sizes); for non-PCM (extended) frames, copy
 * the frame into the static frame buffer. Counts capture callbacks.
 *
 * Fix: frame->size is pj_size_t; passing it to a "%u" specifier is
 * undefined behavior on platforms where size_t is wider than unsigned
 * (e.g. LP64). Cast to unsigned to match the format string.
 */
static pj_status_t rec_cb(void *user_data, pjmedia_frame *frame)
{
    PJ_UNUSED_ARG(user_data);

    if (param.ext_fmt.id == PJMEDIA_FORMAT_PCM) {
	pjmedia_delay_buf_put(delaybuf, (pj_int16_t*)frame->buf);

	/* Flag unexpected capture sizes (expected: SAMPLES_PER_FRAME
	 * 16-bit samples, i.e. SAMPLES_PER_FRAME*2 bytes). */
	if (frame->size != SAMPLES_PER_FRAME*2) {
	    PJ_LOG(3, (THIS_FILE, "Size captured = %u",
		       (unsigned)frame->size));
	}
    } else {
	/* Extended format: copy the whole extended frame aside. */
	pjmedia_frame_ext *f_src = (pjmedia_frame_ext*)frame;
	pjmedia_frame_ext *f_dst = (pjmedia_frame_ext*)frame_buf;

	copy_frame_ext(f_dst, f_src);
    }

    ++rec_cnt;
    return PJ_SUCCESS;
}
/*
 * Put a frame in the reverse port (upstream direction). This frame
 * will be picked up by get_frame() above.
 *
 * Non-audio (NULL) frames are counted; after max_null_frames of them the
 * direction is left untouched so the media can be suspended. Otherwise a
 * zero frame (for NULL input) or a copy of the audio frame is written to
 * the upstream delay buffer.
 */
static pj_status_t rport_put_frame(pjmedia_port *this_port,
				   pjmedia_frame *frame)
{
    struct reverse_port *rport = (struct reverse_port*) this_port;

    pj_assert(frame->size <= PJMEDIA_PIA_AVG_FSZ(&rport->base.info));

    /* Handle NULL frame */
    if (frame->type != PJMEDIA_FRAME_TYPE_AUDIO) {
	/* Update the number of NULL frames received. Once we have too
	 * many of this, we'll stop calling op_update() to let the
	 * media be suspended.
	 */
	if (++rport->buf[DIR_UPSTREAM].null_cnt > rport->max_null_frames) {
	    /* Prevent the counter from overflowing and resetting back
	     * to zero
	     */
	    rport->buf[DIR_UPSTREAM].null_cnt = rport->max_null_frames + 1;
	    return PJ_SUCCESS;
	}

	/* Write zero port to delaybuf so that it doesn't underflow.
	 * If we don't do this, get_frame() on this direction will
	 * cause delaybuf to generate missing frame and the last
	 * frame transmitted to delaybuf will be replayed multiple
	 * times, which doesn't sound good.
	 */

	/* Update rport state. */
	op_update(rport, DIR_UPSTREAM, OP_PUT);

	/* Discard frame if rport is paused on this direction */
	if (rport->buf[DIR_UPSTREAM].paused)
	    return PJ_SUCCESS;

	/* Generate zero frame. */
	pjmedia_zero_samples(rport->tmp_up_buf,
			     PJMEDIA_PIA_SPF(&this_port->info));

	/* Put frame to delay buffer */
	return pjmedia_delay_buf_put(rport->buf[DIR_UPSTREAM].dbuf,
				     rport->tmp_up_buf);
    }

    /* Not sure how to handle partial frame, so better reject for now */
    PJ_ASSERT_RETURN(frame->size == PJMEDIA_PIA_AVG_FSZ(&this_port->info),
		     PJ_EINVAL);

    /* Audio frame arrived: reset NULL frame counter */
    rport->buf[DIR_UPSTREAM].null_cnt = 0;

    /* Update rport state. */
    op_update(rport, DIR_UPSTREAM, OP_PUT);

    /* Discard frame if rport is paused on this direction */
    if (rport->buf[DIR_UPSTREAM].paused)
	return PJ_SUCCESS;

    /* Unfortunately must copy to temporary buffer since delay buf
     * modifies the frame content.
     */
    pjmedia_copy_samples(rport->tmp_up_buf, (const pj_int16_t*)frame->buf,
			 PJMEDIA_PIA_SPF(&this_port->info));

    /* Put frame to delay buffer */
    return pjmedia_delay_buf_put(rport->buf[DIR_UPSTREAM].dbuf,
				 rport->tmp_up_buf);
}
/*
 * "Write" a multichannel frame downstream. This would split
 * the multichannel frame into individual mono channel, and write
 * it to the appropriate port.
 *
 * NULL (non-audio) frames are forwarded as-is to normal ports; for
 * reversed ports they are counted and, until max_null_frames is reached,
 * converted to zero frames fed into the downstream delay buffer.
 */
static pj_status_t put_frame(pjmedia_port *this_port,
			     pjmedia_frame *frame)
{
    struct splitcomb *sc = (struct splitcomb*) this_port;
    unsigned ch;

    /* Handle null frame */
    if (frame->type == PJMEDIA_FRAME_TYPE_NONE) {
	for (ch=0; ch < PJMEDIA_PIA_CCNT(&this_port->info); ++ch) {
	    pjmedia_port *port = sc->port_desc[ch].port;

	    /* Skip channels with no port attached. */
	    if (!port) continue;

	    if (!sc->port_desc[ch].reversed) {
		pjmedia_port_put_frame(port, frame);
	    } else {
		struct reverse_port *rport = (struct reverse_port*)port;

		/* Update the number of NULL frames received. Once we have too
		 * many of this, we'll stop calling op_update() to let the
		 * media be suspended.
		 */
		if (++rport->buf[DIR_DOWNSTREAM].null_cnt >
		    rport->max_null_frames)
		{
		    /* Prevent the counter from overflowing and resetting
		     * back to zero
		     */
		    rport->buf[DIR_DOWNSTREAM].null_cnt =
			rport->max_null_frames + 1;
		    continue;
		}

		/* Write zero port to delaybuf so that it doesn't underflow.
		 * If we don't do this, get_frame() on this direction will
		 * cause delaybuf to generate missing frame and the last
		 * frame transmitted to delaybuf will be replayed multiple
		 * times, which doesn't sound good.
		 */

		/* Update rport state. */
		op_update(rport, DIR_DOWNSTREAM, OP_PUT);

		/* Discard frame if rport is paused on this direction */
		if (rport->buf[DIR_DOWNSTREAM].paused)
		    continue;

		/* Generate zero frame. */
		pjmedia_zero_samples(sc->put_buf,
				     PJMEDIA_PIA_SPF(&port->info));

		/* Put frame to delay buffer */
		pjmedia_delay_buf_put(rport->buf[DIR_DOWNSTREAM].dbuf,
				      sc->put_buf);
	    }
	}
	return PJ_SUCCESS;
    }

    /* Not sure how we would handle partial frame, so better reject
     * it for now.
     */
    PJ_ASSERT_RETURN(frame->size == PJMEDIA_PIA_AVG_FSZ(&this_port->info),
		     PJ_EINVAL);

    /*
     * Write mono frame into each channels
     */
    for (ch=0; ch < PJMEDIA_PIA_CCNT(&this_port->info); ++ch) {
	pjmedia_port *port = sc->port_desc[ch].port;

	if (!port) continue;

	/* Extract the mono frame to temporary buffer.
	 * The last argument converts the frame's byte size into the
	 * per-channel sample count (bits -> samples, divided by the
	 * number of channels).
	 */
	extract_mono_frame((const pj_int16_t*)frame->buf, sc->put_buf, ch,
			   PJMEDIA_PIA_CCNT(&this_port->info),
			   (unsigned)frame->size * 8 /
			     PJMEDIA_PIA_BITS(&this_port->info) /
			     PJMEDIA_PIA_CCNT(&this_port->info));

	if (!sc->port_desc[ch].reversed) {
	    /* Write to normal port */
	    pjmedia_frame mono_frame;

	    mono_frame.buf = sc->put_buf;
	    mono_frame.size = frame->size / PJMEDIA_PIA_CCNT(&this_port->info);
	    mono_frame.type = frame->type;
	    mono_frame.timestamp.u64 = frame->timestamp.u64;

	    /* Write */
	    pjmedia_port_put_frame(port, &mono_frame);

	} else {
	    /* Write to reversed phase port */
	    struct reverse_port *rport = (struct reverse_port*)port;

	    /* Reset NULL frame counter */
	    rport->buf[DIR_DOWNSTREAM].null_cnt = 0;

	    /* Update rport state. */
	    op_update(rport, DIR_DOWNSTREAM, OP_PUT);

	    if (!rport->buf[DIR_DOWNSTREAM].paused) {
		pjmedia_delay_buf_put(rport->buf[DIR_DOWNSTREAM].dbuf,
				      sc->put_buf);
	    }
	}
    }

    return PJ_SUCCESS;
}