// Compute speech/noise probability // speech/noise probability is returned in: probSpeechFinal //snrLocPrior is the prior SNR for each frequency (in Q11) //snrLocPost is the post SNR for each frequency (in Q11) void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, uint16_t* nonSpeechProbFinal, uint32_t* priorLocSnr, uint32_t* postLocSnr) { uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3; int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32; int32_t frac32, logTmp; int32_t logLrtTimeAvgKsumFX; int16_t indPriorFX16; int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart; int i, normTmp, normTmp2, nShifts; // compute feature based on average LR factor // this is the average over all frequencies of the smooth log LRT logLrtTimeAvgKsumFX = 0; for (i = 0; i < inst->magnLen; i++) { besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11 normTmp = WebRtcSpl_NormU32(postLocSnr[i]); num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp) if (normTmp > 10) { den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp) } else { den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp) } if (den > 0) { besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11 } else { besselTmpFX32 -= num; // Q11 } // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) // - inst->logLrtTimeAvg[i]); // Here, LRT_TAVG = 0.5 zeros = WebRtcSpl_NormU32(priorLocSnr[i]); frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19); tmp32 = WEBRTC_SPL_MUL(frac32, frac32); tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19); tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12); frac32 = tmp32 + 37; // tmp32 = log2(priorLocSnr[i]) tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12 logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); // log2(priorLocSnr[i])*log(2) tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); // Q12 inst->logLrtTimeAvgW32[i] 
+= (besselTmpFX32 - tmp32no1); // Q12 logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12 } inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, inst->stages + 10); // 5 = BIN_SIZE_LRT / 2 // done with computation of LR factor // //compute the indicator functions // // average LRT feature // FLOAT code // indicator0 = 0.5 * (tanh(widthPrior * // (logLrtTimeAvgKsum - threshPrior0)) + 1.0); tmpIndFX = 16384; // Q14(1.0) tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12 nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5; //use larger width in tanh map for pause regions if (tmp32no1 < 0) { tmpIndFX = 0; tmp32no1 = -tmp32no1; //widthPrior = widthPrior * 2.0; nShifts++; } tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14 // compute indicator function: sigmoid map tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14); if ((tableIndex < 16) && (tableIndex >= 0)) { tmp16no2 = kIndicatorTable[tableIndex]; tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14 tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); if (tmpIndFX == 0) { tmpIndFX = 8192 - tmp16no2; // Q14 } else { tmpIndFX = 8192 + tmp16no2; // Q14 } } indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14 //spectral flatness feature if (inst->weightSpecFlat) { tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10 tmpIndFX = 16384; // Q14(1.0) //use larger width in tanh map for pause regions tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10 nShifts = 4; if (inst->thresholdSpecFlat < tmpU32no1) { tmpIndFX = 0; tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat; //widthPrior = widthPrior * 2.0; nShifts++; } tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14 tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14 // compute indicator function: sigmoid map // FLOAT code 
// indicator1 = 0.5 * (tanh(sgnMap * widthPrior * // (threshPrior1 - tmpFloat1)) + 1.0); tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); if (tableIndex < 16) { tmp16no2 = kIndicatorTable[tableIndex]; tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14); if (tmpIndFX) { tmpIndFX = 8192 + tmp16no2; // Q14 } else { tmpIndFX = 8192 - tmp16no2; // Q14 } } indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14 } //for template spectral-difference if (inst->weightSpecDiff) { tmpU32no1 = 0; if (inst->featureSpecDiff) { normTmp = WEBRTC_SPL_MIN(20 - inst->stages, WebRtcSpl_NormU32(inst->featureSpecDiff)); tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); // Q(normTmp-2*stages) tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, 20 - inst->stages - normTmp); if (tmpU32no2 > 0) { // Q(20 - inst->stages) tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2); } else { tmpU32no1 = (uint32_t)(0x7fffffff); } } tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, 17), 25); tmpU32no2 = tmpU32no1 - tmpU32no3; nShifts = 1; tmpIndFX = 16384; // Q14(1.0) //use larger width in tanh map for pause regions if (tmpU32no2 & 0x80000000) { tmpIndFX = 0; tmpU32no2 = tmpU32no3 - tmpU32no1; //widthPrior = widthPrior * 2.0; nShifts--; } tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts); // compute indicator function: sigmoid map /* FLOAT code indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0); */ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14); if (tableIndex < 16) { tmp16no2 = kIndicatorTable[tableIndex]; tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex]; frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14 tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND( tmp16no1, frac, 14); if (tmpIndFX) { tmpIndFX = 8192 + tmp16no2; } 
else { tmpIndFX = 8192 - tmp16no2; } } indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14 } //combine the indicator function with the feature weights // FLOAT code // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * // indicator1 + weightIndPrior2 * indicator2); indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14 // done with computing indicator function //compute the prior probability // FLOAT code // inst->priorNonSpeechProb += PRIOR_UPDATE * // (indPriorNonSpeech - inst->priorNonSpeechProb); tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14 inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( PRIOR_UPDATE_Q14, tmp16, 14); // Q14 //final speech probability: combine prior model with LR factor: memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen); if (inst->priorNonSpeechProb > 0) { for (i = 0; i < inst->magnLen; i++) { // FLOAT code // invLrt = exp(inst->logLrtTimeAvg[i]); // invLrt = inst->priorSpeechProb * invLrt; // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / // (1.0 - inst->priorSpeechProb + invLrt); // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt; // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / // (inst->priorNonSpeechProb + invLrt); if (inst->logLrtTimeAvgW32[i] < 65300) { tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL( inst->logLrtTimeAvgW32[i], 23637), 14); // Q12 intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12); if (intPart < -8) { intPart = -8; } frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12 // Quadratic approximation of 2^frac tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12 tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12 invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart) + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8 normTmp = WebRtcSpl_NormW32(invLrtFX); normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb)); if (normTmp + normTmp2 >= 7) { if (normTmp + normTmp2 < 15) { invLrtFX = 
WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp); // Q(normTmp+normTmp2-7) tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q(normTmp+normTmp2+7) invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); // Q14 } else { tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q22 invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14 } tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, 8); // Q22 nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1, (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8 } } } } }
/*
 * Update the packet inter-arrival time (IAT) statistics kept in the automode
 * instance: derives the packet length from timestamp/sequence-number deltas,
 * computes the IAT of the current packet (in whole packets), and maintains an
 * exponentially-forgotten IAT probability histogram (iatProb, Q30).
 *
 * NOTE(review): this definition is truncated in the visible chunk — the
 * function body does not close here, and the parameters mdCodec/maxBufLen
 * (beyond the sanity check) and retval are not used in the visible portion;
 * presumably they are consumed in the missing tail — confirm against the
 * full file.
 */
int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
                                    uint16_t seqNumber, uint32_t timeStamp,
                                    int32_t fsHz, int mdCodec, int streamingMode)
{
    uint32_t timeIat; /* inter-arrival time */
    int i;
    int32_t tempsum = 0; /* temp summation */
    int32_t tempvar; /* temporary variable */
    int retval = 0; /* return value */
    int16_t packetLenSamp; /* packet speech length in samples */

    /****************/
    /* Sanity check */
    /****************/

    if (maxBufLen <= 1 || fsHz <= 0)
    {
        /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
        return -1;
    }

    /****************************/
    /* Update packet statistics */
    /****************************/

    /* Try calculating packet length from current and previous timestamps */
    if (!WebRtcNetEQ_IsNewerTimestamp(timeStamp, inst->lastTimeStamp)
        || !WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
    {
        /* Wrong timestamp or sequence order; revert to backup plan */
        packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
    }
    else
    {
        /* calculate timestamps per packet */
        packetLenSamp = (int16_t) WebRtcSpl_DivU32U16(
            timeStamp - inst->lastTimeStamp, seqNumber - inst->lastSeqNo);
    }

    /* Check that the packet size is positive; if not, the statistics cannot
     * be updated. */
    if (inst->firstPacketReceived && packetLenSamp > 0)
    { /* packet size ok */

        /* calculate inter-arrival time in integer packets (rounding down) */
        timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);

        /* Special operations for streaming mode */
        if (streamingMode != 0)
        {
            /*
             * Calculate IAT in Q8, including fractions of a packet (i.e., more
             * accurate than timeIat).
             */
            int16_t timeIatQ8 = (int16_t) WebRtcSpl_DivW32W16(
                WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);

            /*
             * Calculate cumulative sum iat with sequence number compensation
             * (ideal arrival times makes this sum zero).
             */
            inst->cSumIatQ8 += (timeIatQ8
                - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));

            /* subtract drift term */
            inst->cSumIatQ8 -= CSUM_IAT_DRIFT;

            /* ensure not negative */
            inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);

            /* remember max */
            if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
            {
                inst->maxCSumIatQ8 = inst->cSumIatQ8;
                inst->maxCSumUpdateTimer = 0;
            }

            /* too long since the last maximum was observed; decrease max value */
            if (inst->maxCSumUpdateTimer > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
                MAX_STREAMING_PEAK_PERIOD))
            {
                inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
            }
        } /* end of streaming mode */

        /* check for discontinuous packet sequence and re-ordering */
        if (WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo + 1))
        {
            /* Compensate for gap in the sequence numbers.
             * Reduce IAT with expected extra time due to lost packets, but
             * ensure that the IAT is not negative.
             */
            timeIat -= WEBRTC_SPL_MIN(timeIat,
                (uint16_t) (seqNumber - (uint16_t) (inst->lastSeqNo + 1)));
        }
        else if (!WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
        {
            /* compensate for re-ordering */
            timeIat += (uint16_t) (inst->lastSeqNo + 1 - seqNumber);
        }

        /* saturate IAT at maximum value */
        timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );

        /* update iatProb = forgetting_factor * iatProb for all elements */
        for (i = 0; i <= MAX_IAT; i++)
        {
            int32_t tempHi, tempLo; /* Temporary variables */

            /*
             * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift
             * 15 steps to come back to Q30. The operation is done in two steps:
             */

            /*
             * 1) Multiply the high 16 bits (15 bits + sign) of iatProb.
             * Shift iatProb 16 steps right to get the high 16 bits in a
             * int16_t prior to multiplication, and left-shift with 1
             * afterwards to come back to Q30 = (Q15 * (Q30>>16)) << 1.
             */
            tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
                (int16_t) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
            tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */

            /*
             * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift
             * 15 steps afterwards to come back to Q30 = (Q15 * Q30) >> 15.
             */
            tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
            tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact, (uint16_t) tempLo);
            tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);

            /* Finally, add the high and low parts */
            inst->iatProb[i] = tempHi + tempLo;

            /* Sum all vector elements while we are at it... */
            tempsum += inst->iatProb[i];
        }

        /*
         * Increase the probability for the currently observed inter-arrival
         * time with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
         * hence, left-shift 15 steps to obtain result in Q30.
         */
        inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;

        tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */

        /*
         * Update iatProbFact (changes only during the first seconds after
         * reset). The factor converges to IAT_PROB_FACT.
         */
        inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;

        /* iatProb should sum up to 1 (in Q30). */
        tempsum -= 1 << 30; /* should be zero */

        /* Check if it does, correct if it doesn't. */
        if (tempsum > 0)
        {
            /* tempsum too large => decrease a few values in the beginning */
            i = 0;
            while (i <= MAX_IAT && tempsum > 0)
            {
                /* Remove iatProb[i] / 16 from iatProb, but not more than
                 * tempsum */
                tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
                inst->iatProb[i++] -= tempvar;
                tempsum -= tempvar;
            }
        }
/*
 * Receive one incoming RTP packet into NetEQ's MCU side: reinitializes on
 * SSRC change or first packet, updates RTCP statistics, optionally splits a
 * RED (redundancy) payload into up to two payloads, applies timestamp
 * scaling, and dispatches each payload (DTMF / CNG / normal speech) into the
 * packet buffer.
 *
 * NOTE(review): this definition is truncated in the visible chunk — the
 * payload loop and the function do not close here (codecPos and temp_bufsize
 * are declared but only used in the missing tail — confirm against the full
 * file).
 */
int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
                              WebRtc_UWord32 uw32_timeRec)
{
    RTPPacket_t RTPpacket[2];
    int i_k;
    int i_ok = 0, i_No_Of_Payloads = 1;
    WebRtc_Word16 flushed = 0;
    WebRtc_Word16 codecPos;
    WebRtc_UWord32 diffTS, uw32_tmp;
    int curr_Codec;
    WebRtc_Word16 isREDPayload = 0;
    WebRtc_Word32 temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer;
#ifdef NETEQ_RED_CODEC
    RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
    RTPpacketPtr[0] = &RTPpacket[0];
    RTPpacketPtr[1] = &RTPpacket[1];
#endif

    /*
     * Copy from input RTP packet to local copy
     * (mainly to enable multiple payloads using RED)
     */
    WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));

    /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
    if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
    {
        WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
        MCU_inst->first_packet = 0;

        /* Flush the buffer */
        WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);

        /* Store new SSRC */
        MCU_inst->ssrc = RTPpacket[0].ssrc;

        /* Update codecs */
        MCU_inst->timeStamp = RTPpacket[0].timeStamp;
        MCU_inst->current_Payload = RTPpacket[0].payloadType;

        /* Set MCU to update codec on next SignalMCU call */
        MCU_inst->new_codec = 1;

        /* Reset timestamp scaling */
        MCU_inst->TSscalingInitialized = 0;
    }

    /* Call RTCP statistics */
    i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst), RTPpacket[0].seqNumber,
        RTPpacket[0].timeStamp, uw32_timeRec);

    /* If Redundancy is supported and this is the redundancy payload, separate
     * the payloads */
#ifdef NETEQ_RED_CODEC
    if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(
        &MCU_inst->codec_DB_inst, kDecoderRED))
    {
        /* Split the payload into a main and a redundancy payloads */
        i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
        if (i_ok < 0)
        {
            /* error returned */
            return i_ok;
        }

        /*
         * Only accept a few redundancies of the same type as the main data,
         * AVT events and CNG.
         */
        if ((i_No_Of_Payloads > 1)
            && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
            && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(
                &MCU_inst->codec_DB_inst, kDecoderAVT))
            && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
                &MCU_inst->codec_DB_inst, kDecoderAVT))
            && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
                RTPpacket[0].payloadType))
            && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
                RTPpacket[1].payloadType)))
        {
            /* Drop the redundant payload; keep only the main one. */
            i_No_Of_Payloads = 1;
        }
        isREDPayload = 1;
    }
#endif

    /* loop over the number of payloads */
    for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
    {
        /* rcuPlCntr marks which redundancy level this payload came from
         * (0 = primary). */
        if (isREDPayload == 1)
        {
            RTPpacket[i_k].rcuPlCntr = i_k;
        }
        else
        {
            RTPpacket[i_k].rcuPlCntr = 0;
        }

        /* Force update of SplitInfo if it's iLBC because of potential change
         * between 20/30ms */
        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(
            &MCU_inst->codec_DB_inst, kDecoderILBC))
        {
            i_ok = WebRtcNetEQ_DbGetSplitInfo(
                &MCU_inst->PayloadSplit_inst,
                (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(
                    &MCU_inst->codec_DB_inst, RTPpacket[i_k].payloadType),
                RTPpacket[i_k].payloadLen);
            if (i_ok < 0)
            {
                /* error returned */
                return i_ok;
            }
        }

        /* Get information about timestamp scaling for this payload type */
        i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst,
            RTPpacket[i_k].payloadType);
        if (i_ok < 0)
        {
            /* error returned */
            return i_ok;
        }

        if (MCU_inst->TSscalingInitialized == 0
            && MCU_inst->scalingFactor != kTSnoScaling)
        {
            /* Must initialize scaling with current timestamps */
            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
            MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
            MCU_inst->TSscalingInitialized = 1;
        }

        /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or
         * G.722) */
        if (MCU_inst->TSscalingInitialized == 1)
        {
            WebRtc_UWord32 newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(
                MCU_inst, RTPpacket[i_k].timeStamp);

            /* save the incoming timestamp for next time */
            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;

            /* add the scaled difference to last scaled timestamp and save ... */
            MCU_inst->internalTS = newTS;

            RTPpacket[i_k].timeStamp = newTS;
        }

        /* Is this a DTMF packet? */
        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(
            &MCU_inst->codec_DB_inst, kDecoderAVT))
        {
#ifdef NETEQ_ATEVENT_DECODE
            if (MCU_inst->AVT_PlayoutOn)
            {
                i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
                    RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
                    RTPpacket[i_k].timeStamp);
                if (i_ok != 0)
                {
                    return i_ok;
                }
            }
#endif
#ifdef NETEQ_STEREO
            if (MCU_inst->usingStereo == 0)
            {
                /* do not set this for DTMF packets when using stereo mode */
                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
            }
#else
            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
#endif
        }
        else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
            RTPpacket[i_k].payloadType))
        {
            /* Is this a CNG packet? how should we handle this? */
#ifdef NETEQ_CNG_CODEC
            /* Get CNG sample rate */
            WebRtc_UWord16 fsCng = WebRtcNetEQ_DbGetSampleRate(
                &MCU_inst->codec_DB_inst, RTPpacket[i_k].payloadType);
            if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
            {
                /*
                 * We have received CNG with a different sample rate from what
                 * we are using now (must be > 8000, since we may use only one
                 * CNG type (default) for all frequencies). Flush buffer and
                 * signal new codec.
                 */
                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
                MCU_inst->new_codec = 1;
                MCU_inst->current_Codec = -1;
            }
            i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
                &RTPpacket[i_k], &flushed);
            if (i_ok < 0)
            {
                return RECIN_CNG_ERROR;
            }
            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
#else /* NETEQ_CNG_CODEC not defined */
            return RECIN_UNKNOWNPAYLOAD;
#endif /* NETEQ_CNG_CODEC */
        }
        else
        {
            /* Reinitialize the splitting if the payload and/or the payload
             * length has changed */
            curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
                RTPpacket[i_k].payloadType);
            if (curr_Codec != MCU_inst->current_Codec)
            {
                if (curr_Codec < 0)
                {
                    return RECIN_UNKNOWNPAYLOAD;
                }
                MCU_inst->current_Codec = curr_Codec;
                MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
                i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
                    (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
                    RTPpacket[i_k].payloadLen);
                if (i_ok < 0)
                { /* error returned */
                    return i_ok;
                }
                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
                MCU_inst->new_codec = 1;
            }

            /* update post-call statistics */
            if (MCU_inst->new_codec != 1) /* not if in codec change */
            {
                diffTS = RTPpacket[i_k].timeStamp - MCU_inst->timeStamp; /* waiting time */
                if (diffTS < 0x0FFFFFFF) /* guard against re-ordering */
                {
                    /* waiting time in ms */
                    diffTS = WEBRTC_SPL_UDIV(diffTS,
                        WEBRTC_SPL_UDIV((WebRtc_UWord32) MCU_inst->fs, 1000) );
                    if (diffTS < MCU_inst->statInst.minPacketDelayMs)
                    {
                        /* new all-time low */
                        MCU_inst->statInst.minPacketDelayMs = diffTS;
                    }
                    if (diffTS > MCU_inst->statInst.maxPacketDelayMs)
                    {
                        /* new all-time high */
                        MCU_inst->statInst.maxPacketDelayMs = diffTS;
                    }
                    /* Update avg waiting time:
                     * avgPacketDelayMs =
                     * (avgPacketCount * avgPacketDelayMs + diffTS)/(avgPacketCount+1)
                     * with proper rounding.
                     */
                    uw32_tmp = WEBRTC_SPL_UMUL(
                        (WebRtc_UWord32) MCU_inst->statInst.avgPacketCount,
                        (WebRtc_UWord32) MCU_inst->statInst.avgPacketDelayMs);
                    uw32_tmp = WEBRTC_SPL_ADD_SAT_W32(uw32_tmp,
                        (diffTS + (MCU_inst->statInst.avgPacketCount>>1)));
                    uw32_tmp = WebRtcSpl_DivU32U16(uw32_tmp,
                        (WebRtc_UWord16) (MCU_inst->statInst.avgPacketCount + 1));
                    MCU_inst->statInst.avgPacketDelayMs =
                        (WebRtc_UWord16) WEBRTC_SPL_MIN(uw32_tmp,
                            (WebRtc_UWord32) 65535);

                    /* increase counter, but not to more than 65534 */
                    if (MCU_inst->statInst.avgPacketCount < (0xFFFF - 1))
                    {
                        MCU_inst->statInst.avgPacketCount++;
                    }
                }
            }

            /* Parse the payload and insert it into the buffer */
            i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
                &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst,
                &flushed);
            if (i_ok < 0)
            {
                return i_ok;
            }
            if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
            {
                /* first normal packet after CNG or DTMF */
                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
            }
        }