int OutputProcessor::packageAudio(unsigned char* inBuff, int inBuffLen, unsigned char* outBuff, long int pts) { if (audioPackager == 0) { ELOG_DEBUG("No se ha inicializado el codec de output audio RTP"); return -1; } timeval time; gettimeofday(&time, NULL); long millis = (time.tv_sec * 1000) + (time.tv_usec / 1000); RtpHeader head; head.setSeqNumber(audioSeqnum_++); // head.setTimestamp(millis*8); head.setMarker(1); if (pts==0){ // head.setTimestamp(audioSeqnum_*160); head.setTimestamp(av_rescale(audioSeqnum_, (mediaInfo.audioCodec.sampleRate/1000), 1)); }else{ // head.setTimestamp(pts*8); head.setTimestamp(av_rescale(pts, mediaInfo.audioCodec.sampleRate,1000)); } head.setSSRC(44444); head.setPayloadType(mediaInfo.rtpAudioInfo.PT); // memcpy (rtpAudioBuffer_, &head, head.getHeaderLength()); // memcpy(&rtpAudioBuffer_[head.getHeaderLength()], inBuff, inBuffLen); memcpy (outBuff, &head, head.getHeaderLength()); memcpy(&outBuff[head.getHeaderLength()], inBuff, inBuffLen); // sink_->sendData(rtpBuffer_, l); // rtpReceiver_->receiveRtpData(rtpBuffer_, (inBuffLen + RTP_HEADER_LEN)); return (inBuffLen+head.getHeaderLength()); }
// Rewrites the payload type of an outgoing RTP packet from the internal
// value to the externally negotiated one (per the remote SDP), in place.
// RTCP packets are left untouched.
void MediaStream::changeDeliverPayloadType(DataPacket *dp, packetType type) {
  RtpHeader* h = reinterpret_cast<RtpHeader*>(dp->data);
  RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(dp->data);
  if (chead->isRtcp()) {
    return;  // no payload type to rewrite on RTCP
  }
  int internalPT = h->getPayloadType();
  int externalPT = internalPT;
  if (type == AUDIO_PACKET) {
    externalPT = remote_sdp_->getAudioExternalPT(internalPT);
  } else if (type == VIDEO_PACKET) {
    // Fixed: look up by internalPT (previously passed externalPT — harmless
    // only because the two were still equal here, but inconsistent with the
    // audio branch and fragile under future edits).
    externalPT = remote_sdp_->getVideoExternalPT(internalPT);
  }
  if (internalPT != externalPT) {
    h->setPayloadType(externalPT);
  }
}
// parses incoming payload type, replaces occurence in buf void MediaStream::parseIncomingPayloadType(char *buf, int len, packetType type) { RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(buf); RtpHeader* h = reinterpret_cast<RtpHeader*>(buf); if (!chead->isRtcp()) { int externalPT = h->getPayloadType(); int internalPT = externalPT; if (type == AUDIO_PACKET) { internalPT = remote_sdp_->getAudioInternalPT(externalPT); } else if (type == VIDEO_PACKET) { internalPT = remote_sdp_->getVideoInternalPT(externalPT); } if (externalPT != internalPT) { h->setPayloadType(internalPT); } else { // ELOG_WARN("onTransportData did not find mapping for %i", externalPT); } } }
int OutputProcessor::packageVideo(unsigned char* inBuff, int buffSize, unsigned char* outBuff, long int pts) { if (videoPackager == 0) { ELOG_DEBUG("No se ha inicailizado el codec de output vídeo RTP"); return -1; } // ELOG_DEBUG("To packetize %u", buffSize); if (buffSize <= 0) return -1; RtpVP8Fragmenter frag(inBuff, buffSize, 1100); bool lastFrame = false; unsigned int outlen = 0; timeval time; gettimeofday(&time, NULL); long millis = (time.tv_sec * 1000) + (time.tv_usec / 1000); // timestamp_ += 90000 / mediaInfo.videoCodec.frameRate; //int64_t pts = av_rescale(lastPts_, 1000000, (long int)video_time_base_); do { outlen = 0; frag.getPacket(outBuff, &outlen, &lastFrame); RtpHeader rtpHeader; rtpHeader.setMarker(lastFrame?1:0); rtpHeader.setSeqNumber(seqnum_++); if (pts==0){ rtpHeader.setTimestamp(av_rescale(millis, 90000, 1000)); }else{ rtpHeader.setTimestamp(av_rescale(pts, 90000, 1000)); } rtpHeader.setSSRC(55543); rtpHeader.setPayloadType(100); memcpy(rtpBuffer_, &rtpHeader, rtpHeader.getHeaderLength()); memcpy(&rtpBuffer_[rtpHeader.getHeaderLength()],outBuff, outlen); int l = outlen + rtpHeader.getHeaderLength(); // sink_->sendData(rtpBuffer_, l); rtpReceiver_->receiveRtpData(rtpBuffer_, l); } while (!lastFrame); return 0; }
// parses incoming payload type, replaces occurence in buf void WebRtcConnection::parseIncomingPayloadType(char *buf, int len, packetType type) { RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(buf); RtpHeader* h = reinterpret_cast<RtpHeader*>(buf); if (!chead->isRtcp()) { int externalPT = h->getPayloadType(); int internalPT = externalPT; if (type == AUDIO_PACKET) { internalPT = remoteSdp_.getAudioInternalPT(externalPT); } else if (type == VIDEO_PACKET) { internalPT = remoteSdp_.getVideoInternalPT(externalPT); } if (externalPT != internalPT) { h->setPayloadType(internalPT); // ELOG_ERROR("onTransportData mapping %i to %i", externalPT, internalPT); } else { // ELOG_ERROR("onTransportData did not find mapping for %i", externalPT); } } }
void ExternalOutput::writeVideoData(char* buf, int len){ RtpHeader* head = reinterpret_cast<RtpHeader*>(buf); if (head->getPayloadType() == RED_90000_PT) { int totalLength = head->getHeaderLength(); int rtpHeaderLength = totalLength; RedHeader *redhead = reinterpret_cast<RedHeader*>(buf + totalLength); if (redhead->payloadtype == VP8_90000_PT) { while (redhead->follow) { totalLength += redhead->getLength() + 4; // RED header redhead = reinterpret_cast<RedHeader*>(buf + totalLength); } // Parse RED packet to VP8 packet. // Copy RTP header memcpy(deliverMediaBuffer_, buf, rtpHeaderLength); // Copy payload data memcpy(deliverMediaBuffer_ + totalLength, buf + totalLength + 1, len - totalLength - 1); // Copy payload type RtpHeader *mediahead = reinterpret_cast<RtpHeader*>(deliverMediaBuffer_); mediahead->setPayloadType(redhead->payloadtype); buf = reinterpret_cast<char*>(deliverMediaBuffer_); len = len - 1 - totalLength + rtpHeaderLength; } } if (firstVideoTimestamp_ == -1) { firstVideoTimestamp_ = head->getTimestamp(); } int gotUnpackagedFrame = false; int ret = inputProcessor_->unpackageVideo(reinterpret_cast<unsigned char*>(buf), len, unpackagedBufferpart_, &gotUnpackagedFrame); if (ret < 0){ ELOG_ERROR("Error Unpackaging Video"); return; } initContext(); if (video_stream_ == NULL) { // could not init our context yet. return; } unpackagedSize_ += ret; unpackagedBufferpart_ += ret; if (gotUnpackagedFrame) { unpackagedBufferpart_ -= unpackagedSize_; long long currentTimestamp = head->getTimestamp(); if (currentTimestamp - firstVideoTimestamp_ < 0) { // we wrapped. add 2^32 to correct this. We only handle a single wrap around since that's ~13 hours of recording, minimum. currentTimestamp += 0xFFFFFFFF; } long long timestampToWrite = (currentTimestamp - firstVideoTimestamp_) / (90000 / video_stream_->time_base.den); // All of our video offerings are using a 90khz clock. 
// Adjust for our start time offset timestampToWrite += videoOffsetMsec_ / (1000 / video_stream_->time_base.den); // in practice, our timebase den is 1000, so this operation is a no-op. /* ELOG_DEBUG("Writing video frame %d with timestamp %u, normalized timestamp %u, video offset msec %u, length %d, input timebase: %d/%d, target timebase: %d/%d", */ /* head->getSeqNumber(), head->getTimestamp(), timestampToWrite, videoOffsetMsec_, unpackagedSize_, */ /* video_stream_->codec->time_base.num, video_stream_->codec->time_base.den, // timebase we requested */ /* video_stream_->time_base.num, video_stream_->time_base.den); // actual timebase */ AVPacket avpkt; av_init_packet(&avpkt); avpkt.data = unpackagedBufferpart_; avpkt.size = unpackagedSize_; avpkt.pts = timestampToWrite; avpkt.stream_index = 0; av_write_frame(context_, &avpkt); av_free_packet(&avpkt); unpackagedSize_ = 0; unpackagedBufferpart_ = unpackagedBuffer_; } }