int OutputProcessor::packageAudio(unsigned char* inBuff, int inBuffLen, unsigned char* outBuff) {
  if (audioPackager == 0) {
    ELOG_DEBUG("The output audio RTP packager has not been initialized");
    return -1;
  }

  timeval time;
  gettimeofday(&time, NULL);
  long millis = (time.tv_sec * 1000) + (time.tv_usec / 1000);

  // Build the RTP header: PCMU (payload type 0) uses an 8 kHz clock,
  // so the wall-clock milliseconds are scaled by 8.
  RTPHeader head;
  head.setSeqNumber(seqnum_++);
  head.setTimestamp(millis * 8);
  head.setSSRC(55543);
  head.setPayloadType(0);

  memcpy(rtpBuffer_, &head, head.getHeaderLength());
  memcpy(&rtpBuffer_[head.getHeaderLength()], inBuff, inBuffLen);
  // sink_->sendData(rtpBuffer_, l);
  // rtpReceiver_->receiveRtpData(rtpBuffer_, (inBuffLen + RTP_HEADER_LEN));

  return (inBuffLen + head.getHeaderLength());
}
int InputProcessor::unpackageVideo(unsigned char* inBuff, int inBuffLen, unsigned char* outBuff,
                                   int* gotFrame, int* estimatedFps, double* videoTs, bool* KFrame) {
  if (videoUnpackager == 0) {
    ELOG_DEBUG("Unpackager not correctly initialized");
    return -1;
  }

  bool tempKeyFrame2 = false;
  int inBuffOffset = 0;
  *gotFrame = 0;

  RTPHeader* head = reinterpret_cast<RTPHeader*>(inBuff);
  // if (head->getSSRC() != 55543 /*&& head->payloadtype != 101*/) {
  //   return -1;
  // }
  if (head->getPayloadType() != 100) {
    return -1;
  }

  // Strip the RTP header and parse the remaining bytes as a VP8 payload.
  int l = inBuffLen - head->getHeaderLength();
  inBuffOffset += head->getHeaderLength();
  erizo::RTPPayloadVP8* parsed = pars.parseVP8((unsigned char*) &inBuff[inBuffOffset], l, &tempKeyFrame2);
  memcpy(outBuff, parsed->data, parsed->dataLength);

  if (tempKeyFrame2)
    tempKeyFrame = true;
  // if (tempKeyFrame) {
  //   ELOG_WARN("GOT KEYFRAME");
  // }
  *KFrame = tempKeyFrame;

  // The marker bit signals the last packet of a frame; estimate the frame
  // rate from the 90 kHz timestamp delta between consecutive frames.
  if (head->getMarker()) {
    *estimatedFps = 0;
    if (lastVideoTs_) {
      *estimatedFps = 90000 / (head->getTimestamp() - lastVideoTs_);
    }
    lastVideoTs_ = head->getTimestamp();
    *videoTs = lastVideoTs_;
    *gotFrame = 1;
    tempKeyFrame = false;
  }

  int ret = parsed->dataLength;
  delete parsed;
  return ret;
}
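// --- Illustrative usage sketch (not part of the original source) ---
// A minimal example of how a caller might drive unpackageVideo() for each
// incoming RTP packet, based only on the signature above; the InputProcessor
// reference, the frame buffer size, and this helper itself are assumptions,
// not an API defined elsewhere in the file.
static void exampleUnpackageLoop(erizo::InputProcessor& input, unsigned char* pkt, int pktLen) {
  static unsigned char frameBuffer[200000];  // accumulates one depacketized VP8 frame
  static int frameSize = 0;
  int gotFrame = 0;
  int estimatedFps = 0;  // only meaningful once two marker packets have been seen
  double videoTs = 0;
  bool isKeyFrame = false;
  int written = input.unpackageVideo(pkt, pktLen, frameBuffer + frameSize,
                                     &gotFrame, &estimatedFps, &videoTs, &isKeyFrame);
  if (written < 0) {
    return;  // not a VP8 packet (payload type != 100)
  }
  frameSize += written;
  if (gotFrame) {
    // Marker bit seen: frameBuffer holds a complete frame of frameSize bytes
    // and could be handed to a decoder or muxer here.
    frameSize = 0;
  }
}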
int OutputProcessor::packageVideo(unsigned char* inBuff, int buffSize, unsigned char* outBuff) {
  if (videoPackager == 0) {
    ELOG_DEBUG("The output video RTP packager has not been initialized");
    return -1;
  }

  // ELOG_DEBUG("To packetize %u", buffSize);
  if (buffSize <= 0)
    return -1;

  // Fragment the encoded VP8 frame into payloads of at most 1100 bytes so
  // that each RTP packet stays below a typical Ethernet MTU.
  RtpVP8Fragmenter frag(inBuff, buffSize, 1100);
  bool lastFrame = false;
  unsigned int outlen = 0;
  timeval time;
  gettimeofday(&time, NULL);
  long millis = (time.tv_sec * 1000) + (time.tv_usec / 1000);
  // timestamp_ += 90000 / mediaInfo.videoCodec.frameRate;

  do {
    outlen = 0;
    frag.getPacket(outBuff, &outlen, &lastFrame);

    // VP8 uses a 90 kHz RTP clock; the marker bit is set only on the last
    // fragment of the frame.
    RTPHeader rtpHeader;
    rtpHeader.setMarker(lastFrame ? 1 : 0);
    rtpHeader.setSeqNumber(seqnum_++);
    rtpHeader.setTimestamp(millis * 90);
    rtpHeader.setSSRC(55543);
    rtpHeader.setPayloadType(100);

    memcpy(rtpBuffer_, &rtpHeader, rtpHeader.getHeaderLength());
    memcpy(&rtpBuffer_[rtpHeader.getHeaderLength()], outBuff, outlen);

    int l = outlen + rtpHeader.getHeaderLength();
    // sink_->sendData(rtpBuffer_, l);
    rtpReceiver_->receiveRtpData(rtpBuffer_, l);
  } while (!lastFrame);

  return 0;
}
void ExternalOutput::writeVideoData(char* buf, int len) {
  RTPHeader* head = reinterpret_cast<RTPHeader*>(buf);
  if (head->getPayloadType() == RED_90000_PT) {
    int totalLength = head->getHeaderLength();
    int rtpHeaderLength = totalLength;
    redheader* redhead = (redheader*) (buf + totalLength);
    if (redhead->payloadtype == VP8_90000_PT) {
      // Walk the chain of RED block headers (4 bytes each); the final block
      // has a 1-byte header, skipped below.
      while (redhead->follow) {
        totalLength += redhead->getLength() + 4;  // RED header
        redhead = (redheader*) (buf + totalLength);
      }
      // Convert the RED packet into a plain VP8 packet:
      // copy the RTP header ...
      memcpy(deliverMediaBuffer_, buf, rtpHeaderLength);
      // ... copy the payload data ...
      memcpy(deliverMediaBuffer_ + totalLength, buf + totalLength + 1, len - totalLength - 1);
      // ... and rewrite the payload type.
      RTPHeader* mediahead = reinterpret_cast<RTPHeader*>(deliverMediaBuffer_);
      mediahead->setPayloadType(redhead->payloadtype);
      buf = reinterpret_cast<char*>(deliverMediaBuffer_);
      len = len - 1 - totalLength + rtpHeaderLength;
    }
  }

  if (initTimeVideo_ == -1) {
    initTimeVideo_ = head->getTimestamp();
  }

  int gotUnpackagedFrame = 0;
  int ret = inputProcessor_->unpackageVideo(reinterpret_cast<unsigned char*>(buf), len,
                                            unpackagedBufferpart_, &gotUnpackagedFrame);
  if (ret < 0)
    return;

  this->initContext();
  if (video_stream_ == NULL) {
    // Could not initialize our context yet.
    return;
  }

  unpackagedSize_ += ret;
  unpackagedBufferpart_ += ret;

  if (gotUnpackagedFrame) {
    unpackagedBufferpart_ -= unpackagedSize_;
    // ELOG_DEBUG("Writing video frame %d with timestamp %u, length %d, input timebase: %d/%d, target timebase: %d/%d",
    //            head->getSeqNumber(), head->getTimestamp(), unpackagedSize_,
    //            video_stream_->codec->time_base.num, video_stream_->codec->time_base.den,  // timebase we requested
    //            video_stream_->time_base.num, video_stream_->time_base.den);               // actual timebase

    long long currentTimestamp = head->getTimestamp();
    if (currentTimestamp - initTimeVideo_ < 0) {
      // The 32-bit RTP timestamp wrapped; add 2^32 to correct it. We only handle a single
      // wrap-around, since that is at least ~13 hours of recording.
      currentTimestamp += 0x100000000LL;
    }

    // All of our video offerings use a 90 kHz clock; rescale the elapsed RTP
    // ticks into the muxer stream's time base.
    long long timestampToWrite = (currentTimestamp - initTimeVideo_) / (90000 / video_stream_->time_base.den);

    AVPacket avpkt;
    av_init_packet(&avpkt);
    avpkt.data = unpackagedBufferpart_;
    avpkt.size = unpackagedSize_;
    avpkt.pts = timestampToWrite;
    avpkt.stream_index = 0;
    av_write_frame(context_, &avpkt);
    av_free_packet(&avpkt);

    unpackagedSize_ = 0;
    unpackagedBufferpart_ = unpackagedBuffer_;
  }
}
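// --- Worked example (not part of the original source) ---
// Illustration of the pts rescaling above, assuming (hypothetically) a stream
// time base of 1/1000, i.e. video_stream_->time_base.den == 1000:
// a frame arriving 3 seconds after the first one has
//   currentTimestamp - initTimeVideo_ = 3 * 90000 = 270000 ticks on the 90 kHz clock,
// so timestampToWrite = 270000 / (90000 / 1000) = 3000, i.e. 3000 ms, which is
// the pts handed to av_write_frame().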