/****************************************
 * RecVideo
 *	Gets the packets and displays them
 *****************************************/
int MediaBridgeSession::RecVideo()
{
    //Coders
    VideoDecoder* decoder = NULL;
    VideoEncoder* encoder = VideoCodecFactory::CreateEncoder(VideoCodec::SORENSON);
    //Create new video frame
    RTMPVideoFrame frame(0,262143);
    //Set codec
    frame.SetVideoCodec(RTMPVideoFrame::FLV1);

    int width = 0;
    int height = 0;
    DWORD numpixels = 0;

    Log(">RecVideo\n");

    //While we have to capture
    while(receivingVideo)
    {
        //Get the packet
        RTPPacket* packet = rtpVideo.GetPacket();

        //Check
        if (!packet)
            //Next
            continue;

        //Get type
        VideoCodec::Type type = (VideoCodec::Type)packet->GetCodec();

        if ((decoder==NULL) || (type!=decoder->type))
        {
            //If we already had one, delete it
            if (decoder!=NULL)
                delete decoder;
            //Create one depending on the type
            decoder = VideoCodecFactory::CreateDecoder(type);
            //Check
            if (!decoder)
            {
                delete(packet);
                continue;
            }
        }

        //Decode it
        if(!decoder->DecodePacket(packet->GetMediaData(),packet->GetMediaLength(),0,packet->GetMark()))
        {
            delete(packet);
            continue;
        }

        //Get mark
        bool mark = packet->GetMark();

        //Delete packet
        delete(packet);

        //Check if it is the last one
        if(!mark)
            continue;

        //Check size
        if (decoder->GetWidth()!=width || decoder->GetHeight()!=height)
        {
            //Get dimensions
            width = decoder->GetWidth();
            height = decoder->GetHeight();
            //Set size (planar YUV 4:2:0 buffer)
            numpixels = width*height*3/2;
            //Set also frame rate and bps
            encoder->SetFrameRate(25,300,500);
            //Set them in the encoder
            encoder->SetSize(width,height);
        }

        //Encode next frame
        VideoFrame *encoded = encoder->EncodeFrame(decoder->GetFrame(),numpixels);

        //Check
        if (!encoded)
            break;

        //Check size
        if (frame.GetMaxMediaSize()<encoded->GetLength())
            //Not enough space
            return Error("Not enough space to copy FLV encoded frame [frame:%d,encoded:%d]\n",frame.GetMaxMediaSize(),encoded->GetLength());

        //Get full frame
        frame.SetVideoFrame(encoded->GetData(),encoded->GetLength());

        //Set buffer size
        frame.SetMediaSize(encoded->GetLength());

        //Check type
        if (encoded->IsIntra())
            //Set type
            frame.SetFrameType(RTMPVideoFrame::INTRA);
        else
            //Set type
            frame.SetFrameType(RTMPVideoFrame::INTER);

        //Let the connection set the timestamp
        frame.SetTimestamp(getDifTime(&first)/1000);

        //Send it
        SendMediaFrame(&frame);
    }

    //Check
    if (decoder)
        //Delete
        delete(decoder);
    //Check
    if (encoder)
        //Delete
        delete(encoder);

    Log("<RecVideo\n");

    return 1;
}
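The numpixels = width*height*3/2 computation in RecVideo above is the byte size of one planar YUV 4:2:0 picture: a full-resolution luma plane plus two quarter-resolution chroma planes. A minimal standalone sketch of that arithmetic follows; the helper name and the CIF dimensions are hypothetical and only used for illustration.

#include <cstdio>

// Bytes needed for a planar YUV 4:2:0 buffer: Y is width*height,
// U and V are (width/2)*(height/2) each, so the total is width*height*3/2.
static unsigned yuv420BufferSize(unsigned width, unsigned height)
{
    return width * height                 // Y plane
         + (width / 2) * (height / 2)     // U plane
         + (width / 2) * (height / 2);    // V plane
}

int main()
{
    // Hypothetical CIF-sized frame
    printf("352x288 -> %u bytes\n", yuv420BufferSize(352, 288)); // 152064
    return 0;
}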
int FLVEncoder::EncodeVideo()
{
    timeval prev;

    //Start
    Log(">FLVEncoder encode video\n");

    //Allocate media frame
    RTMPVideoFrame frame(0,262143);

    //Check codec
    switch(videoCodec)
    {
        case VideoCodec::SORENSON:
            //Set video codec
            frame.SetVideoCodec(RTMPVideoFrame::FLV1);
            break;
        case VideoCodec::H264:
            //Set video codec
            frame.SetVideoCodec(RTMPVideoFrame::AVC);
            //Set NAL type
            frame.SetAVCType(RTMPVideoFrame::AVCNALU);
            //No delay
            frame.SetAVCTS(0);
            break;
        default:
            return Error("-Wrong codec type %d\n",videoCodec);
    }

    //Create the encoder
    VideoEncoder *encoder = VideoCodecFactory::CreateEncoder(videoCodec,videoProperties);

    //Set frame rate
    encoder->SetFrameRate(fps,bitrate,intra);

    //Set dimensions
    encoder->SetSize(width,height);

    //Start capturing
    videoInput->StartVideoCapture(width,height,fps);

    //The time of the first one
    gettimeofday(&prev,NULL);

    //No wait for first
    DWORD frameTime = 0;

    Log(">FLVEncoder encode video loop\n");

    //While we have to capture
    while(encodingVideo)
    {
        //Keep the pointer before it gets changed
        BYTE* pic = videoInput->GrabFrame(frameTime);

        //Ensure we are still encoding
        if (!encodingVideo)
            break;

        //Check pic
        if (!pic)
            continue;

        //Check if we need to send intra
        if (sendFPU)
        {
            //Set it
            encoder->FastPictureUpdate();
            //Do not send anymore
            sendFPU = false;
        }

        //Encode next frame
        VideoFrame *encoded = encoder->EncodeFrame(pic,videoInput->GetBufferSize());

        //Check
        if (!encoded)
            break;

        //Check size
        if (frame.GetMaxMediaSize()<encoded->GetLength())
        {
            //Not enough space
            Error("Not enough space to copy FLV encoded frame [frame:%d,encoded:%d]\n",frame.GetMaxMediaSize(),encoded->GetLength());
            //Next
            continue;
        }

        //Check
        if (frameTime)
        {
            timespec ts;
            //Lock
            pthread_mutex_lock(&mutex);
            //Calculate timeout
            calcAbsTimeout(&ts,&prev,frameTime);
            //Wait for next frame time or until stopped
            int canceled = !pthread_cond_timedwait(&cond,&mutex,&ts);
            //Unlock
            pthread_mutex_unlock(&mutex);
            //Check if we have been canceled
            if (canceled)
                //Exit
                break;
        }

        //Set sending time of previous frame
        getUpdDifTime(&prev);

        //Set timestamp
        encoded->SetTimestamp(getDifTime(&first)/1000);

        //Set next one
        frameTime = 1000/fps;

        //Set duration
        encoded->SetDuration(frameTime);

        //Get full frame
        frame.SetVideoFrame(encoded->GetData(),encoded->GetLength());

        //Set buffer size
        frame.SetMediaSize(encoded->GetLength());

        //Check type
        if (encoded->IsIntra())
            //Set type
            frame.SetFrameType(RTMPVideoFrame::INTRA);
        else
            //Set type
            frame.SetFrameType(RTMPVideoFrame::INTER);

        //If we need the description but do not have it yet
        if (!frameDesc && encoded->IsIntra() && videoCodec==VideoCodec::H264)
        {
            //Create new description
            AVCDescriptor desc;
            //Set values
            desc.SetConfigurationVersion(1);
            desc.SetAVCProfileIndication(0x42);
            desc.SetProfileCompatibility(0x80);
            desc.SetAVCLevelIndication(0x0C);
            desc.SetNALUnitLength(3);
            //Get encoded data
            BYTE *data = encoded->GetData();
            //Get size
            DWORD size = encoded->GetLength();
            //Get SPS/PPS from frame
            desc.AddParametersFromFrame(data,size);
            //Create desc frame
            frameDesc = new RTMPVideoFrame(getDifTime(&first)/1000,desc);
            //Lock
            pthread_mutex_lock(&mutex);
            //Send it
            SendMediaFrame(frameDesc);
            //Unlock
            pthread_mutex_unlock(&mutex);
        }

        //Lock
        pthread_mutex_lock(&mutex);
        //Set timestamp
        frame.SetTimestamp(encoded->GetTimeStamp());
        //Publish it
        SendMediaFrame(&frame);
        //For each listener
        for(MediaFrameListeners::iterator it = mediaListeners.begin(); it!=mediaListeners.end(); ++it)
            //Send it
            (*it)->onMediaFrame(RTMPMediaStream::id,*encoded);
        //Unlock
        pthread_mutex_unlock(&mutex);
    }

    Log("-FLVEncoder encode video end of loop\n");

    //Stop the capture
    videoInput->StopVideoCapture();

    //Check
    if (encoder)
        //Delete it
        delete(encoder);

    Log("<FLVEncoder encode video\n");

    //Exit
    return 1;
}
bool MultiConf::AddBroadcastReceiver(RTMPStream *receiver)
{
    broadcast.AddReceiver(receiver);

    Participants::iterator itBroadcaster = participants.find(m_CurrentBroadCaster);

    if(itBroadcaster != participants.end())
    {
        RTPParticipant *broadCaster = (RTPParticipant*)itBroadcaster->second;

        Log("Send IDR packet to newly added broadcast receiver\n");

        IDRPacketSize idrPacketSize = broadCaster->GetIdrPacketSize();
        IDRPacket idrPacket = broadCaster->GetIdrPacket();
        DWORD currentTimeStamp = broadCaster->GetCurrentTimestamp();
        size_t packetSize = idrPacket.size();

        //Create desc frame
        RTMPVideoFrame frameDesc(0,2048);
        //Set timestamp
        frameDesc.SetTimestamp(currentTimeStamp);
        //Set codec
        frameDesc.SetVideoCodec(RTMPVideoFrame::AVC);
        //Set type
        frameDesc.SetFrameType(RTMPVideoFrame::INTRA);
        //Set NALU type (sequence header)
        frameDesc.SetAVCType(0);
        //Set no delay
        frameDesc.SetAVCTS(0);

        //Create description
        AVCDescriptor desc;
        //Set values
        desc.SetConfigurationVersion(1);
        //desc.SetAVCProfileIndication(0x42);
        //desc.SetProfileCompatibility(0x80);
        //desc.SetAVCLevelIndication(0x14);
        //desc.SetAVCProfileIndication(idrPacket[0][1]);
        //desc.SetProfileCompatibility(idrPacket[0][2]);
        //desc.SetAVCLevelIndication(idrPacket[0][3]);
        desc.SetAVCProfileIndication(0x64);
        desc.SetProfileCompatibility(0x00);
        desc.SetAVCLevelIndication(0x28);
        desc.SetNALUnitLength(3);
        desc.AddSequenceParameterSet(idrPacket[0],idrPacketSize[0]);
        desc.AddPictureParameterSet(idrPacket[1],idrPacketSize[1]);

        //Serialize
        DWORD len = desc.Serialize(frameDesc.GetMediaData(),frameDesc.GetMaxMediaSize());
        //Set size
        frameDesc.SetMediaSize(len);

        //broadcast.OnPublishedFrame(0, &frameDesc);
        receiver->PlayMediaFrame(&frameDesc);
        frameDesc.Dump();

        RTMPVideoFrame frame(0,65535);
        //Set codec
        frame.SetVideoCodec(RTMPVideoFrame::AVC);
        //Set NALU type
        frame.SetAVCType(1);
        //Set no delay
        frame.SetAVCTS(0);
        frame.SetTimestamp(currentTimeStamp);
        frame.SetFrameType(RTMPVideoFrame::INTRA);

        VideoFrame *videoFrame;
        RTPDepacketizer *depacketizer = RTPDepacketizer::Create(MediaFrame::Video, VideoCodec::H264);

        for(size_t i = 0; i < packetSize; i++)
        {
            BYTE *packet = idrPacket[i];
            int packet_size = idrPacketSize[i];
            videoFrame = (VideoFrame *)depacketizer->AddPayload(packet,packet_size);
        }

        frame.SetVideoFrame(videoFrame->GetData(), videoFrame->GetLength());
        receiver->PlayMediaFrame(&frame);
        frame.Dump();

        delete depacketizer;
    }

    return true;
}
/*******************************************
 * SendVideo
 *	Capture the video and send it
 *******************************************/
int VideoStream::SendVideo()
{
    timeval prev;
    timeval lastFPU;

    DWORD num = 0;
    QWORD overslept = 0;

    Acumulator bitrateAcu(1000);
    Acumulator fpsAcu(1000);

    Log(">SendVideo [width:%d,height:%d,bitrate:%d,fps:%d,intra:%d]\n",videoGrabWidth,videoGrabHeight,videoBitrate,videoFPS,videoIntraPeriod);

    //Create the encoder
    VideoEncoder* videoEncoder = VideoCodecFactory::CreateEncoder(videoCodec,videoProperties);

    //Check it was created correctly
    if (videoEncoder == NULL)
        //Error
        return Error("Can't create video encoder\n");

    //Check we have a video input
    if (videoInput == NULL)
        return Error("No video input\n");

    //Start video capture at the configured size
    if (!videoInput->StartVideoCapture(videoGrabWidth,videoGrabHeight,videoFPS))
        return Error("Couldn't set video capture\n");

    //Start at 80%
    int current = videoBitrate*0.8;

    //Send the first frame at a higher bitrate, but skip frames after that so the sending bitrate is kept
    videoEncoder->SetFrameRate(videoFPS,current*5,videoIntraPeriod);

    //No wait for first
    QWORD frameTime = 0;

    //Set the encoder size
    videoEncoder->SetSize(videoGrabWidth,videoGrabHeight);

    //The time of the previous one
    gettimeofday(&prev,NULL);

    //First FPU
    gettimeofday(&lastFPU,NULL);

    //Started
    Log("-Sending video\n");

    //While we have to capture
    while(sendingVideo)
    {
        //Keep the pointer before it gets changed
        BYTE *pic = videoInput->GrabFrame(frameTime/1000);

        //Check picture
        if (!pic)
            //Next
            continue;

        //Check if we need to send intra
        if (sendFPU)
        {
            //Do not send anymore
            sendFPU = false;
            //Do not send if we just sent one (100ms)
            if (getDifTime(&lastFPU)/1000>minFPUPeriod)
            {
                //Send the intra at a higher bitrate, but skip frames after that so the sending bitrate is kept
                videoEncoder->SetFrameRate(videoFPS,current*5,videoIntraPeriod);
                //Reset frameTime so it is recalculated afterwards
                frameTime = 0;
                //Set it
                videoEncoder->FastPictureUpdate();
                //Update last FPU
                getUpdDifTime(&lastFPU);
            }
        }

        //Calculate target bitrate
        int target = current;

        //Check temporal limits for estimations
        if (bitrateAcu.IsInWindow())
        {
            //Get real sent bitrate during last second and convert to kbits
            DWORD instant = bitrateAcu.GetInstantAvg()/1000;
            //If we are in quarantine
            if (videoBitrateLimitCount)
                //Limit sending bitrate
                target = videoBitrateLimit;
            //Check if sending below limits
            else if (instant<videoBitrate)
                //Increase an 8% each second, i.e. 8%/fps per frame, plus 1 kbps
                target += (DWORD)(target*0.08/videoFPS)+1;
        }

        //Check target bitrate against max configured bitrate
        if (target>videoBitrate*1.2)
            //Set limit to max bitrate allowing a 20% overflow so instant bitrate can get closer to target
            target = videoBitrate*1.2;

        //Check limits counter
        if (videoBitrateLimitCount>0)
            //One frame less of limit
            videoBitrateLimitCount--;

        //Check if we have a new bitrate
        if (target && target!=current)
        {
            //Reset bitrate
            videoEncoder->SetFrameRate(videoFPS,target,videoIntraPeriod);
            //Update current
            current = target;
        }

        //Encode the frame
        VideoFrame *videoFrame = videoEncoder->EncodeFrame(pic,videoInput->GetBufferSize());

        //If it failed
        if (!videoFrame)
            //Next
            continue;

        //Increase frame counter
        fpsAcu.Update(getTime()/1000,1);

        //Check
        if (frameTime)
        {
            timespec ts;
            //Lock
            pthread_mutex_lock(&mutex);
            //Calculate sleep time
            QWORD sleep = frameTime;
            //Remove extra sleep from previous frame
            if (overslept<sleep)
                //Remove it
                sleep -= overslept;
            else
                //Do not overflow
                sleep = 1;
            //Calculate timeout
            calcAbsTimeoutNS(&ts,&prev,sleep);
            //Wait for next frame time or until stopped
            int canceled = !pthread_cond_timedwait(&cond,&mutex,&ts);
            //Unlock
            pthread_mutex_unlock(&mutex);
            //Check if we have been canceled
            if (canceled)
                //Exit
                break;
            //Get difference
            QWORD diff = getDifTime(&prev);
            //If it is bigger
            if (diff>frameTime)
                //Get how much longer we slept
                overslept = diff-frameTime;
            else
                //No oversleep (shouldn't be possible)
                overslept = 0;
        }

        //Increase frame counter
        fpsAcu.Update(getTime()/1000,1);

        //If first
        if (!frameTime)
        {
            //Set frame time, slower
            frameTime = 5*1000000/videoFPS;
            //Restore bitrate
            videoEncoder->SetFrameRate(videoFPS,current,videoIntraPeriod);
        } else {
            //Set frame time
            frameTime = 1000000/videoFPS;
        }

        //Add frame size in bits to bitrate calculator
        bitrateAcu.Update(getDifTime(&ini)/1000,videoFrame->GetLength()*8);

        //Set frame timestamp
        videoFrame->SetTimestamp(getDifTime(&ini)/1000);

        //Check if we have a mediaListener
        if (mediaListener)
            //Call it
            mediaListener->onMediaFrame(*videoFrame);

        //Set sending time of previous frame
        getUpdDifTime(&prev);

        //Calculate sending time based on bitrate
        DWORD sendingTime = videoFrame->GetLength()*8/current;

        //Adjust to maximum time
        if (sendingTime>frameTime/1000)
            //Cap it
            sendingTime = frameTime/1000;

        //If it was an I frame
        if (videoFrame->IsIntra())
            //Clean rtp rtx buffer
            rtp.FlushRTXPackets();

        //Send it smoothly
        smoother.SendFrame(videoFrame,sendingTime);

        //Dump statistics
        if (num && ((num%videoFPS*10)==0))
        {
            Debug("-Send bitrate target=%d current=%d avg=%llf rate=[%llf,%llf] fps=[%llf,%llf] limit=%d\n",target,current,bitrateAcu.GetInstantAvg()/1000,bitrateAcu.GetMinAvg()/1000,bitrateAcu.GetMaxAvg()/1000,fpsAcu.GetMinAvg(),fpsAcu.GetMaxAvg(),videoBitrateLimit);
            bitrateAcu.ResetMinMax();
            fpsAcu.ResetMinMax();
        }
        num++;
    }

    Log("-SendVideo out of loop\n");

    //Stop capturing
    videoInput->StopVideoCapture();

    //Check
    if (videoEncoder)
        //Delete the encoder
        delete videoEncoder;

    //Exit
    Log("<SendVideo [%d]\n",sendingVideo);

    return 0;
}
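SendVideo paces its output by deriving a per-frame interval from the configured fps and a smoothed sending time from the current bitrate, capping the latter at one frame interval so the smoother never spills into the next frame slot. A standalone sketch of that arithmetic follows; the variable names and the numbers (25 fps, 512 kbps, 4000-byte frame) are hypothetical illustrations, not the class members.

#include <cstdio>

int main()
{
    // Hypothetical settings, for illustration only
    unsigned fps = 25;             // frames per second
    unsigned bitrateKbps = 512;    // current target bitrate in kbps
    unsigned frameBytes = 4000;    // size of one encoded frame

    // Per-frame interval in microseconds (1000000/videoFPS in the code above)
    unsigned frameTimeUs = 1000000 / fps;                  // 40000 us

    // Time needed to send the frame at the target bitrate, in milliseconds:
    // bits divided by kbps gives ms, same as videoFrame->GetLength()*8/current
    unsigned sendingTimeMs = frameBytes * 8 / bitrateKbps; // 62 ms

    // Cap the smoothing window at one frame interval
    if (sendingTimeMs > frameTimeUs / 1000)
        sendingTimeMs = frameTimeUs / 1000;                // capped to 40 ms

    printf("frameTime=%uus sendingTime=%ums\n", frameTimeUs, sendingTimeMs);
    return 0;
}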
int RTPMultiplexerSmoother::SmoothFrame(const MediaFrame* frame,DWORD duration)
{
    //Check
    if (!frame || !frame->HasRtpPacketizationInfo())
        //Error
        return Error("Frame does not have packetization info\n");

    //Get info
    const MediaFrame::RtpPacketizationInfo& info = frame->GetRtpPacketizationInfo();

    DWORD codec = 0;
    BYTE *frameData = NULL;
    DWORD frameSize = 0;

    //Depending on the type
    switch(frame->GetType())
    {
        case MediaFrame::Audio:
        {
            //Get audio frame
            AudioFrame * audio = (AudioFrame*)frame;
            //Get codec
            codec = audio->GetCodec();
            //Get data
            frameData = audio->GetData();
            //Get size
            frameSize = audio->GetLength();
        }
        break;
        case MediaFrame::Video:
        {
            //Get video frame
            VideoFrame * video = (VideoFrame*)frame;
            //Get codec
            codec = video->GetCodec();
            //Get data
            frameData = video->GetData();
            //Get size
            frameSize = video->GetLength();
        }
        break;
        default:
            return Error("No smoother for frame\n");
    }

    DWORD frameLength = 0;

    //Calculate total length
    for (size_t i=0;i<info.size();i++)
        //Add each packetization length
        frameLength += info[i]->GetTotalLength();

    //Accumulated length so far, used to spread sending times across the frame duration
    DWORD current = 0;

    //For each one
    for (size_t i=0;i<info.size();i++)
    {
        //Get packet
        MediaFrame::RtpPacketization* rtp = info[i];

        //Create rtp packet
        RTPPacketSched *packet = new RTPPacketSched(frame->GetType(),codec);

        //Make sure there is enough room
        if (rtp->GetPrefixLen()+rtp->GetSize()>packet->GetMaxMediaLength())
        {
            //Delete the packet so it does not leak
            delete(packet);
            //Error
            continue;
        }

        //Get pointer to media data
        BYTE* out = packet->GetMediaData();
        //Copy prefix
        memcpy(out,rtp->GetPrefixData(),rtp->GetPrefixLen());
        //Copy data
        memcpy(out+rtp->GetPrefixLen(),frameData+rtp->GetPos(),rtp->GetSize());
        //Set length
        DWORD len = rtp->GetPrefixLen()+rtp->GetSize();
        //Set length
        packet->SetMediaLength(len);

        switch(packet->GetMedia())
        {
            case MediaFrame::Video:
                //Set timestamp (90 kHz clock)
                packet->SetTimestamp(frame->GetTimeStamp()*90);
                break;
            case MediaFrame::Audio:
                //Set timestamp (8 kHz clock)
                packet->SetTimestamp(frame->GetTimeStamp()*8);
                break;
            default:
                //Set timestamp
                packet->SetTimestamp(frame->GetTimeStamp());
        }

        //Check
        if (i+1==info.size())
            //Last
            packet->SetMark(true);
        else
            //Not last
            packet->SetMark(false);

        //Calculate partial length
        current += len;

        //Calculate sending time offset from first frame
        packet->SetSendingTime(current*duration/frameLength);

        //Append it
        queue.Add(packet);
    }

    return 1;
}
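The timestamp scaling in SmoothFrame converts the frame's millisecond timestamp into RTP clock ticks: video uses the standard 90 kHz RTP clock (90 ticks per ms) and the audio path here assumes an 8 kHz clock (8 ticks per ms). A minimal sketch of that conversion follows; msToRtpTicks and the sample values are hypothetical and only illustrate the multiplications above.

#include <cstdint>
#include <cstdio>

// Convert a media timestamp in milliseconds to RTP clock ticks.
// clockRateHz is 90000 for video, 8000 for narrowband audio.
static uint32_t msToRtpTicks(uint64_t timestampMs, uint32_t clockRateHz)
{
    return (uint32_t)(timestampMs * (clockRateHz / 1000));
}

int main()
{
    // Hypothetical frame captured 40 ms into the stream (one 25 fps interval)
    printf("video ticks: %u\n", msToRtpTicks(40, 90000)); // 3600
    printf("audio ticks: %u\n", msToRtpTicks(40, 8000));  // 320
    return 0;
}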
void H264FrameSource::doGetNextFrame()
{
    // Compute the wait time from the fps
    double delay = 1000.0 / videoFPS;
    int to_delay = delay * 1000;    // us

    if(!m_videoInput)
        return;

    BYTE *pic = m_videoInput->GrabFrame();

    //Check picture
    if (!pic)
    {
        fFrameSize = 0;
        m_started = 0;
        return;
    }

    //Check if we need to send intra
    if (sendFPU)
    {
        videoEncoder->FastPictureUpdate();
    }

    //if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
    //    // This is the first frame, so use the current time:
    //} else {
    //    // Increment by the play time of the previous data:
    //    unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
    //    fPresentationTime.tv_sec += uSeconds/1000000;
    //    fPresentationTime.tv_usec = uSeconds%1000000;
    //}
    // Remember the play time of this data:
    //fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    //fDurationInMicroseconds = fLastPlayTime;
    //fDurationInMicroseconds = 1000.0 / videoFPS;

    VideoFrame *videoFrame = videoEncoder->EncodeFrame(pic,m_videoInput->GetBufferSize());

    //If it failed
    if (!videoFrame)
    {
        //Next
        fFrameSize = 0;
        m_started = 0;
        Log("-----Error encoding video\n");
        nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay, (TaskFunc*)FramedSource::afterGetting, this);
        return;
    }

    if(sendFPU)
        sendFPU = false;

    //Set frame timestamp
    videoFrame->SetTimestamp(getDifTime(&first)/1000);

    //Set sending time of previous frame
    //getUpdDifTime(&prev);
    //gettimeofday(&fPresentationTime, 0);

    fFrameSize = videoFrame->GetLength();

    //Truncate before copying so we never write past the sink's buffer
    if (fFrameSize > fMaxSize)
    {
        fNumTruncatedBytes = fFrameSize - fMaxSize;
        fFrameSize = fMaxSize;
    }
    else
    {
        fNumTruncatedBytes = 0;
    }

    memmove(fTo, videoFrame->GetData(), fFrameSize);

    gettimeofday(&fPresentationTime, NULL);

    //to_delay = ((1000 / videoFPS) * fFrameSize / RTPPAYLOADSIZE) * 1000;    // us

    nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay, (TaskFunc*)FramedSource::afterGetting, this);
}