Example #1
0
int FLVEncoder::EncodeVideo()
{
	timeval prev;

	//Start
	Log(">FLVEncoder  encode video\n");

	//Allocate media frame
	RTMPVideoFrame frame(0,262143);

	//Check codec
	switch(videoCodec)
	{
		case VideoCodec::SORENSON:
			//Ser Video codec
			frame.SetVideoCodec(RTMPVideoFrame::FLV1);
			break;
		case VideoCodec::H264:
			//Ser Video codec
			frame.SetVideoCodec(RTMPVideoFrame::AVC);
			//Set NAL type
			frame.SetAVCType(RTMPVideoFrame::AVCNALU);
			//No delay
			frame.SetAVCTS(0);
			break;
		default:
			return Error("-Wrong codec type %d\n",videoCodec);
	}
	
	//Create the encoder
	VideoEncoder *encoder = VideoCodecFactory::CreateEncoder(videoCodec,videoProperties);

	///Set frame rate
	encoder->SetFrameRate(fps,bitrate,intra);

	//Set dimensions
	encoder->SetSize(width,height);

	//Start capturing
	videoInput->StartVideoCapture(width,height,fps);

	//The time of the first one
	gettimeofday(&prev,NULL);

	//No wait for first
	DWORD frameTime = 0;

	Log(">FLVEncoder encode vide\n");

	//Mientras tengamos que capturar
	while(encodingVideo)
	{
		//Nos quedamos con el puntero antes de que lo cambien
		BYTE* pic=videoInput->GrabFrame(frameTime);
		
		//Ensure we are still encoding
		if (!encodingVideo)
			break;

		//Check pic
		if (!pic)
			continue;

		//Check if we need to send intra
		if (sendFPU)
		{
			//Set it
			encoder->FastPictureUpdate();
			//Do not send anymore
			sendFPU = false;
		}

		//Encode next frame
		VideoFrame *encoded = encoder->EncodeFrame(pic,videoInput->GetBufferSize());

		//Check
		if (!encoded)
			break;

		//Check size
		if (frame.GetMaxMediaSize()<encoded->GetLength())
		{
			//Not enougth space
			Error("Not enought space to copy FLV encodec frame [frame:%d,encoded:%d",frame.GetMaxMediaSize(),encoded->GetLength());
			//NExt
			continue;
		}

		//Check
		if (frameTime)
		{
			timespec ts;
			//Lock
			pthread_mutex_lock(&mutex);
			//Calculate timeout
			calcAbsTimeout(&ts,&prev,frameTime);
			//Wait next or stopped
			int canceled  = !pthread_cond_timedwait(&cond,&mutex,&ts);
			//Unlock
			pthread_mutex_unlock(&mutex);
			//Check if we have been canceled
			if (canceled)
				//Exit
				break;
		}
		//Set sending time of previous frame
		getUpdDifTime(&prev);

		//Set timestamp
		encoded->SetTimestamp(getDifTime(&first)/1000);

		//Set next one
		frameTime = 1000/fps;

		//Set duration
		encoded->SetDuration(frameTime);
		
		//Get full frame
		frame.SetVideoFrame(encoded->GetData(),encoded->GetLength());

		//Set buffer size
		frame.SetMediaSize(encoded->GetLength());

		//Check type
		if (encoded->IsIntra())
			//Set type
			frame.SetFrameType(RTMPVideoFrame::INTRA);
		else
			//Set type
			frame.SetFrameType(RTMPVideoFrame::INTER);

	
		//If we need desc but yet not have it
		if (!frameDesc && encoded->IsIntra() && videoCodec==VideoCodec::H264)
		{
			//Create new description
			AVCDescriptor desc;
			//Set values
			desc.SetConfigurationVersion(1);
			desc.SetAVCProfileIndication(0x42);
			desc.SetProfileCompatibility(0x80);
			desc.SetAVCLevelIndication(0x0C);
			desc.SetNALUnitLength(3);
			//Get encoded data
			BYTE *data = encoded->GetData();
			//Get size
			DWORD size = encoded->GetLength();
			//get from frame
			desc.AddParametersFromFrame(data,size);
			//Crete desc frame
			frameDesc = new RTMPVideoFrame(getDifTime(&first)/1000,desc);
			//Lock
			pthread_mutex_lock(&mutex);
			//Send it
			SendMediaFrame(frameDesc);
			//unlock
			pthread_mutex_unlock(&mutex);
		}
		
		//Lock
		pthread_mutex_lock(&mutex);
		//Set timestamp
		frame.SetTimestamp(encoded->GetTimeStamp());
		//Publish it
		SendMediaFrame(&frame);
		//For each listener
		for(MediaFrameListeners::iterator it = mediaListeners.begin(); it!=mediaListeners.end(); ++it)
			//Send it
			(*it)->onMediaFrame(RTMPMediaStream::id,*encoded);
		//unlock
		pthread_mutex_unlock(&mutex);
	}
	Log("-FLVEncoder encode video end of loop\n");

	//Stop the capture
	videoInput->StopVideoCapture();

	//Check
	if (encoder)
		//Exit
		delete(encoder);
	Log("<FLVEncoder encode vide\n");
	
	//Exit
	return 1;
}
Example #2
0
int RTPMultiplexerSmoother::SmoothFrame(const MediaFrame* frame,DWORD duration)
{
	//Check
	if (!frame || !frame->HasRtpPacketizationInfo())
		//Error
		return Error("Frame do not has packetization info");

	//Get info
	const MediaFrame::RtpPacketizationInfo& info = frame->GetRtpPacketizationInfo();

	DWORD codec = 0;
	BYTE *frameData = NULL;
	DWORD frameSize = 0;

	//Depending on the type
	switch(frame->GetType())
	{
		case MediaFrame::Audio:
		{
			//get audio frame
			AudioFrame * audio = (AudioFrame*)frame;
			//Get codec
			codec = audio->GetCodec();
			//Get data
			frameData = audio->GetData();
			//Get size
			frameSize = audio->GetLength();
		}
			break;
		case MediaFrame::Video:
		{
			//get Video frame
			VideoFrame * video = (VideoFrame*)frame;
			//Get codec
			codec = video->GetCodec();
			//Get data
			frameData = video->GetData();
			//Get size
			frameSize = video->GetLength();
		}
			break;
		default:
			return Error("No smoother for frame");
	}

	DWORD frameLength = 0;
	//Calculate total length
	for (int i=0;i<info.size();i++)
		//Get total length
		frameLength += info[i]->GetTotalLength();

	//Calculate bitrate for frame
	DWORD current = 0;
	
	//For each one
	for (int i=0;i<info.size();i++)
	{
		//Get packet
		MediaFrame::RtpPacketization* rtp = info[i];

		//Create rtp packet
		RTPPacketSched *packet = new RTPPacketSched(frame->GetType(),codec);

		//Make sure it is enought length
		if (rtp->GetPrefixLen()+rtp->GetSize()>packet->GetMaxMediaLength())
			//Error
			continue;
		
		//Get pointer to media data
		BYTE* out = packet->GetMediaData();
		//Copy prefic
		memcpy(out,rtp->GetPrefixData(),rtp->GetPrefixLen());
		//Copy data
		memcpy(out+rtp->GetPrefixLen(),frameData+rtp->GetPos(),rtp->GetSize());
		//Set length
		DWORD len = rtp->GetPrefixLen()+rtp->GetSize();
		//Set length
		packet->SetMediaLength(len);
		switch(packet->GetMedia())
		{
			case MediaFrame::Video:
				//Set timestamp
				packet->SetTimestamp(frame->GetTimeStamp()*90);
				break;
			case MediaFrame::Audio:
				//Set timestamp
				packet->SetTimestamp(frame->GetTimeStamp()*8);
				break;
			default:
				//Set timestamp
				packet->SetTimestamp(frame->GetTimeStamp());
		}
		//Check
		if (i+1==info.size())
			//last
			packet->SetMark(true);
		else
			//No last
			packet->SetMark(false);
		//Calculate partial lenght
		current += len;
		//Calculate sending time offset from first frame
		packet->SetSendingTime(current*duration/frameLength);
		//Append it
		queue.Add(packet);
	}

	return 1;
}
Example #3
0
bool MultiConf::AddBroadcastReceiver(RTMPStream *receiver)
{
	//Register the receiver with the broadcast stream
	broadcast.AddReceiver(receiver);
	//Find the current broadcaster so we can replay its cached IDR data
	Participants::iterator itBroadcaster = participants.find(m_CurrentBroadCaster);
	if(itBroadcaster != participants.end())
	{
		RTPParticipant *broadCaster = (RTPParticipant*)itBroadcaster->second;
		Log("Send idr packet to newly broadcast reciever\n");
		IDRPacketSize idrPacketSize = broadCaster->GetIdrPacketSize();
		IDRPacket idrPacket = broadCaster->GetIdrPacket();
		DWORD currentTimeStamp = broadCaster->GetCurrentTimestamp();
		size_t packetSize = idrPacket.size();

		//FIX: idrPacket[0] (SPS) and idrPacket[1] (PPS) are indexed
		//below; bail out if the broadcaster has not produced them yet.
		if (packetSize < 2 || idrPacketSize.size() < 2)
			return true;

		//Create desc frame
		RTMPVideoFrame frameDesc(0,2048);
		//Set timestamp
		frameDesc.SetTimestamp(currentTimeStamp);
		//Set codec
		frameDesc.SetVideoCodec(RTMPVideoFrame::AVC);
		//Set type
		frameDesc.SetFrameType(RTMPVideoFrame::INTRA);
		//Set NALU type (0 = AVC sequence header)
		frameDesc.SetAVCType(0);
		//Set no delay
		frameDesc.SetAVCTS(0);
		//Create description
		AVCDescriptor desc;
		//Set values (high profile 0x64, level 4.0)
		desc.SetConfigurationVersion(1);
		desc.SetAVCProfileIndication(0x64);
		desc.SetProfileCompatibility(0x00);
		desc.SetAVCLevelIndication(0x28);
		desc.SetNALUnitLength(3);
		//First cached packet is the SPS, second the PPS
		desc.AddSequenceParameterSet(idrPacket[0],idrPacketSize[0]);
		desc.AddPictureParameterSet(idrPacket[1],idrPacketSize[1]);
		//Serialize
		DWORD len = desc.Serialize(frameDesc.GetMediaData(),frameDesc.GetMaxMediaSize());
		//Set size
		frameDesc.SetMediaSize(len);
		//Send the sequence header to the new receiver
		receiver->PlayMediaFrame(&frameDesc);
		frameDesc.Dump();

		RTMPVideoFrame frame(0,65535);
		//Set codec
		frame.SetVideoCodec(RTMPVideoFrame::AVC);
		//Set NALU type (1 = AVC NALU)
		frame.SetAVCType(1);
		//Set no delay
		frame.SetAVCTS(0);
		frame.SetTimestamp(currentTimeStamp);
		frame.SetFrameType(RTMPVideoFrame::INTRA);
		//FIX: videoFrame was uninitialized; if the depacketizer never
		//returned a complete frame we dereferenced garbage below.
		VideoFrame *videoFrame = NULL;
		RTPDepacketizer *depacketizer = RTPDepacketizer::Create( MediaFrame::Video, VideoCodec::H264);
		if (depacketizer)
		{
			//Feed every cached IDR payload; AddPayload returns the
			//assembled frame once complete, NULL until then.
			for(size_t i = 0; i < packetSize; i++) {
				BYTE *packet = idrPacket[i];
				int packet_size = idrPacketSize[i];
				videoFrame = (VideoFrame *)depacketizer->AddPayload(packet,packet_size);
			}
			//Only play the frame if depacketization completed
			if (videoFrame)
			{
				frame.SetVideoFrame(videoFrame->GetData(), videoFrame->GetLength());
				receiver->PlayMediaFrame(&frame);
				frame.Dump();
			}
			delete depacketizer;
		}
	}
	
	return true;
}
void H264FrameSource::doGetNextFrame()
{
	//Compute the inter-frame wait from the configured fps
	double delay = 1000.0 / videoFPS;	// ms per frame
	int to_delay = delay * 1000;		// us

	//No video input available
	if(!m_videoInput)
		return;

	BYTE *pic = m_videoInput->GrabFrame();

	//Check picture
	if (!pic) {
		fFrameSize = 0;
		m_started = 0; 
		return;
	}

	//Check if we need to request an intra frame
	if (sendFPU)
	{
		videoEncoder->FastPictureUpdate();
	}

	//Encode the grabbed picture
	VideoFrame *videoFrame = videoEncoder->EncodeFrame(pic,m_videoInput->GetBufferSize());
	
	//If encoding failed, reschedule ourselves and bail out
	if (!videoFrame){
		fFrameSize = 0;
		m_started = 0;
		Log("-----Error encoding video\n");
		nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
				(TaskFunc*)FramedSource::afterGetting, this); 
		return;
	}
	
	//The intra request has been honoured
	if(sendFPU)
		sendFPU = false;

	//Set frame timestamp (ms since first)
	videoFrame->SetTimestamp(getDifTime(&first)/1000);

	fFrameSize = videoFrame->GetLength();

	//FIX: truncate BEFORE copying; the original memmove'd the whole
	//encoded frame into fTo and clamped fFrameSize only afterwards,
	//overflowing the sink buffer whenever the frame exceeded fMaxSize.
	if (fFrameSize > fMaxSize) {
		fNumTruncatedBytes = fFrameSize - fMaxSize;
		fFrameSize = fMaxSize;
	}
	else {
		fNumTruncatedBytes = 0;
	}

	memmove(fTo, videoFrame->GetData(), fFrameSize);
	
	gettimeofday(&fPresentationTime, NULL);

	//Schedule delivery of this frame after the pacing delay
	nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay,
				(TaskFunc*)FramedSource::afterGetting, this);
	
}