RTPHandler::RTPHandler(char* di, char* dp)
{
    portbase = RCV_PORT;
    destip = inet_addr(di);
    destport = (uint16_t) atoi(dp);
    alive = true;
    if (destip == INADDR_NONE)
    {
        cerr << "Bad IP address specified" << endl;
        alive = false;
        return;
    }
    destip = ntohl(destip);

    RTPUDPv4TransmissionParams transparams;
    RTPSessionParams sessparams;
    sessparams.SetOwnTimestampUnit(1.0/CLK_RATE);
    sessparams.SetAcceptOwnPackets(true);
    transparams.SetPortbase(portbase);

    int e = sess.Create(sessparams, &transparams);
    if (error(e))
        return;
    cout << sessparams.GetMaximumPacketSize() << endl;

    RTPIPv4Address addr(destip, destport);
    e = sess.AddDestination(addr);
    if (error(e))
        return;
}
int CRTPRecv::InitRTP(int Port)
{
    RTPUDPv4TransmissionParams TransParams;
    RTPSessionParams SessionParams;

    // Set the local communication port.
    TransParams.SetPortbase(Port);
    // Set the timestamp unit (90 kHz clock).
    SessionParams.SetOwnTimestampUnit(1.0/90000.0);
    SessionParams.SetAcceptOwnPackets(true);    // Also accept packets we send ourselves.

    // Create the RTP session.
    int Result = Create(SessionParams, &TransParams);
    if (Result < 0)
    {
        LOG_ERROR("RTP: " << RTPGetErrorString(Result));
        return -1;
    }

    // Allow large packets.
    this->SetMaximumPacketSize(65535);
    // Default payload settings (currently disabled):
    //this->SetDefaultMark(true);
    //this->SetDefaultTimestampIncrement(TIMESTAMP);
    //this->SetDefaultPayloadType(H264);
    return 0;
}
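// The snippet above only creates the receiving session; the receive path itself is not
// shown. A minimal sketch of one way to drain incoming packets, assuming CRTPRecv
// derives from RTPSession (as the direct Create() call suggests) and that the default
// background poll thread is enabled; HandlePayload() is a hypothetical application hook.
void CRTPRecv::OnPollThreadStep()
{
    BeginDataAccess();
    if (GotoFirstSourceWithData())
    {
        do
        {
            RTPPacket *pack;
            while ((pack = GetNextPacket()) != NULL)
            {
                // HandlePayload() stands in for the application's own packet handler.
                HandlePayload(pack->GetPayloadData(), pack->GetPayloadLength());
                DeletePacket(pack);
            }
        } while (GotoNextSourceWithData());
    }
    EndDataAccess();
}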
int MyRTPSession::MyRTP_SetUp(MediaSession *media_session)
{
    if (!media_session)
    {
        fprintf(stderr, "%s: Invalid media session\n", __func__);
        return RTP_ERROR;
    }
    if (0 == media_session->TimeRate)
    {
        fprintf(stderr, "%s: Invalid MediaSession::TimeRate\n", __func__);
        return RTP_ERROR;
    }
    if (0 == media_session->RTPPort)
    {
        fprintf(stderr, "%s: Invalid MediaSession::RTPPort\n", __func__);
        return RTP_ERROR;
    }

    int status;

    // Now we'll create an RTP session, set the destination
    // and poll for incoming data.
    RTPUDPv4TransmissionParams transparams;
    RTPSessionParams sessparams;

    // IMPORTANT: The local timestamp unit MUST be set, otherwise
    // RTCP Sender Report info will be calculated wrong.
    // Here it is derived from the media session's clock rate.
    sessparams.SetOwnTimestampUnit(1.0/media_session->TimeRate);

    sessparams.SetAcceptOwnPackets(true);
    transparams.SetPortbase(media_session->RTPPort);
    status = Create(sessparams, &transparams);
    return IsError(status);
}
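// A hypothetical caller, shown only to illustrate which MediaSession fields
// MyRTP_SetUp() relies on; the construction of MediaSession and the example
// values are assumptions, not part of the original code.
static int setupSessionFor(MyRTPSession &rtp, MediaSession &media_session)
{
    media_session.TimeRate = 90000;   // e.g. a 90 kHz video clock
    media_session.RTPPort  = 6000;    // local RTP port base
    return rtp.MyRTP_SetUp(&media_session);   // non-zero indicates failure, as above
}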
int main(void)
{
    int status, i;
    RTPSession s;
    RTPSessionParams sessparams;
    RTPUDPv4TransmissionParams transparams;
    char blaai[100];

    transparams.SetPortbase(10000);
    sessparams.SetOwnTimestampUnit(1.0/8000.0);
    sessparams.SetUsePollThread(true);
    sessparams.SetMaximumPacketSize(10000);
    status = s.Create(sessparams, &transparams);

    s.SetLocalName((const uint8_t *)"Jori Liesenborgs", 16);
    s.SetLocalEMail((const uint8_t *)"*****@*****.**", 20);
    s.SetLocalNote((const uint8_t *)"Blaai", 5);
    s.SetNameInterval(3);
    s.SetEMailInterval(5);
    s.SetNoteInterval(2);

    status = s.AddDestination(RTPIPv4Address(ntohl(inet_addr("192.168.2.115")), 5000));
    //status = s.AddDestination(RTPIPv4Address(ntohl(inet_addr("127.0.0.1")),7000));

    int snd = open("/dev/dsp", O_RDWR);
    int val;
    val = 0;
    status = ioctl(snd, SNDCTL_DSP_STEREO, &val);
    val = 8;
    status = ioctl(snd, SNDCTL_DSP_SAMPLESIZE, &val);
    val = 8000;
    status = ioctl(snd, SNDCTL_DSP_SPEED, &val);
    val = 7 | (128<<16);
    ioctl(snd, SNDCTL_DSP_SETFRAGMENT, &val);

    i = 0;
    while (i++ < 40000)
    {
        if (i == 1000)
        {
            //std::cout <<"Disabling note" << std::endl;
            s.SetNoteInterval(0);
        }

        uint8_t data[PACKSIZE];
        RTPTime t1 = RTPTime::CurrentTime();
        status = read(snd, data, PACKSIZE);
        RTPTime t2 = RTPTime::CurrentTime();
        t2 -= t1;
        printf("%d.%06d\n", t2.GetSeconds(), t2.GetMicroSeconds());
        status = s.SendPacket(data, PACKSIZE, 1, false, PACKSIZE);
    }

    close(snd);
    printf("Destroying...\n");
    s.BYEDestroy(RTPTime(10,0), (const uint8_t *)"Leaving session", 16);
    return 0;
}
int RTPSession::Create(const RTPSessionParams &sessparams, const RTPTransmissionParams *transparams /* = 0 */,
                       RTPTransmitter::TransmissionProtocol protocol)
{
    int status;

    if (created)
        return ERR_RTP_SESSION_ALREADYCREATED;

    usingpollthread = sessparams.IsUsingPollThread();
    useSR_BYEifpossible = sessparams.GetSenderReportForBYE();
    sentpackets = false;

    // Check max packet size
    if ((maxpacksize = sessparams.GetMaximumPacketSize()) < RTP_MINPACKETSIZE)
        return ERR_RTP_SESSION_MAXPACKETSIZETOOSMALL;

    // Initialize the transmission component
    rtptrans = 0;
    switch (protocol)
    {
    case RTPTransmitter::IPv4UDPProto:
        rtptrans = RTPNew(GetMemoryManager(),RTPMEM_TYPE_CLASS_RTPTRANSMITTER) RTPUDPv4Transmitter(GetMemoryManager());
        break;
#ifdef RTP_SUPPORT_IPV6
    case RTPTransmitter::IPv6UDPProto:
        rtptrans = RTPNew(GetMemoryManager(),RTPMEM_TYPE_CLASS_RTPTRANSMITTER) RTPUDPv6Transmitter(GetMemoryManager());
        break;
#endif // RTP_SUPPORT_IPV6
    case RTPTransmitter::UserDefinedProto:
        rtptrans = NewUserDefinedTransmitter();
        if (rtptrans == 0)
            return ERR_RTP_SESSION_USERDEFINEDTRANSMITTERNULL;
        break;
    default:
        return ERR_RTP_SESSION_UNSUPPORTEDTRANSMISSIONPROTOCOL;
    }
    if (rtptrans == 0)
        return ERR_RTP_OUTOFMEM;
    if ((status = rtptrans->Init(usingpollthread)) < 0)
    {
        RTPDelete(rtptrans,GetMemoryManager());
        return status;
    }
    if ((status = rtptrans->Create(maxpacksize,transparams)) < 0)
    {
        RTPDelete(rtptrans,GetMemoryManager());
        return status;
    }

    deletetransmitter = true;
    return InternalCreate(sessparams);
}
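// For reference, a minimal sketch of how this overload is typically reached from user
// code. The default protocol argument selects the IPv4 UDP transmitter built above;
// RTPTransmitter::UserDefinedProto would instead go through NewUserDefinedTransmitter(),
// which a subclass has to override. The function name, port and clock rate are examples.
static int createIPv4Session(RTPSession &session, uint16_t portbase)
{
    RTPSessionParams sessparams;
    RTPUDPv4TransmissionParams transparams;
    sessparams.SetOwnTimestampUnit(1.0/8000.0);   // example clock rate
    transparams.SetPortbase(portbase);

    // Same as session.Create(sessparams, &transparams); the protocol is explicit here.
    int status = session.Create(sessparams, &transparams, RTPTransmitter::IPv4UDPProto);
    if (status < 0)
        std::cerr << RTPGetErrorString(status) << std::endl;
    return status;
}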
/***********************************************************************************************************
** Function:   RtpSetup
** Purpose:    Create the RTP session and configure the default payload parameters.
** Parameters: portbase - local RTP port base
** Return:     status code from RTPSession::Create() (negative on error)
***********************************************************************************************************/
static int RtpSetup(uint16_t portbase)
{
    int status;
    RTPUDPv4TransmissionParams transparams;
    RTPSessionParams sessparams;

    sessparams.SetOwnTimestampUnit(1.0/90000.0);
    sessparams.SetMaximumPacketSize(1200);
    transparams.SetPortbase(portbase);

    status = sess.Create(sessparams, &transparams);
    checkerror(status);

    // These setters require a created session, so configure them after Create().
    sess.SetDefaultPayloadType(PLOAD_TYPE);
    sess.SetDefaultMark(false);
    sess.SetDefaultTimestampIncrement(DefaultTimestampIncrement);

    return status;
}
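// checkerror() is called above but never defined in these snippets; a minimal sketch
// along the lines of the helper used in the JRTPLIB example programs (print the error
// string and bail out).
static void checkerror(int status)
{
    if (status >= 0)
        return;
    std::cerr << "RTP error: " << RTPGetErrorString(status) << std::endl;
    exit(-1);
}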
// Maybe this could be used here; worth a try.
void Sender::setRTPParams()
{
    int status;
    RTPUDPv4TransmissionParams transparams;
    RTPSessionParams sessparams;

    sessparams.SetOwnTimestampUnit(1.0/25.0);
    sessparams.SetAcceptOwnPackets(true);
    sessparams.SetUsePredefinedSSRC(true);
    // sessparams.SetPredefinedSSRC(SSRC);
    transparams.SetPortbase(baseport);

    status = this->Create(sessparams, &transparams);
    checkerror(status);

    RTPIPv4Address addr(dst_ip, destport);
    status = this->AddDestination(addr);
    checkerror(status);
}
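// SetUsePredefinedSSRC(true) above only enables the feature; the commented-out
// SetPredefinedSSRC() call is what actually chooses the value that Create() will use.
// A minimal sketch of the pair of calls as they would sit inside setRTPParams(),
// before Create(); the helper name and SSRC value are examples, not original code.
static void usePredefinedSSRC(RTPSessionParams &sessparams, uint32_t ssrc /* e.g. 0x12345678 */)
{
    sessparams.SetUsePredefinedSSRC(true);   // enable the feature
    sessparams.SetPredefinedSSRC(ssrc);      // value picked up when the session is created
}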
int MP3MediaSubSession::InitRTPSession()
{
    // Set up session parameters
    RTPSessionParams sessParams;
    sessParams.SetOwnTimestampUnit(1.0 / 8000.0);     // 8 kHz timestamp clock
    sessParams.SetUsePollThread(1);                   // background thread to call virtual callbacks - set by default, but just to be sure
    sessParams.SetMaximumPacketSize(MAX_PACKET_SIZE);

    // Set up transmission parameters
    RTPUDPv4TransmissionParams transParams;
    transParams.SetPortbase(portNum_);

    // Create the session
    int errcode = theRTPSession_.Create(sessParams, &transParams);
    if (errcode < 0)
    {
        string stdErrStr = RTPGetErrorString(errcode);
        RTSPDEBUG("[Error] : %s", stdErrStr.c_str());
        return ERR_RTSP_CRAETE_RTP_SESSION;  // unable to create the session
    }
bool whu_RtpRPicSAg::Init()
{
#ifdef WIN32
    WSADATA dat;
    WSAStartup(MAKEWORD(2,2), &dat);
#endif // WIN32

    RTPSessionParams sessionparams;
    sessionparams.SetOwnTimestampUnit(1.0/8000.0);

    RTPUDPv4TransmissionParams transparams;
    transparams.SetPortbase(m_GLocalPort);

    int status = session.Create(sessionparams, &transparams);
    if (status < 0)
    {
        std::cerr << RTPGetErrorString(status) << std::endl;
        exit(-1);
    }

    //uint8_t localip[]={127,0,0,1};
    //uint8_t localip[]={192,168,16,3}; // desktop machine IP
    //unsigned char sssss[4]={192,168,16,3};
    //uint8_t localip[4];
    //memcpy(localip,m_GRemoteIpAddRess,4);
    RTPIPv4Address addr(m_GRemoteIpAddRess, m_GRemotePort);
    status = session.AddDestination(addr);
    if (status < 0)
    {
        std::cerr << RTPGetErrorString(status) << std::endl;
        exit(-1);
    }

    session.SetDefaultPayloadType(96);
    session.SetDefaultMark(false);
    session.SetDefaultTimestampIncrement(160);
    return true;
}
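// Because Init() above sets SetDefaultPayloadType/Mark/TimestampIncrement, the send
// side can use the short SendPacket() overload that relies on those defaults. A minimal
// sketch; the function name is hypothetical and 'payload'/'length' stand for the
// application's encoded data.
static int SendOneFrame(RTPSession &session, const void *payload, size_t length)
{
    // Uses payload type 96, mark=false and a timestamp increment of 160, as set in Init().
    int status = session.SendPacket(payload, length);
    if (status < 0)
        std::cerr << RTPGetErrorString(status) << std::endl;
    return status;
}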
int RTPSession::Create(const RTPSessionParams &sessparams, RTPTransmitter *transmitter)
{
    int status;

    if (created)
        return ERR_RTP_SESSION_ALREADYCREATED;

    usingpollthread = sessparams.IsUsingPollThread();
    useSR_BYEifpossible = sessparams.GetSenderReportForBYE();
    sentpackets = false;

    // Check max packet size
    if ((maxpacksize = sessparams.GetMaximumPacketSize()) < RTP_MINPACKETSIZE)
        return ERR_RTP_SESSION_MAXPACKETSIZETOOSMALL;

    rtptrans = transmitter;

    if ((status = rtptrans->SetMaximumPacketSize(maxpacksize)) < 0)
        return status;

    deletetransmitter = false;
    return InternalCreate(sessparams);
}
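// A minimal sketch of how this overload can be used with an externally owned
// transmitter: deletetransmitter is set to false above, so the caller must keep the
// transmitter alive for the session's lifetime and clean it up itself. The helper name,
// port and clock rate are examples; the caller would construct the transmitter with
// e.g. RTPUDPv4Transmitter transmitter(0).
static int createWithExternalTransmitter(RTPSession &session, RTPUDPv4Transmitter &transmitter, uint16_t portbase)
{
    RTPUDPv4TransmissionParams transparams;
    transparams.SetPortbase(portbase);

    // The transmitter has to be initialized and created before it is handed to Create().
    int status = transmitter.Init(false);                  // no thread safety needed in this sketch
    if (status >= 0)
        status = transmitter.Create(65535, &transparams);  // 65535: maximum packet size
    if (status < 0)
        return status;

    RTPSessionParams sessparams;
    sessparams.SetOwnTimestampUnit(1.0/8000.0);            // example clock rate
    return session.Create(sessparams, &transmitter);
}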
// Create a new RTP session. If multicast is not being used then multicastIP
// should be set to an empty string.
static RTPSession createRtpSession(const std::string& multicastIP, unsigned int port)
{
    RTPSession session;

    //if (setenv("LOGNAME", "video", 0) != 0) {
    //    throw std::runtime_error("Error setting LOGNAME environment variable");
    //}

    // Set up session params
    RTPSessionParams sessionparams;
    sessionparams.SetUsePollThread(false);
    sessionparams.SetMaximumPacketSize(1400);
    sessionparams.SetAcceptOwnPackets(true);
    sessionparams.SetOwnTimestampUnit(1.0f/900000.0f);
    sessionparams.SetResolveLocalHostname(false);
    sessionparams.SetSessionBandwidth(9000000);
    sessionparams.SetReceiveMode(RTPTransmitter::AcceptAll);

    RTPUDPv4TransmissionParams transparams;
    transparams.SetPortbase(port);

    int status = session.Create(sessionparams, &transparams);
    if (status < 0) {
        throw std::runtime_error("Error creating RTP session");
    }

    // Join multicast groups if they are specified
    if (multicastIP.size() > 0) {
        if (!session.SupportsMulticasting()) {
            throw std::runtime_error("Multicast not supported!");
        } else {
            int joinip = ntohl(inet_addr(multicastIP.c_str()));
            RTPIPv4Address joinaddr(joinip, port);
            int jstatus = session.JoinMulticastGroup(joinaddr);
            if (jstatus < 0) {
                throw std::runtime_error("Unable to join multicast group");
            }
        }
    }

    return session;
}
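// Because SetUsePollThread(false) is used above, the session will not process incoming
// data on its own: the caller has to call Poll() periodically (and do the usual
// BeginDataAccess()/EndDataAccess() packet handling). A minimal sketch of that driving
// loop; the function name is hypothetical and packet handling is omitted.
static void pollLoop(RTPSession &session, volatile bool &running)
{
    while (running)
    {
        int status = session.Poll();             // handle incoming RTP/RTCP traffic
        if (status < 0)
        {
            std::cerr << RTPGetErrorString(status) << std::endl;
            return;
        }
        RTPTime::Wait(RTPTime(0, 10000));         // wait 10 ms between polls
    }
}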
int main(void) { #ifdef NEED_PA_INIT std::string errStr; if (!MIPPAInputOutput::initializePortAudio(errStr)) { std::cerr << "Can't initialize PortAudio: " << errStr << std::endl; return -1; } #endif // NEED_PA_INIT #ifdef WIN32 WSADATA dat; WSAStartup(MAKEWORD(2,2),&dat); #endif // WIN32 MIPTime interval(0.020); // We'll use 20 millisecond intervals. MIPAverageTimer timer(interval); MIPWAVInput sndFileInput; MIPSamplingRateConverter sampConv, sampConv2; MIPSampleEncoder sampEnc, sampEnc2, sampEnc3; MIPULawEncoder uLawEnc; MIPRTPULawEncoder rtpEnc; MIPRTPComponent rtpComp; MIPRTPDecoder rtpDec; MIPRTPULawDecoder rtpULawDec; MIPULawDecoder uLawDec; MIPAudioMixer mixer; MIPComponentAlias rtpCompAlias(&rtpComp); ToggleOutputComponent sndToggleComponent(&sndFileInput); #ifdef MIPCONFIG_SUPPORT_WINMM MIPWinMMOutput sndCardOutput; #else #ifdef MIPCONFIG_SUPPORT_OSS MIPOSSInputOutput sndCardOutput; #else MIPPAInputOutput sndCardOutput; #endif #endif MyChain chain("Sound file player"); RTPSession rtpSession; bool returnValue; // We'll open the file 'soundfile.wav'. returnValue = sndFileInput.open("soundfile.wav", interval); checkError(returnValue, sndFileInput); // We'll convert to a sampling rate of 8000Hz and mono sound. int samplingRate = 8000; int numChannels = 1; returnValue = sampConv.init(samplingRate, numChannels); checkError(returnValue, sampConv); // Initialize the sample encoder: the RTP U-law audio encoder // expects native endian signed 16 bit samples. returnValue = sampEnc.init(MIPRAWAUDIOMESSAGE_TYPE_S16); checkError(returnValue, sampEnc); // Convert samples to U-law encoding returnValue = uLawEnc.init(); checkError(returnValue, uLawEnc); // Initialize the RTP audio encoder: this component will create // RTP messages which can be sent to the RTP component. returnValue = rtpEnc.init(); checkError(returnValue, rtpEnc); // We'll initialize the RTPSession object which is needed by the // RTP component. RTPUDPv4TransmissionParams transmissionParams; RTPSessionParams sessionParams; int portBase = 60000; int status; transmissionParams.SetPortbase(portBase); sessionParams.SetOwnTimestampUnit(1.0/((double)samplingRate)); sessionParams.SetMaximumPacketSize(64000); sessionParams.SetAcceptOwnPackets(true); status = rtpSession.Create(sessionParams,&transmissionParams); checkError(status); // Instruct the RTP session to send data to ourselves. status = rtpSession.AddDestination(RTPIPv4Address(ntohl(inet_addr("127.0.0.1")),portBase)); checkError(status); // Tell the RTP component to use this RTPSession object. returnValue = rtpComp.init(&rtpSession, 160); // 20ms at 8000Hz = 160 samples per RTP packet checkError(returnValue, rtpComp); // Initialize the RTP audio decoder. returnValue = rtpDec.init(true, 0, &rtpSession); checkError(returnValue, rtpDec); // Register the U-law decoder for payload type 0 returnValue = rtpDec.setPacketDecoder(0,&rtpULawDec); checkError(returnValue, rtpDec); // Convert U-law encoded samples to linear encoded samples returnValue = uLawDec.init(); checkError(returnValue, uLawDec); // Transform the received audio data to floating point format. returnValue = sampEnc2.init(MIPRAWAUDIOMESSAGE_TYPE_FLOAT); checkError(returnValue, sampEnc2); // We'll make sure that received audio frames are converted to the right // sampling rate. returnValue = sampConv2.init(samplingRate, numChannels); checkError(returnValue, sampConv2); // Initialize the mixer. returnValue = mixer.init(samplingRate, numChannels, interval); checkError(returnValue, mixer); // Initialize the soundcard output. 
returnValue = sndCardOutput.open(samplingRate, numChannels, interval); checkError(returnValue, sndCardOutput); #ifdef MIPCONFIG_SUPPORT_WINMM // The WinMM output component uses signed little endian 16 bit samples. returnValue = sampEnc3.init(MIPRAWAUDIOMESSAGE_TYPE_S16LE); #else #ifdef MIPCONFIG_SUPPORT_OSS // The OSS component can use several encoding types. We'll ask // the component to which format samples should be converted. returnValue = sampEnc3.init(sndCardOutput.getRawAudioSubtype()); #else // The PortAudio output component uses signed 16 bit samples returnValue = sampEnc3.init(MIPRAWAUDIOMESSAGE_TYPE_S16); #endif #endif checkError(returnValue, sampEnc3); // Next, we'll create the chain returnValue = chain.setChainStart(&timer); checkError(returnValue, chain); returnValue = chain.addConnection(&timer, &sndToggleComponent); checkError(returnValue, chain); returnValue = chain.addConnection(&sndToggleComponent, &sampConv); checkError(returnValue, chain); returnValue = chain.addConnection(&sampConv, &sampEnc); checkError(returnValue, chain); returnValue = chain.addConnection(&sampEnc, &uLawEnc); checkError(returnValue, chain); returnValue = chain.addConnection(&uLawEnc, &rtpEnc); checkError(returnValue, chain); returnValue = chain.addConnection(&rtpEnc, &rtpComp); checkError(returnValue, chain); returnValue = chain.addConnection(&timer, &rtpCompAlias); checkError(returnValue, chain); returnValue = chain.addConnection(&rtpCompAlias, &rtpDec); checkError(returnValue, chain); // This is where the feedback chain is specified: we want // feedback from the mixer to reach the RTP audio decoder, // so we'll specify that over the links in between, feedback // should be transferred. returnValue = chain.addConnection(&rtpDec, &uLawDec, true); checkError(returnValue, chain); returnValue = chain.addConnection(&uLawDec, &sampEnc2, true); checkError(returnValue, chain); returnValue = chain.addConnection(&sampEnc2, &sampConv2, true); checkError(returnValue, chain); returnValue = chain.addConnection(&sampConv2, &mixer, true); checkError(returnValue, chain); returnValue = chain.addConnection(&mixer, &sampEnc3); checkError(returnValue, chain); returnValue = chain.addConnection(&sampEnc3, &sndCardOutput); checkError(returnValue, chain); // Start the chain returnValue = chain.start(); checkError(returnValue, chain); // We'll wait until enter is pressed int num = 10; for (int i = 0 ; i < num ; i++) { std::cout << "iteration " << (i+1) << "/" << num << std::endl; std::cout << "Press enter for silence" << std::endl; getc(stdin); sndToggleComponent.lock(); sndToggleComponent.setEnabled(false); sndToggleComponent.unlock(); std::cout << "Press enter for sound" << std::endl; getc(stdin); sndToggleComponent.lock(); sndToggleComponent.setEnabled(true); sndToggleComponent.unlock(); } returnValue = chain.stop(); checkError(returnValue, chain); rtpSession.Destroy(); // We'll let the destructors of the other components take care // of their de-initialization. sndCardOutput.close(); // In case we're using PortAudio #ifdef NEED_PA_INIT MIPPAInputOutput::terminatePortAudio(); #endif // NEED_PA_INIT #ifdef WIN32 WSACleanup(); #endif return 0; }
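// checkError() is used throughout the two emiplib examples in this section but never
// shown. A minimal sketch modelled on the helpers in the emiplib sample programs; the
// samples also provide an overload taking the component chain, which is omitted here.
void checkError(bool returnValue, const MIPComponent &component)
{
    if (returnValue)
        return;
    std::cerr << "An error occurred in component: " << component.getComponentName() << std::endl;
    std::cerr << "Error description: " << component.getErrorString() << std::endl;
    exit(-1);
}

void checkError(int status)
{
    if (status >= 0)
        return;
    std::cerr << "An RTP error occurred: " << RTPGetErrorString(status) << std::endl;
    exit(-1);
}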
int main(int argc, char ** argv) { uint16_t portbase,destport; uint32_t destip; std:string ipstr; //get the destiny ip from the std input stream // NALU_t *n; printf("Please Enter:./test framenum(500)\n"); if(argc!=2) { printf("Please Enter:./test framenum\n"); exit(EXIT_FAILURE); } //dev_name=argv[1]; frame_num=atoi(argv[1]); //convert a string to an integer if(!frame_num) { printf("frame_num should be over 0 frame\n"); exit(EXIT_FAILURE); } // @0506 here ,there is no need to test YUYV | YUV420 | tmpH264 , for Module Test //openFiles(); //***************************************************************************** //****************** the capturing process *****************start********* printf("capture video begin\n"); open_device(); init_device(); start_capturing(); //fp = fopen(filename, "wa+"); //yuv422 // *********** //alloc_image(); // *********** // *********** //open_x264file("test420.yuv"); init_encoder(); init_picture(yuv_size_2); // *********** portbase = 33334; destport = 9000; //ipstr = "222.197.174.76"; ipstr = "202.115.11.128"; destip = inet_addr(ipstr.c_str()); //destip = ntohl(destip); ///************** very important ************** destip = htonl(destip); //********************** get the rtp parameter from std input stream ****above *** // @2 Setting basic parameter ---------------------------------- RTPUDPv4TransmissionParams transparams; transparams.SetPortbase(portbase); RTPSessionParams sessparams; sessparams.SetOwnTimestampUnit(1.0/90000.0); status=sess.Create(sessparams,&transparams); checkerror(status); RTPIPv4Address addr(destip,destport); status = sess.AddDestination(addr); checkerror(status); sess.SetDefaultPayloadType(96); sess.SetDefaultMark(false); sess.SetDefaultTimestampIncrement(90000.0/10.0); // frame rate control 10 / 25 RTPTime delay(0.040);//RTPTime delay(0.040) //RTPTime::Wait(delay); RTPTime starttime=RTPTime::CurrentTime(); // // @3.0 Preparation for sending NALU package---------------------------------------------------- // char sendbuf[1500]; // char *nalu_payload; // int size=0; // unsigned int timestamp_increase=0,ts_current=0; // OpenBitstreamFile("test.h264"); // n=AllocNALU(8000000);// alloc memory for the <struct NALU> n->maxSize = 8_000_000 = 8 MB // int start=false; // //**************************************************************** //****************** the capturing process ********************end ****** //****************** x264 encode ******************************* start *** // open_yuvfile(filename); // alloc_image(); // swscale_start(); // ****** yuv422 -> I420 ******* // swscale_close(); // open_x264file(dst_filename); // init_encoder(); // init_picture(yuv_size); // encode_frame(); // **** encode x264 format *** // close_encoder(); //****************** x264 encode ******************************* end *** //----------------------------------------------------------- //@1223--01 struct timeval tpstart,tpend; float timeuse; gettimeofday(&tpstart,NULL); ///------ implement capture the pictures next line ----------------------------- tmpFP264 = fopen("softEncodeH264.h264","wb"); mainloop(frame_num); // **** capture frame ***** fclose(tmpFP264); ///----------------------------------------------------------------------------- gettimeofday(&tpend,NULL); timeuse=1000000 * (tpend.tv_sec-tpstart.tv_sec) + tpend.tv_usec - tpstart.tv_usec; timeuse/=1000000; printf("timeuse total Used Time( Second ):%f\n",timeuse); //@1223--02 //----------------------------------------------------------- stop_capturing(); uninit_device(); close_device(); 
close_encoder(); return 0; }
void Java_cn_nickwar_MainActivity_nativeWorker(JNIEnv* env, jobject obj)
{
    uint16_t portbase = 8000, destport = 9000;
    std::string ipstr = "192.168.1.102";
    uint32_t destip = inet_addr(ipstr.c_str());
    int status, i, num;
    RTPSession session;
    RTPSessionParams sessionparams;
    RTPUDPv4TransmissionParams transparams;
    RTPIPv4Address addr;

    if (destip == INADDR_NONE) {
        __android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "Bad IP address specified");
    }
    destip = ntohl(destip);

    num = 40;
    sessionparams.SetOwnTimestampUnit(1.0/10.0);
    sessionparams.SetAcceptOwnPackets(true);
    transparams.SetPortbase(portbase);
    addr.SetIP(destip);
    addr.SetPort(destport);

    status = session.Create(sessionparams, &transparams);
    if (status < 0) {
        std::string tmp = "Create:";
        __android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "%s", (tmp + RTPGetErrorString(status)).c_str());
    }

    status = session.AddDestination(addr);
    if (status < 0) {
        std::string tmp = "AddDestination:";
        __android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "%s", (tmp + RTPGetErrorString(status)).c_str());
    }

    while (!m_bExitApp) {
        session.BeginDataAccess();

        unsigned char *buff = NULL;
        if (session.GotoFirstSourceWithData()) {
            do {
                RTPPacket *pack;
                while ((pack = session.GetNextPacket()) != NULL) {
                    __android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "got packet!\n");

                    char message[26];
                    sprintf(message, "got packet");
                    jstring messageString = env->NewStringUTF(message);
                    env->CallVoidMethod(obj, rtpresultFromJNI, messageString);
                    if (NULL != env->ExceptionOccurred()) {
                        // break;
                        continue;
                    }

                    if (pack->GetPayloadLength() > 0) {
                        buff = pack->GetPayloadData();
                        // The payload is not NUL-terminated, so log its length rather
                        // than treating it as a C string.
                        __android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "packet data: %d bytes",
                                            (int)pack->GetPayloadLength());
                    }
                    session.DeletePacket(pack);
                }
            } while (session.GotoNextSourceWithData());
        }
        session.EndDataAccess();

// #ifndef RTP_SUPPORT_THREAD
        status = session.Poll();
        if (status < 0) {
            session.Destroy();
            return;
        }
// #endif

        RTPTime::Wait(RTPTime(0, 5000));
    }

    session.Destroy();
    return;
}
int main(int argc, char ** argv) { uint16_t portbase,destport; uint32_t destip; std:string ipstr; //get the destiny ip from the std input stream // NALU_t *n; printf("Please Enter:./test framenum(500)\n"); if(argc!=2) { printf("Please Enter:./test framenum\n"); exit(EXIT_FAILURE); } //dev_name=argv[1]; frame_num=atoi(argv[1]); //convert a string to an integer if(!frame_num) { printf("frame_num should be over 0 frame\n"); exit(EXIT_FAILURE); } // @0506 here ,there is no need to test YUYV | YUV420 | tmpH264 , for Module Test //openFiles(); //***************************************************************************** //****************** the capturing process *****************start********* printf("capture video begin\n"); open_device(); init_device(); start_capturing(); //fp = fopen(filename, "wa+"); //yuv422 // *********** //alloc_image(); // *********** // *********** //open_x264file("test420.yuv"); init_encoder(); init_picture(yuv_size_2); // *********** portbase = 33334; destport = 9000; //ipstr = "222.197.174.76"; // @ 1204 change the ip to the LAN IP //ipstr = "202.115.11.128"; ipstr = "192.168.5.155"; destip = inet_addr(ipstr.c_str()); //destip = ntohl(destip); ///************** very important ************** destip = htonl(destip); //********************** get the rtp parameter from std input stream ****above *** // @2 Setting basic parameter ---------------------------------- RTPUDPv4TransmissionParams transparams; transparams.SetPortbase(portbase); RTPSessionParams sessparams; sessparams.SetOwnTimestampUnit(1.0/90000.0); status=sess.Create(sessparams,&transparams); checkerror(status); RTPIPv4Address addr(destip,destport); status = sess.AddDestination(addr); checkerror(status); sess.SetDefaultPayloadType(96); sess.SetDefaultMark(false); sess.SetDefaultTimestampIncrement(90000.0/10.0); // frame rate control 10 / 25 RTPTime delay(0.040);//RTPTime delay(0.040) //RTPTime::Wait(delay); RTPTime starttime=RTPTime::CurrentTime(); // // @3.0 Preparation for sending NALU package---------------------------------------------------- // char sendbuf[1500]; // char *nalu_payload; // int size=0; // unsigned int timestamp_increase=0,ts_current=0; // OpenBitstreamFile("test.h264"); // n=AllocNALU(8000000);// alloc memory for the <struct NALU> n->maxSize = 8_000_000 = 8 MB // int start=false; // //**************************************************************** mainloop(frame_num); // **** capture frame ***** // fclose(fp); stop_capturing(); uninit_device(); close_device(); close_encoder(); printf("capture video is over\n"); //****************** the capturing process ********************end ****** //****************** x264 encode ******************************* start *** // open_yuvfile(filename); // alloc_image(); // swscale_start(); // ****** yuv422 -> I420 ******* // swscale_close(); // open_x264file(dst_filename); // init_encoder(); // init_picture(yuv_size); // encode_frame(); // **** encode x264 format *** // close_encoder(); //****************** x264 encode ******************************* end *** /////--------------------------------------------------------------------------------------------------- printf("encode x264 video is over\n"); printf("Wait to jrtpsend....\n"); //****************** RTP send *********************** start ********** // @1 Getting basic parameter ------------------------------- // std::cout<<"Enter local portbase:"<<std::endl; // std::cin>>portbase; // std::cout<<std::endl; // std::cout << "Enter the destination IP address" << std::endl; // std::cin >> ipstr; // destip 
= inet_addr(ipstr.c_str()); // if (destip == INADDR_NONE) // { // std::cerr << "Bad IP address specified" << std::endl; // return -1; // } // destip = ntohl(destip); // std::cout << "Enter the destination port" << std::endl; // std::cin >> destport; // portbase = 33333; // destport = 55555; // ipstr = "222.197.174.76"; // destip = inet_addr(ipstr.c_str()); // //********************** get the rtp parameter from std input stream ****above *** // // @2 Setting basic parameter ---------------------------------- // RTPUDPv4TransmissionParams transparams; // transparams.SetPortbase(portbase); // RTPSessionParams sessparams; // sessparams.SetOwnTimestampUnit(1.0/90000.0); // status=sess.Create(sessparams,&transparams); // checkerror(status); // RTPIPv4Address addr(destip,destport); // status = sess.AddDestination(addr); // checkerror(status); // sess.SetDefaultPayloadType(96); // sess.SetDefaultMark(false); // sess.SetDefaultTimestampIncrement(90000.0/10.0); // RTPTime delay(0.030);//RTPTime delay(0.040) // //RTPTime::Wait(delay); // RTPTime starttime=RTPTime::CurrentTime(); // // @3.0 Preparation for sending NALU package---------------------------------------------------- // char sendbuf[1500]; // char *nalu_payload; // int size=0; // unsigned int timestamp_increase=0,ts_current=0; // OpenBitstreamFile("test.h264"); // n=AllocNALU(8000000);// alloc memory for the <struct NALU> n->maxSize = 8_000_000 = 8 MB // int start=false; // NALU packages sending loop ----------------------------------- // while(!feof(bits)) // { // size=GetAnnexbNALU(n); // having original souce for this function // if(size<4) // { // printf("get nalu error!\n"); // continue; // } // printf("size:%d\n",size); // dump(n); // if(!start) // { // if(n->nal_unit_type==1||n->nal_unit_type==5||n->nal_unit_type==6||n->nal_unit_type==7) // { // printf("begin\n"); // start=true; // } // } // if(n->len <= MAX_RTP_PKT_LENGTH) // { // nalu_hdr=(NALU_HEADER *)&sendbuf[0]; // nalu_hdr->F=n->forbidden_bit; // nalu_hdr->NRI=n->nal_reference_idc>>5; // nalu_hdr->TYPE=n->nal_unit_type; // nalu_payload=&sendbuf[1]; // memcpy(nalu_payload,n->buf+1,n->len-1); // ts_current=ts_current+timestamp_increase; // if(n->nal_unit_type==1||n->nal_unit_type==5) // { // status=sess.SendPacket((void *)sendbuf,n->len,96,true,3600); // } // else // { // status=sess.SendPacket((void *)sendbuf,n->len,96,true,0); // continue; // } // checkerror(status); // } // else if(n->len > MAX_RTP_PKT_LENGTH) // { // int k=0,l=0; // k=n->len/MAX_RTP_PKT_LENGTH; // l=n->len%MAX_RTP_PKT_LENGTH; // int t=0; // while(t<=k) // { // if(!t)//first pkt package // { // memset(sendbuf,0,1500); // fu_ind=(FU_INDICATOR *)&sendbuf[0]; // fu_ind->F=n->forbidden_bit; // fu_ind->NRI=n->nal_reference_idc>>5; // fu_ind->TYPE=28;//FU-A // fu_hdr=(FU_HEADER *)&sendbuf[1]; // fu_hdr->E=0; // fu_hdr->R=0; // fu_hdr->S=1; // fu_hdr->TYPE=n->nal_unit_type; // nalu_payload=&sendbuf[2]; // memcpy(nalu_payload,n->buf+1,MAX_RTP_PKT_LENGTH); // status=sess.SendPacket((void *)sendbuf,MAX_RTP_PKT_LENGTH+2,96,false,0); // checkerror(status); // t++; // } // else if(t==k)//last package // { // memset(sendbuf,0,1500); // fu_ind=(FU_INDICATOR *)&sendbuf[0]; // fu_ind->F=n->forbidden_bit; // fu_ind->NRI=n->nal_reference_idc>>5; // fu_ind->TYPE=28;//FU-A // fu_hdr=(FU_HEADER *)&sendbuf[1]; // fu_hdr->R=0; // fu_hdr->S=0; // fu_hdr->E=1; // fu_hdr->TYPE=n->nal_unit_type; // nalu_payload=&sendbuf[2]; // memcpy(nalu_payload,n->buf+t*MAX_RTP_PKT_LENGTH+1,l-1); // status=sess.SendPacket((void 
*)sendbuf,l+1,96,true,3600); // checkerror(status); // t++; // } // else if( (t<k) && (t!=0) ) //packageS between the first and the last // { // memset(sendbuf,0,1500); // fu_ind=(FU_INDICATOR *)&sendbuf[0]; // fu_ind->F=n->forbidden_bit; // fu_ind->NRI=n->nal_reference_idc>>5; // fu_ind->TYPE=28;//FU-A // fu_hdr=(FU_HEADER *)&sendbuf[1]; // fu_hdr->R=0; // fu_hdr->S=0; // fu_hdr->E=0;//E=1 // fu_hdr->TYPE=n->nal_unit_type; // nalu_payload=&sendbuf[2]; // memcpy(nalu_payload,n->buf+t*MAX_RTP_PKT_LENGTH+1,MAX_RTP_PKT_LENGTH); // status=sess.SendPacket((void *)sendbuf,MAX_RTP_PKT_LENGTH+2,96,false,0); // checkerror(status); // t++; // } // } // } // RTPTime::Wait(delay);//Wait(delay); // RTPTime time=RTPTime::CurrentTime(); // time-=starttime; // if(time>RTPTime(60.0))break; // } // // @4 Finish sending NALU package // printf("Mission over\n"); // delay=RTPTime(10.0); // sess.BYEDestroy(delay,"Time's up",9); return 0; }
int RTPSession::InternalCreate(const RTPSessionParams &sessparams) { int status; // Initialize packet builder if ((status = packetbuilder.Init(maxpacksize)) < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); return status; } if (sessparams.GetUsePredefinedSSRC()) packetbuilder.AdjustSSRC(sessparams.GetPredefinedSSRC()); #ifdef RTP_SUPPORT_PROBATION // Set probation type sources.SetProbationType(sessparams.GetProbationType()); #endif // RTP_SUPPORT_PROBATION // Add our own ssrc to the source table if ((status = sources.CreateOwnSSRC(packetbuilder.GetSSRC())) < 0) { packetbuilder.Destroy(); if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); return status; } // Set the initial receive mode if ((status = rtptrans->SetReceiveMode(sessparams.GetReceiveMode())) < 0) { packetbuilder.Destroy(); sources.Clear(); if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); return status; } // Init the RTCP packet builder double timestampunit = sessparams.GetOwnTimestampUnit(); uint8_t buf[1024]; size_t buflen = 1024; std::string forcedcname = sessparams.GetCNAME(); if (forcedcname.length() == 0) { if ((status = CreateCNAME(buf,&buflen,sessparams.GetResolveLocalHostname())) < 0) { packetbuilder.Destroy(); sources.Clear(); if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); return status; } } else { strncpy((char *)buf, forcedcname.c_str(), buflen); buf[buflen-1] = 0; buflen = strlen((char *)buf); } if ((status = rtcpbuilder.Init(maxpacksize,timestampunit,buf,buflen)) < 0) { packetbuilder.Destroy(); sources.Clear(); if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); return status; } // Set scheduler parameters rtcpsched.Reset(); rtcpsched.SetHeaderOverhead(rtptrans->GetHeaderOverhead()); RTCPSchedulerParams schedparams; sessionbandwidth = sessparams.GetSessionBandwidth(); controlfragment = sessparams.GetControlTrafficFraction(); if ((status = schedparams.SetRTCPBandwidth(sessionbandwidth*controlfragment)) < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return status; } if ((status = schedparams.SetSenderBandwidthFraction(sessparams.GetSenderControlBandwidthFraction())) < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return status; } if ((status = schedparams.SetMinimumTransmissionInterval(sessparams.GetMinimumRTCPTransmissionInterval())) < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return status; } schedparams.SetUseHalfAtStartup(sessparams.GetUseHalfRTCPIntervalAtStartup()); schedparams.SetRequestImmediateBYE(sessparams.GetRequestImmediateBYE()); rtcpsched.SetParameters(schedparams); // copy other parameters acceptownpackets = sessparams.AcceptOwnPackets(); membermultiplier = sessparams.GetSourceTimeoutMultiplier(); sendermultiplier = sessparams.GetSenderTimeoutMultiplier(); byemultiplier = sessparams.GetBYETimeoutMultiplier(); collisionmultiplier = sessparams.GetCollisionTimeoutMultiplier(); notemultiplier = sessparams.GetNoteTimeoutMultiplier(); // Do thread stuff if necessary #ifdef RTP_SUPPORT_THREAD pollthread = 0; if (usingpollthread) { if (!sourcesmutex.IsInitialized()) { if (sourcesmutex.Init() < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return ERR_RTP_SESSION_CANTINITMUTEX; } } if 
(!buildermutex.IsInitialized()) { if (buildermutex.Init() < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return ERR_RTP_SESSION_CANTINITMUTEX; } } if (!schedmutex.IsInitialized()) { if (schedmutex.Init() < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return ERR_RTP_SESSION_CANTINITMUTEX; } } if (!packsentmutex.IsInitialized()) { if (packsentmutex.Init() < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return ERR_RTP_SESSION_CANTINITMUTEX; } } pollthread = RTPNew(GetMemoryManager(),RTPMEM_TYPE_CLASS_RTPPOLLTHREAD) RTPPollThread(*this,rtcpsched); if (pollthread == 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return ERR_RTP_OUTOFMEM; } if ((status = pollthread->Start(rtptrans)) < 0) { if (deletetransmitter) RTPDelete(rtptrans,GetMemoryManager()); RTPDelete(pollthread,GetMemoryManager()); packetbuilder.Destroy(); sources.Clear(); rtcpbuilder.Destroy(); return status; } } #endif // RTP_SUPPORT_THREAD created = true; return 0; }
int main(int argc, char** argv)
{
#if 0
    CRTPSender sender;
    string destip_str = "127.0.0.1";
    uint32_t dest_ip = inet_addr(destip_str.c_str());
    SetRTPParams(sender, dest_ip, DEST_PORT, BASE_PORT);
    sender.SetParamsForSendingH264();
#else
    RTPSession session;
    RTPSessionParams sessionparams;
    sessionparams.SetOwnTimestampUnit(1.0/90000.0);

    RTPUDPv4TransmissionParams transparams;
    transparams.SetPortbase(8000);
    int status = session.Create(sessionparams, &transparams);
    if (status < 0)
    {
        std::cerr << RTPGetErrorString(status) << std::endl;
        exit(-1);
    }

    uint8_t localip[] = {127,0,0,1};
    RTPIPv4Address addr(localip, 9000);
    status = session.AddDestination(addr);
    if (status < 0)
    {
        std::cerr << RTPGetErrorString(status) << std::endl;
        exit(-1);
    }

    session.SetDefaultPayloadType(96);
    session.SetDefaultMark(false);
    session.SetDefaultTimestampIncrement(90000.0/25.0);

    RTPTime delay(0.040);
    RTPTime starttime = RTPTime::CurrentTime();
#endif

    NALU_HEADER *nalu_hdr;
    FU_INDICATOR *fu_ind;
    FU_HEADER *fu_hdr;
    char sendbuf[1500];
    char* nalu_payload;
    unsigned int timestamp_increse = 0, ts_current = 0;

#define ddd
    // Open the .264 file and store the file pointer in 'bits';
    // change the file name here to open a different .264 file.
    OpenBitstreamFile("raw.264");
    NALU_t *n;
    // Allocate the NALU_t structure and its buf member; returns a pointer to the storage.
    n = AllocNALU(8000000);
    bool start = false;

    while (!feof(bits))
    {
        // Each call leaves the file pointer at the end of the NALU just found; the next
        // position is the start code (0x000001) of the following NALU.
        int size = GetAnnexbNALU(n);
        if (size < 4)
        {
            printf("get nalu error!\n");
            continue;
        }
        dump(n);  // Print the NALU length and type.
        if (!start)
        {
            if (n->nal_unit_type==5 || n->nal_unit_type==6 || n->nal_unit_type==7)
            {
                printf("begin\n");
                start = true;
            }
        }

        // Write the encoded data to a file:
        //fwrite(pNals[i].p_payload, 1, pNals[i].i_payload, pFile);

        // Send the encoded stream
#if 1
        // When a NALU is at most MAX_RTP_PKT_LENGTH bytes, send it as a single RTP packet.
        if (n->len <= MAX_RTP_PKT_LENGTH)
        {
            //printf("ddd0\n");
            //session.SetDefaultMark(false);

            // Build the NALU header in sendbuf[0]; writes through nalu_hdr go into sendbuf.
            nalu_hdr = (NALU_HEADER*)&sendbuf[0];
            nalu_hdr->F = n->forbidden_bit;
            // The two NRI bits sit above the five type bits, so shift right by 5.
            nalu_hdr->NRI = n->nal_reference_idc >> 5;
            nalu_hdr->TYPE = n->nal_unit_type;

            // Payload starts at sendbuf[1]; copy the NALU minus its one-byte header.
            nalu_payload = &sendbuf[1];
            memcpy(nalu_payload, n->buf+1, n->len-1);

            ts_current = ts_current + timestamp_increse;
            //status = session.SendPacket((void *)sendbuf,n->len);
            if (n->nal_unit_type == 1 || n->nal_unit_type == 5)
            {
                status = session.SendPacket((void *)sendbuf, n->len, 96, true, 3600);
            }
            else
            {
                // Type 6/7 NALUs (SEI/SPS) should not add a timestamp delay;
                // this caused stuttering before.
                status = session.SendPacket((void *)sendbuf, n->len, 96, true, 0);
                continue;
            }
            // Send the RTP packet with payload type 96.
            if (status < 0)
            {
                std::cerr << RTPGetErrorString(status) << std::endl;
                exit(-1);
            }
        }
        else if (n->len > MAX_RTP_PKT_LENGTH)
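        // The snippet above is truncated at the start of its fragmentation branch. Below
        // is a minimal sketch of what an RFC 6184 FU-A branch typically looks like,
        // consistent with the commented-out sender code earlier in this section; it is
        // not the original code. MAX_RTP_PKT_LENGTH, NALU_t, FU_INDICATOR and FU_HEADER
        // are assumed to match the definitions used by the surrounding snippets.
        {
            // Split the NALU payload (without its one-byte header) into FU-A fragments.
            int k = n->len / MAX_RTP_PKT_LENGTH;   // number of full-size fragments
            int l = n->len % MAX_RTP_PKT_LENGTH;   // size of the final, shorter fragment
            int t = 0;
            while (t <= k)
            {
                memset(sendbuf, 0, 1500);
                fu_ind = (FU_INDICATOR *)&sendbuf[0];
                fu_ind->F = n->forbidden_bit;
                fu_ind->NRI = n->nal_reference_idc >> 5;
                fu_ind->TYPE = 28;                 // FU-A

                fu_hdr = (FU_HEADER *)&sendbuf[1];
                fu_hdr->R = 0;
                fu_hdr->S = (t == 0) ? 1 : 0;      // start bit on the first fragment
                fu_hdr->E = (t == k) ? 1 : 0;      // end bit on the last fragment
                fu_hdr->TYPE = n->nal_unit_type;

                nalu_payload = &sendbuf[2];
                if (t < k)
                {
                    memcpy(nalu_payload, n->buf + 1 + t * MAX_RTP_PKT_LENGTH, MAX_RTP_PKT_LENGTH);
                    // Intermediate fragments: no marker bit, no timestamp increment.
                    status = session.SendPacket((void *)sendbuf, MAX_RTP_PKT_LENGTH + 2, 96, false, 0);
                }
                else
                {
                    memcpy(nalu_payload, n->buf + 1 + t * MAX_RTP_PKT_LENGTH, l - 1);
                    // Last fragment: set the marker bit and advance the timestamp.
                    status = session.SendPacket((void *)sendbuf, l + 1, 96, true, 3600);
                }
                if (status < 0)
                {
                    std::cerr << RTPGetErrorString(status) << std::endl;
                    exit(-1);
                }
                t++;
            }
        }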
bool MediaStream::start(QHostAddress ip, int port, int localPort, int codecPayload ) { if ( isRunning() ) stop(); printf("%s %d | %d | %d\n",ip.toString().toAscii().data(), port, localPort, codecPayload); if(d->processThread == NULL) d->processThread = new MediaThread(this); d->outBufferPos = 0; d->outBufferTime = 0; // int localPort = 3000; printf("getFactory(%d)\n", codecPayload); VoiceCodecFactory *factory = CodecsManager::instance()->codecFactory(codecPayload); if ( !factory ) { printf("VoiceCodecFactory not found!\n"); return true; } d->codecPayload = codecPayload; d->decoder = factory->decoder(); d->encoder = factory->encoder(); printf("transparams\n"); // Now, we'll create a RTP session and set the destination // d->transparams.mediaChannel = mediaChannel; // d->transparams.incomingPackets = incomingPackets; d->transparams.SetPortbase(localPort); RTPSessionParams sessparams; sessparams.SetReceiveMode(RTPTransmitter::AcceptAll); sessparams.SetOwnTimestampUnit(1.0/8000.0); // 8KHz sessparams.SetAcceptOwnPackets(true); printf("session.Create()\n"); int status = d->session.Create( sessparams, &d->transparams, RTPTransmitter::SynapseProto ); if ( status<0 ) { qDebug("can't create RTP session, %s", RTPGetErrorString(status).c_str() ); d->session.Destroy(); return false; } printf("session.AddDestination()\n"); RTPIPv4Address addr(ip.toIPv4Address(),port); status = d->session.AddDestination(addr); if ( status<0 ) { qDebug("can't add rtp destination, %s", RTPGetErrorString(status).c_str() ); d->session.Destroy(); return false; } d->session.SetDefaultPayloadType(codecPayload); d->session.SetDefaultMark(false); d->session.SetDefaultTimestampIncrement(160); //initialise audio status = Pa_Initialize(); //////////////////// FOR TESTING if( status != paNoError ) { qDebug( "PortAudio error: %s", Pa_GetErrorText(status) ); // stop(); // return true; } if(status == paNoError) { status = Pa_OpenDefaultStream( &d->audioStream,/* passes back stream pointer */ 1, /* 1 input channel */ 1, /* mono output */ paInt16, /* 16 bit fixed point output */ 8000, /* sample rate */ 240, /* frames per buffer */ 16, /* number of buffers, if zero then use default minimum */ audioCallback, /* specify our custom callback */ d ); /* pass our data through to callback */ status = Pa_StartStream( d->audioStream ); if( status != paNoError ) { qDebug( "PortAudio error: %s", Pa_GetErrorText(status) ); // stop(); // return true; } } // put something to dsp buffer /* char emptyData[160*8]; memset( emptyData, 1, sizeof(emptyData) ); d->dspBuffer->lock(); d->dspBuffer->put( emptyData, sizeof(emptyData) ); d->dspBuffer->unlock(); */ //d->timer.start(1,false); d->isRunning = true; d->processThread->start(); // qDebug("mediastream started"); printf("mediastream started\n"); return true; } // }}}
void SipClient::OnCallStarted() { if ( IsWorking() ) { DEBUG_INFO("SipUADemo::OnCallStarted..."); #ifdef WIN32 WSADATA dat; int iWSRet = WSAStartup(MAKEWORD(2,2),&dat); ASSERT(iWSRet == 0); #endif // WIN32 { VideoCapture cap; if (cap.EnumDevices(false) > 0) m_bCaptureAudio = true; VIDEOSAMPLEINFO vsiLocal; VIDEOFORMATINFO vfiRemote; int nVideoDevices = cap.EnumDevices(); HRESULT hr = E_FAIL; if (nVideoDevices > 0) hr = cap.GetPreviewInfo(nVideoDevices-1, vsiLocal); SPRINTF_S(dbg_str, "Video device detecting: count:%d, HRES:%X" , nVideoDevices , hr); DEBUG_INFO(dbg_str); //if ( cap.EnumDevices() > 0 // && SUCCEEDED(cap.GetPreviewInfo(cap.EnumDevices()-1, vsiLocal)) ) if ( SUCCEEDED(hr) ) { m_bCaptureVideo = true; setVideoSampleInfo(vsiLocal); //TODO: support codecoder selection (for h264, etc.) vfiRemote = VIDEOFORMATINFO( CODEC_FORMAT, H264_WIDTH, H264_HEIGHT, vsiLocal.m_AvgTimePerFrame); setRemoteVideoFormatInfo(vfiRemote); } else { //TODO: support codecoder selection (for h264, etc.) vfiRemote = VIDEOFORMATINFO( CODEC_FORMAT, H264_WIDTH, H264_HEIGHT, FRAMES_PER_SECOND); setRemoteVideoFormatInfo(vfiRemote); } } #ifdef RTP_AUDIO_SENDRECV //Sender { if (m_bCaptureAudio) { SPRINTF_S(dbg_str, "Send Audio to [IP]=%s, [Port]=%s" //", [BasePort]=%s" , GetRemoteAudioIP().c_str() , GetRemoteAudioPort().c_str() //, GetAudioSendPort().c_str() ); DEBUG_INFO(dbg_str); uint16_t portbase,destport; uint32_t destip; std::string ipstr; int status; //ASSERT(GetAudioSendPort().length() > 0); portbase = 6666 + rand() % 6666;//atoi(GetAudioSendPort().c_str()); if (portbase % 2 == 1) portbase += 1; // destination IP address ipstr = GetRemoteAudioIP(); ASSERT(ipstr.length() > 0); destip = inet_addr(ipstr.c_str()); if (destip == INADDR_NONE) { DEBUG_INFO("Bad IP address specified"); DebugBreak(); } // The inet_addr function returns a value in network byte order, but // we need the IP address in host byte order, so we use a call to ntohl destip = ntohl(destip); // destination port ASSERT(GetRemoteAudioPort().length() > 0); destport = atoi(GetRemoteAudioPort().c_str()); // Now, we'll create a RTP session, set the destination, send some // packets and poll for incoming data. 
RTPUDPv4TransmissionParams transparams; RTPSessionParams sessparams; // IMPORTANT: The local timestamp unit MUST be set, otherwise // RTCP Sender Report info will be calculated wrong // In this case, we'll be sending 10 samples each second, so we'll // put the timestamp unit to (1.0/10.0) //sessparams.SetOwnTimestampUnit(1.0/8000.0); sessparams.SetOwnTimestampUnit(TIMESTAMP_INC_UNIT); //sessparams.SetAcceptOwnPackets(true); transparams.SetPortbase(portbase); m_pAudioSender = std::shared_ptr<AudioSendSession>( new AudioSendSession() ); status = m_pAudioSender->Create(sessparams,&transparams); checkRtpError(status); RTPIPv4Address addr(destip,destport); status = m_pAudioSender->AddDestination(addr); checkRtpError(status); #ifdef ENABLE_AUDIO_AEC ::SetMicQueue(m_pAudioSender->getMicQueue()); ::SetOutQueue(m_pAudioSender->getPackageQueue()); if (::GetUseAEC()) { ::SetRefQueue(m_pAudioSender->getRefQueue()); } else { ::SetRefQueue(NULL); } #endif//ENABLE_AUDIO_AEC m_pAudioSender->Start(); } else { #ifdef ENABLE_AUDIO_AEC if (::GetUseAEC()) { ::SetUseAEC(false); DEBUG_INFO(L"AEC DIS-ABLED: no device for recording."); } ::SetMicQueue(m_pAudioSender->getMicQueue()); ::SetOutQueue(m_pAudioSender->getPackageQueue()); ::SetRefQueue(NULL); #endif//ENABLE_AUDIO_AEC } } //Receiver { SPRINTF_S(dbg_str, "Receive Audio At [BasePort]=%s", GetAudioReceivePort().c_str()); DEBUG_INFO(dbg_str); // Setup receiver session uint16_t portbase; int status; RTPUDPv4TransmissionParams transparams; RTPSessionParams sessparams; portbase = atoi(GetAudioReceivePort().c_str()); //SPRINTF_S(dbg_str, "Port Base: %d", // portbase); //DEBUG_INFO(dbg_str); //TODO: portbase = atoi(GetAudioReceivePort().c_str()); // IMPORTANT: The local timestamp unit MUST be set, otherwise // RTCP Sender Report info will be calculated wrong // In this case, we'll be just use 8000 samples per second. //sessparams.SetOwnTimestampUnit(1.0/8000.0); sessparams.SetOwnTimestampUnit(TIMESTAMP_INC_UNIT); transparams.SetPortbase(portbase); m_pAudioReceiver = std::shared_ptr<AudioReceiveSession>( new AudioReceiveSession() ); status = m_pAudioReceiver->Create(sessparams, &transparams); checkRtpError(status); m_pAudioReceiver->Start(); } #endif//RTP_AUDIO_SENDRECV #ifdef RTP_VIDEO_SENDER //Sender { if (m_bCaptureVideo) { SPRINTF_S(dbg_str, "Send Video to [IP]=%s, [Port]=%s" //", [BasePort]=%s" , GetRemoteVideoIP().c_str() , GetRemoteVideoPort().c_str() //, GetVideoSendPort().c_str() ); DEBUG_INFO(dbg_str); uint16_t portbase,destport; uint32_t destip; std::string ipstr; int status; //TODO: portbase = 8888 + rand() % 8888;//atoi(GetVideoReceivePort().c_str()); if (portbase % 2 == 1) portbase += 1; // destination IP address //ASSERT(GetRemoteIP().length() > 0); //TODO: //ipstr = "127.0.0.1";//GetRemoteIP(); //ipstr = "10.148.206.29"; //ipstr = "10.148.206.93"; ipstr = GetRemoteVideoIP(); destip = inet_addr(ipstr.c_str()); if (destip == INADDR_NONE) { DEBUG_INFO("Bad IP address specified"); DebugBreak(); } // The inet_addr function returns a value in network byte order, but // we need the IP address in host byte order, so we use a call to ntohl destip = ntohl(destip); // destination port //ASSERT(GetRemotePort().length() > 0); //TODO: destport = atoi(GetRemoteVideoPort().c_str()); // Now, we'll create a RTP session, set the destination, send some // packets and poll for incoming data. 
RTPUDPv4TransmissionParams transparams; RTPSessionParams sessparams; // IMPORTANT: The local timestamp unit MUST be set, otherwise // RTCP Sender Report info will be calculated wrong // In this case, we'll be sending 10 samples each second, so we'll // put the timestamp unit to (1.0/10.0) //sessparams.SetOwnTimestampUnit(1.0/8000.0); sessparams.SetOwnTimestampUnit(TIMESTAMP_INC_UNIT); //sessparams.SetAcceptOwnPackets(true); transparams.SetPortbase(portbase); m_pVideoSender = std::shared_ptr<VideoSendSession>( new VideoSendSession() ); status = m_pVideoSender->Create(sessparams,&transparams); checkRtpError(status); RTPIPv4Address addr(destip,destport); status = m_pVideoSender->AddDestination(addr); checkRtpError(status); //VIDEOSAMPLEINFO vsiLocal; //if ( SUCCEEDED(cap.GetPreviewInfo(cap.EnumDevices()-1, vsiLocal)) ) //{ // setVideoSampleInfo(vsiLocal); // //TODO: support codecoder selection (for h264, etc.) // vfiRemote = VIDEOFORMATINFO( // CODEC_FORMAT, // H264_WIDTH, // H264_HEIGHT, // vsiLocal.m_AvgTimePerFrame); // setRemoteVideoFormatInfo(vfiRemote); ASSERT(m_wndPreview != NULL); m_pVideoSender->SetPreviewWindow(m_wndPreview); m_pVideoSender->Start(); //} } } #endif//RTP_VIDEO_SENDER #ifdef RTP_VIDEO_RECEIVER //Receiver { SPRINTF_S(dbg_str, "Receive Video At [BasePort]=%s" , GetVideoReceivePort().c_str()); DEBUG_INFO(dbg_str); // Setup receiver session uint16_t portbase; int status; RTPUDPv4TransmissionParams transparams; RTPSessionParams sessparams; //TODO: portbase = atoi(GetVideoReceivePort().c_str()); //SPRINTF_S(dbg_str, "Port Base: %d", // portbase); //DEBUG_INFO(dbg_str); // IMPORTANT: The local timestamp unit MUST be set, otherwise // RTCP Sender Report info will be calculated wrong // In this case, we'll be just use 8000 samples per second. //sessparams.SetOwnTimestampUnit(1.0/8000.0); sessparams.SetOwnTimestampUnit(TIMESTAMP_INC_UNIT); transparams.SetPortbase(portbase); m_pVideoReceiver = std::shared_ptr<VideoReceiveSession>( new VideoReceiveSession() ); status = m_pVideoReceiver->Create(sessparams, &transparams); checkRtpError(status); //VideoCapture cap; //if (cap.EnumDevices() > 0 ) //{ // // set remote video format already //} //else //{ // //TODO: support codecoder selection (for h264, etc.) // vfiRemote = VIDEOFORMATINFO( // CODEC_FORMAT, // H264_WIDTH, // H264_HEIGHT, // FRAMES_PER_SECOND); // setRemoteVideoFormatInfo(vfiRemote); //} ASSERT(m_wndRemote != NULL); m_pVideoReceiver->SetRemoteWindow(m_wndRemote); m_pVideoReceiver->Start(); } #endif//RTP_VIDEO_RECEIVER if (this->EnableEvents()) mListener->Callback(SIPUA_CALLSTARTED); DEBUG_INFO("SipUADemo::OnCallStarted DONE"); } }
int main(void) { int packetsPerSecond = 100; MIPTime interval(1.0/(double)packetsPerSecond); // We'll use 10 millisecond intervals. MIPAverageTimer timer(interval); MIPOSCInput oscInput; MIPOSCEncoder oscEnc; MIPRTPOSCEncoder rtpEnc; MIPRTPComponent rtpComp; MIPRTPDecoder rtpDec; MIPRTPOSCDecoder rtpOSCDec; MIPOSCDecoder oscDec; MIPOSCOutput oscOutput; MyChain chain("OSC Sender"); RTPSession rtpSession; bool returnValue; // Convert Messages to MIPOSCMessages returnValue = oscEnc.init(); checkError(returnValue, oscEnc); // Initialize the RTP OSC encoder: this component will create // RTP messages which can be sent to the RTP component. returnValue = rtpEnc.init(); checkError(returnValue, rtpEnc); // We'll initialize the RTPSession object which is needed by the // RTP component. RTPUDPv4TransmissionParams transmissionParams; RTPSessionParams sessionParams; int portBase = 60000; int status; transmissionParams.SetPortbase(portBase); sessionParams.SetOwnTimestampUnit(1.0/((double)packetsPerSecond)); sessionParams.SetMaximumPacketSize(64000); sessionParams.SetAcceptOwnPackets(true); status = rtpSession.Create(sessionParams,&transmissionParams); checkError(status); // Instruct the RTP session to send data to ourselves. status = rtpSession.AddDestination(RTPIPv4Address(ntohl(inet_addr("127.0.0.1")),portBase)); checkError(status); // Tell the RTP component to use this RTPSession object. returnValue = rtpComp.init(&rtpSession); checkError(returnValue, rtpComp); returnValue = rtpDec.init(false, 0, &rtpSession); checkError(returnValue, rtpDec); returnValue = rtpDec.setPacketDecoder(0, &rtpOSCDec); checkError(returnValue, rtpDec); returnValue = oscDec.init(); checkError(returnValue, oscDec); // Next, we'll create the chain returnValue = chain.setChainStart(&timer); checkError(returnValue, chain); returnValue = chain.addConnection(&timer, &oscInput); checkError(returnValue, chain); returnValue = chain.addConnection(&oscInput, &oscEnc); checkError(returnValue, chain); returnValue = chain.addConnection(&oscEnc, &rtpEnc); checkError(returnValue, chain); returnValue = chain.addConnection(&rtpEnc, &rtpComp); checkError(returnValue, chain); returnValue = chain.addConnection(&rtpComp, &rtpDec); checkError(returnValue, chain); returnValue = chain.addConnection(&rtpDec, &oscDec, true); checkError(returnValue, chain); returnValue = chain.addConnection(&oscDec, &oscOutput); checkError(returnValue, chain); // Start the chain returnValue = chain.start(); checkError(returnValue, chain); // We'll wait until enter is pressed int counter = 0; sleep(1); for(int i=0; i<4; i++) { lo_message m = lo_message_new(); lo_message_add_int32(m,counter++); oscInput.push(m, "/testpfad"); sleep(1); } getc(stdin); returnValue = chain.stop(); checkError(returnValue, chain); rtpSession.Destroy(); return 0; }