Example #1
RTPHandler::RTPHandler(char* di, char* dp)
{
	portbase = RCV_PORT;
	destip = inet_addr(di);
	destport = (uint16_t) atoi(dp);
	alive = true;

	if (destip == INADDR_NONE)
	{
		cerr << "Bad IP address specified" << endl;
		alive = false;
		return;
	}

	destip = ntohl(destip);
	
	RTPUDPv4TransmissionParams transparams;
	RTPSessionParams sessparams;
 
	sessparams.SetOwnTimestampUnit(1.0/CLK_RATE);		
	
	sessparams.SetAcceptOwnPackets(true);
	transparams.SetPortbase(portbase);
	int e = sess.Create(sessparams, &transparams);
	if (error(e)) return;
	cout << sessparams.GetMaximumPacketSize() << endl;
	RTPIPv4Address addr(destip,destport);
	
	e = sess.AddDestination(addr);
	if (error(e)) return;
}
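The error() helper used by the constructor is not shown in this example; a minimal sketch, assuming it logs the jrtplib error string and clears the alive flag, might look like this (hypothetical, not part of the original source):

bool RTPHandler::error(int rtperr)
{
	// Hypothetical helper: returns true (and marks the handler dead) when the
	// jrtplib call reported an error, so callers can simply do `if (error(e)) return;`.
	if (rtperr >= 0)
		return false;
	cerr << "RTP error: " << RTPGetErrorString(rtperr) << endl;
	alive = false;
	return true;
}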
Example #2
int MyRTPSession::MyRTP_SetUp(MediaSession * media_session)
{
	if(!media_session) {
		fprintf(stderr, "%s: Invalid media session\n", __func__);
		return RTP_ERROR;
	}
	if(0 == media_session->TimeRate) {
		fprintf(stderr, "%s: Invalid MediaSession::TimeRate\n", __func__);
		return RTP_ERROR;
	}
	if(0 == media_session->RTPPort) {
		fprintf(stderr, "%s: Invalid MediaSession::RTPPort\n", __func__);
		return RTP_ERROR;
	}

	int status;

	// Now we'll create the RTP session itself; setting the destination and
	// polling for incoming data are handled elsewhere.

	RTPUDPv4TransmissionParams transparams;
	RTPSessionParams sessparams;

	// IMPORTANT: The local timestamp unit MUST be set, otherwise
	//            RTCP Sender Report info will be calculated wrong
	// In this case the rate comes from the media session's TimeRate field.
	sessparams.SetOwnTimestampUnit(1.0/media_session->TimeRate);         

	sessparams.SetAcceptOwnPackets(true);
	transparams.SetPortbase(media_session->RTPPort);
	status = Create(sessparams,&transparams);  
	return IsError(status);
}
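MyRTP_SetUp returns IsError(status), but that helper is not part of the snippet. A minimal sketch, assuming RTP_OK/RTP_ERROR are the class's own return codes, could be:

int MyRTPSession::IsError(int status)
{
	// Hypothetical helper: maps a negative jrtplib status to RTP_ERROR and
	// logs the library's error string; RTP_OK/RTP_ERROR are assumed constants.
	if (status < 0) {
		fprintf(stderr, "RTP error: %s\n", RTPGetErrorString(status).c_str());
		return RTP_ERROR;
	}
	return RTP_OK;
}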
Example #3
int CRTPRecv::InitRTP(int Port)
{
	RTPUDPv4TransmissionParams TransParams;
	RTPSessionParams SessionParams;

	// Set the local communication port.
	TransParams.SetPortbase(Port);
	// Set the timestamp unit: 1/90000 s (the standard 90 kHz RTP video clock).
	SessionParams.SetOwnTimestampUnit(1.0/90000.0);
	SessionParams.SetAcceptOwnPackets(true);	// accept packets we sent ourselves

	// Create the RTP session.
	int Result = Create(SessionParams, &TransParams);
	if (Result < 0)
	{
		LOG_ERROR("RTP: "<<RTPGetErrorString(Result));
		return -1;
	}
	

	// Set the maximum packet size; the default payload settings below are left commented out.
	this->SetMaximumPacketSize(65535);
	//this->SetDefaultMark(true); 
	//this->SetDefaultTimestampIncrement(TIMESTAMP);
	//this->SetDefaultPayloadType(H264);

	return 0;
}
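InitRTP only creates the session; how packets are read is not shown. A sketch of a receive step, assuming the background poll thread is not used (otherwise Poll() is unnecessary), might look like this, with PollOnce as a hypothetical member:

void CRTPRecv::PollOnce()
{
	// Fetch pending RTP/RTCP data (only needed when jrtplib's poll thread is disabled).
	Poll();

	BeginDataAccess();
	if (GotoFirstSourceWithData())
	{
		do
		{
			RTPPacket *pack;
			while ((pack = GetNextPacket()) != NULL)
			{
				// Hand pack->GetPayloadData() / pack->GetPayloadLength() to the decoder here.
				DeletePacket(pack);
			}
		} while (GotoNextSourceWithData());
	}
	EndDataAccess();
}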
Example #4
// Create a new RTP session.  If multicast is not being used then multicastIP
// should be set to an empty string.
static RTPSession createRtpSession(const std::string& multicastIP, 
                                   unsigned int port)
{
    RTPSession session;

    //if (setenv("LOGNAME", "video", 0) != 0) {
    //    throw std::runtime_error("Error setting LOGNAME environment variable");
    //}

    // Set up session params
    RTPSessionParams sessionparams;
    sessionparams.SetUsePollThread(false);
    sessionparams.SetMaximumPacketSize(1400);
    sessionparams.SetAcceptOwnPackets(true);
    sessionparams.SetOwnTimestampUnit(1.0f/900000.0f);
    sessionparams.SetResolveLocalHostname(false); 
    sessionparams.SetSessionBandwidth(9000000);
    sessionparams.SetReceiveMode(RTPTransmitter::AcceptAll);

    RTPUDPv4TransmissionParams transparams;
    transparams.SetPortbase(port);

    int status = session.Create(sessionparams, &transparams);
    if (status < 0) {
        throw std::runtime_error("Error creating RTP session");
    }

    // Join multicast groups if they are specified
    if (multicastIP.size() > 0) {
        if (!session.SupportsMulticasting()) {
            throw std::runtime_error("Multicast not supported!");
        } else {
            int joinip = ntohl(inet_addr(multicastIP.c_str())); 

            RTPIPv4Address joinaddr(joinip, port);
            int jstatus = session.JoinMulticastGroup(joinaddr);

            if (jstatus < 0) {
                throw std::runtime_error("Unable to join multicast group");
            }
        }
    }

    return session;
}
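A matching teardown is not shown; a sketch (hypothetical helper, assuming the same multicast parameters) that leaves the multicast group and sends an RTCP BYE could be:

static void destroyRtpSession(RTPSession& session, const std::string& multicastIP,
                              unsigned int port)
{
    // Leave the multicast group if one was joined earlier.
    if (multicastIP.size() > 0) {
        RTPIPv4Address leaveaddr(ntohl(inet_addr(multicastIP.c_str())), port);
        session.LeaveMulticastGroup(leaveaddr);
    }

    // Send a BYE packet and wait at most two seconds for it to go out.
    session.BYEDestroy(RTPTime(2, 0), 0, 0);
}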
Example #5
// Maybe this could be used here; worth a try.
void Sender::setRTPParams(){
	int status;

	RTPUDPv4TransmissionParams transparams;
	RTPSessionParams sessparams;
	sessparams.SetOwnTimestampUnit(1.0/25.0);
	sessparams.SetAcceptOwnPackets(true);
	sessparams.SetUsePredefinedSSRC(true);
//	sessparams.SetPredefinedSSRC(SSRC);

	transparams.SetPortbase(baseport);

	status = this->Create(sessparams, &transparams);
	checkerror(status);

	RTPIPv4Address addr(dst_ip, destport);

	status = this->AddDestination(addr);
	checkerror(status);
}
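The checkerror() helper is referenced but not defined here; a minimal sketch in the style of the jrtplib example programs (assumed, not necessarily this project's actual helper):

void checkerror(int rtperr)
{
	// Abort on any negative jrtplib status code, printing the library's error string.
	if (rtperr < 0)
	{
		std::cout << "ERROR: " << RTPGetErrorString(rtperr) << std::endl;
		exit(-1);
	}
}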
Example #6
int main(void)
{
#ifdef NEED_PA_INIT
	std::string errStr;

	if (!MIPPAInputOutput::initializePortAudio(errStr))
	{
		std::cerr << "Can't initialize PortAudio: " << errStr << std::endl;
		return -1;
	}
#endif // NEED_PA_INIT
#ifdef WIN32
	WSADATA dat;
	WSAStartup(MAKEWORD(2,2),&dat);
#endif // WIN32

	MIPTime interval(0.020); // We'll use 20 millisecond intervals.
	MIPAverageTimer timer(interval);
	MIPWAVInput sndFileInput;
	MIPSamplingRateConverter sampConv, sampConv2;
	MIPSampleEncoder sampEnc, sampEnc2, sampEnc3;
	MIPULawEncoder uLawEnc;
	MIPRTPULawEncoder rtpEnc;
	MIPRTPComponent rtpComp;
	MIPRTPDecoder rtpDec;
	MIPRTPULawDecoder rtpULawDec;
	MIPULawDecoder uLawDec;
	MIPAudioMixer mixer;
	MIPComponentAlias rtpCompAlias(&rtpComp);
	ToggleOutputComponent sndToggleComponent(&sndFileInput);
#ifdef MIPCONFIG_SUPPORT_WINMM
	MIPWinMMOutput sndCardOutput;
#else
#ifdef MIPCONFIG_SUPPORT_OSS
	MIPOSSInputOutput sndCardOutput;
#else
	MIPPAInputOutput sndCardOutput;
#endif
#endif
	MyChain chain("Sound file player");
	RTPSession rtpSession;
	bool returnValue;

	// We'll open the file 'soundfile.wav'.

	returnValue = sndFileInput.open("soundfile.wav", interval);
	checkError(returnValue, sndFileInput);
	
	// We'll convert to a sampling rate of 8000Hz and mono sound.
	
	int samplingRate = 8000;
	int numChannels = 1;

	returnValue = sampConv.init(samplingRate, numChannels);
	checkError(returnValue, sampConv);

	// Initialize the sample encoder: the RTP U-law audio encoder
	// expects native endian signed 16 bit samples.
	
	returnValue = sampEnc.init(MIPRAWAUDIOMESSAGE_TYPE_S16);
	checkError(returnValue, sampEnc);

	// Convert samples to U-law encoding
	returnValue = uLawEnc.init();
	checkError(returnValue, uLawEnc);

	// Initialize the RTP audio encoder: this component will create
	// RTP messages which can be sent to the RTP component.

	returnValue = rtpEnc.init();
	checkError(returnValue, rtpEnc);

	// We'll initialize the RTPSession object which is needed by the
	// RTP component.
	
	RTPUDPv4TransmissionParams transmissionParams;
	RTPSessionParams sessionParams;
	int portBase = 60000;
	int status;

	transmissionParams.SetPortbase(portBase);
	sessionParams.SetOwnTimestampUnit(1.0/((double)samplingRate));
	sessionParams.SetMaximumPacketSize(64000);
	sessionParams.SetAcceptOwnPackets(true);
	
	status = rtpSession.Create(sessionParams,&transmissionParams);
	checkError(status);

	// Instruct the RTP session to send data to ourselves.
	status = rtpSession.AddDestination(RTPIPv4Address(ntohl(inet_addr("127.0.0.1")),portBase));
	checkError(status);

	// Tell the RTP component to use this RTPSession object.
	returnValue = rtpComp.init(&rtpSession, 160); // 20ms at 8000Hz = 160 samples per RTP packet
	checkError(returnValue, rtpComp);
	
	// Initialize the RTP audio decoder.
	returnValue = rtpDec.init(true, 0, &rtpSession);
	checkError(returnValue, rtpDec);

	// Register the U-law decoder for payload type 0
	returnValue = rtpDec.setPacketDecoder(0,&rtpULawDec);
	checkError(returnValue, rtpDec);

	// Convert U-law encoded samples to linear encoded samples
	returnValue = uLawDec.init();
	checkError(returnValue, uLawDec);

	// Transform the received audio data to floating point format.
	returnValue = sampEnc2.init(MIPRAWAUDIOMESSAGE_TYPE_FLOAT);
	checkError(returnValue, sampEnc2);

	// We'll make sure that received audio frames are converted to the right
	// sampling rate.
	returnValue = sampConv2.init(samplingRate, numChannels);
	checkError(returnValue, sampConv2);

	// Initialize the mixer.
	returnValue = mixer.init(samplingRate, numChannels, interval);
	checkError(returnValue, mixer);

	// Initialize the soundcard output.
	returnValue = sndCardOutput.open(samplingRate, numChannels, interval);
	checkError(returnValue, sndCardOutput);

#ifdef MIPCONFIG_SUPPORT_WINMM
	// The WinMM output component uses signed little endian 16 bit samples.
	returnValue = sampEnc3.init(MIPRAWAUDIOMESSAGE_TYPE_S16LE);
#else
#ifdef MIPCONFIG_SUPPORT_OSS
	// The OSS component can use several encoding types. We'll ask
	// the component to which format samples should be converted.
	returnValue = sampEnc3.init(sndCardOutput.getRawAudioSubtype());
#else
	// The PortAudio output component uses signed 16 bit samples
	returnValue = sampEnc3.init(MIPRAWAUDIOMESSAGE_TYPE_S16);
#endif
#endif
	checkError(returnValue, sampEnc3);

	// Next, we'll create the chain
	returnValue = chain.setChainStart(&timer);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&timer, &sndToggleComponent);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&sndToggleComponent, &sampConv);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&sampConv, &sampEnc);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&sampEnc, &uLawEnc);
	checkError(returnValue, chain);
	
	returnValue = chain.addConnection(&uLawEnc, &rtpEnc);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&rtpEnc, &rtpComp);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&timer, &rtpCompAlias);
	checkError(returnValue, chain);
	
	returnValue = chain.addConnection(&rtpCompAlias, &rtpDec);
	checkError(returnValue, chain);

	// This is where the feedback chain is specified: we want
	// feedback from the mixer to reach the RTP audio decoder,
	// so we'll specify that over the links in between, feedback
	// should be transferred.

	returnValue = chain.addConnection(&rtpDec, &uLawDec, true);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&uLawDec, &sampEnc2, true);
	checkError(returnValue, chain);
	
	returnValue = chain.addConnection(&sampEnc2, &sampConv2, true);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&sampConv2, &mixer, true);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&mixer, &sampEnc3);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&sampEnc3, &sndCardOutput);
	checkError(returnValue, chain);
	
	// Start the chain

	returnValue = chain.start();
	checkError(returnValue, chain);

	// Toggle the sound on and off a number of times, waiting for enter between steps.

	int num = 10;

	for (int i = 0 ; i < num ; i++)
	{
		std::cout << "iteration " << (i+1) << "/" << num << std::endl;
		std::cout << "Press enter for silence" << std::endl;

		getc(stdin);
		sndToggleComponent.lock();
		sndToggleComponent.setEnabled(false);
		sndToggleComponent.unlock();

		std::cout << "Press enter for sound" << std::endl;

		getc(stdin);
		sndToggleComponent.lock();
		sndToggleComponent.setEnabled(true);
		sndToggleComponent.unlock();
	}

	returnValue = chain.stop();
	checkError(returnValue, chain);

	rtpSession.Destroy();
	
	// We'll let the destructors of the other components take care
	// of their de-initialization.

	sndCardOutput.close(); // In case we're using PortAudio
#ifdef NEED_PA_INIT
	MIPPAInputOutput::terminatePortAudio();
#endif // NEED_PA_INIT

#ifdef WIN32
	WSACleanup();
#endif
	return 0;
}
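checkError() and MyChain are not part of the snippet; sketches in the style of the emiplib example programs (assumed definitions) could look like this:

void checkError(bool returnValue, const MIPComponent &component)
{
	// emiplib components report errors through getErrorString().
	if (returnValue == true)
		return;
	std::cerr << "An error occured in component: " << component.getComponentName() << std::endl;
	std::cerr << "Error description: " << component.getErrorString() << std::endl;
	exit(-1);
}

void checkError(bool returnValue, const MIPComponentChain &chain)
{
	if (returnValue == true)
		return;
	std::cerr << "An error occured in chain: " << chain.getName() << std::endl;
	std::cerr << "Error description: " << chain.getErrorString() << std::endl;
	exit(-1);
}

void checkError(int status)
{
	// Overload for jrtplib status codes.
	if (status >= 0)
		return;
	std::cerr << "An RTP error occured: " << RTPGetErrorString(status) << std::endl;
	exit(-1);
}

class MyChain : public MIPComponentChain
{
public:
	MyChain(const std::string &chainName) : MIPComponentChain(chainName) { }
private:
	// Called by the chain's background thread when it exits.
	void onThreadExit(bool error, const std::string &errorComponent, const std::string &errorDescription)
	{
		if (!error)
			return;
		std::cerr << "An error occured in the background thread." << std::endl;
		std::cerr << "    Component: " << errorComponent << std::endl;
		std::cerr << "    Error description: " << errorDescription << std::endl;
	}
};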
Example #7
void Java_cn_nickwar_MainActivity_nativeWorker(JNIEnv* env, jobject obj) {
	uint16_t portbase=8000,destport=9000;
	std::string ipstr="192.168.1.102";
	uint32_t destip=inet_addr(ipstr.c_str());
	int status,i,num;

	RTPSession session;
	RTPSessionParams sessionparams;
	RTPUDPv4TransmissionParams transparams;
	RTPIPv4Address addr;

	if (destip == INADDR_NONE) {
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "Bad IP address specified");
		return;
	}

	destip = ntohl(destip);

	num = 40;

	sessionparams.SetOwnTimestampUnit(1.0/10.0);
	sessionparams.SetAcceptOwnPackets(true);

	transparams.SetPortbase(portbase);

	addr.SetIP(destip);
	addr.SetPort(destport);

	status = session.Create(sessionparams,&transparams);

	if (status<0) {
		std::string tmp = "Create:";
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", (tmp+RTPGetErrorString(status)).c_str());
	}
	status = session.AddDestination(addr);
	if (status<0) {
		std::string tmp = "AddDestination:";
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", (tmp+RTPGetErrorString(status)).c_str());
	}

	while(!m_bExitApp)
	{
		session.BeginDataAccess();

		unsigned char *buff = NULL;
		if (session.GotoFirstSourceWithData())
		{
			do
			{
				RTPPacket *pack;
				while((pack = session.GetNextPacket()) !=NULL)
				{
					__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "got packet!\n");
					char message[26];
					sprintf(message, "got packet");
					jstring messageString = env->NewStringUTF(message);
					env->CallVoidMethod(obj, rtpresultFromJNI, messageString);

					if (NULL != env->ExceptionOccurred()) {
						env->ExceptionClear();
						session.DeletePacket(pack);
						continue;
					}
					if (pack->GetPayloadLength()>0) {
						buff = pack->GetPayloadData();
						__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "packt data:%s",buff);
					}
					session.DeletePacket(pack);
				}
			}
			while(session.GotoNextSourceWithData());
		}

		session.EndDataAccess();
		//
#ifndef RTP_SUPPORT_THREAD
		status = session.Poll();
		if (status<0) {
			session.Destroy();
			return;
		}
#endif

		RTPTime::Wait(RTPTime(0,5000));
	}

	session.Destroy();
	return;
}
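rtpresultFromJNI and m_bExitApp are defined elsewhere in the project; a hypothetical setup, assuming MainActivity declares a Java method `void rtpresultFromJNI(String msg)`, might be:

// Hypothetical globals and initialization for the callback used above.
static volatile bool m_bExitApp = false;
static jmethodID rtpresultFromJNI = NULL;

static void initRtpCallback(JNIEnv* env, jobject obj)
{
	jclass clazz = env->GetObjectClass(obj);
	rtpresultFromJNI = env->GetMethodID(clazz, "rtpresultFromJNI", "(Ljava/lang/String;)V");
}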
Example #8
bool MediaStream::start(QHostAddress ip, int port, int localPort, int codecPayload)
{
    if ( isRunning() )
        stop();

    printf("%s %d | %d | %d\n",ip.toString().toAscii().data(), port, localPort, codecPayload);

    if(d->processThread == NULL)
	d->processThread = new MediaThread(this);
    d->outBufferPos = 0;
    d->outBufferTime = 0;

//    int localPort = 3000;
printf("getFactory(%d)\n", codecPayload);
    VoiceCodecFactory *factory = CodecsManager::instance()->codecFactory(codecPayload);
    if ( !factory ) {
        printf("VoiceCodecFactory not found!\n");
        return true;
    }

    d->codecPayload = codecPayload;
    d->decoder =  factory->decoder();
    d->encoder =  factory->encoder();

printf("transparams\n");
    // Now, we'll create a RTP session and set the destination
//     d->transparams.mediaChannel = mediaChannel;
//     d->transparams.incomingPackets = incomingPackets;
    d->transparams.SetPortbase(localPort);
	
    RTPSessionParams sessparams;
    sessparams.SetReceiveMode(RTPTransmitter::AcceptAll);

    sessparams.SetOwnTimestampUnit(1.0/8000.0); // 8 kHz
    sessparams.SetAcceptOwnPackets(true);

printf("session.Create()\n");
    int status = d->session.Create( sessparams, &d->transparams, RTPTransmitter::SynapseProto );

    if ( status<0 ) {
        qDebug("can't create RTP session, %s", RTPGetErrorString(status).c_str() );
        d->session.Destroy(); 
        return false;
    }

printf("session.AddDestination()\n");
    RTPIPv4Address addr(ip.toIPv4Address(),port);
	status = d->session.AddDestination(addr);

    if ( status<0 ) {
         qDebug("can't add rtp destination, %s", RTPGetErrorString(status).c_str() );
        d->session.Destroy(); 
        return false;
    }

    d->session.SetDefaultPayloadType(codecPayload);
    d->session.SetDefaultMark(false);
    d->session.SetDefaultTimestampIncrement(160);

    //initialise audio

    status = Pa_Initialize();
//////////////////// FOR TESTING
    if( status != paNoError ) {
        qDebug( "PortAudio error: %s", Pa_GetErrorText(status) );
//        stop();
 //       return true;
    }

if(status == paNoError) {

    status = Pa_OpenDefaultStream(
        &d->audioStream,/* passes back stream pointer */
        1,              /* 1 input channel */
        1,              /* mono output */
        paInt16,        /* 16 bit fixed point output */
        8000,           /* sample rate */
        240,            /* frames per buffer */
        16,             /* number of buffers, if zero then use default minimum */
        audioCallback,  /* specify our custom callback */
        d );            /* pass our data through to callback */

    status = Pa_StartStream( d->audioStream );
    if( status != paNoError ) {
         qDebug( "PortAudio error: %s", Pa_GetErrorText(status) );
//        stop();
//        return true;
    }
}
    

    // put something to dsp buffer
    /*
    char emptyData[160*8];
    memset(  emptyData, 1, sizeof(emptyData) );
    d->dspBuffer->lock();
    d->dspBuffer->put( emptyData, sizeof(emptyData) );
    d->dspBuffer->unlock();
    */



    //d->timer.start(1,false);
    d->isRunning = true;
    d->processThread->start();
    
//    qDebug("mediastream started");
    printf("mediastream started\n");
    return true;
} // }}}
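With SetDefaultPayloadType/SetDefaultMark/SetDefaultTimestampIncrement in place, the session can use the short SendPacket overload; a hypothetical send step (sendEncodedFrame is not in the original code) might be:

int MediaStream::sendEncodedFrame(const void *data, size_t length)
{
    // The defaults set in start() supply payload type, marker bit and
    // timestamp increment, so only the encoded payload has to be passed.
    return d->session.SendPacket(data, length);
}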
Example #9
int main(void)
{
	int packetsPerSecond = 100;
	MIPTime interval(1.0/(double)packetsPerSecond); // We'll use 10 millisecond intervals.
	MIPAverageTimer timer(interval);
	MIPOSCInput oscInput;
	MIPOSCEncoder oscEnc;
	MIPRTPOSCEncoder rtpEnc;
	MIPRTPComponent rtpComp;
	MIPRTPDecoder rtpDec;
	MIPRTPOSCDecoder rtpOSCDec;
	MIPOSCDecoder oscDec;
	MIPOSCOutput oscOutput;

	MyChain chain("OSC Sender");
	RTPSession rtpSession;
	bool returnValue;


	// Convert Messages to MIPOSCMessages
	returnValue = oscEnc.init();
	checkError(returnValue, oscEnc);

	// Initialize the RTP OSC encoder: this component will create
	// RTP messages which can be sent to the RTP component.

	returnValue = rtpEnc.init();
	checkError(returnValue, rtpEnc);

	// We'll initialize the RTPSession object which is needed by the
	// RTP component.

	RTPUDPv4TransmissionParams transmissionParams;
	RTPSessionParams sessionParams;
	int portBase = 60000;
	int status;

	transmissionParams.SetPortbase(portBase);
	sessionParams.SetOwnTimestampUnit(1.0/((double)packetsPerSecond));
	sessionParams.SetMaximumPacketSize(64000);
	sessionParams.SetAcceptOwnPackets(true);

	status = rtpSession.Create(sessionParams,&transmissionParams);
	checkError(status);

	// Instruct the RTP session to send data to ourselves.
	status = rtpSession.AddDestination(RTPIPv4Address(ntohl(inet_addr("127.0.0.1")),portBase));
	checkError(status);

	// Tell the RTP component to use this RTPSession object.
	returnValue = rtpComp.init(&rtpSession);
	checkError(returnValue, rtpComp);

	returnValue = rtpDec.init(false, 0, &rtpSession);
	checkError(returnValue, rtpDec);

	returnValue = rtpDec.setPacketDecoder(0, &rtpOSCDec);
	checkError(returnValue, rtpDec);

	returnValue = oscDec.init();
	checkError(returnValue, oscDec);

	// Next, we'll create the chain
	returnValue = chain.setChainStart(&timer);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&timer, &oscInput);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&oscInput, &oscEnc);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&oscEnc, &rtpEnc);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&rtpEnc, &rtpComp);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&rtpComp, &rtpDec);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&rtpDec, &oscDec, true);
	checkError(returnValue, chain);

	returnValue = chain.addConnection(&oscDec, &oscOutput);
	checkError(returnValue, chain);

	// Start the chain

	returnValue = chain.start();
	checkError(returnValue, chain);

	// Push a few test OSC messages, then wait until enter is pressed.

	int counter = 0;

	sleep(1);
	for(int i=0; i<4; i++) {
		lo_message m = lo_message_new();
		lo_message_add_int32(m,counter++);
		oscInput.push(m, "/testpfad");
		sleep(1);
	}
	getc(stdin);

	returnValue = chain.stop();
	checkError(returnValue, chain);

	rtpSession.Destroy();

	return 0;
}