Example #1
uint8_t * MyRTPSession::GetMyRTPData(uint8_t * data_buf, size_t * size, unsigned long timeout_ms)
{
	if(!data_buf) {
		fprintf(stderr, "%s: Invalid argument ('data_buf==NULL')\n", __func__);
		return NULL;
	}

	if(!size) {
		fprintf(stderr, "%s: Invalid argument ('size==NULL')\n", __func__);
		return NULL;
	}

	unsigned long UsleepTimes = (timeout_ms + USLEEP_UNIT - 1) / USLEEP_UNIT; // round 'timeout_ms / USLEEP_UNIT' up
	if(UsleepTimes == 0) UsleepTimes = 1; // poll at least once and avoid the unsigned wrap-around below

	do {
#ifndef RTP_SUPPORT_THREAD
		int status = Poll();
		if(!IsError(status)) return NULL;
#endif 

		BeginDataAccess();

		// check incoming packets
		if (!GotoFirstSourceWithData()) {
			EndDataAccess();
			usleep(USLEEP_UNIT);
			UsleepTimes--;
			continue;
			// return NULL;
		}
		RTPPacket *pack;

		if(!(pack = GetNextPacket()))
		{
			EndDataAccess();
			usleep(USLEEP_UNIT);
			UsleepTimes--;
			continue;
			// return NULL;
		}

		uint8_t * Packet = pack->GetPayloadData();
		size_t PacketSize = pack->GetPayloadLength();
		// printf("data length: %lu\n", PacketSize);

		*size = PacketSize;
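		// NOTE: data_buf must be large enough to hold the payload; this copy is not bounds-checked.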
		memcpy(data_buf, Packet, PacketSize);

		// we no longer need the packet, so we'll delete it
		DeletePacket(pack);
		EndDataAccess();
		return data_buf; // got the data, hand it back to the caller
	} while(UsleepTimes > 0);

	// Timed out without receiving any data.
	return NULL;
}
bool whu_RtpRPicSAg::RecvPic(IplImage* RecvImg)
{
	bool done = false;
	char RecvBuf[1204];                 // 4-byte line index + 1200 bytes of pixel data
	RTPTime delay(0.020);               // 20 ms
	RTPTime starttime = RTPTime::CurrentTime();
	int length;
	uchar* ptr;
	session.BeginDataAccess();
		if (session.GotoFirstSource())
		{
			int line=0;
			do
			{
				RTPPacket *packet;
				
				while ((packet = session.GetNextPacket()) != 0)
				{
					timestamp1 = packet->GetTimestamp();
					length = packet->GetPayloadLength();
					RawData = packet->GetPayloadData();
					// Each packet carries a 4-byte line index followed by 1200 bytes of
					// pixel data; skip anything shorter to avoid reading past the payload.
					if (length >= 1204)
					{
						memcpy(RecvBuf, RawData, 1204);
						memcpy(&line, RecvBuf, 4);
						if (line >= 0 && line < RecvImg->height)
						{
							ptr = (uchar*)(RecvImg->imageData + line*RecvImg->widthStep);
							memcpy(ptr, RecvBuf+4, 1200);
						}
						else {
							printf("lost packet\n");
						}
					}
					else {
						printf("short packet (%d bytes)\n", length);
					}
					
					session.DeletePacket(packet);
				}
			} while (session.GotoNextSource());
		}
		session.EndDataAccess();

		RTPTime::Wait(delay);

		RTPTime t = RTPTime::CurrentTime();
		t -= starttime;
		if (t > RTPTime(60.0))
			done = true;
		return done;
}
Example #3
// Thread function for RTP session.
static void* rtpSessionThread(void *arg)
{
    VideoStream* video = reinterpret_cast<VideoStream*>(arg);

    uint8_t bigBuffer[MAX_FRAME_SIZE];   // accumulates payload fragments for one frame

    unsigned int lastPacketTimestamp = 0;
    unsigned int currentIndex = 0;
    double last_time = 0;

    RTPSession session = createRtpSession(video->getMulticastIP(), 
                                          video->getPort());

    while (1) {
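        // NOTE: this loop never terminates, so the BYE teardown after it is
        // unreachable unless an exit condition is added here.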
        session.Poll();

        // Distribute data from the session to connected clients if we've
        // got anything
        session.BeginDataAccess();

        if (session.GotoFirstSourceWithData()) {
            do {
                RTPPacket *packet = NULL;

                while ((packet = session.GetNextPacket()) != NULL) {
                    if ((packet->GetPayloadLength() > sizeof(bigBuffer)) || 
                        (packet->GetPayloadLength() == 0)) {
                        // Free the packet, we're not going to use it.
                        session.DeletePacket(packet);
                        continue; // Skip this packet and move on to the next one
                    }

                    // Check timestamps for new data.  A new timestamp means the
                    // previous frame is complete, so hand the buffer to the decoder.
                    if (packet->GetTimestamp() != lastPacketTimestamp) {
                        video->decode((uint8_t*)&bigBuffer[0], currentIndex);

                        currentIndex = 0;
                        memset(&bigBuffer[0], 0, sizeof(bigBuffer));
                    } // End new timestamp optimization.


                    // Copy data into buffer
                    if (currentIndex + packet->GetPayloadLength() > sizeof(bigBuffer)) {
                        throw std::runtime_error("Frame buffer overflow");
                    }

                    memcpy(&bigBuffer[currentIndex], packet->GetPayloadData(),
                           packet->GetPayloadLength());
                    currentIndex += packet->GetPayloadLength();

                    // Mark our last timestamp
                    lastPacketTimestamp = packet->GetTimestamp();

                    // Free the packet.
                    session.DeletePacket(packet);
                } 
            } while (session.GotoNextSourceWithData()); 
        }

        session.EndDataAccess();
        RTPTime delay(0, 100); // 100usec

        // Update More Data
        bool moreData;
        session.WaitForIncomingData(delay, &moreData);
    }

    // Leave the session while sending BYE.
    RTPTime timeout(0.75f); //  Wait briefly.
    const char* reason = "Session Destroyed.";
    unsigned int reasonlen = strlen(reason);

    if (session.IsActive())
        session.BYEDestroy(timeout, reason, reasonlen);
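
    return NULL; // satisfy the void* thread signature (unreachable while the loop above never exits)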
}  
void Java_cn_nickwar_MainActivity_nativeWorker(JNIEnv* env, jobject obj) {
	uint16_t portbase=8000,destport=9000;
	std::string ipstr="192.168.1.102";
	uint32_t destip=inet_addr(ipstr.c_str());
	int status,i,num;

	RTPSession session;
	RTPSessionParams sessionparams;
	RTPUDPv4TransmissionParams transparams;
	RTPIPv4Address addr;

	if (destip == INADDR_NONE) {
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "Bad IP address specified");
		return; // no point in continuing with an invalid destination
	}

	destip = ntohl(destip);

	num = 40;

	sessionparams.SetOwnTimestampUnit(1.0/10.0);
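	// (a timestamp unit of 1.0/10.0 s means the RTP timestamp advances 10 units per second)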
	sessionparams.SetAcceptOwnPackets(true);

	transparams.SetPortbase(portbase);

	addr.SetIP(destip);
	addr.SetPort(destport);

	status = session.Create(sessionparams,&transparams);

	if (status<0) {
		std::string tmp = "Create:";
		// pass the message as an argument, not as the format string
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "%s", (tmp+RTPGetErrorString(status)).c_str());
	}
	status = session.AddDestination(addr);
	if (status<0) {
		std::string tmp = "AddDestination:";
		__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "%s", (tmp+RTPGetErrorString(status)).c_str());
	}

	while(!m_bExitApp)
	{
		session.BeginDataAccess();

		unsigned char *buff = NULL;
		if (session.GotoFirstSourceWithData())
		{
			do
			{
				RTPPacket *pack;
				while((pack = session.GetNextPacket()) !=NULL)
				{
					__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "got packet!\n");
					char message[26];
					sprintf(message, "got packet");
					jstring messageString = env->NewStringUTF(message);
					env->CallVoidMethod(obj, rtpresultFromJNI, messageString);
					env->DeleteLocalRef(messageString); // don't leak JNI local references in this loop

					if (NULL != env->ExceptionOccurred()) {
						env->ExceptionClear();          // clear the pending exception before further JNI calls
						session.DeletePacket(pack);     // don't leak the packet
						continue;
					}
					if (pack->GetPayloadLength()>0) {
						buff = pack->GetPayloadData();
						// the payload is not NUL-terminated, so limit the printed length
						__android_log_print(ANDROID_LOG_DEBUG, "pspm.native", "packet data:%.*s", (int)pack->GetPayloadLength(), buff);
					}
					session.DeletePacket(pack);
				}
			}
			while(session.GotoNextSourceWithData());
		}

		session.EndDataAccess();
		//
#ifndef RTP_SUPPORT_THREAD
		status = session.Poll();
		if (status<0) {
			session.Destroy();
			return;
		}
#endif

		RTPTime::Wait(RTPTime(0,5000));
	}

	session.Destroy();
	return;
}
// Called on a timer; all data is processed here.
void MediaStream::timerClick()
{
//d->mutex.lock();

#ifndef TEST_AUDIO    
    int status = d->session.Poll();
    if ( status<0 ) {
        qDebug("Poll: %s", RTPGetErrorString(status).c_str() );
    }
//    printf("JStat2 %d : %d\n", d->micBuffer->size(),d->dspBuffer->size());

    //checkRtpError( status );
    // check incoming packets
    d->session.BeginDataAccess();
    if ( d->session.GotoFirstSourceWithData() ) {

        qDebug("have rtp data");
        do {
            RTPSourceData *sourceData = d->session.GetCurrentSourceInfo();
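            // NOTE: sourceData is fetched but not used below; it could be used to
            // inspect the sender (SSRC, SDES items, ...) or dropped.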

            RTPPacket *pack;
            if ((pack = d->session.GetNextPacket()) != NULL) {
                qDebug("Got packet N %u", pack->GetExtendedSequenceNumber());


                // debug("Got  packet with payload type %d, size %d", pack->GetPayloadType(), pack->GetPayloadLength() );

                // TODO initialise decoder here using pack payload type, maybe use QIntDict of decoders
                if ( d->decoder ) {
                    short* decodedData = 0;

                    int size = d->decoder->decode((char*)pack->GetPayloadData(), pack->GetPayloadLength(), &decodedData );

                    if ( size > 0 ) {

                        // adjust the volume
                        for ( int i=0; i<size; i++ ) {
                            double val = double(decodedData[i]) * call_dlg_dsp_level / 50.0;
                            if ( val > 32700.0 )
                                val = 32700.0;
                            if ( val < -32700.0 )
                                val = -32700.0;

                            decodedData[i] = short(val);
                        }
                        
                        // write to DSP buffer
                        d->dspBuffer->lock();
                        d->dspBuffer->put( (char*)decodedData, size*2 );
                        d->dspBuffer->unlock();
                        
                        processDspData(decodedData,size);

                        delete[] decodedData;
                    }
                    qDebug("decoded data (%d bytes) with payload type %d",  size*2, pack->GetPayloadType() );


                } else {
                    qDebug("can't decode data with payload type %d", pack->GetPayloadType() );
                }

                // we no longer need the packet, so release it through the session
                d->session.DeletePacket(pack);
            }
        } while ( d->session.GotoNextSourceWithData());
    }
    d->session.EndDataAccess();

    // send the packet
    // check for in data


    short *data = 0;
    int micDataSize = 0; // size of the mic data read, in samples

    d->micBuffer->lock();
    micDataSize = d->micBuffer->size()/2;
    if ( micDataSize ) {
        data = new short[micDataSize];
        memcpy( data, d->micBuffer->data(), micDataSize*2 );
        d->micBuffer->fetch( micDataSize*2 );
    }
    d->micBuffer->unlock();

    // adjust mic volume
    for ( int i=0; i<micDataSize; i++ ) {
        double val = double(data[i]) * call_dlg_mic_level / 50.0;
        if ( val > 32700.0 )
            val = 32700.0;
        if ( val < -32700.0 )
            val = -32700.0;

        data[i] = short(val);
    }

    if(micDataSize == 0) {
        micDataSize = 160;
        data = new short[160](); // value-initialize so we don't process/send garbage samples
    }

    // examine the data here, to calculate levels
    processMicData(data, micDataSize);


    if ( data ) {
        char * encodedData = 0;
        int size = 0;

        qDebug("have mic data %d", micDataSize );
        
        
        do {
            int readed = 0;
            size = d->encoder->encode( data, micDataSize, &encodedData, &readed );

            int localPayload = d->codecPayload; // TODO get local payload here

            qDebug("read %d  encoded %d", readed, size );

            delete[] data;
            data = 0;
            micDataSize = 0;

            // TODO: for pcmu packet (payload==0) send packets of certain size
            if ( size > 0 ) {
                memcpy( d->outBuffer+d->outBufferPos, encodedData, size );
                d->outBufferPos += size;
                d->outBufferTime += readed;
                if ( d->outBufferPos ) {
                    //checkRtpError( 

                    if ( d->session.IsActive() && d->sendPacketsFlag ) {
                        int status = d->session.SendPacket( (void *)d->outBuffer, (int)d->outBufferPos, (unsigned char)localPayload , false, (long)d->outBufferTime );
                        if ( status<0 ) {
                             qDebug("can't SendPacket, %s", RTPGetErrorString(status).c_str() );
                        }
                    }
                    qDebug("sent packet");
                }

                    

                d->outBufferPos = 0;
                d->outBufferTime = 0;
            }

            if ( encodedData ) {
                delete[] encodedData;
                encodedData = 0;
            }

        } while (size > 0);
    }
    
    status = d->session.Poll();
    if ( status<0 ) {
         qDebug("Poll: %s", RTPGetErrorString(status).c_str() );
    }
#else // TEST_AUDIO

    short *data = 0;
    int micDataSize = 0; // size of the mic data read, in samples

    d->micBuffer->lock();
    micDataSize = d->micBuffer->size()/2;
    if ( micDataSize ) {
        data = new short[micDataSize];
        memcpy( data, d->micBuffer->data(), micDataSize*2 );
        d->micBuffer->fetch( micDataSize*2 );
    }
    d->micBuffer->unlock();

    if (data) {
        // write to DSP buffer
        d->dspBuffer->lock();
        d->dspBuffer->put( (char*)data,micDataSize*2 );
        d->dspBuffer->unlock();

    }

    static int totalSamples = 0;
    totalSamples += micDataSize;


    if ( micDataSize )
        printf("total audio samples: %d  %d   \r", micDataSize, totalSamples);
    

#endif // TEST_AUDIO
    
//    d->mutex->unlock();
}
Example #6
void ReceiverSession::RunNetwork() {
    log("Sending Hi packet");
    const char *hi = "HI";
    // sizeof(hi) would be the size of the pointer, not the message, so use strlen
    int status = SendPacket(hi, strlen(hi), 0, false,
                            strlen(hi));// Say Hi, should cause the server to send data
    _checkerror(status);

    while (codec == NULL && isRunning) {
        log("Waiting for codec RTCP packet...");
        RTPTime::Wait(RTPTime(1, 0));// Wait 1s
    }
    if (!isRunning) return;

    // Start decoder
    status = AMediaCodec_start(codec);
    if (status != AMEDIA_OK) return;
    log("Started decoder");

    // Extracting format data
    int32_t samples = 44100, channels = 1;
    AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samples);
    AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channels);
    audioplayer_initPlayback((uint32_t) samples, (uint32_t) channels);

    bool hasInput = true, hasOutput = true;
    int32_t beginTimestamp = -1, lastTimestamp = 0;
    uint16_t lastSeqNum = 0;
    while (hasInput && isRunning) {
        BeginDataAccess();
        if (GotoFirstSourceWithData()) {
            do {
                RTPPacket *pack;
                while ((pack = GetNextPacket()) != NULL) {
                    // We repurposed the marker flag as end of file
                    hasInput = !pack->HasMarker();

                    // Calculate playback time and do some lost-packet corrections
                    uint32_t timestamp = pack->GetTimestamp();
                    if (beginTimestamp == -1) {// record first timestamp and use differences
                        beginTimestamp = timestamp;
                        lastSeqNum = pack->GetSequenceNumber() - (uint16_t) 1;
                    }
                    timestamp -= beginTimestamp;
                    if (pack->HasExtension()
                        && pack->GetExtensionID() == AUDIOSYNC_EXTENSION_HEADER_ID
                        && pack->GetExtensionLength() == sizeof(int64_t)) {
                        int64_t *usec = (int64_t*) pack->GetExtensionData();
                        audioplayer_syncPlayback(ntohq(*usec), timestamp);
                    }

                    /*if (pack->HasExtension()) {
                        debugLog("Ext: %" PRIu16 " %lld", pack->GetExtensionID(), (long long)pack->GetExtensionLength());
                    }*/

                    // Handle lost packets, TODO How does this work with multiple senders?
                    if (pack->GetSequenceNumber() != (uint16_t)(lastSeqNum + 1)) {
                        // TODO handle multiple packets with the same timestamp. (Decode together?)
                        /*if (timestamp == lastTimestamp)*/
                        log("Packets jumped %u => %u | %.2f => %.2fs.", lastSeqNum,
                            pack->GetSequenceNumber(), lastTimestamp / 1E6,
                            timestamp / 1E6);
                        // TODO evaluate the impact of this time gap parameter
                        if (timestamp - lastTimestamp > SECOND_MICRO/20) {//50 ms
                            // According to the docs we need to flush if data is not adjacent.
                            // It is unclear how big these gaps can be and still be tolerable.
                            // During testing this call did cause the codec to throw errors,
                            // most likely in combination with split packets, where one of a
                            // set of packets with the same timestamp got lost.
                            log("Flushing codec");
                            AMediaCodec_flush(codec);
                        }
                    }
                    lastSeqNum = pack->GetSequenceNumber();
                    lastTimestamp = timestamp;

                    if (hasInput) {
                        //log("Received %.2f", timestamp / 1000000.0);
                        uint8_t *payload = pack->GetPayloadData();
                        size_t length = pack->GetPayloadLength();
                        status = decoder_enqueueBuffer(codec, payload, length, (int64_t) timestamp);
                        if (status != AMEDIA_OK) hasInput = false;
                    } else {
                        log("Receiver: End of file");
                        // Tell the codec we are done
                        decoder_enqueueBuffer(codec, NULL, -1, (int64_t) timestamp);
                    }
                    hasOutput = decoder_dequeueBuffer(codec, &audioplayer_enqueuePCMFrames);

                    DeletePacket(pack);
                }
            } while (GotoNextSourceWithData());
        }
        EndDataAccess();

        struct timespec req;
        req.tv_sec = 0;
        req.tv_nsec = 1000*1000; // ~1 ms
        audioplayer_monitorPlayback();
        // We should give other threads the opportunity to run
        nanosleep(&req, NULL);// TODO base time on duration of received audio?
        audioplayer_monitorPlayback();
    }
    log("Received all data, ending RTP session.");
    BYEDestroy(RTPTime(1, 0), 0, 0);

    while (hasOutput && status == AMEDIA_OK && isRunning) {
        hasOutput = decoder_dequeueBuffer(codec, &audioplayer_enqueuePCMFrames);
        RTPTime::Wait(RTPTime(0, 5000));
    }
    AMediaCodec_stop(codec);
    log("Finished decoding");

    while(isRunning) {
        audioplayer_monitorPlayback();
        RTPTime::Wait(RTPTime(0, 50000));// 50ms
    }
    audioplayer_stopPlayback();
}