Example 1
bool _VIDEO_AVC::Serialize(IOBuffer &dest) {
	//scratch space; each field below is staged here in network byte order
	//before being appended to dest (ReadFromBuffer copies from the given pointer)
	uint8_t temp[sizeof (_spsLength) + sizeof (_ppsLength) + sizeof (uint32_t)];
	//SPS length (16-bit, big-endian) followed by the SPS payload
	EHTONSP(temp, _spsLength);
	dest.ReadFromBuffer(temp, sizeof (_spsLength));
	dest.ReadFromBuffer(_pSPS, _spsLength);
	//PPS length (16-bit, big-endian) followed by the PPS payload
	EHTONSP(temp, _ppsLength);
	dest.ReadFromBuffer(temp, sizeof (_ppsLength));
	dest.ReadFromBuffer(_pPPS, _ppsLength);
	//width/height overrides as 32-bit big-endian values
	EHTONLP(temp, _widthOverride);
	dest.ReadFromBuffer(temp, sizeof (uint32_t));
	EHTONLP(temp, _heightOverride);
	dest.ReadFromBuffer(temp, sizeof (uint32_t));
	return true;
}
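The EHTONSP/EHTONLP macros themselves are not defined in these listings; judging from the byte-order assertions in Example 4 below, they store a 16-bit or 32-bit host value into a byte buffer in network (big-endian) order without requiring an aligned destination. A minimal stand-in sketch with that behavior (the _sketch names are illustrative, not the project's macros):

#include <cstdint>

//Hypothetical stand-ins documenting the observable behavior: write the value
//big-endian, byte by byte, so the destination pointer may be unaligned.
static inline void ehtonsp_sketch(uint8_t *pDest, uint16_t value) {
	pDest[0] = (uint8_t) (value >> 8);
	pDest[1] = (uint8_t) (value & 0xff);
}

static inline void ehtonlp_sketch(uint8_t *pDest, uint32_t value) {
	pDest[0] = (uint8_t) (value >> 24);
	pDest[1] = (uint8_t) ((value >> 16) & 0xff);
	pDest[2] = (uint8_t) ((value >> 8) & 0xff);
	pDest[3] = (uint8_t) (value & 0xff);
}

The read-side counterparts (ENTOHSP, ENTOHLP, ENTOHLLP) reverse the operation, which is exactly what the test in Example 4 checks offset by offset.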
Example 2
OutboundConnectivity::OutboundConnectivity(bool forceTcp, RTSPProtocol *pRTSPProtocol)
: BaseConnectivity() {
	_forceTcp = forceTcp;
	_pRTSPProtocol = pRTSPProtocol;

	_pOutStream = NULL;

	memset(&_dataMessage, 0, sizeof (_dataMessage));
	_dataMessage.MSGHDR_MSG_IOV = new IOVEC[1];
	_dataMessage.MSGHDR_MSG_IOVLEN = 1;
	_dataMessage.MSGHDR_MSG_NAMELEN = sizeof (sockaddr_in);

	memset(&_rtcpMessage, 0, sizeof (_rtcpMessage));
	_rtcpMessage.MSGHDR_MSG_IOV = new IOVEC[1];
	_rtcpMessage.MSGHDR_MSG_IOVLEN = 1;
	_rtcpMessage.MSGHDR_MSG_NAMELEN = sizeof (sockaddr_in);
	_rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_LEN = 28;
	_rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE = new IOVEC_IOV_BASE_TYPE[_rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_LEN];
	((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[0] = 0x80; //V,P,RC
	((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[1] = 0xc8; //PT=SR=200
	EHTONSP(((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 2, 6); //length
	EHTONLP(((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 4, 0); //SSRC
	_pRTCPNTP = ((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 8;
	_pRTCPRTP = ((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 16;
	_pRTCPSPC = ((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 20;
	_pRTCPSOC = ((uint8_t *) _rtcpMessage.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 24;

	_hasVideo = false;
	_videoDataFd = (SOCKET) (-1);
	_videoDataPort = 0;
	_videoRTCPFd = (SOCKET) (-1);
	_videoRTCPPort = 0;
	_pVideoNATData = NULL;
	_pVideoNATRTCP = NULL;

	_hasAudio = false;
	_audioDataFd = (SOCKET) (-1);
	_audioDataPort = 0;
	_audioRTCPFd = (SOCKET) (-1);
	_audioRTCPPort = 0;
	_pAudioNATData = NULL;
	_pAudioNATRTCP = NULL;

	_startupTime = (uint64_t) time(NULL);

	_amountSent = 0;
}
Example 3
OutboundConnectivity::OutboundConnectivity()
: BaseConnectivity() {
	_pOutStream = NULL;

	memset(&_dataMessage, 0, sizeof (_dataMessage));
	_dataMessage.msg_iov = new iovec[1];
	_dataMessage.msg_iovlen = 1;
	_dataMessage.msg_namelen = sizeof (sockaddr_in);

	memset(&_rtcpMessage, 0, sizeof (_rtcpMessage));
	_rtcpMessage.msg_iov = new iovec[1];
	_rtcpMessage.msg_iovlen = 1;
	_rtcpMessage.msg_namelen = sizeof (sockaddr_in);
	_rtcpMessage.msg_iov[0].iov_len = 28;
	_rtcpMessage.msg_iov[0].iov_base = new uint8_t[_rtcpMessage.msg_iov[0].iov_len];
	((uint8_t *) _rtcpMessage.msg_iov[0].iov_base)[0] = 0x80; //V,P,RC
	((uint8_t *) _rtcpMessage.msg_iov[0].iov_base)[1] = 0xc8; //PT=SR=200
	EHTONSP(((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 2, 6); //length
	EHTONLP(((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 4, 0); //SSRC
	_pRTCPNTP = ((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 8;
	_pRTCPRTP = ((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 16;
	_pRTCPSPC = ((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 20;
	_pRTCPSOC = ((uint8_t *) _rtcpMessage.msg_iov[0].iov_base) + 24;

	_hasAudio = false;
	_hasVideo = false;
	_videoDataFd = -1;
	_videoDataPort = 0;
	_videoRTCPFd = -1;
	_videoRTCPPort = 0;
	_audioDataFd = -1;
	_audioDataPort = 0;
	_audioRTCPFd = -1;
	_audioRTCPPort = 0;

	_startupTime = (uint64_t) time(NULL);
}
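Both constructor variants (Examples 2 and 3) prebuild a 28-byte RTCP Sender Report skeleton and cache raw pointers into it: SSRC at offset 4, NTP timestamp at offset 8, RTP timestamp at 16, sender packet count at 20 and sender octet count at 24, with the length field set to 6 because an RTCP length counts 32-bit words minus one. A hedged sketch of how such a skeleton could be refreshed before each send, assuming the EHTONLP/EHTONLLP put macros used throughout these listings are in scope (the helper name and parameters are illustrative, not taken from the codebase):

//Illustrative only: patch the variable fields of the prebuilt sender report.
//The four pointers correspond to _pRTCPNTP, _pRTCPRTP, _pRTCPSPC and _pRTCPSOC.
static void UpdateSenderReportSketch(uint8_t *pNTP, uint8_t *pRTP,
		uint8_t *pSPC, uint8_t *pSOC,
		uint64_t ntpTimestamp, uint32_t rtpTimestamp,
		uint32_t packetsSent, uint32_t octetsSent) {
	EHTONLLP(pNTP, ntpTimestamp); //64-bit NTP timestamp, big-endian
	EHTONLP(pRTP, rtpTimestamp); //RTP timestamp in the media clock
	EHTONLP(pSPC, packetsSent); //sender's packet count
	EHTONLP(pSOC, octetsSent); //sender's octet count
}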
Example 4
void CommonTestsSuite::test_Endianess() {
	uint16_t ui16 = 0x0102;
	uint32_t ui32 = 0x01020304;
	uint64_t ui64 = 0x0102030405060708LL;
	double d = 123.456;

	//host to network
	uint8_t *pBuffer = NULL;
	ui16 = EHTONS(ui16);
	pBuffer = (uint8_t *) & ui16;
	TS_ASSERT(pBuffer[0] == 0x01);
	TS_ASSERT(pBuffer[1] == 0x02);

	pBuffer = NULL;
	ui32 = EHTONL(ui32);
	pBuffer = (uint8_t *) & ui32;
	TS_ASSERT(pBuffer[0] == 0x01);
	TS_ASSERT(pBuffer[1] == 0x02);
	TS_ASSERT(pBuffer[2] == 0x03);
	TS_ASSERT(pBuffer[3] == 0x04);

	pBuffer = NULL;
	ui32 = 0x01020304;
	ui32 = EHTONA(ui32);
	pBuffer = (uint8_t *) & ui32;
	TS_ASSERT(pBuffer[0] == 0x02);
	TS_ASSERT(pBuffer[1] == 0x03);
	TS_ASSERT(pBuffer[2] == 0x04);
	TS_ASSERT(pBuffer[3] == 0x01);

	pBuffer = NULL;
	ui64 = EHTONLL(ui64);
	pBuffer = (uint8_t *) & ui64;
	TS_ASSERT(pBuffer[0] == 0x01);
	TS_ASSERT(pBuffer[1] == 0x02);
	TS_ASSERT(pBuffer[2] == 0x03);
	TS_ASSERT(pBuffer[3] == 0x04);
	TS_ASSERT(pBuffer[4] == 0x05);
	TS_ASSERT(pBuffer[5] == 0x06);
	TS_ASSERT(pBuffer[6] == 0x07);
	TS_ASSERT(pBuffer[7] == 0x08);

	pBuffer = NULL;
	EHTOND(d, ui64);
	pBuffer = (uint8_t *) & ui64;
	TS_ASSERT(pBuffer[0] == 0x40);
	TS_ASSERT(pBuffer[1] == 0x5e);
	TS_ASSERT(pBuffer[2] == 0xdd);
	TS_ASSERT(pBuffer[3] == 0x2f);
	TS_ASSERT(pBuffer[4] == 0x1a);
	TS_ASSERT(pBuffer[5] == 0x9f);
	TS_ASSERT(pBuffer[6] == 0xbe);
	TS_ASSERT(pBuffer[7] == 0x77);

	//network to host pointer
	char buffer[] = {
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f,
		0x00, 0x01, 0x02, 0x03,
		0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b,
		0x0c, 0x0d, 0x0e, 0x0f
	};

	ui16 = ENTOHSP(buffer);
	TS_ASSERT(ui16 == 0x0001);
	ui16 = ENTOHSP(buffer + 1);
	TS_ASSERT(ui16 == 0x0102);
	ui16 = ENTOHSP(buffer + 2);
	TS_ASSERT(ui16 == 0x0203);
	ui16 = ENTOHSP(buffer + 3);
	TS_ASSERT(ui16 == 0x0304);
	ui16 = ENTOHSP(buffer + 4);
	TS_ASSERT(ui16 == 0x0405);
	ui16 = ENTOHSP(buffer + 5);
	TS_ASSERT(ui16 == 0x0506);
	ui16 = ENTOHSP(buffer + 6);
	TS_ASSERT(ui16 == 0x0607);
	ui16 = ENTOHSP(buffer + 7);
	TS_ASSERT(ui16 == 0x0708);
	ui16 = ENTOHSP(buffer + 8);
	TS_ASSERT(ui16 == 0x0809);
	ui16 = ENTOHSP(buffer + 9);
	TS_ASSERT(ui16 == 0x090a);
	ui16 = ENTOHSP(buffer + 10);
	TS_ASSERT(ui16 == 0x0a0b);
	ui16 = ENTOHSP(buffer + 11);
	TS_ASSERT(ui16 == 0x0b0c);
	ui16 = ENTOHSP(buffer + 12);
	TS_ASSERT(ui16 == 0x0c0d);
	ui16 = ENTOHSP(buffer + 13);
	TS_ASSERT(ui16 == 0x0d0e);
	ui16 = ENTOHSP(buffer + 14);
	TS_ASSERT(ui16 == 0x0e0f);

	ui32 = ENTOHLP(buffer);
	TS_ASSERT(ui32 == 0x00010203);
	ui32 = ENTOHLP(buffer + 1);
	TS_ASSERT(ui32 == 0x01020304);
	ui32 = ENTOHLP(buffer + 2);
	TS_ASSERT(ui32 == 0x02030405);
	ui32 = ENTOHLP(buffer + 3);
	TS_ASSERT(ui32 == 0x03040506);
	ui32 = ENTOHLP(buffer + 4);
	TS_ASSERT(ui32 == 0x04050607);
	ui32 = ENTOHLP(buffer + 5);
	TS_ASSERT(ui32 == 0x05060708);
	ui32 = ENTOHLP(buffer + 6);
	TS_ASSERT(ui32 == 0x06070809);
	ui32 = ENTOHLP(buffer + 7);
	TS_ASSERT(ui32 == 0x0708090a);
	ui32 = ENTOHLP(buffer + 8);
	TS_ASSERT(ui32 == 0x08090a0b);
	ui32 = ENTOHLP(buffer + 9);
	TS_ASSERT(ui32 == 0x090a0b0c);
	ui32 = ENTOHLP(buffer + 10);
	TS_ASSERT(ui32 == 0x0a0b0c0d);
	ui32 = ENTOHLP(buffer + 11);
	TS_ASSERT(ui32 == 0x0b0c0d0e);
	ui32 = ENTOHLP(buffer + 12);
	TS_ASSERT(ui32 == 0x0c0d0e0f);

	ui32 = ENTOHAP(buffer);
	TS_ASSERT(ui32 == 0x03000102);
	ui32 = ENTOHAP(buffer + 1);
	TS_ASSERT(ui32 == 0x04010203);
	ui32 = ENTOHAP(buffer + 2);
	TS_ASSERT(ui32 == 0x05020304);
	ui32 = ENTOHAP(buffer + 3);
	TS_ASSERT(ui32 == 0x06030405);
	ui32 = ENTOHAP(buffer + 4);
	TS_ASSERT(ui32 == 0x07040506);
	ui32 = ENTOHAP(buffer + 5);
	TS_ASSERT(ui32 == 0x08050607);
	ui32 = ENTOHAP(buffer + 6);
	TS_ASSERT(ui32 == 0x09060708);
	ui32 = ENTOHAP(buffer + 7);
	TS_ASSERT(ui32 == 0x0a070809);
	ui32 = ENTOHAP(buffer + 8);
	TS_ASSERT(ui32 == 0x0b08090a);
	ui32 = ENTOHAP(buffer + 9);
	TS_ASSERT(ui32 == 0x0c090a0b);
	ui32 = ENTOHAP(buffer + 10);
	TS_ASSERT(ui32 == 0x0d0a0b0c);
	ui32 = ENTOHAP(buffer + 11);
	TS_ASSERT(ui32 == 0x0e0b0c0d);
	ui32 = ENTOHAP(buffer + 12);
	TS_ASSERT(ui32 == 0x0f0c0d0e);

	ui64 = ENTOHLLP(buffer);
	TS_ASSERT(ui64 == 0x0001020304050607LL);
	ui64 = ENTOHLLP(buffer + 1);
	TS_ASSERT(ui64 == 0x0102030405060708LL);
	ui64 = ENTOHLLP(buffer + 2);
	TS_ASSERT(ui64 == 0x0203040506070809LL);
	ui64 = ENTOHLLP(buffer + 3);
	TS_ASSERT(ui64 == 0x030405060708090aLL);
	ui64 = ENTOHLLP(buffer + 4);
	TS_ASSERT(ui64 == 0x0405060708090a0bLL);
	ui64 = ENTOHLLP(buffer + 5);
	TS_ASSERT(ui64 == 0x05060708090a0b0cLL);
	ui64 = ENTOHLLP(buffer + 6);
	TS_ASSERT(ui64 == 0x060708090a0b0c0dLL);
	ui64 = ENTOHLLP(buffer + 7);
	TS_ASSERT(ui64 == 0x0708090a0b0c0d0eLL);
	ui64 = ENTOHLLP(buffer + 8);
	TS_ASSERT(ui64 == 0x08090a0b0c0d0e0fLL);

	char *pTempBuffer = new char[64 + 8];
	unsigned char rawDouble[] = {0x40, 0x5E, 0xDD, 0x2F, 0x1A, 0x9F, 0xBE, 0x77};
	double tempDoubleVal = 0;
	for (int i = 0; i <= 64; i++) {
		memset(pTempBuffer, 0, i);
		memcpy(pTempBuffer + i, rawDouble, 8);
		memset(pTempBuffer + i + 8, 0, 64 + 8 - i - 8);
		ENTOHDP((pTempBuffer + i), tempDoubleVal);
		TS_ASSERT(d == tempDoubleVal);
	}
	delete[] pTempBuffer;

	//network to host
#ifdef LITTLE_ENDIAN_BYTE_ALIGNED
	TS_ASSERT(ENTOHA(0x01040302) == 0x01020304);
	TS_ASSERT(ENTOHLL(0x0807060504030201LL) == 0x0102030405060708LL);
	ENTOHD(0x77BE9F1A2FDD5E40LL, tempDoubleVal);
	TS_ASSERT(d == tempDoubleVal);
#endif /* LITTLE_ENDIAN_BYTE_ALIGNED */

#ifdef LITTLE_ENDIAN_SHORT_ALIGNED
	TS_ASSERT(ENTOHA(0x01040302) == 0x01020304);
	TS_ASSERT(ENTOHLL(0x0807060504030201LL) == 0x0102030405060708LL);
	ENTOHD(0x77BE9F1A2FDD5E40LL, tempDoubleVal);
	TS_ASSERT(d == tempDoubleVal);
#endif /* LITTLE_ENDIAN_SHORT_ALIGNED */

#ifdef BIG_ENDIAN_BYTE_ALIGNED
	TS_ASSERT(ENTOHA(0x02030401) == 0x01020304);
	TS_ASSERT(ENTOHLL(0x0102030405060708LL) == 0x0102030405060708LL);
#error ENTOHD not tested
#endif /* BIG_ENDIAN_BYTE_ALIGNED */

#ifdef BIG_ENDIAN_SHORT_ALIGNED
#error BIG_ENDIAN_SHORT_ALIGNED set of tests not yet implemented!!! Please take care of this first!!!
#endif /* BIG_ENDIAN_SHORT_ALIGNED */

	//double mirror
	TS_ASSERT(ENTOHS(EHTONS(0x0102)) == 0x0102);
	TS_ASSERT(EHTONS(ENTOHS(0x0102)) == 0x0102);

	TS_ASSERT(ENTOHL(EHTONL(0x01020304)) == 0x01020304);
	TS_ASSERT(EHTONL(ENTOHL(0x01020304)) == 0x01020304);

	TS_ASSERT(ENTOHLL(EHTONLL(0x0102030405060708LL)) == 0x0102030405060708LL);
	TS_ASSERT(EHTONLL(ENTOHLL(0x0102030405060708LL)) == 0x0102030405060708LL);

	//EHTOND/ENTOHD are different: they take two parameters, so there is no double-mirror test for them

	TS_ASSERT(ENTOHA(EHTONA(0x01020304)) == 0x01020304);
	TS_ASSERT(EHTONA(ENTOHA(0x01020304)) == 0x01020304);

	// Buffer Put routines
	for (int i = 0; i < 16; i++) {
		EHTONSP(buffer + i, 0x0102);
		TS_ASSERT(ENTOHSP(buffer + i) == 0x0102);

		EHTONLP(buffer + i, 0x01020304);
		TS_ASSERT(ENTOHLP(buffer + i) == 0x01020304);

		EHTONLLP(buffer + i, 0x0102030405060708LL);
		TS_ASSERT(ENTOHLLP(buffer + i) == 0x0102030405060708LL);

		EHTONDP(d, (buffer + i));
		ENTOHDP(buffer + i, tempDoubleVal);
		TS_ASSERT(d == tempDoubleVal);
	}
}
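One detail worth noting in the test above: the loop over offsets 0 through 64 verifies that ENTOHDP can read a big-endian double from any byte offset, so the macros must not rely on aligned loads. A stand-in sketch that satisfies that requirement (again illustrative, not the project's macro):

#include <cstdint>
#include <cstring>

//Read a big-endian IEEE-754 double from an arbitrarily aligned buffer.
//The 64-bit pattern is assembled byte by byte, so no unaligned load occurs
//and no host-endianness branch is needed.
static inline double entohdp_sketch(const uint8_t *pSrc) {
	uint64_t raw = 0;
	for (int i = 0; i < 8; i++)
		raw = (raw << 8) | pSrc[i];
	double result;
	memcpy(&result, &raw, sizeof (result)); //reinterpret the bit pattern as a double
	return result;
}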
Example 5
bool OutNetRTPUDPH264Stream::PushAudioData(IOBuffer &buffer, double pts, double dts) {
	if (_pAudioInfo == NULL) {
		_stats.audio.droppedPacketsCount++;
		_stats.audio.droppedBytesCount += GETAVAILABLEBYTESCOUNT(buffer);
		return true;
	}
	uint32_t dataLength = GETAVAILABLEBYTESCOUNT(buffer);
	uint8_t *pData = GETIBPOINTER(buffer);
#ifdef MULTIPLE_AUS
	//	FINEST("_auCount: %"PRIu32"; max: %"PRIu32"; have: %"PRIu32"; total: %"PRIu32,
	//			_auCount,
	//			MAX_AUS_COUNT,
	//			12 //RTP header
	//			+ 2 //AU-headers-length
	//			+ _auCount * 2 //n instances of AU-header
	//			+ GETAVAILABLEBYTESCOUNT(_auBuffer), //existing data
	//
	//			12 //RTP header
	//			+ 2 //AU-headers-length
	//			+ _auCount * 2 //n instances of AU-header
	//			+ GETAVAILABLEBYTESCOUNT(_auBuffer) //existing data
	//			+ dataLength
	//			);
	if ((_auCount >= MAX_AUS_COUNT)
			|| ((
			12 //RTP header
			+ 2 //AU-headers-length
			+ _auCount * 2 //n instances of AU-header
			+ GETAVAILABLEBYTESCOUNT(_auBuffer) //existing data
			+ dataLength) //new data about to be appended
			> MAX_RTP_PACKET_SIZE)) {

		//5. counter
		EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 2, _audioCounter);
		_audioCounter++;

		//6. Timestamp
		EHTONLP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 4,
				(uint32_t) (_auPts * _audioSampleRate / 1000.000));

		//7. AU-headers-length
		EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 12, _auCount * 16);

		//7. put the actual buffer
		_audioData.MSGHDR_MSG_IOV[2].IOVEC_IOV_LEN = GETAVAILABLEBYTESCOUNT(_auBuffer);
		_audioData.MSGHDR_MSG_IOV[2].IOVEC_IOV_BASE = (IOVEC_IOV_BASE_TYPE *) (GETIBPOINTER(_auBuffer));

		//8. Send the data
		//FINEST("-----SEND------");
		if (!_pConnectivity->FeedAudioData(_audioData, pts, dts)) {
			FATAL("Unable to feed data");
			return false;
		}

		_auCount = 0;
	}

	//9. reset the pts and au buffer if this is the first AU
	if (_auCount == 0) {
		_auBuffer.IgnoreAll();
		_auPts = pts;
		_audioData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN = 0;
	}

	//10. Store the data
	_auBuffer.ReadFromBuffer(pData, dataLength);

	//11. Store the AU-header
	uint16_t auHeader = (uint16_t) ((dataLength) << 3);
	EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[1].IOVEC_IOV_BASE + 2 * _auCount), auHeader);
	_audioData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN += 2;

	//12. increment the number of AUs
	_auCount++;

	_stats.audio.packetsCount++;
	_stats.audio.bytesCount += GETAVAILABLEBYTESCOUNT(buffer);

	//13. Done
	return true;

#else /* MULTIPLE_AUS */

	/*
	0                   1                   2                   3
	0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   |V=2|P|X|  CC   |M|     PT      |       sequence number         |
   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   |                           timestamp                           |
   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   |           synchronization source (SSRC) identifier            |
   +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
   |            contributing source (CSRC) identifiers             |
   |                             ....                              |
   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
   |AU-headers-length|AU-header|AU-header|      |AU-header|padding|
   |                 |   (1)   |   (2)   |      |   (n)   | bits  |
   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
	 */

	//5. counter
	EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 2, _audioCounter);
	_audioCounter++;

	//6. Timestamp
	EHTONLP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 4,
			BaseConnectivity::ToRTPTS(pts, (uint32_t) _audioSampleRate));

	//7. AU-headers-length
	EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 12, 16);

	//8. AU-header
	uint16_t auHeader = (uint16_t) ((dataLength) << 3);
	EHTONSP(((uint8_t *) _audioData.MSGHDR_MSG_IOV[1].IOVEC_IOV_BASE), auHeader);
	_audioData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN = 2;

	//7. put the actual buffer
	_audioData.MSGHDR_MSG_IOV[2].IOVEC_IOV_LEN = dataLength;
	_audioData.MSGHDR_MSG_IOV[2].IOVEC_IOV_BASE = (IOVEC_IOV_BASE_TYPE *) (pData);

	if (!_pConnectivity->FeedAudioData(_audioData, pts, dts)) {
		FATAL("Unable to feed data");
		return false;
	}

	_stats.audio.packetsCount++;
	_stats.audio.bytesCount += GETAVAILABLEBYTESCOUNT(buffer);

	return true;
#endif /* MULTIPLE_AUS */
}
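The audio path above packs AAC access units according to RFC 3640 (aac-hbr mode): AU-headers-length is expressed in bits (hence _auCount * 16), and each 16-bit AU-header carries the AU size in its upper 13 bits with the 3-bit AU-Index/AU-Index-delta left at zero, which is what (dataLength) << 3 produces. The same packing as a tiny helper (the name BuildAUHeader is illustrative):

#include <cstdint>

//RFC 3640 aac-hbr AU-header: 13-bit AU-size, 3-bit AU-Index or AU-Index-delta.
//auSize must fit in 13 bits (8191); the result is written big-endian right
//after the 16-bit AU-headers-length field.
static inline uint16_t BuildAUHeader(uint32_t auSize, uint8_t auIndexDelta) {
	return (uint16_t) ((auSize << 3) | (auIndexDelta & 0x07));
}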
Example 6
bool OutNetRTPUDPH264Stream::PushVideoData(IOBuffer &buffer, double pts, double dts,
		bool isKeyFrame) {
	if (_pVideoInfo == NULL) {
		_stats.video.droppedPacketsCount++;
		_stats.video.droppedBytesCount += GETAVAILABLEBYTESCOUNT(buffer);
		return true;
	}
	if ((isKeyFrame || _firstVideoFrame)
			&&(_pVideoInfo->_type == CODEC_VIDEO_H264)
			&&(_lastVideoPts != pts)) {
		_firstVideoFrame = false;
		//fix for mode=0 kind of transfer where we get sliced IDRs
		//only send the SPS/PPS on the first IDR slice from the keyframe
		_lastVideoPts = pts;
		VideoCodecInfoH264 *pTemp = (VideoCodecInfoH264 *) _pVideoInfo;
		if (!PushVideoData(pTemp->GetSPSBuffer(), dts, dts, false)) {
			FATAL("Unable to feed SPS");
			return false;
		}
		if (!PushVideoData(pTemp->GetPPSBuffer(), dts, dts, false)) {
			FATAL("Unable to feed PPS");
			return false;
		}
	}

	/*
	 *
	 *  0                   1                   2                   3
	 *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 * |V=2|P|X|  CC   |M|     PT      |       sequence number         |
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 * |                           timestamp                           |
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 * |           synchronization source (SSRC) identifier            |
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 * |F|NRI|   28    |S|E|R|  Type   |                               |
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               |
	 * |                                                               |
	 * |                         FU payload                            |
	 * |                                                               |
	 * |                               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 * |                               :...OPTIONAL RTP padding        |
	 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	 *
	 *
	 *
	 * Version: (2 bits) Indicates the version of the protocol. Current version is 2.[19]
	 *
	 * P (Padding): (1 bit) Used to indicate if there are extra padding bytes at
	 * the end of the RTP packet. A padding might be used to fill up a block of
	 * certain size, for example as required by an encryption algorithm. The
	 * last byte of the padding contains the number of how many padding
	 * bytes were added (including itself).[19][13]:12
	 *
	 * X (Extension): (1 bit) Indicates presence of an Extension header between
	 * standard header and payload data. This is application or profile specific.[19]
	 *
	 * CC (CSRC Count): (4 bits) Contains the number of CSRC identifiers
	 * (defined below) that follow the fixed header.[13]:12
	 *
	 * M (Marker): (1 bit) Used at the application level and defined by a
	 * profile. If it is set, it means that the current data has some special
	 * relevance for the application.[13]:13
	 *
	 * PT (Payload Type): (7 bits) Indicates the format of the payload and
	 * determines its interpretation by the application. This is specified by an
	 * RTP profile. For example, see RTP Profile for audio and video conferences
	 * with minimal control (RFC 3551).[20]
	 *
	 * Sequence Number: (16 bits) The sequence number is incremented by one for
	 * each RTP data packet sent and is to be used by the receiver to detect
	 * packet loss and to restore packet sequence. The RTP does not specify any
	 * action on packet loss; it is left to the application to take appropriate
	 * action. For example, video applications may play the last known frame in
	 * place of the missing frame.[21] According to RFC 3550, the initial value
	 * of the sequence number should be random to make known-plaintext attacks
	 * on encryption more difficult.[13]:13 RTP provides no guarantee of delivery,
	 * but the presence of sequence numbers makes it possible to detect missing
	 * packets.[1]
	 *
	 *
	 * Timestamp: (32 bits) Used to enable the receiver to play back the received
	 * samples at appropriate intervals. When several media streams are present,
	 * the timestamps are independent in each stream, and may not be relied upon
	 * for media synchronization. The granularity of the timing is application
	 * specific. For example, an audio application that samples data once every
	 * 125 µs (8 kHz, a common sample rate in digital telephony) could use that
	 * value as its clock resolution. The clock granularity is one of the details
	 * that is specified in the RTP profile for an application.[21]
	 *
	 *
	 * SSRC: (32 bits) Synchronization source identifier uniquely identifies the
	 * source of a stream. The synchronization sources within the same RTP
	 * session will be unique.[13]:15
	 */

	uint32_t dataLength = GETAVAILABLEBYTESCOUNT(buffer);
	uint8_t *pData = GETIBPOINTER(buffer);

	uint32_t sentAmount = 0;
	uint32_t chunkSize = 0;
	while (sentAmount != dataLength) {
		chunkSize = dataLength - sentAmount;
		chunkSize = chunkSize < _maxRTPPacketSize ? chunkSize : _maxRTPPacketSize;

		//1. Flags
		if (sentAmount + chunkSize == dataLength) {
			((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[1] = 0xe1;
		} else {
			((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[1] = 0x61;
		}

		//2. counter
		EHTONSP(((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 2, _videoCounter);
		_videoCounter++;

		//3. Timestamp
		EHTONLP(((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE) + 4,
				BaseConnectivity::ToRTPTS(pts, (uint32_t) _videoSampleRate));

		if (chunkSize == dataLength) {
			//4. No chunking
			_videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_LEN = 12;
			_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_BASE = (IOVEC_IOV_BASE_TYPE *) pData;
			_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN = chunkSize;
		} else {
			//5. Chunking
			_videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_LEN = 14;

			if (sentAmount == 0) {
				//6. First chunk
				((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[12] = (pData[0]&0xe0) | NALU_TYPE_FUA;
				((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[13] = (pData[0]&0x1f) | 0x80;
				_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_BASE = (IOVEC_IOV_BASE_TYPE *) (pData + 1);
				_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN = chunkSize - 1;
			} else {
				if (sentAmount + chunkSize == dataLength) {
					//7. Last chunk
					((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[13] &= 0x1f;
					((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[13] |= 0x40;
				} else {
					//8. Middle chunk
					((uint8_t *) _videoData.MSGHDR_MSG_IOV[0].IOVEC_IOV_BASE)[13] &= 0x1f;
				}
				_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_BASE = (IOVEC_IOV_BASE_TYPE *) pData;
				_videoData.MSGHDR_MSG_IOV[1].IOVEC_IOV_LEN = chunkSize;
			}
		}

		_pConnectivity->FeedVideoData(_videoData, pts, dts);
		sentAmount += chunkSize;
		pData += chunkSize;
	}
	_stats.video.packetsCount++;
	_stats.video.bytesCount += GETAVAILABLEBYTESCOUNT(buffer);
	return true;
}
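When a NAL unit exceeds _maxRTPPacketSize, the loop above fragments it as FU-A (type 28, RFC 6184): the FU indicator keeps the original F/NRI bits with type 28 substituted, and the FU header carries the original NAL type plus the Start and End flags. The two header bytes in isolation (the helper name is illustrative; the bit layout matches the code above):

#include <cstdint>

//Build the FU indicator and FU header for one FU-A fragment of a NAL unit
//whose first byte is nalHeader. start/end mark the first and last fragment.
static inline void BuildFUAHeader(uint8_t nalHeader, bool start, bool end,
		uint8_t &fuIndicator, uint8_t &fuHeader) {
	fuIndicator = (uint8_t) ((nalHeader & 0xe0) | 28); //F|NRI|type = 28 (FU-A)
	fuHeader = (uint8_t) (nalHeader & 0x1f); //original NAL unit type
	if (start)
		fuHeader |= 0x80; //S bit: first fragment
	if (end)
		fuHeader |= 0x40; //E bit: last fragment
}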
Example 7
bool OutNetRTMP4TSStream::FeedVideoData(uint8_t *pData, uint32_t dataLength,
		double absoluteTimestamp) {
	switch (NALU_TYPE(pData[0])) {
		case NALU_TYPE_SPS:
		{
			//1. Prepare the SPS part from video codec
			if (dataLength > 128) {
				FATAL("SPS too big");
				return false;
			}
			memcpy(_pSPSPPS + 6, pData + 1, 3); //profile,profile compat,level
			EHTONSP(_pSPSPPS + 11, (uint16_t) dataLength);
			memcpy(_pSPSPPS + 13, pData, dataLength);
			_PPSStart = 13 + dataLength;
			_spsAvailable = true;
			return true;
		}
		case NALU_TYPE_PPS:
		{
			//2. Prepare the PPS part from video codec
			if (dataLength > 128) {
				FATAL("PPS too big");
				return false;
			}
			if (!_spsAvailable) {
				WARN("No SPS available yet");
				return true;
			}

			_pSPSPPS[_PPSStart] = 1;
			EHTONSP(_pSPSPPS + _PPSStart + 1, (uint16_t) dataLength);
			memcpy(_pSPSPPS + _PPSStart + 1 + 2, pData, dataLength);
			_spsAvailable = false;

			//3. Send the video codec
			if (!BaseOutNetRTMPStream::FeedData(
					_pSPSPPS, //pData
					_PPSStart + 1 + 2 + dataLength, //dataLength
					0, //processedLength
					_PPSStart + 1 + 2 + dataLength, //totalLength
					absoluteTimestamp, //absoluteTimestamp
					false //isAudio
					)) {
				FATAL("Unable to send video codec setup");
				return false;
			}

			_videoCodecSent = true;

			return true;
		}
		case NALU_TYPE_IDR:
		case NALU_TYPE_SLICE:
		{
			//10. Make room for the RTMP header
			_videoBuffer.ReadFromRepeat(0, 9);

			//11. Add the raw data
			_videoBuffer.ReadFromBuffer(pData, dataLength);

			uint8_t *pBuffer = GETIBPOINTER(_videoBuffer);

			//12. Setup the RTMP header
			pBuffer[0] = (NALU_TYPE(pData[0]) == NALU_TYPE_IDR) ? 0x17 : 0x27;
			pBuffer[1] = 0x01;
			pBuffer[2] = pBuffer[3] = pBuffer[4] = 0;
			EHTONLP(pBuffer + 5, dataLength); //----MARKED-LONG---

			//13. Send it
			if (!BaseOutNetRTMPStream::FeedData(
					pBuffer, //pData
					dataLength + 9, //dataLength
					0, //processedLength
					dataLength + 9, //totalLength
					absoluteTimestamp, //absoluteTimestamp
					false //isAudio
					)) {
				FATAL("Unable to send video");
				return false;
			}

			//14. Cleanup
			_videoBuffer.IgnoreAll();

			return true;
		}
		case NALU_TYPE_PD:
		case NALU_TYPE_SEI:
		case NALU_TYPE_FILL:
		{
			return true;
		}
		default:
		{
			WARN("Ignoring NAL: %s", STR(NALUToString(pData[0])));
			return true;
		}
	}
}
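The 9 bytes reserved with ReadFromRepeat(0, 9) in the IDR/slice branch form the FLV/RTMP AVC video tag header placed in front of the NAL unit: frame type and codec id in the first byte (0x17 for a keyframe, 0x27 for an inter frame), AVCPacketType 1 (NALU), a zero 24-bit composition time offset, and the 4-byte big-endian NAL length. A sketch of just that header, assuming the EHTONLP macro is in scope (the function name is illustrative):

//Fill the 9-byte FLV AVC header that precedes a NAL unit of naluLength bytes.
static void WriteAVCNALUHeaderSketch(uint8_t *pHeader, bool isKeyFrame,
		uint32_t naluLength) {
	pHeader[0] = isKeyFrame ? 0x17 : 0x27; //frame type (1 or 2) << 4 | codec id 7 (AVC)
	pHeader[1] = 0x01; //AVCPacketType: 1 = AVC NALU
	pHeader[2] = pHeader[3] = pHeader[4] = 0; //24-bit composition time offset = 0
	EHTONLP(pHeader + 5, naluLength); //NAL unit length, big-endian
}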