Example #1
Boolean MediaSession::lookupByName(UsageEnvironment& env,
				   char const* instanceName,
				   MediaSession*& resultSession) {
  resultSession = NULL; // unless we succeed

  Medium* medium;
  if (!Medium::lookupByName(env, instanceName, medium)) return False;

  if (!medium->isMediaSession()) {
    env.setResultMsg(instanceName, " is not a 'MediaSession' object");
    return False;
  }

  resultSession = (MediaSession*)medium;
  return True;
}
Example #2
Boolean RTCPInstance::lookupByName(UsageEnvironment& env,
				   char const* instanceName,
				   RTCPInstance*& resultInstance) {
  resultInstance = NULL; // unless we succeed

  Medium* medium;
  if (!Medium::lookupByName(env, instanceName, medium)) return False;

  if (!medium->isRTCPInstance()) {
    env.setResultMsg(instanceName, " is not a RTCP instance");
    return False;
  }

  resultInstance = (RTCPInstance*)medium;
  return True;
}
Example #3
Boolean RTPSource::lookupByName(UsageEnvironment& env,
				char const* sourceName,
				RTPSource*& resultSource) {
  resultSource = NULL; // unless we succeed

  MediaSource* source;
  if (!MediaSource::lookupByName(env, sourceName, source)) return False;

  if (!source->isRTPSource()) {
    env.setResultMsg(sourceName, " is not a RTP source");
    return False;
  }

  resultSource = (RTPSource*)source;
  return True;
}
Example #4
Boolean MediaSource::lookupByName(UsageEnvironment& env,
				  char const* sourceName,
				  MediaSource*& resultSource) {
  resultSource = NULL; // unless we succeed

  Medium* medium;
  if (!Medium::lookupByName(env, sourceName, medium)) return False;

  if (!medium->isSource()) {
    env.setResultMsg(sourceName, " is not a media source");
    return False;
  }

  resultSource = (MediaSource*)medium;
  return True;
}
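All four lookups above share one pattern: resolve the name in the global media table, type-check the result, then downcast. A minimal usage sketch of that pattern, assuming a UsageEnvironment& named env is in scope and that an object was previously registered under the (hypothetical) name "mySession":

// Usage sketch of the lookupByName() pattern:
MediaSession* session = NULL;
if (!MediaSession::lookupByName(env, "mySession", session)) {
  // 'session' is still NULL; the reason is left in the result message:
  env << "Lookup failed: " << env.getResultMsg() << "\n";
} else {
  // 'session' now points to the named MediaSession object
}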
Example #5
int readSocket(UsageEnvironment& env,
				 int socket, unsigned char* buffer, unsigned bufferSize,
				 struct sockaddr_in& fromAddress,
				 struct timeval* timeout) {
  int bytesRead = -1;
  do {
	  int result = blockUntilReadable(env, socket, timeout);
	  if (timeout != NULL && result == 0) {
		  bytesRead = 0;
		  break;
		} else if (result <= 0) {
		  break;
		}

	  SOCKLEN_T addressSize = sizeof fromAddress;
	  bytesRead = recvfrom(socket, (char*)buffer, bufferSize, 0,
			 (struct sockaddr*)&fromAddress,
			 &addressSize);
	  if (bytesRead < 0) {
			//##### HACK to work around bugs in Linux and Windows:
		  int err = env.getErrno();
		  if (err == 111 /*ECONNREFUSED (Linux)*/
#if defined(__WIN32__) || defined(_WIN32)
		// What a piece of crap Windows is.  Sometimes
		// recvfrom() returns -1, but with an 'errno' of 0.
		// This appears not to be a real error; just treat
		// it as if it were a read of zero bytes, and hope
		// we don't have to do anything else to 'reset'
		// this alleged error:
		|| err == 0
#else
		|| err == EAGAIN
#endif
		|| err == 113 /*EHOSTUNREACH (Linux)*/) {
			// Why does Linux return this for datagram sockets?
			fromAddress.sin_addr.s_addr = 0;
			return 0;
		  }
			//##### END HACK
		  socketErr(env, "recvfrom() error: ");
		  break;
		}
	} while (0);

  return bytesRead;
}
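A usage sketch of readSocket(), assuming an already-created datagram socket 'sock'. A NULL timeout blocks indefinitely; a zero return with a non-NULL timeout means the timer expired:

unsigned char buf[2048];
struct sockaddr_in fromAddr;
struct timeval timeout;
timeout.tv_sec = 5; timeout.tv_usec = 0;
int n = readSocket(env, sock, buf, sizeof buf, fromAddr, &timeout);
if (n > 0) {
  // received 'n' bytes from 'fromAddr'
} else if (n == 0) {
  // the 5-second timeout expired (or the ECONNREFUSED/EHOSTUNREACH hack above fired)
} else {
  // hard error; already reported via socketErr()
}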
Example #6
void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL)
{
  // Begin by creating a "RTSPClient" object.  Note that there is a separate "RTSPClient" object for each stream that we wish
  // to receive (even if more than one stream uses the same "rtsp://" URL).
  RTSPClient* rtspClient = ourRTSPClient::createNew(env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
  if (rtspClient == NULL) {
    env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env.getResultMsg() << "\n";
    return;
  }


  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
  // Note that this command - like all RTSP commands - is sent asynchronously; we do not block, waiting for a response.
  // Instead, the following function call returns immediately, and we handle the RTSP response later, from within the event loop:
  rtspClient->sendDescribeCommand(continueAfterDESCRIBE); 
}
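The callback passed to sendDescribeCommand() must match RTSPClient's responseHandler signature, void(RTSPClient*, int resultCode, char* resultString). A sketch of the matching handler, following the shape used by live555's testRTSPClient (error path only; the success path would parse resultString as an SDP description and proceed to SETUP):

void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) {
  UsageEnvironment& env = rtspClient->envir();
  if (resultCode != 0) {
    env << "Failed to get a SDP description: " << resultString << "\n";
    delete[] resultString;
    return;
  }
  // On success, 'resultString' holds the SDP description; parse it,
  // create a MediaSession from it, then set up and play its subsessions.
  delete[] resultString;
}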
Example #7
RTSPServer::RTSPServer(UsageEnvironment& env,
					 int ourSocket, Port ourPort,
					 UserAuthenticationDatabase* authDatabase,
					 unsigned reclamationTestSeconds)
	: Medium(env),
	  fServerSocket(ourSocket), fServerPort(ourPort),
	  fAuthDB(authDatabase), fReclamationTestSeconds(reclamationTestSeconds),
	  fServerMediaSessions(HashTable::create(STRING_HASH_KEYS)) {
#ifdef USE_SIGNALS
	// Ignore the SIGPIPE signal, so that clients on the same host that are killed
	// don't also kill us:
  signal(SIGPIPE, SIG_IGN);
#endif

	// Arrange to handle connections from others:
  env.taskScheduler().turnOnBackgroundReadHandling(fServerSocket,
				(TaskScheduler::BackgroundHandlerProc*)&incomingConnectionHandler,
							 this);
}
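The handler registered above must have the TaskScheduler::BackgroundHandlerProc shape, void(void* clientData, int mask). A sketch of how such a handler typically dispatches back to the server object; the member name incomingConnectionHandler1 follows live555's convention, but treat the body as an assumption (in the real class these are private members):

static void incomingConnectionHandler(void* instance, int /*mask*/) {
  RTSPServer* server = (RTSPServer*)instance;
  // accept() the pending TCP connection on the server socket, then create a
  // per-connection object that parses and answers subsequent RTSP requests:
  server->incomingConnectionHandler1();
}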
Example #8
Boolean socketJoinGroup(UsageEnvironment& env, int socket,
			netAddressBits groupAddress){
  if (!IsMulticastAddress(groupAddress)) return True; // ignore this case

  struct ip_mreq imr;
  imr.imr_multiaddr.s_addr = groupAddress;
  imr.imr_interface.s_addr = ReceivingInterfaceAddr;
  if (setsockopt(socket, IPPROTO_IP, IP_ADD_MEMBERSHIP,
		 (const char*)&imr, sizeof (struct ip_mreq)) < 0) {
#if defined(__WIN32__) || defined(_WIN32)
	  if (env.getErrno() != 0) {
			// That piece-of-shit toy operating system (Windows) sometimes lies
			// about setsockopt() failing!
#endif
		  socketErr(env, "setsockopt(IP_ADD_MEMBERSHIP) error: ");
		  return False;
#if defined(__WIN32__) || defined(_WIN32)
		}
#endif
	}

  return True;
}
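The matching teardown mirrors the join above, using IP_DROP_MEMBERSHIP; a sketch (error reporting trimmed, live555's real socketLeaveGroup is essentially this):

Boolean socketLeaveGroup(UsageEnvironment&, int socket,
			 netAddressBits groupAddress) {
  if (!IsMulticastAddress(groupAddress)) return True; // ignore this case

  struct ip_mreq imr;
  imr.imr_multiaddr.s_addr = groupAddress;
  imr.imr_interface.s_addr = ReceivingInterfaceAddr;
  if (setsockopt(socket, IPPROTO_IP, IP_DROP_MEMBERSHIP,
		 (const char*)&imr, sizeof (struct ip_mreq)) < 0) {
    return False;
  }
  return True;
}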
Example #9
void HTTPSink::appendPortNum(UsageEnvironment& env,
			     Port const& port) {
  char tmpBuf[10]; // large enough to hold a port # string
  sprintf(tmpBuf, " %d", ntohs(port.num()));
  env.appendToResultMsg(tmpBuf);
}
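A usage sketch of appendPortNum(); the calling context is hypothetical (in the real header this helper may not be publicly accessible), but it shows the intended effect of appending " <port>" to the pending result message:

// Hypothetical: report which port an HTTP sink is using.
env.setResultMsg("HTTPSink is using port");
HTTPSink::appendPortNum(env, ourPort); // result message becomes e.g. "HTTPSink is using port 8080"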
Example #10
// -----------------------------------------
//    entry point
// -----------------------------------------
int main(int argc, char** argv) 
{
	// default parameters
	const char *dev_name = "/dev/video0";	
	int format = V4L2_PIX_FMT_H264;
	int width = 640;
	int height = 480;
	int queueSize = 10;
	int fps = 25;
	unsigned short rtspPort = 8554;
	unsigned short rtspOverHTTPPort = 0;
	bool multicast = false;
	int verbose = 0;
	std::string outputFile;
	bool useMmap = true;
	std::string url = "unicast";
	std::string murl = "multicast";
	bool useThread = true;
	std::string maddr;
	bool repeatConfig = true;
	int timeout = 65;

	// decode parameters
	int c = 0;     
	while ((c = getopt (argc, argv, "v::Q:O:" "I:P:T:m:u:M:ct:" "rsfF:W:H:" "h")) != -1)
	{
		switch (c)
		{
			case 'v':	verbose = 1; if (optarg && *optarg=='v') verbose++;  break;
			case 'Q':	queueSize = atoi(optarg); break;
			case 'O':	outputFile = optarg; break;
			// RTSP/RTP
			case 'I':       ReceivingInterfaceAddr = inet_addr(optarg); break;
			case 'P':	rtspPort = atoi(optarg); break;
			case 'T':	rtspOverHTTPPort = atoi(optarg); break;
			case 'u':	url = optarg; break;
			case 'm':	multicast = true; murl = optarg; break;
			case 'M':	multicast = true; maddr = optarg; break;
			case 'c':	repeatConfig = false; break;
			case 't':	timeout = atoi(optarg); break;
			// V4L2
			case 'r':	useMmap =  false; break;
			case 's':	useThread =  false; break;
			case 'f':	format = 0; break;
			case 'F':	fps = atoi(optarg); break;
			case 'W':	width = atoi(optarg); break;
			case 'H':	height = atoi(optarg); break;

			case 'h':
			default:
			{
				std::cout << argv[0] << " [-v[v]] [-Q queueSize] [-O file]"                                        << std::endl;
				std::cout << "\t          [-I interface] [-P RTSP port] [-T RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout]" << std::endl;
				std::cout << "\t          [-r] [-s] [-W width] [-H height] [-F fps] [device] [device]"           << std::endl;
				std::cout << "\t -v       : verbose"                                                               << std::endl;
				std::cout << "\t -vv      : very verbose"                                                          << std::endl;
				std::cout << "\t -Q length: Number of frame queue  (default "<< queueSize << ")"                   << std::endl;
				std::cout << "\t -O output: Copy captured frame to a file or a V4L2 device"                        << std::endl;
				std::cout << "\t RTSP options :"                                                                   << std::endl;
				std::cout << "\t -I addr  : RTSP interface (default autodetect)"                                   << std::endl;
				std::cout << "\t -P port  : RTSP port (default "<< rtspPort << ")"                                 << std::endl;
				std::cout << "\t -T port  : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")"               << std::endl;
				std::cout << "\t -u url   : unicast url (default " << url << ")"                                   << std::endl;
				std::cout << "\t -m url   : multicast url (default " << murl << ")"                                << std::endl;
				std::cout << "\t -M addr  : multicast group:port (default is random_address:20000)"                << std::endl;
				std::cout << "\t -c       : don't repeat config (default repeat config before IDR frame)"          << std::endl;
				std::cout << "\t -t secs  : RTCP expiration timeout (default " << timeout << ")"                   << std::endl;
				std::cout << "\t V4L2 options :"                                                                   << std::endl;
				std::cout << "\t -r       : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
				std::cout << "\t -s       : V4L2 capture using live555 mainloop (default use a reader thread)"     << std::endl;
				std::cout << "\t -f       : V4L2 capture using current format (-W,-H,-F are ignore)"               << std::endl;
				std::cout << "\t -W width : V4L2 capture width (default "<< width << ")"                           << std::endl;
				std::cout << "\t -H height: V4L2 capture height (default "<< height << ")"                         << std::endl;
				std::cout << "\t -F fps   : V4L2 capture framerate (default "<< fps << ")"                         << std::endl;
				std::cout << "\t device   : V4L2 capture device (default "<< dev_name << ")"                       << std::endl;
				exit(0);
			}
		}
	}
	std::list<std::string> devList;
	while (optind<argc)
	{
		devList.push_back(argv[optind]);
		optind++;
	}
	if (devList.empty())
	{
		devList.push_back(dev_name);
	}

	// init logger
	initLogger(verbose);
     
	// create live555 environment
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);	

	// split multicast info
	std::istringstream is(maddr);
	std::string ip;
	getline(is, ip, ':');						
	struct in_addr destinationAddress;
	destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
	if (!ip.empty())
	{
		destinationAddress.s_addr = inet_addr(ip.c_str());
	}						
	
	std::string port;
	getline(is, port, ':');						
	unsigned short rtpPortNum = 20000;
	if (!port.empty())
	{
		rtpPortNum = atoi(port.c_str());
	}	
	unsigned short rtcpPortNum = rtpPortNum+1;
	unsigned char ttl = 5;
	
	// create RTSP server
	RTSPServer* rtspServer = createRTSPServer(*env, rtspPort, rtspOverHTTPPort, timeout);
	if (rtspServer == NULL) 
	{
		LOG(ERROR) << "Failed to create RTSP server: " << env->getResultMsg();
	}
	else
	{			
		int nbSource = 0;
		std::list<std::string>::iterator devIt;
		for ( devIt=devList.begin() ; devIt!=devList.end() ; ++devIt)
		{
			std::string deviceName(*devIt);
			
			// Init capture
			LOG(NOTICE) << "Create V4L2 Source..." << deviceName;
			V4L2DeviceParameters param(deviceName.c_str(),format,width,height,fps, verbose);
			V4l2Capture* videoCapture = V4l2DeviceFactory::CreateVideoCapture(param, useMmap);
			if (videoCapture)
			{
				nbSource++;
				format = videoCapture->getFormat();				
				int outfd = -1;
				
				V4l2Output* out = NULL;
				if (!outputFile.empty())
				{
					V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(), videoCapture->getWidth(), videoCapture->getHeight(), 0,verbose);
					// assign to the outer 'out' (a second declaration here would shadow it, leak the output, and break the cleanup below)
					out = V4l2DeviceFactory::CreateVideoOutput(outparam, useMmap);
					if (out != NULL)
					{
						outfd = out->getFd();
					}
				}
				
				LOG(NOTICE) << "Start V4L2 Capture..." << deviceName;
				if (!videoCapture->captureStart())
				{
					LOG(NOTICE) << "Cannot start V4L2 Capture for:" << deviceName;
				}
				V4L2DeviceSource* videoES = NULL;
				if (format == V4L2_PIX_FMT_H264)
				{
					videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread, repeatConfig);
				}
				else
				{
					videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread);
				}
				if (videoES == NULL) 
				{
					LOG(FATAL) << "Unable to create source for device " << deviceName;
					delete videoCapture;
				}
				else
				{	
					// extend buffer size if needed
					if (videoCapture->getBufferSize() > OutPacketBuffer::maxSize)
					{
						OutPacketBuffer::maxSize = videoCapture->getBufferSize();
					}
					
					StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
					
					std::string baseUrl;
					if (devList.size() > 1)
					{
						baseUrl = basename(deviceName.c_str());
						baseUrl.append("/");
					}
					
					// Create Multicast Session
					if (multicast)						
					{		
						LOG(NOTICE) << "RTP  address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
						LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;
						addSession(rtspServer, baseUrl+murl, MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, replicator,format));					
						
						// increment ports for next sessions
						rtpPortNum+=2;
						rtcpPortNum+=2;
						
					}
					// Create Unicast Session
					addSession(rtspServer, baseUrl+url, UnicastServerMediaSubsession::createNew(*env,replicator,format));
				}	
				if (out)
				{
					delete out;
				}
			}
		}

		if (nbSource>0)
		{
			// main loop
			signal(SIGINT,sighandler);
			env->taskScheduler().doEventLoop(&quit); 
			LOG(NOTICE) << "Exiting....";			
		}
		
		Medium::close(rtspServer);
	}
	
	env->reclaim();
	delete scheduler;	
	
	return 0;
}
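The addSession() helper used above is not shown in this excerpt. A plausible reconstruction, assuming it wraps the usual ServerMediaSession boilerplate (the LOG(NOTICE) call follows the logger already used in this example):

void addSession(RTSPServer* rtspServer, const std::string& sessionName,
                ServerMediaSubsession* subSession)
{
	UsageEnvironment& env(rtspServer->envir());
	ServerMediaSession* sms = ServerMediaSession::createNew(env, sessionName.c_str());
	sms->addSubsession(subSession);
	rtspServer->addServerMediaSession(sms);

	char* url = rtspServer->rtspURL(sms);
	LOG(NOTICE) << "Play this stream using the URL " << url;
	delete[] url;
}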
Example #11
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server.  Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
  portNumBits rtspServerPortNum = 554;  // first try to create the RTSP server on the default port (554)
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {     // if creating on port 554 failed, fall back to port 8554
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t"
       << urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".264\" => a H.264 Video Elementary Stream file\n";
  *env << "\t\".265\" => a H.265 Video Elementary Stream file\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".ac3\" => an AC-3 Audio file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".mkv\" => a Matroska audio+video+(optional)subtitles file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ogg\" or \".ogv\" or \".opus\" => an Ogg audio and/or video file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".vob\" => a VOB (MPEG-2 video with AC-3 audio) file\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "\t\".webm\" => a WebM audio(Vorbis)+video(VP8) file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).

  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
  } else {
    *env << "(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
Example #12
netAddressBits ourIPAddress(UsageEnvironment& env) {
  static netAddressBits ourAddress = 0;
  int sock = -1;
  struct in_addr testAddr;

  if (ourAddress == 0) {
		// We need to find our source address
	  struct sockaddr_in fromAddr;
	  fromAddr.sin_addr.s_addr = 0;

		// Get our address by sending a (0-TTL) multicast packet,
		// receiving it, and looking at the source address used.
		// (This is kinda bogus, but it provides the best guarantee
		// that other nodes will think our address is the same as we do.)
	  do {
		  loopbackWorks = 0; // until we learn otherwise

		  testAddr.s_addr = our_inet_addr("228.67.43.91"); // arbitrary
		  Port testPort(15947); // ditto

		  sock = setupDatagramSocket(env, testPort);
		  if (sock < 0) break;

		  if (!socketJoinGroup(env, sock, testAddr.s_addr)) break;

		  unsigned char testString[] = "hostIdTest";
		  unsigned testStringLength = sizeof testString;

		  if (!writeSocket(env, sock, testAddr, testPort, 0,
					 testString, testStringLength)) break;

		  unsigned char readBuffer[20];
		  struct timeval timeout;
		  timeout.tv_sec = 5;
		  timeout.tv_usec = 0;
		  int bytesRead = readSocket(env, sock,
				 readBuffer, sizeof readBuffer,
				 fromAddr, &timeout);
		  if (bytesRead == 0 // timeout occurred
			  || bytesRead != (int)testStringLength
			  || strncmp((char*)readBuffer, (char*)testString,
					 testStringLength) != 0) {
			break;
		  }

		  loopbackWorks = 1;
		} while (0);

	  if (!loopbackWorks) do {
			// We couldn't find our address using multicast loopback
			// so try instead to look it up directly.
		  char hostname[100];
		  hostname[0] = '\0';
#ifndef CRIS
		  gethostname(hostname, sizeof hostname);
#endif
		  if (hostname[0] == '\0') {
			env.setResultErrMsg("initial gethostname() failed");
			break;
		  }

#if defined(VXWORKS)
#include <hostLib.h>
		  if (ERROR == (ourAddress = hostGetByName(hostname))) break;
		  // Record the result so that the common validity check below also runs on VxWorks:
		  fromAddr.sin_addr.s_addr = ourAddress;
#else
		  struct hostent* hstent
			= (struct hostent*)gethostbyname(hostname);
		  if (hstent == NULL || hstent->h_length != 4) {
			env.setResultErrMsg("initial gethostbyname() failed");
			break;
		  }
		  // Take the first address that's not bad
		  // (This code, like many others, won't handle IPv6)
		  netAddressBits addr = 0;
		  for (unsigned i = 0; ; ++i) {
			char* addrPtr = hstent->h_addr_list[i];
			if (addrPtr == NULL) break;

			netAddressBits a = *(netAddressBits*)addrPtr;
			if (!badAddress(a)) {
			  addr = a;
			  break;
			}
		  }
		  if (addr != 0) {
			fromAddr.sin_addr.s_addr = addr;
		  } else {
			env.setResultMsg("no address");
			break;
		  }
#endif
		} while (0);

		// Make sure we have a good address:
	  netAddressBits from = fromAddr.sin_addr.s_addr;
	  if (badAddress(from)) {
		  char tmp[100];
		  sprintf(tmp,
				"This computer has an invalid IP address: 0x%x",
				(netAddressBits)(ntohl(from)));
		  env.setResultMsg(tmp);
		  from = 0;
		}

	  ourAddress = from;

	  if (sock >= 0) {
		  socketLeaveGroup(env, sock, testAddr.s_addr);
		  closeSocket(sock);
		}

		// Use our newly-discovered IP address, and the current time,
		// to initialize the random number generator's seed:
	  struct timeval timeNow;
	  gettimeofday(&timeNow, NULL);
	  unsigned seed = ourAddress^timeNow.tv_sec^timeNow.tv_usec;
	  our_srandom(seed);
	}
  return ourAddress;
}
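badAddress() is defined elsewhere in that source file; a sketch of the kind of test it performs (rejecting the unassigned, loopback, and broadcast addresses; the exact byte-order handling is an assumption):

static Boolean badAddress(netAddressBits addr) {
  // Reject a few clearly-bogus addresses:
  netAddressBits nAddr = ntohl(addr);
  return (nAddr == 0x7F000001 /* 127.0.0.1 */
	  || nAddr == 0
	  || nAddr == (netAddressBits)(~0) /* broadcast */);
}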
Example #13
void setupDarwinStreaming(UsageEnvironment& env, WISInput& inputDevice) {
  // Create a 'Darwin injector' object:
  injector = DarwinInjector::createNew(env, applicationName);

  // For RTCP:
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen + 1];
  gethostname((char *) CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0';      // just in case

  /******************audio***********************/
  if (audioFormat != AFMT_NONE) {
    // Create the audio source:
    sourceAudio = createAudioSource(env, inputDevice.audioSource());

    if (packageFormat != PFMT_TRANSPORT_STREAM) { // there's a separate RTP stream for audio
      // Create 'groupsocks' for RTP and RTCP.
      // (Note: Because we will actually be streaming through a remote Darwin server,
      // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
      struct in_addr dummyDestAddress;
      dummyDestAddress.s_addr = 0;
      rtpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      rtcpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      
      // Create a RTP sink for the audio stream:
      sinkAudio = createAudioRTPSink(env, rtpGroupsockAudio);

      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(env, rtcpGroupsockAudio,
					  totalSessionBandwidthAudio, CNAME,
					  sinkAudio, NULL /* we're a server */);
          // Note: This starts RTCP running automatically

      // Add these to our 'Darwin injector':
      injector->addStream(sinkAudio, rtcpAudio);
    }
  }
  /******************end audio***********************/

  /******************video***********************/
  if (videoFormat != VFMT_NONE) {
    // Create the video source:
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      MPEG2TransportStreamFromESSource* tsSource
	= MPEG2TransportStreamFromESSource::createNew(env);
      tsSource->addNewVideoSource(inputDevice.videoSource(), 2);
      if (sourceAudio != NULL) tsSource->addNewAudioSource(sourceAudio, 2);
      // Gather the Transport packets into network packet-sized chunks:
      sourceVideo = MPEG2TransportStreamAccumulator::createNew(env, tsSource);
      sourceAudio = NULL;
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sourceVideo = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG4: {
	sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      }
    }

    // Create 'groupsocks' for RTP and RTCP.
    // (Note: Because we will actually be streaming through a remote Darwin server,
    // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
    struct in_addr dummyDestAddress;
    dummyDestAddress.s_addr = 0;
    rtpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
    rtcpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);

    // Create a RTP sink for the video stream:
    unsigned char payloadFormatCode = 97; // if dynamic
    setVideoRTPSinkBufferSize();
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      sinkVideo = SimpleRTPSink::createNew(env, rtpGroupsockVideo,
					   33, 90000, "video", "mp2t",
					   1, True, False/*no 'M' bit*/);
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG4: {
	sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
	break;
      }
      }
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthVideo = (videoBitrate+500)/1000; // in kbps; for RTCP b/w share
    rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
					totalSessionBandwidthVideo, CNAME,
					sinkVideo, NULL /* we're a server */);
        // Note: This starts RTCP running automatically

    // Add these to our 'Darwin injector':
    injector->addStream(sinkVideo, rtcpVideo);
  }
  /******************end video***********************/

  // Next, specify the destination Darwin Streaming Server:
  char const* remoteStreamName = "test.sdp";
  if (!injector->setDestination(remoteDSSNameOrAddress, remoteStreamName,
                                applicationName, "LIVE555 Streaming Media")) {
    env << "Failed to connect to remote Darwin Streaming Server: " << env.getResultMsg() << "\n";
    exit(1);
  }

  env << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
       << "\trtsp://" << remoteDSSNameOrAddress << "/" << remoteStreamName << "\n";

}
Example #14
WAVAudioFileSource::WAVAudioFileSource(UsageEnvironment& env, FILE* fid)
  : AudioInputDevice(env, 0, 0, 0, 0)/* set the real parameters later */,
    fFid(fid), fFidIsSeekable(False), fLastPlayTime(0), fHaveStartedReading(False), fWAVHeaderSize(0), fFileSize(0),
    fScaleFactor(1), fLimitNumBytesToStream(False), fNumBytesToStream(0), fAudioFormat(WA_UNKNOWN) {
  // Check the WAV file header for validity.
  // Note: The following web pages contain info about the WAV format:
  // http://www.ringthis.com/dev/wave_format.htm
  // http://www.lightlink.com/tjweber/StripWav/Canon.html
  // http://www.onicos.com/staff/iz/formats/wav.html

  Boolean success = False; // until we learn otherwise
  do {
    // RIFF Chunk:
    if (nextc != 'R' || nextc != 'I' || nextc != 'F' || nextc != 'F') break;
    if (!skipBytes(fid, 4)) break;
    if (nextc != 'W' || nextc != 'A' || nextc != 'V' || nextc != 'E') break;

    // Skip over any chunk that's not a FORMAT ('fmt ') chunk:
    u_int32_t tmp;
    if (!get4Bytes(fid, tmp)) break;
    while (tmp != 0x20746d66/*'fmt ', little-endian*/) {
      // Skip this chunk:
      u_int32_t chunkLength;
      if (!get4Bytes(fid, chunkLength)) break;
      if (!skipBytes(fid, chunkLength)) break;
      if (!get4Bytes(fid, tmp)) break;
    }

    // FORMAT Chunk (the 4-byte header code has already been parsed):
    unsigned formatLength;
    if (!get4Bytes(fid, formatLength)) break;
    unsigned short audioFormat;
    if (!get2Bytes(fid, audioFormat)) break;

    fAudioFormat = (unsigned char)audioFormat;
    if (fAudioFormat != WA_PCM && fAudioFormat != WA_PCMA && fAudioFormat != WA_PCMU && fAudioFormat != WA_IMA_ADPCM) {
      // It's a format that we don't (yet) understand
      env.setResultMsg("Audio format is not one that we handle (PCM/PCMU/PCMA or IMA ADPCM)");
      break;
    }
    unsigned short numChannels;
    if (!get2Bytes(fid, numChannels)) break;
    fNumChannels = (unsigned char)numChannels;
    if (fNumChannels < 1 || fNumChannels > 2) { // invalid # channels
      char errMsg[100];
      sprintf(errMsg, "Bad # channels: %d", fNumChannels);
      env.setResultMsg(errMsg);
      break;
    }
    if (!get4Bytes(fid, fSamplingFrequency)) break;
    if (fSamplingFrequency == 0) {
      env.setResultMsg("Bad sampling frequency: 0");
      break;
    }
    if (!skipBytes(fid, 6)) break; // "nAvgBytesPerSec" (4 bytes) + "nBlockAlign" (2 bytes)
    unsigned short bitsPerSample;
    if (!get2Bytes(fid, bitsPerSample)) break;
    fBitsPerSample = (unsigned char)bitsPerSample;
    if (fBitsPerSample == 0) {
      env.setResultMsg("Bad bits-per-sample: 0");
      break;
    }
    if (!skipBytes(fid, formatLength - 16)) break;

    // FACT chunk (optional):
    int c = nextc;
    if (c == 'f') {
      if (nextc != 'a' || nextc != 'c' || nextc != 't') break;
      unsigned factLength;
      if (!get4Bytes(fid, factLength)) break;
      if (!skipBytes(fid, factLength)) break;
      c = nextc;
    }

    // EYRE chunk (optional):
    if (c == 'e') {
      if (nextc != 'y' || nextc != 'r' || nextc != 'e') break;
      unsigned eyreLength;
      if (!get4Bytes(fid, eyreLength)) break;
      if (!skipBytes(fid, eyreLength)) break;
      c = nextc;
    }

    // DATA Chunk:
    if (c != 'd' || nextc != 'a' || nextc != 't' || nextc != 'a') break;
    if (!skipBytes(fid, 4)) break;

    // The header is good; the remaining data are the sample bytes.
    fWAVHeaderSize = (unsigned)TellFile64(fid);
    success = True;
  } while (0);

  if (!success) {
    env.setResultMsg("Bad WAV file format");
    // Set "fBitsPerSample" to zero, to indicate failure:
    fBitsPerSample = 0;
    return;
  }

  fPlayTimePerSample = 1e6/(double)fSamplingFrequency;

  // Although PCM is a sample-based format, we group samples into
  // 'frames' for efficient delivery to clients.  Set up our preferred
  // frame size to be close to 20 ms, if possible, but always no greater
  // than 1400 bytes (to ensure that it will fit in a single RTP packet)
  unsigned maxSamplesPerFrame = (1400*8)/(fNumChannels*fBitsPerSample);
  unsigned desiredSamplesPerFrame = (unsigned)(0.02*fSamplingFrequency);
  unsigned samplesPerFrame = desiredSamplesPerFrame < maxSamplesPerFrame ? desiredSamplesPerFrame : maxSamplesPerFrame;
  fPreferredFrameSize = (samplesPerFrame*fNumChannels*fBitsPerSample)/8;

  fFidIsSeekable = FileIsSeekable(fFid);
#ifndef READ_FROM_FILES_SYNCHRONOUSLY
  // Now that we've finished reading the WAV header, all future reads (of audio samples) from the file will be asynchronous:
  makeSocketNonBlocking(fileno(fFid));
#endif
}
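The header parsing above (and in Example #15 below) leans on small file-reading helpers defined earlier in WAVAudioFileSource.cpp. Roughly, as a sketch: nextc really is a macro over fgetc(), the multi-byte reads are little-endian as RIFF requires, and the real skipBytes also copes with non-seekable inputs by reading instead of seeking:

#define nextc fgetc(fid)

static Boolean get4Bytes(FILE* fid, u_int32_t& result) { // little-endian
  unsigned char aucBuffer[4];
  if (fread(aucBuffer, 1, 4, fid) < 4) return False;
  result = (aucBuffer[3]<<24) | (aucBuffer[2]<<16) | (aucBuffer[1]<<8) | aucBuffer[0];
  return True;
}

static Boolean get2Bytes(FILE* fid, u_int16_t& result) { // little-endian
  unsigned char aucBuffer[2];
  if (fread(aucBuffer, 1, 2, fid) < 2) return False;
  result = (aucBuffer[1]<<8) | aucBuffer[0];
  return True;
}

static Boolean skipBytes(FILE* fid, int num) {
  return fseek(fid, num, SEEK_CUR) == 0; // sketch; seekable files only
}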
Example #15
WAVAudioFileSource::WAVAudioFileSource(UsageEnvironment& env, FILE* fid)
  : AudioInputDevice(env, 0, 0, 0, 0)/* set the real parameters later */,
    fFid(fid), fLastPlayTime(0), fWAVHeaderSize(0), fFileSize(0), fScaleFactor(1),
    fLimitNumBytesToStream(False), fNumBytesToStream(0), fAudioFormat(WA_UNKNOWN) {
  // Check the WAV file header for validity.
  // Note: The following web pages contain info about the WAV format:
  // http://www.ringthis.com/dev/wave_format.htm
  // http://www.lightlink.com/tjweber/StripWav/Canon.html
  // http://www.wotsit.org/list.asp?al=W

  Boolean success = False; // until we learn otherwise
  do {
    // RIFF Chunk:
    if (nextc != 'R' || nextc != 'I' || nextc != 'F' || nextc != 'F') break;
    if (!skipBytes(fid, 4)) break;
    if (nextc != 'W' || nextc != 'A' || nextc != 'V' || nextc != 'E') break;

    // FORMAT Chunk:
    if (nextc != 'f' || nextc != 'm' || nextc != 't' || nextc != ' ') break;
    unsigned formatLength;
    if (!get4Bytes(fid, formatLength)) break;
    unsigned short audioFormat;
    if (!get2Bytes(fid, audioFormat)) break;

    fAudioFormat = (unsigned char)audioFormat;
    if (fAudioFormat != WA_PCM && fAudioFormat != WA_PCMA && fAudioFormat != WA_PCMU && fAudioFormat != WA_IMA_ADPCM) {
      // It's a format that we don't (yet) understand
      env.setResultMsg("Audio format is not one that we handle (PCM/PCMU/PCMA or IMA ADPCM)");
      break;
    }
    unsigned short numChannels;
    if (!get2Bytes(fid, numChannels)) break;
    fNumChannels = (unsigned char)numChannels;
    if (fNumChannels < 1 || fNumChannels > 2) { // invalid # channels
      char errMsg[100];
      sprintf(errMsg, "Bad # channels: %d", fNumChannels);
      env.setResultMsg(errMsg);
      break;
    }
    if (!get4Bytes(fid, fSamplingFrequency)) break;
    if (fSamplingFrequency == 0) {
      env.setResultMsg("Bad sampling frequency: 0");
      break;
    }
    if (!skipBytes(fid, 6)) break; // "nAvgBytesPerSec" (4 bytes) + "nBlockAlign" (2 bytes)
    unsigned short bitsPerSample;
    if (!get2Bytes(fid, bitsPerSample)) break;
    fBitsPerSample = (unsigned char)bitsPerSample;
    if (fBitsPerSample == 0) {
      env.setResultMsg("Bad bits-per-sample: 0");
      break;
    }
    if (!skipBytes(fid, formatLength - 16)) break;

    // FACT chunk (optional):
    int c = nextc;
    if (c == 'f') {
      if (nextc != 'a' || nextc != 'c' || nextc != 't') break;
      unsigned factLength;
      if (!get4Bytes(fid, factLength)) break;
      if (!skipBytes(fid, factLength)) break;
      c = nextc;
    }

    // DATA Chunk:
    if (c != 'd' || nextc != 'a' || nextc != 't' || nextc != 'a') break;
    if (!skipBytes(fid, 4)) break;

    // The header is good; the remaining data are the sample bytes.
    fWAVHeaderSize = ftell(fid);
    success = True;
  } while (0);

  if (!success) {
    env.setResultMsg("Bad WAV file format");
    // Set "fBitsPerSample" to zero, to indicate failure:
    fBitsPerSample = 0;
    return;
  }

  fPlayTimePerSample = 1e6/(double)fSamplingFrequency;

  // Although PCM is a sample-based format, we group samples into
  // 'frames' for efficient delivery to clients.  Set up our preferred
  // frame size to be close to 20 ms, if possible, but always no greater
  // than 1400 bytes (to ensure that it will fit in a single RTP packet)
  unsigned maxSamplesPerFrame = (1400*8)/(fNumChannels*fBitsPerSample);
  unsigned desiredSamplesPerFrame = (unsigned)(0.02*fSamplingFrequency);
  unsigned samplesPerFrame = desiredSamplesPerFrame < maxSamplesPerFrame ? desiredSamplesPerFrame : maxSamplesPerFrame;
  fPreferredFrameSize = (samplesPerFrame*fNumChannels*fBitsPerSample)/8;
}
Example #16
Boolean SIPClient::parseSIPURL(UsageEnvironment& env, char const* url,
			       NetAddress& address,
			       portNumBits& portNum) {
  do {
    // Parse the URL as "sip:<username>@<address>:<port>/<etc>"
    // (with ":<port>" and "/<etc>" optional)
    // Also, skip over any "<username>[:<password>]@" preceding <address>
    char const* prefix = "sip:";
    unsigned const prefixLength = 4;
    if (_strncasecmp(url, prefix, prefixLength) != 0) {
      env.setResultMsg("URL is not of the form \"", prefix, "\"");
      break;
    }

    unsigned const parseBufferSize = 100;
    char parseBuffer[parseBufferSize];
    unsigned addressStartIndex = prefixLength;
    while (url[addressStartIndex] != '\0'
	   && url[addressStartIndex++] != '@') {}
    char const* from = &url[addressStartIndex];

    // Skip over any "<username>[:<password>]@"
    char const* from1 = from;
    while (*from1 != '\0' && *from1 != '/') {
      if (*from1 == '@') {
	from = ++from1;
	break;
      }
      ++from1;
    }

    char* to = &parseBuffer[0];
    unsigned i;
    for (i = 0; i < parseBufferSize; ++i) {
      if (*from == '\0' || *from == ':' || *from == '/') {
	// We've completed parsing the address
	*to = '\0';
	break;
      }
      *to++ = *from++;
    }
    if (i == parseBufferSize) {
      env.setResultMsg("URL is too long");
      break;
    }

    NetAddressList addresses(parseBuffer);
    if (addresses.numAddresses() == 0) {
      env.setResultMsg("Failed to find network address for \"",
			   parseBuffer, "\"");
      break;
    }
    address = *(addresses.firstAddress());

    portNum = 5060; // default value
    char nextChar = *from;
    if (nextChar == ':') {
      int portNumInt;
      if (sscanf(++from, "%d", &portNumInt) != 1) {
	env.setResultMsg("No port number follows ':'");
	break;
      }
      if (portNumInt < 1 || portNumInt > 65535) {
	env.setResultMsg("Bad port number");
	break;
      }
      portNum = (portNumBits)portNumInt;
    }

    return True;
  } while (0);

  return False;
}
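A quick usage sketch of the parser above; the URL and calling context are hypothetical, and in the real header this may be a non-public member of SIPClient:

NetAddress address;
portNumBits portNum = 0;
if (parseSIPURL(env, "sip:alice@example.com:5060", address, portNum)) {
  // 'address' holds the resolved peer address; 'portNum' is 5060 here
  // (it would also default to 5060 without the explicit ":5060")
} else {
  env << "Bad SIP URL: " << env.getResultMsg() << "\n";
}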
Example #17
SIPClient::SIPClient(UsageEnvironment& env,
		     unsigned char desiredAudioRTPPayloadFormat,
		     char const* mimeSubtype,
		     int verbosityLevel, char const* applicationName)
  : Medium(env),
    fT1(500000 /* 500 ms */),
    fDesiredAudioRTPPayloadFormat(desiredAudioRTPPayloadFormat),
    fVerbosityLevel(verbosityLevel), fCSeq(0),
    fUserAgentHeaderStr(NULL), fUserAgentHeaderStrLen(0),
    fURL(NULL), fURLSize(0),
    fToTagStr(NULL), fToTagStrSize(0),
    fUserName(NULL), fUserNameSize(0),
    fInviteSDPDescription(NULL), fInviteSDPDescriptionReturned(NULL),
    fInviteCmd(NULL), fInviteCmdSize(0) {
  if (mimeSubtype == NULL) mimeSubtype = "";
  fMIMESubtype = strDup(mimeSubtype);
  fMIMESubtypeSize = strlen(fMIMESubtype);

  if (applicationName == NULL) applicationName = "";
  fApplicationName = strDup(applicationName);
  fApplicationNameSize = strlen(fApplicationName);

  struct in_addr ourAddress;
  ourAddress.s_addr = ourIPAddress(env); // hack
  fOurAddressStr = strDup(AddressString(ourAddress).val());
  fOurAddressStrSize = strlen(fOurAddressStr);

  fOurSocket = new Groupsock(env, ourAddress, 0, 255);
  if (fOurSocket == NULL) {
    env << "ERROR: Failed to create socket for addr "
	<< fOurAddressStr << ": "
	<< env.getResultMsg() << "\n";
  }

  // Now, find out our source port number.  Hack: Do this by first trying to
  // send a 0-length packet, so that the "getSourcePort()" call will work.
  fOurSocket->output(envir(), (unsigned char*)"", 0);
  Port srcPort(0);
  getSourcePort(env, fOurSocket->socketNum(), srcPort);
  if (srcPort.num() != 0) {
    fOurPortNum = ntohs(srcPort.num());
  } else {
    // No luck.  Try again using a default port number:
    fOurPortNum = 5060;
    delete fOurSocket;
    fOurSocket = new Groupsock(env, ourAddress, fOurPortNum, 255);
    if (fOurSocket == NULL) {
      env << "ERROR: Failed to create socket for addr "
	  << fOurAddressStr << ", port "
	  << fOurPortNum << ": "
	  << env.getResultMsg() << "\n";
    }
  }

  // Set the "User-Agent:" header to use in each request:
  char const* const libName = "LIVE555 Streaming Media v";
  char const* const libVersionStr = LIVEMEDIA_LIBRARY_VERSION_STRING;
  char const* libPrefix; char const* libSuffix;
  if (applicationName == NULL || applicationName[0] == '\0') {
    applicationName = libPrefix = libSuffix = "";
  } else {
    libPrefix = " (";
    libSuffix = ")";
  }
  unsigned userAgentNameSize
    = fApplicationNameSize + strlen(libPrefix) + strlen(libName) + strlen(libVersionStr) + strlen(libSuffix) + 1;
  char* userAgentName = new char[userAgentNameSize];
  sprintf(userAgentName, "%s%s%s%s%s",
	  applicationName, libPrefix, libName, libVersionStr, libSuffix);
  setUserAgentString(userAgentName);
  delete[] userAgentName;

  reset();
}
Example #18
netAddressBits ourIPAddress(UsageEnvironment& env) {
  static netAddressBits ourAddress = 0;
  int sock = -1;
  struct in_addr testAddr;

  if (ReceivingInterfaceAddr != INADDR_ANY) {
    // Hack: If we were told to receive on a specific interface address, then 
    // define this to be our ip address:
    ourAddress = ReceivingInterfaceAddr;
  }

  if (ourAddress == 0) {
    // We need to find our source address
    struct sockaddr_in fromAddr;
    fromAddr.sin_addr.s_addr = 0;

    // Get our address by sending a (0-TTL) multicast packet,
    // receiving it, and looking at the source address used.
    // (This is kinda bogus, but it provides the best guarantee
    // that other nodes will think our address is the same as we do.)
    do {
      loopbackWorks = 0; // until we learn otherwise

      testAddr.s_addr = our_inet_addr("228.67.43.91"); // arbitrary
      Port testPort(15947); // ditto

      sock = setupDatagramSocket(env, testPort);
      if (sock < 0) break;

      if (!socketJoinGroup(env, sock, testAddr.s_addr)) break;

      unsigned char testString[] = "hostIdTest";
      unsigned testStringLength = sizeof testString;

      if (!writeSocket(env, sock, testAddr, testPort.num(), 0,
		       testString, testStringLength)) break;

      // Block until the socket is readable (with a 5-second timeout):
      fd_set rd_set;
      FD_ZERO(&rd_set);
      FD_SET((unsigned)sock, &rd_set);
      const unsigned numFds = sock+1;
      struct timeval timeout;
      timeout.tv_sec = 5;
      timeout.tv_usec = 0;
      int result = select(numFds, &rd_set, NULL, NULL, &timeout);
      if (result <= 0) break;

      unsigned char readBuffer[20];
      int bytesRead = readSocket(env, sock,
				 readBuffer, sizeof readBuffer,
				 fromAddr);
      if (bytesRead != (int)testStringLength
	  || strncmp((char*)readBuffer, (char*)testString, testStringLength) != 0) {
	break;
      }

      // We use this packet's source address, if it's good:
      loopbackWorks = !badAddressForUs(fromAddr.sin_addr.s_addr);
    } while (0);

    if (sock >= 0) {
      socketLeaveGroup(env, sock, testAddr.s_addr);
      closeSocket(sock);
    }

    if (!loopbackWorks) do {
      // We couldn't find our address using multicast loopback,
      // so try instead to look it up directly - by first getting our host name, and then resolving this host name
      char hostname[100];
      hostname[0] = '\0';
      int result = gethostname(hostname, sizeof hostname);
      if (result != 0 || hostname[0] == '\0') {
	env.setResultErrMsg("initial gethostname() failed");
	break;
      }

      // Try to resolve "hostname" to an IP address:
      NetAddressList addresses(hostname);
      NetAddressList::Iterator iter(addresses);
      NetAddress const* address;

      // Take the first address that's not bad:
      netAddressBits addr = 0;
      while ((address = iter.nextAddress()) != NULL) {
	netAddressBits a = *(netAddressBits*)(address->data());
	if (!badAddressForUs(a)) {
	  addr = a;
	  break;
	}
      }

      // Assign the address that we found to "fromAddr" (as if the 'loopback' method had worked), to simplify the code below: 
      fromAddr.sin_addr.s_addr = addr;
    } while (0);

    // Make sure we have a good address:
    netAddressBits from = fromAddr.sin_addr.s_addr;
    if (badAddressForUs(from)) {
      char tmp[100];
      sprintf(tmp, "This computer has an invalid IP address: %s", AddressString(from).val());
      env.setResultMsg(tmp);
      from = 0;
    }

    ourAddress = from;

    // Use our newly-discovered IP address, and the current time,
    // to initialize the random number generator's seed:
    struct timeval timeNow;
    gettimeofday(&timeNow, NULL);
    unsigned seed = ourAddress^timeNow.tv_sec^timeNow.tv_usec;
    our_srandom(seed);
  }
  return ourAddress;
}
Example #19
int main(int argc, char** argv) {
  init_signals();
  setpriority(PRIO_PROCESS, 0, 0);
  int IsSilence = 0;
  int svcEnable = 0;
  int cnt=0;
  int activePortCnt=0;
  if( GetSampleRate() == 16000 )
  {
	audioOutputBitrate = 128000;
	audioSamplingFrequency = 16000;
  }else{
	audioOutputBitrate = 64000;
	audioSamplingFrequency = 8000;
  }
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
  int msg_type, video_type;
  APPROInput* MjpegInputDevice = NULL;
  APPROInput* H264InputDevice = NULL;
  APPROInput* Mpeg4InputDevice = NULL;
  static pid_t child[8] = {	// 8 slots: up to 8 forked children when SVC streams are enabled
	-1,-1,-1,-1,-1,-1,-1,-1
  };

  StreamingMode streamingMode = STREAMING_UNICAST;
  netAddressBits multicastAddress = 0;//our_inet_addr("224.1.4.6");
  portNumBits videoRTPPortNum = 0;
  portNumBits audioRTPPortNum = 0;

  IsSilence = 0;
  svcEnable = 0;
  audioType = AUDIO_G711;
  streamingMode = STREAMING_UNICAST;

  for( cnt = 1; cnt < argc ;cnt++ )
  {
	if( strcmp( argv[cnt],"-m" )== 0  )
	{
		streamingMode = STREAMING_MULTICAST_SSM;
	}

	if( strcmp( argv[cnt],"-s" )== 0  )
	{
		IsSilence = 1;
	}

	if( strcmp( argv[cnt],"-a" )== 0  )
	{
		audioType = AUDIO_AAC;
	}

	if( strcmp( argv[cnt],"-v" )== 0  )
	{
		svcEnable = 1;
	}
  }


  child[0] = fork();

  if( child[0] != 0 )
  {
	child[1] = fork();
  }

  if( child[0] != 0 && child[1] != 0 )
  {
	child[2] = fork();
  }

  if( child[0] != 0 && child[1] != 0 && child[2] != 0 )
  {
	child[3] = fork();
  }

  if(svcEnable) {
	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0)
	  {
		child[4] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0)
	  {
		child[5] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0)
	  {
		child[6] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0)
	  {
		child[7] = fork();
	  }
  }

  if( child[0] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE4;
	video_type = VIDEO_TYPE_H264_CIF;
	rtspServerPortNum = 8556;
	H264VideoBitrate = 12000000;
	videoRTPPortNum = 6012;
	audioRTPPortNum = 6014;
  }
  if( child[1] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE3;
	video_type = VIDEO_TYPE_MJPEG;
	rtspServerPortNum = 8555;
	MjpegVideoBitrate = 12000000;
	videoRTPPortNum = 6008;
	audioRTPPortNum = 6010;
  }
  if( child[2] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE;
	video_type = VIDEO_TYPE_MPEG4;
	rtspServerPortNum = 8553;
	Mpeg4VideoBitrate = 12000000;
	videoRTPPortNum = 6000;
	audioRTPPortNum = 6002;
  }
  if( child[3] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE2;
	video_type = VIDEO_TYPE_MPEG4_CIF;
	rtspServerPortNum = 8554;
	Mpeg4VideoBitrate = 12000000;
	videoRTPPortNum = 6004;
	audioRTPPortNum = 6006;
  }

  if(svcEnable) {
	  if( child[4] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE5;
		video_type = VIDEO_TYPE_H264_SVC_30FPS;
		rtspServerPortNum = 8601;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6016;
		audioRTPPortNum = 6018;
	  }
	  if( child[5] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE6;
		video_type = VIDEO_TYPE_H264_SVC_15FPS;
		rtspServerPortNum = 8602;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6020;
		audioRTPPortNum = 6022;
	  }
	  if( child[6] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE7;
		video_type = VIDEO_TYPE_H264_SVC_7FPS;
		rtspServerPortNum = 8603;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6024;
		audioRTPPortNum = 6026;
	  }
	  if( child[7] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE8;
		video_type = VIDEO_TYPE_H264_SVC_3FPS;
		rtspServerPortNum = 8604;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6028;
		audioRTPPortNum = 6030;
	  }
	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0 && child[7] != 0)
	  {
		/* parent, success */
		msg_type = LIVE_MSG_TYPE9;
		video_type = VIDEO_TYPE_H264;
		rtspServerPortNum = 8557;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6032;
		audioRTPPortNum = 6034;
	  }
 }
 else {
  	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0)
	  {
		/* parent, success */
		msg_type = LIVE_MSG_TYPE5;
		video_type = VIDEO_TYPE_H264;
		rtspServerPortNum = 8557;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6032;
		audioRTPPortNum = 6034;
	  }
 }

  videoType = video_type;

  // Objects used for multicast streaming:
  static Groupsock* rtpGroupsockAudio = NULL;
  static Groupsock* rtcpGroupsockAudio = NULL;
  static Groupsock* rtpGroupsockVideo = NULL;
  static Groupsock* rtcpGroupsockVideo = NULL;
  static FramedSource* sourceAudio = NULL;
  static RTPSink* sinkAudio = NULL;
  static RTCPInstance* rtcpAudio = NULL;
  static FramedSource* sourceVideo = NULL;
  static RTPSink* sinkVideo = NULL;
  static RTCPInstance* rtcpVideo = NULL;

  share_memory_init(msg_type);


  *env << "Initializing...\n";


  // Initialize the WIS input device:
  if( video_type == VIDEO_TYPE_MJPEG)
  {
	  MjpegInputDevice = APPROInput::createNew(*env, VIDEO_TYPE_MJPEG);
	  if (MjpegInputDevice == NULL) {
	    err(*env) << "Failed to create MJPEG input device\n";
	    exit(1);
	  }
  }

  if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF || video_type == VIDEO_TYPE_H264_SVC_30FPS ||
		video_type == VIDEO_TYPE_H264_SVC_15FPS || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS)
  {
	  H264InputDevice = APPROInput::createNew(*env, video_type);
	  if (H264InputDevice == NULL) {
	    err(*env) << "Failed to create MJPEG input device\n";
	    exit(1);
	  }
  }

  if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
  {
	  Mpeg4InputDevice = APPROInput::createNew(*env, video_type);
	  if (Mpeg4InputDevice == NULL) {
		err(*env) << "Failed to create MPEG4 input device\n";
		exit(1);
	  }
  }

  // Create the RTSP server:
  RTSPServer* rtspServer = NULL;
  // Normal case: Streaming from a built-in RTSP server:
  rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  *env << "...done initializing\n";

  if( streamingMode == STREAMING_UNICAST )
  {
	  if( video_type == VIDEO_TYPE_MJPEG)
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISJPEGVideoServerMediaSubsession
				 ::createNew(sms->envir(), *MjpegInputDevice, MjpegVideoBitrate));
	    if( IsSilence == 0)
	    {
			sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *MjpegInputDevice));
	    }

	    rtspServer->addServerMediaSession(sms);

	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }

	  if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF || video_type == VIDEO_TYPE_H264_SVC_30FPS ||
			video_type == VIDEO_TYPE_H264_SVC_15FPS || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type ==VIDEO_TYPE_H264_SVC_3FPS)
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISH264VideoServerMediaSubsession
				 ::createNew(sms->envir(), *H264InputDevice, H264VideoBitrate));
	    if( IsSilence == 0)
	    {
	    	sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *H264InputDevice));

	    }
	    rtspServer->addServerMediaSession(sms);

	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }

	    // Create a record describing the media to be streamed:
	  if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISMPEG4VideoServerMediaSubsession
				 ::createNew(sms->envir(), *Mpeg4InputDevice, Mpeg4VideoBitrate));
	    if( IsSilence == 0)
	    {
	    	sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *Mpeg4InputDevice));
	    }

	    rtspServer->addServerMediaSession(sms);


	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }
  }else{


	if (streamingMode == STREAMING_MULTICAST_SSM)
	{
		if (multicastAddress == 0)
			multicastAddress = chooseRandomIPv4SSMAddress(*env);
	} else if (multicastAddress != 0) {
		streamingMode = STREAMING_MULTICAST_ASM;
	}

	struct in_addr dest; dest.s_addr = multicastAddress;
	const unsigned char ttl = 255;

	// For RTCP:
	const unsigned maxCNAMElen = 100;
	unsigned char CNAME[maxCNAMElen + 1];
	gethostname((char *) CNAME, maxCNAMElen);
	CNAME[maxCNAMElen] = '\0';      // just in case

	ServerMediaSession* sms=NULL;

	if( video_type == VIDEO_TYPE_MJPEG)
	{
		sms = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = MjpegInputDevice->audioSource();
		sourceVideo = WISJPEGStreamSource::createNew(MjpegInputDevice->videoSource());
		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		sinkVideo = JPEGVideoRTPSink::createNew(*env, rtpGroupsockVideo);

	}

	if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF ||
		video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS ||
			video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS)
	{
 		sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = H264InputDevice->audioSource();
		sourceVideo = H264VideoStreamFramer::createNew(*env, H264InputDevice->videoSource());

		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		{
			char BuffStr[200];
			extern int GetSprop(void *pBuff, char vType);
			GetSprop(BuffStr,video_type);
			sinkVideo = H264VideoRTPSink::createNew(*env, rtpGroupsockVideo,96, 0x64001F,BuffStr);
		}

	}

	// Create a record describing the media to be streamed:
	if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
	{
		sms = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = Mpeg4InputDevice->audioSource();
		sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(*env, Mpeg4InputDevice->videoSource());

		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		sinkVideo = MPEG4ESVideoRTPSink::createNew(*env, rtpGroupsockVideo,97);

	}
	/* VIDEO channel initialization */
	if(1)
	{
		// Create (and start) a 'RTCP instance' for this RTP sink:
		unsigned totalSessionBandwidthVideo = (Mpeg4VideoBitrate+500)/1000; // in kbps; for RTCP b/w share
		rtcpVideo = RTCPInstance::createNew(*env, rtcpGroupsockVideo,
					totalSessionBandwidthVideo, CNAME,
					sinkVideo, NULL /* we're a server */ ,
					streamingMode == STREAMING_MULTICAST_SSM);
	    // Note: This starts RTCP running automatically
		sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkVideo, rtcpVideo));

		// Start streaming:
		sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
	}
	/* AUDIO channel initialization */
	if( IsSilence == 0)
	{
		// there's a separate RTP stream for audio
		// Create 'groupsocks' for RTP and RTCP:
		const Port rtpPortAudio(audioRTPPortNum);
		const Port rtcpPortAudio(audioRTPPortNum+1);

		rtpGroupsockAudio = new Groupsock(*env, dest, rtpPortAudio, ttl);
		rtcpGroupsockAudio = new Groupsock(*env, dest, rtcpPortAudio, ttl);

		if (streamingMode == STREAMING_MULTICAST_SSM)
		{
			rtpGroupsockAudio->multicastSendOnly();
			rtcpGroupsockAudio->multicastSendOnly();
		}
		if( audioSamplingFrequency == 16000 )
		{

			if( audioType == AUDIO_G711)
			{
				sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 96, audioSamplingFrequency, "audio", "PCMU", 1);
			}
			else
			{
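				// The hex string below is an AAC "AudioSpecificConfig":
				// 5 bits audioObjectType (2 = AAC-LC), 4 bits
				// samplingFrequencyIndex (8 = 16 kHz), 4 bits
				// channelConfiguration (1 = mono), zero-padded to two bytes.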
				char const* encoderConfigStr = "1408"; // (2<<3)|(8>>1) = 0x14; ((8<<7)&0xFF)|(1<<3) = 0x08
				sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
						       96,
						       audioSamplingFrequency,
						       "audio", "AAC-hbr",
						       encoderConfigStr, audioNumChannels);
			}
		}
		else
		{
			if (audioType == AUDIO_G711)
			{
				sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 0, audioSamplingFrequency, "audio", "PCMU", 1);
			}
			else
			{
				char const* encoderConfigStr = "1588"; // (2<<3)|(11>>1) = 0x15; ((11<<7)&0xFF)|(1<<3) = 0x88
				sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
						       96,
						       audioSamplingFrequency,
						       "audio", "AAC-hbr",
						       encoderConfigStr, audioNumChannels);

			}
		}

		// Create (and start) a 'RTCP instance' for this RTP sink:
		unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
		rtcpAudio = RTCPInstance::createNew(*env, rtcpGroupsockAudio,
					  totalSessionBandwidthAudio, CNAME,
					  sinkAudio, NULL /* we're a server */,
					  streamingMode == STREAMING_MULTICAST_SSM);
		// Note: This starts RTCP running automatically
		sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));

		// Start streaming:
		sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
	}

	rtspServer->addServerMediaSession(sms);
	{
		char* url = rtspServer->rtspURL(sms);
		*env << "Multicast: play this stream using the URL:\n\t" << url << "\n";
		delete[] url;
	}
  }


  // Begin the LIVE555 event loop:
  env->taskScheduler().doEventLoop(&watchVariable); // does not return


  if (streamingMode != STREAMING_UNICAST)
  {
	Medium::close(rtcpAudio);
	Medium::close(sinkAudio);
	Medium::close(sourceAudio);
	delete rtpGroupsockAudio;
	delete rtcpGroupsockAudio;

	Medium::close(rtcpVideo);
	Medium::close(sinkVideo);
	Medium::close(sourceVideo);
	delete rtpGroupsockVideo;
	delete rtcpGroupsockVideo;

  }

  Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s
  if( MjpegInputDevice != NULL )
  {
	Medium::close(MjpegInputDevice);
  }

  if( H264InputDevice != NULL )
  {
	Medium::close(H264InputDevice);
  }

  if( Mpeg4InputDevice != NULL )
  {
	Medium::close(Mpeg4InputDevice);
  }

  env->reclaim();

  delete scheduler;

  ApproInterfaceExit();

  return 0; // only to prevent compiler warning

}
Example #20
0
extern "C" demuxer_t* demux_open_rtp(demuxer_t* demuxer) {
  struct MPOpts *opts = demuxer->opts;
  Boolean success = False;
  do {
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    if (scheduler == NULL) break;
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
    if (env == NULL) break;

    RTSPClient* rtspClient = NULL;
    SIPClient* sipClient = NULL;

    if (demuxer == NULL || demuxer->stream == NULL) break;  // shouldn't happen
    demuxer->stream->eof = 0; // just in case

    // Look at the stream's 'priv' field to see if we were initiated
    // via a SDP description:
    char* sdpDescription = (char*)(demuxer->stream->priv);
    if (sdpDescription == NULL) {
      // We weren't given a SDP description directly, so assume that
      // we were given a RTSP or SIP URL:
      char const* protocol = demuxer->stream->streaming_ctrl->url->protocol;
      char const* url = demuxer->stream->streaming_ctrl->url->url;
      extern int verbose;
      if (strcmp(protocol, "rtsp") == 0) {
	rtspClient = RTSPClient::createNew(*env, verbose, "MPlayer");
	if (rtspClient == NULL) {
	  fprintf(stderr, "Failed to create RTSP client: %s\n",
		  env->getResultMsg());
	  break;
	}
	sdpDescription = openURL_rtsp(rtspClient, url);
      } else { // SIP
	unsigned char desiredAudioType = 0; // PCMU (use 3 for GSM)
	sipClient = SIPClient::createNew(*env, desiredAudioType, NULL,
					 verbose, "MPlayer");
	if (sipClient == NULL) {
	  fprintf(stderr, "Failed to create SIP client: %s\n",
		  env->getResultMsg());
	  break;
	}
	sipClient->setClientStartPortNum(8000);
	sdpDescription = openURL_sip(sipClient, url);
      }

      if (sdpDescription == NULL) {
	fprintf(stderr, "Failed to get a SDP description from URL \"%s\": %s\n",
		url, env->getResultMsg());
	break;
      }
    }

    // Now that we have a SDP description, create a MediaSession from it:
    MediaSession* mediaSession = MediaSession::createNew(*env, sdpDescription);
    if (mediaSession == NULL) break;


    // Create a 'RTPState' structure containing the state that we just created,
    // and store it in the demuxer's 'priv' field, for future reference:
    RTPState* rtpState = new RTPState;
    rtpState->sdpDescription = sdpDescription;
    rtpState->rtspClient = rtspClient;
    rtpState->sipClient = sipClient;
    rtpState->mediaSession = mediaSession;
    rtpState->audioBufferQueue = rtpState->videoBufferQueue = NULL;
    rtpState->flags = 0;
    rtpState->firstSyncTime.tv_sec = rtpState->firstSyncTime.tv_usec = 0;
    demuxer->priv = rtpState;

    int audiofound = 0, videofound = 0;
    // Create RTP receivers (sources) for each subsession:
    MediaSubsessionIterator iter(*mediaSession);
    MediaSubsession* subsession;
    unsigned desiredReceiveBufferSize;
    while ((subsession = iter.next()) != NULL) {
      // Ignore any subsession that's not audio or video:
      if (strcmp(subsession->mediumName(), "audio") == 0) {
	if (audiofound) {
	  fprintf(stderr, "Additional subsession \"audio/%s\" skipped\n", subsession->codecName());
	  continue;
	}
	desiredReceiveBufferSize = 100000;
      } else if (strcmp(subsession->mediumName(), "video") == 0) {
	if (videofound) {
	  fprintf(stderr, "Additional subsession \"video/%s\" skipped\n", subsession->codecName());
	  continue;
	}
	desiredReceiveBufferSize = 2000000;
      } else {
	continue;
      }

      if (rtsp_port)
	subsession->setClientPortNum(rtsp_port);

      if (!subsession->initiate()) {
	fprintf(stderr, "Failed to initiate \"%s/%s\" RTP subsession: %s\n", subsession->mediumName(), subsession->codecName(), env->getResultMsg());
      } else {
	fprintf(stderr, "Initiated \"%s/%s\" RTP subsession on port %d\n", subsession->mediumName(), subsession->codecName(), subsession->clientPortNum());

	// Set the OS's socket receive buffer sufficiently large to avoid
	// incoming packets getting dropped between successive reads from this
	// subsession's demuxer.  Depending on the bitrate(s) that you expect,
	// you may wish to tweak the "desiredReceiveBufferSize" values above.
	int rtpSocketNum = subsession->rtpSource()->RTPgs()->socketNum();
	int receiveBufferSize
	  = increaseReceiveBufferTo(*env, rtpSocketNum,
				    desiredReceiveBufferSize);
	if (verbose > 0) {
	  fprintf(stderr, "Increased %s socket receive buffer to %d bytes \n",
		  subsession->mediumName(), receiveBufferSize);
	}

	if (rtspClient != NULL) {
	  // Issue a RTSP "SETUP" command on the chosen subsession:
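	  // (In this old synchronous "RTSPClient" API, the 2nd argument is
	  // "streamOutgoing" - False here, since we receive - and the 3rd
	  // requests RTP-over-TCP when "rtsp_transport_tcp" is set.)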
	  if (!rtspClient->setupMediaSubsession(*subsession, False,
						rtsp_transport_tcp)) break;
	  if (!strcmp(subsession->mediumName(), "audio"))
	    audiofound = 1;
	  if (!strcmp(subsession->mediumName(), "video"))
	    videofound = 1;
	}
      }
    }

    if (rtspClient != NULL) {
      // Issue a RTSP aggregate "PLAY" command on the whole session:
      if (!rtspClient->playMediaSession(*mediaSession)) break;
    } else if (sipClient != NULL) {
      sipClient->sendACK(); // to start the stream flowing
    }

    // Now that the session is ready to be read, do additional
    // MPlayer codec-specific initialization on each subsession:
    iter.reset();
    while ((subsession = iter.next()) != NULL) {
      if (subsession->readSource() == NULL) continue; // not reading this

      unsigned flags = 0;
      if (strcmp(subsession->mediumName(), "audio") == 0) {
	rtpState->audioBufferQueue
	  = new ReadBufferQueue(subsession, demuxer, "audio");
	rtpState->audioBufferQueue->otherQueue = &(rtpState->videoBufferQueue);
	rtpCodecInitialize_audio(demuxer, subsession, flags);
      } else if (strcmp(subsession->mediumName(), "video") == 0) {
	rtpState->videoBufferQueue
	  = new ReadBufferQueue(subsession, demuxer, "video");
	rtpState->videoBufferQueue->otherQueue = &(rtpState->audioBufferQueue);
	rtpCodecInitialize_video(demuxer, subsession, flags);
      }
      rtpState->flags |= flags;
    }
    success = True;
  } while (0);
  if (!success) return NULL; // an error occurred

  // Hack: If audio and video are demuxed together on a single RTP stream,
  // then create a new "demuxer_t" structure to allow the higher-level
  // code to recognize this:
  if (demux_is_multiplexed_rtp_stream(demuxer)) {
    stream_t* s = new_ds_stream(demuxer->video);
    demuxer_t* od = demux_open(opts, s, DEMUXER_TYPE_UNKNOWN,
			       opts->audio_id, opts->video_id, opts->sub_id,
                               NULL);
    demuxer = new_demuxers_demuxer(od, od, od);
  }

  return demuxer;
}
Example #21
0
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server.  Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
#ifdef VANLINK_DVR_RTSP_PLAYBACK
  portNumBits rtspServerPortNum = 654; // non-default port; added by sxh for the DVR RTSP playback build
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8654;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#else
  portNumBits rtspServerPortNum = 554;
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#endif

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t"
       << urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

#if 0 // RTSP-over-HTTP tunneling is not yet working
  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port number (8000).
  RTSPOverHTTPServer* rtspOverHTTPServer;
  portNumBits httpServerPortNum = 80;
  rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  if (rtspOverHTTPServer == NULL) {
    httpServerPortNum = 8000;
    rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  }
  if (rtspOverHTTPServer == NULL) {
    *env << "(No server for RTSP-over-HTTP tunneling was created.)\n";
  } else {
    *env << "(We use port " << httpServerPortNum << " for RTSP-over-HTTP tunneling.)\n";
  }
#endif

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
Example #22
0
static void socketErr(UsageEnvironment& env, char const* errorMsg) {
	env.setResultErrMsg(errorMsg);
}
Example #23
0
// -----------------------------------------
//    entry point
// -----------------------------------------
int main(int argc, char** argv) 
{
	// default parameters
	const char *dev_name = "/dev/video0";	
	int format = V4L2_PIX_FMT_H264;
	int width = 640;
	int height = 480;
	int queueSize = 10;
	int fps = 25;
	unsigned short rtpPortNum = 20000;
	unsigned short rtcpPortNum = rtpPortNum+1;
	unsigned char ttl = 5;
	struct in_addr destinationAddress;
	unsigned short rtspPort = 8554;
	unsigned short rtspOverHTTPPort = 0;
	bool multicast = false;
	int verbose = 0;
	std::string outputFile;
	bool useMmap = false;

	// decode parameters
	int c = 0;     
	while ((c = getopt (argc, argv, "hW:H:Q:P:F:v::O:T:mM")) != -1)
	{
		switch (c)
		{
			case 'O':	outputFile = optarg; break;
			case 'v':	verbose = 1; if (optarg && *optarg=='v') verbose++;  break;
			case 'm':	multicast = true; break;
			case 'W':	width = atoi(optarg); break;
			case 'H':	height = atoi(optarg); break;
			case 'Q':	queueSize = atoi(optarg); break;
			case 'P':	rtspPort = atoi(optarg); break;
			case 'T':	rtspOverHTTPPort = atoi(optarg); break;
			case 'F':	fps = atoi(optarg); break;
			case 'M':	useMmap = true; break;
			case 'h':
			{
				std::cout << argv[0] << " [-v[v]] [-m] [-P RTSP port] [-T RTSP/HTTP port] [-Q queueSize] [-M] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
				std::cout << "\t -v       : verbose" << std::endl;
				std::cout << "\t -vv      : very verbose" << std::endl;
				std::cout << "\t -Q length: Size of the frame queue (default "<< queueSize << ")" << std::endl;
				std::cout << "\t -O file  : Dump capture to a file" << std::endl;
				std::cout << "\t RTSP options :" << std::endl;
				std::cout << "\t -m       : Enable multicast output" << std::endl;
				std::cout << "\t -P port  : RTSP port (default "<< rtspPort << ")" << std::endl;
				std::cout << "\t -T port  : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
				std::cout << "\t V4L2 options :" << std::endl;
				std::cout << "\t -M       : V4L2 capture using memory mapped buffers (default use read interface)" << std::endl;
				std::cout << "\t -F fps   : V4L2 capture framerate (default "<< fps << ")" << std::endl;
				std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
				std::cout << "\t -H height: V4L2 capture height (default "<< height << ")" << std::endl;
				std::cout << "\t device   : V4L2 capture device (default "<< dev_name << ")" << std::endl;
				exit(0);
			}
		}
	}
	if (optind<argc)
	{
		dev_name = argv[optind];
	}
     
	// create live555 environment
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);	
	
	// create RTSP server
	RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
	if (rtspServer == NULL) 
	{
		*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
	}
	else
	{
		// set http tunneling
		if (rtspOverHTTPPort)
		{
			rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
		}
		
		// Init capture
		*env << "Create V4L2 Source..." << dev_name << "\n";
		V4L2DeviceParameters param(dev_name, format, width, height, fps, verbose);
		V4L2Device* videoCapture = NULL;
		if (useMmap)
		{
			videoCapture = V4L2MMAPDeviceSource::createNew(param);
		}
		else
		{
			videoCapture = V4L2READDeviceSource::createNew(param);
		}
		V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outputFile, queueSize, verbose);
		if (videoES == NULL) 
		{
			*env << "Unable to create source for device " << dev_name << "\n";
		}
		else
		{
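			// chooseRandomIPv4SSMAddress() picks a random address from the
			// source-specific multicast block (232.0.0.0/8), and
			// OutPacketBuffer::maxSize must be large enough for the biggest
			// encoded frame the capture can deliver, or frames get truncated.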
			destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);	
			OutPacketBuffer::maxSize = videoCapture->getBufferSize();
			StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
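			// (The StreamReplicator lets the unicast and multicast subsessions
			// created below share this single capture source.)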

			// Create Server Multicast Session
			if (multicast)
			{
				addSession(rtspServer, "multicast", MulticastServerMediaSubsession::createNew(*env, destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, 96, replicator, format));
			}
			
			// Create Server Unicast Session
			addSession(rtspServer, "unicast", UnicastServerMediaSubsession::createNew(*env, replicator, format));

			// main loop
			signal(SIGINT,sighandler);
			env->taskScheduler().doEventLoop(&quit); 
			*env << "Exiting..\n";			
		}
		
		Medium::close(videoES);
		delete videoCapture;
		Medium::close(rtspServer);
	}
	
	env->reclaim();
	delete scheduler;	
	
	return 0;
}
Example #24
0
static void printErr(UsageEnvironment& env, char const* str = NULL) {
  if (str != NULL) err(env) << str;
  env << ": " << strerror(env.getErrno()) << "\n";
}