RTPSink* WISJPEGVideoServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock,
		   unsigned char /*rtpPayloadTypeIfDynamic*/,
		   FramedSource* /*inputSource*/) {
  setVideoRTPSinkBufferSize();
  return JPEGVideoRTPSink::createNew(envir(), rtpGroupsock);
}
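
// (JPEGVideoRTPSink uses the static RTP payload type 26 -- JPEG/90000, RFC 2435 --
// which is why the "rtpPayloadTypeIfDynamic" parameter is ignored above.)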
RTPSink* WISPCMAudioServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
		   FramedSource* /*inputSource*/) {
  setVideoRTPSinkBufferSize(); // (sic -- the same buffer-size helper is used for audio sinks)

  // PCMU has the static payload type 0 only at 8 kHz; a dynamic payload type
  // (96) is used here so that AUDIO_SAMPLE_FREQ can be something else:
  return SimpleRTPSink::createNew(envir(), rtpGroupsock, 96,
				  AUDIO_SAMPLE_FREQ, "audio", "PCMU", AUDIO_NUM_CHANS);
}
RTPSink* WISH264VideoServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock,
		   unsigned char rtpPayloadTypeIfDynamic,
		   FramedSource* /*inputSource*/) {
  setVideoRTPSinkBufferSize();
  //return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  char BuffStr[200];
  extern int GetSprop(void *pBuff, char vType);
  GetSprop(BuffStr, fWISInput.videoType);
  return H264VideoRTPSink::createNew(envir(), rtpGroupsock,
				     96, 0x64001F, BuffStr);
}
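
// (In the H264VideoRTPSink call above: 96 is a dynamic RTP payload type;
// 0x64001F is the profile-level-id -- profile_idc 0x64 = High profile,
// level_idc 0x1F = level 3.1 -- and BuffStr carries the sprop-parameter-sets
// string filled in by the vendor-supplied GetSprop().)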
Example #4
int main(int argc, char** argv) {
  init_signals();
  setpriority(PRIO_PROCESS, 0, 0);
  int IsSilence = 0;
  int svcEnable = 0;
  int cnt=0;
  int activePortCnt=0;
  if( GetSampleRate() == 16000 )
  {
	audioOutputBitrate = 128000;
	audioSamplingFrequency = 16000;
  }else{
	audioOutputBitrate = 64000;
	audioSamplingFrequency = 8000;
  }
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
  int msg_type, video_type;
  APPROInput* MjpegInputDevice = NULL;
  APPROInput* H264InputDevice = NULL;
  APPROInput* Mpeg4InputDevice = NULL;
  static pid_t child[8] = {
	-1, -1, -1, -1, -1, -1, -1, -1
  };	// 8 slots: indices 4..7 are used below when svcEnable is set

  StreamingMode streamingMode = STREAMING_UNICAST;
  netAddressBits multicastAddress = 0;//our_inet_addr("224.1.4.6");
  portNumBits videoRTPPortNum = 0;
  portNumBits audioRTPPortNum = 0;

  IsSilence = 0;
  svcEnable = 0;
  audioType = AUDIO_G711;
  streamingMode = STREAMING_UNICAST;

  for( cnt = 1; cnt < argc; cnt++ )
  {
	if( strcmp( argv[cnt],"-m" )== 0  )
	{
		streamingMode = STREAMING_MULTICAST_SSM;
	}

	if( strcmp( argv[cnt],"-s" )== 0  )
	{
		IsSilence = 1;
	}

	if( strcmp( argv[cnt],"-a" )== 0  )
	{
		audioType = AUDIO_AAC;
	}

	if( strcmp( argv[cnt],"-v" )== 0  )
	{
		svcEnable = 1;
	}
  }

#if 0
  printf("###########IsSilence = %d ################\n",IsSilence);
  printf("###########streamingMode = %d ################\n",streamingMode);
  printf("###########audioType = %d ################\n",audioType);
  printf("###########svcEnable = %d ################\n",svcEnable);
#endif
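
  // Fork one server process per stream: each child falls into exactly one of
  // the "child[i] == 0" blocks below, which selects that process's message
  // type, video type, RTSP server port, and RTP/RTCP port numbers; the
  // original parent takes the final "all non-zero" block.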

  child[0] = fork();

  if( child[0] != 0 )
  {
	child[1] = fork();
  }

  if( child[0] != 0 && child[1] != 0 )
  {
	child[2] = fork();
  }

  if( child[0] != 0 && child[1] != 0 && child[2] != 0 )
  {
	child[3] = fork();
  }

  if(svcEnable) {
	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0)
	  {
		child[4] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0)
	  {
		child[5] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0)
	  {
		child[6] = fork();
	  }

	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0)
	  {
		child[7] = fork();
	  }
  }

  if( child[0] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE4;
	video_type = VIDEO_TYPE_H264_CIF;
	rtspServerPortNum = 8556;
	H264VideoBitrate = 12000000;
	videoRTPPortNum = 6012;
	audioRTPPortNum = 6014;
  }
  if( child[1] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE3;
	video_type = VIDEO_TYPE_MJPEG;
	rtspServerPortNum = 8555;
	MjpegVideoBitrate = 12000000;
	videoRTPPortNum = 6008;
	audioRTPPortNum = 6010;
  }
  if( child[2] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE;
	video_type = VIDEO_TYPE_MPEG4;
	rtspServerPortNum = 8553;
	Mpeg4VideoBitrate = 12000000;
	videoRTPPortNum = 6000;
	audioRTPPortNum = 6002;
  }
  if( child[3] == 0 )
  {
	/* child process */
	msg_type = LIVE_MSG_TYPE2;
	video_type = VIDEO_TYPE_MPEG4_CIF;
	rtspServerPortNum = 8554;
	Mpeg4VideoBitrate = 12000000;
	videoRTPPortNum = 6004;
	audioRTPPortNum = 6006;
  }

  if(svcEnable) {
	  if( child[4] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE5;
		video_type = VIDEO_TYPE_H264_SVC_30FPS;
		rtspServerPortNum = 8601;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6016;
		audioRTPPortNum = 6018;
	  }
	  if( child[5] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE6;
		video_type = VIDEO_TYPE_H264_SVC_15FPS;
		rtspServerPortNum = 8602;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6020;
		audioRTPPortNum = 6022;
	  }
	  if( child[6] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE7;
		video_type = VIDEO_TYPE_H264_SVC_7FPS;
		rtspServerPortNum = 8603;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6024;
		audioRTPPortNum = 6026;
	  }
	  if( child[7] == 0 )
	  {
		/* child process */
		msg_type = LIVE_MSG_TYPE8;
		video_type = VIDEO_TYPE_H264_SVC_3FPS;
		rtspServerPortNum = 8604;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6028;
		audioRTPPortNum = 6030;
	  }
	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0 && child[7] != 0)
	  {
		/* parent process */
		msg_type = LIVE_MSG_TYPE9;
		video_type = VIDEO_TYPE_H264;
		rtspServerPortNum = 8557;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6032;
		audioRTPPortNum = 6034;
	  }
  }
  else {
	  if( child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0)
	  {
		/* parent process */
		msg_type = LIVE_MSG_TYPE5;
		video_type = VIDEO_TYPE_H264;
		rtspServerPortNum = 8557;
		H264VideoBitrate = 12000000;
		videoRTPPortNum = 6032;
		audioRTPPortNum = 6034;
	  }
  }
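
  // Summary of the per-process assignments above (video/audio RTP ports; the
  // matching RTCP ports are RTP+1):
  //   H264 CIF      rtsp 8556   6012 / 6014
  //   MJPEG         rtsp 8555   6008 / 6010
  //   MPEG4         rtsp 8553   6000 / 6002
  //   MPEG4 CIF     rtsp 8554   6004 / 6006
  //   H264          rtsp 8557   6032 / 6034   (parent process)
  //   H264 SVC 30/15/7/3 fps (with -v)   rtsp 8601-8604   6016-6030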

  videoType = video_type;

  // Objects used for multicast streaming:
  static Groupsock* rtpGroupsockAudio = NULL;
  static Groupsock* rtcpGroupsockAudio = NULL;
  static Groupsock* rtpGroupsockVideo = NULL;
  static Groupsock* rtcpGroupsockVideo = NULL;
  static FramedSource* sourceAudio = NULL;
  static RTPSink* sinkAudio = NULL;
  static RTCPInstance* rtcpAudio = NULL;
  static FramedSource* sourceVideo = NULL;
  static RTPSink* sinkVideo = NULL;
  static RTCPInstance* rtcpVideo = NULL;

  share_memory_init(msg_type);

  //init_signals();

  *env << "Initializing...\n";


  // Initialize the WIS input device:
  if( video_type == VIDEO_TYPE_MJPEG)
  {
	  MjpegInputDevice = APPROInput::createNew(*env, VIDEO_TYPE_MJPEG);
	  if (MjpegInputDevice == NULL) {
	    err(*env) << "Failed to create MJPEG input device\n";
	    exit(1);
	  }
  }

  if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF || video_type == VIDEO_TYPE_H264_SVC_30FPS ||
		video_type == VIDEO_TYPE_H264_SVC_15FPS || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS)
  {
	  H264InputDevice = APPROInput::createNew(*env, video_type);
	  if (H264InputDevice == NULL) {
	    err(*env) << "Failed to create MJPEG input device\n";
	    exit(1);
	  }
  }

  if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
  {
	  Mpeg4InputDevice = APPROInput::createNew(*env, video_type);
	  if (Mpeg4InputDevice == NULL) {
		err(*env) << "Failed to create MPEG4 input device\n";
		exit(1);
	  }
  }

  // Create the RTSP server:
  RTSPServer* rtspServer = NULL;
  // Normal case: Streaming from a built-in RTSP server:
  rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  *env << "...done initializing\n";

  if( streamingMode == STREAMING_UNICAST )
  {
	  if( video_type == VIDEO_TYPE_MJPEG)
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISJPEGVideoServerMediaSubsession
				 ::createNew(sms->envir(), *MjpegInputDevice, MjpegVideoBitrate));
	    if( IsSilence == 0)
	    {
			sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *MjpegInputDevice));
	    }

	    rtspServer->addServerMediaSession(sms);

	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }

	  if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF || video_type == VIDEO_TYPE_H264_SVC_30FPS ||
			video_type == VIDEO_TYPE_H264_SVC_15FPS || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type ==VIDEO_TYPE_H264_SVC_3FPS)
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName, streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISH264VideoServerMediaSubsession
				 ::createNew(sms->envir(), *H264InputDevice, H264VideoBitrate));
	    if( IsSilence == 0)
	    {
			sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *H264InputDevice));
	    }
	    rtspServer->addServerMediaSession(sms);

	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }

	    // Create a record describing the media to be streamed:
	  if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
	  {
	    ServerMediaSession* sms
	      = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);
	    sms->addSubsession(WISMPEG4VideoServerMediaSubsession
				 ::createNew(sms->envir(), *Mpeg4InputDevice, Mpeg4VideoBitrate));
	    if( IsSilence == 0)
	    {
			sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *Mpeg4InputDevice));
	    }

	    rtspServer->addServerMediaSession(sms);

	    char *url = rtspServer->rtspURL(sms);
	    *env << "Play this stream using the URL:\n\t" << url << "\n";
	    delete[] url;
	  }
  } else {

	if (streamingMode == STREAMING_MULTICAST_SSM)
	{
		if (multicastAddress == 0)
			multicastAddress = chooseRandomIPv4SSMAddress(*env);
	} else if (multicastAddress != 0) {
		streamingMode = STREAMING_MULTICAST_ASM;
	}

	struct in_addr dest; dest.s_addr = multicastAddress;
	const unsigned char ttl = 255;

	// For RTCP:
	const unsigned maxCNAMElen = 100;
	unsigned char CNAME[maxCNAMElen + 1];
	gethostname((char *) CNAME, maxCNAMElen);
	CNAME[maxCNAMElen] = '\0';      // just in case

	ServerMediaSession* sms=NULL;

	if( video_type == VIDEO_TYPE_MJPEG)
	{
		sms = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = MjpegInputDevice->audioSource();
		sourceVideo = WISJPEGStreamSource::createNew(MjpegInputDevice->videoSource());
		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		sinkVideo = JPEGVideoRTPSink::createNew(*env, rtpGroupsockVideo);

	}

	if( video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF ||
		video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS ||
			video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS)
	{
 		sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = H264InputDevice->audioSource();
		sourceVideo = H264VideoStreamFramer::createNew(*env, H264InputDevice->videoSource());
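		// (H264VideoStreamFramer parses the encoder's Annex-B byte stream into NAL
		// units; contrast MPEG4VideoStreamDiscreteFramer below, which expects one
		// complete frame per delivery.)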

		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		{
			char BuffStr[200];
			extern int GetSprop(void *pBuff, char vType);
			GetSprop(BuffStr,video_type);
			sinkVideo = H264VideoRTPSink::createNew(*env, rtpGroupsockVideo,96, 0x64001F,BuffStr);
		}

	}

	// Create a record describing the media to be streamed:
	if( video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF )
	{
		sms = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName, streamDescription,streamingMode == STREAMING_MULTICAST_SSM);

		sourceAudio = Mpeg4InputDevice->audioSource();
		sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(*env, Mpeg4InputDevice->videoSource());

		// Create 'groupsocks' for RTP and RTCP:
	    const Port rtpPortVideo(videoRTPPortNum);
	    const Port rtcpPortVideo(videoRTPPortNum+1);
	    rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
	    rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
	    if (streamingMode == STREAMING_MULTICAST_SSM) {
	      rtpGroupsockVideo->multicastSendOnly();
	      rtcpGroupsockVideo->multicastSendOnly();
	    }
		setVideoRTPSinkBufferSize();
		sinkVideo = MPEG4ESVideoRTPSink::createNew(*env, rtpGroupsockVideo,97);

	}
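
	// Both multicast streams are sent continuously; the RTSP server merely
	// describes them, so each is wrapped in a PassiveServerMediaSubsession
	// (rather than an OnDemand subsession) below.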
	/* VIDEO channel setup */
	{
		// Create (and start) a 'RTCP instance' for this RTP sink.
		// (Note: "Mpeg4VideoBitrate" is used for the RTCP bandwidth share whatever
		// the video type; all three video bitrates are set to 12 Mbps above.)
		unsigned totalSessionBandwidthVideo = (Mpeg4VideoBitrate+500)/1000; // in kbps; for RTCP b/w share
		rtcpVideo = RTCPInstance::createNew(*env, rtcpGroupsockVideo,
					totalSessionBandwidthVideo, CNAME,
					sinkVideo, NULL /* we're a server */ ,
					streamingMode == STREAMING_MULTICAST_SSM);
	    // Note: This starts RTCP running automatically
		sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkVideo, rtcpVideo));

		// Start streaming:
		sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
	}
	/* AUDIO channel setup */
	if( IsSilence == 0)
	{
		// there's a separate RTP stream for audio
		// Create 'groupsocks' for RTP and RTCP:
		const Port rtpPortAudio(audioRTPPortNum);
		const Port rtcpPortAudio(audioRTPPortNum+1);

		rtpGroupsockAudio = new Groupsock(*env, dest, rtpPortAudio, ttl);
		rtcpGroupsockAudio = new Groupsock(*env, dest, rtcpPortAudio, ttl);

		if (streamingMode == STREAMING_MULTICAST_SSM)
		{
			rtpGroupsockAudio->multicastSendOnly();
			rtcpGroupsockAudio->multicastSendOnly();
		}
		if( audioSamplingFrequency == 16000 )
		{

			if( audioType == AUDIO_G711)
			{
				sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 96, audioSamplingFrequency, "audio", "PCMU", 1);
			}
			else
			{
				char const* encoderConfigStr = "1408";// (2<<3)|(8>>1) = 0x14 ; ((8<<7)&0xFF)|(1<<3)=0x08 ;
				sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
						       96,
						       audioSamplingFrequency,
						       "audio", "AAC-hbr",
						       encoderConfigStr, audioNumChannels);
			}
		}
		else{
			if(audioType == AUDIO_G711)
			{
				sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 0, audioSamplingFrequency, "audio", "PCMU", 1);
			}
			else{
				char const* encoderConfigStr =  "1588";// (2<<3)|(11>>1) = 0x15 ; ((11<<7)&0xFF)|(1<<3)=0x88 ;
				sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio,
						       96,
						       audioSamplingFrequency,
						       "audio", "AAC-hbr",
						       encoderConfigStr, audioNumChannels);

			}
		}
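
		// (The AudioSpecificConfig strings above follow from: audioObjectType = 2
		// (AAC-LC), samplingFrequencyIndex = 8 (16 kHz) or 11 (8 kHz), 1 channel:
		//   byte0 = (aot << 3) | (sfi >> 1);
		//   byte1 = ((sfi << 7) & 0xFF) | (channels << 3);
		// giving 0x14,0x08 -> "1408" and 0x15,0x88 -> "1588".)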

		// Create (and start) a 'RTCP instance' for this RTP sink:
		unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
		rtcpAudio = RTCPInstance::createNew(*env, rtcpGroupsockAudio,
					  totalSessionBandwidthAudio, CNAME,
					  sinkAudio, NULL /* we're a server */,
					  streamingMode == STREAMING_MULTICAST_SSM);
		// Note: This starts RTCP running automatically
		sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));

		// Start streaming:
		sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
    }

	rtspServer->addServerMediaSession(sms);
	{
		char *url = rtspServer->rtspURL(sms);
		*env << "Multicast: play this stream using the URL:\n\t" << url << "\n";
		delete[] url;
	}
  }


  // Begin the LIVE555 event loop:
  env->taskScheduler().doEventLoop(&watchVariable); // returns when "watchVariable" is set (e.g., from a signal handler)


  if( streamingMode != STREAMING_UNICAST )
  {
	Medium::close(rtcpAudio);
	Medium::close(sinkAudio);
	Medium::close(sourceAudio);
	delete rtpGroupsockAudio;
	delete rtcpGroupsockAudio;

	Medium::close(rtcpVideo);
	Medium::close(sinkVideo);
	Medium::close(sourceVideo);
	delete rtpGroupsockVideo;
	delete rtcpGroupsockVideo;

  }

  Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s
  if( MjpegInputDevice != NULL )
  {
	Medium::close(MjpegInputDevice);
  }

  if( H264InputDevice != NULL )
  {
	Medium::close(H264InputDevice);
  }

  if( Mpeg4InputDevice != NULL )
  {
	Medium::close(Mpeg4InputDevice);
  }

  env->reclaim();

  delete scheduler;

  ApproInterfaceExit();

  return 0;

}
Example #5
void setupDarwinStreaming(UsageEnvironment& env, WISInput& inputDevice) {
  // Create a 'Darwin injector' object:
  injector = DarwinInjector::createNew(env, applicationName);

  // For RTCP:
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen + 1];
  gethostname((char *) CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0';      // just in case

  /******************audio***********************/
  if (audioFormat != AFMT_NONE) {
    // Create the audio source:
    sourceAudio = createAudioSource(env, inputDevice.audioSource());

    if (packageFormat != PFMT_TRANSPORT_STREAM) { // there's a separate RTP stream for audio
      // Create 'groupsocks' for RTP and RTCP.
      // (Note: Because we will actually be streaming through a remote Darwin server,
      // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
      struct in_addr dummyDestAddress;
      dummyDestAddress.s_addr = 0;
      rtpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      rtcpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      
      // Create a RTP sink for the audio stream:
      sinkAudio = createAudioRTPSink(env, rtpGroupsockAudio);

      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(env, rtcpGroupsockAudio,
					  totalSessionBandwidthAudio, CNAME,
					  sinkAudio, NULL /* we're a server */);
          // Note: This starts RTCP running automatically

      // Add these to our 'Darwin injector':
      injector->addStream(sinkAudio, rtcpAudio);
    }
  }
  /******************end audio***********************/

  /******************video***********************/
  if (videoFormat != VFMT_NONE) {
    // Create the video source:
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      MPEG2TransportStreamFromESSource* tsSource
	= MPEG2TransportStreamFromESSource::createNew(env);
      tsSource->addNewVideoSource(inputDevice.videoSource(), 2);
      if (sourceAudio != NULL) tsSource->addNewAudioSource(sourceAudio, 2);
      // Gather the Transport packets into network packet-sized chunks:
      sourceVideo = MPEG2TransportStreamAccumulator::createNew(env, tsSource);
      sourceAudio = NULL;
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sourceVideo = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG4: {
	sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      }
    }

    // Create 'groupsocks' for RTP and RTCP.
    // (Note: Because we will actually be streaming through a remote Darwin server,
    // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
    struct in_addr dummyDestAddress;
    dummyDestAddress.s_addr = 0;
    rtpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
    rtcpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);

    // Create a RTP sink for the video stream:
    unsigned char payloadFormatCode = 97; // if dynamic
    setVideoRTPSinkBufferSize();
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      sinkVideo = SimpleRTPSink::createNew(env, rtpGroupsockVideo,
					   33, 90000, "video", "mp2t",
					   1, True, False/*no 'M' bit*/);
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG4: {
	sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
	break;
      }
      }
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthVideo = (videoBitrate+500)/1000; // in kbps; for RTCP b/w share
    rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
					totalSessionBandwidthVideo, CNAME,
					sinkVideo, NULL /* we're a server */);
        // Note: This starts RTCP running automatically

    // Add these to our 'Darwin injector':
    injector->addStream(sinkVideo, rtcpVideo);
  }
  /******************end video***********************/

  // Next, specify the destination Darwin Streaming Server:
  char const* remoteStreamName = "test.sdp";
  if (!injector->setDestination(remoteDSSNameOrAddress, remoteStreamName,
                                applicationName, "LIVE555 Streaming Media")) {
    env << "Failed to connect to remote Darwin Streaming Server: " << env.getResultMsg() << "\n";
    exit(1);
  }

  env << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
       << "\trtsp://" << remoteDSSNameOrAddress << "/" << remoteStreamName << "\n";

}
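
// A minimal usage sketch (an assumption, not part of the original file: it
// presumes the globals used above -- injector, applicationName,
// remoteDSSNameOrAddress, audioFormat, videoFormat, packageFormat,
// audioOutputBitrate, videoBitrate -- are configured elsewhere, as in
// wis-streamer's main()):
//
//   TaskScheduler* scheduler = BasicTaskScheduler::createNew();
//   UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
//   WISInput* input = WISInput::createNew(*env);  // hypothetical creation call
//   setupDarwinStreaming(*env, *input);
//   env->taskScheduler().doEventLoop();           // run the LIVE555 event loop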