Example No. 1
void shutdownStream(RTSPClient* rtspClient, int exitCode) {
	(void)exitCode;

//  UsageEnvironment& env = rtspClient->envir(); // alias
  StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

	int channel = ((ourRTSPClient*)rtspClient)->_channel;
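	// Lock this channel's context, retrying if the lock attempt is interrupted by a signal (EINTR).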
	while(pthread_mutex_lock(&g_context[channel].mutex) != 0 && errno == EINTR){};

  // First, check whether any subsessions have still to be closed:
  if (scs.session != NULL) { 
    Boolean someSubsessionsWereActive = False;
    MediaSubsessionIterator iter(*scs.session);
    MediaSubsession* subsession;

    while ((subsession = iter.next()) != NULL) {
      if (subsession->sink != NULL) {
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	if (subsession->rtcpInstance() != NULL) {
	  subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
	}

	someSubsessionsWereActive = True;
      }
    }

    if (someSubsessionsWereActive) {
      // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
      // Don't bother handling the response to the "TEARDOWN".
      rtspClient->sendTeardownCommand(*scs.session, NULL);
    }
  }

//  env << *rtspClient << "Closing the stream.\n";
  Medium::close(rtspClient);
    // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

  //if (--rtspClientCount == 0) 
  {
    // The final stream has ended, so exit the application now.
    // (Of course, if you're embedding this code into your own application, you might want to comment this out,
    // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
   	// exit(exitCode);
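	// Instead of exiting, mark this channel as shut down and set its event-loop flag so the LIVE555 event loop can exit.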
	g_context[channel].shutdown		= true;
	g_context[channel].eventLoopVar	= 1;
  }

  while(pthread_mutex_unlock(&g_context[channel].mutex) != 0 && errno == EINTR){};
}
Example No. 2
AVIFileSink::AVIFileSink(UsageEnvironment& env,
			 MediaSession& inputSession,
			 char const* outputFileName,
			 unsigned bufferSize,
			 unsigned short movieWidth, unsigned short movieHeight,
			 unsigned movieFPS, Boolean packetLossCompensate)
  : Medium(env), fInputSession(inputSession),
    fIndexRecordsHead(NULL), fIndexRecordsTail(NULL), fNumIndexRecords(0),
    fBufferSize(bufferSize), fPacketLossCompensate(packetLossCompensate),
    fAreCurrentlyBeingPlayed(False), fNumSubsessions(0), fNumBytesWritten(0),
    fHaveCompletedOutputFile(False),
    fMovieWidth(movieWidth), fMovieHeight(movieHeight), fMovieFPS(movieFPS) {
  fOutFid = OpenOutputFile(env, outputFileName);
  if (fOutFid == NULL) return;

  // Set up I/O state for each input subsession:
  MediaSubsessionIterator iter(fInputSession);
  MediaSubsession* subsession;
  while ((subsession = iter.next()) != NULL) {
    // Ignore subsessions without a data source:
    FramedSource* subsessionSource = subsession->readSource();
    if (subsessionSource == NULL) continue;

    // If "subsession's" SDP description specified screen dimension
    // or frame rate parameters, then use these.
    if (subsession->videoWidth() != 0) {
      fMovieWidth = subsession->videoWidth();
    }
    if (subsession->videoHeight() != 0) {
      fMovieHeight = subsession->videoHeight();
    }
    if (subsession->videoFPS() != 0) {
      fMovieFPS = subsession->videoFPS();
    }

    AVISubsessionIOState* ioState
      = new AVISubsessionIOState(*this, *subsession);
    subsession->miscPtr = (void*)ioState;

    // Also set a 'BYE' handler for this subsession's RTCP instance:
    if (subsession->rtcpInstance() != NULL) {
      subsession->rtcpInstance()->setByeHandler(onRTCPBye, ioState);
    }

    ++fNumSubsessions;
  }

  // Begin by writing an AVI header:
  addFileHeader_AVI();
}
Example No. 3
void CAimer39RTSPClient::shutdownStream( RTSPClient* rtspClient ) 
{
	UsageEnvironment& env = rtspClient->envir(); // alias
	StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

	CAimer39RTSPClient * arc = findClient( (ourRTSPClient*)rtspClient );

	if ( NULL == arc ) {
		env << "somehow the system got into a dangerous situation!" << "\n";
		return;
	}

	// First, check whether any subsessions have still to be closed:
	if (scs.session != NULL) { 
		Boolean someSubsessionsWereActive = False;
		MediaSubsessionIterator iter(*scs.session);
		MediaSubsession* subsession;

		while ((subsession = iter.next()) != NULL) {
			if (subsession->sink != NULL) {
				Medium::close(subsession->sink);
				subsession->sink = NULL;

				if (subsession->rtcpInstance() != NULL) {
					subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
				}

				someSubsessionsWereActive = True;
			}
		}

		if (someSubsessionsWereActive) {
			// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
			// Don't bother handling the response to the "TEARDOWN".
			rtspClient->sendTeardownCommand(*scs.session, NULL);
		}
	}

	env << *rtspClient << "Closing the stream.\n";
	arc->m_bIsShutDown = true;
	if (arc->m_pFinishCallback)	arc->m_pFinishCallback(arc->m_pCallBackParam, arc); // invoke the callback to report that the stream is over
	Medium::close(rtspClient);
	// Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.
}
Example No. 4
// Used to shut down and close a stream (including its "RTSPClient" object):
void StreamClient::shutdownStream()
{
    // First, check whether any subsessions have still to be closed:
    if (_state.session != nullptr) {
        Boolean someSubsessionsWereActive = False;
        MediaSubsessionIterator iter(*_state.session);
        MediaSubsession * subsession = nullptr;

        while ((subsession = iter.next()) != nullptr) {
            if (subsession->sink != nullptr) {
                Medium::close(subsession->sink);
                subsession->sink = nullptr;

                if (subsession->rtcpInstance() != nullptr) {
                    subsession->rtcpInstance()->setByeHandler(nullptr, nullptr); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
                }
                someSubsessionsWereActive = True;
            }
        }

        if (someSubsessionsWereActive) {
            sendTeardown(*_state.session);
        }
    }

    crLogN("StreamClient::shutdownStream() Closing the stream.");
    Medium::close(this);
    // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

//    if (--rtspClientCount == 0) {
//        // The final stream has ended, so exit the application now.
//        // (Of course, if you're embedding this code into your own application, you might want to comment this out,
//        // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
//        exit(exitCode);
//    }
    exit(0);
}
Example No. 5
int CMediaNet::MediaNet_Thread( void * pThisVoid )
{
	CMediaNet *pThis = ( CMediaNet* )pThisVoid;

	do 
	{
		// Begin initialization.
		pThis->SetRtspStatus( RTSPStatus_Init );

		// Begin by setting up our usage environment:
		TaskScheduler* scheduler = BasicTaskScheduler::createNew();
		env = BasicUsageEnvironment::createNew(*scheduler);

		progName = "M_CU";

		string strUrl = pThis->m_strRTSPUrlA;

		gettimeofday(&startTime, NULL);

		unsigned short desiredPortNum = 0;

		// unfortunately we can't use getopt() here, as Windoze doesn't have it

		// Create our client object:
		ourClient = createClient(*env, verbosityLevel, progName);
		if (ourClient == NULL) 
		{
			*env << "Failed to create " << clientProtocolName
				<< " client: " << env->getResultMsg() << "\n";

			pThis->SetRtspStatus( RTSPStatus_Error_Connect_Srv );
			break;
		}

		// Begin fetching the OPTIONS response.
		pThis->SetRtspStatus( RTSPStatus_Opitiion );
		// Begin by sending an "OPTIONS" command:
		char* optionsResponse
			= getOptionsResponse(ourClient, pThis->m_strRTSPUrlA.c_str(), username, password);

		if (optionsResponse == NULL) 
		{
			*env << clientProtocolName << " \"OPTIONS\" request failed: "
				<< env->getResultMsg() << "\n";

			pThis->SetRtspStatus( RTSPStatus_Error_Connect_Srv );
			break;
		} 
		else 
		{
			*env << clientProtocolName << " \"OPTIONS\" request returned: "
				<< optionsResponse << "\n";
		}
		if( optionsResponse )
		{
			delete[] optionsResponse;
		}
			

		// Begin fetching the description.
		// Open the URL, to get a SDP description:
		pThis->SetRtspStatus( RTSPStatus_Description );
		char* sdpDescription
			= getSDPDescriptionFromURL(ourClient, pThis->m_strRTSPUrlA.c_str(), username, password,
			proxyServerName, proxyServerPortNum,
			desiredPortNum);
		if (sdpDescription == NULL) 
		{
			*env << "Failed to get a SDP description from URL \"" << pThis->m_strRTSPUrlA.c_str()
				<< "\": " << env->getResultMsg() << "\n";
			pThis->SetRtspStatus( RTSPStatus_Error_Connect_Srv );
			break;
		}

		*env << "Opened URL \"" << pThis->m_strRTSPUrlA.c_str()
			<< "\", returning a SDP description:\n" << sdpDescription << "\n";

		// Create a media session object from this SDP description:
		session = MediaSession::createNew(*env, sdpDescription);
		delete[] sdpDescription;
		if (session == NULL) 
		{
			*env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
			pThis->SetRtspStatus( RTSPStatus_Error_Connect_Srv );
			break;
		} 
		else if (!session->hasSubsessions()) 
		{
			*env << "This session has no media subsessions (i.e., \"m=\" lines)\n";
			pThis->SetRtspStatus( RTSPStatus_Error_Connect_Srv );
			break;
		}

		// Then, setup the "RTPSource"s for the session:
		MediaSubsessionIterator iter(*session);
		MediaSubsession *subsession;
		Boolean madeProgress = False;
		char const* singleMediumToTest = singleMedium;
		while ((subsession = iter.next()) != NULL) 
		{
			// If we've asked to receive only a single medium, then check this now:
			if (singleMediumToTest != NULL) 
			{
				if (strcmp(subsession->mediumName(), singleMediumToTest) != 0) 
				{
					*env << "Ignoring \"" << subsession->mediumName()
						<< "/" << subsession->codecName()
						<< "\" subsession, because we've asked to receive a single " << singleMedium
						<< " session only\n";
					continue;
				} 
				else 
				{
					// Receive this subsession only
					singleMediumToTest = "xxxxx";
					// this hack ensures that we get only 1 subsession of this type
				}
			}

			desiredPortNum = 0;
			if (desiredPortNum != 0) 
			{
				subsession->setClientPortNum(desiredPortNum);
				desiredPortNum += 2;
			}

			if (true) 
			{
				if (!subsession->initiate(simpleRTPoffsetArg)) 
				{
					*env << "Unable to create receiver for \"" << subsession->mediumName()
						<< "/" << subsession->codecName()
						<< "\" subsession: " << env->getResultMsg() << "\n";
				} 
				else 
				{
					*env << "Created receiver for \"" << subsession->mediumName()
						<< "/" << subsession->codecName()
						<< "\" subsession (client ports " << subsession->clientPortNum()
						<< "-" << subsession->clientPortNum()+1 << ")\n";
					madeProgress = True;

					if (subsession->rtpSource() != NULL) 
					{
						// Because we're saving the incoming data, rather than playing
						// it in real time, allow an especially large time threshold
						// (1 second) for reordering misordered incoming packets:
						unsigned const thresh = 1000000; // 1 second
						subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);

						if (socketInputBufferSize > 0) 
						{
							// Set the RTP source's input buffer size as specified:
							int socketNum
								= subsession->rtpSource()->RTPgs()->socketNum();
							unsigned curBufferSize
								= getReceiveBufferSize(*env, socketNum);
							unsigned newBufferSize
								= setReceiveBufferTo(*env, socketNum, socketInputBufferSize);
							*env << "Changed socket receive buffer size for the \""
								<< subsession->mediumName()
								<< "/" << subsession->codecName()
								<< "\" subsession from "
								<< curBufferSize << " to "
								<< newBufferSize << " bytes\n";
						}
					}
				}
			} 
			else 
			{
				mcu::tlog << _T( "Use port: " ) << (int)subsession->clientPortNum() << endl;
				if (subsession->clientPortNum() == 0) 
				{
					*env << "No client port was specified for the \""
						<< subsession->mediumName()
						<< "/" << subsession->codecName()
						<< "\" subsession.  (Try adding the \"-p <portNum>\" option.)\n";
				} 
				else 
				{
					madeProgress = True;
				}
			}
		}
		if (!madeProgress) 
			break;

		// Perform additional 'setup' on each subsession, before playing them:
		pThis->SetRtspStatus( RTSPStatus_Setup );
		unsigned nResponseCode = 0;
		BOOL bSetupSuccess = setupStreams( &nResponseCode );
		if ( !bSetupSuccess )
		{
			// SETUP failed!
			if ( RTSPResp_Error_Server_Full == nResponseCode )
			{
				pThis->SetRtspStatus( RTSPStatus_Error_Server_Full );
			}
			else
			{
				pThis->SetRtspStatus( RTSPStatus_Idle );
			}
			break;
		}
		// Create output files:
		
		if ( true  ) 
		{
				// Create and start "FileSink"s for each subsession: 
				madeProgress = False;
				iter.reset();
				while ((subsession = iter.next()) != NULL) 
				{
					if (subsession->readSource() == NULL) continue; // was not initiated

					MediaSink *pDecodeSink = 0;
					if (strcmp(subsession->mediumName(), "video") == 0 )
					{
						int nBandWidth = subsession->GetBandWidth();

						if ( strcmp(subsession->codecName(), "MP4V-ES") == 0 )
						{
							CMpeg4StreamDecodeSink *pMsds = CMpeg4StreamDecodeSink::CreateNew( *env, 20000, nBandWidth );
							 pDecodeSink = pMsds;
							
						}
						else if ( strcmp( subsession->codecName(), "H264" ) == 0 )
						{
							 CH264StreamDecodeSink *pHsds = CH264StreamDecodeSink::CreateNew( *env, 20000, nBandWidth );
							 pDecodeSink = pHsds;
						}
						else
						{
							continue;
						}
					}				

					subsession->sink = pDecodeSink;
					if (subsession->sink == NULL) 
					{
						*env << "Failed to create a decode sink for this subsession\n";
						continue;
					} 


					subsession->sink->startPlaying(*(subsession->readSource()),
						subsessionAfterPlaying,
						subsession);

					// Also set a handler to be called if a RTCP "BYE" arrives
					// for this subsession:
					if (subsession->rtcpInstance() != NULL) 
					{
						subsession->rtcpInstance()->setByeHandler(subsessionByeHandler,
							subsession);
					}

					// Send a NAT probe packet.
					unsigned char temp[112] = {0};
					temp[0] = 0x80;
					subsession->rtpSource()->RTPgs()->output( *env, 0,temp, 112 );

					madeProgress = True;
				}
			}


		// Finally, start playing each subsession, to start the data flow:
		pThis->SetRtspStatus( RTSPStatus_Play );
		startPlayingStreams();


		pThis->SetRtspStatus( RTSPStatus_Running );
		// Pass in a pointer to the stop flag.
		env->taskScheduler().doEventLoop( &pThis->m_runFlag ); 

		pThis->SetRtspStatus( RTSPStatus_Idle );

	} while(0);	

	return 0;
}
Example No. 6
bool MtkRTSPClient::handSetup(char* resultString)
{
	CHECK_NULL_COND(session, false); 
	CHECK_NULL_COND(rtsp::env, false);

	bool bSuccess = false;
	
	// Then, setup the "RTPSource"s for the session:
	MediaSubsessionIterator iter(*(session));
	MediaSubsession *subsession = NULL;
	while ((subsession = iter.next()) != NULL) 
	{					
		if (subsession->readSource() == NULL) 
		{
			LOG_ERR("warning");
			continue; // was not initiated
		}

		if (subsession->sink != NULL)/* already set */
		{
			continue;
		}

		// Determine the internal buffer/stream type for this subsession; 0 means the media type is not handled.
		unsigned int type = getBufType(subsession);
		if (type == 0)
		{
			LOG_ERR("error type=%d", type);
			continue;
		}
		
		{
			iSetupCount--;
			/* set media info */
			setMediaInfo(subsession, type);
		}

		CmpbSink *sink = NULL;
		if ((type != mediatype_audio) && (strcmp(subsession->codecName(), "H264") == 0))
		{
			sink = CmpbH264Sink::createNew(*env, *subsession, type, fileSinkBufferSize);
		}
        else if ((type == mediatype_audio) && 
                    ((stMediaInfo.audioCodec == MEDIACODEC_AC3) || 
                     (stMediaInfo.audioCodec == MEDIACODEC_EAC3) ||
                     (stMediaInfo.audioCodec == MEDIACODEC_MPEG4_GENERIC)))
		{
			sink = CmpbAACSink::createNew(*env, *subsession, type, fileSinkBufferSize);
		}
        else if ((type == mediatype_audio) && (stMediaInfo.audioCodec == MEDIACODEC_MP4A_LATM))
		{
			sink = CmpbLATMSink::createNew(*env, *subsession, type, fileSinkBufferSize);
		}
		else
		{
			sink = CmpbSink::createNew(*env, *subsession, type, fileSinkBufferSize);
		}
		subsession->sink = sink;
		if (subsession->sink == NULL) 
		{
			LOG_ERR("error!"); 
		} 
		else 
		{		
#if 0 /* this should be moved into the cmpb sink */           
			if ((type != mediatype_audio) && (strcmp(subsession->codecName(), "MP4V-ES") == 0)
				&& (subsession->fmtp_config() != NULL)) 
			{
			    // For MPEG-4 video RTP streams, the 'config' information
			    // from the SDP description contains useful VOL etc. headers.
			    // Insert this data at the front of the output file:
			    unsigned configLen;
			    unsigned char* configData
			      = parseGeneralConfigStr(subsession->fmtp_config(), configLen);
			    struct timeval timeNow;
			    gettimeofday(&timeNow, NULL);
			    sink->sendData(configData, configLen, timeNow);
			    delete[] configData;
		  	}
#endif			
			subsession->sink->startPlaying(*(subsession->readSource()),
												subsessionAfterPlaying,
													subsession);
			// Also set a handler to be called if a RTCP "BYE" arrives
			// for this subsession:
			if (subsession->rtcpInstance() != NULL) 
			{
				subsession->rtcpInstance()->setByeHandler(subsessionAfterPlaying, subsession);
			}

			bSuccess = true;
		}

		break;

	}

	if (iSetupCount == 0)
	{
		mediaInfoReady(); 
	}

	return bSuccess ;
}
Example No. 7
void shutdownStream(RTSPClient* rtspClient, int exitCode) {
	OUTPUT_DEBUG_STRING("%s \n", __FUNCTION__);
	
	OUTPUT_DEBUG_STRING("open times = %d close times = %d\n", open_times, ++close_times);


	UsageEnvironment& env = rtspClient->envir(); // alias
	StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

	// First, check whether any subsessions have still to be closed:
	if (scs.session != NULL) { 
		Boolean someSubsessionsWereActive = False;
		MediaSubsessionIterator iter(*scs.session);
		MediaSubsession* subsession;

		while ((subsession = iter.next()) != NULL) {
			if (subsession->sink != NULL) {
				//subsession->sink->stopPlaying();
				Medium::close(subsession->sink);
				subsession->sink = NULL;

				OUTPUT_DEBUG_STRING("22 add = %d, del = %d\n", sum_add, ++sum_del);
				
				if (subsession->rtcpInstance() != NULL) {
					subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
				}
				someSubsessionsWereActive = True;
			}
		}

		if (someSubsessionsWereActive) {
			// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
			// Don't bother handling the response to the "TEARDOWN".
			//gEnv->taskScheduler().unscheduleDelayedTask(scs.streamTimerTask);
			rtspClient->sendTeardownCommand(*scs.session, NULL);
			
		}
		
		//Medium::close(scs.session);
		//gEnv->taskScheduler().unscheduleDelayedTask(scs.streamTimerTask);
		Medium::close(rtspClient);
		rtspClient = NULL;
		
		//gEnv->reclaim(); //del by huguohu
		//gEnv = NULL;
		if(gScheduler != NULL){
			delete gScheduler; 
			gScheduler = NULL;	
		}

	}

	//env << *rtspClient << "Closing the stream.\n";
	//Medium::close(scs.session);

	// Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

	// 	if (--rtspClientCount == 0) {
	// 		// The final stream has ended, so exit the application now.
	// 		// (Of course, if you're embedding this code into your own application, you might want to comment this out,
	// 		// and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
	// 		//exit(exitCode);
	// 	}
}
Example No. 8
bool CRTSPClient::OpenStream(char* url)
{
  XBMC->Log(LOG_DEBUG, "CRTSPClient::OpenStream()");
  m_session=NULL;
  
  strcpy(m_url,url);
  // Open the URL, to get a SDP description: 
  char* sdpDescription= getSDPDescriptionFromURL(m_ourClient, url, ""/*username*/, ""/*password*/,""/*proxyServerName*/, 0/*proxyServerPortNum*/,1234/*desiredPortNum*/);
  if (sdpDescription == NULL) 
  {
    XBMC->Log(LOG_DEBUG, "Failed to get a SDP description from URL %s %s",url ,m_env->getResultMsg() );
    shutdown();
    return false;
  }
  XBMC->Log(LOG_DEBUG, "Opened URL %s %s",url,sdpDescription);

  // Parse the optional "a=range:npt=<start>-<end>" SDP attribute to compute the stream duration in milliseconds.
  char* range=strstr(sdpDescription,"a=range:npt=");
  if (range!=NULL)
  {
    char *pStart = range+strlen("a=range:npt=");
    char *pEnd = strstr(range,"-") ;
    if (pEnd!=NULL)
    {
      pEnd++ ;
      double Start=atof(pStart) ;
      double End=atof(pEnd) ;

      XBMC->Log(LOG_DEBUG, "rangestart:%f rangeend:%f", Start,End);
      m_duration=(long) ((End-Start)*1000.0);
    }
  }
  // Create a media session object from this SDP description:
  m_session = MediaSession::createNew(*m_env, sdpDescription);
  delete[] sdpDescription;
  if (m_session == NULL) 
  {
    XBMC->Log(LOG_DEBUG, "Failed to create a MediaSession object from the SDP description:%s ",m_env->getResultMsg());
    shutdown();
    return false;
  } 
  else if (!m_session->hasSubsessions()) 
  {
    XBMC->Log(LOG_DEBUG, "This session has no media subsessions");
    shutdown();
    return false;
  }

  // Then, setup the "RTPSource"s for the session:
  MediaSubsessionIterator iter(*m_session);
  MediaSubsession *subsession;
  Boolean madeProgress = False;
  char const* singleMediumToTest = singleMedium;
  while ((subsession = iter.next()) != NULL) 
  {
    // If we've asked to receive only a single medium, then check this now:
    if (singleMediumToTest != NULL) 
    {
      if (strcmp(subsession->mediumName(), singleMediumToTest) != 0) 
      {
        XBMC->Log(LOG_DEBUG, "Ignoring %s %s %s" , subsession->mediumName(),subsession->codecName(),singleMedium);
        continue;
      } 
      else 
      {
        // Receive this subsession only
        singleMediumToTest = "xxxxx";
        // this hack ensures that we get only 1 subsession of this type
      }
    }
    if (desiredPortNum != 0) 
    {
      subsession->setClientPortNum(desiredPortNum);
      desiredPortNum += 2;
    }

    if (createReceivers) 
    {
      if (!subsession->initiate(simpleRTPoffsetArg)) 
      {
        XBMC->Log(LOG_DEBUG, "Unable to create receiver for %s %s %s" ,subsession->mediumName(),subsession->codecName(),m_env->getResultMsg());
      } 
      else 
      {
        XBMC->Log(LOG_DEBUG, "Created receiver for type=%s codec=%s ports: %d %d " ,subsession->mediumName(),subsession->codecName(),subsession->clientPortNum(),subsession->clientPortNum()+1 );
        madeProgress = True;

        if (subsession->rtpSource() != NULL) 
        {
          // Because we're saving the incoming data, rather than playing
          // it in real time, allow an especially large time threshold
          // (1 second) for reordering misordered incoming packets:
          
          int socketNum= subsession->rtpSource()->RTPgs()->socketNum();
          XBMC->Log(LOG_DEBUG, "rtsp:increaseReceiveBufferTo to 2000000 for s:%d",socketNum);
          increaseReceiveBufferTo( *m_env, socketNum, 2000000 );

          unsigned const thresh = 1000000; // 1 second 
          subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);

          if (socketInputBufferSize > 0) 
          {
            // Set the RTP source's input buffer size as specified:
            int socketNum= subsession->rtpSource()->RTPgs()->socketNum();
            unsigned curBufferSize= getReceiveBufferSize(*m_env, socketNum);
            unsigned newBufferSize= setReceiveBufferTo(*m_env, socketNum, socketInputBufferSize);
            XBMC->Log(LOG_DEBUG,  "Changed socket receive buffer size for the %s %s %d %d",
            subsession->mediumName(),subsession->codecName(),curBufferSize,newBufferSize);
          }
        }
      }
    } 
    else 
    {
      if (subsession->clientPortNum() == 0) 
      {
        XBMC->Log(LOG_DEBUG, "No client port was specified for the %s %s",subsession->mediumName(),subsession->codecName());
      } 
      else 
      {
        madeProgress = True;
      }
    }
  }
  if (!madeProgress) 
  {
    shutdown();
    return false;
  }
  
  // Perform additional 'setup' on each subsession, before playing them:
  if (!setupStreams())
  {
    return false;
  }

  // Create output files:
  // Create and start "FileSink"s for each subsession:
  madeProgress = False;
  iter.reset();
  while ((subsession = iter.next()) != NULL) 
  {
    if (subsession->readSource() == NULL) continue; // was not initiated
    
    // Mediaportal:
    CMemorySink* fileSink= CMemorySink::createNew(*m_env, *m_buffer, fileSinkBufferSize);
    // XBMC test via file:
    //FileSink* fileSink = FileSink::createNew(*m_env, m_outFileName, fileSinkBufferSize, false); //oneFilePerFrame

    subsession->sink = fileSink;
    if (subsession->sink == NULL) 
    {
      XBMC->Log(LOG_DEBUG, "Failed to create FileSink %s",m_env->getResultMsg());
      shutdown();
      return false;
    } 
    XBMC->Log(LOG_DEBUG, "Created output sink: %s", m_outFileName);
    subsession->sink->startPlaying(*(subsession->readSource()),my_subsessionAfterPlaying,subsession);
    
    // Also set a handler to be called if a RTCP "BYE" arrives
    // for this subsession:
    if (subsession->rtcpInstance() != NULL) 
    {
      subsession->rtcpInstance()->setByeHandler(my_subsessionByeHandler,subsession);
    }
    madeProgress = True;
  }

  return true;
}
Example No. 9
int CMPIPTV_RTSP::OpenConnection(void)
{
  this->logger.Log(LOGGER_INFO, METHOD_START_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME);
  int result = STATUS_OK;

  this->rtspClient = RTSPClient::createNew(*this->rtspEnvironment);
  result |= (this->rtspClient == NULL);

  if (result == STATUS_OK)
  {
    // RTSPClient works with char, not with TCHAR
    char *tempRtspUrl = ConvertToMultiByte(this->rtspUrl);
    result |= (tempRtspUrl == NULL);
    if (result == STATUS_OK)
    {
      char* optionsResult = this->rtspClient->sendOptionsCmd(tempRtspUrl, NULL, NULL, NULL, this->receiveDataTimeout / 2000);
      result |= (optionsResult == NULL);

      if (result != STATUS_OK)
      {
        TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error occurred while sending OPTIONS command"));
        this->LogRtspMessage(LOGGER_ERROR, message);
        FREE_MEM(message);
      }
      else
      {
        TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("OPTIONS result"));
        this->LogFullRtspMessage(LOGGER_VERBOSE, message, optionsResult);
        FREE_MEM(message);

        char *describeResult = this->rtspClient->describeURL(tempRtspUrl, NULL, FALSE, this->receiveDataTimeout / 2000);
        result |= (describeResult == NULL);

        if (result != STATUS_OK)
        {
          TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error occurred while sending DESCRIBE command"));
          this->LogRtspMessage(LOGGER_ERROR, message);
          FREE_MEM(message);
        }
        else
        {
          TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("DESCRIBE result"));
          this->LogFullRtspMessage(LOGGER_VERBOSE, message, describeResult);
          FREE_MEM(message);

          this->rtspSession = MediaSession::createNew(*this->rtspEnvironment, describeResult);
          result |= (this->rtspSession == NULL);

          if (result != STATUS_OK)
          {
            TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error occurred while creating new session"));
            this->LogRtspMessage(LOGGER_ERROR, message);
            FREE_MEM(message);
          }
          else
          {
            result |= (!this->rtspSession->hasSubsessions());
            if (result != STATUS_OK)
            {
              this->logger.Log(LOGGER_ERROR, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("session doesn't have subsessions"));
            }
            else
            {
              // Then, setup the "RTPSource"s for the session:
              MediaSubsessionIterator iter(*this->rtspSession);
              MediaSubsession *subsession = NULL;

              while ((result == STATUS_OK) && ((subsession = iter.next()) != NULL))
              {
                char *tempSubSessionName = (char *)subsession->mediumName();
                char *tempSubSessionCodecName = (char *)subsession->codecName();
#ifdef _MBCS
                TCHAR *subSessionName = ConvertToMultiByteA(tempSubSessionName);
                TCHAR *subSessionCodecName = ConvertToMultiByteA(tempSubSessionCodecName);
#else
                TCHAR *subSessionName = ConvertToUnicodeA(tempSubSessionName);
                TCHAR *subSessionCodecName = ConvertToUnicodeA(tempSubSessionCodecName);
#endif
                if (!subsession->initiate())
                {
                  result = STATUS_ERROR;
                  TCHAR *message = FormatString(_T("%s: %s: unable to create receiver for subsession '%s', codec '%s'"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName, subSessionCodecName);
                  this->LogRtspMessage(LOGGER_ERROR, message);
                  FREE_MEM(message);
                }
                else
                {
                  this->logger.Log(LOGGER_VERBOSE, _T("%s: %s: created receiver for subsession '%s', codec '%s'"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName, subSessionCodecName);

                  // set session ID, doesn't matter what
                  subsession->sessionId = tempSubSessionName;

                  if (subsession->rtpSource() != NULL)
                  {
                    // because we're saving the incoming data, rather than playing
                    // it in real time, allow an especially large time threshold
                    // (1 second) for reordering misordered incoming packets:
                    unsigned const thresh = 1000000; // 1 second
                    subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);

                    // set the RTP source's OS socket buffer size as appropriate
                    int socketNum = subsession->rtpSource()->RTPgs()->socketNum();
                    unsigned int currentBufferSize = getReceiveBufferSize(*this->rtspEnvironment, socketNum);

                    if (this->defaultBufferSize > currentBufferSize)
                    {
                      setReceiveBufferTo(*this->rtspEnvironment, socketNum, this->defaultBufferSize);
                      unsigned setBufferSize = getReceiveBufferSize(*this->rtspEnvironment, socketNum);

                      if (setBufferSize == this->defaultBufferSize)
                      {
                        this->logger.Log(LOGGER_VERBOSE, _T("%s: %s: set buffer size for subsession '%s' successful, previous size: %i, requested size: %i, current size: %i"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName, currentBufferSize, this->defaultBufferSize, setBufferSize);
                      }
                      else
                      {
                        result = STATUS_ERROR;
                        this->logger.Log(LOGGER_ERROR, _T("%s: %s: set buffer size for subsession '%s' failed, previous size: %i, requested size: %i, current size: %i"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName, currentBufferSize, this->defaultBufferSize, setBufferSize);
                      }
                    }

                    if (_tcsncicmp(subSessionName, _T("audio"), 5) == 0)
                    {
                      // audio
                      this->logger.Log(LOGGER_VERBOSE, _T("%s: %s: audio subsession '%s'"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName);
                      result |= (!rtspClient->setupMediaSubsession(*subsession));

                      if (result != STATUS_OK)
                      {
                        // error occurred
                        TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error while setup subsession"));
                        this->LogRtspMessage(LOGGER_ERROR, message);
                        FREE_MEM(message);
                      }
                      else
                      {
                        this->logger.Log(LOGGER_WARNING, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("subsession audio codec not supported"));
                      }
                    }
                    else if (_tcsncicmp(subSessionName, _T("video"), 5) == 0)
                    {
                      // video
                      this->logger.Log(LOGGER_VERBOSE, _T("%s: %s: video subsession '%s'"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName);
                      result |= (!rtspClient->setupMediaSubsession(*subsession));

                      if (result != STATUS_OK)
                      {
                        // error occurred
                        TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error while setup subsession"));
                        this->LogRtspMessage(LOGGER_ERROR, message);
                        FREE_MEM(message);
                      }
                      else
                      {
                        if (_tcsncicmp(subSessionCodecName, _T("MP2T"), 4) == 0)
                        {
                          // MPEG2 Transport Stream
                          // set new RTSP source
                          this->rtspSource = subsession->rtpSource();

                          if (subsession->rtcpInstance() != NULL)
                          {
                            this->logger.Log(LOGGER_VERBOSE, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("set subsession 'Bye' handler"));
                            subsession->rtcpInstance()->setByeHandler(SubsessionByeHandler, this);
                          }
                        }
                        else if (_tcsncicmp(subSessionCodecName, _T("H264"), 4) == 0)
                        {
                          // H264 codec, HD TV
                          this->logger.Log(LOGGER_ERROR, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("H264 not supported"));
                          result = STATUS_ERROR;
                        }
                        else
                        {
                          // SD TV
                          this->logger.Log(LOGGER_ERROR, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("other subsession video codec than MP2T not supported"));
                          result = STATUS_ERROR;
                        }
                      }
                    }
                    else
                    {
                      this->logger.Log(LOGGER_WARNING, _T("%s: %s: unknown subsession '%s', ignored"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName);
                    }
                  }
                  else
                  {
                    this->logger.Log(LOGGER_WARNING, _T("%s: %s: subsession '%s' doesn't have RTP source"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, subSessionName);
                  }
                }

                // free subsession name and codec name
                FREE_MEM(subSessionName);
                FREE_MEM(subSessionCodecName);
              }

              // we should have some RTSP source
              result |= (this->rtspSource == NULL);

              if (result == STATUS_OK)
              {
                result |= (!this->rtspClient->playMediaSession(*this->rtspSession));

                if (result != STATUS_OK)
                {
                  // error occurred
                  TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error while playing session"));
                  this->LogRtspMessage(LOGGER_ERROR, message);
                  FREE_MEM(message);
                }
                else
                {
                  // create UDP socket and start playing
                  struct in_addr destinationAddress;
                  destinationAddress.s_addr = our_inet_addr("127.0.0.1");

                  unsigned int port = this->rtspUdpPortRangeStart;
                  do
                  {
                    this->logger.Log(LOGGER_VERBOSE, _T("%s: %s: UDP port %u"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, port);

                    // this scoped construction (NoReuse) forces the socket not to reuse an already-occupied UDP port
                    {
                      NoReuse noReuse;
                      this->rtspUdpGroupsock = new Groupsock(*this->rtspEnvironment, destinationAddress, port, 1);
                    }

                    if (this->rtspUdpGroupsock->socketNum() == (-1))
                    {
                      this->logger.Log(LOGGER_ERROR, _T("%s: %s: UDP port %u occupied, trying another port"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, port);
                      port++;
                      delete this->rtspUdpGroupsock;
                      this->rtspUdpGroupsock = NULL;
                    }
                  }
                  while ((this->rtspUdpGroupsock == NULL) && (port <= this->rtspUdpPortRangeEnd));

                  result |= (this->rtspUdpGroupsock == NULL);
                  if (result != STATUS_OK)
                  {
                    this->logger.Log(LOGGER_ERROR, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("cannot create UDP sink, no free port"));
                  }
                  else
                  {
                    this->rtspUdpSink = BasicUDPSink::createNew(*this->rtspEnvironment, this->rtspUdpGroupsock, this->rtspUdpSinkMaxPayloadSize);
                    result |= (this->rtspUdpSink == NULL);

                    if (result != STATUS_OK)
                    {
                      TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("cannot create UDP sink"));
                      this->LogRtspMessage(LOGGER_ERROR, message);
                      FREE_MEM(message);
                    }
                    else
                    {
                      if (this->rtspUdpSink->startPlaying(*this->rtspSource, NULL, NULL))
                      {
                        this->logger.Log(LOGGER_INFO, METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("playing started"));

                        // now create UDP connection
                        TCHAR *url = FormatString(_T("udp://@127.0.0.1:%u"), port);
                        result |= (url == NULL);

                        if (result == STATUS_OK)
                        {
                          // parse UDP url
                          // ParseUrl() calls ClearSession(), and IsConnected() must return FALSE;
                          // otherwise the current RTSP connection would be closed
                          result = this->CMPIPTV_UDP::ParseUrl(url, NULL);

                          if (result == STATUS_OK)
                          {
                            // connect to UDP url
                            result = this->CMPIPTV_UDP::OpenConnection();
                          }
                        }
                        FREE_MEM(url);
                      }
                      else
                      {
                        result = STATUS_ERROR;
                        TCHAR *message = FormatString(METHOD_MESSAGE_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, _T("error occurred while starting playback"));
                        this->LogRtspMessage(LOGGER_ERROR, message);
                        FREE_MEM(message);
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }

      if (optionsResult != NULL)
      {
        delete[] optionsResult;
        optionsResult = NULL;
      }
    }
    FREE_MEM(tempRtspUrl);
  }

  if (result == STATUS_OK)
  {
    // start winsock worker thread
    this->rtspSchedulerThreadHandle = CreateThread( 
      NULL,                                   // default security attributes
      0,                                      // use default stack size  
      &CMPIPTV_RTSP::RtspSchedulerWorker,     // thread function name
      this,                                   // argument to thread function 
      0,                                      // use default creation flags 
      &this->rtspSchedulerThreadId);          // returns the thread identifier

    if (this->rtspSchedulerThreadHandle == NULL)
    {
      // thread not created
      result = STATUS_ERROR;
      this->logger.Log(LOGGER_ERROR, _T("%s: %s: cannot create RTSP scheduler thread, error: %i"), PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME, GetLastError());
    }
  }

  if (result != STATUS_OK)
  {
    // if opening the connection failed, then close it
    this->CloseConnection();
  }

  this->logger.Log(LOGGER_INFO, (result == STATUS_OK) ? METHOD_END_FORMAT : METHOD_END_FAIL_FORMAT, PROTOCOL_IMPLEMENTATION_NAME, METHOD_OPEN_CONNECTION_NAME);
  return (result == STATUS_OK) ? STATUS_OK : STATUS_ERROR;
}
Example No. 10
Boolean MediaSession
::initiateByMediaType(char const* mimeType,
		      MediaSubsession*& resultSubsession,
		      PrioritizedRTPStreamSelector*& resultMultiSource,
		      int& resultMultiSourceSessionId,
		      int useSpecialRTPoffset) {
  // Look through this session's subsessions for media that match "mimeType"
  resultSubsession = NULL;
  resultMultiSource = NULL;
  resultMultiSourceSessionId = 0;
  unsigned maxStaggerSeconds = 0;
  MediaSubsessionIterator iter(*this);
  MediaSubsession* subsession;
  while ((subsession = iter.next()) != NULL) {
    if (resultMultiSourceSessionId != 0
	&& subsession->mctSLAPSessionId() != resultMultiSourceSessionId) {
      // We're using a multi-source SLAP session, but this subsession
      // isn't part of it
      continue;
    }

    Boolean wasAlreadyInitiated = subsession->readSource() != NULL;
    if (!wasAlreadyInitiated) {
      // Try to create a source for this subsession:
      if (!subsession->initiate(useSpecialRTPoffset)) return False;
    }

    // Make sure the source's MIME type is one that we handle:
    if (strcmp(subsession->readSource()->MIMEtype(), mimeType) != 0) {
      if (!wasAlreadyInitiated) subsession->deInitiate();
      continue;
    }

    if (subsession->mctSLAPSessionId() == 0) {
      // Normal case: a single session
      resultSubsession = subsession;
      break; // use this
    } else {
      // Special case: a multi-source SLAP session
      resultMultiSourceSessionId = subsession->mctSLAPSessionId();
      unsigned subsessionStaggerSeconds = subsession->mctSLAPStagger();
      if (subsessionStaggerSeconds > maxStaggerSeconds) {
	maxStaggerSeconds = subsessionStaggerSeconds;
      }
    }
  }

  if (resultSubsession == NULL && resultMultiSourceSessionId == 0) {
    envir().setResultMsg("Session has no usable media subsession");
    return False;
  }

  if (resultMultiSourceSessionId != 0) {
    // We have a multi-source MCT SLAP session; create a selector for it:
    unsigned seqNumStagger = computeSeqNumStagger(maxStaggerSeconds);
    resultMultiSource
      = PrioritizedRTPStreamSelector::createNew(envir(), seqNumStagger);
    if (resultMultiSource == NULL) return False;
    // Note: each subsession has its own RTCP instance; we don't return them

    // Then run through the subsessions again, adding each of the sources:
    iter.reset();
    while ((subsession = iter.next()) != NULL) {
      if (subsession->mctSLAPSessionId() == resultMultiSourceSessionId) {
	resultMultiSource->addInputRTPStream(subsession->rtpSource(),
					     subsession->rtcpInstance());
      }
    }
  }

  return True;
}
Example No. 11
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  progName = argv[0];

  gettimeofday(&startTime, NULL);

#ifdef USE_SIGNALS
  // Allow ourselves to be shut down gracefully by a SIGHUP or a SIGUSR1:
  signal(SIGHUP, signalHandlerShutdown);
  signal(SIGUSR1, signalHandlerShutdown);
#endif

  unsigned short desiredPortNum = 0;

  // unfortunately we can't use getopt() here, as Windoze doesn't have it
  while (argc > 2) {
    char* const opt = argv[1];
    if (opt[0] != '-') usage();
    switch (opt[1]) {

    case 'p': { // specify start port number
      int portArg;
      if (sscanf(argv[2], "%d", &portArg) != 1) {
	usage();
      }
      if (portArg <= 0 || portArg >= 65536 || portArg&1) {
	*env << "bad port number: " << portArg
		<< " (must be even, and in the range (0,65536))\n";
	usage();
      }
      desiredPortNum = (unsigned short)portArg;
      ++argv; --argc;
      break;
    }

    case 'r': { // do not receive data (instead, just 'play' the stream(s))
      createReceivers = False;
      break;
    }

    case 'q': { // output a QuickTime file (to stdout)
      outputQuickTimeFile = True;
      break;
    }

    case '4': { // output a 'mp4'-format file (to stdout)
      outputQuickTimeFile = True;
      generateMP4Format = True;
      break;
    }

    case 'i': { // output an AVI file (to stdout)
      outputAVIFile = True;
      break;
    }

    case 'I': { // specify input interface...
      NetAddressList addresses(argv[2]);
      if (addresses.numAddresses() == 0) {
	*env << "Failed to find network address for \"" << argv[2] << "\"";
	break;
      }
      ReceivingInterfaceAddr = *(unsigned*)(addresses.firstAddress()->data());
      ++argv; --argc;
      break;
    }

    case 'a': { // receive/record an audio stream only
      audioOnly = True;
      singleMedium = "audio";
      break;
    }

    case 'v': { // receive/record a video stream only
      videoOnly = True;
      singleMedium = "video";
      break;
    }

    case 'V': { // disable verbose output
      verbosityLevel = 0;
      break;
    }

    case 'd': { // specify duration, or how much to delay after end time
      float arg;
      if (sscanf(argv[2], "%g", &arg) != 1) {
	usage();
      }
      if (argv[2][0] == '-') { // not "arg<0", in case argv[2] was "-0"
	// a 'negative' argument was specified; use this for "durationSlop":
	duration = 0; // use whatever's in the SDP
	durationSlop = -arg;
      } else {
	duration = arg;
	durationSlop = 0;
      }
      ++argv; --argc;
      break;
    }

    case 'D': { // specify maximum number of seconds to wait for packets:
      if (sscanf(argv[2], "%u", &interPacketGapMaxTime) != 1) {
	usage();
      }
      ++argv; --argc;
      break;
    }

    case 'c': { // play continuously
      playContinuously = True;
      break;
    }

    case 'S': { // specify an offset to use with "SimpleRTPSource"s
      if (sscanf(argv[2], "%d", &simpleRTPoffsetArg) != 1) {
	usage();
      }
      if (simpleRTPoffsetArg < 0) {
	*env << "offset argument to \"-S\" must be >= 0\n";
	usage();
      }
      ++argv; --argc;
      break;
    }

    case 'O': { // Don't send an "OPTIONS" request before "DESCRIBE"
      sendOptionsRequest = False;
      break;
    }

    case 'o': { // Send only the "OPTIONS" request to the server
      sendOptionsRequestOnly = True;
      break;
    }

    case 'm': { // output multiple files - one for each frame
      oneFilePerFrame = True;
      break;
    }

    case 'n': { // notify the user when the first data packet arrives
      notifyOnPacketArrival = True;
      break;
    }

    case 't': {
      // stream RTP and RTCP over the TCP 'control' connection
      if (controlConnectionUsesTCP) {
	streamUsingTCP = True;
      } else {
	usage();
      }
      break;
    }

    case 'T': {
      // stream RTP and RTCP over a HTTP connection
      if (controlConnectionUsesTCP) {
	if (argc > 3 && argv[2][0] != '-') {
	  // The next argument is the HTTP server port number:
	  if (sscanf(argv[2], "%hu", &tunnelOverHTTPPortNum) == 1
	      && tunnelOverHTTPPortNum > 0) {
	    ++argv; --argc;
	    break;
	  }
	}
      }

      // If we get here, the option was specified incorrectly:
      usage();
      break;
    }

    case 'u': { // specify a username and password
      username = argv[2];
      password = argv[3];
      argv+=2; argc-=2;
      if (allowProxyServers && argc > 3 && argv[2][0] != '-') {
	// The next argument is the name of a proxy server:
	proxyServerName = argv[2];
	++argv; --argc;

	if (argc > 3 && argv[2][0] != '-') {
	  // The next argument is the proxy server port number:
	  if (sscanf(argv[2], "%hu", &proxyServerPortNum) != 1) {
	    usage();
	  }
	  ++argv; --argc;
	}
      }
      break;
    }

    case 'A': { // specify a desired audio RTP payload format
      unsigned formatArg;
      if (sscanf(argv[2], "%u", &formatArg) != 1
	  || formatArg >= 96) {
	usage();
      }
      desiredAudioRTPPayloadFormat = (unsigned char)formatArg;
      ++argv; --argc;
      break;
    }

    case 'M': { // specify a MIME subtype for a dynamic RTP payload type
      mimeSubtype = argv[2];
      if (desiredAudioRTPPayloadFormat==0) desiredAudioRTPPayloadFormat =96;
      ++argv; --argc;
      break;
    }

    case 'w': { // specify a width (pixels) for an output QuickTime or AVI movie
      if (sscanf(argv[2], "%hu", &movieWidth) != 1) {
	usage();
      }
      movieWidthOptionSet = True;
      ++argv; --argc;
      break;
    }

    case 'h': { // specify a height (pixels) for an output QuickTime or AVI movie
      if (sscanf(argv[2], "%hu", &movieHeight) != 1) {
	usage();
      }
      movieHeightOptionSet = True;
      ++argv; --argc;
      break;
    }

    case 'f': { // specify a frame rate (per second) for an output QT or AVI movie
      if (sscanf(argv[2], "%u", &movieFPS) != 1) {
	usage();
      }
      movieFPSOptionSet = True;
      ++argv; --argc;
      break;
    }

    case 'F': { // specify a prefix for the audio and video output files
      fileNamePrefix = argv[2];
      ++argv; --argc;
      break;
    }

    case 'b': { // specify the size of buffers for "FileSink"s
      if (sscanf(argv[2], "%u", &fileSinkBufferSize) != 1) {
	usage();
      }
      ++argv; --argc;
      break;
    }

    case 'B': { // specify the size of input socket buffers
      if (sscanf(argv[2], "%u", &socketInputBufferSize) != 1) {
	usage();
      }
      ++argv; --argc;
      break;
    }

    // Note: The following option is deprecated, and may someday be removed:
    case 'l': { // try to compensate for packet loss by repeating frames
      packetLossCompensate = True;
      break;
    }

    case 'y': { // synchronize audio and video streams
      syncStreams = True;
      break;
    }

    case 'H': { // generate hint tracks (as well as the regular data tracks)
      generateHintTracks = True;
      break;
    }

    case 'Q': { // output QOS measurements
      qosMeasurementIntervalMS = 1000; // default: 1 second

      if (argc > 3 && argv[2][0] != '-') {
	// The next argument is the measurement interval,
	// in multiples of 100 ms
	if (sscanf(argv[2], "%u", &qosMeasurementIntervalMS) != 1) {
	  usage();
	}
	qosMeasurementIntervalMS *= 100;
	++argv; --argc;
      }
      break;
    }

    case 's': { // specify initial seek time (trick play)
      double arg;
      if (sscanf(argv[2], "%lg", &arg) != 1 || arg < 0) {
	usage();
      }
      initialSeekTime = arg;
      ++argv; --argc;
      break;
    }

    case 'z': { // scale (trick play)
      float arg;
      if (sscanf(argv[2], "%g", &arg) != 1 || arg == 0.0f) {
	usage();
      }
      scale = arg;
      ++argv; --argc;
      break;
    }

    default: {
      usage();
      break;
    }
    }

    ++argv; --argc;
  }
  if (argc != 2) usage();
  if (outputQuickTimeFile && outputAVIFile) {
    *env << "The -i and -q (or -4) flags cannot both be used!\n";
    usage();
  }
  Boolean outputCompositeFile = outputQuickTimeFile || outputAVIFile;
  if (!createReceivers && outputCompositeFile) {
    *env << "The -r and -q (or -4 or -i) flags cannot both be used!\n";
    usage();
  }
  if (outputCompositeFile && !movieWidthOptionSet) {
    *env << "Warning: The -q, -4 or -i option was used, but not -w.  Assuming a video width of "
	 << movieWidth << " pixels\n";
  }
  if (outputCompositeFile && !movieHeightOptionSet) {
    *env << "Warning: The -q, -4 or -i option was used, but not -h.  Assuming a video height of "
	 << movieHeight << " pixels\n";
  }
  if (outputCompositeFile && !movieFPSOptionSet) {
    *env << "Warning: The -q, -4 or -i option was used, but not -f.  Assuming a video frame rate of "
	 << movieFPS << " frames-per-second\n";
  }
  if (audioOnly && videoOnly) {
    *env << "The -a and -v flags cannot both be used!\n";
    usage();
  }
  if (sendOptionsRequestOnly && !sendOptionsRequest) {
    *env << "The -o and -O flags cannot both be used!\n";
    usage();
  }
  if (tunnelOverHTTPPortNum > 0) {
    if (streamUsingTCP) {
      *env << "The -t and -T flags cannot both be used!\n";
      usage();
    } else {
      streamUsingTCP = True;
    }
  }
  if (!createReceivers && notifyOnPacketArrival) {
    *env << "Warning: Because we're not receiving stream data, the -n flag has no effect\n";
  }
  if (durationSlop < 0) {
    // This parameter wasn't set, so use a default value.
    // If we're measuring QOS stats, then don't add any slop, to avoid
    // having 'empty' measurement intervals at the end.
    durationSlop = qosMeasurementIntervalMS > 0 ? 0.0 : 5.0;
  }

  char* url = argv[1];

  // Create our client object:
  ourClient = createClient(*env, verbosityLevel, progName);
  if (ourClient == NULL) {
    *env << "Failed to create " << clientProtocolName
		<< " client: " << env->getResultMsg() << "\n";
    shutdown();
  }

  if (sendOptionsRequest) {
    // Begin by sending an "OPTIONS" command:
    char* optionsResponse
      = getOptionsResponse(ourClient, url, username, password);
    if (sendOptionsRequestOnly) {
      if (optionsResponse == NULL) {
	*env << clientProtocolName << " \"OPTIONS\" request failed: "
	     << env->getResultMsg() << "\n";
      } else {
	*env << clientProtocolName << " \"OPTIONS\" request returned: "
	     << optionsResponse << "\n";
      }
      shutdown();
    }
    delete[] optionsResponse;
  }

  // Open the URL, to get a SDP description:
  char* sdpDescription
    = getSDPDescriptionFromURL(ourClient, url, username, password,
			       proxyServerName, proxyServerPortNum,
			       desiredPortNum);
  if (sdpDescription == NULL) {
    *env << "Failed to get a SDP description from URL \"" << url
		<< "\": " << env->getResultMsg() << "\n";
    shutdown();
  }

  *env << "Opened URL \"" << url
	  << "\", returning a SDP description:\n" << sdpDescription << "\n";

  // Create a media session object from this SDP description:
  session = MediaSession::createNew(*env, sdpDescription);
  delete[] sdpDescription;
  if (session == NULL) {
    *env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
    shutdown();
  } else if (!session->hasSubsessions()) {
    *env << "This session has no media subsessions (i.e., \"m=\" lines)\n";
    shutdown();
  }

  // Then, setup the "RTPSource"s for the session:
  MediaSubsessionIterator iter(*session);
  MediaSubsession *subsession;
  Boolean madeProgress = False;
  char const* singleMediumToTest = singleMedium;
  while ((subsession = iter.next()) != NULL) {
    // If we've asked to receive only a single medium, then check this now:
    if (singleMediumToTest != NULL) {
      if (strcmp(subsession->mediumName(), singleMediumToTest) != 0) {
		  *env << "Ignoring \"" << subsession->mediumName()
			  << "/" << subsession->codecName()
			  << "\" subsession, because we've asked to receive a single " << singleMedium
			  << " session only\n";
	continue;
      } else {
	// Receive this subsession only
	singleMediumToTest = "xxxxx";
	    // this hack ensures that we get only 1 subsession of this type
      }
    }

    if (desiredPortNum != 0) {
      subsession->setClientPortNum(desiredPortNum);
      desiredPortNum += 2;
    }

    if (createReceivers) {
      if (!subsession->initiate(simpleRTPoffsetArg)) {
	*env << "Unable to create receiver for \"" << subsession->mediumName()
	     << "/" << subsession->codecName()
	     << "\" subsession: " << env->getResultMsg() << "\n";
      } else {
	*env << "Created receiver for \"" << subsession->mediumName()
	     << "/" << subsession->codecName()
	     << "\" subsession (client ports " << subsession->clientPortNum()
	     << "-" << subsession->clientPortNum()+1 << ")\n";
	madeProgress = True;
	
	if (subsession->rtpSource() != NULL) {
	  // Because we're saving the incoming data, rather than playing
	  // it in real time, allow an especially large time threshold
	  // (1 second) for reordering misordered incoming packets:
	  unsigned const thresh = 1000000; // 1 second
	  subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);
	  
	  // Set the RTP source's OS socket buffer size as appropriate - either if we were explicitly asked (using -B),
	  // or if the desired FileSink buffer size happens to be larger than the current OS socket buffer size.
	  // (The latter case is a heuristic, on the assumption that if the user asked for a large FileSink buffer size,
	  // then the input data rate may be large enough to justify increasing the OS socket buffer size also.)
	  int socketNum = subsession->rtpSource()->RTPgs()->socketNum();
	  unsigned curBufferSize = getReceiveBufferSize(*env, socketNum);
	  if (socketInputBufferSize > 0 || fileSinkBufferSize > curBufferSize) {
	    unsigned newBufferSize = socketInputBufferSize > 0 ? socketInputBufferSize : fileSinkBufferSize;
	    newBufferSize = setReceiveBufferTo(*env, socketNum, newBufferSize);
	    if (socketInputBufferSize > 0) { // The user explicitly asked for the new socket buffer size; announce it:
	      *env << "Changed socket receive buffer size for the \""
		   << subsession->mediumName()
		   << "/" << subsession->codecName()
		   << "\" subsession from "
		   << curBufferSize << " to "
		   << newBufferSize << " bytes\n";
	    }
	  }
	}
      }
    } else {
      if (subsession->clientPortNum() == 0) {
	*env << "No client port was specified for the \""
	     << subsession->mediumName()
	     << "/" << subsession->codecName()
	     << "\" subsession.  (Try adding the \"-p <portNum>\" option.)\n";
      } else {
		madeProgress = True;
      }
    }
  }
  if (!madeProgress) shutdown();

  // Perform additional 'setup' on each subsession, before playing them:
  setupStreams();

  // Create output files:
  if (createReceivers) {
    if (outputQuickTimeFile) {
      // Create a "QuickTimeFileSink", to write to 'stdout':
      qtOut = QuickTimeFileSink::createNew(*env, *session, "stdout",
					   fileSinkBufferSize,
					   movieWidth, movieHeight,
					   movieFPS,
					   packetLossCompensate,
					   syncStreams,
					   generateHintTracks,
					   generateMP4Format);
      if (qtOut == NULL) {
	*env << "Failed to create QuickTime file sink for stdout: " << env->getResultMsg();
	shutdown();
      }

      qtOut->startPlaying(sessionAfterPlaying, NULL);
    } else if (outputAVIFile) {
      // Create an "AVIFileSink", to write to 'stdout':
      aviOut = AVIFileSink::createNew(*env, *session, "stdout",
				      fileSinkBufferSize,
				      movieWidth, movieHeight,
				      movieFPS,
				      packetLossCompensate);
      if (aviOut == NULL) {
	*env << "Failed to create AVI file sink for stdout: " << env->getResultMsg();
	shutdown();
      }

      aviOut->startPlaying(sessionAfterPlaying, NULL);
    } else {
      // Create and start "FileSink"s for each subsession:
      madeProgress = False;
      iter.reset();
      while ((subsession = iter.next()) != NULL) {
	if (subsession->readSource() == NULL) continue; // was not initiated

	// Create an output file for each desired stream:
	char outFileName[1000];
	if (singleMedium == NULL) {
	  // Output file name is
	  //     "<filename-prefix><medium_name>-<codec_name>-<counter>"
	  static unsigned streamCounter = 0;
	  snprintf(outFileName, sizeof outFileName, "%s%s-%s-%d",
		   fileNamePrefix, subsession->mediumName(),
		   subsession->codecName(), ++streamCounter);
	} else {
	  sprintf(outFileName, "stdout");
	}
	FileSink* fileSink;
	if (strcmp(subsession->mediumName(), "audio") == 0 &&
	    (strcmp(subsession->codecName(), "AMR") == 0 ||
	     strcmp(subsession->codecName(), "AMR-WB") == 0)) {
	  // For AMR audio streams, we use a special sink that inserts AMR frame hdrs:
	  fileSink = AMRAudioFileSink::createNew(*env, outFileName,
						 fileSinkBufferSize, oneFilePerFrame);
	} else if (strcmp(subsession->mediumName(), "video") == 0 &&
	    (strcmp(subsession->codecName(), "H264") == 0)) {
	  // For H.264 video streams, we use a special sink that inserts start codes:
	  fileSink = H264VideoFileSink::createNew(*env, outFileName,
						 fileSinkBufferSize, oneFilePerFrame);
	} else {
	  // Normal case:
	  fileSink = FileSink::createNew(*env, outFileName,
					 fileSinkBufferSize, oneFilePerFrame);
	}
	subsession->sink = fileSink;
	if (subsession->sink == NULL) {
	  *env << "Failed to create FileSink for \"" << outFileName
		  << "\": " << env->getResultMsg() << "\n";
	} else {
	  if (singleMedium == NULL) {
	    *env << "Created output file: \"" << outFileName << "\"\n";
	  } else {
	    *env << "Outputting data from the \"" << subsession->mediumName()
			<< "/" << subsession->codecName()
			<< "\" subsession to 'stdout'\n";
	  }

	  if (strcmp(subsession->mediumName(), "video") == 0 &&
	      strcmp(subsession->codecName(), "MP4V-ES") == 0 &&
	      subsession->fmtp_config() != NULL) {
	    // For MPEG-4 video RTP streams, the 'config' information
	    // from the SDP description contains useful VOL etc. headers.
	    // Insert this data at the front of the output file:
	    unsigned configLen;
	    unsigned char* configData
	      = parseGeneralConfigStr(subsession->fmtp_config(), configLen);
	    struct timeval timeNow;
	    gettimeofday(&timeNow, NULL);
	    fileSink->addData(configData, configLen, timeNow);
	    delete[] configData;
	  }

	  subsession->sink->startPlaying(*(subsession->readSource()),
					 subsessionAfterPlaying,
					 subsession);

	  // Also set a handler to be called if a RTCP "BYE" arrives
	  // for this subsession:
	  if (subsession->rtcpInstance() != NULL) {
	    subsession->rtcpInstance()->setByeHandler(subsessionByeHandler,
						      subsession);
	  }

	  madeProgress = True;
	}
      }
      if (!madeProgress) shutdown();
    }
  }

  // Finally, start playing each subsession, to start the data flow:

  startPlayingStreams();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}