static void test(UsageEnvironment& env) {
  fprintf(stderr, "test: begin...\n");
  char done = 0;
  int delay = 100 * 1000; // 100 ms, in microseconds
  env.taskScheduler().scheduleDelayedTask(delay, test_task, 0);
  env.taskScheduler().doEventLoop(&done);
  fprintf(stderr, "test: end...\n");
}
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // We need at least one "rtsp://" URL argument:
  if (argc < 2) {
    usage(*env, argv[0]);
    return 1;
  }

  // There are argc-1 URLs: argv[1] through argv[argc-1]. Open and start streaming each one:
  for (int i = 1; i <= argc-1; ++i) {
    openURL(*env, argv[0], argv[i]);
  }

  // All subsequent activity takes place within the event loop:
  env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
  // This function call does not return, unless, at some point in time,
  // "eventLoopWatchVariable" gets set to something non-zero.

  return 0;

  // If you choose to continue the application past this point (i.e., if you comment out
  // the "return 0;" statement above), and if you don't intend to do anything more with
  // the "TaskScheduler" and "UsageEnvironment" objects, then you can also reclaim the
  // (small) memory used by these objects by uncommenting the following code:
  /*
  env->reclaim(); env = NULL;
  delete scheduler; scheduler = NULL;
  */
}
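// The snippet above assumes a global "eventLoopWatchVariable" declared elsewhere
// (testRTSPClient.cpp declares it as "char eventLoopWatchVariable = 0;"). A minimal
// sketch of that watch-variable pattern follows; the SIGINT handler is illustrative,
// not part of the original example:
#include <csignal>

char eventLoopWatchVariable = 0;

static void handleSigInt(int /*signum*/) {
  // Setting the variable to any non-zero value makes doEventLoop() return:
  eventLoopWatchVariable = 1;
}

// Install before entering the loop:
//   signal(SIGINT, handleSigInt);
//   env->taskScheduler().doEventLoop(&eventLoopWatchVariable);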
extern "C" void demux_close_rtp(demuxer_t* demuxer) { // Reclaim all RTP-related state: // Get the RTP state that was stored in the demuxer's 'priv' field: RTPState* rtpState = (RTPState*)(demuxer->priv); if (rtpState == NULL) return; teardownRTSPorSIPSession(rtpState); UsageEnvironment* env = NULL; TaskScheduler* scheduler = NULL; if (rtpState->mediaSession != NULL) { env = &(rtpState->mediaSession->envir()); scheduler = &(env->taskScheduler()); } Medium::close(rtpState->mediaSession); Medium::close(rtpState->rtspClient); Medium::close(rtpState->sipClient); delete rtpState->audioBufferQueue; delete rtpState->videoBufferQueue; delete[] rtpState->sdpDescription; delete rtpState; #ifdef CONFIG_LIBAVCODEC av_freep(&avcctx); #endif env->reclaim(); delete scheduler; }
void CRTSPSession::rtsp_fun() {
  //::startRTSP(m_progName.c_str(), m_rtspUrl.c_str(), m_ndebugLever);
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
  if (openURL(*env, m_progName.c_str(), m_rtspUrl.c_str(), m_debugLevel) == 0) {
    m_nStatus = 1;
    env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
    m_running = false;
    eventLoopWatchVariable = 0;
    if (m_rtspClient) {
      shutdownStream(m_rtspClient, 0);
    }
    m_rtspClient = NULL;
  }
  env->reclaim(); env = NULL;
  delete scheduler; scheduler = NULL;
  m_nStatus = 2;
}
GenericMediaServer
::GenericMediaServer(UsageEnvironment& env, int ourSocket, Port ourPort)
  : Medium(env),
    fServerSocket(ourSocket), fServerPort(ourPort),
    fServerMediaSessions(HashTable::create(STRING_HASH_KEYS)),
    fClientConnections(HashTable::create(ONE_WORD_HASH_KEYS)),
    fClientSessions(HashTable::create(STRING_HASH_KEYS)) {
  ignoreSigPipeOnSocket(fServerSocket); // so that clients on the same host that are killed don't also kill us

  // Arrange to handle connections from others:
  env.taskScheduler().turnOnBackgroundReadHandling(fServerSocket, incomingConnectionHandler, this);
}
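// "incomingConnectionHandler" above is the static "trampoline" that
// turnOnBackgroundReadHandling() expects: the scheduler invokes a plain function
// pointer, and the function recovers the server object from the clientData
// argument. A sketch of that shape (the live555 sources follow this pattern;
// treat the body as illustrative rather than a verbatim copy):
void GenericMediaServer::incomingConnectionHandler(void* clientData, int /*mask*/) {
  GenericMediaServer* server = (GenericMediaServer*)clientData;
  server->incomingConnectionHandlerOnSocket(server->fServerSocket);
}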
int sendBeepSound(const char* rtspURL, const char* username, const char* password) {
  FILE* fp = fopen(WAVE_FILE, "r");
  if (fp == NULL) {
    LOG("wave file does not exist: %s", WAVE_FILE);
    return -1;
  } else {
    fclose(fp);
  }

  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // Begin by creating a "RTSPClient" object. Note that there is a separate "RTSPClient"
  // object for each stream that we wish to receive (even if more than one stream uses
  // the same "rtsp://" URL):
  ourRTSPClient* rtspClient = ourRTSPClient::createNew(*env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, "SCBT BackChannel");
  if (rtspClient == NULL) {
    *env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": " << env->getResultMsg() << "\n";
    env->reclaim(); env = NULL;
    delete scheduler; scheduler = NULL;
    return -2;
  }
  rtspClient->bRequireBackChannel = bEnableBackChannel;

  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
  // Note that this command - like all RTSP commands - is sent asynchronously; we do not
  // block, waiting for a response. Instead, the following function call returns
  // immediately, and we handle the RTSP response later, from within the event loop:
  Authenticator auth;
  auth.setUsernameAndPassword(username, password);
  rtspClient->sendDescribeCommand(continueAfterDESCRIBE, &auth);
  //continueAfterSETUP(rtspClient, 0, new char[2]);
  //startPlay(rtspClient);

  // All subsequent activity takes place within the event loop:
  env->taskScheduler().doEventLoop(&(rtspClient->scs.eventLoopWatchVariable));
  // This function call does not return, unless, at some point in time,
  // "eventLoopWatchVariable" gets set to something non-zero.

  // The loop has exited, and we don't intend to do anything more with the "TaskScheduler"
  // and "UsageEnvironment" objects, so we reclaim the (small) memory used by them:
  env->reclaim(); env = NULL;
  delete scheduler; scheduler = NULL;
  return 0;
}
// Thread entry point: runs the live555 event loop until the watch variable is set.
void live555Thread::Entry() {
  OSMutexLocker locker(&fMutex);
  if (fLive555Env == NULL) return;

  //qtss_printf("live555Thread loop start\n");
  UsageEnvironment* env = (UsageEnvironment*)fLive555Env;
  fLive555EventLoopWatchVariablePtr = &fLive555EventLoopWatchVariable;
  env->taskScheduler().doEventLoop(fLive555EventLoopWatchVariablePtr);
  //qtss_printf("live555Thread loop over\n");
}
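// Hypothetical companion to the thread entry above (not part of the original code):
// the only operations that are safe to perform on a running live555 event loop from
// another thread are writing the watch variable and calling
// TaskScheduler::triggerEvent(). A sketch of a stop request using the former:
void live555Thread::requestStop() {
  if (fLive555EventLoopWatchVariablePtr != NULL) {
    *fLive555EventLoopWatchVariablePtr = 1; // doEventLoop() returns shortly afterwards
  }
}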
int _tmain22(int argc, _TCHAR* argv[]) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = RtspVideoBasicUsageEnvironment::createNew(*scheduler);
  //UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // openURL(*env, "Live555Test", "rtsp://169.254.0.99/live.sdp");
  // openURL(*env, "Live555Test", "rtsp://localhost:8554/stream");
  openURL(*env, "Live555Test", "rtsp://127.0.0.1:8554/stream");

  // All subsequent activity takes place within the event loop:
  env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
  // This function call does not return, unless, at some point in time,
  // "eventLoopWatchVariable" gets set to something non-zero.
  return 0;
}
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  // Add live stream
  WW_H264VideoSource* videoSource = 0;
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "live", 0, "ww live test");
  sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, videoSource));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "using url \"" << url << "\"\n";
  delete[] url;

  // Run loop. Note: without a watch variable, doEventLoop() never returns,
  // so the cleanup below is unreachable as written:
  env->taskScheduler().doEventLoop();

  rtspServer->removeServerMediaSession(sms);
  Medium::close(rtspServer);

  env->reclaim();
  delete scheduler;
  return 1;
}
LIVE_API unsigned __stdcall ServerConnect(void* pv) {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("1", "1");

  // Try the default RTSP port (554) first, then fall back to 8554:
  RTSPServer* rtspServer;
  portNumBits rtspServerPortNum = 554;
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }

  env->taskScheduler().doEventLoop(); // does not return
  return 1;
}
//**************************************************************************************
void StreamShutdown() {
  if (m_rtspServer != NULL) {
    LogDebug("Stream server: Shutting down RTSP server");
    MPRTSPServer* server = m_rtspServer;
    m_rtspServer = NULL;
    Medium::close(server);
  }
  if (m_env != NULL) {
    LogDebug("Stream server: Cleaning up environment");
    UsageEnvironment* env = m_env;
    m_env = NULL;
    TaskScheduler* scheduler = &env->taskScheduler();
    env->reclaim();
    delete scheduler;
  }
}
RTSPServer::RTSPServer(UsageEnvironment& env,
                       int ourSocket, Port ourPort,
                       UserAuthenticationDatabase* authDatabase,
                       unsigned reclamationTestSeconds)
  : Medium(env),
    fServerSocket(ourSocket), fServerPort(ourPort),
    fAuthDB(authDatabase), fReclamationTestSeconds(reclamationTestSeconds),
    fServerMediaSessions(HashTable::create(STRING_HASH_KEYS)) {
#ifdef USE_SIGNALS
  // Ignore the SIGPIPE signal, so that clients on the same host that are killed
  // don't also kill us:
  signal(SIGPIPE, SIG_IGN);
#endif

  // Arrange to handle connections from others:
  env.taskScheduler().turnOnBackgroundReadHandling(fServerSocket,
      (TaskScheduler::BackgroundHandlerProc*)&incomingConnectionHandler, this);
}
int main() {
  CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
  MFStartup(MF_VERSION);

  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  in_addr dstAddr = { 127, 0, 0, 1 }; // 127.0.0.1 (byte-wise WinSock in_addr initialization)
  Groupsock rtpGroupsock(*env, dstAddr, 1233, 255);
  rtpGroupsock.addDestination(dstAddr, 1234, 0);

  RTPSink* rtpSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);
  MediaFoundationH264LiveSource* mediaFoundationH264Source = MediaFoundationH264LiveSource::createNew(*env);
  rtpSink->startPlaying(*mediaFoundationH264Source, NULL, NULL);

  env->taskScheduler().doEventLoop(); // This function call does not return.
  return 0;
}
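// The call above passes NULL for the "after playing" callback and its client data.
// A hedged sketch of what such a callback could look like (the name "afterPlaying"
// is illustrative); live555 invokes it when the source signals end-of-stream, at
// which point the sink can be stopped or restarted:
static void afterPlaying(void* clientData) {
  RTPSink* sink = (RTPSink*)clientData;
  sink->stopPlaying(); // stop feeding this sink
  // ...optionally close the old source, create a new one, and restart:
  // sink->startPlaying(*newSource, afterPlaying, sink);
}
// usage: rtpSink->startPlaying(*mediaFoundationH264Source, afterPlaying, rtpSink);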
//int main(int argc, char** argv) {
int myRTSPClient(char* pInfo, char* pURL) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  //openURL(*env, "TestRTSPClient.exe", "rtsp://mm2.pcslab.com/mm/7h800.mp4");
  openURL(*env, pInfo, pURL);

  // All subsequent activity takes place within the event loop:
  env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
  // This function call does not return, unless, at some point in time,
  // "eventLoopWatchVariable" gets set to something non-zero.
  return 0;

  // If you choose to continue the application past this point (i.e., if you comment out
  // the "return 0;" statement above), and if you don't intend to do anything more with
  // the "TaskScheduler" and "UsageEnvironment" objects, then you can also reclaim the
  // (small) memory used by these objects by uncommenting the following code:
  /*
  env->reclaim(); env = NULL;
  delete scheduler; scheduler = NULL;
  */
}
void live555Thread::live555EventLoop(char* watchVariable) {
  if ((fLive555Env == NULL) || (watchVariable == NULL)) return;

  // Stop the main loop thread:
  this->fLive555EventLoopWatchVariable = ~0;
  OSMutexLocker locker(&fMutex); // wait for the thread to release the mutex

  //qtss_printf("live555Thread event loop start\n");
  UsageEnvironment* env = (UsageEnvironment*)fLive555Env;
  fLive555EventLoopWatchVariablePtr = watchVariable;
  env->taskScheduler().doEventLoop(watchVariable);
  fLive555EventLoopWatchVariablePtr = NULL;
  //qtss_printf("live555Thread event loop over\n");

  // Restart the main loop thread:
  fLive555EventLoopWatchVariable = 0;
  fLive555EventLoopWatchVariablePtr = &fLive555EventLoopWatchVariable;
  //locker.Unlock();
  this->Start();
}
int main(int argc, char** argv) {
  init_signals();
  setpriority(PRIO_PROCESS, 0, 0);

  int IsSilence = 0;
  int svcEnable = 0;
  int cnt = 0;
  int activePortCnt = 0;

  if (GetSampleRate() == 16000) {
    audioOutputBitrate = 128000;
    audioSamplingFrequency = 16000;
  } else {
    audioOutputBitrate = 64000;
    audioSamplingFrequency = 8000;
  }

  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  int msg_type, video_type;
  APPROInput* MjpegInputDevice = NULL;
  APPROInput* H264InputDevice = NULL;
  APPROInput* Mpeg4InputDevice = NULL;
  // Note: indices 0..7 are used below (SVC mode forks eight children), so the
  // array must have eight elements; the original declared only four:
  static pid_t child[8] = { -1, -1, -1, -1, -1, -1, -1, -1 };
  StreamingMode streamingMode = STREAMING_UNICAST;
  netAddressBits multicastAddress = 0; //our_inet_addr("224.1.4.6");
  portNumBits videoRTPPortNum = 0;
  portNumBits audioRTPPortNum = 0;

  IsSilence = 0;
  svcEnable = 0;
  audioType = AUDIO_G711;
  streamingMode = STREAMING_UNICAST;

  for (cnt = 1; cnt < argc; cnt++) {
    if (strcmp(argv[cnt], "-m") == 0) { streamingMode = STREAMING_MULTICAST_SSM; }
    if (strcmp(argv[cnt], "-s") == 0) { IsSilence = 1; }
    if (strcmp(argv[cnt], "-a") == 0) { audioType = AUDIO_AAC; }
    if (strcmp(argv[cnt], "-v") == 0) { svcEnable = 1; }
  }

#if 0
  printf("###########IsSilence = %d ################\n", IsSilence);
  printf("###########streamingMode = %d ################\n", streamingMode);
  printf("###########audioType = %d ################\n", audioType);
  printf("###########svcEnable = %d ################\n", svcEnable);
#endif

  // Fork one process per stream:
  child[0] = fork();
  if (child[0] != 0) { child[1] = fork(); }
  if (child[0] != 0 && child[1] != 0) { child[2] = fork(); }
  if (child[0] != 0 && child[1] != 0 && child[2] != 0) { child[3] = fork(); }

  if (svcEnable) {
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0) { child[4] = fork(); }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0) { child[5] = fork(); }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0) { child[6] = fork(); }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0 && child[5] != 0 && child[6] != 0) { child[7] = fork(); }
  }

  // fork() returns 0 in the child, so each block below configures one child process:
  if (child[0] == 0) { // child 0
    msg_type = LIVE_MSG_TYPE4; video_type = VIDEO_TYPE_H264_CIF;
    rtspServerPortNum = 8556; H264VideoBitrate = 12000000;
    videoRTPPortNum = 6012; audioRTPPortNum = 6014;
  }
  if (child[1] == 0) { // child 1
    msg_type = LIVE_MSG_TYPE3; video_type = VIDEO_TYPE_MJPEG;
    rtspServerPortNum = 8555; MjpegVideoBitrate = 12000000;
    videoRTPPortNum = 6008; audioRTPPortNum = 6010;
  }
  if (child[2] == 0) { // child 2
    msg_type = LIVE_MSG_TYPE; video_type = VIDEO_TYPE_MPEG4;
    rtspServerPortNum = 8553; Mpeg4VideoBitrate = 12000000;
    videoRTPPortNum = 6000; audioRTPPortNum = 6002;
  }
  if (child[3] == 0) { // child 3
    msg_type = LIVE_MSG_TYPE2; video_type = VIDEO_TYPE_MPEG4_CIF;
    rtspServerPortNum = 8554; Mpeg4VideoBitrate = 12000000;
    videoRTPPortNum = 6004; audioRTPPortNum = 6006;
  }
  if (svcEnable) {
    if (child[4] == 0) { // child 4
      msg_type = LIVE_MSG_TYPE5; video_type = VIDEO_TYPE_H264_SVC_30FPS;
      rtspServerPortNum = 8601; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6016; audioRTPPortNum = 6018;
    }
    if (child[5] == 0) { // child 5
      msg_type = LIVE_MSG_TYPE6; video_type = VIDEO_TYPE_H264_SVC_15FPS;
      rtspServerPortNum = 8602; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6020; audioRTPPortNum = 6022;
    }
    if (child[6] == 0) { // child 6
      msg_type = LIVE_MSG_TYPE7; video_type = VIDEO_TYPE_H264_SVC_7FPS;
      rtspServerPortNum = 8603; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6024; audioRTPPortNum = 6026;
    }
    if (child[7] == 0) { // child 7
      msg_type = LIVE_MSG_TYPE8; video_type = VIDEO_TYPE_H264_SVC_3FPS;
      rtspServerPortNum = 8604; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6028; audioRTPPortNum = 6030;
    }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 &&
        child[4] != 0 && child[5] != 0 && child[6] != 0 && child[7] != 0) { // parent
      msg_type = LIVE_MSG_TYPE9; video_type = VIDEO_TYPE_H264;
      rtspServerPortNum = 8557; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6032; audioRTPPortNum = 6034;
    }
  } else {
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0) { // parent
      msg_type = LIVE_MSG_TYPE5; video_type = VIDEO_TYPE_H264;
      rtspServerPortNum = 8557; H264VideoBitrate = 12000000;
      videoRTPPortNum = 6032; audioRTPPortNum = 6034;
    }
  }
  videoType = video_type;

  // Objects used for multicast streaming:
  static Groupsock* rtpGroupsockAudio = NULL;
  static Groupsock* rtcpGroupsockAudio = NULL;
  static Groupsock* rtpGroupsockVideo = NULL;
  static Groupsock* rtcpGroupsockVideo = NULL;
  static FramedSource* sourceAudio = NULL;
  static RTPSink* sinkAudio = NULL;
  static RTCPInstance* rtcpAudio = NULL;
  static FramedSource* sourceVideo = NULL;
  static RTPSink* sinkVideo = NULL;
  static RTCPInstance* rtcpVideo = NULL;

  share_memory_init(msg_type);
  //init_signals();

  *env << "Initializing...\n";

  // Initialize the WIS input device:
  if (video_type == VIDEO_TYPE_MJPEG) {
    MjpegInputDevice = APPROInput::createNew(*env, VIDEO_TYPE_MJPEG);
    if (MjpegInputDevice == NULL) {
      err(*env) << "Failed to create MJPEG input device\n";
      exit(1);
    }
  }
  if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF ||
      video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS ||
      video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
    H264InputDevice = APPROInput::createNew(*env, video_type);
    if (H264InputDevice == NULL) {
      err(*env) << "Failed to create H264 input device\n";
      exit(1);
    }
  }
  if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
    Mpeg4InputDevice = APPROInput::createNew(*env, video_type);
    if (Mpeg4InputDevice == NULL) {
      err(*env) << "Failed to create MPEG4 input device\n";
      exit(1);
    }
  }

  // Create the RTSP server:
  RTSPServer* rtspServer = NULL;
  // Normal case: Streaming from a built-in RTSP server:
  rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  *env << "...done initializing\n";

  if (streamingMode == STREAMING_UNICAST) {
    if (video_type == VIDEO_TYPE_MJPEG) {
      ServerMediaSession* sms = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName,
                                                              streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISJPEGVideoServerMediaSubsession::createNew(sms->envir(), *MjpegInputDevice, MjpegVideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *MjpegInputDevice));
      }
      rtspServer->addServerMediaSession(sms);
      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }
    if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF ||
        video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS ||
        video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
      ServerMediaSession* sms;
      sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISH264VideoServerMediaSubsession::createNew(sms->envir(), *H264InputDevice, H264VideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *H264InputDevice));
      }
      rtspServer->addServerMediaSession(sms);
      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }
    // Create a record describing the media to be streamed:
    if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
      ServerMediaSession* sms = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName,
                                                              streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISMPEG4VideoServerMediaSubsession::createNew(sms->envir(), *Mpeg4InputDevice, Mpeg4VideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *Mpeg4InputDevice));
      }
      rtspServer->addServerMediaSession(sms);
      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }
  } else {
    if (streamingMode == STREAMING_MULTICAST_SSM) {
      if (multicastAddress == 0) multicastAddress = chooseRandomIPv4SSMAddress(*env);
    } else if (multicastAddress != 0) {
      streamingMode = STREAMING_MULTICAST_ASM;
    }
    struct in_addr dest;
    dest.s_addr = multicastAddress;
    const unsigned char ttl = 255;

    // For RTCP:
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen + 1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case

    ServerMediaSession* sms = NULL;
    if (video_type == VIDEO_TYPE_MJPEG) {
      sms = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = MjpegInputDevice->audioSource();
      sourceVideo = WISJPEGStreamSource::createNew(MjpegInputDevice->videoSource());
      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      sinkVideo = JPEGVideoRTPSink::createNew(*env, rtpGroupsockVideo);
    }
    if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF ||
        video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS ||
        video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
      sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = H264InputDevice->audioSource();
      sourceVideo = H264VideoStreamFramer::createNew(*env, H264InputDevice->videoSource());
      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      {
        char BuffStr[200];
        extern int GetSprop(void* pBuff, char vType);
        GetSprop(BuffStr, video_type);
        sinkVideo = H264VideoRTPSink::createNew(*env, rtpGroupsockVideo, 96, 0x64001F, BuffStr);
      }
    }
    // Create a record describing the media to be streamed:
    if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
      sms = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = Mpeg4InputDevice->audioSource();
      sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(*env, Mpeg4InputDevice->videoSource());
      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      sinkVideo = MPEG4ESVideoRTPSink::createNew(*env, rtpGroupsockVideo, 97);
    }

    /* VIDEO channel setup */
    if (1) {
      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthVideo = (Mpeg4VideoBitrate + 500) / 1000; // in kbps; for RTCP b/w share
      rtcpVideo = RTCPInstance::createNew(*env, rtcpGroupsockVideo,
                                          totalSessionBandwidthVideo, CNAME,
                                          sinkVideo, NULL /* we're a server */,
                                          streamingMode == STREAMING_MULTICAST_SSM);
      // Note: This starts RTCP running automatically
      sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkVideo, rtcpVideo));
      // Start streaming:
      sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
    }

    /* AUDIO channel setup */
    if (IsSilence == 0) {
      // There's a separate RTP stream for audio.
      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortAudio(audioRTPPortNum);
      const Port rtcpPortAudio(audioRTPPortNum + 1);
      rtpGroupsockAudio = new Groupsock(*env, dest, rtpPortAudio, ttl);
      rtcpGroupsockAudio = new Groupsock(*env, dest, rtcpPortAudio, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockAudio->multicastSendOnly();
        rtcpGroupsockAudio->multicastSendOnly();
      }
      if (audioSamplingFrequency == 16000) {
        if (audioType == AUDIO_G711) {
          sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                               audioSamplingFrequency, "audio", "PCMU", 1);
        } else {
          char const* encoderConfigStr = "1408"; // (2<<3)|(8>>1) = 0x14; ((8<<7)&0xFF)|(1<<3) = 0x08
          sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                                     audioSamplingFrequency, "audio", "AAC-hbr",
                                                     encoderConfigStr, audioNumChannels);
        }
      } else {
        if (audioType == AUDIO_G711) {
          sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 0,
                                               audioSamplingFrequency, "audio", "PCMU", 1);
        } else {
          char const* encoderConfigStr = "1588"; // (2<<3)|(11>>1) = 0x15; ((11<<7)&0xFF)|(1<<3) = 0x88
          sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                                     audioSamplingFrequency, "audio", "AAC-hbr",
                                                     encoderConfigStr, audioNumChannels);
        }
      }
      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate + 500) / 1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(*env, rtcpGroupsockAudio,
                                          totalSessionBandwidthAudio, CNAME,
                                          sinkAudio, NULL /* we're a server */,
                                          streamingMode == STREAMING_MULTICAST_SSM);
      // Note: This starts RTCP running automatically
      sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));
      // Start streaming:
      sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
    }

    rtspServer->addServerMediaSession(sms);
    {
      struct in_addr dest;
      dest.s_addr = multicastAddress;
      char* url = rtspServer->rtspURL(sms);
      //char* url2 = inet_ntoa(dest);
      *env << "Multicast: play this stream using the URL:\n\t" << url << "\n";
      //*env << "Multicast addr:\n\t" << url2 << "\n";
      delete[] url;
    }
  }

  // Begin the LIVE555 event loop:
  env->taskScheduler().doEventLoop(&watchVariable); // does not return until "watchVariable" is set

  if (streamingMode != STREAMING_UNICAST) {
    Medium::close(rtcpAudio);
    Medium::close(sinkAudio);
    Medium::close(sourceAudio);
    delete rtpGroupsockAudio;
    delete rtcpGroupsockAudio;
    Medium::close(rtcpVideo);
    Medium::close(sinkVideo);
    Medium::close(sourceVideo);
    delete rtpGroupsockVideo;
    delete rtcpGroupsockVideo;
  }
  Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s
  if (MjpegInputDevice != NULL) { Medium::close(MjpegInputDevice); }
  if (H264InputDevice != NULL) { Medium::close(H264InputDevice); }
  if (Mpeg4InputDevice != NULL) { Medium::close(Mpeg4InputDevice); }
  env->reclaim();
  delete scheduler;
  ApproInterfaceExit();
  return 0; // only to prevent compiler warning
}
// -----------------------------------------
// entry point
// -----------------------------------------
int main(int argc, char** argv) {
  // default parameters
  const char* dev_name = "/dev/video0";
  int format = V4L2_PIX_FMT_H264;
  int width = 640;
  int height = 480;
  int queueSize = 10;
  int fps = 25;
  unsigned short rtpPortNum = 20000;
  unsigned short rtcpPortNum = rtpPortNum + 1;
  unsigned char ttl = 5;
  struct in_addr destinationAddress;
  unsigned short rtspPort = 8554;
  unsigned short rtspOverHTTPPort = 0;
  bool multicast = false;
  int verbose = 0;
  std::string outputFile;
  bool useMmap = false;

  // decode parameters
  int c = 0;
  while ((c = getopt(argc, argv, "hW:H:Q:P:F:v::O:T:mM")) != -1) {
    switch (c) {
      case 'O': outputFile = optarg; break;
      case 'v': verbose = 1; if (optarg && *optarg == 'v') verbose++; break;
      case 'm': multicast = true; break;
      case 'W': width = atoi(optarg); break;
      case 'H': height = atoi(optarg); break;
      case 'Q': queueSize = atoi(optarg); break;
      case 'P': rtspPort = atoi(optarg); break;
      case 'T': rtspOverHTTPPort = atoi(optarg); break;
      case 'F': fps = atoi(optarg); break;
      case 'M': useMmap = true; break;
      case 'h':
      {
        std::cout << argv[0] << " [-v[v]] [-m] [-P RTSP port] [-T RTSP/HTTP port] [-Q queueSize] [-M] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
        std::cout << "\t -v       : verbose" << std::endl;
        std::cout << "\t -vv      : very verbose" << std::endl;
        std::cout << "\t -Q length: Number of frames in the queue (default " << queueSize << ")" << std::endl;
        std::cout << "\t -O file  : Dump capture to a file" << std::endl;
        std::cout << "\t RTSP options :" << std::endl;
        std::cout << "\t -m       : Enable multicast output" << std::endl;
        std::cout << "\t -P port  : RTSP port (default " << rtspPort << ")" << std::endl;
        std::cout << "\t -T port  : RTSP over HTTP port (default " << rtspOverHTTPPort << ")" << std::endl;
        std::cout << "\t V4L2 options :" << std::endl;
        std::cout << "\t -M       : V4L2 capture using memory mapped buffers (default use read interface)" << std::endl;
        std::cout << "\t -F fps   : V4L2 capture framerate (default " << fps << ")" << std::endl;
        std::cout << "\t -W width : V4L2 capture width (default " << width << ")" << std::endl;
        std::cout << "\t -H height: V4L2 capture height (default " << height << ")" << std::endl;
        std::cout << "\t device   : V4L2 capture device (default " << dev_name << ")" << std::endl;
        exit(0);
      }
    }
  }
  if (optind < argc) {
    dev_name = argv[optind];
  }

  // create live555 environment
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // create RTSP server
  RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
  } else {
    // set up HTTP tunneling
    if (rtspOverHTTPPort) {
      rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
    }

    // Init capture
    *env << "Create V4L2 Source..." << dev_name << "\n";
    V4L2DeviceParameters param(dev_name, format, width, height, fps, verbose);
    V4L2Device* videoCapture = NULL;
    if (useMmap) {
      videoCapture = V4L2MMAPDeviceSource::createNew(param);
    } else {
      videoCapture = V4L2READDeviceSource::createNew(param);
    }
    V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outputFile, queueSize, verbose);
    if (videoES == NULL) {
      *env << "Unable to create source for device " << dev_name << "\n";
    } else {
      destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
      OutPacketBuffer::maxSize = videoCapture->getBufferSize();
      StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);

      // Create multicast session
      if (multicast) {
        addSession(rtspServer, "multicast",
                   MulticastServerMediaSubsession::createNew(*env, destinationAddress,
                                                             Port(rtpPortNum), Port(rtcpPortNum),
                                                             ttl, 96, replicator, format));
      }
      // Create unicast session
      addSession(rtspServer, "unicast",
                 UnicastServerMediaSubsession::createNew(*env, replicator, format));

      // main loop
      signal(SIGINT, sighandler);
      env->taskScheduler().doEventLoop(&quit);
      *env << "Exiting...\n";
    }

    Medium::close(videoES);
    delete videoCapture;
    Medium::close(rtspServer);
  }

  env->reclaim();
  delete scheduler;
  return 0;
}
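// The "addSession" helper used above is not shown in the snippet. A plausible
// implementation, consistent with how it is called (the exact signature and the
// log message are assumptions):
void addSession(RTSPServer* rtspServer, const std::string& sessionName, ServerMediaSubsession* subSession) {
  UsageEnvironment& env(rtspServer->envir());
  ServerMediaSession* sms = ServerMediaSession::createNew(env, sessionName.c_str());
  sms->addSubsession(subSession);
  rtspServer->addServerMediaSession(sms);
  char* url = rtspServer->rtspURL(sms);
  env << "Play this stream using the URL " << url << "\n";
  delete[] url;
}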
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server. Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
#ifdef VANLINK_DVR_RTSP_PLAYBACK
  portNumBits rtspServerPortNum = 654; //add by sxh rtsp
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8654;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#else
  portNumBits rtspServerPortNum = 554;
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#endif

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t" << urlPrefix
       << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

#if 0 // RTSP-over-HTTP tunneling is not yet working
  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port number (8000).
  RTSPOverHTTPServer* rtspOverHTTPServer;
  portNumBits httpServerPortNum = 80;
  rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  if (rtspOverHTTPServer == NULL) {
    httpServerPortNum = 8000;
    rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  }
  if (rtspOverHTTPServer == NULL) {
    *env << "(No server for RTSP-over-HTTP tunneling was created.)\n";
  } else {
    *env << "(We use port " << httpServerPortNum << " for RTSP-over-HTTP tunneling.)\n";
  }
#endif

  env->taskScheduler().doEventLoop(); // does not return
  return 0; // only to prevent compiler warning
}
// -----------------------------------------
// entry point
// -----------------------------------------
int main(int argc, char** argv) {
  // default parameters
  const char* dev_name = "/dev/video0";
  int format = V4L2_PIX_FMT_H264;
  int width = 640;
  int height = 480;
  int queueSize = 10;
  int fps = 25;
  unsigned short rtspPort = 8554;
  unsigned short rtspOverHTTPPort = 0;
  bool multicast = false;
  int verbose = 0;
  std::string outputFile;
  bool useMmap = true;
  std::string url = "unicast";
  std::string murl = "multicast";
  bool useThread = true;
  std::string maddr;
  bool repeatConfig = true;
  int timeout = 65;

  // decode parameters
  int c = 0;
  while ((c = getopt(argc, argv, "v::Q:O:" "I:P:T:m:u:M:ct:" "rsfF:W:H:" "h")) != -1) {
    switch (c) {
      case 'v': verbose = 1; if (optarg && *optarg == 'v') verbose++; break;
      case 'Q': queueSize = atoi(optarg); break;
      case 'O': outputFile = optarg; break;
      // RTSP/RTP
      case 'I': ReceivingInterfaceAddr = inet_addr(optarg); break;
      case 'P': rtspPort = atoi(optarg); break;
      case 'T': rtspOverHTTPPort = atoi(optarg); break;
      case 'u': url = optarg; break;
      case 'm': multicast = true; murl = optarg; break;
      case 'M': multicast = true; maddr = optarg; break;
      case 'c': repeatConfig = false; break;
      case 't': timeout = atoi(optarg); break;
      // V4L2
      case 'r': useMmap = false; break;
      case 's': useThread = false; break;
      case 'f': format = 0; break;
      case 'F': fps = atoi(optarg); break;
      case 'W': width = atoi(optarg); break;
      case 'H': height = atoi(optarg); break;
      case 'h':
      default:
      {
        std::cout << argv[0] << " [-v[v]] [-Q queueSize] [-O file]" << std::endl;
        std::cout << "\t [-I interface] [-P RTSP port] [-T RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout]" << std::endl;
        std::cout << "\t [-r] [-s] [-W width] [-H height] [-F fps] [device] [device]" << std::endl;
        std::cout << "\t -v       : verbose" << std::endl;
        std::cout << "\t -vv      : very verbose" << std::endl;
        std::cout << "\t -Q length: Number of frames in the queue (default " << queueSize << ")" << std::endl;
        std::cout << "\t -O output: Copy captured frame to a file or a V4L2 device" << std::endl;
        std::cout << "\t RTSP options :" << std::endl;
        std::cout << "\t -I addr  : RTSP interface (default autodetect)" << std::endl;
        std::cout << "\t -P port  : RTSP port (default " << rtspPort << ")" << std::endl;
        std::cout << "\t -T port  : RTSP over HTTP port (default " << rtspOverHTTPPort << ")" << std::endl;
        std::cout << "\t -u url   : unicast url (default " << url << ")" << std::endl;
        std::cout << "\t -m url   : multicast url (default " << murl << ")" << std::endl;
        std::cout << "\t -M addr  : multicast group:port (default is random_address:20000)" << std::endl;
        std::cout << "\t -c       : don't repeat config (default repeat config before IDR frame)" << std::endl;
        std::cout << "\t -t secs  : RTCP expiration timeout (default " << timeout << ")" << std::endl;
        std::cout << "\t V4L2 options :" << std::endl;
        std::cout << "\t -r       : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
        std::cout << "\t -s       : V4L2 capture using live555 mainloop (default use a reader thread)" << std::endl;
        std::cout << "\t -f       : V4L2 capture using current format (-W, -H, -F are ignored)" << std::endl;
        std::cout << "\t -W width : V4L2 capture width (default " << width << ")" << std::endl;
        std::cout << "\t -H height: V4L2 capture height (default " << height << ")" << std::endl;
        std::cout << "\t -F fps   : V4L2 capture framerate (default " << fps << ")" << std::endl;
        std::cout << "\t device   : V4L2 capture device (default " << dev_name << ")" << std::endl;
        exit(0);
      }
    }
  }

  std::list<std::string> devList;
  while (optind < argc) {
    devList.push_back(argv[optind]);
    optind++;
  }
  if (devList.empty()) {
    devList.push_back(dev_name);
  }

  // init logger
  initLogger(verbose);

  // create live555 environment
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // split multicast info
  std::istringstream is(maddr);
  std::string ip;
  getline(is, ip, ':');
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  if (!ip.empty()) {
    destinationAddress.s_addr = inet_addr(ip.c_str());
  }
  std::string port;
  getline(is, port, ':');
  unsigned short rtpPortNum = 20000;
  if (!port.empty()) {
    rtpPortNum = atoi(port.c_str());
  }
  unsigned short rtcpPortNum = rtpPortNum + 1;
  unsigned char ttl = 5;

  // create RTSP server
  RTSPServer* rtspServer = createRTSPServer(*env, rtspPort, rtspOverHTTPPort, timeout);
  if (rtspServer == NULL) {
    LOG(ERROR) << "Failed to create RTSP server: " << env->getResultMsg();
  } else {
    int nbSource = 0;
    std::list<std::string>::iterator devIt;
    for (devIt = devList.begin(); devIt != devList.end(); ++devIt) {
      std::string deviceName(*devIt);

      // Init capture
      LOG(NOTICE) << "Create V4L2 Source..." << deviceName;
      V4L2DeviceParameters param(deviceName.c_str(), format, width, height, fps, verbose);
      V4l2Capture* videoCapture = V4l2DeviceFactory::CreateVideoCapure(param, useMmap);
      if (videoCapture) {
        nbSource++;
        format = videoCapture->getFormat();

        int outfd = -1;
        V4l2Output* out = NULL;
        if (!outputFile.empty()) {
          V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(),
                                        videoCapture->getWidth(), videoCapture->getHeight(), 0, verbose);
          // Assign to the existing "out"; redeclaring it here (as the original did)
          // shadows the outer variable, so the output device was never deleted:
          out = V4l2DeviceFactory::CreateVideoOutput(outparam, useMmap);
          if (out != NULL) {
            outfd = out->getFd();
          }
        }

        LOG(NOTICE) << "Start V4L2 Capture..." << deviceName;
        if (!videoCapture->captureStart()) {
          LOG(NOTICE) << "Cannot start V4L2 Capture for:" << deviceName;
        }

        V4L2DeviceSource* videoES = NULL;
        if (format == V4L2_PIX_FMT_H264) {
          videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread, repeatConfig);
        } else {
          videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread);
        }
        if (videoES == NULL) {
          LOG(FATAL) << "Unable to create source for device " << deviceName;
          delete videoCapture;
        } else {
          // extend buffer size if needed
          if (videoCapture->getBufferSize() > OutPacketBuffer::maxSize) {
            OutPacketBuffer::maxSize = videoCapture->getBufferSize();
          }
          StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);

          std::string baseUrl;
          if (devList.size() > 1) {
            baseUrl = basename(deviceName.c_str());
            baseUrl.append("/");
          }

          // Create multicast session
          if (multicast) {
            LOG(NOTICE) << "RTP  address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
            LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;
            addSession(rtspServer, baseUrl + murl,
                       MulticastServerMediaSubsession::createNew(*env, destinationAddress,
                                                                 Port(rtpPortNum), Port(rtcpPortNum),
                                                                 ttl, replicator, format));
            // increment ports for next sessions
            rtpPortNum += 2;
            rtcpPortNum += 2;
          }
          // Create unicast session
          addSession(rtspServer, baseUrl + url,
                     UnicastServerMediaSubsession::createNew(*env, replicator, format));
        }

        if (out) {
          delete out;
        }
      }
    }

    if (nbSource > 0) {
      // main loop
      signal(SIGINT, sighandler);
      env->taskScheduler().doEventLoop(&quit);
      LOG(NOTICE) << "Exiting....";
    }
    Medium::close(rtspServer);
  }

  env->reclaim();
  delete scheduler;
  return 0;
}
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server. Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
  portNumBits rtspServerPortNum = 554; // first try to create the RTSP server on the default port, 554
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) { // creating on port 554 failed, so fall back to port 8554
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t" << urlPrefix
       << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".264\" => a H.264 Video Elementary Stream file\n";
  *env << "\t\".265\" => a H.265 Video Elementary Stream file\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".ac3\" => an AC-3 Audio file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".mkv\" => a Matroska audio+video+(optional)subtitles file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ogg\" or \".ogv\" or \".opus\" => an Ogg audio and/or video file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".vob\" => a VOB (MPEG-2 video with AC-3 audio) file\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "\t\".webm\" => a WebM audio(Vorbis)+video(VP8) file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).
  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "(We use port " << rtspServer->httpServerPortNum()
         << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
  } else {
    *env << "(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return
  return 0; // only to prevent compiler warning
}