// Constructor for a source-specific multicast group:
Groupsock::Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr,
                     struct in_addr const& sourceFilterAddr, Port port)
  : OutputSocket(env, port), deleteIfNoMembers(False), isSlave(False),
    fIncomingGroupEId(groupAddr, sourceFilterAddr, port.num()),
    fDests(NULL), fTTL(255) {
  addDestination(groupAddr, port);

  // First try a SSM join.  If that fails, try a regular join:
  if (!socketJoinGroupSSM(env, socketNum(), groupAddr.s_addr,
                          sourceFilterAddr.s_addr)) {
    if (DebugLevel >= 3) {
      env << *this << ": SSM join failed: " << env.getResultMsg();
      env << " - trying regular join instead\n";
    }
    if (!socketJoinGroup(env, socketNum(), groupAddr.s_addr)) {
      if (DebugLevel >= 1) {
        env << *this << ": failed to join group: " << env.getResultMsg() << "\n";
      }
    }
  }

  if (DebugLevel >= 2) env << *this << ": created\n";
}
// Constructor for a source-independent multicast group:
Groupsock::Groupsock(UsageEnvironment& env, struct in_addr const& groupAddr,
                     Port port, u_int8_t ttl)
  : OutputSocket(env, port), deleteIfNoMembers(False), isSlave(False),
    fIncomingGroupEId(groupAddr, port.num(), ttl),
    fDests(NULL), fTTL(ttl) {
  addDestination(groupAddr, port);
  // printf("Groupsock: grpaddr %s\n", inet_ntoa(groupAddr)); //jay

  if (!socketJoinGroup(env, socketNum(), groupAddr.s_addr)) {
    if (DebugLevel >= 1) {
      env << *this << ": failed to join group: " << env.getResultMsg() << "\n";
    }
  }

  // Make sure we can get our source address:
  if (ourIPAddress(env) == 0) {
    if (DebugLevel >= 0) { // this is a fatal error
      env << "Unable to determine our source address: " << env.getResultMsg() << "\n";
    }
  }

  if (DebugLevel >= 2) env << *this << ": created\n";
}
int CRTSPSession::openURL(UsageEnvironment& env, char const* progName,
                          char const* rtspURL, int debugLevel) {
  m_rtspClient = ourRTSPClient::createNew(env, rtspURL, debugLevel, progName);
  if (m_rtspClient == NULL) {
    env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": "
        << env.getResultMsg() << "\n";
    return -1;
  }

  ((ourRTSPClient*)m_rtspClient)->m_nID = m_nID;

  // Send a RTSP "DESCRIBE" command; the response is handled asynchronously:
  m_rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
  return 0;
}
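// The DESCRIBE response above is delivered asynchronously, to a
// "continueAfterDESCRIBE" callback that this snippet does not show.  A
// minimal sketch, assuming the standard live555 "responseHandler" signature
// (the session-setup step is application-specific and only indicated here):
void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString) {
  UsageEnvironment& env = rtspClient->envir();
  if (resultCode != 0) {
    env << "Failed to get a SDP description: " << resultString << "\n";
    delete[] resultString;
    return;
  }
  char* const sdpDescription = resultString;
  env << "Got a SDP description:\n" << sdpDescription << "\n";
  // Typically one would now create a MediaSession from the SDP and set up its
  // subsessions, e.g. via MediaSession::createNew(env, sdpDescription):
  delete[] sdpDescription;
}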
int sendBeepSound(const char* rtspURL, const char* username, const char* password) {
  FILE* fp = fopen(WAVE_FILE, "r");
  if (fp == NULL) {
    LOG("wave file does not exist: %s", WAVE_FILE);
    return -1;
  } else {
    fclose(fp);
  }

  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // Begin by creating a "RTSPClient" object.  Note that there is a separate
  // "RTSPClient" object for each stream that we wish to receive (even if more
  // than one stream uses the same "rtsp://" URL).
  ourRTSPClient* rtspClient = ourRTSPClient::createNew(*env, rtspURL,
                                                       RTSP_CLIENT_VERBOSITY_LEVEL,
                                                       "SCBT BackChannel");
  if (rtspClient == NULL) {
    *env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": "
         << env->getResultMsg() << "\n";
    env->reclaim(); env = NULL;
    delete scheduler; scheduler = NULL;
    return -2;
  }
  rtspClient->bRequireBackChannel = bEnableBackChannel;

  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
  // Note that this command - like all RTSP commands - is sent asynchronously; we
  // do not block waiting for a response.  Instead, the following function call
  // returns immediately, and we handle the RTSP response later, from within the
  // event loop:
  Authenticator auth;
  auth.setUsernameAndPassword(username, password);
  rtspClient->sendDescribeCommand(continueAfterDESCRIBE, &auth);
  //continueAfterSETUP(rtspClient, 0, new char[2]);
  //startPlay(rtspClient);

  // All subsequent activity takes place within the event loop.  This call does
  // not return until "eventLoopWatchVariable" gets set to something non-zero:
  env->taskScheduler().doEventLoop(&(rtspClient->scs.eventLoopWatchVariable));

  // Reclaim the (small) memory used by the "TaskScheduler" and
  // "UsageEnvironment" objects, now that we are done with them:
  env->reclaim(); env = NULL;
  delete scheduler; scheduler = NULL;
  return 0;
}
Boolean Groupsock::output(UsageEnvironment& env, u_int8_t ttlToSend,
                          unsigned char* buffer, unsigned bufferSize,
                          DirectedNetInterface* interfaceNotToFwdBackTo) {
  do {
    // First, do the datagram send, to each destination:
    Boolean writeSuccess = True;
    for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) {
      int res = 0;
      if (!(res = write(dests->fGroupEId.groupAddress().s_addr, dests->fPort,
                        ttlToSend, buffer, bufferSize))) {
        if (-1 == res) {
          fprintf(stderr, "errno = %d, error string = %s\n", errno, strerror(errno));
        }
        writeSuccess = False;
        break;
      }
    }
    if (!writeSuccess) break;
    statsOutgoing.countPacket(bufferSize);
    statsGroupOutgoing.countPacket(bufferSize);

    // Then, forward to our members:
    int numMembers = 0;
    if (!members().IsEmpty()) {
      numMembers = outputToAllMembersExcept(interfaceNotToFwdBackTo, ttlToSend,
                                            buffer, bufferSize, ourIPAddress(env));
      if (numMembers < 0) break;
    }

    if (DebugLevel >= 3) {
      env << *this << ": wrote " << bufferSize << " bytes, ttl " << (unsigned)ttlToSend;
      if (numMembers > 0) {
        env << "; relayed to " << numMembers << " members";
      }
      env << "\n";
    }
    return True;
  } while (0);

  if (DebugLevel >= 0) { // this is a fatal error
    // Copy the result message first, because "setResultMsg()" overwrites it:
    UsageEnvironment::MsgString msg = strDup(env.getResultMsg());
    env.setResultMsg("Groupsock write failed: ", msg);
    delete[] (char*)msg;
  }
  return False;
}
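// For context, a minimal usage sketch of the ttl-taking output() variant
// above.  The group address and port are illustrative placeholders, and
// "env" is assumed to be a UsageEnvironment*, as in the other snippets here
// (note: newer live555 versions drop the ttl parameter from output()):
struct in_addr groupAddr;
groupAddr.s_addr = our_inet_addr("239.255.42.42"); // illustrative group
Groupsock outSock(*env, groupAddr, Port(8888), 255 /*ttl*/);
unsigned char payload[] = "hello";
if (!outSock.output(*env, 255 /*ttlToSend*/, payload, sizeof payload, NULL)) {
  *env << "send failed: " << env->getResultMsg() << "\n";
}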
void openURL(UsageEnvironment& env, char const* progName, char const* rtspURL) {
  // Begin by creating a "RTSPClient" object.  Note that there is a separate
  // "RTSPClient" object for each stream that we wish to receive (even if more
  // than one stream uses the same "rtsp://" URL).
  RTSPClient* rtspClient = ourRTSPClient::createNew(env, rtspURL,
                                                    RTSP_CLIENT_VERBOSITY_LEVEL,
                                                    progName);
  if (rtspClient == NULL) {
    env << "Failed to create a RTSP client for URL \"" << rtspURL << "\": "
        << env.getResultMsg() << "\n";
    return;
  }

  ++rtspClientCount;

  // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the stream.
  // Note that this command - like all RTSP commands - is sent asynchronously; we
  // do not block waiting for a response.  Instead, the following function call
  // returns immediately, and we handle the RTSP response later, from within the
  // event loop:
  rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
}
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  // Add a live stream:
  WW_H264VideoSource* videoSource = 0;
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "live", 0, "ww live test");
  sms->addSubsession(WW_H264VideoServerMediaSubsession::createNew(*env, videoSource));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "using url \"" << url << "\"\n";
  delete[] url;

  // Run the event loop.  Note: without a watch variable this call never
  // returns, so the cleanup below is unreachable as written:
  env->taskScheduler().doEventLoop();

  rtspServer->removeServerMediaSession(sms);
  Medium::close(rtspServer);
  env->reclaim();
  delete scheduler;
  return 1;
}
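// A minimal sketch of the watch-variable pattern that would make the cleanup
// above reachable, as used elsewhere in these snippets (the variable name is
// illustrative; some other task, e.g. a signal handler, sets it non-zero to
// stop the loop):
char volatile shouldExit = 0;
env->taskScheduler().doEventLoop(&shouldExit); // returns once shouldExit != 0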
Boolean Groupsock::output(UsageEnvironment& env, unsigned char* buffer,
                          unsigned bufferSize,
                          DirectedNetInterface* interfaceNotToFwdBackTo) {
  do {
    // First, do the datagram send, to each destination:
    Boolean writeSuccess = True;
    for (destRecord* dests = fDests; dests != NULL; dests = dests->fNext) {
      if (!write(dests->fGroupEId.groupAddress().s_addr, dests->fGroupEId.portNum(),
                 dests->fGroupEId.ttl(), buffer, bufferSize)) {
        writeSuccess = False;
        break;
      }
    }
    if (!writeSuccess) break;
    statsOutgoing.countPacket(bufferSize);
    statsGroupOutgoing.countPacket(bufferSize);

    // Then, forward to our members:
    int numMembers = 0;
    if (!members().IsEmpty()) {
      numMembers = outputToAllMembersExcept(interfaceNotToFwdBackTo, ttl(),
                                            buffer, bufferSize, ourIPAddress(env));
      if (numMembers < 0) break;
    }

    if (DebugLevel >= 3) {
      env << *this << ": wrote " << bufferSize << " bytes, ttl " << (unsigned)ttl();
      if (numMembers > 0) {
        env << "; relayed to " << numMembers << " members";
      }
      env << "\n";
    }
    return True;
  } while (0);

  if (DebugLevel >= 0) { // this is a fatal error
    UsageEnvironment::MsgString msg = strDup(env.getResultMsg());
    env.setResultMsg("Groupsock write failed: ", msg);
    delete[] (char*)msg;
  }
  return False;
}
// -----------------------------------------
// entry point
// -----------------------------------------
int main(int argc, char** argv) {
  // default parameters
  const char* dev_name = "/dev/video0";
  int format = V4L2_PIX_FMT_H264;
  int width = 640;
  int height = 480;
  int queueSize = 10;
  int fps = 25;
  unsigned short rtpPortNum = 20000;
  unsigned short rtcpPortNum = rtpPortNum + 1;
  unsigned char ttl = 5;
  struct in_addr destinationAddress;
  unsigned short rtspPort = 8554;
  unsigned short rtspOverHTTPPort = 0;
  bool multicast = false;
  int verbose = 0;
  std::string outputFile;
  bool useMmap = false;

  // decode parameters
  int c = 0;
  while ((c = getopt(argc, argv, "hW:H:Q:P:F:v::O:T:mM")) != -1) {
    switch (c) {
      case 'O': outputFile = optarg; break;
      case 'v': verbose = 1; if (optarg && *optarg == 'v') verbose++; break;
      case 'm': multicast = true; break;
      case 'W': width = atoi(optarg); break;
      case 'H': height = atoi(optarg); break;
      case 'Q': queueSize = atoi(optarg); break;
      case 'P': rtspPort = atoi(optarg); break;
      case 'T': rtspOverHTTPPort = atoi(optarg); break;
      case 'F': fps = atoi(optarg); break;
      case 'M': useMmap = true; break;
      case 'h':
      {
        std::cout << argv[0] << " [-v[v]] [-m] [-P RTSP port] [-T RTSP/HTTP port] [-Q queueSize] [-M] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
        std::cout << "\t -v        : verbose" << std::endl;
        std::cout << "\t -vv       : very verbose" << std::endl;
        std::cout << "\t -Q length : Number of frames in the queue (default " << queueSize << ")" << std::endl;
        std::cout << "\t -O file   : Dump capture to a file" << std::endl;
        std::cout << "\t RTSP options :" << std::endl;
        std::cout << "\t -m        : Enable multicast output" << std::endl;
        std::cout << "\t -P port   : RTSP port (default " << rtspPort << ")" << std::endl;
        std::cout << "\t -T port   : RTSP over HTTP port (default " << rtspOverHTTPPort << ")" << std::endl;
        std::cout << "\t V4L2 options :" << std::endl;
        std::cout << "\t -M        : V4L2 capture using memory-mapped buffers (default: use read interface)" << std::endl;
        std::cout << "\t -F fps    : V4L2 capture framerate (default " << fps << ")" << std::endl;
        std::cout << "\t -W width  : V4L2 capture width (default " << width << ")" << std::endl;
        std::cout << "\t -H height : V4L2 capture height (default " << height << ")" << std::endl;
        std::cout << "\t device    : V4L2 capture device (default " << dev_name << ")" << std::endl;
        exit(0);
      }
    }
  }
  if (optind < argc) {
    dev_name = argv[optind];
  }

  // create live555 environment
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // create RTSP server
  RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
  } else {
    // set up HTTP tunneling
    if (rtspOverHTTPPort) {
      rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
    }

    // Init capture
    *env << "Create V4L2 Source..." << dev_name << "\n";
    V4L2DeviceParameters param(dev_name, format, width, height, fps, verbose);
    V4L2Device* videoCapture = NULL;
    if (useMmap) {
      videoCapture = V4L2MMAPDeviceSource::createNew(param);
    } else {
      videoCapture = V4L2READDeviceSource::createNew(param);
    }
    V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture,
                                                            outputFile, queueSize, verbose);
    if (videoES == NULL) {
      *env << "Unable to create source for device " << dev_name << "\n";
    } else {
      destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
      OutPacketBuffer::maxSize = videoCapture->getBufferSize();
      StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);

      // Create multicast session (using the addSession() helper; see the
      // sketch after this snippet):
      if (multicast) {
        addSession(rtspServer, "multicast",
                   MulticastServerMediaSubsession::createNew(*env, destinationAddress,
                                                             Port(rtpPortNum), Port(rtcpPortNum),
                                                             ttl, 96, replicator, format));
      }
      // Create unicast session
      addSession(rtspServer, "unicast",
                 UnicastServerMediaSubsession::createNew(*env, replicator, format));

      // main loop
      signal(SIGINT, sighandler);
      env->taskScheduler().doEventLoop(&quit);
      *env << "Exiting..\n";
    }

    Medium::close(videoES);
    delete videoCapture;
    Medium::close(rtspServer);
  }

  env->reclaim();
  delete scheduler;
  return 0;
}
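// The addSession() helper used above belongs to this program, not to live555;
// its definition is not shown in the snippet.  A plausible minimal sketch,
// assuming it wraps the usual ServerMediaSession bookkeeping (a hypothetical
// reconstruction, not the project's actual code):
void addSession(RTSPServer* rtspServer, const std::string& sessionName,
                ServerMediaSubsession* subSession) {
  UsageEnvironment& env = rtspServer->envir();
  ServerMediaSession* sms = ServerMediaSession::createNew(env, sessionName.c_str());
  sms->addSubsession(subSession);
  rtspServer->addServerMediaSession(sms);
  char* url = rtspServer->rtspURL(sms);
  env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}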
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server.  Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
#ifdef VANLINK_DVR_RTSP_PLAYBACK
  portNumBits rtspServerPortNum = 654; // add by sxh rtsp
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8654;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#else
  portNumBits rtspServerPortNum = 554;
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
#endif

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t"
       << urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

#if 0 // RTSP-over-HTTP tunneling is not yet working
  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative
  // HTTP port number (8000).
  RTSPOverHTTPServer* rtspOverHTTPServer;
  portNumBits httpServerPortNum = 80;
  rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  if (rtspOverHTTPServer == NULL) {
    httpServerPortNum = 8000;
    rtspOverHTTPServer = RTSPOverHTTPServer::createNew(*env, httpServerPortNum, rtspServerPortNum);
  }
  if (rtspOverHTTPServer == NULL) {
    *env << "(No server for RTSP-over-HTTP tunneling was created.)\n";
  } else {
    *env << "(We use port " << httpServerPortNum << " for RTSP-over-HTTP tunneling.)\n";
  }
#endif

  env->taskScheduler().doEventLoop(); // does not return
  return 0; // only to prevent compiler warning
}
extern "C" demuxer_t* demux_open_rtp(demuxer_t* demuxer) { struct MPOpts *opts = demuxer->opts; Boolean success = False; do { TaskScheduler* scheduler = BasicTaskScheduler::createNew(); if (scheduler == NULL) break; UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); if (env == NULL) break; RTSPClient* rtspClient = NULL; SIPClient* sipClient = NULL; if (demuxer == NULL || demuxer->stream == NULL) break; // shouldn't happen demuxer->stream->eof = 0; // just in case // Look at the stream's 'priv' field to see if we were initiated // via a SDP description: char* sdpDescription = (char*)(demuxer->stream->priv); if (sdpDescription == NULL) { // We weren't given a SDP description directly, so assume that // we were given a RTSP or SIP URL: char const* protocol = demuxer->stream->streaming_ctrl->url->protocol; char const* url = demuxer->stream->streaming_ctrl->url->url; extern int verbose; if (strcmp(protocol, "rtsp") == 0) { rtspClient = RTSPClient::createNew(*env, verbose, "MPlayer"); if (rtspClient == NULL) { fprintf(stderr, "Failed to create RTSP client: %s\n", env->getResultMsg()); break; } sdpDescription = openURL_rtsp(rtspClient, url); } else { // SIP unsigned char desiredAudioType = 0; // PCMU (use 3 for GSM) sipClient = SIPClient::createNew(*env, desiredAudioType, NULL, verbose, "MPlayer"); if (sipClient == NULL) { fprintf(stderr, "Failed to create SIP client: %s\n", env->getResultMsg()); break; } sipClient->setClientStartPortNum(8000); sdpDescription = openURL_sip(sipClient, url); } if (sdpDescription == NULL) { fprintf(stderr, "Failed to get a SDP description from URL \"%s\": %s\n", url, env->getResultMsg()); break; } } // Now that we have a SDP description, create a MediaSession from it: MediaSession* mediaSession = MediaSession::createNew(*env, sdpDescription); if (mediaSession == NULL) break; // Create a 'RTPState' structure containing the state that we just created, // and store it in the demuxer's 'priv' field, for future reference: RTPState* rtpState = new RTPState; rtpState->sdpDescription = sdpDescription; rtpState->rtspClient = rtspClient; rtpState->sipClient = sipClient; rtpState->mediaSession = mediaSession; rtpState->audioBufferQueue = rtpState->videoBufferQueue = NULL; rtpState->flags = 0; rtpState->firstSyncTime.tv_sec = rtpState->firstSyncTime.tv_usec = 0; demuxer->priv = rtpState; int audiofound = 0, videofound = 0; // Create RTP receivers (sources) for each subsession: MediaSubsessionIterator iter(*mediaSession); MediaSubsession* subsession; unsigned desiredReceiveBufferSize; while ((subsession = iter.next()) != NULL) { // Ignore any subsession that's not audio or video: if (strcmp(subsession->mediumName(), "audio") == 0) { if (audiofound) { fprintf(stderr, "Additional subsession \"audio/%s\" skipped\n", subsession->codecName()); continue; } desiredReceiveBufferSize = 100000; } else if (strcmp(subsession->mediumName(), "video") == 0) { if (videofound) { fprintf(stderr, "Additional subsession \"video/%s\" skipped\n", subsession->codecName()); continue; } desiredReceiveBufferSize = 2000000; } else { continue; } if (rtsp_port) subsession->setClientPortNum (rtsp_port); if (!subsession->initiate()) { fprintf(stderr, "Failed to initiate \"%s/%s\" RTP subsession: %s\n", subsession->mediumName(), subsession->codecName(), env->getResultMsg()); } else { fprintf(stderr, "Initiated \"%s/%s\" RTP subsession on port %d\n", subsession->mediumName(), subsession->codecName(), subsession->clientPortNum()); // Set the OS's socket receive buffer sufficiently large to 
avoid // incoming packets getting dropped between successive reads from this // subsession's demuxer. Depending on the bitrate(s) that you expect, // you may wish to tweak the "desiredReceiveBufferSize" values above. int rtpSocketNum = subsession->rtpSource()->RTPgs()->socketNum(); int receiveBufferSize = increaseReceiveBufferTo(*env, rtpSocketNum, desiredReceiveBufferSize); if (verbose > 0) { fprintf(stderr, "Increased %s socket receive buffer to %d bytes \n", subsession->mediumName(), receiveBufferSize); } if (rtspClient != NULL) { // Issue a RTSP "SETUP" command on the chosen subsession: if (!rtspClient->setupMediaSubsession(*subsession, False, rtsp_transport_tcp)) break; if (!strcmp(subsession->mediumName(), "audio")) audiofound = 1; if (!strcmp(subsession->mediumName(), "video")) videofound = 1; } } } if (rtspClient != NULL) { // Issue a RTSP aggregate "PLAY" command on the whole session: if (!rtspClient->playMediaSession(*mediaSession)) break; } else if (sipClient != NULL) { sipClient->sendACK(); // to start the stream flowing } // Now that the session is ready to be read, do additional // MPlayer codec-specific initialization on each subsession: iter.reset(); while ((subsession = iter.next()) != NULL) { if (subsession->readSource() == NULL) continue; // not reading this unsigned flags = 0; if (strcmp(subsession->mediumName(), "audio") == 0) { rtpState->audioBufferQueue = new ReadBufferQueue(subsession, demuxer, "audio"); rtpState->audioBufferQueue->otherQueue = &(rtpState->videoBufferQueue); rtpCodecInitialize_audio(demuxer, subsession, flags); } else if (strcmp(subsession->mediumName(), "video") == 0) { rtpState->videoBufferQueue = new ReadBufferQueue(subsession, demuxer, "video"); rtpState->videoBufferQueue->otherQueue = &(rtpState->audioBufferQueue); rtpCodecInitialize_video(demuxer, subsession, flags); } rtpState->flags |= flags; } success = True; } while (0); if (!success) return NULL; // an error occurred // Hack: If audio and video are demuxed together on a single RTP stream, // then create a new "demuxer_t" structure to allow the higher-level // code to recognize this: if (demux_is_multiplexed_rtp_stream(demuxer)) { stream_t* s = new_ds_stream(demuxer->video); demuxer_t* od = demux_open(opts, s, DEMUXER_TYPE_UNKNOWN, opts->audio_id, opts->video_id, opts->sub_id, NULL); demuxer = new_demuxers_demuxer(od, od, od); } return demuxer; }
// -----------------------------------------
// entry point
// -----------------------------------------
int main(int argc, char** argv) {
  // default parameters
  const char* dev_name = "/dev/video0";
  int format = V4L2_PIX_FMT_H264;
  int width = 640;
  int height = 480;
  int queueSize = 10;
  int fps = 25;
  unsigned short rtspPort = 8554;
  unsigned short rtspOverHTTPPort = 0;
  bool multicast = false;
  int verbose = 0;
  std::string outputFile;
  bool useMmap = true;
  std::string url = "unicast";
  std::string murl = "multicast";
  bool useThread = true;
  std::string maddr;
  bool repeatConfig = true;
  int timeout = 65;

  // decode parameters
  int c = 0;
  while ((c = getopt(argc, argv, "v::Q:O:" "I:P:T:m:u:M:ct:" "rsfF:W:H:" "h")) != -1) {
    switch (c) {
      case 'v': verbose = 1; if (optarg && *optarg == 'v') verbose++; break;
      case 'Q': queueSize = atoi(optarg); break;
      case 'O': outputFile = optarg; break;
      // RTSP/RTP
      case 'I': ReceivingInterfaceAddr = inet_addr(optarg); break;
      case 'P': rtspPort = atoi(optarg); break;
      case 'T': rtspOverHTTPPort = atoi(optarg); break;
      case 'u': url = optarg; break;
      case 'm': multicast = true; murl = optarg; break;
      case 'M': multicast = true; maddr = optarg; break;
      case 'c': repeatConfig = false; break;
      case 't': timeout = atoi(optarg); break;
      // V4L2
      case 'r': useMmap = false; break;
      case 's': useThread = false; break;
      case 'f': format = 0; break;
      case 'F': fps = atoi(optarg); break;
      case 'W': width = atoi(optarg); break;
      case 'H': height = atoi(optarg); break;
      case 'h':
      default:
      {
        std::cout << argv[0] << " [-v[v]] [-Q queueSize] [-O file]" << std::endl;
        std::cout << "\t [-I interface] [-P RTSP port] [-T RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout]" << std::endl;
        std::cout << "\t [-r] [-s] [-W width] [-H height] [-F fps] [device] [device]" << std::endl;
        std::cout << "\t -v        : verbose" << std::endl;
        std::cout << "\t -vv       : very verbose" << std::endl;
        std::cout << "\t -Q length : Number of frames in the queue (default " << queueSize << ")" << std::endl;
        std::cout << "\t -O output : Copy captured frames to a file or a V4L2 device" << std::endl;
        std::cout << "\t RTSP options :" << std::endl;
        std::cout << "\t -I addr   : RTSP interface (default autodetect)" << std::endl;
        std::cout << "\t -P port   : RTSP port (default " << rtspPort << ")" << std::endl;
        std::cout << "\t -T port   : RTSP over HTTP port (default " << rtspOverHTTPPort << ")" << std::endl;
        std::cout << "\t -u url    : unicast url (default " << url << ")" << std::endl;
        std::cout << "\t -m url    : multicast url (default " << murl << ")" << std::endl;
        std::cout << "\t -M addr   : multicast group:port (default is random_address:20000)" << std::endl;
        std::cout << "\t -c        : don't repeat config (default: repeat config before each IDR frame)" << std::endl;
        std::cout << "\t -t secs   : RTCP expiration timeout (default " << timeout << ")" << std::endl;
        std::cout << "\t V4L2 options :" << std::endl;
        std::cout << "\t -r        : V4L2 capture using read interface (default: use memory-mapped buffers)" << std::endl;
        std::cout << "\t -s        : V4L2 capture using live555 mainloop (default: use a reader thread)" << std::endl;
        std::cout << "\t -f        : V4L2 capture using current format (-W, -H, -F are ignored)" << std::endl;
        std::cout << "\t -W width  : V4L2 capture width (default " << width << ")" << std::endl;
        std::cout << "\t -H height : V4L2 capture height (default " << height << ")" << std::endl;
        std::cout << "\t -F fps    : V4L2 capture framerate (default " << fps << ")" << std::endl;
        std::cout << "\t device    : V4L2 capture device (default " << dev_name << ")" << std::endl;
        exit(0);
      }
    }
  }

  std::list<std::string> devList;
  while (optind < argc) {
    devList.push_back(argv[optind]);
    optind++;
  }
  if (devList.empty()) {
    devList.push_back(dev_name);
  }

  // init logger
  initLogger(verbose);

  // create live555 environment
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // split multicast info
  std::istringstream is(maddr);
  std::string ip;
  getline(is, ip, ':');
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  if (!ip.empty()) {
    destinationAddress.s_addr = inet_addr(ip.c_str());
  }
  std::string port;
  getline(is, port, ':');
  unsigned short rtpPortNum = 20000;
  if (!port.empty()) {
    rtpPortNum = atoi(port.c_str());
  }
  unsigned short rtcpPortNum = rtpPortNum + 1;
  unsigned char ttl = 5;

  // create RTSP server
  RTSPServer* rtspServer = createRTSPServer(*env, rtspPort, rtspOverHTTPPort, timeout);
  if (rtspServer == NULL) {
    LOG(ERROR) << "Failed to create RTSP server: " << env->getResultMsg();
  } else {
    int nbSource = 0;
    std::list<std::string>::iterator devIt;
    for (devIt = devList.begin(); devIt != devList.end(); ++devIt) {
      std::string deviceName(*devIt);

      // Init capture
      LOG(NOTICE) << "Create V4L2 Source..." << deviceName;
      V4L2DeviceParameters param(deviceName.c_str(), format, width, height, fps, verbose);
      V4l2Capture* videoCapture = V4l2DeviceFactory::CreateVideoCapure(param, useMmap);
      if (videoCapture) {
        nbSource++;
        format = videoCapture->getFormat();

        int outfd = -1;
        V4l2Output* out = NULL;
        if (!outputFile.empty()) {
          V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(),
                                        videoCapture->getWidth(), videoCapture->getHeight(),
                                        0, verbose);
          // Assign to the existing "out" (the original code declared a new
          // local here, shadowing it, which leaked the output device and made
          // the "delete out" below a no-op):
          out = V4l2DeviceFactory::CreateVideoOutput(outparam, useMmap);
          if (out != NULL) {
            outfd = out->getFd();
          }
        }

        LOG(NOTICE) << "Start V4L2 Capture..." << deviceName;
        if (!videoCapture->captureStart()) {
          LOG(NOTICE) << "Cannot start V4L2 Capture for:" << deviceName;
        }

        V4L2DeviceSource* videoES = NULL;
        if (format == V4L2_PIX_FMT_H264) {
          videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd,
                                                     queueSize, useThread, repeatConfig);
        } else {
          videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outfd,
                                                queueSize, useThread);
        }
        if (videoES == NULL) {
          LOG(FATAL) << "Unable to create source for device " << deviceName;
          delete videoCapture;
        } else {
          // extend buffer size if needed
          if (videoCapture->getBufferSize() > OutPacketBuffer::maxSize) {
            OutPacketBuffer::maxSize = videoCapture->getBufferSize();
          }
          StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);

          std::string baseUrl;
          if (devList.size() > 1) {
            baseUrl = basename(deviceName.c_str());
            baseUrl.append("/");
          }

          // Create multicast session
          if (multicast) {
            LOG(NOTICE) << "RTP  address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
            LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;
            addSession(rtspServer, baseUrl + murl,
                       MulticastServerMediaSubsession::createNew(*env, destinationAddress,
                                                                 Port(rtpPortNum), Port(rtcpPortNum),
                                                                 ttl, replicator, format));
            // increment ports for next sessions
            rtpPortNum += 2;
            rtcpPortNum += 2;
          }
          // Create unicast session
          addSession(rtspServer, baseUrl + url,
                     UnicastServerMediaSubsession::createNew(*env, replicator, format));
        }
        if (out) {
          delete out;
        }
      }
    }

    if (nbSource > 0) {
      // main loop
      signal(SIGINT, sighandler);
      env->taskScheduler().doEventLoop(&quit);
      LOG(NOTICE) << "Exiting....";
    }
    Medium::close(rtspServer);
  }

  env->reclaim();
  delete scheduler;
  return 0;
}
int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server.  Try first with the default port number (554),
  // and then with the alternative port number (8554):
  RTSPServer* rtspServer;
  portNumBits rtspServerPortNum = 554; // first try the default port 554
  rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  if (rtspServer == NULL) {
    // Creating the server on port 554 failed, so try port 8554 instead:
    rtspServerPortNum = 8554;
    rtspServer = DynamicRTSPServer::createNew(*env, rtspServerPortNum, authDB);
  }
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  *env << "LIVE555 Media Server\n";
  *env << "\tversion " << MEDIA_SERVER_VERSION_STRING
       << " (LIVE555 Streaming Media library version "
       << LIVEMEDIA_LIBRARY_VERSION_STRING << ").\n";

  char* urlPrefix = rtspServer->rtspURLPrefix();
  *env << "Play streams from this server using the URL\n\t"
       << urlPrefix << "<filename>\nwhere <filename> is a file present in the current directory.\n";
  *env << "Each file's type is inferred from its name suffix:\n";
  *env << "\t\".264\" => a H.264 Video Elementary Stream file\n";
  *env << "\t\".265\" => a H.265 Video Elementary Stream file\n";
  *env << "\t\".aac\" => an AAC Audio (ADTS format) file\n";
  *env << "\t\".ac3\" => an AC-3 Audio file\n";
  *env << "\t\".amr\" => an AMR Audio file\n";
  *env << "\t\".dv\" => a DV Video file\n";
  *env << "\t\".m4e\" => a MPEG-4 Video Elementary Stream file\n";
  *env << "\t\".mkv\" => a Matroska audio+video+(optional)subtitles file\n";
  *env << "\t\".mp3\" => a MPEG-1 or 2 Audio file\n";
  *env << "\t\".mpg\" => a MPEG-1 or 2 Program Stream (audio+video) file\n";
  *env << "\t\".ogg\" or \".ogv\" or \".opus\" => an Ogg audio and/or video file\n";
  *env << "\t\".ts\" => a MPEG Transport Stream file\n";
  *env << "\t\t(a \".tsx\" index file - if present - provides server 'trick play' support)\n";
  *env << "\t\".vob\" => a VOB (MPEG-2 video with AC-3 audio) file\n";
  *env << "\t\".wav\" => a WAV Audio file\n";
  *env << "\t\".webm\" => a WebM audio(Vorbis)+video(VP8) file\n";
  *env << "See http://www.live555.com/mediaServer/ for additional documentation.\n";

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative
  // HTTP port numbers (8000 and 8080).
  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000)
      || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "(We use port " << rtspServer->httpServerPortNum()
         << " for optional RTSP-over-HTTP tunneling, or for HTTP live streaming (for indexed Transport Stream files only).)\n";
  } else {
    *env << "(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return
  return 0; // only to prevent compiler warning
}
SIPClient::SIPClient(UsageEnvironment& env,
                     unsigned char desiredAudioRTPPayloadFormat,
                     char const* mimeSubtype, int verbosityLevel,
                     char const* applicationName)
  : Medium(env), fT1(500000 /* 500 ms */),
    fDesiredAudioRTPPayloadFormat(desiredAudioRTPPayloadFormat),
    fVerbosityLevel(verbosityLevel), fCSeq(0),
    fUserAgentHeaderStr(NULL), fUserAgentHeaderStrLen(0),
    fURL(NULL), fURLSize(0), fToTagStr(NULL), fToTagStrSize(0),
    fUserName(NULL), fUserNameSize(0),
    fInviteSDPDescription(NULL), fInviteSDPDescriptionReturned(NULL),
    fInviteCmd(NULL), fInviteCmdSize(0) {
  if (mimeSubtype == NULL) mimeSubtype = "";
  fMIMESubtype = strDup(mimeSubtype);
  fMIMESubtypeSize = strlen(fMIMESubtype);

  if (applicationName == NULL) applicationName = "";
  fApplicationName = strDup(applicationName);
  fApplicationNameSize = strlen(fApplicationName);

  struct in_addr ourAddress;
  ourAddress.s_addr = ourIPAddress(env); // hack
  fOurAddressStr = strDup(AddressString(ourAddress).val());
  fOurAddressStrSize = strlen(fOurAddressStr);

  fOurSocket = new Groupsock(env, ourAddress, 0, 255);
  if (fOurSocket == NULL) {
    env << "ERROR: Failed to create socket for addr " << fOurAddressStr
        << ": " << env.getResultMsg() << "\n";
  }

  // Now, find out our source port number.  Hack: Do this by first trying to
  // send a 0-length packet, so that the "getSourcePort()" call will work.
  fOurSocket->output(envir(), (unsigned char*)"", 0);
  Port srcPort(0);
  getSourcePort(env, fOurSocket->socketNum(), srcPort);
  if (srcPort.num() != 0) {
    fOurPortNum = ntohs(srcPort.num());
  } else {
    // No luck.  Try again using a default port number:
    fOurPortNum = 5060;
    delete fOurSocket;
    fOurSocket = new Groupsock(env, ourAddress, fOurPortNum, 255);
    if (fOurSocket == NULL) {
      env << "ERROR: Failed to create socket for addr " << fOurAddressStr
          << ", port " << fOurPortNum << ": " << env.getResultMsg() << "\n";
    }
  }

  // Set the "User-Agent:" header to use in each request:
  char const* const libName = "LIVE555 Streaming Media v";
  char const* const libVersionStr = LIVEMEDIA_LIBRARY_VERSION_STRING;
  char const* libPrefix; char const* libSuffix;
  if (applicationName == NULL || applicationName[0] == '\0') {
    applicationName = libPrefix = libSuffix = "";
  } else {
    libPrefix = " (";
    libSuffix = ")";
  }
  unsigned userAgentNameSize
    = fApplicationNameSize + strlen(libPrefix) + strlen(libName)
      + strlen(libVersionStr) + strlen(libSuffix) + 1;
  char* userAgentName = new char[userAgentNameSize];
  sprintf(userAgentName, "%s%s%s%s%s",
          applicationName, libPrefix, libName, libVersionStr, libSuffix);
  setUserAgentString(userAgentName);
  delete[] userAgentName;

  reset();
}
int main(int argc, char** argv) {
  init_signals();
  setpriority(PRIO_PROCESS, 0, 0);

  int IsSilence = 0;
  int svcEnable = 0;
  int cnt = 0;
  int activePortCnt = 0;

  if (GetSampleRate() == 16000) {
    audioOutputBitrate = 128000;
    audioSamplingFrequency = 16000;
  } else {
    audioOutputBitrate = 64000;
    audioSamplingFrequency = 8000;
  }

  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  int msg_type, video_type;
  APPROInput* MjpegInputDevice = NULL;
  APPROInput* H264InputDevice = NULL;
  APPROInput* Mpeg4InputDevice = NULL;
  // 8 slots: indexes 4..7 are used when SVC is enabled (the original
  // declaration of child[4] overran the array in that case):
  static pid_t child[8] = { -1, -1, -1, -1, -1, -1, -1, -1 };
  StreamingMode streamingMode = STREAMING_UNICAST;
  netAddressBits multicastAddress = 0; //our_inet_addr("224.1.4.6");
  portNumBits videoRTPPortNum = 0;
  portNumBits audioRTPPortNum = 0;

  IsSilence = 0;
  svcEnable = 0;
  audioType = AUDIO_G711;
  streamingMode = STREAMING_UNICAST;

  for (cnt = 1; cnt < argc; cnt++) {
    if (strcmp(argv[cnt], "-m") == 0) {
      streamingMode = STREAMING_MULTICAST_SSM;
    }
    if (strcmp(argv[cnt], "-s") == 0) {
      IsSilence = 1;
    }
    if (strcmp(argv[cnt], "-a") == 0) {
      audioType = AUDIO_AAC;
    }
    if (strcmp(argv[cnt], "-v") == 0) {
      svcEnable = 1;
    }
  }

#if 0
  printf("###########IsSilence = %d ################\n", IsSilence);
  printf("###########streamingMode = %d ################\n", streamingMode);
  printf("###########audioType = %d ################\n", audioType);
  printf("###########svcEnable = %d ################\n", svcEnable);
#endif

  child[0] = fork();
  if (child[0] != 0) {
    child[1] = fork();
  }
  if (child[0] != 0 && child[1] != 0) {
    child[2] = fork();
  }
  if (child[0] != 0 && child[1] != 0 && child[2] != 0) {
    child[3] = fork();
  }
  if (svcEnable) {
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0) {
      child[4] = fork();
    }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0) {
      child[5] = fork();
    }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0
        && child[5] != 0) {
      child[6] = fork();
    }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0
        && child[5] != 0 && child[6] != 0) {
      child[7] = fork();
    }
  }

  if (child[0] == 0) { /* first child */
    msg_type = LIVE_MSG_TYPE4;
    video_type = VIDEO_TYPE_H264_CIF;
    rtspServerPortNum = 8556;
    H264VideoBitrate = 12000000;
    videoRTPPortNum = 6012;
    audioRTPPortNum = 6014;
  }
  if (child[1] == 0) { /* second child */
    msg_type = LIVE_MSG_TYPE3;
    video_type = VIDEO_TYPE_MJPEG;
    rtspServerPortNum = 8555;
    MjpegVideoBitrate = 12000000;
    videoRTPPortNum = 6008;
    audioRTPPortNum = 6010;
  }
  if (child[2] == 0) { /* third child */
    msg_type = LIVE_MSG_TYPE;
    video_type = VIDEO_TYPE_MPEG4;
    rtspServerPortNum = 8553;
    Mpeg4VideoBitrate = 12000000;
    videoRTPPortNum = 6000;
    audioRTPPortNum = 6002;
  }
  if (child[3] == 0) { /* fourth child */
    msg_type = LIVE_MSG_TYPE2;
    video_type = VIDEO_TYPE_MPEG4_CIF;
    rtspServerPortNum = 8554;
    Mpeg4VideoBitrate = 12000000;
    videoRTPPortNum = 6004;
    audioRTPPortNum = 6006;
  }
  if (svcEnable) {
    if (child[4] == 0) { /* fifth child */
      msg_type = LIVE_MSG_TYPE5;
      video_type = VIDEO_TYPE_H264_SVC_30FPS;
      rtspServerPortNum = 8601;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6016;
      audioRTPPortNum = 6018;
    }
    if (child[5] == 0) { /* sixth child */
      msg_type = LIVE_MSG_TYPE6;
      video_type = VIDEO_TYPE_H264_SVC_15FPS;
      rtspServerPortNum = 8602;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6020;
      audioRTPPortNum = 6022;
    }
    if (child[6] == 0) { /* seventh child */
      msg_type = LIVE_MSG_TYPE7;
      video_type = VIDEO_TYPE_H264_SVC_7FPS;
      rtspServerPortNum = 8603;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6024;
      audioRTPPortNum = 6026;
    }
    if (child[7] == 0) { /* eighth child */
      msg_type = LIVE_MSG_TYPE8;
      video_type = VIDEO_TYPE_H264_SVC_3FPS;
      rtspServerPortNum = 8604;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6028;
      audioRTPPortNum = 6030;
    }
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0 && child[4] != 0
        && child[5] != 0 && child[6] != 0 && child[7] != 0) { /* parent */
      msg_type = LIVE_MSG_TYPE9;
      video_type = VIDEO_TYPE_H264;
      rtspServerPortNum = 8557;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6032;
      audioRTPPortNum = 6034;
    }
  } else {
    if (child[0] != 0 && child[1] != 0 && child[2] != 0 && child[3] != 0) { /* parent */
      msg_type = LIVE_MSG_TYPE5;
      video_type = VIDEO_TYPE_H264;
      rtspServerPortNum = 8557;
      H264VideoBitrate = 12000000;
      videoRTPPortNum = 6032;
      audioRTPPortNum = 6034;
    }
  }

  videoType = video_type;

  // Objects used for multicast streaming:
  static Groupsock* rtpGroupsockAudio = NULL;
  static Groupsock* rtcpGroupsockAudio = NULL;
  static Groupsock* rtpGroupsockVideo = NULL;
  static Groupsock* rtcpGroupsockVideo = NULL;
  static FramedSource* sourceAudio = NULL;
  static RTPSink* sinkAudio = NULL;
  static RTCPInstance* rtcpAudio = NULL;
  static FramedSource* sourceVideo = NULL;
  static RTPSink* sinkVideo = NULL;
  static RTCPInstance* rtcpVideo = NULL;

  share_memory_init(msg_type);
  //init_signals();

  *env << "Initializing...\n";

  // Initialize the WIS input device:
  if (video_type == VIDEO_TYPE_MJPEG) {
    MjpegInputDevice = APPROInput::createNew(*env, VIDEO_TYPE_MJPEG);
    if (MjpegInputDevice == NULL) {
      err(*env) << "Failed to create MJPEG input device\n";
      exit(1);
    }
  }
  if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF
      || video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS
      || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
    H264InputDevice = APPROInput::createNew(*env, video_type);
    if (H264InputDevice == NULL) {
      err(*env) << "Failed to create H264 input device\n";
      exit(1);
    }
  }
  if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
    Mpeg4InputDevice = APPROInput::createNew(*env, video_type);
    if (Mpeg4InputDevice == NULL) {
      err(*env) << "Failed to create MPEG4 input device\n";
      exit(1);
    }
  }

  // Create the RTSP server:
  RTSPServer* rtspServer = NULL;
  // Normal case: Streaming from a built-in RTSP server:
  rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  *env << "...done initializing\n";

  if (streamingMode == STREAMING_UNICAST) {
    if (video_type == VIDEO_TYPE_MJPEG) {
      ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName,
                                        streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISJPEGVideoServerMediaSubsession
                         ::createNew(sms->envir(), *MjpegInputDevice, MjpegVideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *MjpegInputDevice));
      }
      rtspServer->addServerMediaSession(sms);

      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }

    if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF
        || video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS
        || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
      ServerMediaSession* sms;
      sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISH264VideoServerMediaSubsession
                         ::createNew(sms->envir(), *H264InputDevice, H264VideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *H264InputDevice));
      }
      rtspServer->addServerMediaSession(sms);

      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }

    // Create a record describing the media to be streamed:
    if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
      ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName,
                                        streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sms->addSubsession(WISMPEG4VideoServerMediaSubsession
                         ::createNew(sms->envir(), *Mpeg4InputDevice, Mpeg4VideoBitrate));
      if (IsSilence == 0) {
        sms->addSubsession(WISPCMAudioServerMediaSubsession::createNew(sms->envir(), *Mpeg4InputDevice));
      }
      rtspServer->addServerMediaSession(sms);

      char* url = rtspServer->rtspURL(sms);
      *env << "Play this stream using the URL:\n\t" << url << "\n";
      delete[] url;
    }
  } else {
    if (streamingMode == STREAMING_MULTICAST_SSM) {
      if (multicastAddress == 0) multicastAddress = chooseRandomIPv4SSMAddress(*env);
    } else if (multicastAddress != 0) {
      streamingMode = STREAMING_MULTICAST_ASM;
    }
    struct in_addr dest;
    dest.s_addr = multicastAddress;
    const unsigned char ttl = 255;

    // For RTCP:
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen + 1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case

    ServerMediaSession* sms = NULL;
    if (video_type == VIDEO_TYPE_MJPEG) {
      sms = ServerMediaSession::createNew(*env, MjpegStreamName, MjpegStreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = MjpegInputDevice->audioSource();
      sourceVideo = WISJPEGStreamSource::createNew(MjpegInputDevice->videoSource());

      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      sinkVideo = JPEGVideoRTPSink::createNew(*env, rtpGroupsockVideo);
    }
    if (video_type == VIDEO_TYPE_H264 || video_type == VIDEO_TYPE_H264_CIF
        || video_type == VIDEO_TYPE_H264_SVC_30FPS || video_type == VIDEO_TYPE_H264_SVC_15FPS
        || video_type == VIDEO_TYPE_H264_SVC_7FPS || video_type == VIDEO_TYPE_H264_SVC_3FPS) {
      sms = ServerMediaSession::createNew(*env, H264StreamName, H264StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = H264InputDevice->audioSource();
      sourceVideo = H264VideoStreamFramer::createNew(*env, H264InputDevice->videoSource());

      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      {
        char BuffStr[200];
        extern int GetSprop(void* pBuff, char vType);
        GetSprop(BuffStr, video_type);
        sinkVideo = H264VideoRTPSink::createNew(*env, rtpGroupsockVideo, 96, 0x64001F, BuffStr);
      }
    }
    // Create a record describing the media to be streamed:
    if (video_type == VIDEO_TYPE_MPEG4 || video_type == VIDEO_TYPE_MPEG4_CIF) {
      sms = ServerMediaSession::createNew(*env, Mpeg4StreamName, Mpeg4StreamName,
                                          streamDescription, streamingMode == STREAMING_MULTICAST_SSM);
      sourceAudio = Mpeg4InputDevice->audioSource();
      sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(*env, Mpeg4InputDevice->videoSource());

      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortVideo(videoRTPPortNum);
      const Port rtcpPortVideo(videoRTPPortNum + 1);
      rtpGroupsockVideo = new Groupsock(*env, dest, rtpPortVideo, ttl);
      rtcpGroupsockVideo = new Groupsock(*env, dest, rtcpPortVideo, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockVideo->multicastSendOnly();
        rtcpGroupsockVideo->multicastSendOnly();
      }
      setVideoRTPSinkBufferSize();
      sinkVideo = MPEG4ESVideoRTPSink::createNew(*env, rtpGroupsockVideo, 97);
    }

    /* VIDEO channel setup */
    if (1) {
      // Create (and start) a 'RTCP instance' for this RTP sink:
      // (note: this uses Mpeg4VideoBitrate regardless of the video type)
      unsigned totalSessionBandwidthVideo = (Mpeg4VideoBitrate + 500) / 1000; // in kbps; for RTCP b/w share
      rtcpVideo = RTCPInstance::createNew(*env, rtcpGroupsockVideo,
                                          totalSessionBandwidthVideo, CNAME,
                                          sinkVideo, NULL /* we're a server */,
                                          streamingMode == STREAMING_MULTICAST_SSM);
      // Note: This starts RTCP running automatically
      sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkVideo, rtcpVideo));

      // Start streaming:
      sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
    }

    /* AUDIO channel setup */
    if (IsSilence == 0) {
      // there's a separate RTP stream for audio
      // Create 'groupsocks' for RTP and RTCP:
      const Port rtpPortAudio(audioRTPPortNum);
      const Port rtcpPortAudio(audioRTPPortNum + 1);
      rtpGroupsockAudio = new Groupsock(*env, dest, rtpPortAudio, ttl);
      rtcpGroupsockAudio = new Groupsock(*env, dest, rtcpPortAudio, ttl);
      if (streamingMode == STREAMING_MULTICAST_SSM) {
        rtpGroupsockAudio->multicastSendOnly();
        rtcpGroupsockAudio->multicastSendOnly();
      }
      if (audioSamplingFrequency == 16000) {
        if (audioType == AUDIO_G711) {
          sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                               audioSamplingFrequency, "audio", "PCMU", 1);
        } else {
          char const* encoderConfigStr = "1408"; // (2<<3)|(8>>1) = 0x14 ; ((8<<7)&0xFF)|(1<<3) = 0x08
          sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                                     audioSamplingFrequency, "audio", "AAC-hbr",
                                                     encoderConfigStr, audioNumChannels);
        }
      } else {
        if (audioType == AUDIO_G711) {
          sinkAudio = SimpleRTPSink::createNew(*env, rtpGroupsockAudio, 0,
                                               audioSamplingFrequency, "audio", "PCMU", 1);
        } else {
          char const* encoderConfigStr = "1588"; // (2<<3)|(11>>1) = 0x15 ; ((11<<7)&0xFF)|(1<<3) = 0x88
          sinkAudio = MPEG4GenericRTPSink::createNew(*env, rtpGroupsockAudio, 96,
                                                     audioSamplingFrequency, "audio", "AAC-hbr",
                                                     encoderConfigStr, audioNumChannels);
        }
      }

      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate + 500) / 1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(*env, rtcpGroupsockAudio,
                                          totalSessionBandwidthAudio, CNAME,
                                          sinkAudio, NULL /* we're a server */,
                                          streamingMode == STREAMING_MULTICAST_SSM);
      // Note: This starts RTCP running automatically
      sms->addSubsession(PassiveServerMediaSubsession::createNew(*sinkAudio, rtcpAudio));

      // Start streaming:
      sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
    }

    rtspServer->addServerMediaSession(sms);
    {
      struct in_addr dest;
      dest.s_addr = multicastAddress;
      char* url = rtspServer->rtspURL(sms);
      //char* url2 = inet_ntoa(dest);
      *env << "Multicast: play this stream using the URL:\n\t" << url << "\n";
      //*env << "2 Multicast addr:\n\t" << url2 << "\n";
      delete[] url;
    }
  }

  // Begin the LIVE555 event loop:
  env->taskScheduler().doEventLoop(&watchVariable); // returns once "watchVariable" is set non-zero

  if (streamingMode != STREAMING_UNICAST) {
    Medium::close(rtcpAudio);
    Medium::close(sinkAudio);
    Medium::close(sourceAudio);
    delete rtpGroupsockAudio;
    delete rtcpGroupsockAudio;

    Medium::close(rtcpVideo);
    Medium::close(sinkVideo);
    Medium::close(sourceVideo);
    delete rtpGroupsockVideo;
    delete rtcpGroupsockVideo;
  }

  Medium::close(rtspServer); // will also reclaim "sms" and its "ServerMediaSubsession"s

  if (MjpegInputDevice != NULL) {
    Medium::close(MjpegInputDevice);
  }
  if (H264InputDevice != NULL) {
    Medium::close(H264InputDevice);
  }
  if (Mpeg4InputDevice != NULL) {
    Medium::close(Mpeg4InputDevice);
  }

  env->reclaim();
  delete scheduler;
  ApproInterfaceExit();
  return 0;
}
void setupDarwinStreaming(UsageEnvironment& env, WISInput& inputDevice) {
  // Create a 'Darwin injector' object:
  injector = DarwinInjector::createNew(env, applicationName);

  // For RTCP:
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen + 1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case

  /****************** audio ***********************/
  if (audioFormat != AFMT_NONE) {
    // Create the audio source:
    sourceAudio = createAudioSource(env, inputDevice.audioSource());

    if (packageFormat != PFMT_TRANSPORT_STREAM) { // there's a separate RTP stream for audio
      // Create 'groupsocks' for RTP and RTCP.
      // (Note: Because we will actually be streaming through a remote Darwin server,
      // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
      struct in_addr dummyDestAddress;
      dummyDestAddress.s_addr = 0;
      rtpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      rtcpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);

      // Create a RTP sink for the audio stream:
      sinkAudio = createAudioRTPSink(env, rtpGroupsockAudio);

      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate + 500) / 1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(env, rtcpGroupsockAudio,
                                          totalSessionBandwidthAudio, CNAME,
                                          sinkAudio, NULL /* we're a server */);
      // Note: This starts RTCP running automatically

      // Add these to our 'Darwin injector':
      injector->addStream(sinkAudio, rtcpAudio);
    }
  }
  /****************** end audio ***********************/

  /****************** video ***********************/
  if (videoFormat != VFMT_NONE) {
    // Create the video source:
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      MPEG2TransportStreamFromESSource* tsSource
        = MPEG2TransportStreamFromESSource::createNew(env);
      tsSource->addNewVideoSource(inputDevice.videoSource(), 2);
      if (sourceAudio != NULL) tsSource->addNewAudioSource(sourceAudio, 2);
      // Gather the Transport packets into network packet-sized chunks:
      sourceVideo = MPEG2TransportStreamAccumulator::createNew(env, tsSource);
      sourceAudio = NULL;
    } else {
      switch (videoFormat) {
        case VFMT_NONE: // not used
          break;
        case VFMT_MJPEG: {
          sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
          break;
        }
        case VFMT_MPEG1:
        case VFMT_MPEG2: {
          sourceVideo = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
          break;
        }
        case VFMT_MPEG4: {
          sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
          break;
        }
      }
    }

    // Create 'groupsocks' for RTP and RTCP.
    // (Note: Because we will actually be streaming through a remote Darwin server,
    // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
    struct in_addr dummyDestAddress;
    dummyDestAddress.s_addr = 0;
    rtpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
    rtcpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);

    // Create a RTP sink for the video stream:
    unsigned char payloadFormatCode = 97; // if dynamic
    setVideoRTPSinkBufferSize();
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      sinkVideo = SimpleRTPSink::createNew(env, rtpGroupsockVideo, 33, 90000,
                                           "video", "mp2t", 1, True, False /*no 'M' bit*/);
    } else {
      switch (videoFormat) {
        case VFMT_NONE: // not used
          break;
        case VFMT_MJPEG: {
          sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
          break;
        }
        case VFMT_MPEG1:
        case VFMT_MPEG2: {
          sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
          break;
        }
        case VFMT_MPEG4: {
          sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
          break;
        }
      }
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthVideo = (videoBitrate + 500) / 1000; // in kbps; for RTCP b/w share
    rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
                                        totalSessionBandwidthVideo, CNAME,
                                        sinkVideo, NULL /* we're a server */);
    // Note: This starts RTCP running automatically

    // Add these to our 'Darwin injector':
    injector->addStream(sinkVideo, rtcpVideo);
  }
  /****************** end video ***********************/

  // Next, specify the destination Darwin Streaming Server:
  char const* remoteStreamName = "test.sdp";
  if (!injector->setDestination(remoteDSSNameOrAddress, remoteStreamName,
                                applicationName, "LIVE555 Streaming Media")) {
    env << "Failed to connect to remote Darwin Streaming Server: " << env.getResultMsg() << "\n";
    exit(1);
  }

  env << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
      << "\trtsp://" << remoteDSSNameOrAddress << "/" << remoteStreamName << "\n";
}
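// After setupDarwinStreaming() returns, the sinks still have to be started
// and the event loop entered, as in the other snippets above.  A minimal
// sketch (mirroring the startPlaying()/doEventLoop() calls used elsewhere in
// this collection; error handling omitted):
if (sinkVideo != NULL) sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
if (sinkAudio != NULL) sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
env.taskScheduler().doEventLoop(); // does not return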