// Returns the SDP description for this subsession, building it lazily on the
// first call: throwaway source/sink objects are created solely so their
// parameters (payload format, bitrate, aux SDP line) can feed the SDP text,
// then torn down again.
char const* OnDemandServerMediaSubsession::sdpLines() {
  FUN_IN();
  if (fSDPLines == NULL) {
    // We need to construct a set of SDP lines that describe this
    // subsession (as a unicast stream). To do so, we first create
    // dummy (unused) source and "RTPSink" objects,
    // whose parameters we use for the SDP lines:
    unsigned estBitrate;
    FramedSource* inputSource = createNewStreamSource(0, estBitrate);
    if (inputSource == NULL) return NULL; // file not found

    struct in_addr dummyAddr;
    dummyAddr.s_addr = 0;
    Groupsock dummyGroupsock(envir(), dummyAddr, 0, 0);
    unsigned char rtpPayloadType = 96 + trackNumber()-1; // if dynamic
    RTPSink* dummyRTPSink = createNewRTPSink(&dummyGroupsock, rtpPayloadType, inputSource);
    // Prefer the sink's own bitrate estimate when it provides a nonzero one:
    if (dummyRTPSink != NULL && dummyRTPSink->estimatedBitrate() > 0) estBitrate = dummyRTPSink->estimatedBitrate();

    setSDPLinesFromRTPSink(dummyRTPSink, inputSource, estBitrate);
    Medium::close(dummyRTPSink);
    closeStreamSource(inputSource);
  }
  FUN_OUT();
  return fSDPLines;
}
// Begins (or resumes) streaming to the client identified by clientSessionId,
// and reports back the initial RTP sequence number and timestamp that the
// sink will use (callers put these in the RTSP PLAY "RTP-Info" header).
void OnDemandServerMediaSubsession::startStream(unsigned clientSessionId, void* streamToken,
                                                TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData,
                                                unsigned short& rtpSeqNum, unsigned& rtpTimestamp,
                                                ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
                                                void* serverRequestAlternativeByteHandlerClientData) {
  StreamState* streamState = (StreamState*)streamToken;
  /* :TODO:2014/9/12 13:59:55:Sean: */
  printf("startStream\n");
  /* :TODO:End--- */
  // The numeric session id is used directly as the hash key (live555 idiom).
  Destinations* destinations = (Destinations*)(fDestinationsHashTable->Lookup((char const*)clientSessionId));
  if (streamState != NULL) {
    streamState->startPlaying(destinations, rtcpRRHandler, rtcpRRHandlerClientData,
                              serverRequestAlternativeByteHandler, serverRequestAlternativeByteHandlerClientData);
    RTPSink* rtpSink = streamState->rtpSink(); // alias
    if (rtpSink != NULL) {
      rtpSeqNum = rtpSink->currentSeqNo();
      rtpTimestamp = rtpSink->presetNextTimestamp();
    }
  }
}
// ----------------------------------------- // ServerMediaSubsession for Multicast // ----------------------------------------- MulticastServerMediaSubsession* MulticastServerMediaSubsession::createNew(UsageEnvironment& env , struct in_addr destinationAddress , Port rtpPortNum, Port rtcpPortNum , int ttl , StreamReplicator* replicator , const std::string& format) { // Create a source FramedSource* source = replicator->createStreamReplica(); FramedSource* videoSource = createSource(env, source, format); // Create RTP/RTCP groupsock Groupsock* rtpGroupsock = new Groupsock(env, destinationAddress, rtpPortNum, ttl); Groupsock* rtcpGroupsock = new Groupsock(env, destinationAddress, rtcpPortNum, ttl); // Create a RTP sink RTPSink* videoSink = createSink(env, rtpGroupsock, 96, format, dynamic_cast<V4L2DeviceSource*>(replicator->inputSource())); // Create 'RTCP instance' const unsigned maxCNAMElen = 100; unsigned char CNAME[maxCNAMElen+1]; gethostname((char*)CNAME, maxCNAMElen); CNAME[maxCNAMElen] = '\0'; RTCPInstance* rtcpInstance = RTCPInstance::createNew(env, rtcpGroupsock, 500, CNAME, videoSink, NULL); // Start Playing the Sink videoSink->startPlaying(*videoSource, NULL, NULL); return new MulticastServerMediaSubsession(replicator, videoSink, rtcpInstance); }
void OnDemandServerMediaSubsession::nullSeekStream(unsigned /*clientSessionId*/, void* streamToken) { StreamState* streamState = (StreamState*)streamToken; if (streamState != NULL && streamState->mediaSource() != NULL) { // Because we're not seeking here, get the current NPT, and remember it as the new 'start' NPT: streamState->startNPT() = getCurrentNPT(streamToken); RTPSink* rtpSink = streamState->rtpSink(); // alias if (rtpSink != NULL) rtpSink->resetPresentationTimes(); } }
// Per-receiver transmission statistics for one SSRC. Records the creation
// time and snapshots the sink's current octet/packet counts, so that later
// deltas measure only the traffic sent since this receiver appeared.
RTPTransmissionStats::RTPTransmissionStats(RTPSink& rtpSink, u_int32_t SSRC)
  : fOurRTPSink(rtpSink), fSSRC(SSRC), fLastPacketNumReceived(0),
    fPacketLossRatio(0), fTotNumPacketsLost(0), fJitter(0), fLastSRTime(0),
    fDiffSR_RRTime(0), fFirstPacket(True), fTotalOctetCount_hi(0),
    fTotalOctetCount_lo(0), fTotalPacketCount_hi(0), fTotalPacketCount_lo(0) {
  gettimeofday(&fTimeCreated, NULL);

  // Baseline counters: deltas are computed against these later.
  fLastOctetCount = rtpSink.octetCount();
  fLastPacketCount = rtpSink.packetCount();
}
static void checkForAuxSDPLine(void* clientData) { RTPSink* sink = (RTPSink*)clientData; if (sink->auxSDPLine() != NULL) { // Signal the event loop that we're done: doneFlag = ~0; } else { // No luck yet. Try again, after a brief delay: int uSecsToDelay = 100000; // 100 ms env->taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)checkForAuxSDPLine, sink); } }
// Starts delivery to one destination set: begins playing the media source
// into whichever sink exists (RTP or raw UDP), lazily creates the RTCP
// instance, then wires the destination in — either onto the client's TCP
// socket (RTP-over-TCP) or into the RTP/RTCP groupsocks (UDP).
void StreamState
::startPlaying(Destinations* dests, TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData) {
  if (dests == NULL) return;

  //skip play action if it's already being played
  if (!fAreCurrentlyPlaying && fMediaSource != NULL) {
    if (fRTPSink != NULL) {
      fRTPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this);
      fAreCurrentlyPlaying = True;
    } else if (fUDPSink != NULL) {
      fUDPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this);
      fAreCurrentlyPlaying = True;
    }
    createWorkingThread();
  }

  if (fRTCPInstance == NULL && fRTPSink != NULL) {
    // Create (and start) a 'RTCP instance' for this RTP sink:
    // NOTE(review): uses fRtcpEnv rather than the sink's own environment —
    // the original call is kept commented out below; presumably the RTCP
    // instance runs in a dedicated environment/thread. Verify.
    fRTCPInstance
      // = RTCPInstance::createNew(fRTPSink->envir(), fRTCPgs,
      = RTCPInstance::createNew(fRtcpEnv, fRTCPgs,
                                fTotalBW, (unsigned char*)fMaster.fCNAME,
                                fRTPSink, NULL /* we're a server */);
    // Note: This starts RTCP running automatically
  }

  if (dests->isTCP) {
    // Change RTP and RTCP to use the TCP socket instead of UDP:
    if (fRTPSink != NULL) {
      fRTPSink->addStreamSocket(dests->tcpSocketNum, dests->rtpChannelId);
    }
    if (fRTCPInstance != NULL) {
      fRTCPInstance->addStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId);
      fRTCPInstance->setSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId,
                                          rtcpRRHandler, rtcpRRHandlerClientData);
    }
  } else {
    // Tell the RTP and RTCP 'groupsocks' about this destination
    // (in case they don't already have it):
    // printf("addDestination %s\n", inet_ntoa(dests->addr)); //jay
    if (fRTPgs != NULL) fRTPgs->addDestination(dests->addr, dests->rtpPort);
    if (fRTCPgs != NULL) fRTCPgs->addDestination(dests->addr, dests->rtcpPort);
    if (fRTCPInstance != NULL) {
      // Per-destination RR handlers are installed only for unicast addresses:
      if (!IsMulticastAddress(dests->addr.s_addr)) {
        fRTCPInstance->setSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort,
                                            rtcpRRHandler, rtcpRRHandlerClientData);
      }
    }
  }
}
// Returns the stream's current "normal play time" (NPT, in seconds): the
// start NPT plus the elapsed presentation time observed by the RTP sink.
// Returns 0.0 when there is no stream token or no RTP sink.
float OnDemandServerMediaSubsession::getCurrentNPT(void* streamToken) {
  do {
    if (streamToken == NULL) break;

    StreamState* streamState = (StreamState*)streamToken;
    RTPSink* rtpSink = streamState->rtpSink();
    if (rtpSink == NULL) break;

    // BUGFIX: the fractional term previously re-used tv_sec (whole seconds)
    // while dividing by 1e6; it must use the tv_usec (microseconds) delta.
    return streamState->startNPT()
      + (rtpSink->mostRecentPresentationTime().tv_sec - rtpSink->initialPresentationTime().tv_sec)
      + (rtpSink->mostRecentPresentationTime().tv_usec - rtpSink->initialPresentationTime().tv_usec)/1000000.0f;
  } while (0);

  return 0.0;
}
// Variant of startPlaying that also installs a handler for non-RTSP bytes
// arriving on the client's TCP socket (RTP-over-TCP interleaving). Order
// differs from the 3-arg overload: destinations are wired up first, and the
// sinks only start playing at the end.
void StreamState
::startPlaying(Destinations* dests, TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData,
               ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
               void* serverRequestAlternativeByteHandlerClientData) {
  if (dests == NULL) return;

  if (fRTCPInstance == NULL && fRTPSink != NULL) {
    // Create (and start) a 'RTCP instance' for this RTP sink:
    fRTCPInstance = RTCPInstance::createNew(fRTPSink->envir(), fRTCPgs,
                                            fTotalBW, (unsigned char*)fMaster.fCNAME,
                                            fRTPSink, NULL /* we're a server */);
    // Note: This starts RTCP running automatically
  }

  if (dests->isTCP) {
    // Change RTP and RTCP to use the TCP socket instead of UDP:
    if (fRTPSink != NULL) {
      fRTPSink->addStreamSocket(dests->tcpSocketNum, dests->rtpChannelId);
      // Route any non-RTSP bytes on the TCP connection to the caller's handler:
      fRTPSink->setServerRequestAlternativeByteHandler(dests->tcpSocketNum,
                                                       serverRequestAlternativeByteHandler,
                                                       serverRequestAlternativeByteHandlerClientData);
    }
    if (fRTCPInstance != NULL) {
      fRTCPInstance->addStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId);
      fRTCPInstance->setSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId,
                                          rtcpRRHandler, rtcpRRHandlerClientData);
    }
  } else {
    // Tell the RTP and RTCP 'groupsocks' about this destination
    // (in case they don't already have it):
    if (fRTPgs != NULL) fRTPgs->addDestination(dests->addr, dests->rtpPort);
    if (fRTCPgs != NULL) fRTCPgs->addDestination(dests->addr, dests->rtcpPort);
    if (fRTCPInstance != NULL) {
      fRTCPInstance->setSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort,
                                          rtcpRRHandler, rtcpRRHandlerClientData);
    }
  }

  // Start the data flowing (only if not already playing):
  if (!fAreCurrentlyPlaying && fMediaSource != NULL) {
    if (fRTPSink != NULL) {
      fRTPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this);
      fAreCurrentlyPlaying = True;
    } else if (fUDPSink != NULL) {
      fUDPSink->startPlaying(*fMediaSource, afterPlayingStreamState, this);
      fAreCurrentlyPlaying = True;
    }
  }
}
// Polls for the dump sink's aux SDP line; meant to be re-run from the task
// scheduler until the line appears. Sets m_done (ending the nested
// doEventLoop in getAuxSDPLine) once the line has been captured.
void chk_sdp_done1 () {
  if (mp_sdp_line) {
    m_done = 1; // already cached from an earlier pass
  } else if (mp_dump_sink && mp_dump_sink->auxSDPLine()) {
    // Take a private copy — the sink (and its string) is released below.
    mp_sdp_line = strdup(mp_dump_sink->auxSDPLine());
    mp_dump_sink = 0;
    m_done = 1;
  } else {
    // try again
    // NOTE(review): this schedules 'chk_sdp_done', not 'chk_sdp_done1' —
    // presumably a static trampoline that calls back into this method;
    // verify that it exists elsewhere in the class.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(100000, // 100ms
                                                             chk_sdp_done, this);
  }
}
void OnDemandServerMediaSubsession::seekStream(unsigned /*clientSessionId*/, void* streamToken, double& seekNPT, double streamDuration, u_int64_t& numBytes) { numBytes = 0; // by default: unknown // Seeking isn't allowed if multiple clients are receiving data from the same source: if (fReuseFirstSource) return; StreamState* streamState = (StreamState*)streamToken; if (streamState != NULL && streamState->mediaSource() != NULL) { seekStreamSource(streamState->mediaSource(), seekNPT, streamDuration, numBytes); streamState->startNPT() = (float)seekNPT; RTPSink* rtpSink = streamState->rtpSink(); // alias if (rtpSink != NULL) rtpSink->resetPresentationTimes(); } }
// Ends delivery to one destination set. "store"-type sessions keep their TCP
// stream sockets attached (so recording can continue); everything else is
// detached from the transport.
void StreamState::endPlaying(Destinations* dests, char *videotype, char *audiotype) {
  Debug(ckite_log_message, "StreamState::endPlaying\n");

  // TCP sockets are retained when either track is of type "store".
  Boolean const keepTcpSockets =
      (strcmp(videotype, "store") == 0 || strcmp(audiotype, "store") == 0);

  if (dests->isTCP) {
    if (fRTPSink != NULL && !keepTcpSockets) {
      fRTPSink->removeStreamSocket(dests->tcpSocketNum, dests->rtpChannelId);
    }
    if (fRTCPInstance != NULL) {
      if (!keepTcpSockets) {
        fRTCPInstance->removeStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId);
      }
      // The RR handler is always cleared, even for "store" sessions.
      fRTCPInstance->setSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId, NULL, NULL);
    }
  } else {
    // Tell the RTP and RTCP 'groupsocks' to stop using these destinations:
    if (fRTPgs != NULL) fRTPgs->removeDestination(dests->addr, dests->rtpPort);
    if (fRTCPgs != NULL) fRTCPgs->removeDestination(dests->addr, dests->rtcpPort);
    if (fRTCPInstance != NULL) {
      fRTCPInstance->setSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort, NULL, NULL);
    }
  }
}
void OnDemandServerMediaSubsession::nullSeekStream(unsigned /*clientSessionId*/, void* streamToken, double streamEndTime, u_int64_t& numBytes) { numBytes = 0; // by default: unknown StreamState* streamState = (StreamState*)streamToken; if (streamState != NULL && streamState->mediaSource() != NULL) { // Because we're not seeking here, get the current NPT, and remember it as the new 'start' NPT: streamState->startNPT() = getCurrentNPT(streamToken); double duration = streamEndTime - streamState->startNPT(); if (duration < 0.0) duration = 0.0; setStreamSourceDuration(streamState->mediaSource(), duration, numBytes); RTPSink* rtpSink = streamState->rtpSink(); // alias if (rtpSink != NULL) rtpSink->resetPresentationTimes(); } }
static void afterPlaying(void* clientData) { //*env << "...done reading from file\n"; PLAY_CLIENT_DATA_T *pclientData = (PLAY_CLIENT_DATA_T *)clientData; if(pclientData == NULL){ *env << "afterPlaying: pclientData == NULL"; return; } RTPSink *videoSink = pclientData->videoSink; H264VideoStreamFramer* videoSource = pclientData->videoSource; char inputFileName[64] = {0}; memcpy(inputFileName, pclientData->inputFileName, 64); videoSink->stopPlaying(); // Note that this also closes the input file that this source read from. Medium::close(videoSource); delete pclientData; // Start playing once again: play(videoSink, inputFileName); }
// Minimal Windows streamer: captures live H.264 via Media Foundation and
// sends it as RTP to 127.0.0.1:1234 using live555. Runs forever.
int main() {
  // COM and Media Foundation must be initialized before any MF calls.
  CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
  MFStartup(MF_VERSION);

  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // NOTE(review): this aggregate init fills in_addr byte-by-byte (127.0.0.1)
  // via the Winsock S_un_b union layout — Windows-specific; not portable.
  // TODO confirm this is intentional rather than inet_addr("127.0.0.1").
  in_addr dstAddr = { 127, 0, 0, 1 };
  Groupsock rtpGroupsock(*env, dstAddr, 1233, 255);
  rtpGroupsock.addDestination(dstAddr, 1234, 0);

  RTPSink * rtpSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96); // 96 = dynamic payload type
  MediaFoundationH264LiveSource * mediaFoundationH264Source = MediaFoundationH264LiveSource::createNew(*env);
  rtpSink->startPlaying(*mediaFoundationH264Source, NULL, NULL);

  // This function call does not return.
  env->taskScheduler().doEventLoop();

  return 0;
}
// Obtains the codec-specific aux SDP line (e.g. sprop-parameter-sets) by
// briefly playing the stream into the sink and polling until the line is
// available. Blocks the caller in a nested event loop until m_done is set.
virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (mp_sdp_line) return mp_sdp_line; // cached from a previous call

  if (!mp_dump_sink) {
    // Start a one-shot 'dump' playback whose only purpose is to make the
    // sink compute its aux SDP line; chk_sdp_done polls for completion.
    mp_dump_sink = rtpSink;
    mp_dump_sink->startPlaying(*inputSource, afterPlayingDump, this);
    chk_sdp_done(this);
  }

  envir().taskScheduler().doEventLoop(&m_done); // blocking...
  return mp_sdp_line;
}
// Ends delivery to one destination set: detaches the client from the TCP
// socket (interleaved mode) or removes its address from the RTP/RTCP
// groupsocks (UDP mode), and clears its RTCP RR handler.
void StreamState::endPlaying(Destinations* dests) {
  if (dests->isTCP) {
    // Detach RTP and RTCP from the client's TCP socket:
    if (fRTPSink != NULL) {
      fRTPSink->removeStreamSocket(dests->tcpSocketNum, dests->rtpChannelId);
    }
    if (fRTCPInstance != NULL) {
      fRTCPInstance->removeStreamSocket(dests->tcpSocketNum, dests->rtcpChannelId);
      fRTCPInstance->setSpecificRRHandler(dests->tcpSocketNum, dests->rtcpChannelId, NULL, NULL);
    }
    return;
  }

  // UDP: tell the RTP and RTCP 'groupsocks' to stop using these destinations:
  if (fRTPgs != NULL) fRTPgs->removeDestination(dests->addr, dests->rtpPort);
  if (fRTCPgs != NULL) fRTCPgs->removeDestination(dests->addr, dests->rtcpPort);
  if (fRTCPInstance != NULL) {
    fRTCPInstance->setSpecificRRHandler(dests->addr.s_addr, dests->rtcpPort, NULL, NULL);
  }
}
// Suspends delivery: stops whichever sink (RTP or raw-UDP) is active and
// clears the playing flag so startPlaying() can later resume the stream.
void StreamState::pause() {
  if (fRTPSink != NULL) {
    fRTPSink->stopPlaying();
  }
  if (fUDPSink != NULL) {
    fUDPSink->stopPlaying();
  }
  fAreCurrentlyPlaying = False;
}
// Passive (pre-built, typically multicast) subsession: wraps an RTPSink and
// RTCPInstance that already exist, rather than creating streams on demand.
// Also allocates a table tracking each client's RTCP source record.
// NOTE: members initialize in declaration order regardless of the order
// written in this initializer list.
PassiveServerMediaSubsession
::PassiveServerMediaSubsession(RTPSink& rtpSink, RTCPInstance* rtcpInstance)
  : ServerMediaSubsession(rtpSink.envir()),
    fSDPLines(NULL), fRTPSink(rtpSink), fRTCPInstance(rtcpInstance) {
  // One entry per client session id.
  fClientRTCPSourceRecords = HashTable::create(ONE_WORD_HASH_KEYS);
}
// Allocates per-client transport state for this subsession: chooses server
// RTP/RTCP ports (or reuses the existing StreamState when fReuseFirstSource),
// creates the media source and sink objects, sizes the RTP send buffer, and
// records the client's destination address/ports under its session id.
void OnDemandServerMediaSubsession
::getStreamParameters(unsigned clientSessionId,
                      netAddressBits clientAddress,
                      Port const& clientRTPPort,
                      Port const& clientRTCPPort,
                      int tcpSocketNum,
                      unsigned char rtpChannelId,
                      unsigned char rtcpChannelId,
                      netAddressBits& destinationAddress,
                      u_int8_t& /*destinationTTL*/,
                      Boolean& isMulticast,
                      Port& serverRTPPort,
                      Port& serverRTCPPort,
                      void*& streamToken) {
  if (destinationAddress == 0) {
    destinationAddress = clientAddress;
    /* :TODO:2014/9/12 13:17:28:Sean: added*/
    printf("client requests unicast 0x%x\n", clientAddress);
    /* :TODO:End--- */
  }
  /* :TODO:2014/9/12 13:17:28:Sean: added no multicastAddress*/
  if (isMulticast) {
    // destinationAddress = multicastAddress;
    printf("client requests mulicast, port %d\n", fInitialPortNum);
  }
  /* :TODO:End--- */
  struct in_addr destinationAddr;
  destinationAddr.s_addr = destinationAddress;
  isMulticast = False;

  if (fLastStreamToken != NULL && fReuseFirstSource) {
    // Special case: Rather than creating a new 'StreamState',
    // we reuse the one that we've already created:
    serverRTPPort = ((StreamState*)fLastStreamToken)->serverRTPPort();
    serverRTCPPort = ((StreamState*)fLastStreamToken)->serverRTCPPort();
    ++((StreamState*)fLastStreamToken)->referenceCount();
    streamToken = fLastStreamToken;
  } else {
    // Normal case: Create a new media source:
    unsigned streamBitrate;
    FramedSource* mediaSource = createNewStreamSource(clientSessionId, streamBitrate);

    // Create 'groupsock' and 'sink' objects for the destination,
    // using previously unused server port numbers:
    RTPSink* rtpSink = NULL;
    BasicUDPSink* udpSink = NULL;
    Groupsock* rtpGroupsock = NULL;
    Groupsock* rtcpGroupsock = NULL;

    if (clientRTPPort.num() != 0 || tcpSocketNum >= 0) { // Normal case: Create destinations
      portNumBits serverPortNum;
      if (clientRTCPPort.num() == 0) {
        // We're streaming raw UDP (not RTP). Create a single groupsock:
        NoReuse dummy(envir()); // ensures that we skip over ports that are already in use
        for (serverPortNum = fInitialPortNum; ; ++serverPortNum) {
          struct in_addr dummyAddr; dummyAddr.s_addr = 0;
          serverRTPPort = serverPortNum;
          rtpGroupsock = new Groupsock(envir(), dummyAddr, serverRTPPort, 255);
          if (rtpGroupsock->socketNum() >= 0) break; // success
        }
        udpSink = BasicUDPSink::createNew(envir(), rtpGroupsock);
      } else {
        // Normal case: We're streaming RTP (over UDP or TCP). Create a pair of
        // groupsocks (RTP and RTCP), with adjacent port numbers (RTP port number even).
        // (If we're multiplexing RTCP and RTP over the same port number, it can be odd or even.)
        NoReuse dummy(envir()); // ensures that we skip over ports that are already in use
        for (portNumBits serverPortNum = fInitialPortNum; ; ++serverPortNum) {
          struct in_addr dummyAddr; dummyAddr.s_addr = 0;
          serverRTPPort = serverPortNum;
          rtpGroupsock = new Groupsock(envir(), dummyAddr, serverRTPPort, 255);
          if (rtpGroupsock->socketNum() < 0) {
            delete rtpGroupsock;
            continue; // try again
          }
          if (fMultiplexRTCPWithRTP) {
            // Use the RTP 'groupsock' object for RTCP as well:
            serverRTCPPort = serverRTPPort;
            rtcpGroupsock = rtpGroupsock;
          } else {
            // Create a separate 'groupsock' object (with the next (odd) port number) for RTCP:
            serverRTCPPort = ++serverPortNum;
            rtcpGroupsock = new Groupsock(envir(), dummyAddr, serverRTCPPort, 255);
            if (rtcpGroupsock->socketNum() < 0) {
              delete rtpGroupsock;
              delete rtcpGroupsock;
              continue; // try again
            }
          }
          break; // success
        }
        unsigned char rtpPayloadType = 96 + trackNumber()-1; // if dynamic
        rtpSink = createNewRTPSink(rtpGroupsock, rtpPayloadType, mediaSource);
        // Prefer the sink's own (nonzero) bitrate estimate:
        if (rtpSink != NULL && rtpSink->estimatedBitrate() > 0) streamBitrate = rtpSink->estimatedBitrate();
      }

      // Turn off the destinations for each groupsock. They'll get set later
      // (unless TCP is used instead):
      if (rtpGroupsock != NULL) rtpGroupsock->removeAllDestinations();
      if (rtcpGroupsock != NULL) rtcpGroupsock->removeAllDestinations();

      if (rtpGroupsock != NULL) {
        // Try to use a big send buffer for RTP - at least 0.1 second of
        // specified bandwidth and at least 50 KB
        unsigned rtpBufSize = streamBitrate * 25 / 2; // 1 kbps * 0.1 s = 12.5 bytes
        if (rtpBufSize < 50 * 1024) rtpBufSize = 50 * 1024;
        increaseSendBufferTo(envir(), rtpGroupsock->socketNum(), rtpBufSize);
      }
    }

    // Set up the state of the stream. The stream will get started later:
    streamToken = fLastStreamToken
      = new StreamState(*this, serverRTPPort, serverRTCPPort, rtpSink, udpSink,
                        streamBitrate, mediaSource, rtpGroupsock, rtcpGroupsock/*Sean added*/,
                        fParentSession->envir());
  }

  // Record these destinations as being for this client session id:
  Destinations* destinations;
  if (tcpSocketNum < 0) { // UDP
    destinations = new Destinations(destinationAddr, clientRTPPort, clientRTCPPort);
  } else { // TCP
    destinations = new Destinations(tcpSocketNum, rtpChannelId, rtcpChannelId);
  }
  fDestinationsHashTable->Add((char const*)clientSessionId, destinations);
}
// Builds the "RTPSink" that re-packetizes the proxied back-end stream for our
// own clients, dispatching on the codec name. Returns NULL for codecs that
// cannot (yet) be proxied. Also disables RTCP "SR" reports on the new sink
// until presentation times have been RTCP-synchronized.
RTPSink* ProxyServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
                   FramedSource* inputSource) {
  if (verbosityLevel() > 0) {
    envir() << *this << "::createNewRTPSink()\n";
  }

  // Create (and return) the appropriate "RTPSink" object for our codec:
  // (Note: The configuration string might not be correct if a transcoder is used. FIX!) #####
  RTPSink* newSink;
  if (strcmp(fCodecName, "AC3") == 0 || strcmp(fCodecName, "EAC3") == 0) {
    newSink = AC3AudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                         fClientMediaSubsession.rtpTimestampFrequency());
#if 0 // This code does not work; do *not* enable it:
  } else if (strcmp(fCodecName, "AMR") == 0 || strcmp(fCodecName, "AMR-WB") == 0) {
    Boolean isWideband = strcmp(fCodecName, "AMR-WB") == 0;
    newSink = AMRAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                         isWideband, fClientMediaSubsession.numChannels());
#endif
  } else if (strcmp(fCodecName, "DV") == 0) {
    newSink = DVVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(fCodecName, "GSM") == 0) {
    newSink = GSMAudioRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(fCodecName, "H263-1998") == 0 || strcmp(fCodecName, "H263-2000") == 0) {
    newSink = H263plusVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                              fClientMediaSubsession.rtpTimestampFrequency());
  } else if (strcmp(fCodecName, "H264") == 0) {
    newSink = H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                          fClientMediaSubsession.fmtp_spropparametersets());
  } else if (strcmp(fCodecName, "H265") == 0) {
    newSink = H265VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                          fClientMediaSubsession.fmtp_spropvps(),
                                          fClientMediaSubsession.fmtp_spropsps(),
                                          fClientMediaSubsession.fmtp_sproppps());
  } else if (strcmp(fCodecName, "JPEG") == 0) {
    newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, 26, 90000, "video", "JPEG",
                                       1/*numChannels*/, False/*allowMultipleFramesPerPacket*/, False/*doNormalMBitRule*/);
  } else if (strcmp(fCodecName, "MP4A-LATM") == 0) {
    newSink = MPEG4LATMAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                               fClientMediaSubsession.rtpTimestampFrequency(),
                                               fClientMediaSubsession.fmtp_config(),
                                               fClientMediaSubsession.numChannels());
  } else if (strcmp(fCodecName, "MP4V-ES") == 0) {
    newSink = MPEG4ESVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                             fClientMediaSubsession.rtpTimestampFrequency(),
                                             fClientMediaSubsession.attrVal_unsigned("profile-level-id"),
                                             fClientMediaSubsession.fmtp_config());
  } else if (strcmp(fCodecName, "MPA") == 0) {
    newSink = MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(fCodecName, "MPA-ROBUST") == 0) {
    newSink = MP3ADURTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(fCodecName, "MPEG4-GENERIC") == 0) {
    newSink = MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                             fClientMediaSubsession.rtpTimestampFrequency(),
                                             fClientMediaSubsession.mediumName(),
                                             fClientMediaSubsession.attrVal_strToLower("mode"),
                                             fClientMediaSubsession.fmtp_config(),
                                             fClientMediaSubsession.numChannels());
  } else if (strcmp(fCodecName, "MPV") == 0) {
    newSink = MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(fCodecName, "OPUS") == 0) {
    newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                       48000, "audio", "OPUS", 2, False/*only 1 Opus 'packet' in each RTP packet*/);
  } else if (strcmp(fCodecName, "T140") == 0) {
    newSink = T140TextRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(fCodecName, "THEORA") == 0) {
    newSink = TheoraVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                            fClientMediaSubsession.fmtp_config());
  } else if (strcmp(fCodecName, "VORBIS") == 0) {
    newSink = VorbisAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                            fClientMediaSubsession.rtpTimestampFrequency(),
                                            fClientMediaSubsession.numChannels(),
                                            fClientMediaSubsession.fmtp_config());
  } else if (strcmp(fCodecName, "VP8") == 0) {
    newSink = VP8VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(fCodecName, "VP9") == 0) {
    newSink = VP9VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(fCodecName, "AMR") == 0 || strcmp(fCodecName, "AMR-WB") == 0) {
    // Proxying of these codecs is currently *not* supported, because the data received by the "RTPSource" object is not in a
    // form that can be fed directly into a corresponding "RTPSink" object.
    if (verbosityLevel() > 0) {
      envir() << "\treturns NULL (because we currently don't support the proxying of \""
              << fClientMediaSubsession.mediumName() << "/" << fCodecName << "\" streams)\n";
    }
    return NULL;
  } else if (strcmp(fCodecName, "QCELP") == 0 ||
             strcmp(fCodecName, "H261") == 0 ||
             strcmp(fCodecName, "H263-1998") == 0 || strcmp(fCodecName, "H263-2000") == 0 ||
             strcmp(fCodecName, "X-QT") == 0 || strcmp(fCodecName, "X-QUICKTIME") == 0) {
    // This codec requires a specialized RTP payload format; however, we don't yet have an appropriate "RTPSink" subclass for it:
    if (verbosityLevel() > 0) {
      envir() << "\treturns NULL (because we don't have a \"RTPSink\" subclass for this RTP payload format)\n";
    }
    return NULL;
  } else {
    // This codec is assumed to have a simple RTP payload format that can be implemented just with a "SimpleRTPSink":
    Boolean allowMultipleFramesPerPacket = True; // by default
    Boolean doNormalMBitRule = True; // by default
    // Some codecs change the above default parameters:
    if (strcmp(fCodecName, "MP2T") == 0) {
      doNormalMBitRule = False; // no RTP 'M' bit
    }
    newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                       fClientMediaSubsession.rtpTimestampFrequency(),
                                       fClientMediaSubsession.mediumName(),
                                       fCodecName,
                                       fClientMediaSubsession.numChannels(),
                                       allowMultipleFramesPerPacket,
                                       doNormalMBitRule);
  }

  // Because our relayed frames' presentation times are inaccurate until the input frames have been RTCP-synchronized,
  // we temporarily disable RTCP "SR" reports for this "RTPSink" object:
  newSink->enableRTCPReports() = False;

  // Also tell our "PresentationTimeSubsessionNormalizer" object about the "RTPSink", so it can enable RTCP "SR" reports later:
  PresentationTimeSubsessionNormalizer* ssNormalizer;
  if (strcmp(fCodecName, "H264") == 0 || strcmp(fCodecName, "H265") == 0 ||
      strcmp(fCodecName, "MP4V-ES") == 0 ||
      strcmp(fCodecName, "MPV") == 0 ||
      strcmp(fCodecName, "DV") == 0) {
    // There was a separate 'framer' object in front of the "PresentationTimeSubsessionNormalizer", so go back one object to get it:
    ssNormalizer = (PresentationTimeSubsessionNormalizer*)(((FramedFilter*)inputSource)->inputSource());
  } else {
    ssNormalizer = (PresentationTimeSubsessionNormalizer*)inputSource;
  }
  ssNormalizer->setRTPSink(newSink);

  return newSink;
}
// Older variant of the proxy sink factory: dispatches on the back-end
// subsession's codec name to build a matching "RTPSink". Returns NULL for
// codecs with no suitable sink class. Disables RTCP "SR" reports on the new
// sink until the relayed presentation times are RTCP-synchronized.
RTPSink* ProxyServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
                   FramedSource* inputSource) {
  if (verbosityLevel() > 0) {
    envir() << *this << "::createNewRTPSink()\n";
  }

  // Create (and return) the appropriate "RTPSink" object for our codec:
  RTPSink* newSink;
  char const* const codecName = fClientMediaSubsession.codecName();
  if (strcmp(codecName, "AC3") == 0 || strcmp(codecName, "EAC3") == 0) {
    newSink = AC3AudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                         fClientMediaSubsession.rtpTimestampFrequency());
  } else if (strcmp(codecName, "AMR") == 0 || strcmp(codecName, "AMR-WB") == 0) {
    Boolean isWideband = strcmp(codecName, "AMR-WB") == 0;
    newSink = AMRAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                         isWideband, fClientMediaSubsession.numChannels());
  } else if (strcmp(codecName, "DV") == 0) {
    newSink = DVVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(codecName, "GSM") == 0) {
    newSink = GSMAudioRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(codecName, "H263-1998") == 0 || strcmp(codecName, "H263-2000") == 0) {
    newSink = H263plusVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                              fClientMediaSubsession.rtpTimestampFrequency());
  } else if (strcmp(codecName, "H264") == 0) {
    newSink = H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                          fClientMediaSubsession.fmtp_spropparametersets());
  } else if (strcmp(codecName, "MP4A-LATM") == 0) {
    newSink = MPEG4LATMAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                               fClientMediaSubsession.rtpTimestampFrequency(),
                                               fClientMediaSubsession.fmtp_config(),
                                               fClientMediaSubsession.numChannels());
  } else if (strcmp(codecName, "MP4V-ES") == 0) {
    newSink = MPEG4ESVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                             fClientMediaSubsession.rtpTimestampFrequency(),
                                             fClientMediaSubsession.fmtp_profile_level_id(),
                                             fClientMediaSubsession.fmtp_config());
  } else if (strcmp(codecName, "MPA") == 0) {
    newSink = MPEG1or2AudioRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(codecName, "MPA-ROBUST") == 0) {
    newSink = MP3ADURTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(codecName, "MPEG4-GENERIC") == 0) {
    newSink = MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                             fClientMediaSubsession.rtpTimestampFrequency(),
                                             fClientMediaSubsession.mediumName(),
                                             fClientMediaSubsession.fmtp_mode(),
                                             fClientMediaSubsession.fmtp_config(),
                                             fClientMediaSubsession.numChannels());
  } else if (strcmp(codecName, "MPV") == 0) {
    newSink = MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock);
  } else if (strcmp(codecName, "T140") == 0) {
    newSink = T140TextRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(codecName, "VORBIS") == 0) {
    newSink = VorbisAudioRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                            fClientMediaSubsession.rtpTimestampFrequency(),
                                            fClientMediaSubsession.numChannels(),
                                            fClientMediaSubsession.fmtp_config());
  } else if (strcmp(codecName, "VP8") == 0) {
    newSink = VP8VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
  } else if (strcmp(codecName, "QCELP") == 0 ||
             strcmp(codecName, "H261") == 0 ||
             strcmp(codecName, "H263-1998") == 0 || strcmp(codecName, "H263-2000") == 0 ||
             strcmp(codecName, "X-QT") == 0 || strcmp(codecName, "X-QUICKTIME") == 0) {
    // This codec requires a specialized RTP payload format; however, we don't yet have an appropriate "RTPSink" subclass for it:
    if (verbosityLevel() > 0) {
      envir() << "\treturns NULL (because we don't have a \"RTPSink\" subclass for this RTP payload format)\n";
    }
    return NULL;
  } else {
    // This codec is assumed to have a simple RTP paylaod format that can be implemented just with a "SimpleRTPSink":
    Boolean allowMultipleFramesPerPacket = True; // by default
    Boolean doNormalMBitRule = True; // by default
    // Some codecs change the above default parameters:
    if (strcmp(codecName, "MP2T") == 0) {
      doNormalMBitRule = False; // no RTP 'M' bit
    }
    newSink = SimpleRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic,
                                       fClientMediaSubsession.rtpTimestampFrequency(),
                                       fClientMediaSubsession.mediumName(),
                                       fClientMediaSubsession.codecName(),
                                       fClientMediaSubsession.numChannels(),
                                       allowMultipleFramesPerPacket,
                                       doNormalMBitRule);
  }

  // Because our relayed frames' presentation times are inaccurate until the input frames have been RTCP-synchronized,
  // we temporarily disable RTCP "SR" reports for this "RTPSink" object:
  newSink->enableRTCPReports() = False;

  // Also tell our "PresentationTimeSubsessionNormalizer" object about the "RTPSink", so it can enable RTCP "SR" reports later:
  PresentationTimeSubsessionNormalizer* ssNormalizer;
  if (strcmp(codecName, "H264") == 0 ||
      strcmp(codecName, "MP4V-ES") == 0 ||
      strcmp(codecName, "MPV") == 0 ||
      strcmp(codecName, "DV") == 0) {
    // There was a separate 'framer' object in front of the "PresentationTimeSubsessionNormalizer", so go back one object to get it:
    ssNormalizer = (PresentationTimeSubsessionNormalizer*)(((FramedFilter*)inputSource)->inputSource());
  } else {
    ssNormalizer = (PresentationTimeSubsessionNormalizer*)inputSource;
  }
  ssNormalizer->setRTPSink(newSink);

  return newSink;
}
// Suspends delivery: logs, stops whichever sink (RTP or raw-UDP) is active,
// and clears the playing flag so a later startPlaying() can resume.
void StreamState::pause() {
  DEBUG_LOG(INF, "StreamState::pause");
  if (fRTPSink != NULL) {
    fRTPSink->stopPlaying();
  }
  if (fUDPSink != NULL) {
    fUDPSink->stopPlaying();
  }
  fAreCurrentlyPlaying = False;
}
// Passive subsession constructor (variant without per-client RTCP records):
// wraps an already-existing RTPSink/RTCPInstance; the SDP description
// (fSDPLines) is left NULL to be built lazily.
PassiveServerMediaSubsession
::PassiveServerMediaSubsession(RTPSink& rtpSink, RTCPInstance* rtcpInstance)
  : ServerMediaSubsession(rtpSink.envir()),
    fRTPSink(rtpSink), fRTCPInstance(rtcpInstance), fSDPLines(NULL) {
}