Example #1
Boolean MediaSubsession::initiate(int useSpecialRTPoffset) {
  if (fReadSource != NULL) return True; // has already been initiated

  do {
    if (fCodecName == NULL) {
      env().setResultMsg("Codec is unspecified");
      break;
    }

    // Create RTP and RTCP 'Groupsocks' on which to receive incoming data.
    // (Groupsocks will work even for unicast addresses)
    struct in_addr tempAddr;
    tempAddr.s_addr = connectionEndpointAddress();
        // This could get changed later, as a result of an RTSP "SETUP"

    if (fClientPortNum != 0) {
      // The sockets' port numbers were specified for us.  Use these:
      fClientPortNum = fClientPortNum&~1; // even
      if (isSSM()) {
	fRTPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, fClientPortNum);
      } else {
	fRTPSocket = new Groupsock(env(), tempAddr, fClientPortNum, 255);
      }
      if (fRTPSocket == NULL) {
	env().setResultMsg("Failed to create RTP socket");
	break;
      }
      
      // Set our RTCP port to be the RTP port +1
      portNumBits const rtcpPortNum = fClientPortNum|1;
      if (isSSM()) {
	fRTCPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, rtcpPortNum);
      } else {
	fRTCPSocket = new Groupsock(env(), tempAddr, rtcpPortNum, 255);
      }
      if (fRTCPSocket == NULL) {
	char tmpBuf[100];
	sprintf(tmpBuf, "Failed to create RTCP socket (port %d)", rtcpPortNum);
	env().setResultMsg(tmpBuf);
	break;
      }
    } else {
      // Port numbers were not specified in advance, so we use ephemeral port numbers.
      // Create sockets until we get a port-number pair (even: RTP; even+1: RTCP).
      // We need to make sure that we don't keep trying to use the same bad port
      // numbers over and over again, so we store bad sockets in a table, and
      // delete them all when we're done.
      HashTable* socketHashTable = HashTable::create(ONE_WORD_HASH_KEYS);
      if (socketHashTable == NULL) break;
      Boolean success = False;

      while (1) {
	// Create a new socket:
	if (isSSM()) {
	  fRTPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, 0);
	} else {
	  fRTPSocket = new Groupsock(env(), tempAddr, 0, 255);
	}
	if (fRTPSocket == NULL) {
	  env().setResultMsg("MediaSession::initiate(): unable to create RTP and RTCP sockets");
	  break;
	}

	// Get the client port number, and check whether it's even (for RTP):
	Port clientPort(0);
	if (!getSourcePort(env(), fRTPSocket->socketNum(), clientPort)) {
	  break;
	}
	fClientPortNum = ntohs(clientPort.num()); 
	if ((fClientPortNum&1) != 0) { // it's odd
	  // Record this socket in our table, and keep trying:
	  unsigned key = (unsigned)fClientPortNum;
	  socketHashTable->Add((char const*)key, fRTPSocket);
	  continue;
	}

	// Make sure we can use the next (i.e., odd) port number, for RTCP:
	portNumBits rtcpPortNum = fClientPortNum|1;
	if (isSSM()) {
	  fRTCPSocket = new Groupsock(env(), tempAddr, fSourceFilterAddr, rtcpPortNum);
	} else {
	  fRTCPSocket = new Groupsock(env(), tempAddr, rtcpPortNum, 255);
	}
	if (fRTCPSocket != NULL) {
	  // Success! Use these two sockets (and delete any others that we've created):
	  Groupsock* oldGS;
	  while ((oldGS = (Groupsock*)socketHashTable->RemoveNext()) != NULL) {
	    delete oldGS;
	  }
	  delete socketHashTable;
	  success = True;
	  break;
	} else {
	  // We couldn't create the RTCP socket (perhaps that port number's already in use elsewhere?).
	  // Record the first socket in our table, and keep trying:
	  unsigned key = (unsigned)fClientPortNum;
	  socketHashTable->Add((char const*)key, fRTPSocket);
	  continue;
	}
      }
      if (!success) break; // a fatal error occurred trying to create the RTP and RTCP sockets; we can't continue
    }

    // ASSERT: fRTPSocket != NULL && fRTCPSocket != NULL
    if (isSSM()) {
      // Special case for RTCP SSM: Send RTCP packets back to the source via unicast:
      fRTCPSocket->changeDestinationParameters(fSourceFilterAddr,0,~0);
    }

    //////////////////////////////////////////////////////////////////////////
    // Trim out the Sources that aren't needed:

    // Check "fProtocolName"
    if (strcmp(fProtocolName, "UDP") == 0) {

#ifndef CUT_MIN_SIZE
      // A UDP-packetized stream (*not* an RTP stream)
      fReadSource = BasicUDPSource::createNew(env(), fRTPSocket);
      fRTPSource = NULL; // Note!

      if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fReadSource);
	    // this sets "durationInMicroseconds" correctly, based on the PCR values
      }
#endif

    } else {
      // Check "fCodecName" against the set of codecs that we support,
      // and create our RTP source accordingly
      // (Later make this code more efficient, as this set grows #####)
      // (Also, add more fmts that can be implemented by SimpleRTPSource#####)
      Boolean createSimpleRTPSource = False;
      Boolean doNormalMBitRule = False; // used if "createSimpleRTPSource"

	  if (strcmp(fCodecName, "H264") == 0) {
		  fReadSource = fRTPSource
			  = H264VideoRTPSource::createNew(env(), fRTPSocket,
			  fRTPPayloadFormat,
			  fRTPTimestampFrequency);
	  }  
	  else if (strcmp(fCodecName, "MP4V-ES") == 0) 
	  { // MPEG-4 Elem Str vid
		  fReadSource = fRTPSource
			  = MPEG4ESVideoRTPSource::createNew(env(), fRTPSocket,
			  fRTPPayloadFormat,
			  fRTPTimestampFrequency);
	  }
#ifndef CUT_MIN_SIZE
      else if (strcmp(fCodecName, "QCELP") == 0) { // QCELP audio
	fReadSource =
	  QCELPAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR") == 0) { // AMR audio (narrowband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 0 /*isWideband*/,
				       fNumChannels, fOctetalign, fInterleaving,
				       fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR-WB") == 0) { // AMR audio (wideband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 1 /*isWideband*/,
				       fNumChannels, fOctetalign, fInterleaving,
				       fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "MPA") == 0) { // MPEG-1 or 2 audio
	fReadSource = fRTPSource
	  = MPEG1or2AudioRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPA-ROBUST") == 0) { // robust MP3 audio
	fRTPSource
	  = MP3ADURTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency);
	if (fRTPSource == NULL) break;

	// Add a filter that deinterleaves the ADUs after depacketizing them:
	MP3ADUdeinterleaver* deinterleaver
	  = MP3ADUdeinterleaver::createNew(env(), fRTPSource);
	if (deinterleaver == NULL) break;

	// Add another filter that converts these ADUs to MP3 frames:
	fReadSource = MP3FromADUSource::createNew(env(), deinterleaver);
      } else if (strcmp(fCodecName, "X-MP3-DRAFT-00") == 0) {
	// a non-standard variant of "MPA-ROBUST" used by RealNetworks
	// (one 'ADU'ized MP3 frame per packet; no headers)
	fRTPSource
	  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency,
				       "audio/MPA-ROBUST" /*hack*/);
	if (fRTPSource == NULL) break;

	// Add a filter that converts these ADUs to MP3 frames:
	fReadSource = MP3FromADUSource::createNew(env(), fRTPSource,
						  False /*no ADU header*/);
      } else if (strcmp(fCodecName, "MP4A-LATM") == 0) { // MPEG-4 LATM audio
	fReadSource = fRTPSource
	  = MPEG4LATMAudioRTPSource::createNew(env(), fRTPSocket,
					       fRTPPayloadFormat,
					       fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "AC3") == 0) { // AC3 audio
	fReadSource = fRTPSource
	  = AC3AudioRTPSource::createNew(env(), fRTPSocket,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPEG4-GENERIC") == 0) {
	fReadSource = fRTPSource
	  = MPEG4GenericRTPSource::createNew(env(), fRTPSocket,
					     fRTPPayloadFormat,
					     fRTPTimestampFrequency,
					     fMediumName, fMode,
					     fSizelength, fIndexlength,
					     fIndexdeltalength);
      } else if (strcmp(fCodecName, "MPV") == 0) { // MPEG-1 or 2 video
	fReadSource = fRTPSource
	  = MPEG1or2VideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fRTPSource = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
						fRTPTimestampFrequency, "video/MP2T",
						0, False);
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fRTPSource);
	    // this sets "durationInMicroseconds" correctly, based on the PCR values
      } else if (strcmp(fCodecName, "H261") == 0) { // H.261
	fReadSource = fRTPSource
	  = H261VideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "H263-1998") == 0 ||
		 strcmp(fCodecName, "H263-2000") == 0) { // H.263+
	fReadSource = fRTPSource
	  = H263plusVideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "JPEG") == 0) { // motion JPEG
	fReadSource = fRTPSource
	  = JPEGVideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency,
					  videoWidth(),
					  videoHeight());
      } else if (strcmp(fCodecName, "X-QT") == 0
		 || strcmp(fCodecName, "X-QUICKTIME") == 0) {
	// Generic QuickTime streams, as defined in
	// <http://developer.apple.com/quicktime/icefloe/dispatch026.html>
	char* mimeType
	  = new char[strlen(mediumName()) + strlen(codecName()) + 2] ;
	sprintf(mimeType, "%s/%s", mediumName(), codecName());
	fReadSource = fRTPSource
	  = QuickTimeGenericRTPSource::createNew(env(), fRTPSocket,
						 fRTPPayloadFormat,
						 fRTPTimestampFrequency,
						 mimeType);
	delete[] mimeType;
#ifdef SUPPORT_REAL_RTSP
      } else if (strcmp(fCodecName, "X-PN-REALAUDIO") == 0 ||
		 strcmp(fCodecName, "X-PN-MULTIRATE-REALAUDIO-LIVE") == 0 ||
		 strcmp(fCodecName, "X-PN-REALVIDEO") == 0 ||
		 strcmp(fCodecName, "X-PN-MULTIRATE-REALVIDEO-LIVE") == 0) {
	// A RealNetworks 'RDT' stream (*not* an RTP stream)
	fReadSource = RealRDTSource::createNew(env());
	fRTPSource = NULL; // Note!
	parentSession().isRealNetworksRDT = True;
#endif
      }
#endif // CUT_MIN_SIZE

      else if (strcmp(fCodecName, "PCMU") == 0 // PCM u-law audio
	       || strcmp(fCodecName, "GSM") == 0 // GSM audio
	       || strcmp(fCodecName, "PCMA") == 0 // PCM a-law audio
	       || strcmp(fCodecName, "L16") == 0 // 16-bit linear audio
	       || strcmp(fCodecName, "MP1S") == 0 // MPEG-1 System Stream
	       || strcmp(fCodecName, "MP2P") == 0 // MPEG-2 Program Stream
	       || strcmp(fCodecName, "L8") == 0 // 8-bit linear audio
	       || strcmp(fCodecName, "G726-16") == 0 // G.726, 16 kbps
	       || strcmp(fCodecName, "G726-24") == 0 // G.726, 24 kbps
	       || strcmp(fCodecName, "G726-32") == 0 // G.726, 32 kbps
	       || strcmp(fCodecName, "G726-40") == 0 // G.726, 40 kbps
	       || strcmp(fCodecName, "SPEEX") == 0 // SPEEX audio
	       ) {
	createSimpleRTPSource = True;
	useSpecialRTPoffset = 0;
      } else if (useSpecialRTPoffset >= 0) {
	// We don't know this RTP payload format, but try to receive
	// it using a 'SimpleRTPSource' with the specified header offset:
	createSimpleRTPSource = True;
      } else {
	env().setResultMsg("RTP payload format unknown or not supported");
	break;
      }

      if (createSimpleRTPSource) {
	char* mimeType
	  = new char[strlen(mediumName()) + strlen(codecName()) + 2] ;
	sprintf(mimeType, "%s/%s", mediumName(), codecName());
	fReadSource = fRTPSource
	  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency, mimeType,
				       (unsigned)useSpecialRTPoffset,
				       doNormalMBitRule);
	delete[] mimeType;
      }
    }

    if (fReadSource == NULL) {
      env().setResultMsg("Failed to create read source");
      break;
    }

    // Finally, create our RTCP instance. (It starts running automatically)
    if (fRTPSource != NULL) {
      unsigned totSessionBandwidth = 500; // HACK - later get from SDP#####
      fRTCPInstance = RTCPInstance::createNew(env(), fRTCPSocket,
					      totSessionBandwidth,
					      (unsigned char const*)
					      fParent.CNAME(),
					      NULL /* we're a client */,
					      fRTPSource);
      if (fRTCPInstance == NULL) {
	env().setResultMsg("Failed to create RTCP instance");
	break;
      }
    }

    return True;
  } while (0);

  delete fRTPSocket; fRTPSocket = NULL;
  delete fRTCPSocket; fRTCPSocket = NULL;
  Medium::close(fRTCPInstance); fRTCPInstance = NULL;
  Medium::close(fReadSource); fReadSource = fRTPSource = NULL;
  fClientPortNum = 0;
  return False;
}
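
For context, here is a minimal sketch of the calling side, assuming the stock live555 client API (MediaSession::createNew() and MediaSubsessionIterator); the helper name, the SDP source, and the error reporting are illustrative, not part of the example above:

#include "liveMedia.hh"

// Sketch: initiate every subsession described by an SDP description.
// "sdpDescription" would typically come from an RTSP "DESCRIBE" response.
Boolean initiateAllSubsessions(UsageEnvironment& env, char const* sdpDescription) {
  MediaSession* session = MediaSession::createNew(env, sdpDescription);
  if (session == NULL) return False; // reason is in env.getResultMsg()

  Boolean allOK = True;
  MediaSubsessionIterator iter(*session);
  MediaSubsession* subsession;
  while ((subsession = iter.next()) != NULL) {
    // initiate() is the function shown above; on failure it tears down its
    // sockets/sources and records the reason via setResultMsg():
    if (!subsession->initiate(-1 /*no special RTP offset, the stock default*/)) {
      env << "Failed to initiate \"" << subsession->codecName()
          << "\" subsession: " << env.getResultMsg() << "\n";
      allOK = False;
    }
  }
  return allOK;
}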
Example #2
// IDemux
void CInputStream::UpdateStreams()
{
  DisposeStreams();

  INPUTSTREAM_IDS streamIDs;
  try
  {
    streamIDs = m_pStruct->GetStreamIds();
  }
  catch (std::exception &e)
  {
    DisposeStreams();
    CLog::Log(LOGERROR, "CInputStream::UpdateStreams - error GetStreamIds. Reason: %s", e.what());
    return;
  }

  if (streamIDs.m_streamCount > INPUTSTREAM_IDS::MAX_STREAM_COUNT)
  {
    DisposeStreams();
    return;
  }

  for (unsigned int i=0; i<streamIDs.m_streamCount; i++)
  {
    INPUTSTREAM_INFO stream;
    try
    {
      stream = m_pStruct->GetStream(streamIDs.m_streamIds[i]);
    }
    catch (std::exception &e)
    {
      DisposeStreams();
      CLog::Log(LOGERROR, "CInputStream::GetTotalTime - error GetStream. Reason: %s", e.what());
      return;
    }

    if (stream.m_streamType == INPUTSTREAM_INFO::TYPE_NONE)
      continue;

    std::string codecName(stream.m_codecName);
    StringUtils::ToLower(codecName);
    AVCodec *codec = avcodec_find_decoder_by_name(codecName.c_str());
    if (!codec)
      continue;

    CDemuxStream *demuxStream;

    if (stream.m_streamType == INPUTSTREAM_INFO::TYPE_AUDIO)
    {
      CDemuxStreamAudio *audioStream = new CDemuxStreamAudio();

      audioStream->iChannels = stream.m_Channels;
      audioStream->iSampleRate = stream.m_SampleRate;
      audioStream->iBlockAlign = stream.m_BlockAlign;
      audioStream->iBitRate = stream.m_BitRate;
      audioStream->iBitsPerSample = stream.m_BitsPerSample;
      demuxStream = audioStream;
    }
    else if (stream.m_streamType == INPUTSTREAM_INFO::TYPE_VIDEO)
    {
      CDemuxStreamVideo *videoStream = new CDemuxStreamVideo();

      videoStream->iFpsScale = stream.m_FpsScale;
      videoStream->iFpsRate = stream.m_FpsRate;
      videoStream->iWidth = stream.m_Width;
      videoStream->iHeight = stream.m_Height;
      videoStream->fAspect = stream.m_Aspect;
      videoStream->stereo_mode = "mono";
      demuxStream = videoStream;
    }
    else if (stream.m_streamType == INPUTSTREAM_INFO::TYPE_SUBTITLE)
    {
      // TODO needs identifier in INPUTSTREAM_INFO
      continue;
    }
    else
      continue;

    demuxStream->codec = codec->id;
    demuxStream->bandwidth = stream.m_Bandwidth;
    demuxStream->codecName = stream.m_codecInternalName;
    demuxStream->uniqueId = streamIDs.m_streamIds[i];
    demuxStream->language[0] = stream.m_language[0];
    demuxStream->language[1] = stream.m_language[1];
    demuxStream->language[2] = stream.m_language[2];
    demuxStream->language[3] = stream.m_language[3];

    if (stream.m_ExtraData && stream.m_ExtraSize)
    {
      demuxStream->ExtraData = new uint8_t[stream.m_ExtraSize];
      demuxStream->ExtraSize = stream.m_ExtraSize;
      for (unsigned int j=0; j<stream.m_ExtraSize; j++)
        demuxStream->ExtraData[j] = stream.m_ExtraData[j];
    }

    m_streams[demuxStream->uniqueId] = demuxStream;
  }
}
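
A side note on the ExtraData handling above: the element-wise copy loop is equivalent to a single memcpy. A minimal sketch of a hypothetical helper (not part of Kodi's API) that performs the same copy:

#include <cstdint>
#include <cstring>

// Hypothetical helper: duplicate a codec extradata buffer in one call.
// Behaviorally identical to the byte-by-byte loop in UpdateStreams() above.
static uint8_t* CopyExtraData(const uint8_t* src, unsigned int size)
{
  uint8_t* dst = new uint8_t[size];
  std::memcpy(dst, src, size);
  return dst;
}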
Example #3
void PhVideoDecoder::open(QString fileName)
{
	close();
	PHDEBUG << fileName;

	_currentFrame = PHFRAMEMIN;

	if(avformat_open_input(&_formatContext, fileName.toStdString().c_str(), NULL, NULL) < 0) {
		emit openFailed();
		close();
		return;
	}

	PHDEBUG << "Retrieve stream information";
	if (avformat_find_stream_info(_formatContext, NULL) < 0) {
		emit openFailed();
		close();
		return; // Couldn't find stream information
	}

	// Dump the format info only when debug logging is enabled (kept quiet when running specs)
	if(PhDebug::logMask() & 1)
		av_dump_format(_formatContext, 0, fileName.toStdString().c_str(), 0);

	// Find the video (and optionally audio) streams:
	for(int i = 0; i < (int)_formatContext->nb_streams; i++) {
		AVMediaType streamType = _formatContext->streams[i]->codec->codec_type;
		PHDEBUG << i << ":" << streamType;
		switch(streamType) {
		case AVMEDIA_TYPE_VIDEO:
			// Some containers are advertised with several video streams.
			// For example, one is the main stream and the other one is just a cover picture (single frame).
			// Here we choose the one that has the largest number of frames.
			if (!_videoStream || _videoStream->nb_frames < _formatContext->streams[i]->nb_frames) {
				_videoStream = _formatContext->streams[i];
			}
			PHDEBUG << "\t=> video";
			break;
		case AVMEDIA_TYPE_AUDIO:
			if(_useAudio && (_audioStream == NULL))
				_audioStream = _formatContext->streams[i];
			PHDEBUG << "\t=> audio";
			break;
		default:
			PHDEBUG << "\t=> unknown";
			break;
		}
	}

	if(_videoStream == NULL) {
		emit openFailed();
		close();
		return;
	}

	// Looking for timecode type
	_tcType = PhTimeCode::computeTimeCodeType(this->framePerSecond());

	PHDEBUG << "size : " << _videoStream->codec->width << "x" << _videoStream->codec->height;
	AVCodec * videoCodec = avcodec_find_decoder(_videoStream->codec->codec_id);
	if(videoCodec == NULL) {
		PHDEBUG << "Unable to find the codec:" << _videoStream->codec->codec_id;
		emit openFailed();
		close();
		return;
	}

	if (avcodec_open2(_videoStream->codec, videoCodec, NULL) < 0) {
		PHDEBUG << "Unable to open the codec:" << _videoStream->codec;
		emit openFailed();
		close();
		return;
	}

	_videoFrame = av_frame_alloc();

	if(_audioStream) {
		AVCodec* audioCodec = avcodec_find_decoder(_audioStream->codec->codec_id);
		if(audioCodec) {
			if(avcodec_open2(_audioStream->codec, audioCodec, NULL) < 0) {
				PHDEBUG << "Unable to open audio codec.";
				_audioStream = NULL;
			}
			else {
				_audioFrame = av_frame_alloc();
				PHDEBUG << "Audio OK.";
			}
		}
		else {
			PHDEBUG << "Unable to find codec for audio.";
			_audioStream = NULL;
		}
	}

	_fileName = fileName;

	emit opened(_tcType, frameIn(), frameLength(), width(), height(), codecName());
}
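
The stream-scanning loop above picks the video stream with the largest nb_frames by hand. libavformat also offers av_find_best_stream(), which selects a stream and its decoder in one call; note that it applies FFmpeg's own heuristic, so it is not guaranteed to reproduce the most-frames rule. A sketch against the same old (pre-AVCodecParameters) API the example uses:

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}

// Sketch: select the "best" video stream and its decoder in one call.
// formatContext is assumed to be already opened and probed, as in open() above.
static AVStream* findVideoStream(AVFormatContext* formatContext, AVCodec** codecOut)
{
	int index = av_find_best_stream(formatContext, AVMEDIA_TYPE_VIDEO,
	                                -1 /*any stream*/, -1 /*no related stream*/,
	                                codecOut, 0 /*flags*/);
	if (index < 0)
		return NULL; // stream or decoder not found
	return formatContext->streams[index];
}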
Example #4
Boolean MediaSubsession::createSourceObjects(int useSpecialRTPoffset) {
  do {
    // First, check "fProtocolName"
    if (strcmp(fProtocolName, "UDP") == 0) {
      // A UDP-packetized stream (*not* an RTP stream)
      fReadSource = BasicUDPSource::createNew(env(), fRTPSocket);
      fRTPSource = NULL; // Note!
      
      if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fReadSource);
	// this sets "durationInMicroseconds" correctly, based on the PCR values
      }
    } else {
      // Check "fCodecName" against the set of codecs that we support,
      // and create our RTP source accordingly
      // (Later make this code more efficient, as this set grows #####)
      // (Also, add more fmts that can be implemented by SimpleRTPSource#####)
      Boolean createSimpleRTPSource = False; // by default; can be changed below
      Boolean doNormalMBitRule = False; // default behavior if "createSimpleRTPSource" is True
      if (strcmp(fCodecName, "QCELP") == 0) { // QCELP audio
	fReadSource =
	  QCELPAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR") == 0) { // AMR audio (narrowband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 0 /*isWideband*/,
				       fNumChannels, fOctetalign, fInterleaving,
				       fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "AMR-WB") == 0) { // AMR audio (wideband)
	fReadSource =
	  AMRAudioRTPSource::createNew(env(), fRTPSocket, fRTPSource,
				       fRTPPayloadFormat, 1 /*isWideband*/,
				       fNumChannels, fOctetalign, fInterleaving,
				       fRobustsorting, fCRC);
	// Note that fReadSource will differ from fRTPSource in this case
      } else if (strcmp(fCodecName, "MPA") == 0) { // MPEG-1 or 2 audio
	fReadSource = fRTPSource
	  = MPEG1or2AudioRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPA-ROBUST") == 0) { // robust MP3 audio
	fReadSource = fRTPSource
	  = MP3ADURTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency);
	if (fRTPSource == NULL) break;
	
	if (!fReceiveRawMP3ADUs) {
	  // Add a filter that deinterleaves the ADUs after depacketizing them:
	  MP3ADUdeinterleaver* deinterleaver
	    = MP3ADUdeinterleaver::createNew(env(), fRTPSource);
	  if (deinterleaver == NULL) break;
	
	  // Add another filter that converts these ADUs to MP3 frames:
	  fReadSource = MP3FromADUSource::createNew(env(), deinterleaver);
	}
      } else if (strcmp(fCodecName, "X-MP3-DRAFT-00") == 0) {
	// a non-standard variant of "MPA-ROBUST" used by RealNetworks
	// (one 'ADU'ized MP3 frame per packet; no headers)
	fRTPSource
	  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency,
				       "audio/MPA-ROBUST" /*hack*/);
	if (fRTPSource == NULL) break;
	
	// Add a filter that converts these ADUs to MP3 frames:
	fReadSource = MP3FromADUSource::createNew(env(), fRTPSource,
						  False /*no ADU header*/);
      } else if (strcmp(fCodecName, "MP4A-LATM") == 0) { // MPEG-4 LATM audio
	fReadSource = fRTPSource
	  = MPEG4LATMAudioRTPSource::createNew(env(), fRTPSocket,
					       fRTPPayloadFormat,
					       fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "VORBIS") == 0) { // Vorbis audio
	fReadSource = fRTPSource
	  = VorbisAudioRTPSource::createNew(env(), fRTPSocket,
					    fRTPPayloadFormat,
					    fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "VP8") == 0) { // VP8 video
	fReadSource = fRTPSource
	  = VP8VideoRTPSource::createNew(env(), fRTPSocket,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "AC3") == 0 || strcmp(fCodecName, "EAC3") == 0) { // AC3 audio
	fReadSource = fRTPSource
	  = AC3AudioRTPSource::createNew(env(), fRTPSocket,
					 fRTPPayloadFormat,
					 fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MP4V-ES") == 0) { // MPEG-4 Elementary Stream video
	fReadSource = fRTPSource
	  = MPEG4ESVideoRTPSource::createNew(env(), fRTPSocket,
					     fRTPPayloadFormat,
					     fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MPEG4-GENERIC") == 0) {
	fReadSource = fRTPSource
	  = MPEG4GenericRTPSource::createNew(env(), fRTPSocket,
					     fRTPPayloadFormat,
					     fRTPTimestampFrequency,
					     fMediumName, fMode,
					     fSizelength, fIndexlength,
					     fIndexdeltalength);
      } else if (strcmp(fCodecName, "MPV") == 0) { // MPEG-1 or 2 video
	fReadSource = fRTPSource
	  = MPEG1or2VideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "MP2T") == 0) { // MPEG-2 Transport Stream
	fRTPSource = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
						fRTPTimestampFrequency, "video/MP2T",
						0, False);
	fReadSource = MPEG2TransportStreamFramer::createNew(env(), fRTPSource);
	// this sets "durationInMicroseconds" correctly, based on the PCR values
      } else if (strcmp(fCodecName, "H261") == 0) { // H.261
	fReadSource = fRTPSource
	  = H261VideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "H263-1998") == 0 ||
		 strcmp(fCodecName, "H263-2000") == 0) { // H.263+
	fReadSource = fRTPSource
	  = H263plusVideoRTPSource::createNew(env(), fRTPSocket,
					      fRTPPayloadFormat,
					      fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "H264") == 0) {
	fReadSource = fRTPSource
	  = H264VideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "DV") == 0) {
	fReadSource = fRTPSource
	  = DVVideoRTPSource::createNew(env(), fRTPSocket,
					fRTPPayloadFormat,
					fRTPTimestampFrequency);
      } else if (strcmp(fCodecName, "JPEG") == 0) { // motion JPEG
	fReadSource = fRTPSource
	  = JPEGVideoRTPSource::createNew(env(), fRTPSocket,
					  fRTPPayloadFormat,
					  fRTPTimestampFrequency,
					  videoWidth(),
					  videoHeight());
      } else if (strcmp(fCodecName, "X-QT") == 0
		 || strcmp(fCodecName, "X-QUICKTIME") == 0) {
	// Generic QuickTime streams, as defined in
	// <http://developer.apple.com/quicktime/icefloe/dispatch026.html>
	char* mimeType
	  = new char[strlen(mediumName()) + strlen(codecName()) + 2] ;
	sprintf(mimeType, "%s/%s", mediumName(), codecName());
	fReadSource = fRTPSource
	  = QuickTimeGenericRTPSource::createNew(env(), fRTPSocket,
						 fRTPPayloadFormat,
						 fRTPTimestampFrequency,
						 mimeType);
	delete[] mimeType;
      } else if (  strcmp(fCodecName, "PCMU") == 0 // PCM u-law audio
		   || strcmp(fCodecName, "GSM") == 0 // GSM audio
		   || strcmp(fCodecName, "DVI4") == 0 // DVI4 (IMA ADPCM) audio
		   || strcmp(fCodecName, "PCMA") == 0 // PCM a-law audio
		   || strcmp(fCodecName, "MP1S") == 0 // MPEG-1 System Stream
		   || strcmp(fCodecName, "MP2P") == 0 // MPEG-2 Program Stream
		   || strcmp(fCodecName, "L8") == 0 // 8-bit linear audio
		   || strcmp(fCodecName, "L16") == 0 // 16-bit linear audio
		   || strcmp(fCodecName, "L20") == 0 // 20-bit linear audio (RFC 3190)
		   || strcmp(fCodecName, "L24") == 0 // 24-bit linear audio (RFC 3190)
		   || strcmp(fCodecName, "G726-16") == 0 // G.726, 16 kbps
		   || strcmp(fCodecName, "G726-24") == 0 // G.726, 24 kbps
		   || strcmp(fCodecName, "G726-32") == 0 // G.726, 32 kbps
		   || strcmp(fCodecName, "G726-40") == 0 // G.726, 40 kbps
		   || strcmp(fCodecName, "SPEEX") == 0 // SPEEX audio
		   || strcmp(fCodecName, "T140") == 0 // T.140 text (RFC 4103)
		   || strcmp(fCodecName, "DAT12") == 0 // 12-bit nonlinear audio (RFC 3190)
		   ) {
	createSimpleRTPSource = True;
	useSpecialRTPoffset = 0;
      } else if (useSpecialRTPoffset >= 0) {
	// We don't know this RTP payload format, but try to receive
	// it using a 'SimpleRTPSource' with the specified header offset:
	createSimpleRTPSource = True;
      } else {
	env().setResultMsg("RTP payload format unknown or not supported");
	break;
      }
      
      if (createSimpleRTPSource) {
	char* mimeType
	  = new char[strlen(mediumName()) + strlen(codecName()) + 2] ;
	sprintf(mimeType, "%s/%s", mediumName(), codecName());
	fReadSource = fRTPSource
	  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
				       fRTPTimestampFrequency, mimeType,
				       (unsigned)useSpecialRTPoffset,
				       doNormalMBitRule);
	delete[] mimeType;
      }
    }

    return True;
  } while (0);

  return False; // an error occurred
}
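
One design note on the mimeType pattern used above: the new[] buffer sized to strlen(mediumName()) + strlen(codecName()) + 2 is correct (the two extra bytes cover the '/' and the terminating NUL), but std::string avoids the manual size arithmetic entirely. A sketch; since the original delete[]s the buffer immediately after createNew(), live555 evidently copies the string, so a temporary is safe here too:

#include <string>

// Sketch: build the "medium/codec" MIME string without manual buffer sizing.
// mediumName() and codecName() are the same accessors used above.
std::string mimeType = std::string(mediumName()) + "/" + codecName();
fReadSource = fRTPSource
  = SimpleRTPSource::createNew(env(), fRTPSocket, fRTPPayloadFormat,
                               fRTPTimestampFrequency, mimeType.c_str(),
                               (unsigned)useSpecialRTPoffset, doNormalMBitRule);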
Example #5
const QString& JISX0201CharCodec::name() const
{
    return codecName();
}