int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Open the input file as a 'byte-stream file source':
  FramedSource* inputSource = ByteStreamFileSource::createNew(*env, inputFileName);
  if (inputSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
	 << "\" as a byte-stream file source\n";
    exit(1);
  }

  // Create a 'framer' filter for this file source, to generate presentation times for each NAL unit:
  H265VideoStreamFramer* framer = H265VideoStreamFramer::createNew(*env, inputSource, True/*includeStartCodeInOutput*/);

  // Then create a filter that packs the H.265 video data into a Transport Stream:
  MPEG2TransportStreamFromESSource* tsFrames = MPEG2TransportStreamFromESSource::createNew(*env);
  tsFrames->addNewVideoSource(framer, 6/*mpegVersion: H.265*/);
  
  // Open the output file as a 'file sink':
  MediaSink* outputSink = FileSink::createNew(*env, outputFileName);
  if (outputSink == NULL) {
    *env << "Unable to open file \"" << outputFileName << "\" as a file sink\n";
    exit(1);
  }

  // Finally, start playing:
  *env << "Beginning to read...\n";
  outputSink->startPlaying(*tsFrames, afterPlaying, NULL);

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
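
The program above relies on a handful of file-scope declarations and on an 'afterPlaying()' completion handler that are not shown in the excerpt. A minimal sketch of that surrounding context, where the default file names and the handler body are assumptions rather than part of the excerpt, looks like this:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

UsageEnvironment* env;
char const* inputFileName = "in.265";   // assumed default input file name
char const* outputFileName = "out.ts";  // assumed default output file name

void afterPlaying(void* /*clientData*/) {
  // Called by the sink once the whole input has been consumed; a typical
  // handler just reports completion and exits:
  *env << "...done reading from file\n";
  exit(0);
}
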
void ClientTrickPlayState::updateStateOnScaleChange()
{
    fScale = fNextScale;

    // Change our source objects to reflect the change in scale:
    // First, close the existing trick play source (if any):
    if (fTrickPlaySource != NULL)
    {
        fTrickModeFilter->forgetInputSource();
        // so that the underlying Transport Stream source doesn't get deleted by
        // the following "Medium::close()" call:
        Medium::close(fTrickPlaySource);
        fTrickPlaySource = NULL;
        fTrickModeFilter = NULL;
    }
    if (fNextScale != 1.0f)
    {
        // Create a new trick play filter from the original Transport Stream source:
        UsageEnvironment &env = fIndexFile->envir(); // alias
        fTrickModeFilter = MPEG2TransportStreamTrickModeFilter
                           ::createNew(env, fOriginalTransportStreamSource, fIndexFile, int(fNextScale));
        fTrickModeFilter->seekTo(fTSRecordNum, fIxRecordNum);

        // And generate a Transport Stream from this:
        fTrickPlaySource = MPEG2TransportStreamFromESSource::createNew(env);
        fTrickPlaySource->addNewVideoSource(fTrickModeFilter, fIndexFile->mpegVersion());

        fFramer->changeInputSource(fTrickPlaySource);
    }
    else
    {
        // Switch back to the original Transport Stream source:
        reseekOriginalTransportStreamSource();
        fFramer->changeInputSource(fOriginalTransportStreamSource);
    }
}
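
For orientation, the member variables used in updateStateOnScaleChange() can be assumed to be declared roughly as follows. The types below are inferred from the calls in the excerpt (and marked where they are only guesses); this is a sketch, not the actual class definition:

class ClientTrickPlayState {
  // ... (constructor and other members omitted)
private:
  float fScale, fNextScale;                  // current and requested playback speed
  unsigned long fTSRecordNum, fIxRecordNum;  // current positions in the .ts and index files
  MPEG2TransportStreamIndexFile* fIndexFile;
  FramedSource* fOriginalTransportStreamSource;           // assumed base type
  MPEG2TransportStreamTrickModeFilter* fTrickModeFilter;
  MPEG2TransportStreamFromESSource* fTrickPlaySource;
  MPEG2TransportStreamFramer* fFramer;                    // assumed type
  void reseekOriginalTransportStreamSource();
};
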
int main(int argc, char const** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Parse the command line:
  programName = argv[0];
  if (argc != 5) usage();

  char const* inputFileName = argv[1];
  // Check whether the input file name ends with ".ts":
  int len = strlen(inputFileName);
  if (len < 4 || strcmp(&inputFileName[len-3], ".ts") != 0) {
    *env << "ERROR: input file name \"" << inputFileName
	 << "\" does not end with \".ts\"\n";
    usage();
  }

  // Parse the <start-time> and <scale> parameters:
  float startTime;
  if (sscanf(argv[2], "%f", &startTime) != 1 || startTime < 0.0f) usage();

  int scale;
  if (sscanf(argv[3], "%d", &scale) != 1 || scale == 0) usage();

  // Open the input file (as a 'byte stream file source'):
  FramedSource* input
    = ByteStreamFileSource::createNew(*env, inputFileName, TRANSPORT_PACKET_SIZE);
  if (input == NULL) {
    *env << "Failed to open input file \"" << inputFileName << "\" (does it exist?)\n";
    exit(1);
  }

  // Check whether the corresponding index file exists.
  // The index file name is the same as the input file name, except with suffix ".tsx":
  char* indexFileName = new char[len+2]; // allow for trailing x\0
  sprintf(indexFileName, "%sx", inputFileName);
  MPEG2TransportStreamIndexFile* indexFile
    = MPEG2TransportStreamIndexFile::createNew(*env, indexFileName);
  if (indexFile == NULL) {
    *env << "Failed to open index file \"" << indexFileName << "\" (does it exist?)\n";
    exit(1);
  }

  // Create a filter that generates trick mode data from the input and index files:
  MPEG2TransportStreamTrickModeFilter* trickModeFilter
    = MPEG2TransportStreamTrickModeFilter::createNew(*env, input, indexFile, scale);

  if (startTime > 0.0f) {
    // Seek the input Transport Stream and Index files to the specified start time:
    unsigned long tsRecordNumber, indexRecordNumber;
    indexFile->lookupTSPacketNumFromNPT(startTime, tsRecordNumber, indexRecordNumber);
    if (!trickModeFilter->seekTo(tsRecordNumber, indexRecordNumber)) { // seek failed; shouldn't normally happen
      *env << "Failed to seek trick mode filter to ts #" << (unsigned)tsRecordNumber
	   << ", ix #" << (unsigned)indexRecordNumber
	   << "(for time " << startTime << ")\n";
      exit(1);
    }
  }

  // Generate a new Transport Stream from the Trick Mode filter:
  MPEG2TransportStreamFromESSource* newTransportStream
    = MPEG2TransportStreamFromESSource::createNew(*env);
  newTransportStream->addNewVideoSource(trickModeFilter, indexFile->mpegVersion());

  // Open the output file (for writing), as a 'file sink':
  char const* outputFileName = argv[4];
  MediaSink* output = FileSink::createNew(*env, outputFileName);
  if (output == NULL) {
    *env << "Failed to open output file \"" << outputFileName << "\"\n";
    exit(1);
  }

  // Start playing, to generate the output file:
  *env << "Writing output file \"" << outputFileName
       << "\" (start time " << startTime
       << ", scale " << scale
       << ")...";
  output->startPlaying(*newTransportStream, afterPlaying, NULL);

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
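
As in the first example, this program assumes some file-scope context that the excerpt does not show: the usage environment, the program name, the Transport Stream packet size, a usage() helper, and an afterPlaying() handler. A hedged sketch of those declarations (the exact usage message is an assumption):

#define TRANSPORT_PACKET_SIZE 188  // size of one MPEG Transport Stream packet, in bytes

UsageEnvironment* env;
char const* programName;

void afterPlaying(void* clientData);  // completion handler, defined elsewhere

void usage() {
  // Assumed wording; prints how to invoke the program, then exits:
  *env << "usage: " << programName
       << " <input-ts-file> <start-time> <scale> <output-ts-file>\n";
  exit(1);
}
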
Example #4: injecting audio and video streams into a Darwin Streaming Server (via a 'DarwinInjector')
void setupDarwinStreaming(UsageEnvironment& env, WISInput& inputDevice) {
  // Create a 'Darwin injector' object:
  injector = DarwinInjector::createNew(env, applicationName);

  // For RTCP:
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen + 1];
  gethostname((char *) CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0';      // just in case

  /******************audio***********************/
  if (audioFormat != AFMT_NONE) {
    // Create the audio source:
    sourceAudio = createAudioSource(env, inputDevice.audioSource());

    if (packageFormat != PFMT_TRANSPORT_STREAM) { // there's a separate RTP stream for audio
      // Create 'groupsocks' for RTP and RTCP.
      // (Note: Because we will actually be streaming through a remote Darwin server,
      // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
      struct in_addr dummyDestAddress;
      dummyDestAddress.s_addr = 0;
      rtpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      rtcpGroupsockAudio = new Groupsock(env, dummyDestAddress, 0, 0);
      
      // Create a RTP sink for the audio stream:
      sinkAudio = createAudioRTPSink(env, rtpGroupsockAudio);

      // Create (and start) a 'RTCP instance' for this RTP sink:
      unsigned totalSessionBandwidthAudio = (audioOutputBitrate+500)/1000; // in kbps; for RTCP b/w share
      rtcpAudio = RTCPInstance::createNew(env, rtcpGroupsockAudio,
					  totalSessionBandwidthAudio, CNAME,
					  sinkAudio, NULL /* we're a server */);
          // Note: This starts RTCP running automatically

      // Add these to our 'Darwin injector':
      injector->addStream(sinkAudio, rtcpAudio);
    }
  }
  /******************end audio***********************/

  /******************video***********************/
  if (videoFormat != VFMT_NONE) {
    // Create the video source:
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      MPEG2TransportStreamFromESSource* tsSource
	= MPEG2TransportStreamFromESSource::createNew(env);
      tsSource->addNewVideoSource(inputDevice.videoSource(), 2);
      if (sourceAudio != NULL) tsSource->addNewAudioSource(sourceAudio, 2);
      // Gather the Transport packets into network packet-sized chunks:
      sourceVideo = MPEG2TransportStreamAccumulator::createNew(env, tsSource);
      sourceAudio = NULL;
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sourceVideo = WISJPEGStreamSource::createNew(inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sourceVideo = MPEG1or2VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      case VFMT_MPEG4: {
	sourceVideo = MPEG4VideoStreamDiscreteFramer::createNew(env, inputDevice.videoSource());
	break;
      }
      }
    }

    // Create 'groupsocks' for RTP and RTCP.
    // (Note: Because we will actually be streaming through a remote Darwin server,
    // via TCP, we just use dummy destination addresses, port numbers, and TTLs here.)
    struct in_addr dummyDestAddress;
    dummyDestAddress.s_addr = 0;
    rtpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);
    rtcpGroupsockVideo = new Groupsock(env, dummyDestAddress, 0, 0);

    // Create a RTP sink for the video stream:
    unsigned char payloadFormatCode = 97; // if dynamic
    setVideoRTPSinkBufferSize();
    if (packageFormat == PFMT_TRANSPORT_STREAM) {
      sinkVideo = SimpleRTPSink::createNew(env, rtpGroupsockVideo,
					   33, 90000, "video", "mp2t",
					   1, True, False/*no 'M' bit*/);
    } else {
      switch (videoFormat) {
      case VFMT_NONE: // not used
	break;
      case VFMT_MJPEG: {
	sinkVideo = JPEGVideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG1:
      case VFMT_MPEG2: {
	sinkVideo = MPEG1or2VideoRTPSink::createNew(env, rtpGroupsockVideo);
	break;
      }
      case VFMT_MPEG4: {
	sinkVideo = MPEG4ESVideoRTPSink::createNew(env, rtpGroupsockVideo, payloadFormatCode);
	break;
      }
      }
    }

    // Create (and start) a 'RTCP instance' for this RTP sink:
    unsigned totalSessionBandwidthVideo = (videoBitrate+500)/1000; // in kbps; for RTCP b/w share
    rtcpVideo = RTCPInstance::createNew(env, rtcpGroupsockVideo,
					totalSessionBandwidthVideo, CNAME,
					sinkVideo, NULL /* we're a server */);
        // Note: This starts RTCP running automatically

    // Add these to our 'Darwin injector':
    injector->addStream(sinkVideo, rtcpVideo);
  }
  /******************end video***********************/

  // Next, specify the destination Darwin Streaming Server:
  char const* remoteStreamName = "test.sdp"; // the name under which the stream will be served by the Darwin server
  if (!injector->setDestination(remoteDSSNameOrAddress, remoteStreamName,
                                applicationName, "LIVE555 Streaming Media")) {
    env << "Failed to connect to remote Darwin Streaming Server: " << env.getResultMsg() << "\n";
    exit(1);
  }

  env << "Play this stream (from the Darwin Streaming Server) using the URL:\n"
       << "\trtsp://" << remoteDSSNameOrAddress << "/" << remoteStreamName << "\n";

}
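
Note that setupDarwinStreaming() only wires the sources, sinks, and RTCP instances into the injector; the data flow itself still has to be started afterwards. A minimal sketch of that step, assuming the global sink/source pointers set above and no completion callbacks, might be:

void startDarwinStreaming() {
  // Begin pushing frames from each source into its RTP sink; the 'DarwinInjector'
  // relays the resulting RTP/RTCP packets to the remote Darwin Streaming Server.
  if (sinkVideo != NULL && sourceVideo != NULL) {
    sinkVideo->startPlaying(*sourceVideo, NULL, NULL);
  }
  if (sinkAudio != NULL && sourceAudio != NULL) {
    sinkAudio->startPlaying(*sourceAudio, NULL, NULL);
  }
}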