void MatroskaFileParser::seekToEndOfFile() {
  ByteStreamFileSource* fileSource = (ByteStreamFileSource*)fInputSource; // we know it's a "ByteStreamFileSource"
  if (fileSource != NULL) {
    fileSource->seekToEnd();
    resetStateAfterSeeking();
  }
}
void MPEG1or2DemuxedServerMediaSubsession
::seekStreamSource(FramedSource* inputSource, double& seekNPT, double /*streamDuration*/, u_int64_t& /*numBytes*/) {
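  // Map the requested seek time (NPT, in seconds) onto an absolute byte position,
  // assuming a roughly constant bitrate across the file: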
  float const dur = duration();
  unsigned const size = fOurDemux.fileSize();
  unsigned absBytePosition = dur == 0.0 ? 0 : (unsigned)((seekNPT/dur)*size);

  // "inputSource" is a 'framer'
  // Flush its data, to account for the seek that we're about to do:
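  // (In an MPEG program stream, stream_ids of the form 0xCx carry MPEG audio and
  //  0xEx carry video, so masking with 0xF0 picks the matching framer type.)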
  if ((fStreamIdTag&0xF0) == 0xC0 /*MPEG audio*/) {
    MPEG1or2AudioStreamFramer* framer = (MPEG1or2AudioStreamFramer*)inputSource;
    framer->flushInput();
  } else if ((fStreamIdTag&0xF0) == 0xE0 /*video*/) {
    MPEG1or2VideoStreamFramer* framer = (MPEG1or2VideoStreamFramer*)inputSource;
    framer->flushInput();
  }

  // "inputSource" is a filter; its input source is the original elementary stream source:
  MPEG1or2DemuxedElementaryStream* elemStreamSource
    = (MPEG1or2DemuxedElementaryStream*)(((FramedFilter*)inputSource)->inputSource());

  // Next, get the original source demux:
  MPEG1or2Demux& sourceDemux = elemStreamSource->sourceDemux();

  // and flush its input buffers:
  sourceDemux.flushInput();

  // Then, get the original input file stream from the source demux:
  ByteStreamFileSource* inputFileSource
    = (ByteStreamFileSource*)(sourceDemux.inputSource());
  // Note: We can make that cast, because we know that the demux was originally
  // created from a "ByteStreamFileSource".

  // Do the appropriate seek within the input file stream:
  inputFileSource->seekToByteAbsolute(absBytePosition);
}
FramedSource* MPEG2TransportFileServerMediaSubsession
::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) {
  // Create the video source:
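  // (Read from the file in multiples of the 188-byte transport packet size, so that
  //  each chunk delivered downstream stays aligned on transport-packet boundaries.)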
  unsigned const inputDataChunkSize
    = TRANSPORT_PACKETS_PER_NETWORK_PACKET*TRANSPORT_PACKET_SIZE;
  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(envir(), fFileName, inputDataChunkSize);
  if (fileSource == NULL) return NULL;
  fFileSize = fileSource->fileSize();

  // Use the file size and the duration to estimate the stream's bitrate:
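  //   (bytes/second divided by 125 gives kilobits/second, since 1 kbit == 125 bytes)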
  if (fFileSize > 0 && fDuration > 0.0) {
    estBitrate = (unsigned)((int64_t)fFileSize/(125*fDuration) + 0.5); // kbps, rounded
  } else {
    estBitrate = 5000; // kbps, estimate
  }

  // Create a framer for the Transport Stream:
  MPEG2TransportStreamFramer* framer
    = MPEG2TransportStreamFramer::createNew(envir(), fileSource);

  if (fIndexFile != NULL) { // we support 'trick play'
    // Keep state for this client (if we don't already have it):
    ClientTrickPlayState* client = lookupClient(clientSessionId);
    if (client == NULL) {
      client = newClientTrickPlayState();
      fClientSessionHashTable->Add((char const*)clientSessionId, client);
    }
    client->setSource(framer);
  }

  return framer;
}
void MatroskaFileParser::seekToFilePosition(u_int64_t offsetInFile) {
  ByteStreamFileSource* fileSource = (ByteStreamFileSource*)fInputSource; // we know it's a "ByteStreamFileSource"
  if (fileSource != NULL) {
    fileSource->seekToByteAbsolute(offsetInFile);
    resetStateAfterSeeking();
  }
}
void MPEG2TransportStreamTrickModeFilter::seekToTransportPacket(unsigned long tsPacketNum) {
  if (tsPacketNum == fNextTSPacketNum) return; // we're already there

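  // Each transport packet is TRANSPORT_PACKET_SIZE (188) bytes, so the target byte
  // offset is simply the packet number times the packet size: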
  ByteStreamFileSource* tsFile = (ByteStreamFileSource*)fInputSource;
  u_int64_t tsPacketNum64 = (u_int64_t)tsPacketNum;
  tsFile->seekToByteAbsolute(tsPacketNum64*TRANSPORT_PACKET_SIZE);

  fNextTSPacketNum = tsPacketNum;
}
FramedSource* H265VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = 500; // kbps, estimate

  // Create the video source:
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName);
  if (fileSource == NULL) return NULL;
  fFileSize = fileSource->fileSize();

  // Create a framer for the Video Elementary Stream:
  return H265VideoStreamFramer::createNew(envir(), fileSource);
}
void DVVideoFileServerMediaSubsession::seekStreamSource(FramedSource* inputSource, double seekNPT) {
  // First, get the file source from "inputSource" (a framer):
  DVVideoStreamFramer* framer = (DVVideoStreamFramer*)inputSource;
  ByteStreamFileSource* fileSource = (ByteStreamFileSource*)(framer->inputSource());

  // Then figure out where to seek to within the file:
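  // (DV frames are a fixed size, so NPT maps linearly onto a byte offset.)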
  if (fFileDuration > 0.0) {
    u_int64_t seekByteNumber = (u_int64_t)(((int64_t)fFileSize*seekNPT)/fFileDuration);
    fileSource->seekToByteAbsolute(seekByteNumber);
  }
}
void DVVideoFileServerMediaSubsession
::setStreamSourceDuration(FramedSource* inputSource, double streamDuration, u_int64_t& numBytes) {
  // First, get the file source from "inputSource" (a framer):
  DVVideoStreamFramer* framer = (DVVideoStreamFramer*)inputSource;
  ByteStreamFileSource* fileSource = (ByteStreamFileSource*)(framer->inputSource());

  // Then figure out how many bytes to limit the streaming to:
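  // ("seekToByteRelative(0, numBytes)" keeps the current read position, but limits
  //  subsequent streaming to "numBytes".)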
  if (fFileDuration > 0.0) {
    numBytes = (u_int64_t)(((int64_t)fFileSize*streamDuration)/fFileDuration);
    fileSource->seekToByteRelative(0, numBytes);
  }
}
FramedSource* MPEG1or2VideoFileServerMediaSubsession
::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = 500; // kbps, estimate

  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(envir(), fFileName);
  if (fileSource == NULL) return NULL;
  fFileSize = fileSource->fileSize();

  return MPEG1or2VideoStreamFramer
    ::createNew(envir(), fileSource, fIFramesOnly, fVSHPeriod);
}
static float MPEG1or2ProgramStreamFileDuration(UsageEnvironment& env,
					       char const* fileName,
					       unsigned& fileSize) {
  FramedSource* dataSource = NULL;
  float duration = 0.0; // until we learn otherwise
  fileSize = 0; // ditto

  do {
    // Open the input file as a 'byte-stream file source':
    ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(env, fileName);
    if (fileSource == NULL) break;
    dataSource = fileSource;

    fileSize = (unsigned)(fileSource->fileSize());
    if (fileSize == 0) break;

    // Create an MPEG demultiplexor that reads from that source.
    MPEG1or2Demux* baseDemux = MPEG1or2Demux::createNew(env, dataSource, True);
    if (baseDemux == NULL) break;

    // Create, from this, a source that returns raw PES packets:
    dataSource = baseDemux->newRawPESStream();

    // Read the first time code from the file:
    float firstTimeCode;
    if (!getMPEG1or2TimeCode(dataSource, *baseDemux, True, firstTimeCode)) break;

    // Then, read the last time code from the file.
    // (Before doing this, flush the demux's input buffers,
    //  and seek towards the end of the file, for efficiency.)
    baseDemux->flushInput();
    unsigned const startByteFromEnd = 100000;
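    // (100000 bytes from the end is assumed to be enough to contain at least one
    //  time code; only the last time code is needed, so skip the rest of the file.)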
    unsigned newFilePosition
      = fileSize < startByteFromEnd ? 0 : fileSize - startByteFromEnd;
    if (newFilePosition > 0) fileSource->seekToByteAbsolute(newFilePosition);

    float lastTimeCode;
    if (!getMPEG1or2TimeCode(dataSource, *baseDemux, False, lastTimeCode)) break;

    // Take the difference between these time codes as being the file duration:
    float timeCodeDiff = lastTimeCode - firstTimeCode;
    if (timeCodeDiff < 0) break;
    duration = timeCodeDiff;
  } while (0);

  Medium::close(dataSource);
  return duration;
}
FramedSource* DVVideoFileServerMediaSubsession
::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  // Create the video source:
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName);
  if (fileSource == NULL) return NULL;
  fFileSize = fileSource->fileSize();

  // Create a framer for the Video Elementary Stream:
  DVVideoStreamFramer* framer = DVVideoStreamFramer::createNew(envir(), fileSource, True/*the file source is seekable*/);
  
  // Use the framer to figure out the file's duration:
  unsigned frameSize;
  double frameDuration;
  if (framer->getFrameParameters(frameSize, frameDuration)) {
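    // "frameDuration" is in microseconds: fFileSize/frameSize is the number of frames,
    // and (8000.0*frameSize)/frameDuration converts bytes-per-frame into kbits/second.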
    fFileDuration = (float)(((int64_t)fFileSize*frameDuration)/(frameSize*1000000.0));
    estBitrate = (unsigned)((8000.0*frameSize)/frameDuration); // in kbps
  } else {
    estBitrate = 50000; // kbps, estimate
  }

  return framer;
}
void ClientTrickPlayState::reseekOriginalTransportStreamSource()
{
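    // "fTSRecordNum" is the transport-packet number within the original Transport
    // Stream file; scale it by the 188-byte packet size to get a byte offset: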
    u_int64_t tsRecordNum64 = (u_int64_t)fTSRecordNum;
    fOriginalTransportStreamSource->seekToByteAbsolute(tsRecordNum64 * TRANSPORT_PACKET_SIZE);
}
FramedSource* H264VideoFileServerMediaSubsession
::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)
{
    estBitrate = 500; // 500 kbps, estimate
    int streamId = -1;
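    // File names "live_stream1".."live_stream4" select a live encoder stream
    // (identified by "streamId") rather than a file on disk: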
    if (strncmp(fFileName, "live_stream1", 12) == 0)
    {
        streamId = 0;
    }
    else if (strncmp(fFileName, "live_stream2", 12) == 0)
    {
        streamId = 1;
    }
    else if (strncmp(fFileName, "live_stream3", 12) == 0)
    {
        streamId = 2;
    }
    else if (strncmp(fFileName, "live_stream4", 12) == 0)
    {
        streamId = 3;
    }
    else
    {
#if defined( USE_V3_3_CODE )
        // Create the video source:
        ByteStreamFileSource* fileSource
            = ByteStreamFileSource::createNew(envir(), fFileName);
        if (fileSource == NULL) return NULL;
        fFileSize = fileSource->fileSize();

        // Create a framer for the Video Elementary Stream:
        if (fEncType == IAV_ENCODE_H264)
        {
            return MyH264VideoStreamFramer::createNew(envir(), fileSource);
        }
        else if (fEncType == IAV_ENCODE_MJPEG)
        {
            return NULL; // not implemented
        }
        else
        {
            return NULL;
        }
#else
        // Create the video source:
        ByteStreamFileSource* fileSource
            = ByteStreamFileSource::createNew(envir(), fFileName);
        if (fileSource == NULL) return NULL;
        fFileSize = fileSource->fileSize();

        return MyH264VideoStreamFramer::createNew(envir(), fileSource);
#endif
    }

#if defined( USE_V3_3_CODE )
    if (fEncType == IAV_ENCODE_H264)
    {
        return MyH264VideoStreamFramer::createNew(envir(), streamId);
    }
    else if (fEncType == IAV_ENCODE_MJPEG)
    {
        int jpegQuality = getJpegQ(streamId);
        if ( jpegQuality < 0)
        {
            return NULL;
        }
        return MyJPEGVideoSource::createNew(envir(), streamId, jpegQuality);
    }
    else
    {
        return NULL;
    }
#else
    return MyH264VideoStreamFramer::createNew( envir(), streamId, fIPCMediaDataDispatchServerPort, fIPCMediaDataDispatchClientPort );
#endif
}