// Note: We should change the following to use asynchronous file reading, ##### // as we now do with ByteStreamFileSource. ##### void AMRAudioFileSource::doGetNextFrame() { if (feof(fFid) || ferror(fFid)) { handleClosure(this); return; } // Begin by reading the 1-byte frame header (and checking it for validity) while (1) { if (fread(&fLastFrameHeader, 1, 1, fFid) < 1) { handleClosure(this); return; } if ((fLastFrameHeader&0x83) != 0) { #ifdef DEBUG fprintf(stderr, "Invalid frame header 0x%02x (padding bits (0x83) are not zero)\n", fLastFrameHeader); #endif } else { unsigned char ft = (fLastFrameHeader&0x78)>>3; fFrameSize = fIsWideband ? frameSizeWideband[ft] : frameSize[ft]; if (fFrameSize == FT_INVALID) { #ifdef DEBUG fprintf(stderr, "Invalid FT field %d (from frame header 0x%02x)\n", ft, fLastFrameHeader); #endif } else { // The frame header is OK #ifdef DEBUG fprintf(stderr, "Valid frame header 0x%02x -> ft %d -> frame size %d\n", fLastFrameHeader, ft, fFrameSize); #endif break; } } } // Next, read the frame-block into the buffer provided: fFrameSize *= fNumChannels; // because multiple channels make up a frame-block if (fFrameSize > fMaxSize) { fNumTruncatedBytes = fFrameSize - fMaxSize; fFrameSize = fMaxSize; } fFrameSize = fread(fTo, 1, fFrameSize, fFid); // Set the 'presentation time': if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { // This is the first frame, so use the current time: gettimeofday(&fPresentationTime, NULL); } else { // Increment by the play time of the previous frame (20 ms) unsigned uSeconds = fPresentationTime.tv_usec + 20000; fPresentationTime.tv_sec += uSeconds/1000000; fPresentationTime.tv_usec = uSeconds%1000000; } fDurationInMicroseconds = 20000; // each frame is 20 ms // Switch to another task, and inform the reader that he has data: nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); }
// Continuation after the input source delivered "frameSize" new bytes into
// the client's buffer (appended after any bytes kept from a previous read).
// Trims the data to whole TS packets, re-aligns to the 0x47 sync byte if
// needed, updates the per-packet duration estimate, then completes delivery.
void MPEG2TransportStreamFramer::afterGettingFrame1(unsigned frameSize, struct timeval presentationTime) {
  fFrameSize += frameSize; // total bytes now in the client's buffer
  unsigned const numTSPackets = fFrameSize/TRANSPORT_PACKET_SIZE;
  fNumTSPacketsToStream -= numTSPackets; // count against any streaming quota
  fFrameSize = numTSPackets*TRANSPORT_PACKET_SIZE; // an integral # of TS packets
  if (fFrameSize == 0) {
    // We didn't read a complete TS packet; assume that the input source has closed.
    handleClosure();
    return;
  }

  // Make sure the data begins with a sync byte:
  unsigned syncBytePosition;
  for (syncBytePosition = 0; syncBytePosition < fFrameSize; ++syncBytePosition) {
    if (fTo[syncBytePosition] == TRANSPORT_SYNC_BYTE) break;
  }
  if (syncBytePosition == fFrameSize) {
    // No sync byte anywhere in the data: the input isn't a Transport Stream.
    envir() << "No Transport Stream sync byte in data.";
    handleClosure();
    return;
  } else if (syncBytePosition > 0) {
    // There's a sync byte, but not at the start of the data.  Move the good data
    // to the start of the buffer, then read more to fill it up again:
    memmove(fTo, &fTo[syncBytePosition], fFrameSize - syncBytePosition);
    fFrameSize -= syncBytePosition;
    fInputSource->getNextFrame(&fTo[fFrameSize], syncBytePosition,
                               afterGettingFrame, this,
                               FramedSource::handleClosure, this);
    return;
  } // else normal case: the data begins with a sync byte

  fPresentationTime = presentationTime;

  // Scan through the TS packets that we read, and update our estimate of
  // the duration of each packet:
  struct timeval tvNow;
  gettimeofday(&tvNow, NULL);
  double timeNow = tvNow.tv_sec + tvNow.tv_usec/1000000.0;
  for (unsigned i = 0; i < numTSPackets; ++i) {
    if (!updateTSPacketDurationEstimate(&fTo[i*TRANSPORT_PACKET_SIZE], timeNow)) {
      // We hit a preset limit (based on PCR) within the stream.  Handle this as if the input source has closed:
      handleClosure();
      return;
    }
  }

  fDurationInMicroseconds
    = numTSPackets * (unsigned)(fTSPacketDurationEstimate*1000000);

  // Complete the delivery to our client:
  afterGetting(this);
}
void ByteStreamMultiFileSource::doGetNextFrame() { do { // First, check whether we've run out of sources: if (fCurrentlyReadSourceNumber >= fNumSources) break; fHaveStartedNewFile = False; ByteStreamFileSource*& source = fSourceArray[fCurrentlyReadSourceNumber]; if (source == NULL) { // The current source hasn't been created yet. Do this now: source = ByteStreamFileSource::createNew(envir(), fFileNameArray[fCurrentlyReadSourceNumber], fPreferredFrameSize, fPlayTimePerFrame); if (source == NULL) break; fHaveStartedNewFile = True; } // (Attempt to) read from the current source. source->getNextFrame(fTo, fMaxSize, afterGettingFrame, this, onSourceClosure, this); return; } while (0); // An error occurred; consider ourselves closed: handleClosure(); }
// Performs the actual file read into the client's buffer, sets the
// presentation time / duration, and hands the data to the reader.  Called
// either directly (synchronous builds) or from the event loop when the file
// descriptor becomes readable.
void ByteStreamFileSource::doReadFromFile() {
  // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
  if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
    fMaxSize = (unsigned)fNumBytesToStream; // don't read past the streaming byte limit
  }
  if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  fFrameSize = fread(fTo, 1, fMaxSize, fFid);
#else
  if (fFidIsSeekable) {
    fFrameSize = fread(fTo, 1, fMaxSize, fFid);
  } else {
    // For non-seekable files (e.g., pipes), call "read()" rather than "fread()", to ensure that the read doesn't block:
    fFrameSize = read(fileno(fFid), fTo, fMaxSize);
  }
#endif
  if (fFrameSize == 0) {
    // Nothing was read: treat as end-of-stream.
    handleClosure();
    return;
  }
  fNumBytesToStream -= fFrameSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }
    // Remember the play time of this data (scaled for a partial frame):
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  // To avoid possible infinite recursion, we need to return to the event loop to do this:
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)FramedSource::afterGetting, this);
#else
  // Because the file read was done from the event loop, we can call the
  // 'after getting' function directly, without risk of infinite recursion:
  FramedSource::afterGetting(this);
#endif
}
void ByteStreamLiveSource::doGetNextFrame() { if (fLimitNumBytesToStream && fNumBytesToStream == 0) { handleClosure(); return; } doReadFromBuffer(); }
// Services one raw1394 event.  We stop watching the descriptor while we
// handle it; a failed loop iteration is treated as device closure.
void firewire_source::poll() {
  int const fd = raw1394_get_fd(handle_.get());
  envir().taskScheduler().turnOffBackgroundReadHandling(fd);

  if (raw1394_loop_iterate(handle_.get()) >= 0) {
    FramedSource::afterGetting(this);
  } else {
    handleClosure(this);
  }
}
//从文件中读取fMaxSize个字节到fTo void ByteStreamFileSource::doReadFromFile() { // Try to read as many bytes as will fit in the buffer provided // (or "fPreferredFrameSize" if less) if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) { fMaxSize = fPreferredFrameSize; } DEBUG_LOG(INF, "Read file: start = %ld, size= %u; Write to %p", ftell(fFid), fMaxSize, fTo); fFrameSize = fread(fTo, 1, fMaxSize, fFid);//fread(buffer,size,count,fp); if (fFrameSize == 0) { handleClosure(this); return; } // 设置图像时间,Set the 'presentation time': if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) { if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) { // fPresentationTime为零,表示这是第一帧,This is the first frame, so use the current time: gettimeofday(&fPresentationTime, NULL); } else { // Increment by the play time of the previous data: // 根据上次的fPresentationTime和上次的持续时间计算新的fPresentationTime unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime; fPresentationTime.tv_sec += uSeconds/1000000; fPresentationTime.tv_usec = uSeconds%1000000; } // Remember the play time of this data: fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize; fDurationInMicroseconds = fLastPlayTime; } else { // We don't know a specific play time duration for this data, // so just record the current time as being the 'presentation time': gettimeofday(&fPresentationTime, NULL); } // Inform the reader that he has data: #ifdef READ_FROM_FILES_SYNCHRONOUSLY // To avoid possible infinite recursion, we need to return to the event loop to do this: nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); #else // Because the file read was done from the event loop, we can call the // 'after getting' function directly, without risk of infinite recursion: FramedSource::afterGetting(this); #endif }
void DefaultSource::doGetNextFrame() { auto const CODE = _reader->read(fTo, fMaxSize, &fFrameSize, &fPresentationTime); if (isFailure(CODE)) { handleClosure(); return; } // Inform the reader that he has data: // To avoid possible infinite recursion, we need to return to the event loop to do this: nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); }
void WindowsAudioInputDevice_common::onceAudioIsReady() { fFrameSize = readFromBuffers(fTo, fMaxSize, fPresentationTime); if (fFrameSize == 0) { // The source is no longer readable handleClosure(this); return; } fDurationInMicroseconds = 1000000/fSamplingFrequency; // Call our own 'after getting' function. Because we sometimes get here // after returning from a delay, we can call this directly, without risking // infinite recursion afterGetting(this); }
void GAVideoLiveSource ::doGetNextFrame() { // This function is called (by our 'downstream' object) when it asks for new data. // Note: If, for some reason, the source device stops being readable (e.g., it gets closed), then you do the following: if (0 /* the source stops being readable */ /*%%% TO BE WRITTEN %%%*/) { handleClosure(NULL); return; } // If a new frame of data is immediately available to be delivered, then do this now: if (encoder_pktqueue_size(this->channelId) > 0) { deliverFrame(); } // No new data is immediately available to be delivered. We don't do anything more here. // Instead, our event trigger must be called (e.g., from a separate thread) when new data becomes available. }
void ADUFromMP3Source::doGetNextFrame() { if (!fAreEnqueueingMP3Frame) { // Arrange to enqueue a new MP3 frame: fTotalDataSizeBeforePreviousRead = fSegments->totalDataSize(); fAreEnqueueingMP3Frame = True; fSegments->enqueueNewSegment(fInputSource, this); } else { // Deliver an ADU from a previously-read MP3 frame: fAreEnqueueingMP3Frame = False; if (!doGetNextFrame1()) { // An internal error occurred; act as if our source went away: handleClosure(); } } }
// Delivers the next chunk from our in-memory buffer, honoring the preferred
// frame size and any streaming byte limit, then completes delivery directly.
void ByteStreamMemoryBufferSource::doGetNextFrame() {
  // Closed once we've consumed the whole buffer, or hit the byte limit:
  if (fCurIndex >= fBufferSize || (fLimitNumBytesToStream && fNumBytesToStream == 0)) {
    handleClosure();
    return;
  }

  // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
  fFrameSize = fMaxSize;
  if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fFrameSize) {
    fFrameSize = (unsigned)fNumBytesToStream; // clamp to the streaming limit
  }
  if (fPreferredFrameSize > 0 && fPreferredFrameSize < fFrameSize) {
    fFrameSize = fPreferredFrameSize;
  }
  if (fCurIndex + fFrameSize > fBufferSize) {
    fFrameSize = (unsigned)(fBufferSize - fCurIndex); // clamp to what's left
  }

  memmove(fTo, &fBuffer[fCurIndex], fFrameSize);
  fCurIndex += fFrameSize;
  fNumBytesToStream -= fFrameSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }
    // Remember the play time of this data (scaled for a partial frame):
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Inform the downstream object that it has data:
  // (a direct call is safe here because no blocking read was involved)
  FramedSource::afterGetting(this);
}
void MPEG2TransportStreamFramer::doGetNextFrame() { if (fLimitNumTSPacketsToStream) { if (fNumTSPacketsToStream == 0) { handleClosure(); return; } if (fNumTSPacketsToStream*TRANSPORT_PACKET_SIZE < fMaxSize) { fMaxSize = fNumTSPacketsToStream*TRANSPORT_PACKET_SIZE; } } // Read directly from our input source into our client's buffer: fFrameSize = 0; fInputSource->getNextFrame(fTo, fMaxSize, afterGettingFrame, this, FramedSource::handleClosure, this); }
void ByteStreamFileSource::doGetNextFrame() { if (feof(fFid) || ferror(fFid)) { handleClosure(this); return; } #ifdef READ_FROM_FILES_SYNCHRONOUSLY doReadFromFile(); #else if (!fHaveStartedReading) { // Await readable data from the file: envir().taskScheduler().turnOnBackgroundReadHandling(fileno(fFid), (TaskScheduler::BackgroundHandlerProc*)&fileReadableHandler, this); fHaveStartedReading = True; } #endif }
void MP3ADUTranscoder::afterGettingFrame1(unsigned numBytesRead, unsigned numTruncatedBytes, struct timeval presentationTime, unsigned durationInMicroseconds) { fNumTruncatedBytes = numTruncatedBytes; // but can we handle this being >0? ##### fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; fFrameSize = TranscodeMP3ADU(fOrigADU, numBytesRead, fOutBitrate, fTo, fMaxSize, fAvailableBytesForBackpointer); if (fFrameSize == 0) { // internal error - bad ADU data? handleClosure(); return; } // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. afterGetting(this); }
void WAVAudioFileSource::doGetNextFrame() { if (feof(fFid) || ferror(fFid) || (fLimitNumBytesToStream && fNumBytesToStream == 0)) { handleClosure(); return; } fFrameSize = 0; // until it's set later #ifdef READ_FROM_FILES_SYNCHRONOUSLY doReadFromFile(); #else if (!fHaveStartedReading) { // Await readable data from the file: envir().taskScheduler().turnOnBackgroundReadHandling(fileno(fFid), (TaskScheduler::BackgroundHandlerProc*)&fileReadableHandler, this); fHaveStartedReading = True; } #endif }
void WindowsAudioInputDevice_common::audioReadyPoller1() { if (readHead != NULL) { onceAudioIsReady(); } else { unsigned const maxPollingDelay = (100 + fGranularityInMS)*1000; if (fTotalPollingDelay > maxPollingDelay) { // We've waited too long for the audio device - assume it's down: handleClosure(this); return; } // Try again after a short delay: unsigned const uSecondsToDelay = fGranularityInMS*1000; fTotalPollingDelay += uSecondsToDelay; nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToDelay, (TaskFunc*)audioReadyPoller, this); } }
void DeviceSource::doGetNextFrame() { // Arrange here for our "deliverFrame" member function to be called // when the next frame of data becomes available from the device. // This must be done in a non-blocking fashion - i.e., so that we // return immediately from this function even if no data is // currently available. // // If the device can be implemented as a readable socket, then one easy // way to do this is using a call to // envir().taskScheduler().turnOnBackgroundReadHandling( ... ) // (See examples of this call in the "liveMedia" directory.) // If, for some reason, the source device stops being readable // (e.g., it gets closed), then you do the following: if (0 /* the source stops being readable */) { handleClosure(this); return; } }
// Entry point for frame requests: read the file synchronously, or register a
// background read handler with the event loop.
void ByteStreamFileSource::doGetNextFrame() {
  DEBUG_LOG(INF, "ByteStreamFileSource::doGetNextFrame");
  // End-of-file or a read error means the stream is over:
  if (feof(fFid) || ferror(fFid)) {
    handleClosure(this);
    return;
  }
#ifdef READ_FROM_FILES_SYNCHRONOUSLY // on Win32 reads are done synchronously
  doReadFromFile();
#else
  if (!fHaveStartedReading) {
    // Await readable data from the file:
    // ("fileno" returns the descriptor of the open FILE*)
    envir().taskScheduler().turnOnBackgroundReadHandling(fileno(fFid),
        (TaskScheduler::BackgroundHandlerProc*)&fileReadableHandler, this);
    fHaveStartedReading = True;
    DEBUG_LOG(INF, "turnOnBackgroundReadHandling: handle=%d, func='fileReadableHandler'", fileno(fFid));
  }
#endif
}
// Reads/parses the next MP3 frame (via doGetNextFrame1()), then completes
// delivery - directly on Windows (see HACK below), via the event loop elsewhere.
void MP3FileSource::doGetNextFrame() {
  if (!doGetNextFrame1()) {
    // The read/parse failed; treat as end-of-stream.
    handleClosure();
    return;
  }

  // Switch to another task:
#if defined(__WIN32__) || defined(_WIN32)
  // HACK: liveCaster/lc uses an implementation of scheduleDelayedTask()
  // that performs very badly (chewing up lots of CPU time, apparently polling)
  // on Windows.  Until this is fixed, we just call our "afterGetting()"
  // function directly.  This avoids infinite recursion, as long as our sink
  // is discontinuous, which is the case for the RTP sink that liveCaster/lc
  // uses.
  afterGetting(this);
#else
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)afterGetting, this);
#endif
}
void firewire_source::doGetNextFrame() { if (!handle_.get() && !try_open()) { handleClosure(this); return; } fFrameSize = std::min<unsigned int>(overspill_.size(), fMaxSize); fNumTruncatedBytes = fFrameSize - overspill_.size(); if (fFrameSize) { memcpy(fTo, overspill_.data(), fFrameSize); overspill_.clear(); FramedSource::afterGetting(this); return; } envir().taskScheduler().turnOnBackgroundReadHandling( raw1394_get_fd(handle_.get()), raw_poll, this); }
// Delivers the next chunk from a ZeroMQ subscription.  Messages arrive as an
// (envelope, data) pair; the data part is staged in fBuffer and doled out in
// fMaxSize-sized chunks across successive calls.
void ZmqFramedSource::doGetNextFrame() {
  // if ((fLimitNumBytesToStream && fNumBytesToStream == 0)) {
  //   handleClosure(this);
  //   return;
  // }
  if(!subscriber->connected()){
    envir() << "disconnected "<<" no "<<tt<<"\n";
    handleClosure(this);
    return;
  }
  fFrameSize = fMaxSize;
  if(fCurIndex >= fBufferSize){
    // Staging buffer exhausted: receive the next message pair.
    // NOTE(review): recv() presumably blocks here - confirm this source is
    // only driven when data is available, or the event loop will stall.
    zmq::message_t msgevp;
    subscriber->recv(&msgevp);
    zmq::message_t msgData;
    subscriber->recv(&msgData);
    if(msgData.size() < totalBufferSize){
      memcpy(fBuffer,msgData.data(),msgData.size());
      fBufferSize = msgData.size();
      fCurIndex = 0;
    }else{
      // Oversized message: logged and dropped (fBuffer keeps its old contents).
      envir() << "receive msg "<<(int)msgData.size()<< " is more than max buffer size "<<(int)totalBufferSize ;
    }
  }
  // NOTE(review): signed/unsigned comparison below (int vs fMaxSize) - safe
  // only while fCurIndex <= fBufferSize; confirm that invariant holds.
  int leftBufferSize = fBufferSize - fCurIndex;
  if(leftBufferSize < fMaxSize){
    // Final partial chunk of the staged message:
    fFrameSize = leftBufferSize;
    if(next){
      envir() << "leftBufferSize "<< leftBufferSize << " fMaxSize "<<fMaxSize << " no "<<tt<<"\n";
      next = false;
    }
  }else{
    next =true;
  }
  // NOTE(review): this log runs on every call, not just the "large" case.
  envir() << "large leftBufferSize "<< leftBufferSize <<" fMaxSize "<<fMaxSize << " no "<<tt<<"\n";
  memmove(fTo, &fBuffer[fCurIndex], fFrameSize);
  fCurIndex += fFrameSize;
  // NOTE(review): this zero-check happens after fCurIndex was advanced and
  // the (zero-length) memmove - consider checking before copying.
  if (fFrameSize == 0) {
    envir() << "fFrameSize == 0 "<<fMaxSize ;
    handleClosure(this);
    return;
  }
  // Set the 'presentation time':
  // We don't know a specific play time duration for this data,
  // so just record the current time as being the 'presentation time':
  gettimeofday(&fPresentationTime, NULL);
  // nextTask() = envir().taskScheduler().scheduleDelayedTask(0,(TaskFunc*)FramedSource::afterGetting, this);
  // Inform the downstream object that it has data:
  FramedSource::afterGetting(this);
}
// Note: We should change the following to use asynchronous file reading,
// as we now do with ByteStreamFileSource.
//
// Synchronous variant: reads PCM samples from the WAV file into the client's
// buffer (every sample, or every fScaleFactor'th sample for 'trick play'),
// sets the presentation time / duration, and completes delivery.
void WAVAudioFileSource::doGetNextFrame() {
  if (feof(fFid) || ferror(fFid) || (fLimitNumBytesToStream && fNumBytesToStream == 0)) {
    handleClosure(this);
    return;
  }

  // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
  if (fLimitNumBytesToStream && fNumBytesToStream < fMaxSize) {
    fMaxSize = fNumBytesToStream; // don't read past the streaming byte limit
  }
  if (fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
  unsigned bytesPerSample = (fNumChannels*fBitsPerSample)/8;
  if (bytesPerSample == 0) bytesPerSample = 1; // because we can't read less than a byte at a time

  // Round down to a whole number of samples:
  unsigned bytesToRead = fMaxSize - fMaxSize%bytesPerSample;
  if (fScaleFactor == 1) {
    // Common case - read samples in bulk:
    fFrameSize = fread(fTo, 1, bytesToRead, fFid);
    fNumBytesToStream -= fFrameSize;
  } else {
    // We read every 'fScaleFactor'th sample:
    fFrameSize = 0;
    while (bytesToRead > 0) {
      size_t bytesRead = fread(fTo, 1, bytesPerSample, fFid);
      if (bytesRead <= 0) break;
      fTo += bytesRead;
      fFrameSize += bytesRead;
      fNumBytesToStream -= bytesRead;
      bytesToRead -= bytesRead;
      // Seek to the appropriate place for the next sample:
      fseek(fFid, (fScaleFactor-1)*bytesPerSample, SEEK_CUR);
    }
  }

  // Set the 'presentation time' and 'duration' of this frame:
  if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
    // This is the first frame, so use the current time:
    gettimeofday(&fPresentationTime, NULL);
  } else {
    // Increment by the play time of the previous data:
    unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
    fPresentationTime.tv_sec += uSeconds/1000000;
    fPresentationTime.tv_usec = uSeconds%1000000;
  }

  // Remember the play time of this data:
  fDurationInMicroseconds = fLastPlayTime
    = (unsigned)((fPlayTimePerSample*fFrameSize)/bytesPerSample);

  // Switch to another task, and inform the reader that he has data:
#if defined(__WIN32__) || defined(_WIN32)
  // HACK: One of our applications that uses this source uses an
  // implementation of scheduleDelayedTask() that performs very badly
  // (chewing up lots of CPU time, apparently polling) on Windows.
  // Until this is fixed, we just call our "afterGetting()" function
  // directly.  This avoids infinite recursion, as long as our sink
  // is discontinuous, which is the case for the RTP sink that
  // this application uses.
  afterGetting(this);
#else
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)FramedSource::afterGetting, this);
#endif
}
// Dequeues TS data from our buffer into the client's buffer.  For timeshift
// streams (theoretically endless) a dequeue failure is padded with NULL TS
// packets; for recordings it means end-of-file.
void TsStreamFileSource::doGetNextFrame() {
  //if (feof(fFid) || ferror(fFid)) {
  //  handleClosure(this);
  //  return;
  //}
  // NOTE(review): fFid is reinterpreted as a FileReader* here (C-style cast) -
  // confirm the constructor really stores a FileReader* in that member.
  FileReader* reader = (FileReader*)fFid;
  // Try to read as many bytes as will fit in the buffer provided
  // (or "fPreferredFrameSize" if less)
  if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
  long lReadBytes = 0;
  if (m_buffer.DequeFromBuffer(fTo,fMaxSize, &lReadBytes)!=S_OK) {
    if (reader->GetTimeshift()) {
      //Timeshifting is theoretically endless, so send NULL TS packets as there is not enough real data to send
      if (m_buffer.GetNullTsBuffer(fTo,fMaxSize, &lReadBytes)!=S_OK) {
        LogDebug("ts:GetNullTsBuffer() timeout, closing stream"); //See TSBuffer.cpp for timeout value
        handleClosure(this);
        return;
      }
    } else //It's a recording, so not endless - assume end-of-file
    {
      LogDebug("ts:eof reached, closing stream");
      handleClosure(this);
      return;
    }
  }
  fFrameSize = lReadBytes;

  // Record the current file size (clamped to zero if unknown/negative):
  __int64 fileSize = reader->GetFileSize();
  if (fileSize < 0) fileSize = 0;
  fFileSize = fileSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }
    // Remember the play time of this data:
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Switch to another task, and inform the reader that he has data:
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)FramedSource::afterGetting, this);
}
// Reads PCM samples from the WAV file into the client's buffer - in bulk for
// normal playback, or one sample at a time (seeking between samples) for
// 'trick play' - then sets timing and hands the data to the reader.
void WAVAudioFileSource::doReadFromFile() {
  // Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
  if (fLimitNumBytesToStream && fNumBytesToStream < fMaxSize) {
    fMaxSize = fNumBytesToStream; // don't read past the streaming byte limit
  }
  if (fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
  unsigned bytesPerSample = (fNumChannels*fBitsPerSample)/8;
  if (bytesPerSample == 0) bytesPerSample = 1; // because we can't read less than a byte at a time

  // For 'trick play', read one sample at a time; otherwise (normal case) read samples in bulk:
  unsigned bytesToRead = fScaleFactor == 1 ? fMaxSize - fMaxSize%bytesPerSample : bytesPerSample;
  unsigned numBytesRead;
  while (1) { // loop for 'trick play' only
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
    numBytesRead = fread(fTo, 1, bytesToRead, fFid);
#else
    if (fFidIsSeekable) {
      numBytesRead = fread(fTo, 1, bytesToRead, fFid);
    } else {
      // For non-seekable files (e.g., pipes), call "read()" rather than "fread()", to ensure that the read doesn't block:
      numBytesRead = read(fileno(fFid), fTo, bytesToRead);
    }
#endif
    if (numBytesRead == 0) {
      // Nothing was read: treat as end-of-stream.
      handleClosure();
      return;
    }
    // Account for the bytes just read:
    fFrameSize += numBytesRead;
    fTo += numBytesRead;
    fMaxSize -= numBytesRead;
    fNumBytesToStream -= numBytesRead;

    // If we did an asynchronous read, and didn't read an integral number of samples, then we need to wait for another read:
#ifndef READ_FROM_FILES_SYNCHRONOUSLY
    if (fFrameSize%bytesPerSample > 0) return;
#endif

    // If we're doing 'trick play', then seek to the appropriate place for reading the next sample,
    // and keep reading until we fill the provided buffer:
    if (fScaleFactor != 1) {
      SeekFile64(fFid, (fScaleFactor-1)*bytesPerSample, SEEK_CUR);
      if (fMaxSize < bytesPerSample) break;
    } else {
      break; // from the loop (normal case)
    }
  }

  // Set the 'presentation time' and 'duration' of this frame:
  if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
    // This is the first frame, so use the current time:
    gettimeofday(&fPresentationTime, NULL);
  } else {
    // Increment by the play time of the previous data:
    unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
    fPresentationTime.tv_sec += uSeconds/1000000;
    fPresentationTime.tv_usec = uSeconds%1000000;
  }

  // Remember the play time of this data:
  fDurationInMicroseconds = fLastPlayTime
    = (unsigned)((fPlayTimePerSample*fFrameSize)/bytesPerSample);

  // Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  // To avoid possible infinite recursion, we need to return to the event loop to do this:
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)FramedSource::afterGetting, this);
#else
  // Because the file read was done from the event loop, we can call the
  // 'after getting' function directly, without risk of infinite recursion:
  FramedSource::afterGetting(this);
#endif
}
// Continuation after the input source delivered "frameSize" new bytes.
// Like the standard framer, but additionally tries to recover from a bad
// (mid-buffer) sync-byte position when the data isn't packet-aligned.
void TsMPEG2TransportStreamFramer::afterGettingFrame1(unsigned frameSize, struct timeval presentationTime) {
  fFrameSize += frameSize; // total bytes now in the client's buffer
  unsigned const numTSPackets = fFrameSize/TRANSPORT_PACKET_SIZE;
  unsigned const dataGoingToBeLost=fFrameSize % TRANSPORT_PACKET_SIZE; // trailing partial packet
  fFrameSize = numTSPackets*TRANSPORT_PACKET_SIZE; // an integral # of TS packets
  if (fFrameSize == 0) {
    // We didn't read a complete TS packet; assume that the input source has closed.
    handleClosure(this);
    return;
  }
  if (dataGoingToBeLost>0) {
    //need to handle a mid buffer
  }

  // Make sure the data begins with a sync byte:
  unsigned syncBytePosition;
  for (syncBytePosition = 0; syncBytePosition < fFrameSize; ++syncBytePosition) {
    if (fTo[syncBytePosition] == TRANSPORT_SYNC_BYTE) break;
  }
  if (syncBytePosition == fFrameSize) {
    // No sync byte anywhere in the data:
    envir() << "No Transport Stream sync byte in data.";
    handleClosure(this);
    return;
  } else if (syncBytePosition > 0) {
    // There's a sync byte, but not at the start of the data.  Move the good data
    // to the start of the buffer, then read more to fill it up again:
    // NOTE(review): the memmove length uses "frameSize" (this delivery only)
    // while the scan above used "fFrameSize" (the whole buffer) - confirm
    // this is intentional and doesn't drop previously buffered bytes.
    memmove(fTo, &fTo[syncBytePosition], frameSize - syncBytePosition);
    fFrameSize -= syncBytePosition-dataGoingToBeLost;
    fInputSource->getNextFrame(&fTo[fFrameSize], syncBytePosition,
                               afterGettingFrame, this,
                               FramedSource::handleClosure, this);
    return;
  } else if (dataGoingToBeLost>0) // there is a problem in the buffer somewhere
  {
    // The data starts with a sync byte but isn't packet-aligned overall:
    // find the first packet slot whose leading byte is NOT a sync byte.
    unsigned badPacket = 0;
    for (badPacket=0;badPacket<numTSPackets;badPacket++) {
      if (fTo[badPacket*TRANSPORT_PACKET_SIZE]!=TRANSPORT_SYNC_BYTE && badPacket*TRANSPORT_PACKET_SIZE<frameSize) break;
    }
    //we know it's the previous one...
    if (badPacket!=0) {
      // Scan backwards from the bad slot for the real sync byte:
      for (syncBytePosition = 1; syncBytePosition < TRANSPORT_PACKET_SIZE; ++syncBytePosition) {
        if (fTo[badPacket*TRANSPORT_PACKET_SIZE-syncBytePosition] == TRANSPORT_SYNC_BYTE) break;
      }
      // Close the gap so packets are contiguous again, then read more data:
      memmove(&fTo[(badPacket-1)*TRANSPORT_PACKET_SIZE],
              &fTo[badPacket*TRANSPORT_PACKET_SIZE-syncBytePosition],
              frameSize - (badPacket*TRANSPORT_PACKET_SIZE-syncBytePosition));
      fFrameSize -= TRANSPORT_PACKET_SIZE-syncBytePosition-dataGoingToBeLost;
      fInputSource->getNextFrame(&fTo[fFrameSize], syncBytePosition,
                                 afterGettingFrame, this,
                                 FramedSource::handleClosure, this);
      return;
    }
  } // else normal case: the data begins with a sync byte

  fPresentationTime = presentationTime;

  // Scan through the TS packets that we read, and update our estimate of
  // the duration of each packet:
  struct timeval tvNow;
  gettimeofday(&tvNow, NULL);
  double timeNow = tvNow.tv_sec + tvNow.tv_usec/1000000.0;
  for (unsigned i = 0; i < numTSPackets; ++i) {
    updateTSPacketDurationEstimate(&fTo[i*TRANSPORT_PACKET_SIZE], timeNow);
  }
  fDurationInMicroseconds
    = numTSPackets * (unsigned)(fTSPacketDurationEstimate*1000000);

  // Complete the delivery to our client:
  afterGetting(this);
}
void T140IdleFilter::onSourceClosure() { envir().taskScheduler().unscheduleDelayedTask(fIdleTimerTask); fIdleTimerTask = NULL; handleClosure(); }
// Assembles one "preferred frame" of TS data for the client by draining a
// local staging buffer (fLocalBuf), refilling it from the shared producer
// buffer (fPTsBuf) when empty, and re-aligning to the TS sync byte (0x47).
void ByteStreamLiveSource::doReadFromBuffer() {
#if 1
  //printf("=== zyl ===, %s, %d\n",__FILE__, __LINE__);
  // Initialize the working counters:
  unsigned int readLen = 0;
  unsigned int syncBytePosition = 0;
  // Request exactly one preferred frame per call (the earlier variable that
  // capped the read size is unused now):
  fMaxSize = fPreferredFrameSize;
  //printf("=== zyl ===, fLocalBuf.buf_read_counter = %d, fLocalBuf.buf_len = %d\n",
  // fLocalBuf.buf_read_counter, fLocalBuf.buf_len);
  fFrameSize = 0;

  // If fewer bytes remain in the local buffer than we need, consume what's left first:
  if((fLocalBuf.buf_len - fLocalBuf.buf_read_counter) < fPreferredFrameSize) {
    // fMaxSize = fLocalBuf.buf_len - fLocalBuf.buf_read_counter;
    readLen = fLocalBuf.buf_len - fLocalBuf.buf_read_counter; // bytes remaining
    memcpy(fTo, (fLocalBuf.buf_data + fLocalBuf.buf_read_counter), readLen);
    //fMaxSize += fLocalBuf.buf_len - fLocalBuf.buf_read_counter;
    fLocalBuf.buf_read_counter += readLen; // account for the consumed bytes
    fFrameSize += readLen;
  }

  // If the local buffer has been fully consumed, refill it from the shared buffer:
  if(fLocalBuf.buf_read_counter == fLocalBuf.buf_len) {
    // NOTE(review): busy-wait (with a printf) until the producer finishes
    // writing - this burns CPU; confirm no condition variable is available.
    while(fPTsBuf->buf_writing !=0 ) {
      printf("=== zyl === waiting for buf_writing\n");
    };
#if 0
    for(i = 0; i < 188; i++) {
      printf("%02x, ", fPTsBuf->buf_data[i]);
      if ((i+1)%16 == 0) printf("\n");
    }
    printf("\n");
#endif
    memcpy(fLocalBuf.buf_data, fPTsBuf->buf_data, fPTsBuf->buf_len);
    fLocalBuf.buf_read_counter = 0;
    fLocalBuf.buf_len = fPTsBuf->buf_len;
  }

  // If we still haven't gathered a full preferred frame, read the remainder:
  if(fFrameSize < fPreferredFrameSize) {
    readLen = fPreferredFrameSize - fFrameSize; // bytes still needed
    // Continue the copy at the current write offset within fTo:
    memcpy(fTo+fFrameSize, (fLocalBuf.buf_data + fLocalBuf.buf_read_counter), readLen);
    fLocalBuf.buf_read_counter += readLen;
    fFrameSize += readLen;
  }

  // Re-align if the assembled frame doesn't start with the TS sync byte.
  // NOTE(review): this scan has no upper bound - if fTo contains no 0x47 it
  // reads past the end of the buffer; confirm the input guarantees one.
  while(TRANSPORT_SYNC_BYTE != fTo[syncBytePosition]) {
    syncBytePosition++;
  }
  if(0 != syncBytePosition) {
    printf("=== zyl === syncBytePosition !=0\n");
    // Discard the leading bytes before the sync byte:
    memmove(fTo, &fTo[syncBytePosition], fFrameSize - syncBytePosition);
    fFrameSize -= syncBytePosition;
    // Top the frame back up after the discard:
    if(fFrameSize < fPreferredFrameSize) {
      readLen = fPreferredFrameSize - fFrameSize; // bytes still needed
      memcpy(fTo+fFrameSize, (fLocalBuf.buf_data + fLocalBuf.buf_read_counter), readLen);
      fLocalBuf.buf_read_counter += readLen;
      fFrameSize += readLen;
    }
  }

  //printf("=== zyl === ,fLocalBuf.buf_read_counter = %d, fLocalBuf.buf_len = %d\n",
  // fLocalBuf.buf_read_counter, fLocalBuf.buf_len);
  if (fFrameSize == 0) {
    // No data could be assembled: treat as end-of-stream.
    handleClosure();
    return;
  }
  //fNumBytesToStream -= fFrameSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }
    // Remember the play time of this data:
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Inform the reader that he has data:
  // Because the file read was done from the event loop, we can call the
  // 'after getting' function directly, without risk of infinite recursion:
  FramedSource::afterGetting(this);
#endif
}
// Our source has closed: stop reading the index file, then propagate the
// closure downstream.
void MPEG2TransportStreamTrickModeFilter::onSourceClosure1() {
  fIndexFile->stopReading();
  handleClosure();
}