QWORD MP4RtpTrack::SeekNearestSyncFrame(QWORD time)
{
	//Return the track to a clean state before seeking
	Reset();
	//Translate the millisecond position into track timescale units
	MP4Duration when = time*timeScale/1000;
	//Locate the sample covering that instant (false: any sample, not only sync)
	sampleId = MP4GetSampleIdFromTime(mp4,hint,when,false);
	//The requested time maps to no sample at all
	if (sampleId == MP4_INVALID_SAMPLE_ID)
		return MP4_INVALID_TIMESTAMP;
	//Walk backwards from that sample until a sync (key) sample is found
	for (; sampleId > 0; --sampleId)
	{
		//Skip non-sync samples (MP4GetSampleSync: 1 = sync, 0 = not, -1 = error)
		if (MP4GetSampleSync(mp4,hint,sampleId) <= 0)
			continue;
		//Found one: report its position, converted back to milliseconds
		when = MP4GetSampleTime(mp4,hint,sampleId);
		return when*1000/timeScale;
	}
	//No sync sample exists at or before the requested time
	return MP4_INVALID_TIMESTAMP;
}
/*
 * Reads the next H.264 sample from the video track and returns it in
 * Annex B byte-stream format (NALUs separated by 00 00 00 01 start codes).
 *
 * sample       - out: points at the internal video_sample buffer.
 * sample_size  - out: total bytes written (headers + converted sample).
 * duration     - out: sample duration in milliseconds.
 * is_key_frame - out: whether the sample is a sync (key) frame.
 *
 * Returns MP4_READ_OK, MP4_READ_EOS past the last sample, or MP4_READ_ERR.
 */
MP4ReadStatus GetNextH264VideoSample(unsigned char **sample, unsigned int &sample_size, unsigned long long int &duration, bool &is_key_frame)
{
	// Sample IDs are 1-based; past the last sample means end of stream.
	if (next_video_sample_idx > video_sample_number) {
		return MP4_READ_EOS;
	}
	unsigned int video_sample_offset = 0;
	if (MP4GetSampleSync(handle, video_track_id, next_video_sample_idx)) {
		/*
		 * Current sample is a key frame: prepend the SPS/PPS parameter
		 * sets, each preceded by an Annex B start code, so a decoder can
		 * start from this frame.
		 */
		if (pSeqHeaders && pSeqHeaderSize) {
			for (int i = 0; (pSeqHeaders[i] && pSeqHeaderSize[i]); i++) {
				// 00 00 00 01 start code; byte stores avoid the
				// unaligned word write the old cast performed.
				video_sample[video_sample_offset + 0] = 0;
				video_sample[video_sample_offset + 1] = 0;
				video_sample[video_sample_offset + 2] = 0;
				video_sample[video_sample_offset + 3] = 1;
				video_sample_offset += 4;
				memcpy(video_sample + video_sample_offset, pSeqHeaders[i], pSeqHeaderSize[i]);
				video_sample_offset += pSeqHeaderSize[i];
			}
		}
		if (pPictHeaders && pPictHeaderSize) {
			for (int i = 0; (pPictHeaders[i] && pPictHeaderSize[i]); i++) {
				video_sample[video_sample_offset + 0] = 0;
				video_sample[video_sample_offset + 1] = 0;
				video_sample[video_sample_offset + 2] = 0;
				video_sample[video_sample_offset + 3] = 1;
				video_sample_offset += 4;
				memcpy(video_sample + video_sample_offset, pPictHeaders[i], pPictHeaderSize[i]);
				video_sample_offset += pPictHeaderSize[i];
			}
		}
	}
	MP4Duration mp4_duration = 0;
	// Read the raw AVC1 sample just after any prepended parameter sets.
	unsigned char *video_sample_start_addr = video_sample + video_sample_offset;
	sample_size = video_sample_max_size - video_sample_offset;
	if (!MP4ReadSample(handle, video_track_id, next_video_sample_idx,
			   &video_sample_start_addr, &sample_size,
			   NULL, &mp4_duration, NULL, &is_key_frame)) {
		// %u: MP4 sample ids are unsigned (old code printed them with %d).
		printf("Fail to read video sample (%u)\n", next_video_sample_idx);
		return MP4_READ_ERR;
	}
	/*
	 * Convert AVC1 (length-prefixed) to Annex B: overwrite EVERY 4-byte
	 * NALU length field with a start code. The old code converted only the
	 * first NALU, corrupting samples that carry several NALUs (multiple
	 * slices, SEI, ...). Assumes 4-byte length fields, as the old code did
	 * — TODO confirm lengthSizeMinusOne == 3 in the track's avcC box.
	 */
	unsigned int offset = 0;
	while (offset + 4 <= sample_size) {
		unsigned char *nal = video_sample_start_addr + offset;
		// Big-endian length, read byte-wise (no unaligned access).
		unsigned int nal_len = ((unsigned int)nal[0] << 24) |
				       ((unsigned int)nal[1] << 16) |
				       ((unsigned int)nal[2] << 8)  |
				        (unsigned int)nal[3];
		nal[0] = 0; nal[1] = 0; nal[2] = 0; nal[3] = 1;
		// Corrupt length that would run past the sample: stop converting.
		if (nal_len > sample_size - (offset + 4))
			break;
		offset += 4 + nal_len;
	}
	*sample = video_sample;
	sample_size += video_sample_offset;
	// Convert the track-timescale duration to milliseconds.
	duration = (1000 * mp4_duration) / time_scale;
	next_video_sample_idx++;
	return MP4_READ_OK;
}
/*
 * Reads the next RTP packet of the current hint sample and delivers it to
 * the listener; on the first packet of a frame it also reads the whole
 * media sample and delivers it via onMediaFrame.
 *
 * Returns the next frame's time after the last packet of a frame, the
 * current frameTime for intermediate packets, or MP4_INVALID_TIMESTAMP on
 * any read error.
 */
QWORD MP4RtpTrack::Read(Listener *listener)
{
	int last = 0;
	uint8_t* data;
	bool isSyncSample;

	// numHintSamples == 0 means we are at the first packet of a new frame:
	// load the hint and deliver the full media sample before packetizing.
	if (!numHintSamples)
	{
		// Get number of rtp packets for this sample
		if (!MP4ReadRtpHint(mp4, hint, sampleId, &numHintSamples))
		{
			//Print error
			Error("Error reading hintt");
			//Exit
			return MP4_INVALID_TIMESTAMP;
		}

		// Get duration (in track units) of this sample
		frameSamples = MP4GetSampleDuration(mp4, hint, sampleId);

		// Get size of sample
		frameSize = MP4GetSampleSize(mp4, hint, sampleId);

		// Get sample timestamp and convert it to milliseconds
		frameTime = MP4GetSampleTime(mp4, hint, sampleId);
		frameTime = MP4ConvertFromTrackTimestamp(mp4, hint, frameTime, 1000);

		// For H264 sync frames, send SEI info first.
		// NOTE(review): sync is queried on `track` here, while the hint
		// reads above use `hint` — confirm this asymmetry is intended.
		if (codec==VideoCodec::H264 && MP4GetSampleSync(mp4,track,sampleId))
			// Send SEI info
			SendH263SEI(listener);

		// Buffer for the full media sample.
		// NOTE(review): this `data` shadows the function-scope `data`
		// declared above (which is used later for the RTP packet).
		BYTE *data = NULL;
		DWORD dataLen = 0;
		MP4Timestamp startTime;
		MP4Duration duration;
		MP4Duration renderingOffset;

		//Get the frame's buffer and its capacity
		data = frame->GetData();
		dataLen = frame->GetMaxMediaLength();

		// Read the whole media sample (not the hint) into the frame buffer
		if (!MP4ReadSample(
			mp4,				// MP4FileHandle hFile
			track,				// MP4TrackId hintTrackId
			sampleId,			// MP4SampleId sampleId,
			(u_int8_t **) &data,		// u_int8_t** ppBytes
			(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
			&startTime,			// MP4Timestamp* pStartTime
			&duration,			// MP4Duration* pDuration
			&renderingOffset,		// MP4Duration* pRenderingOffset
			&isSyncSample			// bool* pIsSyncSample
		))
		{
			Error("Error reading sample");
			//Last
			return MP4_INVALID_TIMESTAMP;
		}

		//Fill in the media frame by type
		if (media == MediaFrame::Video)
		{
			//Get video frame
			VideoFrame *video = (VideoFrame*)frame;
			//Set lenght
			video->SetLength(dataLen);
			// Timestamp rescaled from track units to the 90 kHz RTP video clock
			video->SetTimestamp(startTime*90000/timeScale);
			//Set intra
			video->SetIntra(isSyncSample);
		} else {
			//Get Audio frame
			AudioFrame *audio = (AudioFrame*)frame;
			//Set lenght
			audio->SetLength(dataLen);
			// Timestamp rescaled from track units to an 8 kHz audio clock
			audio->SetTimestamp(startTime*8000/timeScale);
		}

		//Deliver the full frame if there is a listener
		if (listener)
			//Frame callback
			listener->onMediaFrame(*frame);
	}

	// Mark bit is set on the last packet of the hint sample
	if (packetIndex + 1 == numHintSamples)
		//Set last mark
		last = 1;

	// Set mark bit
	rtp.SetMark(last);

	// Get data pointer (reuses the function-scope `data`)
	data = rtp.GetMediaData();
	//Get max data lenght
	DWORD dataLen = rtp.GetMaxMediaLength();

	// Read next rtp packet; packetIndex is post-incremented here
	if (!MP4ReadRtpPacket(
		mp4,				// MP4FileHandle hFile
		hint,				// MP4TrackId hintTrackId
		packetIndex++,			// u_int16_t packetIndex
		(u_int8_t **) &data,		// u_int8_t** ppBytes
		(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
		0,				// u_int32_t ssrc DEFAULT(0)
		0,				// bool includeHeader DEFAULT(true)
		1				// bool includePayload DEFAULT(true)
	))
	{
		// NOTE(review): packetIndex was already incremented above, so
		// this logs the index AFTER the one that failed.
		Error("Error reading packet [%d,%d,%d]\n", hint, track,packetIndex);
		//Exit
		return MP4_INVALID_TIMESTAMP;
	}

	// Sanity check the reported packet size against the buffer capacity.
	// NOTE(review): this check runs after MP4ReadRtpPacket has written the
	// data — confirm the library respects the capacity passed in dataLen.
	if (dataLen>rtp.GetMaxMediaLength())
	{
		//Error
		Error("RTP packet too big [%u,%u]\n",dataLen,rtp.GetMaxMediaLength());
		//Exit
		return MP4_INVALID_TIMESTAMP;
	}

	//Set lenght
	rtp.SetMediaLength(dataLen);

	// Write frame
	// NOTE(review): listener is null-checked before onMediaFrame above but
	// not here — confirm callers never pass a null listener.
	listener->onRTPPacket(rtp);

	// Last packet in the hint: reset per-frame state and advance the sample
	if (last)
	{
		// The first hint
		packetIndex = 0;
		// Go for next sample
		sampleId++;
		numHintSamples = 0;
		//Return next frame time
		return GetNextFrameTime();
	}

	// Intermediate packet: keep reporting the current frame's time
	return frameTime;
}