/*----------------------------------------------------------------------
|   AP4_HintTrackReader::WriteSampleRtpData
+---------------------------------------------------------------------*/
AP4_Result
AP4_HintTrackReader::WriteSampleRtpData(AP4_SampleRtpConstructor* constructor,
                                        AP4_ByteStream*           data_stream)
{
    // resolve which track the constructor's data lives in:
    // a track ref index of 0xFF means the hint track itself,
    // anything else refers to the media track
    AP4_Track* source_track;
    if (constructor->GetTrackRefIndex() == 0xFF) {
        source_track = &m_HintTrack;
    } else {
        // we need a media track to read from
        if (m_MediaTrack == NULL) return AP4_FAILURE;
        source_track = m_MediaTrack;
    }

    // fetch the referenced sample (constructor sample numbers are 1-based,
    // hence the -1 adjustment)
    AP4_Sample sample;
    AP4_Result result = source_track->GetSample(constructor->GetSampleNum()-1, sample);
    if (AP4_FAILED(result)) return result;

    // read the requested byte range out of the sample
    AP4_DataBuffer payload(constructor->GetLength());
    result = sample.ReadData(payload,
                             constructor->GetLength(),
                             constructor->GetSampleOffset());
    if (AP4_FAILED(result)) return result;

    // append the bytes to the output stream
    return data_stream->Write(payload.GetData(), payload.GetDataSize());
}
/*----------------------------------------------------------------------
|   AP4_MarlinIpmpTrackDecrypter::GetProcessedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_MarlinIpmpTrackDecrypter::GetProcessedSampleSize(AP4_Sample& sample)
{
    // With CBC, the padding size is not known up front: decrypt the last
    // cipher block (using the next-to-last block as IV) and deduce the
    // padding from how many bytes the final-block decryption yields.
    // Returns 0 on any error or if the sample is too small.
    // NOTE(review): AP4_AES_BLOCK_SIZE here vs AP4_CIPHER_BLOCK_SIZE below --
    // presumably both are 16; confirm they are the same constant.
    AP4_Size encrypted_size = sample.GetSize()-AP4_AES_BLOCK_SIZE;
    AP4_DataBuffer encrypted;
    AP4_DataBuffer decrypted;
    AP4_Size decrypted_size = AP4_CIPHER_BLOCK_SIZE;
    // need at least two cipher blocks: one to serve as IV + one data block
    if (sample.GetSize() < 2*AP4_CIPHER_BLOCK_SIZE) {
        return 0;
    }
    // read the last two cipher blocks of the sample
    AP4_Size offset = sample.GetSize()-2*AP4_CIPHER_BLOCK_SIZE;
    if (AP4_FAILED(sample.ReadData(encrypted, 2*AP4_CIPHER_BLOCK_SIZE, offset))) {
        return 0;
    }
    decrypted.Reserve(decrypted_size);
    // the next-to-last block is the IV for the last block
    m_Cipher->SetIV(encrypted.GetData());
    if (AP4_FAILED(m_Cipher->ProcessBuffer(encrypted.GetData()+AP4_CIPHER_BLOCK_SIZE,
                                           AP4_CIPHER_BLOCK_SIZE,
                                           decrypted.UseData(),
                                           &decrypted_size,
                                           true))) {
        return 0;
    }
    // the final ProcessBuffer strips the padding: the difference between a
    // full block and the decrypted size is the padding length
    unsigned int padding_size = AP4_CIPHER_BLOCK_SIZE-decrypted_size;
    return encrypted_size-padding_size;
}
/*----------------------------------------------------------------------
|   AutoDetectAudioFragmentDuration
+---------------------------------------------------------------------*/
static unsigned int
AutoDetectAudioFragmentDuration(AP4_ByteStream& stream, TrackCursor* cursor)
{
    // Scan the fragmented input for 'moof' atoms that belong to this cursor's
    // track and estimate an average fragment duration (in milliseconds) as
    // total sample duration / fragment count. Returns 0 if no estimate can
    // be made. The stream position is restored before returning.

    // remember where we are in the stream
    AP4_Position where = 0;
    stream.Tell(where);
    AP4_LargeSize stream_size = 0;
    stream.GetSize(stream_size);
    // CreateAtomFromStream decrements this by-ref as atoms are consumed
    AP4_LargeSize bytes_available = stream_size-where;

    AP4_UI64  fragment_count = 0;
    // sample count of the most recent matching fragment ('trun' entry count)
    AP4_UI32  last_fragment_size = 0;
    AP4_Atom* atom = NULL;
    while (AP4_SUCCEEDED(AP4_DefaultAtomFactory::Instance.CreateAtomFromStream(stream, bytes_available, atom))) {
        if (atom && atom->GetType() == AP4_ATOM_TYPE_MOOF) {
            AP4_ContainerAtom* moof = AP4_DYNAMIC_CAST(AP4_ContainerAtom, atom);
            // only count fragments whose 'tfhd' matches the cursor's track
            AP4_TfhdAtom* tfhd = AP4_DYNAMIC_CAST(AP4_TfhdAtom, moof->FindChild("traf/tfhd"));
            if (tfhd && tfhd->GetTrackId() == cursor->m_Track->GetId()) {
                ++fragment_count;
                AP4_TrunAtom* trun = AP4_DYNAMIC_CAST(AP4_TrunAtom, moof->FindChild("traf/trun"));
                if (trun) {
                    last_fragment_size = trun->GetEntries().ItemCount();
                }
            }
        }
        delete atom;
        atom = NULL;
    }

    // restore the stream to its original position
    stream.Seek(where);

    // decide if we can infer a fragment size
    if (fragment_count == 0 || cursor->m_Samples->GetSampleCount() == 0) {
        return 0;
    }

    // don't count the last fragment if we have more than one
    // (it is typically shorter and would skew the average)
    if (fragment_count > 1 && last_fragment_size) {
        --fragment_count;
    }
    if (fragment_count <= 1 || cursor->m_Samples->GetSampleCount() < last_fragment_size) {
        last_fragment_size = 0;
    }

    // sum the durations of all samples except those of the excluded last fragment
    AP4_Sample sample;
    AP4_UI64 total_duration = 0;
    for (unsigned int i=0; i<cursor->m_Samples->GetSampleCount()-last_fragment_size; i++) {
        cursor->m_Samples->GetSample(i, sample);
        total_duration += sample.GetDuration();
    }

    // average duration per fragment, converted from the media timescale to ms
    return (unsigned int)AP4_ConvertTime(total_duration/fragment_count, cursor->m_Track->GetMediaTimeScale(), 1000);
}
/*----------------------------------------------------------------------
|   AutoDetectFragmentDuration
+---------------------------------------------------------------------*/
static unsigned int
AutoDetectFragmentDuration(TrackCursor* cursor)
{
    // Look for a regular I-frame (sync sample) interval in the track: the
    // smallest interval such that every sample at a multiple of it is a sync
    // sample. If found, return the corresponding fragment duration in
    // milliseconds; return 0 when no regular pattern exists or on error.
    AP4_Sample   sample;
    unsigned int sample_count = cursor->m_Samples->GetSampleCount();

    // get the first sample as the starting point
    AP4_Result result = cursor->m_Samples->GetSample(0, sample);
    if (AP4_FAILED(result)) {
        fprintf(stderr, "ERROR: failed to read first sample\n");
        return 0;
    }
    // a regular pattern requires the first sample to be a sync sample
    if (!sample.IsSync()) {
        fprintf(stderr, "ERROR: first sample is not an I frame\n");
        return 0;
    }

    for (unsigned int interval = 1; interval < sample_count; interval++) {
        bool irregular = false;
        unsigned int sync_count = 0;
        unsigned int i;
        // walk the track in steps of 'interval'; bail out at the first
        // non-sync sample encountered on the grid
        for (i = 0; i < sample_count; i += interval) {
            result = cursor->m_Samples->GetSample(i, sample);
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to read sample %d\n", i);
                return 0;
            }
            if (!sample.IsSync()) {
                irregular = true;
                break;
            }
            ++sync_count;
        }
        if (sync_count < 1) continue;
        if (!irregular) {
            // found a pattern: 'sample' now holds the last sync sample
            // visited, whose dts spans (sync_count-1) intervals, which gives
            // us the frame rate
            AP4_UI64 duration = sample.GetDts();
            double fps = (double)(interval*(sync_count-1))/((double)duration/(double)cursor->m_Track->GetMediaTimeScale());
            if (Options.verbosity > 0) {
                printf("found regular I-frame interval: %d frames (at %.3f frames per second)\n",
                       interval, (float)fps);
            }
            // fragment duration in ms = 'interval' frames at 'fps'
            return (unsigned int)(1000.0*(double)interval/fps);
        }
    }

    return 0;
}
/*----------------------------------------------------------------------
|   AP4_OmaDcfCbcSampleEncrypter::GetEncryptedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_OmaDcfCbcSampleEncrypter::GetEncryptedSampleSize(AP4_Sample& sample)
{
    // plaintext size rounded up to the next cipher-block boundary (CBC
    // padding always adds at least one byte), plus AP4_CIPHER_BLOCK_SIZE+1
    // extra bytes of crypto header (presumably IV + 1-byte header -- see the
    // matching decrypter)
    AP4_Size plaintext_size = sample.GetSize();
    AP4_Size padding        = AP4_CIPHER_BLOCK_SIZE - (plaintext_size % AP4_CIPHER_BLOCK_SIZE);
    return plaintext_size + padding + AP4_CIPHER_BLOCK_SIZE + 1;
}
/*----------------------------------------------------------------------
|   TrackCursor::SetSampleIndex
+---------------------------------------------------------------------*/
AP4_Result
TrackCursor::SetSampleIndex(AP4_Ordinal sample_index)
{
    m_SampleIndex = sample_index;

    if (sample_index < m_Samples->GetSampleCount()) {
        // still within the sample table: load the sample at that position
        return m_Samples->GetSample(m_SampleIndex, m_Sample);
    }

    // past the last sample: mark end-of-stream and leave a placeholder
    // sample whose dts is the end time of the previous one
    AP4_UI64 end_dts = m_Sample.GetDts()+m_Sample.GetDuration();
    m_Sample.Reset();
    m_Sample.SetDts(end_dts);
    m_Eos = true;
    return AP4_SUCCESS;
}
/*----------------------------------------------------------------------
|   AP4_LinearReader::PopSample
+---------------------------------------------------------------------*/
bool
AP4_LinearReader::PopSample(Tracker*        tracker,
                            AP4_Sample&     sample,
                            AP4_DataBuffer& sample_data)
{
    // take the oldest buffered sample for this tracker, if any
    SampleBuffer* entry = NULL;
    if (AP4_FAILED(tracker->m_Samples.PopHead(entry))) {
        return false; // nothing buffered
    }

    // hand the sample and its payload to the caller
    assert(entry->m_Sample);
    sample = *entry->m_Sample;
    sample_data.SetData(entry->m_Data.GetData(), entry->m_Data.GetDataSize());

    // account for the bytes leaving the buffer
    assert(m_BufferFullness >= sample.GetSize());
    m_BufferFullness -= sample.GetSize();

    delete entry;
    return true;
}
/*----------------------------------------------------------------------
|   AP4_DecryptingSampleReader::ReadSampleData
+---------------------------------------------------------------------*/
AP4_Result
AP4_DecryptingSampleReader::ReadSampleData(AP4_Sample&     sample,
                                           AP4_DataBuffer& sample_data)
{
    // read the encrypted payload into our reusable buffer, then decrypt
    // it into the caller's buffer
    AP4_Result result = sample.ReadData(m_DataBuffer);
    if (AP4_SUCCEEDED(result)) {
        result = m_Decrypter->DecryptSampleData(m_DataBuffer, sample_data);
    }
    return result;
}
virtual AP4_Result GetSample(AP4_Ordinal index, AP4_Sample& sample) {
    // delegate to the underlying track, then force the sync flag on
    // samples that were previously marked in m_ForcedSync
    AP4_Result result = m_Track->GetSample(index, sample);
    if (AP4_SUCCEEDED(result) && m_ForcedSync[index]) {
        sample.SetSync(true);
    }
    return result;
}
/*----------------------------------------------------------------------
|   AP4_IsmaTrackDecrypter::GetProcessedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_IsmaTrackDecrypter::GetProcessedSampleSize(AP4_Sample& sample)
{
    // size of the ISMA crypto header preceding the payload:
    // key indicator + IV, plus one byte when selective encryption is on
    AP4_Size header_size = m_CipherParams->GetKeyIndicatorLength()
                         + m_CipherParams->GetIvLength()
                         + (m_CipherParams->GetSelectiveEncryption() ? 1 : 0);
    return sample.GetSize()-header_size;
}
/*----------------------------------------------------------------------
|   IsIFrame
+---------------------------------------------------------------------*/
static bool
IsIFrame(AP4_Sample& sample, AP4_AvcSampleDescription* avc_desc)
{
    // Parse the AVC sample's length-prefixed NAL units and report whether it
    // is an I frame: either an IDR NAL unit (type 5), or a type-1 slice whose
    // slice_type is 2 (I) or 7 (I-only). Returns false on read/parse failure.
    AP4_DataBuffer sample_data;
    if (AP4_FAILED(sample.ReadData(sample_data))) {
        return false;
    }

    const unsigned char* data = sample_data.GetData();
    AP4_Size size = sample_data.GetDataSize();
    // walk the NAL units
    while (size >= avc_desc->GetNaluLengthSize()) {
        // read the NALU length prefix (1, 2 or 4 bytes, big-endian)
        unsigned int nalu_length = 0;
        if (avc_desc->GetNaluLengthSize() == 1) {
            nalu_length = *data++;
            --size;
        } else if (avc_desc->GetNaluLengthSize() == 2) {
            nalu_length = AP4_BytesToUInt16BE(data);
            data += 2;
            size -= 2;
        } else if (avc_desc->GetNaluLengthSize() == 4) {
            nalu_length = AP4_BytesToUInt32BE(data);
            data += 4;
            size -= 4;
        } else {
            // unsupported length-prefix size
            return false;
        }
        // clamp the remaining byte count to what is actually left
        if (nalu_length <= size) {
            size -= nalu_length;
        } else {
            size = 0;
        }
        // NOTE(review): '*data' below is read even when the NALU payload is
        // empty or truncated (size may just have gone to 0) -- presumably the
        // inputs always carry at least a NAL header byte; confirm.
        switch (*data & 0x1F) { // NAL unit type is in the low 5 bits
            case 1: {
                // non-IDR slice: decode slice_type from the slice header
                AP4_BitStream bits;
                // NOTE(review): assumes at least 9 bytes in the NALU -- confirm
                bits.WriteBytes(data+1, 8);
                ReadGolomb(bits); // first_mb_in_slice (ignored)
                unsigned int slice_type = ReadGolomb(bits);
                if (slice_type == 2 || slice_type == 7) {
                    return true;  // I or I-only slice
                } else {
                    return false; // only show first slice type
                }
            }
            case 5:
                // IDR slice: always an I frame
                return true;
        }
        data += nalu_length;
    }

    return false;
}
/*----------------------------------------------------------------------
|   AP4_Track::Clone
+---------------------------------------------------------------------*/
AP4_Track*
AP4_Track::Clone(AP4_Result* result)
{
    // Deep-copy this track: clone all sample descriptions into a new
    // synthetic sample table, re-reference every sample's data, and build a
    // new track with the same timing parameters.
    // 'result' (optional) receives AP4_SUCCESS or an error code.
    AP4_SyntheticSampleTable* sample_table = new AP4_SyntheticSampleTable();

    // default return value
    if (result) *result = AP4_SUCCESS;

    // add clones of the sample descriptions to the new sample table
    for (unsigned int i=0; ;i++) {
        AP4_SampleDescription* sample_description = GetSampleDescription(i);
        if (sample_description == NULL) break;
        sample_table->AddSampleDescription(sample_description->Clone());
    }

    // copy all sample references into the new table
    AP4_Sample  sample;
    AP4_Ordinal index = 0;
    while (AP4_SUCCEEDED(GetSample(index, sample))) {
        AP4_ByteStream* data_stream;
        data_stream = sample.GetDataStream();
        if (data_stream == NULL) {
            // defensive fix: a sample without a data stream cannot be
            // referenced; report the error instead of dereferencing NULL
            if (result) *result = AP4_ERROR_INTERNAL;
            break;
        }
        sample_table->AddSample(*data_stream,
                                sample.GetOffset(),
                                sample.GetSize(),
                                sample.GetDuration(),
                                sample.GetDescriptionIndex(),
                                sample.GetDts(),
                                sample.GetCtsDelta(),
                                sample.IsSync());
        AP4_RELEASE(data_stream); // release our ref, the table has kept its own ref.
        index++;
    }

    // create the cloned track
    AP4_Track* clone = new AP4_Track(GetType(),
                                     sample_table,
                                     GetId(),
                                     GetMovieTimeScale(),
                                     GetDuration(),
                                     GetMediaTimeScale(),
                                     GetMediaDuration(),
                                     GetTrackLanguage(),
                                     GetWidth(),
                                     GetHeight());

    return clone;
}
/*----------------------------------------------------------------------
|   AP4_OmaDcfCtrSampleDecrypter::GetDecryptedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_OmaDcfCtrSampleDecrypter::GetDecryptedSampleSize(AP4_Sample& sample)
{
    // CTR mode adds no padding, so the decrypted size is the stored size
    // minus the crypto header. Returns 0 if no cipher is configured.
    if (m_Cipher == NULL) return 0;

    // decide if this sample is encrypted or not
    bool is_encrypted;
    if (m_SelectiveEncryption) {
        // read the first byte to see if the sample is encrypted or not
        AP4_Byte h;
        AP4_DataBuffer peek_buffer;
        peek_buffer.SetBuffer(&h, 1);
        // NOTE(review): return value ignored; 'h' stays uninitialized if the
        // read fails -- confirm callers guarantee a readable first byte
        sample.ReadData(peek_buffer, 1);
        is_encrypted = ((h&0x80)!=0);
    } else {
        is_encrypted = true;
    }

    // header = optional selective-encryption byte + IV (only when encrypted)
    AP4_Size crypto_header_size = (m_SelectiveEncryption?1:0)+(is_encrypted?m_IvLength:0);
    return sample.GetSize()-crypto_header_size;
}
/*----------------------------------------------------------------------
|   AP4_Track::ReadSample
+---------------------------------------------------------------------*/
AP4_Result
AP4_Track::ReadSample(AP4_Ordinal     index,
                      AP4_Sample&     sample,
                      AP4_DataBuffer& data)
{
    // locate the sample, then load its payload into 'data'
    AP4_Result result = GetSample(index, sample);
    if (AP4_SUCCEEDED(result)) {
        result = sample.ReadData(data);
    }
    return result;
}
AP4_Result
SampleFileStorage::StoreSample(AP4_Sample& from_sample, AP4_Sample& to_sample)
{
    // start with a field-for-field copy of the source sample
    to_sample = from_sample;

    // load the source payload
    AP4_DataBuffer payload;
    AP4_Result result = from_sample.ReadData(payload);
    if (AP4_FAILED(result)) return result;

    // the copy will live at the current write position of our backing stream
    AP4_Position where;
    m_Stream->Tell(where);
    to_sample.SetOffset(where);

    // append the payload to the backing stream
    result = m_Stream->Write(payload.GetData(), payload.GetDataSize());
    if (AP4_FAILED(result)) return result;

    // point the copy at the backing stream
    to_sample.SetDataStream(*m_Stream);

    return AP4_SUCCESS;
}
/*----------------------------------------------------------------------
|   ReadSample
+---------------------------------------------------------------------*/
static AP4_Result
ReadSample(SampleReader&   reader,
           AP4_Track&      track,
           AP4_Sample&     sample,
           AP4_DataBuffer& sample_data,
           double&         ts,
           bool&           eos)
{
    // read the next sample; end-of-stream is reported through 'eos'
    // rather than as an error
    AP4_Result result = reader.ReadSample(sample, sample_data);
    if (result == AP4_ERROR_EOS) {
        eos = true;
    } else if (AP4_FAILED(result)) {
        return result; // real failure
    }

    // timestamp in seconds
    ts = (double)sample.GetDts()/(double)track.GetMediaTimeScale();

    return AP4_SUCCESS;
}
/*----------------------------------------------------------------------
|   AP4_OmaDcfCbcSampleDecrypter::GetDecryptedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_OmaDcfCbcSampleDecrypter::GetDecryptedSampleSize(AP4_Sample& sample)
{
    // Compute the plaintext size of a (possibly) encrypted sample.
    // Returns 0 if no cipher is configured or on any error.
    if (m_Cipher == NULL) return 0;

    // decide if this sample is encrypted or not
    bool is_encrypted;
    if (m_SelectiveEncryption) {
        // read the first byte to see if the sample is encrypted or not
        AP4_Byte h;
        AP4_DataBuffer peek_buffer;
        peek_buffer.SetBuffer(&h, 1);
        // NOTE(review): return value ignored; 'h' stays uninitialized if the
        // read fails -- confirm callers guarantee a readable first byte
        sample.ReadData(peek_buffer, 1);
        is_encrypted = ((h&0x80)!=0);
    } else {
        is_encrypted = true;
    }

    if (is_encrypted) {
        // with CBC, we need to decrypt the last block to know what the padding was
        AP4_Size crypto_header_size = (m_SelectiveEncryption?1:0)+m_IvLength;
        AP4_Size encrypted_size = sample.GetSize()-crypto_header_size;
        AP4_DataBuffer encrypted;
        AP4_DataBuffer decrypted;
        AP4_Size decrypted_size = AP4_CIPHER_BLOCK_SIZE;
        // minimum-size sanity check
        // NOTE(review): the read below needs 2*AP4_CIPHER_BLOCK_SIZE bytes,
        // but this guard only ensures crypto_header_size+AP4_CIPHER_BLOCK_SIZE;
        // safe when m_IvLength >= AP4_CIPHER_BLOCK_SIZE -- confirm
        if (sample.GetSize() < crypto_header_size+AP4_CIPHER_BLOCK_SIZE) {
            return 0;
        }
        // read the last two cipher blocks (next-to-last serves as the IV)
        AP4_Size offset = sample.GetSize()-2*AP4_CIPHER_BLOCK_SIZE;
        if (AP4_FAILED(sample.ReadData(encrypted, 2*AP4_CIPHER_BLOCK_SIZE, offset))) {
            return 0;
        }
        decrypted.Reserve(decrypted_size);
        m_Cipher->SetIV(encrypted.GetData());
        if (AP4_FAILED(m_Cipher->ProcessBuffer(encrypted.GetData()+AP4_CIPHER_BLOCK_SIZE,
                                               AP4_CIPHER_BLOCK_SIZE,
                                               decrypted.UseData(),
                                               &decrypted_size,
                                               true))) {
            return 0;
        }
        // the final ProcessBuffer strips the padding: the shortfall from a
        // full block is the padding size
        unsigned int padding_size = AP4_CIPHER_BLOCK_SIZE-decrypted_size;
        return encrypted_size-padding_size;
    } else {
        // cleartext sample: only the selective-encryption byte (if any) is removed
        return sample.GetSize()-(m_SelectiveEncryption?1:0);
    }
}
/*----------------------------------------------------------------------
|   ShowSample
+---------------------------------------------------------------------*/
static void
ShowSample(AP4_Sample& sample, unsigned int index, AP4_SampleDecrypter* sample_decrypter)
{
    // print a fixed-width summary of the sample's metadata
    printf("[%06d] size=%6d duration=%6d",
           index,
           (int)sample.GetSize(),
           (int)sample.GetDuration());
    printf(" offset=%10lld dts=%10lld cts=%10lld ",
           sample.GetOffset(),
           sample.GetDts(),
           sample.GetCts());
    printf("%s", sample.IsSync() ? " [S] " : " ");

    // load the payload, decrypting it first if a decrypter was supplied
    AP4_DataBuffer sample_data;
    sample.ReadData(sample_data);
    AP4_DataBuffer  decrypted_sample_data;
    AP4_DataBuffer* data = &sample_data;
    if (sample_decrypter) {
        sample_decrypter->DecryptSampleData(sample_data, decrypted_sample_data);
        data = &decrypted_sample_data;
    }

    // dump up to the first 12 payload bytes in hex
    unsigned int total = data->GetDataSize();
    unsigned int show  = (total > 12) ? 12 : total;
    for (unsigned int i=0; i<show; i++) {
        printf("%02x", data->GetData()[i]);
    }
    printf("%s", (show == total) ? "\n" : "...\n");
}
/*---------------------------------------------------------------------- | main +---------------------------------------------------------------------*/ int main(int argc, char** argv) { if (argc < 2) { PrintUsageAndExit(); } // init the variables const char* input_filename = NULL; const char* output_filename = NULL; const char* track_selector = NULL; AP4_UI32 selected_track_id = 0; unsigned int fragment_duration = 0; bool auto_detect_fragment_duration = true; bool create_segment_index = false; bool quiet = false; AP4_UI32 timescale = 0; AP4_Result result; Options.verbosity = 1; Options.debug = false; Options.trim = false; Options.no_tfdt = false; Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE; // parse the command line argv++; char* arg; while ((arg = *argv++)) { if (!strcmp(arg, "--verbosity")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --verbosity option\n"); return 1; } Options.verbosity = strtoul(arg, NULL, 10); } else if (!strcmp(arg, "--debug")) { Options.debug = true; } else if (!strcmp(arg, "--index")) { create_segment_index = true; } else if (!strcmp(arg, "--quiet")) { quiet = true; } else if (!strcmp(arg, "--trim")) { Options.trim = true; } else if (!strcmp(arg, "--no-tfdt")) { Options.no_tfdt = true; } else if (!strcmp(arg, "--force-i-frame-sync")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --fragment-duration option\n"); return 1; } if (!strcmp(arg, "all")) { Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_ALL; } else if (!strcmp(arg, "auto")) { Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_AUTO; } else { fprintf(stderr, "ERROR: unknown mode for --force-i-frame-sync\n"); return 1; } } else if (!strcmp(arg, "--fragment-duration")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --fragment-duration option\n"); return 1; } fragment_duration = strtoul(arg, NULL, 10); auto_detect_fragment_duration = 
false; } else if (!strcmp(arg, "--timescale")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --timescale option\n"); return 1; } timescale = strtoul(arg, NULL, 10); } else if (!strcmp(arg, "--track")) { track_selector = *argv++; if (track_selector == NULL) { fprintf(stderr, "ERROR: missing argument after --track option\n"); return 1; } } else { if (input_filename == NULL) { input_filename = arg; } else if (output_filename == NULL) { output_filename = arg; } else { fprintf(stderr, "ERROR: unexpected argument '%s'\n", arg); return 1; } } } if (Options.debug && Options.verbosity == 0) { Options.verbosity = 1; } if (input_filename == NULL) { fprintf(stderr, "ERROR: no input specified\n"); return 1; } AP4_ByteStream* input_stream = NULL; result = AP4_FileByteStream::Create(input_filename, AP4_FileByteStream::STREAM_MODE_READ, input_stream); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: cannot open input (%d)\n", result); return 1; } if (output_filename == NULL) { fprintf(stderr, "ERROR: no output specified\n"); return 1; } AP4_ByteStream* output_stream = NULL; result = AP4_FileByteStream::Create(output_filename, AP4_FileByteStream::STREAM_MODE_WRITE, output_stream); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: cannot create/open output (%d)\n", result); return 1; } // parse the input MP4 file (moov only) AP4_File input_file(*input_stream, AP4_DefaultAtomFactory::Instance, true); // check the file for basic properties if (input_file.GetMovie() == NULL) { fprintf(stderr, "ERROR: no movie found in the file\n"); return 1; } if (!quiet && input_file.GetMovie()->HasFragments()) { fprintf(stderr, "NOTICE: file is already fragmented, it will be re-fragmented\n"); } // create a cusor list to keep track of the tracks we will read from AP4_Array<TrackCursor*> cursors; // iterate over all tracks TrackCursor* video_track = NULL; TrackCursor* audio_track = NULL; TrackCursor* subtitles_track = NULL; unsigned int video_track_count = 0; 
unsigned int audio_track_count = 0; unsigned int subtitles_track_count = 0; for (AP4_List<AP4_Track>::Item* track_item = input_file.GetMovie()->GetTracks().FirstItem(); track_item; track_item = track_item->GetNext()) { AP4_Track* track = track_item->GetData(); // sanity check if (track->GetSampleCount() == 0 && !input_file.GetMovie()->HasFragments()) { fprintf(stderr, "WARNING: track %d has no samples, it will be skipped\n", track->GetId()); continue; } // create a sample array for this track SampleArray* sample_array; if (input_file.GetMovie()->HasFragments()) { sample_array = new CachedSampleArray(track); } else { sample_array = new SampleArray(track); } // create a cursor for the track TrackCursor* cursor = new TrackCursor(track, sample_array); cursor->m_Tfra->SetTrackId(track->GetId()); cursors.Append(cursor); if (track->GetType() == AP4_Track::TYPE_VIDEO) { if (video_track) { fprintf(stderr, "WARNING: more than one video track found\n"); } else { video_track = cursor; } video_track_count++; } else if (track->GetType() == AP4_Track::TYPE_AUDIO) { if (audio_track == NULL) { audio_track = cursor; } audio_track_count++; } else if (track->GetType() == AP4_Track::TYPE_SUBTITLES) { if (subtitles_track == NULL) { subtitles_track = cursor; } subtitles_track_count++; } } if (cursors.ItemCount() == 0) { fprintf(stderr, "ERROR: no valid track found\n"); return 1; } if (track_selector) { if (!strncmp("audio", track_selector, 5)) { if (audio_track) { selected_track_id = audio_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no audio track found\n"); return 1; } } else if (!strncmp("video", track_selector, 5)) { if (video_track) { selected_track_id = video_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no video track found\n"); return 1; } } else if (!strncmp("subtitles", track_selector, 9)) { if (subtitles_track) { selected_track_id = subtitles_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no subtitles track found\n"); return 1; } } else { 
selected_track_id = (AP4_UI32)strtol(track_selector, NULL, 10); bool found = false; for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetId() == selected_track_id) { found = true; break; } } if (!found) { fprintf(stderr, "ERROR: track not found\n"); return 1; } } } if (video_track_count == 0 && audio_track_count == 0 && subtitles_track_count == 0) { fprintf(stderr, "ERROR: no audio, video, or subtitles track in the file\n"); return 1; } AP4_AvcSampleDescription* avc_desc = NULL; if (video_track && (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE)) { // that feature is only supported for AVC AP4_SampleDescription* sdesc = video_track->m_Track->GetSampleDescription(0); if (sdesc) { avc_desc = AP4_DYNAMIC_CAST(AP4_AvcSampleDescription, sdesc); } if (avc_desc == NULL) { fprintf(stderr, "--force-i-frame-sync can only be used with AVC/H.264 video\n"); return 1; } } // remember where the stream was AP4_Position position; input_stream->Tell(position); // for fragmented input files, we need to populate the sample arrays if (input_file.GetMovie()->HasFragments()) { AP4_LinearReader reader(*input_file.GetMovie(), input_stream); for (unsigned int i=0; i<cursors.ItemCount(); i++) { reader.EnableTrack(cursors[i]->m_Track->GetId()); } AP4_UI32 track_id; AP4_Sample sample; do { result = reader.GetNextSample(sample, track_id); if (AP4_SUCCEEDED(result)) { for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetId() == track_id) { cursors[i]->m_Samples->AddSample(sample); break; } } } } while (AP4_SUCCEEDED(result)); } else if (video_track && (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE)) { AP4_Sample sample; if (Options.force_i_frame_sync == AP4_FRAGMENTER_FORCE_SYNC_MODE_AUTO) { // detect if this looks like an open-gop source for (unsigned int i=1; i<video_track->m_Samples->GetSampleCount(); i++) { if (AP4_SUCCEEDED(video_track->m_Samples->GetSample(i, sample))) { if (sample.IsSync()) { // 
we found a sync i-frame, assume this is *not* an open-gop source Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE; if (Options.debug) { printf("this does not look like an open-gop source, not forcing i-frame sync flags\n"); } break; } } } } if (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE) { for (unsigned int i=0; i<video_track->m_Samples->GetSampleCount(); i++) { if (AP4_SUCCEEDED(video_track->m_Samples->GetSample(i, sample))) { if (IsIFrame(sample, avc_desc)) { video_track->m_Samples->ForceSync(i); } } } } } // return the stream to its original position input_stream->Seek(position); // auto-detect the fragment duration if needed if (auto_detect_fragment_duration) { if (video_track) { fragment_duration = AutoDetectFragmentDuration(video_track); } else if (audio_track && input_file.GetMovie()->HasFragments()) { fragment_duration = AutoDetectAudioFragmentDuration(*input_stream, audio_track); } if (fragment_duration == 0) { if (Options.verbosity > 0) { fprintf(stderr, "unable to autodetect fragment duration, using default\n"); } fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION; } else if (fragment_duration > AP4_FRAGMENTER_MAX_AUTO_FRAGMENT_DURATION) { if (Options.verbosity > 0) { fprintf(stderr, "auto-detected fragment duration too large, using default\n"); } fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION; } } // fragment the file Fragment(input_file, *output_stream, cursors, fragment_duration, timescale, selected_track_id, create_segment_index); // cleanup and exit if (input_stream) input_stream->Release(); if (output_stream) output_stream->Release(); return 0; }
/*----------------------------------------------------------------------
|   Mp4Parser_Seek
+---------------------------------------------------------------------*/
BLT_METHOD
Mp4Parser_Seek(BLT_MediaNode* _self,
               BLT_SeekMode*  mode,
               BLT_SeekPoint* point)
{
    // Seek the parser: estimate a time-stamp seek point, jump the video
    // track to the nearest sync sample, then align the audio track to the
    // timestamp of that sync sample.
    Mp4Parser* self = ATX_SELF_EX(Mp4Parser, BLT_BaseMediaNode, BLT_MediaNode);

    /* estimate the seek point */
    if (ATX_BASE(self, BLT_BaseMediaNode).context == NULL) return BLT_FAILURE;
    BLT_Stream_EstimateSeekPoint(ATX_BASE(self, BLT_BaseMediaNode).context, *mode, point);
    if (!(point->mask & BLT_SEEK_POINT_MASK_TIME_STAMP)) {
        /* we can only seek by timestamp here */
        return BLT_FAILURE;
    }

    /* seek to the estimated offset on all tracks */
    AP4_Ordinal sample_index = 0;
    /* convert the seek point to milliseconds */
    AP4_UI32 ts_ms = point->time_stamp.seconds*1000+point->time_stamp.nanoseconds/1000000;
    if (self->video_output.track) {
        AP4_Result result = self->video_output.track->GetSampleIndexForTimeStampMs(ts_ms, sample_index);
        if (AP4_FAILED(result)) {
            ATX_LOG_WARNING_1("video GetSampleIndexForTimeStampMs failed (%d)", result);
            return BLT_FAILURE;
        }
        ATX_LOG_FINE_1("seeking to video time %d ms", ts_ms);

        // go to the nearest sync sample
        self->video_output.sample = self->video_output.track->GetNearestSyncSampleIndex(sample_index);
        if (self->input.reader) {
            self->input.reader->SetSampleIndex(self->video_output.track->GetId(), self->video_output.sample);
        }
        ATX_LOG_FINE_1("seeking to video sync sample %d", self->video_output.sample);

        // compute the timestamp of the video sample we're seeking to, so we can pick an audio
        // sample that is close in time (there are many more audio sync points than video)
        AP4_Sample sample;
        if (AP4_SUCCEEDED(self->video_output.track->GetSample(self->video_output.sample, sample))) {
            AP4_UI32 media_timescale = self->video_output.track->GetMediaTimeScale();
            if (media_timescale) {
                /* re-target the audio seek to the video sync sample's cts */
                ts_ms = (AP4_UI32)((((AP4_UI64)sample.GetCts())*1000)/media_timescale);
                ATX_LOG_FINE_1("sync sample time is %d ms", ts_ms);
            }
        } else {
            ATX_LOG_FINE_1("unable to get sample info for sample %d", self->video_output.sample);
        }
    }
    if (self->audio_output.track) {
        AP4_Result result = self->audio_output.track->GetSampleIndexForTimeStampMs(ts_ms, sample_index);
        if (AP4_FAILED(result)) {
            ATX_LOG_WARNING_1("audio GetSampleIndexForTimeStampMs failed (%d)", result);
            return BLT_FAILURE;
        }
        /* audio uses the index as-is (no sync-sample search) */
        self->audio_output.sample = sample_index;
        if (self->input.reader) {
            self->input.reader->SetSampleIndex(self->audio_output.track->GetId(), sample_index);
        }
    }

    /* set the mode so that the nodes down the chain know the seek has */
    /* already been done on the stream */
    *mode = BLT_SEEK_MODE_IGNORE;

    return BLT_SUCCESS;
}
/*----------------------------------------------------------------------
|   Mp4ParserOutput_GetPacket
+---------------------------------------------------------------------*/
BLT_METHOD
Mp4ParserOutput_GetPacket(BLT_PacketProducer* _self,
                          BLT_MediaPacket**   packet)
{
    // Produce one media packet from the next sample of this output's track:
    // read the sample (linear-reader or direct mode), refresh the sample
    // description if it changed, optionally decrypt, then copy the payload
    // into a newly allocated packet with a timestamp.
    Mp4ParserOutput* self = ATX_SELF(Mp4ParserOutput, BLT_PacketProducer);
    *packet = NULL;

    // if we don't have an input yet, we can't produce packets
    //if (self->parser->input.mp4_file == NULL) {
    //    return BLT_ERROR_PORT_HAS_NO_DATA;
    //}

    if (self->track == NULL) {
        return BLT_ERROR_EOS;
    } else {
        // check for end-of-stream
        if (self->sample >= self->track->GetSampleCount()) {
            return BLT_ERROR_EOS;
        }

        // read one sample
        AP4_Sample sample;
        AP4_DataBuffer* sample_buffer = self->sample_buffer;
        AP4_Result result;
        if (self->parser->input.reader) {
            // linear reader mode
            result = self->parser->input.reader->ReadNextSample(self->track->GetId(), sample, *sample_buffer);
            if (AP4_SUCCEEDED(result)) self->sample++;
        } else {
            // normal mode
            result = self->track->ReadSample(self->sample++, sample, *sample_buffer);
        }
        if (AP4_FAILED(result)) {
            ATX_LOG_WARNING_1("ReadSample failed (%d)", result);
            if (result == AP4_ERROR_EOS || result == ATX_ERROR_OUT_OF_RANGE) {
                // the sample table promised more samples than the data holds
                ATX_LOG_WARNING("incomplete media");
                return BLT_ERROR_INCOMPLETE_MEDIA;
            } else {
                return BLT_ERROR_PORT_HAS_NO_DATA;
            }
        }

        // update the sample description if it has changed
        if (sample.GetDescriptionIndex() != self->sample_description_index) {
            result = Mp4ParserOutput_SetSampleDescription(self, sample.GetDescriptionIndex());
            if (BLT_FAILED(result)) return result;
        }

        // decrypt the sample if needed
        if (self->sample_decrypter) {
            self->sample_decrypter->DecryptSampleData(*sample_buffer, *self->sample_decrypted_buffer);
            sample_buffer = self->sample_decrypted_buffer;
        }

        // allocate the output packet and copy the payload into it
        AP4_Size packet_size = sample_buffer->GetDataSize();
        result = BLT_Core_CreateMediaPacket(ATX_BASE(self->parser, BLT_BaseMediaNode).core,
                                            packet_size,
                                            (const BLT_MediaType*)self->media_type,
                                            packet);
        if (BLT_FAILED(result)) return result;
        BLT_MediaPacket_SetPayloadSize(*packet, packet_size);
        void* buffer = BLT_MediaPacket_GetPayloadBuffer(*packet);
        ATX_CopyMemory(buffer, sample_buffer->GetData(), packet_size);

        // set the timestamp: cts -> microseconds, then split into
        // seconds + nanoseconds for the BLT_TimeStamp
        AP4_UI32 media_timescale = self->track->GetMediaTimeScale();
        if (media_timescale) {
            AP4_UI64 ts = ((AP4_UI64)sample.GetCts())*1000000;
            ts /= media_timescale;
            BLT_TimeStamp bt_ts = {
                (BLT_Int32)(ts / 1000000),
                (BLT_Int32)((ts % 1000000)*1000)
            };
            BLT_MediaPacket_SetTimeStamp(*packet, bt_ts);
        }

        // set packet flags (first packet marks the start of the stream)
        if (self->sample == 1) {
            BLT_MediaPacket_SetFlags(*packet, BLT_MEDIA_PACKET_FLAG_START_OF_STREAM);
        }

        return BLT_SUCCESS;
    }
}
/*----------------------------------------------------------------------
|   AP4_IsmaTrackEncrypter::GetProcessedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_IsmaTrackEncrypter::GetProcessedSampleSize(AP4_Sample& sample)
{
    // the encrypted sample is the payload preceded by a crypto header
    // whose size is fixed (4 bytes) for now
    AP4_Size header_size = 4;
    return sample.GetSize()+header_size;
}
/*---------------------------------------------------------------------- | Fragment +---------------------------------------------------------------------*/ static void Fragment(AP4_File& input_file, AP4_ByteStream& output_stream, AP4_Array<TrackCursor*>& cursors, unsigned int fragment_duration, AP4_UI32 timescale, AP4_UI32 track_id, bool create_segment_index) { AP4_List<FragmentInfo> fragments; TrackCursor* index_cursor = NULL; AP4_Result result; AP4_Movie* input_movie = input_file.GetMovie(); if (input_movie == NULL) { fprintf(stderr, "ERROR: no moov found in the input file\n"); return; } // create the output file object AP4_Movie* output_movie = new AP4_Movie(1000); // create an mvex container AP4_ContainerAtom* mvex = new AP4_ContainerAtom(AP4_ATOM_TYPE_MVEX); AP4_MehdAtom* mehd = new AP4_MehdAtom(0); mvex->AddChild(mehd); // add an output track for each track in the input file for (unsigned int i=0; i<cursors.ItemCount(); i++) { AP4_Track* track = cursors[i]->m_Track; // skip non matching tracks if we have a selector if (track_id && track->GetId() != track_id) { continue; } result = cursors[i]->Init(); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to init sample cursor (%d), skipping track %d\n", result, track->GetId()); return; } // create a sample table (with no samples) to hold the sample description AP4_SyntheticSampleTable* sample_table = new AP4_SyntheticSampleTable(); for (unsigned int j=0; j<track->GetSampleDescriptionCount(); j++) { AP4_SampleDescription* sample_description = track->GetSampleDescription(j); sample_table->AddSampleDescription(sample_description, false); } // create the track AP4_Track* output_track = new AP4_Track(sample_table, track->GetId(), timescale?timescale:1000, AP4_ConvertTime(track->GetDuration(), input_movie->GetTimeScale(), timescale?timescale:1000), timescale?timescale:track->GetMediaTimeScale(), 0,//track->GetMediaDuration(), track); output_movie->AddTrack(output_track); // add a trex entry to the mvex container 
AP4_TrexAtom* trex = new AP4_TrexAtom(track->GetId(), 1, 0, 0, 0); mvex->AddChild(trex); } // select the anchor cursor TrackCursor* anchor_cursor = NULL; for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetId() == track_id) { anchor_cursor = cursors[i]; } } if (anchor_cursor == NULL) { for (unsigned int i=0; i<cursors.ItemCount(); i++) { // use this as the anchor track if it is the first video track if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_VIDEO) { anchor_cursor = cursors[i]; break; } } } if (anchor_cursor == NULL) { // no video track to anchor with, pick the first audio track for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_AUDIO) { anchor_cursor = cursors[i]; break; } } // no audio track to anchor with, pick the first subtitles track for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_SUBTITLES) { anchor_cursor = cursors[i]; break; } } } if (anchor_cursor == NULL) { // this shoudl never happen fprintf(stderr, "ERROR: no anchor track\n"); return; } if (create_segment_index) { index_cursor = anchor_cursor; } if (Options.debug) { printf("Using track ID %d as anchor\n", anchor_cursor->m_Track->GetId()); } // update the mehd duration mehd->SetDuration(output_movie->GetDuration()); // add the mvex container to the moov container output_movie->GetMoovAtom()->AddChild(mvex); // compute all the fragments unsigned int sequence_number = 1; for(;;) { TrackCursor* cursor = NULL; // pick the first track with a fragment index lower than the anchor's for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (track_id && cursors[i]->m_Track->GetId() != track_id) continue; if (cursors[i]->m_Eos) continue; if (cursors[i]->m_FragmentIndex < anchor_cursor->m_FragmentIndex) { cursor = cursors[i]; break; } } // check if we found a non-anchor cursor to use if (cursor == NULL) { // the anchor should be used in this round, check if we can 
use it if (anchor_cursor->m_Eos) { // the anchor is done, pick a new anchor unless we need to trim anchor_cursor = NULL; if (!Options.trim) { for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (track_id && cursors[i]->m_Track->GetId() != track_id) continue; if (cursors[i]->m_Eos) continue; if (anchor_cursor == NULL || cursors[i]->m_Track->GetType() == AP4_Track::TYPE_VIDEO || cursors[i]->m_Track->GetType() == AP4_Track::TYPE_AUDIO) { anchor_cursor = cursors[i]; if (Options.debug) { printf("+++ New anchor: Track ID %d\n", anchor_cursor->m_Track->GetId()); } } } } } cursor = anchor_cursor; } if (cursor == NULL) break; // all done // decide how many samples go into this fragment AP4_UI64 target_dts; if (cursor == anchor_cursor) { // compute the current dts in milliseconds AP4_UI64 anchor_dts_ms = AP4_ConvertTime(cursor->m_Sample.GetDts(), cursor->m_Track->GetMediaTimeScale(), 1000); // round to the nearest multiple of fragment_duration AP4_UI64 anchor_position = (anchor_dts_ms + (fragment_duration/2))/fragment_duration; // pick the next fragment_duration multiple at our target target_dts = AP4_ConvertTime(fragment_duration*(anchor_position+1), 1000, cursor->m_Track->GetMediaTimeScale()); } else { target_dts = AP4_ConvertTime(anchor_cursor->m_Sample.GetDts(), anchor_cursor->m_Track->GetMediaTimeScale(), cursor->m_Track->GetMediaTimeScale()); if (target_dts <= cursor->m_Sample.GetDts()) { // we must be at the end, past the last anchor sample, just use the target duration target_dts = AP4_ConvertTime(fragment_duration*(cursor->m_FragmentIndex+1), 1000, cursor->m_Track->GetMediaTimeScale()); if (target_dts <= cursor->m_Sample.GetDts()) { // we're still behind, there may have been an alignment/rounding error, just advance by one segment duration target_dts = cursor->m_Sample.GetDts()+AP4_ConvertTime(fragment_duration, 1000, cursor->m_Track->GetMediaTimeScale()); } } } unsigned int end_sample_index = cursor->m_Samples->GetSampleCount(); AP4_UI64 smallest_diff = 
(AP4_UI64)(0xFFFFFFFFFFFFFFFFULL); AP4_Sample sample; for (unsigned int i=cursor->m_SampleIndex+1; i<=cursor->m_Samples->GetSampleCount(); i++) { AP4_UI64 dts; if (i < cursor->m_Samples->GetSampleCount()) { result = cursor->m_Samples->GetSample(i, sample); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", i, result); return; } if (!sample.IsSync()) continue; // only look for sync samples dts = sample.GetDts(); } else { result = cursor->m_Samples->GetSample(i-1, sample); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", i-1, result); return; } dts = sample.GetDts()+sample.GetDuration(); } AP4_SI64 diff = dts-target_dts; AP4_UI64 abs_diff = diff<0?-diff:diff; if (abs_diff < smallest_diff) { // this sample is the closest to the target so far end_sample_index = i; smallest_diff = abs_diff; } if (diff >= 0) { // this sample is past the target, it is not going to get any better, stop looking break; } } if (cursor->m_Eos) continue; if (Options.debug) { if (cursor == anchor_cursor) { printf("===="); } else { printf("----"); } printf(" Track ID %d - dts=%lld, target=%lld, start=%d, end=%d/%d\n", cursor->m_Track->GetId(), cursor->m_Sample.GetDts(), target_dts, cursor->m_SampleIndex, end_sample_index, cursor->m_Track->GetSampleCount()); } // emit a fragment for the selected track if (Options.verbosity > 1) { printf("fragment: track ID %d ", cursor->m_Track->GetId()); } // decide which sample description index to use // (this is not very sophisticated, we only look at the sample description // index of the first sample in the group, which may not be correct. 
This // should be fixed later) unsigned int sample_desc_index = cursor->m_Sample.GetDescriptionIndex(); unsigned int tfhd_flags = AP4_TFHD_FLAG_DEFAULT_BASE_IS_MOOF; if (sample_desc_index > 0) { tfhd_flags |= AP4_TFHD_FLAG_SAMPLE_DESCRIPTION_INDEX_PRESENT; } if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) { tfhd_flags |= AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT; } // setup the moof structure AP4_ContainerAtom* moof = new AP4_ContainerAtom(AP4_ATOM_TYPE_MOOF); AP4_MfhdAtom* mfhd = new AP4_MfhdAtom(sequence_number++); moof->AddChild(mfhd); AP4_ContainerAtom* traf = new AP4_ContainerAtom(AP4_ATOM_TYPE_TRAF); AP4_TfhdAtom* tfhd = new AP4_TfhdAtom(tfhd_flags, cursor->m_Track->GetId(), 0, sample_desc_index+1, 0, 0, 0); if (tfhd_flags & AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT) { tfhd->SetDefaultSampleFlags(0x1010000); // sample_is_non_sync_sample=1, sample_depends_on=1 (not I frame) } traf->AddChild(tfhd); if (!Options.no_tfdt) { AP4_TfdtAtom* tfdt = new AP4_TfdtAtom(1, cursor->m_Timestamp); traf->AddChild(tfdt); } AP4_UI32 trun_flags = AP4_TRUN_FLAG_DATA_OFFSET_PRESENT | AP4_TRUN_FLAG_SAMPLE_DURATION_PRESENT | AP4_TRUN_FLAG_SAMPLE_SIZE_PRESENT; AP4_UI32 first_sample_flags = 0; if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) { trun_flags |= AP4_TRUN_FLAG_FIRST_SAMPLE_FLAGS_PRESENT; first_sample_flags = 0x2000000; // sample_depends_on=2 (I frame) } AP4_TrunAtom* trun = new AP4_TrunAtom(trun_flags, 0, first_sample_flags); traf->AddChild(trun); moof->AddChild(traf); // create a new FragmentInfo object to store the fragment details FragmentInfo* fragment = new FragmentInfo(cursor->m_Samples, cursor->m_Tfra, cursor->m_Timestamp, moof); fragments.Add(fragment); // add samples to the fragment unsigned int sample_count = 0; AP4_Array<AP4_TrunAtom::Entry> trun_entries; fragment->m_MdatSize = AP4_ATOM_HEADER_SIZE; for (;;) { // if we have one non-zero CTS delta, we'll need to express it if (cursor->m_Sample.GetCtsDelta()) { trun->SetFlags(trun->GetFlags() | 
AP4_TRUN_FLAG_SAMPLE_COMPOSITION_TIME_OFFSET_PRESENT); } // add one sample trun_entries.SetItemCount(sample_count+1); AP4_TrunAtom::Entry& trun_entry = trun_entries[sample_count]; trun_entry.sample_duration = timescale? (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetDuration(), cursor->m_Track->GetMediaTimeScale(), timescale): cursor->m_Sample.GetDuration(); trun_entry.sample_size = cursor->m_Sample.GetSize(); trun_entry.sample_composition_time_offset = timescale? (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetCtsDelta(), cursor->m_Track->GetMediaTimeScale(), timescale): cursor->m_Sample.GetCtsDelta(); fragment->m_SampleIndexes.SetItemCount(sample_count+1); fragment->m_SampleIndexes[sample_count] = cursor->m_SampleIndex; fragment->m_MdatSize += trun_entry.sample_size; fragment->m_Duration += trun_entry.sample_duration; // next sample cursor->m_Timestamp += trun_entry.sample_duration; result = cursor->SetSampleIndex(cursor->m_SampleIndex+1); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", cursor->m_SampleIndex+1, result); return; } sample_count++; if (cursor->m_Eos) { if (Options.debug) { printf("[Track ID %d has reached the end]\n", cursor->m_Track->GetId()); } break; } if (cursor->m_SampleIndex >= end_sample_index) { break; // done with this fragment } } if (Options.verbosity > 1) { printf(" %d samples\n", sample_count); } // update moof and children trun->SetEntries(trun_entries); trun->SetDataOffset((AP4_UI32)moof->GetSize()+AP4_ATOM_HEADER_SIZE); // advance the cursor's fragment index ++cursor->m_FragmentIndex; } // write the ftyp atom AP4_FtypAtom* ftyp = input_file.GetFileType(); if (ftyp) { // keep the existing brand and compatible brands AP4_Array<AP4_UI32> compatible_brands; compatible_brands.EnsureCapacity(ftyp->GetCompatibleBrands().ItemCount()+1); for (unsigned int i=0; i<ftyp->GetCompatibleBrands().ItemCount(); i++) { compatible_brands.Append(ftyp->GetCompatibleBrands()[i]); } // add the compatible brand if it is not 
already there if (!ftyp->HasCompatibleBrand(AP4_FILE_BRAND_ISO5)) { compatible_brands.Append(AP4_FILE_BRAND_ISO5); } // create a replacement AP4_FtypAtom* new_ftyp = new AP4_FtypAtom(ftyp->GetMajorBrand(), ftyp->GetMinorVersion(), &compatible_brands[0], compatible_brands.ItemCount()); ftyp = new_ftyp; } else { AP4_UI32 compat = AP4_FILE_BRAND_ISO5; ftyp = new AP4_FtypAtom(AP4_FTYP_BRAND_MP42, 0, &compat, 1); } ftyp->Write(output_stream); delete ftyp; // write the moov atom output_movie->GetMoovAtom()->Write(output_stream); // write the (not-yet fully computed) index if needed AP4_SidxAtom* sidx = NULL; AP4_Position sidx_position = 0; output_stream.Tell(sidx_position); if (create_segment_index) { sidx = new AP4_SidxAtom(index_cursor->m_Track->GetId(), index_cursor->m_Track->GetMediaTimeScale(), 0, 0); // reserve space for the entries now, but they will be computed and updated later sidx->SetReferenceCount(fragments.ItemCount()); sidx->Write(output_stream); } // write all fragments for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem(); item; item = item->GetNext()) { FragmentInfo* fragment = item->GetData(); // remember the time and position of this fragment output_stream.Tell(fragment->m_MoofPosition); fragment->m_Tfra->AddEntry(fragment->m_Timestamp, fragment->m_MoofPosition); // write the moof fragment->m_Moof->Write(output_stream); // write mdat output_stream.WriteUI32(fragment->m_MdatSize); output_stream.WriteUI32(AP4_ATOM_TYPE_MDAT); AP4_DataBuffer sample_data; AP4_Sample sample; for (unsigned int i=0; i<fragment->m_SampleIndexes.ItemCount(); i++) { // get the sample result = fragment->m_Samples->GetSample(fragment->m_SampleIndexes[i], sample); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", fragment->m_SampleIndexes[i], result); return; } // read the sample data result = sample.ReadData(sample_data); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to read sample data for sample %d (%d)\n", 
fragment->m_SampleIndexes[i], result); return; } // write the sample data result = output_stream.Write(sample_data.GetData(), sample_data.GetDataSize()); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to write sample data (%d)\n", result); return; } } } // update the index and re-write it if needed if (create_segment_index) { unsigned int segment_index = 0; AP4_SidxAtom::Reference reference; for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem(); item; item = item->GetNext()) { FragmentInfo* fragment = item->GetData(); reference.m_ReferencedSize = (AP4_UI32)(fragment->m_Moof->GetSize()+fragment->m_MdatSize); reference.m_SubsegmentDuration = fragment->m_Duration; reference.m_StartsWithSap = true; sidx->SetReference(segment_index++, reference); } AP4_Position here = 0; output_stream.Tell(here); output_stream.Seek(sidx_position); sidx->Write(output_stream); output_stream.Seek(here); delete sidx; } // create an mfra container and write out the index AP4_ContainerAtom mfra(AP4_ATOM_TYPE_MFRA); for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (track_id && cursors[i]->m_Track->GetId() != track_id) { continue; } mfra.AddChild(cursors[i]->m_Tfra); cursors[i]->m_Tfra = NULL; } AP4_MfroAtom* mfro = new AP4_MfroAtom((AP4_UI32)mfra.GetSize()+16); mfra.AddChild(mfro); result = mfra.Write(output_stream); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: failed to write 'mfra' (%d)\n", result); return; } // cleanup fragments.DeleteReferences(); for (unsigned int i=0; i<cursors.ItemCount(); i++) { delete cursors[i]; } for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem(); item; item = item->GetNext()) { FragmentInfo* fragment = item->GetData(); delete fragment->m_Moof; } delete output_movie; }
/*----------------------------------------------------------------------
|   AP4_Processor::ProcessFragment
|
|   Process one movie fragment: for each 'traf' in the 'moof', create a
|   per-track FragmentHandler and a sample table, let each handler
|   transform the samples, then re-emit the (possibly modified) 'moof'
|   followed by a rebuilt 'mdat', and finally patch the mdat size, the
|   moof (trun data offsets / tfhd fields) and the sidx reference size.
|
|   NOTE(review): the early `return result` paths below leak the
|   handlers and sample tables appended so far — confirm whether
|   callers treat a failure here as fatal process exit.
+---------------------------------------------------------------------*/
AP4_Result
AP4_Processor::ProcessFragment(AP4_ContainerAtom*       moof,
                               AP4_SidxAtom*            sidx,
                               AP4_Position             sidx_position,
                               AP4_ByteStream&          output,
                               AP4_Array<AP4_Position>& moof_positions,
                               AP4_Array<AP4_Position>& mdat_positions)
{
    // NOTE(review): never incremented in this function, so only sidx
    // reference 0 is ever updated per call — presumably the caller
    // invokes this once per fragment; confirm against call sites.
    unsigned int fragment_index = 0;
    //AP4_UI64 mdat_payload_offset = atom_offset+atom->GetSize()+AP4_ATOM_HEADER_SIZE;
    AP4_Sample     sample;
    AP4_DataBuffer sample_data_in;
    AP4_DataBuffer sample_data_out;
    AP4_Result     result = AP4_SUCCESS;

    // parse the moof
    //AP4_MovieFragment* fragment = new AP4_MovieFragment(moof);

    // process all the traf atoms: one handler + one sample table per traf,
    // stored at the same index so the second loop can pair them up
    AP4_Array<AP4_Processor::FragmentHandler*> handlers;
    AP4_Array<AP4_FragmentSampleTable*>        sample_tables;
    for (; AP4_Atom* child = moof->GetChild(AP4_ATOM_TYPE_TRAF, handlers.ItemCount());) {
        AP4_TrafAtom* traf = AP4_DYNAMIC_CAST(AP4_TrafAtom, child);
        PERTRACK &track_data(m_TrackData[traf->GetInternalTrackId()]);
        AP4_TrakAtom* trak = track_data.track_handler->GetTrakAtom();
        AP4_TrexAtom* trex = track_data.track_handler->GetTrexAtom();

        // create the handler for this traf (may be NULL: samples are then
        // copied through unmodified)
        AP4_Processor::FragmentHandler* handler =
            CreateFragmentHandler(trak, trex, traf,
                                  *(m_StreamData[track_data.streamId].stream),
                                  moof_positions[track_data.streamId]);
        if (handler) {
            result = handler->ProcessFragment();
            if (AP4_FAILED(result)) return result;
        }
        handlers.Append(handler); // NULL entries kept so indexes stay aligned with trafs

        // create a sample table object so we can read the sample data
        // NOTE(review): indexed by internal track id here, but by streamId
        // above for the handler — verify these are interchangeable for all
        // inputs, or one of the two is a latent bug.
        AP4_FragmentSampleTable* sample_table =
            new AP4_FragmentSampleTable(traf,
                                        trex,
                                        traf->GetInternalTrackId(),
                                        m_StreamData[track_data.streamId].stream,
                                        moof_positions[traf->GetInternalTrackId()],
                                        mdat_positions[traf->GetInternalTrackId()],
                                        0);
        sample_tables.Append(sample_table);

        // let the handler look at the samples before we process them
        if (handler) result = handler->PrepareForSamples(sample_table);
        if (AP4_FAILED(result)) return result;
    }

    output.Buffer();

    // write the moof (a first pass; it is re-written at the end once the
    // trun data offsets and tfhd fields have been patched)
    AP4_UI64 moof_out_start = 0;
    output.Tell(moof_out_start);
    moof->Write(output);

    // remember the input->output location mapping of this fragment
    FragmentMapEntry map_entry = { moof_positions[0], moof_out_start };
    fragment_map_.Append(map_entry);

    // write an mdat header with a placeholder size (patched below once the
    // real payload size is known)
    AP4_Position mdat_out_start;
    AP4_UI64 mdat_size = AP4_ATOM_HEADER_SIZE;
    output.Tell(mdat_out_start);
    output.WriteUI32(0);
    output.WriteUI32(AP4_ATOM_TYPE_MDAT);

    // process all track runs
    for (unsigned int i=0; i<handlers.ItemCount(); i++) {
        AP4_Processor::FragmentHandler* handler = handlers[i];

        // get the traf (and its tfhd) for this track
        AP4_ContainerAtom* traf = AP4_DYNAMIC_CAST(AP4_ContainerAtom, moof->GetChild(AP4_ATOM_TYPE_TRAF, i));
        if (traf == NULL) continue;
        AP4_TfhdAtom* tfhd = AP4_DYNAMIC_CAST(AP4_TfhdAtom, traf->GetChild(AP4_ATOM_TYPE_TFHD));

        // compute the base data offset against which trun offsets are expressed
        AP4_UI64 base_data_offset;
        if (tfhd->GetFlags() & AP4_TFHD_FLAG_BASE_DATA_OFFSET_PRESENT) {
            base_data_offset = mdat_out_start+AP4_ATOM_HEADER_SIZE;
        } else {
            base_data_offset = moof_out_start;
        }

        // build a list of all trun atoms
        AP4_Array<AP4_TrunAtom*> truns;
        for (AP4_List<AP4_Atom>::Item* child_item = traf->GetChildren().FirstItem();
             child_item;
             child_item = child_item->GetNext()) {
            AP4_Atom* child_atom = child_item->GetData();
            if (child_atom->GetType() == AP4_ATOM_TYPE_TRUN) {
                AP4_TrunAtom* trun = AP4_DYNAMIC_CAST(AP4_TrunAtom, child_atom);
                truns.Append(trun);
            }
        }
        AP4_Ordinal trun_index        = 0;
        AP4_Ordinal trun_sample_index = 0;
        AP4_TrunAtom* trun = truns[0];
        trun->SetDataOffset((AP4_SI32)((mdat_out_start+mdat_size)-base_data_offset));

        // write the mdat: iterate over all samples of this track's fragment,
        // walking the truns in parallel
        for (unsigned int j=0; j<sample_tables[i]->GetSampleCount(); j++, trun_sample_index++) {
            // advance to the next trun when the current one is exhausted
            if (trun_sample_index >= trun->GetEntries().ItemCount()) {
                trun = truns[++trun_index];
                trun->SetDataOffset((AP4_SI32)((mdat_out_start+mdat_size)-base_data_offset));
                trun_sample_index = 0;
            }

            // get the next sample
            result = sample_tables[i]->GetSample(j, sample);
            if (AP4_FAILED(result)) return result;
            // NOTE(review): ReadData return value is ignored here — a read
            // failure would propagate garbage sample data; confirm intent.
            sample.ReadData(sample_data_in);
            m_TrackData[sample_tables[i]->GetInteralTrackId()].dts = sample.GetDts();

            // process the sample data
            if (handler) {
                result = handler->ProcessSample(sample_data_in, sample_data_out);
                if (AP4_FAILED(result)) return result;

                // write the (transformed) sample data
                result = output.Write(sample_data_out.GetData(), sample_data_out.GetDataSize());
                if (AP4_FAILED(result)) return result;

                // update the mdat size
                mdat_size += sample_data_out.GetDataSize();

                // update the trun entry (sample size may have changed)
                trun->UseEntries()[trun_sample_index].sample_size = sample_data_out.GetDataSize();
            } else {
                // write the sample data (unmodified)
                result = output.Write(sample_data_in.GetData(), sample_data_in.GetDataSize());
                if (AP4_FAILED(result)) return result;

                // update the mdat size
                mdat_size += sample_data_in.GetDataSize();
            }
        }

        if (handler) {
            // update the tfhd header to reflect the new layout
            if (tfhd->GetFlags() & AP4_TFHD_FLAG_BASE_DATA_OFFSET_PRESENT) {
                tfhd->SetBaseDataOffset(mdat_out_start+AP4_ATOM_HEADER_SIZE);
            }
            if (tfhd->GetFlags() & AP4_TFHD_FLAG_DEFAULT_SAMPLE_SIZE_PRESENT) {
                tfhd->SetDefaultSampleSize(trun->GetEntries()[0].sample_size);
            }

            // give the handler a chance to update the atoms
            handler->FinishFragment();
        }
    }

    // patch the mdat header with the final payload size
    AP4_Position mdat_out_end;
    output.Tell(mdat_out_end);
#if defined(AP4_DEBUG)
    AP4_ASSERT(mdat_out_end-mdat_out_start == mdat_size);
#endif
    if (AP4_FAILED(result = output.Seek(mdat_out_start))) return result;
    output.WriteUI32((AP4_UI32)mdat_size);
    output.Seek(mdat_out_end);

    // re-write the moof in place now that trun/tfhd fields are final
    if (AP4_FAILED(result = output.Seek(moof_out_start))) return result;
    moof->Write(output);
    output.Seek(mdat_out_end);

    // update the sidx if we have one
    if (sidx && fragment_index < sidx->GetReferences().ItemCount()) {
        if (fragment_index == 0) {
            // distance from the end of the sidx to the first fragment
            sidx->SetFirstOffset(moof_out_start-(sidx_position+sidx->GetSize()));
        }
        AP4_LargeSize fragment_size = mdat_out_end-moof_out_start;
        AP4_SidxAtom::Reference& sidx_ref = sidx->UseReferences()[fragment_index];
        sidx_ref.m_ReferencedSize = (AP4_UI32)fragment_size;
    }

    // cleanup
    //delete fragment;
    for (unsigned int i=0; i<handlers.ItemCount(); i++) {
        delete handlers[i];
    }
    for (unsigned int i=0; i<sample_tables.ItemCount(); i++) {
        delete sample_tables[i];
    }

    if (AP4_FAILED(result = output.Flush())) return result;

    return AP4_SUCCESS;
}
// Premiere importer entry point: fill audioRec7->buffer (one float array
// per channel) with decoded ALAC audio starting at audioRec7->position,
// for audioRec7->size sample frames. Seeks by timestamp, then decodes
// ALAC packets sequentially, skipping leading samples of the first packet
// as needed. Returns malNoError on success, imFileReadFailed on read error.
static prMALError
SDKImportAudio7(
    imStdParms        *stdParms,
    imFileRef          SDKfileRef,
    imImportAudioRec7 *audioRec7)
{
    prMALError result = malNoError;

    // privateData: recover our per-file state handle and lock it for the
    // duration of the call (unlocked at the bottom)
    ImporterLocalRec8H ldataH = reinterpret_cast<ImporterLocalRec8H>(audioRec7->privateData);
    stdParms->piSuites->memFuncs->lockHandle(reinterpret_cast<char**>(ldataH));
    ImporterLocalRec8Ptr localRecP = reinterpret_cast<ImporterLocalRec8Ptr>( *ldataH );

    if(localRecP && localRecP->audio_track && localRecP->alac)
    {
        assert(localRecP->reader != NULL);
        assert(localRecP->file != NULL && localRecP->file->GetMovie() != NULL);

        assert(audioRec7->position >= 0); // Do they really want contiguous samples?
        assert(audioRec7->position < localRecP->duration);

        if(audioRec7->size > localRecP->duration - audioRec7->position)
        {
            // this does happen, we get asked for audio data past the duration
            // let's make sure there's no garbage there and re-set audioRec7->size
            // (the memset covers the full original request, so the tail past
            // the clamped size stays zeroed)
            for(int c=0; c < localRecP->numChannels; c++)
            {
                memset(audioRec7->buffer[c], 0, sizeof(float) * audioRec7->size);
            }

            audioRec7->size = localRecP->duration - audioRec7->position;
        }

        // convert the requested start position to milliseconds for the seek
        const AP4_UI32 timestamp_ms = audioRec7->position * 1000 / localRecP->audioSampleRate;

        // decode buffer sizing: 2 bytes/sample for depths <= 16, else 4
        // (20/24-bit samples are delivered in 32-bit containers)
        const size_t bytes_per_sample = (localRecP->alac->mConfig.bitDepth <= 16 ? 2 : 4);
        const size_t alac_buf_size = localRecP->alac->mConfig.frameLength *
                                     localRecP->alac->mConfig.numChannels *
                                     bytes_per_sample + kALACMaxEscapeHeaderBytes;
        // NOTE(review): malloc result is not NULL-checked before use below
        uint8_t *alac_buffer = (uint8_t *)malloc(alac_buf_size);

        AP4_Ordinal sample_index = 0;
        AP4_Result ap4_result =
            localRecP->audio_track->GetSampleIndexForTimeStampMs(timestamp_ms, sample_index);

        if(ap4_result == AP4_SUCCESS)
        {
            // for surround channels
            // Premiere uses Left, Right, Left Rear, Right Rear, Center, LFE
            // ALAC uses Center, Left, Right, Left Rear, Right Rear, LFE
            // http://alac.macosforge.org/trac/browser/trunk/ReadMe.txt
            static const int surround_swizzle[] = {4, 0, 1, 2, 3, 5};
            static const int stereo_swizzle[]   = {0, 1, 2, 3, 4, 5}; // no swizzle, actually

            const int *swizzle = localRecP->numChannels > 2 ? surround_swizzle : stereo_swizzle;

            csSDK_uint32 samples_needed = audioRec7->size; // frames still to deliver
            PrAudioSample pos = 0;                         // write position in output buffers

            AP4_DataBuffer dataBuffer;

            while(samples_needed > 0 && ap4_result == AP4_SUCCESS && result == malNoError)
            {
                AP4_Sample sample;
                ap4_result = localRecP->audio_track->ReadSample(sample_index, sample, dataBuffer);

                if(ap4_result == AP4_SUCCESS)
                {
                    // convert the packet's DTS/duration from the track's
                    // timescale to audio sample frames
                    const PrAudioSample sample_pos = sample.GetDts() *
                                                     localRecP->audioSampleRate /
                                                     localRecP->audio_track->GetMediaTimeScale();

                    const PrAudioSample sample_len = sample.GetDuration() *
                                                     localRecP->audioSampleRate /
                                                     localRecP->audio_track->GetMediaTimeScale();

                    // frames at the head of this packet that precede the
                    // requested start position (non-zero only for the first packet)
                    const PrAudioSample skip_samples =
                        (audioRec7->position > sample_pos) ? (audioRec7->position - sample_pos) : 0;

                    // NOTE(review): signed `long` compared against unsigned
                    // samples_needed / outSamples below — relies on the usual
                    // arithmetic conversions; values are small enough in
                    // practice, but worth confirming.
                    long samples_to_read = sample_len - skip_samples;

                    if(samples_to_read > samples_needed)
                        samples_to_read = samples_needed;
                    else if(samples_to_read < 0)
                        samples_to_read = 0;

                    if(samples_to_read > 0)
                    {
                        // decode the ALAC packet into alac_buffer
                        BitBuffer bits;
                        BitBufferInit(&bits, dataBuffer.UseData(), dataBuffer.GetDataSize());

                        uint32_t outSamples = 0;

                        int32_t alac_result = localRecP->alac->Decode(&bits, alac_buffer,
                                                                      localRecP->alac->mConfig.frameLength,
                                                                      localRecP->numChannels,
                                                                      &outSamples);

                        if(alac_result == 0)
                        {
                            bool eos = false;

                            // short decode: deliver what we got and stop
                            if(samples_to_read > outSamples)
                            {
                                samples_to_read = outSamples;
                                eos = true;
                            }

                            // de-interleave into Premiere's per-channel float
                            // buffers, remapping channel order via swizzle
                            if(localRecP->alac->mConfig.bitDepth == 16)
                            {
                                CopySamples<int16_t>((const int16_t *)alac_buffer,
                                                     audioRec7->buffer,
                                                     localRecP->numChannels,
                                                     swizzle,
                                                     samples_to_read,
                                                     pos,
                                                     skip_samples);
                            }
                            else if(localRecP->alac->mConfig.bitDepth == 32)
                            {
                                CopySamples<int32_t>((const int32_t *)alac_buffer,
                                                     audioRec7->buffer,
                                                     localRecP->numChannels,
                                                     swizzle,
                                                     samples_to_read,
                                                     pos,
                                                     skip_samples);
                            }
                            else
                            {
                                assert(localRecP->alac->mConfig.bitDepth == 20 ||
                                       localRecP->alac->mConfig.bitDepth == 24);
                                CopySamples24(alac_buffer,
                                              audioRec7->buffer,
                                              localRecP->numChannels,
                                              swizzle,
                                              samples_to_read,
                                              pos,
                                              skip_samples,
                                              localRecP->alac->mConfig.bitDepth);
                            }

                            if(eos)
                            {
                                // end of the stream
                                break;
                            }
                        }
                        else
                            assert(false);
                    }

                    samples_needed -= samples_to_read;
                    pos += samples_to_read;

                    sample_index++;
                }
                else
                    assert(false);
            }

            assert(ap4_result == AP4_SUCCESS);

            if(ap4_result != AP4_SUCCESS &&
               ap4_result != AP4_ERROR_EOS &&
               ap4_result != AP4_ERROR_OUT_OF_RANGE)
            {
                result = imFileReadFailed;
            }
        }
        else
        {
            // seek past the end of the track is tolerated (buffers were zeroed)
            assert(ap4_result == AP4_ERROR_EOS);
        }

        free(alac_buffer);
    }

    stdParms->piSuites->memFuncs->unlockHandle(reinterpret_cast<char**>(ldataH));

    assert(result == malNoError);

    return result;
}
/*----------------------------------------------------------------------
|   AP4_MarlinIpmpTrackEncrypter:GetProcessedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_MarlinIpmpTrackEncrypter::GetProcessedSampleSize(AP4_Sample& sample)
{
    // Encrypted size is the plaintext rounded down to whole cipher blocks,
    // plus two extra blocks (presumably IV + CBC padding expansion — same
    // arithmetic as the original AP4_CIPHER_BLOCK_SIZE*(2+(size/block))).
    AP4_Size whole_blocks = sample.GetSize()/AP4_CIPHER_BLOCK_SIZE;
    return (whole_blocks+2)*AP4_CIPHER_BLOCK_SIZE;
}
/*----------------------------------------------------------------------
|   AP4_SampleTable::GenerateStblAtom
|
|   Build a complete 'stbl' container from this sample table: stsd,
|   stsz, stsc, stts, optionally stss and ctts, and either stco (32-bit
|   chunk offsets) or co64 (64-bit). Duration and CTS-delta runs are
|   run-length encoded as the samples are scanned in order.
+---------------------------------------------------------------------*/
AP4_Result
AP4_SampleTable::GenerateStblAtom(AP4_ContainerAtom*& stbl)
{
    // create the stbl container
    stbl = new AP4_ContainerAtom(AP4_ATOM_TYPE_STBL);

    // create the stsd atom
    AP4_StsdAtom* stsd = new AP4_StsdAtom(this);

    // create the stsz atom
    AP4_StszAtom* stsz = new AP4_StszAtom();

    // create the stsc atom
    AP4_StscAtom* stsc = new AP4_StscAtom();

    // create the stts atom
    AP4_SttsAtom* stts = new AP4_SttsAtom();

    // create the stss atom
    AP4_StssAtom* stss = new AP4_StssAtom();

    // declare the ctts atom (created lazily on the first CTS-delta change)
    AP4_CttsAtom* ctts = NULL;

    // start chunk table — running state for the chunk/duration/cts runs
    AP4_Ordinal             current_chunk_index              = 0;
    AP4_Size                current_chunk_size               = 0;
    AP4_Position            current_chunk_offset             = 0;
    AP4_Cardinal            current_samples_in_chunk         = 0;
    AP4_Ordinal             current_sample_description_index = 0;
    AP4_UI32                current_duration                 = 0;
    AP4_Cardinal            current_duration_run             = 0;
    AP4_UI32                current_cts_delta                = 0;
    AP4_Cardinal            current_cts_delta_run            = 0;
    AP4_Array<AP4_Position> chunk_offsets;

    // process all the samples
    // NOTE(review): starts false and is only set true when sample 0 is a
    // sync sample — so "all sync" really means "sample 0 sync and no later
    // non-sync sample seen"; confirm this matches the intent for the stss
    // decision at the bottom.
    bool all_samples_are_sync = false;
    AP4_Cardinal sample_count = GetSampleCount();
    for (AP4_Ordinal i=0; i<sample_count; i++) {
        AP4_Sample sample;
        GetSample(i, sample);

        // update DTS table: close the current run when the duration changes
        AP4_UI32 new_duration = sample.GetDuration();
        if (new_duration != current_duration && current_duration_run != 0) {
            // emit a new stts entry
            stts->AddEntry(current_duration_run, current_duration);

            // reset the run count
            current_duration_run = 0;
        }
        ++current_duration_run;
        current_duration = new_duration;

        // update CTS table: same run-length scheme as the durations
        AP4_UI32 new_cts_delta = sample.GetCtsDelta();
        if (new_cts_delta != current_cts_delta && current_cts_delta_run != 0) {
            // create a ctts atom if we don't have one
            if (ctts == NULL) ctts = new AP4_CttsAtom();

            //emit a new ctts entry
            ctts->AddEntry(current_cts_delta_run, current_cts_delta);

            // reset the run count
            current_cts_delta_run = 0;
        }
        ++current_cts_delta_run;
        current_cts_delta = new_cts_delta;

        // add an entry into the stsz atom
        stsz->AddEntry(sample.GetSize());

        // update the sync sample table (stss entries are 1-based)
        if (sample.IsSync()) {
            stss->AddEntry(i+1);
            if (i==0) all_samples_are_sync = true;
        } else {
            all_samples_are_sync = false;
        }

        // see in which chunk this sample is
        AP4_Ordinal chunk_index = 0;
        AP4_Ordinal position_in_chunk = 0;
        AP4_Result result = GetSampleChunkPosition(i, chunk_index, position_in_chunk);
        if (AP4_SUCCEEDED(result)) {
            if (chunk_index != current_chunk_index && current_samples_in_chunk != 0) {
                // new chunk: flush the previous one (offsets are packed
                // back-to-back, so the next chunk starts where this one ends)
                chunk_offsets.Append(current_chunk_offset);
                current_chunk_offset += current_chunk_size;

                // NOTE(review): first_chunk is always 1 here — presumably
                // AddEntry handles the ISO run encoding internally; verify.
                stsc->AddEntry(1, current_samples_in_chunk, current_sample_description_index+1);

                current_samples_in_chunk = 0;
                current_chunk_size       = 0;
            }
            current_chunk_index = chunk_index;
        }

        // store the sample description index (1-based in stsc)
        current_sample_description_index = sample.GetDescriptionIndex();

        // adjust the current chunk info
        current_chunk_size += sample.GetSize();
        ++current_samples_in_chunk;
    }

    // finish the stts table (flush the last open duration run)
    if (sample_count) stts->AddEntry(current_duration_run, current_duration);

    // finish the ctts table if we have one
    if (ctts) {
        AP4_ASSERT(current_cts_delta_run != 0);

        // add a ctts entry
        ctts->AddEntry(current_cts_delta_run, current_cts_delta);
    }

    // process any unfinished chunk
    if (current_samples_in_chunk != 0) {
        // new chunk
        chunk_offsets.Append(current_chunk_offset);
        stsc->AddEntry(1, current_samples_in_chunk, current_sample_description_index+1);
    }

    // attach the children of stbl
    stbl->AddChild(stsd);
    stbl->AddChild(stsz);
    stbl->AddChild(stsc);
    stbl->AddChild(stts);
    if (ctts) stbl->AddChild(ctts);
    // only emit stss when some-but-not-all samples are sync: an absent stss
    // conventionally means every sample is a sync sample
    if (!all_samples_are_sync && stss->GetEntries().ItemCount() != 0) {
        stbl->AddChild(stss);
    } else {
        delete stss;
    }

    // see if we need a co64 or an stco atom
    // (current_chunk_offset now holds the total size, i.e. the largest offset)
    AP4_Size chunk_count = chunk_offsets.ItemCount();
    if (current_chunk_offset <= 0xFFFFFFFF) {
        // make an array of 32-bit entries
        AP4_UI32* chunk_offsets_32 = new AP4_UI32[chunk_count];
        for (unsigned int i=0; i<chunk_count; i++) {
            chunk_offsets_32[i] = (AP4_UI32)chunk_offsets[i];
        }
        // create the stco atom
        AP4_StcoAtom* stco = new AP4_StcoAtom(&chunk_offsets_32[0], chunk_count);
        stbl->AddChild(stco);
        delete[] chunk_offsets_32;
    } else {
        // create the co64 atom
        AP4_Co64Atom* co64 = new AP4_Co64Atom(&chunk_offsets[0], chunk_count);
        stbl->AddChild(co64);
    }

    return AP4_SUCCESS;
}
/*----------------------------------------------------------------------
|   AP4_AtomSampleTable::GetSample
|
|   Fill `sample` with the metadata of sample `index` (0-based): its
|   byte offset within the stream, size, DTS/CTS, duration and sample
|   description index, by combining the stsc/stco|co64/stsz/stts/ctts
|   tables. Returns AP4_SUCCESS or an error code.
+---------------------------------------------------------------------*/
AP4_Result
AP4_AtomSampleTable::GetSample(AP4_Ordinal index, AP4_Sample& sample)
{
    AP4_Result result;

    // check that we have a chunk offset table
    if (m_StcoAtom == NULL && m_Co64Atom == NULL) {
        return AP4_ERROR_INVALID_FORMAT;
    }

    // MP4 uses 1-based indexes internally, so adjust by one
    index++;

    // find out in which chunk this sample is located
    AP4_Ordinal chunk, skip, desc;
    result = m_StscAtom->GetChunkForSample(index, chunk, skip, desc);
    if (AP4_FAILED(result)) return result;

    // check that the result is within bounds
    if (skip > index) return AP4_ERROR_INTERNAL;

    // get the atom offset for this chunk
    AP4_Offset offset = 0;
    if (m_StcoAtom) result = m_StcoAtom->GetChunkOffset(chunk, offset);
    else if (m_Co64Atom) result = m_Co64Atom->GetChunkOffset(chunk, offset);
    else result = AP4_ERROR_INTERNAL;
    if (AP4_FAILED(result)) return result;

    // compute the additional offset inside the chunk: the ranged
    // GetSampleSize call sums the sizes of the samples that precede
    // this one in the chunk (samples [index-skip, index))
    AP4_Size size;
    result = m_StszAtom->GetSampleSize(index - skip, index, size);
    if (AP4_FAILED(result)) return result;
    offset += size;

    // set the description index
    sample.SetDescriptionIndex(desc-1); // adjust for 0-based indexes

    // set the dts and cts
    AP4_TimeStamp dts;
    AP4_Duration duration;
    result = m_SttsAtom->GetDts(index, dts, duration);
    if (AP4_FAILED(result)) return result;
    sample.SetDts(dts);
    sample.SetDuration(duration);
    if (m_CttsAtom == NULL) {
        sample.SetCts(dts);
    } else {
        AP4_UI32 cts_offset;
        result = m_CttsAtom->GetCtsOffset(index, cts_offset);
        if (AP4_FAILED(result)) return result;
        // HACK: the spec says this field is unsigned, but files with
        // negative (two's-complement) offsets exist, so reinterpret the
        // 32-bit value as signed. A value cast is used here: the previous
        // *((signed long*)&cts_offset) read sizeof(long) bytes from a
        // 4-byte object (out-of-bounds on LP64) and violated strict
        // aliasing; (AP4_SI32) preserves the intended bit pattern portably.
        sample.SetCts(dts + (AP4_SI32)cts_offset);
    }

    // set the size
    AP4_Size sample_size;
    result = m_StszAtom->GetSampleSize(index, sample_size);
    if (AP4_FAILED(result)) return result;
    sample.SetSize(sample_size);

    // set the offset
    sample.SetOffset(offset);

    // set the data stream
    sample.SetDataStream(m_SampleStream);

    return AP4_SUCCESS;
}
/*----------------------------------------------------------------------
|   AP4_Processor::TrackHandler::GetProcessedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_Processor::TrackHandler::GetProcessedSampleSize(AP4_Sample& sample)
{
    // Default implementation for handlers that do not alter the payload:
    // the processed size is simply the sample's current size.
    const AP4_Size unchanged_size = sample.GetSize();
    return unchanged_size;
}
/*----------------------------------------------------------------------
|   AP4_OmaDcfCtrSampleEncrypter::GetEncryptedSampleSize
+---------------------------------------------------------------------*/
AP4_Size
AP4_OmaDcfCtrSampleEncrypter::GetEncryptedSampleSize(AP4_Sample& sample)
{
    // CTR mode keeps the payload size; the fixed overhead added per sample
    // is AP4_CIPHER_BLOCK_SIZE+1 bytes (presumably counter/header material).
    const AP4_Size payload_size = sample.GetSize();
    const AP4_Size overhead     = AP4_CIPHER_BLOCK_SIZE + 1;
    return payload_size + overhead;
}