Example No. 1
/*----------------------------------------------------------------------
|   Fragment
+---------------------------------------------------------------------*/
static void
Fragment(AP4_File&                input_file,
         AP4_ByteStream&          output_stream,
         AP4_Array<TrackCursor*>& cursors,
         unsigned int             fragment_duration,
         AP4_UI32                 timescale,
         AP4_UI32                 track_id,
         bool                     create_segment_index)
{
    AP4_List<FragmentInfo> fragments;
    TrackCursor*           index_cursor = NULL;
    AP4_Result             result;
    
    AP4_Movie* input_movie = input_file.GetMovie();
    if (input_movie == NULL) {
        fprintf(stderr, "ERROR: no moov found in the input file\n");
        return;
    }

    // create the output file object
    AP4_Movie* output_movie = new AP4_Movie(1000);
    
    // create an mvex container
    AP4_ContainerAtom* mvex = new AP4_ContainerAtom(AP4_ATOM_TYPE_MVEX);
    AP4_MehdAtom*      mehd = new AP4_MehdAtom(0);
    mvex->AddChild(mehd);
    
    // add an output track for each track in the input file
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        AP4_Track* track = cursors[i]->m_Track;
        
        // skip non matching tracks if we have a selector
        if (track_id && track->GetId() != track_id) {
            continue;
        }
        
        result = cursors[i]->Init();
        if (AP4_FAILED(result)) {
            fprintf(stderr, "ERROR: failed to init sample cursor (%d) for track %d\n", result, track->GetId());
            return;
        }

        // create a sample table (with no samples) to hold the sample description
        AP4_SyntheticSampleTable* sample_table = new AP4_SyntheticSampleTable();
        for (unsigned int j=0; j<track->GetSampleDescriptionCount(); j++) {
            AP4_SampleDescription* sample_description = track->GetSampleDescription(j);
            sample_table->AddSampleDescription(sample_description, false);
        }
        
        // create the track
        AP4_Track* output_track = new AP4_Track(sample_table,
                                                track->GetId(),
                                                timescale?timescale:1000,
                                                AP4_ConvertTime(track->GetDuration(),
                                                                input_movie->GetTimeScale(),
                                                                timescale?timescale:1000),
                                                timescale?timescale:track->GetMediaTimeScale(),
                                                0,//track->GetMediaDuration(),
                                                track);
        output_movie->AddTrack(output_track);
        
        // add a trex entry to the mvex container
        AP4_TrexAtom* trex = new AP4_TrexAtom(track->GetId(),
                                              1,
                                              0,
                                              0,
                                              0);
        mvex->AddChild(trex);
    }
    
    // select the anchor cursor
    TrackCursor* anchor_cursor = NULL;
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        if (cursors[i]->m_Track->GetId() == track_id) {
            anchor_cursor = cursors[i];
        }
    }
    if (anchor_cursor == NULL) {
        for (unsigned int i=0; i<cursors.ItemCount(); i++) {
            // use this as the anchor track if it is the first video track
            if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
                anchor_cursor = cursors[i];
                break;
            }
        }
    }
    if (anchor_cursor == NULL) {
        // no video track to anchor with, pick the first audio track
        for (unsigned int i=0; i<cursors.ItemCount(); i++) {
            if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_AUDIO) {
                anchor_cursor = cursors[i];
                break;
            }
        }
        // no audio track to anchor with, pick the first subtitles track
        if (anchor_cursor == NULL) {
            for (unsigned int i=0; i<cursors.ItemCount(); i++) {
                if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_SUBTITLES) {
                    anchor_cursor = cursors[i];
                    break;
                }
            }
        }
    }
    if (anchor_cursor == NULL) {
        // this should never happen
        fprintf(stderr, "ERROR: no anchor track\n");
        return;
    }
    if (create_segment_index) {
        index_cursor = anchor_cursor;
    }
    if (Options.debug) {
        printf("Using track ID %d as anchor\n", anchor_cursor->m_Track->GetId());
    }
    
    // update the mehd duration
    mehd->SetDuration(output_movie->GetDuration());
    
    // add the mvex container to the moov container
    output_movie->GetMoovAtom()->AddChild(mvex);
    
    // compute all the fragments
    unsigned int sequence_number = 1;
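    // each pass of this loop emits one fragment: tracks whose fragment index
    // lags behind the anchor's are fragmented first, then the anchor itself advances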
    for(;;) {
        TrackCursor* cursor = NULL;

        // pick the first track with a fragment index lower than the anchor's
        for (unsigned int i=0; i<cursors.ItemCount(); i++) {
            if (track_id && cursors[i]->m_Track->GetId() != track_id) continue;
            if (cursors[i]->m_Eos) continue;
            if (cursors[i]->m_FragmentIndex < anchor_cursor->m_FragmentIndex) {
                cursor = cursors[i];
                break;
            }
        }
        
        // check if we found a non-anchor cursor to use
        if (cursor == NULL) {
            // the anchor should be used in this round, check if we can use it
            if (anchor_cursor->m_Eos) {
                // the anchor is done, pick a new anchor unless we need to trim
                anchor_cursor = NULL;
                if (!Options.trim) {
                    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
                        if (track_id && cursors[i]->m_Track->GetId() != track_id) continue;
                        if (cursors[i]->m_Eos) continue;
                        if (anchor_cursor == NULL ||
                            cursors[i]->m_Track->GetType() == AP4_Track::TYPE_VIDEO ||
                            cursors[i]->m_Track->GetType() == AP4_Track::TYPE_AUDIO) {
                            anchor_cursor = cursors[i];
                            if (Options.debug) {
                                printf("+++ New anchor: Track ID %d\n", anchor_cursor->m_Track->GetId());
                            }
                        }
                    }
                }
            }
            cursor = anchor_cursor;
        }
        if (cursor == NULL) break; // all done
        
        // decide how many samples go into this fragment
        AP4_UI64 target_dts;
        if (cursor == anchor_cursor) {
            // compute the current dts in milliseconds
            AP4_UI64 anchor_dts_ms = AP4_ConvertTime(cursor->m_Sample.GetDts(),
                                                     cursor->m_Track->GetMediaTimeScale(),
                                                     1000);
            // round to the nearest multiple of fragment_duration
            AP4_UI64 anchor_position = (anchor_dts_ms + (fragment_duration/2))/fragment_duration;
            
            // pick the next fragment_duration multiple at our target
            target_dts = AP4_ConvertTime(fragment_duration*(anchor_position+1),
                                         1000,
                                         cursor->m_Track->GetMediaTimeScale());
        } else {
            target_dts = AP4_ConvertTime(anchor_cursor->m_Sample.GetDts(),
                                         anchor_cursor->m_Track->GetMediaTimeScale(),
                                         cursor->m_Track->GetMediaTimeScale());
            if (target_dts <= cursor->m_Sample.GetDts()) {
                // we must be at the end, past the last anchor sample, just use the target duration
                target_dts = AP4_ConvertTime(fragment_duration*(cursor->m_FragmentIndex+1),
                                            1000,
                                            cursor->m_Track->GetMediaTimeScale());
                
                if (target_dts <= cursor->m_Sample.GetDts()) {
                    // we're still behind, there may have been an alignment/rounding error, just advance by one segment duration
                    target_dts = cursor->m_Sample.GetDts()+AP4_ConvertTime(fragment_duration,
                                                                           1000,
                                                                           cursor->m_Track->GetMediaTimeScale());
                }
            }
        }

        unsigned int end_sample_index = cursor->m_Samples->GetSampleCount();
        AP4_UI64 smallest_diff = (AP4_UI64)(0xFFFFFFFFFFFFFFFFULL);
        AP4_Sample sample;
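        // scan forward for the sync sample (or the end of the track) whose DTS is closest to the target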
        for (unsigned int i=cursor->m_SampleIndex+1; i<=cursor->m_Samples->GetSampleCount(); i++) {
            AP4_UI64 dts;
            if (i < cursor->m_Samples->GetSampleCount()) {
                result = cursor->m_Samples->GetSample(i, sample);
                if (AP4_FAILED(result)) {
                    fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", i, result);
                    return;
                }
                if (!sample.IsSync()) continue; // only look for sync samples
                dts = sample.GetDts();
            } else {
                result = cursor->m_Samples->GetSample(i-1, sample);
                if (AP4_FAILED(result)) {
                    fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", i-1, result);
                    return;
                }
                dts = sample.GetDts()+sample.GetDuration();
            }
            AP4_SI64 diff = dts-target_dts;
            AP4_UI64 abs_diff = diff<0?-diff:diff;
            if (abs_diff < smallest_diff) {
                // this sample is the closest to the target so far
                end_sample_index = i;
                smallest_diff = abs_diff;
            }
            if (diff >= 0) {
                // this sample is past the target, it is not going to get any better, stop looking
                break;
            }
        }
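        // nothing to emit if this cursor has already reached the end of its samples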
        if (cursor->m_Eos) continue;
        
        if (Options.debug) {
            if (cursor == anchor_cursor) {
                printf("====");
            } else {
                printf("----");
            }
            printf(" Track ID %d - dts=%lld, target=%lld, start=%d, end=%d/%d\n",
                   cursor->m_Track->GetId(),
                   cursor->m_Sample.GetDts(),
                   target_dts,
                   cursor->m_SampleIndex,
                   end_sample_index,
                   cursor->m_Track->GetSampleCount());
        }
        
        // emit a fragment for the selected track
        if (Options.verbosity > 1) {
            printf("fragment: track ID %d ", cursor->m_Track->GetId());
        }

        // decide which sample description index to use
        // (this is not very sophisticated, we only look at the sample description
        // index of the first sample in the group, which may not be correct. This
        // should be fixed later)
        unsigned int sample_desc_index = cursor->m_Sample.GetDescriptionIndex();
        unsigned int tfhd_flags = AP4_TFHD_FLAG_DEFAULT_BASE_IS_MOOF;
        if (sample_desc_index > 0) {
            tfhd_flags |= AP4_TFHD_FLAG_SAMPLE_DESCRIPTION_INDEX_PRESENT;
        }
        if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
            tfhd_flags |= AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT;
        }
        
        // setup the moof structure
        AP4_ContainerAtom* moof = new AP4_ContainerAtom(AP4_ATOM_TYPE_MOOF);
        AP4_MfhdAtom* mfhd = new AP4_MfhdAtom(sequence_number++);
        moof->AddChild(mfhd);
        AP4_ContainerAtom* traf = new AP4_ContainerAtom(AP4_ATOM_TYPE_TRAF);
        AP4_TfhdAtom* tfhd = new AP4_TfhdAtom(tfhd_flags,
                                              cursor->m_Track->GetId(),
                                              0,
                                              sample_desc_index+1,
                                              0,
                                              0,
                                              0);
        if (tfhd_flags & AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT) {
            tfhd->SetDefaultSampleFlags(0x1010000); // sample_is_non_sync_sample=1, sample_depends_on=1 (not I frame)
        }
        
        traf->AddChild(tfhd);
        if (!Options.no_tfdt) {
            AP4_TfdtAtom* tfdt = new AP4_TfdtAtom(1, cursor->m_Timestamp);
            traf->AddChild(tfdt);
        }
        AP4_UI32 trun_flags = AP4_TRUN_FLAG_DATA_OFFSET_PRESENT     |
                              AP4_TRUN_FLAG_SAMPLE_DURATION_PRESENT |
                              AP4_TRUN_FLAG_SAMPLE_SIZE_PRESENT;
        AP4_UI32 first_sample_flags = 0;
        if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
            trun_flags |= AP4_TRUN_FLAG_FIRST_SAMPLE_FLAGS_PRESENT;
            first_sample_flags = 0x2000000; // sample_depends_on=2 (I frame)
        }
        AP4_TrunAtom* trun = new AP4_TrunAtom(trun_flags, 0, first_sample_flags);
        
        traf->AddChild(trun);
        moof->AddChild(traf);
        
        // create a new FragmentInfo object to store the fragment details
        FragmentInfo* fragment = new FragmentInfo(cursor->m_Samples, cursor->m_Tfra, cursor->m_Timestamp, moof);
        fragments.Add(fragment);
        
        // add samples to the fragment
        unsigned int                   sample_count = 0;
        AP4_Array<AP4_TrunAtom::Entry> trun_entries;
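        // the mdat starts with an 8-byte atom header; sample sizes are accumulated below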
        fragment->m_MdatSize = AP4_ATOM_HEADER_SIZE;
        for (;;) {
            // if we have one non-zero CTS delta, we'll need to express it
            if (cursor->m_Sample.GetCtsDelta()) {
                trun->SetFlags(trun->GetFlags() | AP4_TRUN_FLAG_SAMPLE_COMPOSITION_TIME_OFFSET_PRESENT);
            }
            
            // add one sample
            trun_entries.SetItemCount(sample_count+1);
            AP4_TrunAtom::Entry& trun_entry = trun_entries[sample_count];
            trun_entry.sample_duration                = timescale?
                                                        (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetDuration(),
                                                                                  cursor->m_Track->GetMediaTimeScale(),
                                                                                  timescale):
                                                        cursor->m_Sample.GetDuration();
            trun_entry.sample_size                    = cursor->m_Sample.GetSize();
            trun_entry.sample_composition_time_offset = timescale?
                                                        (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetCtsDelta(),
                                                                                  cursor->m_Track->GetMediaTimeScale(),
                                                                                  timescale):
                                                        cursor->m_Sample.GetCtsDelta();
                        
            fragment->m_SampleIndexes.SetItemCount(sample_count+1);
            fragment->m_SampleIndexes[sample_count] = cursor->m_SampleIndex;
            fragment->m_MdatSize += trun_entry.sample_size;
            fragment->m_Duration += trun_entry.sample_duration;
            
            // next sample
            cursor->m_Timestamp += trun_entry.sample_duration;
            result = cursor->SetSampleIndex(cursor->m_SampleIndex+1);
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", cursor->m_SampleIndex+1, result);
                return;
            }
            sample_count++;
            if (cursor->m_Eos) {
                if (Options.debug) {
                    printf("[Track ID %d has reached the end]\n", cursor->m_Track->GetId());
                }
                break;
            }
            if (cursor->m_SampleIndex >= end_sample_index) {
                break; // done with this fragment
            }
        }
        if (Options.verbosity > 1) {
            printf(" %d samples\n", sample_count);
        }
                
        // update moof and children
        trun->SetEntries(trun_entries);
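        // with default-base-is-moof, the data offset is relative to the start of the moof:
        // the first sample byte is located right after the 8-byte mdat header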
        trun->SetDataOffset((AP4_UI32)moof->GetSize()+AP4_ATOM_HEADER_SIZE);
        
        // advance the cursor's fragment index
        ++cursor->m_FragmentIndex;
    }
    
    // write the ftyp atom
    AP4_FtypAtom* ftyp = input_file.GetFileType();
    if (ftyp) {
        // keep the existing brand and compatible brands
        AP4_Array<AP4_UI32> compatible_brands;
        compatible_brands.EnsureCapacity(ftyp->GetCompatibleBrands().ItemCount()+1);
        for (unsigned int i=0; i<ftyp->GetCompatibleBrands().ItemCount(); i++) {
            compatible_brands.Append(ftyp->GetCompatibleBrands()[i]);
        }
        
        // add the compatible brand if it is not already there
        if (!ftyp->HasCompatibleBrand(AP4_FILE_BRAND_ISO5)) {
            compatible_brands.Append(AP4_FILE_BRAND_ISO5);
        }

        // create a replacement
        AP4_FtypAtom* new_ftyp = new AP4_FtypAtom(ftyp->GetMajorBrand(),
                                                  ftyp->GetMinorVersion(),
                                                  &compatible_brands[0],
                                                  compatible_brands.ItemCount());
        ftyp = new_ftyp;
    } else {
        AP4_UI32 compat = AP4_FILE_BRAND_ISO5;
        ftyp = new AP4_FtypAtom(AP4_FTYP_BRAND_MP42, 0, &compat, 1);
    }
    ftyp->Write(output_stream);
    delete ftyp;
    
    // write the moov atom
    output_movie->GetMoovAtom()->Write(output_stream);

    // write the (not-yet fully computed) index if needed
    AP4_SidxAtom* sidx = NULL;
    AP4_Position  sidx_position = 0;
    output_stream.Tell(sidx_position);
    if (create_segment_index) {
        sidx = new AP4_SidxAtom(index_cursor->m_Track->GetId(),
                                index_cursor->m_Track->GetMediaTimeScale(),
                                0,
                                0);
        // reserve space for the entries now, but they will be computed and updated later
        sidx->SetReferenceCount(fragments.ItemCount());
        sidx->Write(output_stream);
    }
    
    // write all fragments
    for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem();
                                       item;
                                       item = item->GetNext()) {
        FragmentInfo* fragment = item->GetData();

        // remember the time and position of this fragment
        output_stream.Tell(fragment->m_MoofPosition);
        fragment->m_Tfra->AddEntry(fragment->m_Timestamp, fragment->m_MoofPosition);
        
        // write the moof
        fragment->m_Moof->Write(output_stream);
        
        // write mdat
        output_stream.WriteUI32(fragment->m_MdatSize);
        output_stream.WriteUI32(AP4_ATOM_TYPE_MDAT);
        AP4_DataBuffer sample_data;
        AP4_Sample     sample;
        for (unsigned int i=0; i<fragment->m_SampleIndexes.ItemCount(); i++) {
            // get the sample
            result = fragment->m_Samples->GetSample(fragment->m_SampleIndexes[i], sample);
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to get sample %d (%d)\n", fragment->m_SampleIndexes[i], result);
                return;
            }

            // read the sample data
            result = sample.ReadData(sample_data);
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to read sample data for sample %d (%d)\n", fragment->m_SampleIndexes[i], result);
                return;
            }
            
            // write the sample data
            result = output_stream.Write(sample_data.GetData(), sample_data.GetDataSize());
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to write sample data (%d)\n", result);
                return;
            }
        }
    }

    // update the index and re-write it if needed
    if (create_segment_index) {
        unsigned int segment_index = 0;
        AP4_SidxAtom::Reference reference;
        for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem();
                                           item;
                                           item = item->GetNext()) {
            FragmentInfo* fragment = item->GetData();
            reference.m_ReferencedSize     = (AP4_UI32)(fragment->m_Moof->GetSize()+fragment->m_MdatSize);
            reference.m_SubsegmentDuration = fragment->m_Duration;
            reference.m_StartsWithSap      = true;
            sidx->SetReference(segment_index++, reference);
        }
        AP4_Position here = 0;
        output_stream.Tell(here);
        output_stream.Seek(sidx_position);
        sidx->Write(output_stream);
        output_stream.Seek(here);
        delete sidx;
    }
    
    // create an mfra container and write out the index
    AP4_ContainerAtom mfra(AP4_ATOM_TYPE_MFRA);
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        if (track_id && cursors[i]->m_Track->GetId() != track_id) {
            continue;
        }
        mfra.AddChild(cursors[i]->m_Tfra);
        cursors[i]->m_Tfra = NULL;
    }
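    // the mfro records the total size of the enclosing mfra, including the 16-byte mfro atom itself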
    AP4_MfroAtom* mfro = new AP4_MfroAtom((AP4_UI32)mfra.GetSize()+16);
    mfra.AddChild(mfro);
    result = mfra.Write(output_stream);
    if (AP4_FAILED(result)) {
        fprintf(stderr, "ERROR: failed to write 'mfra' (%d)\n", result);
        return;
    }
    
    // cleanup
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        delete cursors[i];
    }
    // delete the moof atoms before releasing the FragmentInfo objects that reference them
    for (AP4_List<FragmentInfo>::Item* item = fragments.FirstItem();
                                       item;
                                       item = item->GetNext()) {
        FragmentInfo* fragment = item->GetData();
        delete fragment->m_Moof;
    }
    fragments.DeleteReferences();
    delete output_movie;
}
Example No. 2
/*----------------------------------------------------------------------
|   Fragment
+---------------------------------------------------------------------*/
static void
Fragment(AP4_File&       input_file,
         AP4_ByteStream& output_stream,
         unsigned int    fragment_duration,
         AP4_UI32        timescale)
{
    AP4_Result result;
    
    AP4_Movie* input_movie = input_file.GetMovie();
    if (input_movie == NULL) {
        fprintf(stderr, "ERROR: no moov found in the input file\n");
        return;
    }

    // create the output file object
    AP4_Movie* output_movie = new AP4_Movie(1000);
    
    // create an mvex container
    AP4_ContainerAtom* mvex = new AP4_ContainerAtom(AP4_ATOM_TYPE_MVEX);
    AP4_MehdAtom* mehd = new AP4_MehdAtom(0); 
    mvex->AddChild(mehd);
    
    // create a cursor list to keep track of the tracks we will read from
    AP4_Array<TrackCursor*> cursors;
    
    // add an output track for each track in the input file
    for (AP4_List<AP4_Track>::Item* track_item = input_movie->GetTracks().FirstItem();
                                    track_item;
                                    track_item = track_item->GetNext()) {
        AP4_Track* track = track_item->GetData();
        TrackCursor* cursor = new TrackCursor();
        cursor->m_TrackId = track->GetId();
        cursor->m_Tfra->SetTrackId(track->GetId());
        cursors.Append(cursor);
                    
        // create a sample table (with no samples) to hold the sample description
        AP4_SyntheticSampleTable* sample_table = new AP4_SyntheticSampleTable();
        for (unsigned int i=0; i<track->GetSampleDescriptionCount(); i++) {
            AP4_SampleDescription* sample_description = track->GetSampleDescription(i);
            sample_table->AddSampleDescription(sample_description, false);
        }
        
        // create the track
        AP4_Track* output_track = new AP4_Track(track->GetType(),
                                                sample_table,
                                                cursor->m_TrackId,
                                                timescale?timescale:1000,
                                                AP4_ConvertTime(track->GetDuration(),
                                                                input_movie->GetTimeScale(),
                                                                timescale?timescale:1000),
                                                timescale?timescale:track->GetMediaTimeScale(),
                                                0,//track->GetMediaDuration(),
                                                track->GetTrackLanguage(),
                                                track->GetWidth(),
                                                track->GetHeight());
        output_movie->AddTrack(output_track);
        result = cursor->SetTrack(track);
        if (AP4_FAILED(result)) {
            fprintf(stderr, "ERROR: failed to read sample (%d)\n", result);
            return;
        }
                
        // add a trex entry to the mvex container
        AP4_TrexAtom* trex = new AP4_TrexAtom(cursor->m_TrackId,
                                              1,
                                              0,
                                              0,
                                              0);
        mvex->AddChild(trex);
    }
    
    if (cursors.ItemCount() == 0) {
        fprintf(stderr, "ERROR: no track found\n");
        return;
    }

    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        if (cursors[i]->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
            cursors[i]->m_TargetDuration = AP4_ConvertTime(fragment_duration>AP4_FRAGMENTER_FRAGMENT_DURATION_TOLERANCE ?
                                                           fragment_duration-AP4_FRAGMENTER_FRAGMENT_DURATION_TOLERANCE : 0,
                                                           1000,
                                                           cursors[i]->m_Track->GetMediaTimeScale());
        } else {
            cursors[i]->m_TargetDuration = AP4_ConvertTime(fragment_duration,
                                                           1000,
                                                           cursors[i]->m_Track->GetMediaTimeScale());
        }
    }
    
    // update the mehd duration
    mehd->SetDuration(output_movie->GetDuration());
    
    // add the mvex container to the moov container
    output_movie->GetMoovAtom()->AddChild(mvex);
    
    // write the ftyp atom
    AP4_FtypAtom* ftyp = input_file.GetFileType();
    if (ftyp) {
        ftyp->Write(output_stream);
    }
                 
    // write the moov atom
    output_movie->GetMoovAtom()->Write(output_stream);
    
    // write all the fragments
    unsigned int sequence_number = 1;
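    // each pass emits one fragment for the non-finished track with the lowest DTS,
    // which keeps the tracks roughly interleaved in time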
    for(;;) {
        // select the next track to read from
        TrackCursor* cursor = NULL;
        AP4_UI64 min_dts = (AP4_UI64)(-1);
        for (unsigned int i=0; i<cursors.ItemCount(); i++) {
            if (cursors[i]->m_Eos) continue;
            AP4_UI64 dts = AP4_ConvertTime(cursors[i]->m_Sample.GetDts(),
                                           cursors[i]->m_Track->GetMediaTimeScale(),
                                           AP4_FRAGMENTER_BASE_TIMESCALE);
            if (dts < min_dts) {
                min_dts = dts;
                cursor = cursors[i];
            }
        }
        if (cursor == NULL) break; // all done
        
        // compute the target end for the segment
        cursor->m_EndDts = cursor->m_Sample.GetDts()+cursor->m_TargetDuration;
        
        // emit a fragment for the selected track
        if (Options.verbosity > 0) {
            printf("fragment: track ID %d ", cursor->m_Track->GetId());
        }

        // remember the time and position of this fragment
        AP4_Position moof_offset = 0;
        output_stream.Tell(moof_offset);
        cursor->m_Tfra->AddEntry(cursor->m_Timestamp, moof_offset);
        
        // decide which sample description index to use
        // (this is not very sophisticated, we only look at the sample description
        // index of the first sample in the group, which may not be correct. This
        // should be fixed later)
        unsigned int sample_desc_index = cursor->m_Sample.GetDescriptionIndex();
        unsigned int tfhd_flags = AP4_TFHD_FLAG_DEFAULT_BASE_IS_MOOF;
        if (sample_desc_index > 0) {
            tfhd_flags |= AP4_TFHD_FLAG_SAMPLE_DESCRIPTION_INDEX_PRESENT;
        }
        if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
            tfhd_flags |= AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT;
        }
            
        // setup the moof structure
        AP4_ContainerAtom* moof = new AP4_ContainerAtom(AP4_ATOM_TYPE_MOOF);
        AP4_MfhdAtom* mfhd = new AP4_MfhdAtom(sequence_number++);
        moof->AddChild(mfhd);
        AP4_ContainerAtom* traf = new AP4_ContainerAtom(AP4_ATOM_TYPE_TRAF);
        AP4_TfhdAtom* tfhd = new AP4_TfhdAtom(tfhd_flags,
                                              cursor->m_TrackId,
                                              0,
                                              sample_desc_index+1,
                                              0,
                                              0,
                                              0);
        if (tfhd_flags & AP4_TFHD_FLAG_DEFAULT_SAMPLE_FLAGS_PRESENT) {
            tfhd->SetDefaultSampleFlags(0x1010000); // sample_is_non_sync_sample=1, sample_depends_on=1 (not I frame)
        }
        
        traf->AddChild(tfhd);
        AP4_TfdtAtom* tfdt = new AP4_TfdtAtom(1, cursor->m_Timestamp);
        traf->AddChild(tfdt);
        AP4_UI32 trun_flags = AP4_TRUN_FLAG_DATA_OFFSET_PRESENT     |
                              AP4_TRUN_FLAG_SAMPLE_DURATION_PRESENT |
                              AP4_TRUN_FLAG_SAMPLE_SIZE_PRESENT;
        AP4_UI32 first_sample_flags = 0;
        if (cursor->m_Track->GetType() == AP4_Track::TYPE_VIDEO) {
            trun_flags |= AP4_TRUN_FLAG_FIRST_SAMPLE_FLAGS_PRESENT;
            first_sample_flags = 0x2000000; // sample_depends_on=2 (I frame)
        }
        AP4_TrunAtom* trun = new AP4_TrunAtom(trun_flags, 0, first_sample_flags);
        
        traf->AddChild(trun);
        moof->AddChild(traf);
            
        // decide which samples go in this fragment
        AP4_Array<AP4_UI32>            sample_indexes;
        unsigned int                   sample_count = 0;
        AP4_Array<AP4_TrunAtom::Entry> trun_entries;
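        // start the mdat size at its 8-byte atom header; each sample's size is added below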
        AP4_UI32                       mdat_size = AP4_ATOM_HEADER_SIZE;
        for (;;) {
            // if we have one non-zero CTS delta, we'll need to express it
            if (cursor->m_Sample.GetCtsDelta()) {
                trun->SetFlags(trun->GetFlags() | AP4_TRUN_FLAG_SAMPLE_COMPOSITION_TIME_OFFSET_PRESENT);
            }
            
            // add one sample
            trun_entries.SetItemCount(sample_count+1);
            AP4_TrunAtom::Entry& trun_entry = trun_entries[sample_count];
            trun_entry.sample_duration                = timescale?
                                                        (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetDuration(),
                                                                                  cursor->m_Track->GetMediaTimeScale(),
                                                                                  timescale):
                                                        cursor->m_Sample.GetDuration();
            trun_entry.sample_size                    = cursor->m_Sample.GetSize();
            trun_entry.sample_composition_time_offset = timescale?
                                                        (AP4_UI32)AP4_ConvertTime(cursor->m_Sample.GetCtsDelta(),
                                                                                  cursor->m_Track->GetMediaTimeScale(),
                                                                                  timescale):
                                                        cursor->m_Sample.GetCtsDelta();
                        
            sample_indexes.SetItemCount(sample_count+1);
            sample_indexes[sample_count] = cursor->m_SampleIndex;
            mdat_size += trun_entry.sample_size;
            
            // next sample
            cursor->m_Timestamp += trun_entry.sample_duration;
            cursor->m_SampleIndex++;
            sample_count++;
            if (cursor->m_SampleIndex >= cursor->m_Track->GetSampleCount()) {
                cursor->m_Eos = true;

                AP4_UI64 end_dts = cursor->m_Sample.GetDts()+cursor->m_Sample.GetDuration();
                cursor->m_Sample.Reset();
                cursor->m_Sample.SetDts(end_dts);

                break;
            }
            result = cursor->m_Track->GetSample(cursor->m_SampleIndex, cursor->m_Sample);
            if (AP4_FAILED(result)) {
                cursor->m_Eos = true;

                AP4_UI64 end_dts = cursor->m_Sample.GetDts()+cursor->m_Sample.GetDuration();
                cursor->m_Sample.Reset();
                cursor->m_Sample.SetDts(end_dts);

                break;
            }
            if (cursor->m_Sample.IsSync()) {
                if (cursor->m_Sample.GetDts() >= cursor->m_EndDts) {
                    break; // done with this segment
                }
            }
        }
        if (Options.verbosity) {
            printf(" %d samples\n", sample_count);
        }
                
        // update moof and children
        trun->SetEntries(trun_entries);
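        // the data offset is relative to the start of the moof (default-base-is-moof);
        // the first sample byte follows the 8-byte mdat header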
        trun->SetDataOffset((AP4_UI32)moof->GetSize()+AP4_ATOM_HEADER_SIZE);
        
        // write moof
        moof->Write(output_stream);
        
        // write mdat
        output_stream.WriteUI32(mdat_size);
        output_stream.WriteUI32(AP4_ATOM_TYPE_MDAT);
        AP4_Sample     sample;
        AP4_DataBuffer sample_data;
        for (unsigned int i=0; i<sample_indexes.ItemCount(); i++) {
            result = cursor->m_Track->ReadSample(sample_indexes[i], sample, sample_data);
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to read sample %d (%d)\n", sample_indexes[i], result);
                return;
            }
            result = output_stream.Write(sample_data.GetData(), sample_data.GetDataSize());
            if (AP4_FAILED(result)) {
                fprintf(stderr, "ERROR: failed to write sample data (%d)\n", result);
                return;
            }
        }
        
        // cleanup
        delete moof;
    }
    
    // create an mfra container and write out the index
    AP4_ContainerAtom mfra(AP4_ATOM_TYPE_MFRA);
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        mfra.AddChild(cursors[i]->m_Tfra);
        cursors[i]->m_Tfra = NULL;
    }
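    // the mfro stores the total mfra size: the children added so far plus the 16-byte mfro itself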
    AP4_MfroAtom* mfro = new AP4_MfroAtom((AP4_UI32)mfra.GetSize()+16);
    mfra.AddChild(mfro);
    result = mfra.Write(output_stream);
    if (AP4_FAILED(result)) {
        fprintf(stderr, "ERROR: failed to write 'mfra' (%d)\n", result);
        return;
    }
    
    // cleanup
    for (unsigned int i=0; i<cursors.ItemCount(); i++) {
        delete cursors[i];
    }
    delete output_movie;
}