Example #1
static stts_t* stts_create(mp4_context_t const* mp4_context,
                           samples_t const* first, samples_t const* last)
{
	unsigned int entries = 0;
	unsigned int samples = last - first;
	stts_t* stts = stts_init();
	stts->table_ = (stts_table_t*) realloc(stts->table_, samples * sizeof(stts_table_t));

	// run-length encode the deltas between consecutive pts_ values into
	// (sample_count, sample_duration) entries
	while(first != last)
	{
		unsigned int sample_count = 1;
		unsigned int sample_duration =
			(unsigned int)(first[1].pts_ - first[0].pts_);
		while(++first != last)
		{
			if((first[1].pts_ - first[0].pts_) != sample_duration)
				break;
			++sample_count;
		}

		stts->table_[entries].sample_count_ = sample_count;
		stts->table_[entries].sample_duration_ = sample_duration;
		++entries;
	}
	stts->entries_ = entries;

	if(stts_get_samples(stts) != samples)
	{
		MP4_WARNING("ERROR: stts_get_samples=%d, should be %d\n",
			stts_get_samples(stts), samples);
	}
	return stts;
}
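The consistency check above compares the rebuilt table against stts_get_samples(). Judging purely from how it is used here, that helper presumably sums sample_count_ over all entries; a minimal sketch under that assumption (not the original implementation):

/* Minimal sketch (assumption): the total sample count of an stts table is
   the sum of sample_count_ over its entries. */
static unsigned int stts_get_samples_sketch(stts_t const* stts)
{
	unsigned int samples = 0;
	unsigned int i;
	for(i = 0; i != stts->entries_; ++i)
		samples += stts->table_[i].sample_count_;
	return samples;
}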
Example #2
static ctts_t* ctts_create(mp4_context_t const* mp4_context,
                           samples_t const* first, samples_t const* last)
{
	samples_t const* f = first;
	unsigned int i = 0;
	while(f != last)
	{
		if(f->cto_)
			break;
		++f;
	}

	if(f == last)
	{
		return 0;
	}
	else
	{
		unsigned int prev_cto = 0;
		unsigned int samples = last - first;
		ctts_t* ctts = ctts_init();
		ctts->table_ = (ctts_table_t*)
			malloc((samples) * sizeof(ctts_table_t));

		f = first; i = 0;
		prev_cto = f->cto_;
		while(f != last)
		{
			unsigned int sc = 0;
			// check f != last before dereferencing to avoid reading past the range
			while(f != last && f->cto_ == prev_cto)
			{
				++sc;
				++f;
			}
			ctts->table_[i].sample_count_ = sc;
			ctts->table_[i].sample_offset_ = prev_cto;
			if(f != last)
				prev_cto = f->cto_;
			++i;
		}
		ctts->entries_ = i;
		if(ctts_get_samples(ctts) != samples)
		{
			MP4_WARNING("ERROR: stts_get_samples=%d, should be %d\n",
				ctts_get_samples(ctts), samples);
		}
		return ctts;
	}
}
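For intuition, ctts_create run-length encodes the per-sample composition time offsets (cto_) into (sample_count, sample_offset) pairs. A stand-alone illustration with hypothetical numbers, not taken from the source:

/* Hypothetical data: run-length encode per-sample composition time offsets
   the same way ctts_create does and print the resulting table entries. */
#include <stdio.h>

int main(void)
{
  unsigned int cto[] = { 0, 0, 0, 1024, 1024, 0 };
  unsigned int n = sizeof(cto) / sizeof(cto[0]);
  unsigned int i = 0;
  while(i != n)
  {
    unsigned int offset = cto[i];
    unsigned int count = 0;
    while(i != n && cto[i] == offset)
    {
      ++count;
      ++i;
    }
    /* prints (3,0) (2,1024) (1,0) */
    printf("sample_count=%u sample_offset=%u\n", count, offset);
  }
  return 0;
}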
extern int mp4_split(struct mp4_context_t* mp4_context,
                     unsigned int* trak_sample_start,
                     unsigned int* trak_sample_end,
                     mp4_split_options_t const* options)
{
  int result;

  float start_time = options->start;
  float end_time = options->end;

  moov_build_index(mp4_context, mp4_context->moov);

  {
    struct moov_t const* moov = mp4_context->moov;
    long moov_time_scale = moov->mvhd_->timescale_;
    unsigned int start = (unsigned int)(start_time * moov_time_scale + 0.5f);
    unsigned int end = (unsigned int)(end_time * moov_time_scale + 0.5f);

    // for every trak, convert seconds to sample (time-to-sample).
    // adjust sample to keyframe
    result = get_aligned_start_and_end(mp4_context, start, end,
                                       trak_sample_start, trak_sample_end);

    if (options->exact){
      // now we need to find the audio track and RESET *its* trak_sample_start
      // time to the exact start time we want, regardless of keyframes
      unsigned int i=0;
      for(i=0; i != moov->tracks_; ++i){
        struct trak_t* trak = moov->traks_[i];
        if (trak->mdia_->hdlr_->handler_type_ == FOURCC('s','o','u','n')){
          // the FOURCC is soun(d) AKA audio track
          long trak_time_scale = trak->mdia_->mdhd_->timescale_;
          struct stts_t* stts = trak->mdia_->minf_->stbl_->stts_;
          unsigned int start_exact_time_sample =
            stts_get_sample(stts, moov_time_to_trak_time(
              options->start * moov_time_scale, moov_time_scale, trak_time_scale));
          MP4_WARNING("FFGOP: AUDIO REWRITING trak_sample_start[%i]: %u => %u\n",
                      i, trak_sample_start[i], start_exact_time_sample);
          trak_sample_start[i] = start_exact_time_sample;
        }
      }
    }
  }

  return result;
}
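The exact-start branch above converts the requested start time from the movie timescale to the track timescale before looking up the sample. Assuming moov_time_to_trak_time() is the usual rescale t * trak_time_scale / moov_time_scale (an assumption based on how it is called here), it would look roughly like this:

#include <stdint.h>

/* Sketch only: rescale a timestamp from movie-timescale units to
   track-timescale units (assumed behaviour of moov_time_to_trak_time). */
static uint64_t rescale_moov_to_trak(uint64_t t, long moov_time_scale,
                                     long trak_time_scale)
{
  return t * (uint64_t)trak_time_scale / (uint64_t)moov_time_scale;
}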
extern int output_mp4(struct mp4_context_t* mp4_context,
                      unsigned int const* trak_sample_start,
                      unsigned int const* trak_sample_end,
                      struct bucket_t** buckets,
                      struct mp4_split_options_t* options)
{
  unsigned int i;

  uint64_t mdat_start = mp4_context->mdat_atom.start_;
  uint64_t mdat_size = mp4_context->mdat_atom.size_;
  int64_t offset;

  struct moov_t* moov = mp4_context->moov;
//  unsigned char* moov_data = mp4_context->moov_data;
  unsigned char* moov_data = (unsigned char*)
    malloc((size_t)mp4_context->moov_atom.size_ + ATOM_PREAMBLE_SIZE + 1024);

  uint64_t moov_size;

  long moov_time_scale = moov->mvhd_->timescale_;
  uint64_t skip_from_start = UINT64_MAX;
  uint64_t end_offset = 0;

  uint64_t moov_duration = 0;

#if 1
  uint64_t new_mdat_start = 0;
  {
    static char const free_data[] = {
      0x0, 0x0, 0x0,  42, 'f', 'r', 'e', 'e',
      'v', 'i', 'd', 'e', 'o', ' ', 's', 'e',
      'r', 'v', 'e', 'd', ' ', 'b', 'y', ' ',
      'm', 'o', 'd', '_', 'h', '2', '6', '4',
      '_', 's', 't', 'r', 'e', 'a', 'm', 'i',
      'n', 'g'
    };
    uint32_t size_of_header = (uint32_t)mp4_context->ftyp_atom.size_ +
                              sizeof(free_data);
    unsigned char* buffer = (unsigned char*)malloc(size_of_header);

    if(mp4_context->ftyp_atom.size_)
    {
      fseeko(mp4_context->infile, mp4_context->ftyp_atom.start_, SEEK_SET);
      if(fread(buffer, (off_t)mp4_context->ftyp_atom.size_, 1, mp4_context->infile) != 1)
      {
        MP4_ERROR("%s", "Error reading ftyp atom\n");
        free(buffer);
        return 0;
      }
    }

    // copy free data
    memcpy(buffer + mp4_context->ftyp_atom.size_, free_data,
           sizeof(free_data));

    if(options->output_format == OUTPUT_FORMAT_MP4)
    {
      bucket_t* bucket = bucket_init_memory(buffer, size_of_header);
      bucket_insert_tail(buckets, bucket);
    }
    free(buffer);

    new_mdat_start += size_of_header;
  }

//  new_mdat_start += mp4_context->moov_atom.size_;
#endif

  offset = new_mdat_start - mp4_context->mdat_atom.start_;
  // subtract old moov size
//  offset -= mp4_context->moov_atom.size_;

  for(i = 0; i != moov->tracks_; ++i)
  {
    struct trak_t* trak = moov->traks_[i];
    struct stbl_t* stbl = trak->mdia_->minf_->stbl_;

    unsigned int start_sample = trak_sample_start[i];
    unsigned int end_sample = trak_sample_end[i];


    if (options->exact)
      trak_fast_forward_first_partial_GOP(mp4_context, options, trak, start_sample);

    trak_update_index(mp4_context, trak, start_sample, end_sample);

    if(trak->samples_size_ == 0)
    {
      MP4_WARNING("Trak %u contains no samples. Maybe a fragmented file?", i);
      return 1;
    }

    {
      uint64_t skip =
        trak->samples_[start_sample].pos_ - trak->samples_[0].pos_;
      if(skip < skip_from_start)
        skip_from_start = skip;
      MP4_INFO("Trak can skip %"PRIu64" bytes\n", skip);

      if(end_sample != trak->samples_size_)
      {
        uint64_t end_pos = trak->samples_[end_sample].pos_;
        if(end_pos > end_offset)
          end_offset = end_pos;
        MP4_INFO("New endpos=%"PRIu64"\n", end_pos);
        MP4_INFO("Trak can skip %"PRIu64" bytes at end\n",
               mdat_start + mdat_size - end_offset);
      }
    }

    {
      // fixup trak (duration)
      uint64_t trak_duration = stts_get_duration(stbl->stts_);
      long trak_time_scale = trak->mdia_->mdhd_->timescale_;
      {
        uint64_t duration = trak_time_to_moov_time(trak_duration,
          moov_time_scale, trak_time_scale);
        trak->mdia_->mdhd_->duration_= trak_duration;
        trak->tkhd_->duration_ = duration;
        MP4_INFO("trak: new_duration=%"PRIu64"\n", duration);

        if(duration > moov_duration)
          moov_duration = duration;
      }
    }

//      MP4_INFO("stco.size=%d, ", read_int32(stbl->stco_ + 4));
//      MP4_INFO("stts.size=%d samples=%d\n", read_int32(stbl->stts_ + 4), stts_get_samples(stbl->stts_));
//      MP4_INFO("stsz.size=%d\n", read_int32(stbl->stsz_ + 8));
//      MP4_INFO("stsc.samples=%d\n", stsc_get_samples(stbl->stsc_));
  }
  moov->mvhd_->duration_ = moov_duration;
  MP4_INFO("moov: new_duration=%.2f seconds\n", moov_duration / (float)moov_time_scale);

  // subtract bytes we skip at the front of the mdat atom
  offset -= skip_from_start;

  MP4_INFO("%s", "moov: writing header\n");

  moov_write(moov, moov_data);
  moov_size = read_32(moov_data);

  // add new moov size
  offset += moov_size;

  MP4_INFO("shifting offsets by %"PRId64"\n", offset);
  moov_shift_offsets_inplace(moov, offset);

  // traffic shaping: create offsets for each second
  create_traffic_shaping(moov,
                         trak_sample_start,
                         trak_sample_end,
                         offset,
                         options);

#ifdef COMPRESS_MOOV_ATOM
  if(!options->client_is_flash)
  {
    compress_moov(mp4_context, moov, moov_data, &moov_size);
  }
#endif

  if(end_offset != 0)
  {
    MP4_INFO("mdat_size=%"PRId64" end_offset=%"PRId64"\n",
             mdat_size, end_offset);
    mdat_size = end_offset - mdat_start;
  }
  mdat_start += skip_from_start;
  mdat_size -= skip_from_start;

  MP4_INFO("mdat_bucket(%"PRId64", %"PRId64")\n", mdat_start, mdat_size);

  bucket_insert_tail(buckets, bucket_init_memory(moov_data, moov_size));
  free(moov_data);

  {
    struct mp4_atom_t mdat_atom;
    mdat_atom.type_ = FOURCC('m', 'd', 'a', 't');
    mdat_atom.short_size_ = 0; // TODO: use original small/wide mdat box

    if(options->adaptive)
    {
      // empty mdat atom
      mdat_atom.size_ = ATOM_PREAMBLE_SIZE;
    }
    else
    {
      mdat_atom.size_ = mdat_size;
    }

    {
      unsigned char buffer[32];
      int mdat_header_size = mp4_atom_write_header(buffer, &mdat_atom);
      bucket_insert_tail(buckets,
        bucket_init_memory(buffer, mdat_header_size));

      if(mdat_atom.size_ - mdat_header_size)
      {
        bucket_insert_tail(buckets,
          bucket_init_file(mdat_start + mdat_header_size,
                           mdat_atom.size_ - mdat_header_size));
      }
    }
  }

  return 1;
}
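The final block writes a fresh mdat header in front of the (possibly truncated) file-backed payload. In ISO base media files a box header is a big-endian 32-bit size followed by a fourcc type, with size == 1 signalling that a 64-bit "largesize" follows the type. A minimal sketch of such a writer; the real mp4_atom_write_header() takes an mp4_atom_t and may differ, for example in how it honours short_size_:

#include <stdint.h>

/* Sketch of an ISO-BMFF box header writer (hypothetical helper, not the
   library's mp4_atom_write_header). Returns the header length: 8 bytes for
   the 32-bit form, 16 bytes when the 64-bit "largesize" form is needed. */
static int box_header_write(unsigned char* p, uint32_t type, uint64_t size)
{
  int large = size > 0xffffffffu;
  uint32_t size32 = large ? 1u : (uint32_t)size;
  int i;
  for(i = 0; i != 4; ++i) p[i]     = (unsigned char)(size32 >> (24 - 8 * i));
  for(i = 0; i != 4; ++i) p[4 + i] = (unsigned char)(type   >> (24 - 8 * i));
  if(!large)
    return 8;
  for(i = 0; i != 8; ++i) p[8 + i] = (unsigned char)(size   >> (56 - 8 * i));
  return 16;
}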
static void trak_update_index(struct mp4_context_t const* mp4_context,
                              struct trak_t* trak,
                              unsigned int start, unsigned int end)
{
  // write samples [start,end>

  // stts = [entries * [sample_count, sample_duration]
  {
    struct stts_t* stts = trak->mdia_->minf_->stbl_->stts_;

    unsigned int entries = 0;
    unsigned int s = start;

    while(s != end)
    {
      unsigned int sample_count = 1;
      unsigned int sample_duration =
        (unsigned int)(trak->samples_[s + 1].pts_ - trak->samples_[s].pts_);
      while(++s != end)
      {
        if((trak->samples_[s + 1].pts_ - trak->samples_[s].pts_) != sample_duration)
          break;
        ++sample_count;
      }
// TODO: entries may be empty when we read a fragmented movie file. use
// output_mov() instead.
//      if(entries + 1 > stts->entries_)
//      {
//        stts->table_ = (stts_table_t*)
//          realloc(stts->table_, (entries + 1) * sizeof(stts_table_t));
//      }

      stts->table_[entries].sample_count_ = sample_count;
      stts->table_[entries].sample_duration_ = sample_duration;
      ++entries;
    }
    stts->entries_ = entries;

    if(stts_get_samples(stts) != end - start)
    {
      MP4_WARNING("ERROR: stts_get_samples=%d, should be %d\n",
             stts_get_samples(stts), end - start);
    }
  }

  // ctts = [entries * [sample_count, sample_offset]
  {
    struct ctts_t* ctts = trak->mdia_->minf_->stbl_->ctts_;
    if(ctts)
    {
      unsigned int entries = 0;
      unsigned int s = start;

      while(s != end)
      {
        unsigned int sample_count = 1;
        unsigned int sample_offset = trak->samples_[s].cto_;
        while(++s != end)
        {
          if(trak->samples_[s].cto_ != sample_offset)
            break;
          ++sample_count;
        }
        // write entry
        ctts->table_[entries].sample_count_ = sample_count;
        ctts->table_[entries].sample_offset_ = sample_offset;
        ++entries;
      }
      ctts->entries_ = entries;
      if(ctts_get_samples(ctts) != end - start)
      {
        MP4_WARNING("ERROR: ctts_get_samples=%d, should be %d\n",
               ctts_get_samples(ctts), end - start);
      }
    }
  }

  // process chunkmap:
  {
    struct stsc_t* stsc = trak->mdia_->minf_->stbl_->stsc_;
    if(stsc != NULL)
    {
      unsigned int i;

      for(i = 0; i != trak->chunks_size_; ++i)
      {
        if(trak->chunks_[i].sample_ + trak->chunks_[i].size_ > start)
          break;
      }

      {
        unsigned int stsc_entries = 0;
        unsigned int chunk_start = i;
        unsigned int chunk_end;
        // problem.mp4: reported by Jin-seok Lee. Second track contains no samples
        if(trak->chunks_size_ != 0)
        {
          unsigned int samples =
            trak->chunks_[i].sample_ + trak->chunks_[i].size_ - start;
          unsigned int id = trak->chunks_[i].id_;

          // write entry [chunk,samples,id]
          stsc->table_[stsc_entries].chunk_ = 0;
          stsc->table_[stsc_entries].samples_ = samples;
          stsc->table_[stsc_entries].id_ = id;
          ++stsc_entries;

          if(i != trak->chunks_size_)
          {
            for(i += 1; i != trak->chunks_size_; ++i)
            {
              unsigned int next_size = trak->chunks_[i].size_;
              if(trak->chunks_[i].sample_ + trak->chunks_[i].size_ > end)
              {
                next_size = end - trak->chunks_[i].sample_;
              }

              if(next_size != samples)
              {
                samples = next_size;
                id = trak->chunks_[i].id_;
                stsc->table_[stsc_entries].chunk_ = i - chunk_start;
                stsc->table_[stsc_entries].samples_ = samples;
                stsc->table_[stsc_entries].id_ = id;
                ++stsc_entries;
              }

              if(trak->chunks_[i].sample_ + next_size == end)
              {
                break;
              }
            }
          }
        }
        chunk_end = i + 1;
        stsc->entries_ = stsc_entries;

        {
          struct stco_t* stco = trak->mdia_->minf_->stbl_->stco_;
          unsigned int entries = 0;
          for(i = chunk_start; i != chunk_end; ++i)
          {
            stco->chunk_offsets_[entries] = stco->chunk_offsets_[i];
            ++entries;
          }
          stco->entries_ = entries;

          // patch first chunk with correct sample offset
          stco->chunk_offsets_[0] = (uint32_t)trak->samples_[start].pos_;
        }
      }
    }
  }

  // process sync samples:
  if(trak->mdia_->minf_->stbl_->stss_)
  {
    struct stss_t* stss = trak->mdia_->minf_->stbl_->stss_;
    unsigned int entries = 0;
    unsigned int stss_start;
    unsigned int i;

    for(i = 0; i != stss->entries_; ++i)
    {
      if(stss->sample_numbers_[i] >= start + 1)
        break;
    }
    stss_start = i;
    for(; i != stss->entries_; ++i)
    {
      unsigned int sync_sample = stss->sample_numbers_[i];
      if(sync_sample >= end + 1)
        break;
      stss->sample_numbers_[entries] = sync_sample - start;
      ++entries;
    }
    stss->entries_ = entries;
  }

  // process sample sizes
  {
    struct stsz_t* stsz = trak->mdia_->minf_->stbl_->stsz_;
    if(stsz != NULL)
    {
      if(stsz->sample_size_ == 0)
      {
        unsigned int entries = 0;
        unsigned int i;
        for(i = start; i != end; ++i)
        {
          stsz->sample_sizes_[entries] = stsz->sample_sizes_[i];
          ++entries;
        }
      }
      stsz->entries_ = end - start;
    }
  }
}
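The sync-sample pass above keeps only the keyframes that fall inside [start, end) and renumbers them relative to the new first sample (stss entries are 1-based). A stand-alone illustration with hypothetical numbers:

/* Hypothetical data: with stss entries {1, 31, 61, 91}, start = 30 and
   end = 90, the surviving keyframes 31 and 61 are renumbered to 1 and 31. */
#include <stdio.h>

int main(void)
{
  unsigned int stss[] = { 1, 31, 61, 91 };
  unsigned int n = sizeof(stss) / sizeof(stss[0]);
  unsigned int start = 30, end = 90;
  unsigned int i, entries = 0;
  for(i = 0; i != n; ++i)
  {
    if(stss[i] < start + 1)
      continue;
    if(stss[i] >= end + 1)
      break;
    printf("keyframe %u -> %u\n", stss[i], stss[i] - start);
    ++entries;
  }
  printf("entries=%u\n", entries); /* 2 */
  return 0;
}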
static void trak_fast_forward_first_partial_GOP(struct mp4_context_t const* mp4_context, 
                                                struct mp4_split_options_t* options, 
                                                struct trak_t *trak, 
                                                unsigned int start_sample)
{
  if (!trak->mdia_->minf_->stbl_->stts_){
    MP4_WARNING("FFGOP: NO STTS FOR THIS TRACK -- CANNOT ADJUST THIS TRACK\n","");
    return;
  }
  // NOTE: STTS atom = "time to sample" atom, which is what we use
  //  (and STSS atom = "sync samples" atom, which is list of keyframes)
  struct stts_t* stts = trak->mdia_->minf_->stbl_->stts_;

  // find the sample frame location of the exact desired time we wanted to 
  // start at (regardless of keyframes!)
  struct moov_t* moov = mp4_context->moov;
  float moov_time_scale = moov->mvhd_->timescale_;
  float trak_time_scale = trak->mdia_->mdhd_->timescale_;
  unsigned int start_exact_time_sample =
    stts_get_sample(stts, moov_time_to_trak_time(
      options->start * moov_time_scale, moov_time_scale, trak_time_scale));

  if (start_exact_time_sample == start_sample)
    return; // starting at wanted time already, nothing to do!

  MP4_INFO("FFGOP: start: %fs;  sample start exact time:%u;  sample keyframe just before:%u\n", 
           options->start, start_exact_time_sample, start_sample);
  MP4_INFO("FFGOP: moov_time_scale = %f, trak_time_scale = %f\n", moov_time_scale, trak_time_scale);

  
  

  // In practice, IA videos seem to always have stts->entries_ == 1 8-)
  // That's the starting number / table setup.
  // The STTS atom will be rewritten by the caller, expanding to more entries
  // since we are changing durations here.
  unsigned int s=0, i=0, j=0, nRewritten=0;
  for (j=0; j < stts->entries_; j++){ 
    for (i=0; i < stts->table_[j].sample_count_; i++){
      // NOTE: begin time-shifting at "start_sample" because mod_h264_streaming
      // finds the keyframe (sample time) before the exact start time, and *then*
      // decrements by one.  So those samples "go out the door" -- and thus we
      // need to rewrite them, too.
      if (s >= start_sample  &&  s < start_exact_time_sample){
        /* see mp4_io.h for samples_t (pts_/size_/pos_/cto_/is_ss_/is_smooth_ss_) */
        samples_t sample = trak->samples_[s];
        // let's change the current PTS to something fractionally *just* less than
        // the PTS of the first frame we want to see fully.  Each frame we don't
        // want to see gets a PTS one timescale unit earlier than the next frame's.
        uint64_t pts  = sample.pts_;
        uint64_t pts2 = trak->samples_[start_exact_time_sample].pts_ - (start_exact_time_sample-s);
        //uint64_t pts2 = trak->samples_[start_exact_time_sample].pts_ + (s <= (start_sample+1) ? -2 : -1);
        trak->samples_[s].pts_ = pts2;
        MP4_INFO("FFGOP: stts[%d] samples_[%d].pts_ = %lu (%0.3fsec)  REWRITING TO %lu (%0.3fsec)\n", 
                 j, s, pts, ((float)pts / trak_time_scale), pts2, ((float)pts2 / trak_time_scale));
        nRewritten++;
      }
      s++;
    }
  }

  if (nRewritten){
    MP4_WARNING("FFGOP: ==============>  %u FRAMES GOT FAST-FORWARDED (APPROXIMATELY %2.1f SECONDS ASSUMING 29.97 fps, YMMV)\n\n", nRewritten, nRewritten/29.97);
  }
}
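Both mp4_split() and the function above rely on stts_get_sample() to map a track-timescale timestamp to a sample index. Assuming it simply walks the run-length encoded (sample_count, sample_duration) entries (an assumption based on its usage here), a sketch could look like this:

#include <stdint.h>

/* Sketch only (assumed behaviour of stts_get_sample): walk the run-length
   encoded stts entries and return the 0-based index of the sample whose
   decoding time span contains `time` (in track-timescale units). */
static unsigned int stts_sample_for_time(struct stts_t const* stts, uint64_t time)
{
  unsigned int sample = 0;
  uint64_t elapsed = 0;
  unsigned int i;
  for(i = 0; i != stts->entries_; ++i)
  {
    uint64_t run = (uint64_t)stts->table_[i].sample_count_ *
                   stts->table_[i].sample_duration_;
    if(elapsed + run > time)
      return sample + (unsigned int)((time - elapsed) /
                                     stts->table_[i].sample_duration_);
    elapsed += run;
    sample += stts->table_[i].sample_count_;
  }
  return sample;
}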