Example #1
bool CQTAudioByteStream::start_next_frame (uint8_t **buffer, 
					   uint32_t *buflen,
					   frame_timestamp_t *pts,
					   void **ud)
{
  uint64_t ret;
  if (m_frame_on >= m_frames_max) {
#ifdef DEBUG_QTIME_AUDIO_FRAME
    player_debug_message("Setting EOF from start_next_frame %d %d", 
			 m_frame_on, m_frames_max);
#endif
    m_eof = 1;
  }
  ret = m_frame_on;
  ret *= m_samples_per_frame;
  pts->audio_freq_timestamp = ret;
  pts->audio_freq = m_frame_rate;
  ret *= 1000;
  ret /= m_frame_rate;
#ifdef DEBUG_QTIME_AUDIO_FRAME
  player_debug_message("audio - start frame %d %d", m_frame_on, m_frames_max);
#endif
#if 0
  player_debug_message("audio Start next frame "U64 " offset %u %u", 
		       ret, m_byte_on, m_this_frame_size);
#endif
  read_frame(m_frame_on);
  *buffer = m_buffer;
  *buflen = m_this_frame_size;
  m_frame_on++;
  pts->msec_timestamp = ret;
  pts->timestamp_is_pts = false;
  return (true);
}
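
A minimal standalone sketch of the timestamp arithmetic above (the helper name and parameters are illustrative, not part of the player): the frame index is scaled to samples for the audio-frequency timestamp, then to milliseconds using the sample rate.

#include <cstdint>

// Illustrative only: compute the two timestamps the same way start_next_frame does.
static void audio_frame_timestamps (uint32_t frame_on,
                                    uint32_t samples_per_frame,
                                    uint32_t sample_rate,
                                    uint64_t *freq_ts,
                                    uint64_t *msec_ts)
{
  uint64_t samples = (uint64_t)frame_on * samples_per_frame;
  *freq_ts = samples;                          // timestamp in audio-frequency units
  *msec_ts = (samples * 1000) / sample_rate;   // same instant in milliseconds
}
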
Example #2
bool CQTVideoByteStream::start_next_frame (uint8_t **buffer, 
					   uint32_t *buflen,
					   frame_timestamp_t *ts,
					   void **ud)
{
  uint64_t ret;
  long start;
  int duration;

#if 0
  if (m_frame_on != 0) {
    player_debug_message("frame %d on %u %d", m_frame_on,
			 m_byte_on, m_this_frame_size);
    if (m_byte_on != 0 && m_byte_on != m_this_frame_size) {
      for (uint32_t ix = m_byte_on; ix < m_this_frame_size - 4; ix++) {
	if ((m_buffer[ix] == 0) &&
	    (m_buffer[ix + 1] == 0) &&
	    (m_buffer[ix + 2] == 1)) {
	  player_debug_message("correct start code %x", m_buffer[ix + 3]);
	  player_debug_message("offset %d %d %d", m_byte_on,
			       ix, m_this_frame_size);
	}
      }
    }
  }
#endif
  if (m_frame_on >= m_frames_max) {
    m_eof = 1;
  }
#ifdef DEBUG_QTIME_VIDEO_FRAME
  player_debug_message("start_next_frame %d", m_frame_on);
#endif
  if (quicktime_video_frame_time(m_parent->get_file(),
				 m_track,
				 m_frame_on,
				 &start,
				 &duration) != 0) {
    ret = start;
    ret *= 1000;
    ret /= m_time_scale;
   //player_debug_message("Returning "U64, ret);
 } else {
   ret = m_frame_on;
   ret *= 1000;
   ret /= m_frame_rate;
 }
  read_frame(m_frame_on);
  *buffer = m_buffer;
  *buflen = m_this_frame_size;

  m_frame_on++;
  ts->msec_timestamp = ret;
  ts->timestamp_is_pts = false;
  return (true);
}
Example #3
void CMpeg3VideoByteStream::play (uint64_t start)
{
  player_debug_message("mpeg3 play "U64, start);
  m_play_start_time = start;

  set_timebase(start);
}
Example #4
/*
 * read_frame for video - this will try to read the next frame 
 */
void CQTVideoByteStream::read_frame (uint32_t frame_to_read)
{
  uint32_t next_frame_size;

  if (m_frame_in_buffer == frame_to_read) {
#ifdef DEBUG_QTIME_VIDEO_FRAME
    player_debug_message("frame in buffer %u %u", m_byte_on, m_this_frame_size);
#endif
    m_byte_on = 0;
    return;
  }

  // We haven't read this frame yet, so get its size, make sure it fits,
  // then read it into the buffer
  m_parent->lock_file_mutex();
  next_frame_size = quicktime_frame_size(m_parent->get_file(),
					 frame_to_read,
					 m_track);
  if (next_frame_size > m_max_frame_size) {
    m_max_frame_size = next_frame_size + 4;
    m_buffer = (uint8_t *)realloc(m_buffer, 
					(next_frame_size + 4) * sizeof(char));
  }
  m_this_frame_size = next_frame_size;
  quicktime_set_video_position(m_parent->get_file(), frame_to_read, m_track);
  m_frame_in_buffer = frame_to_read;
#ifdef DEBUG_QTIME_VIDEO_FRAME
  player_debug_message("reading into buffer %u", m_this_frame_size);
#endif
  quicktime_read_frame(m_parent->get_file(),
		       (unsigned char *)m_buffer,
		       m_track);
#ifdef DEBUG_QTIME_VIDEO_FRAME
  player_debug_message("Buffer %d %02x %02x %02x %02x", 
		       frame_to_read,
	 m_buffer[0],
	 m_buffer[1],
	 m_buffer[2],
	 m_buffer[3]);
#endif
  m_parent->unlock_file_mutex();
  m_byte_on = 0;
}
Example #5
void CQTAudioByteStream::read_frame (uint32_t frame_to_read)
{
#ifdef DEBUG_QTIME_AUDIO_FRAME
  player_debug_message("audio read_frame %d", frame_to_read);
#endif

  if (m_frame_in_buffer == frame_to_read) {
    m_byte_on = 0;
    return;
  }
  m_parent->lock_file_mutex();

  m_frame_in_buffer = frame_to_read;

  unsigned char *buff = (unsigned char *)m_buffer;
  quicktime_set_audio_position(m_parent->get_file(), 
			       frame_to_read, 
			       m_track);
  m_this_frame_size = quicktime_read_audio_frame(m_parent->get_file(),
						 buff,
						 m_max_frame_size,
						 m_track);
  if (m_this_frame_size < 0) {
    m_max_frame_size = -m_this_frame_size;
    m_buffer = (uint8_t *)realloc(m_buffer, m_max_frame_size * sizeof(char));
    // Okay - I could have used a goto, but it really grates...
    m_frame_in_buffer = frame_to_read;
    buff = (unsigned char *)m_buffer;
    quicktime_set_audio_position(m_parent->get_file(), 
				 frame_to_read, 
				 m_track);
    m_this_frame_size = quicktime_read_audio_frame(m_parent->get_file(),
						   buff,
						   m_max_frame_size,
						 m_track);
  }
#if 0
  player_debug_message("qta frame size %u", m_this_frame_size);
#endif
  m_parent->unlock_file_mutex();
  m_byte_on = 0;
}
Example #6
static inline bool convert_psts (mpeg2t_es_t *es_pid,
				 mpeg2t_frame_t *fptr,
				 uint64_t start_psts)
{
  uint64_t ps_ts;
#ifdef DEBUG_MPEG2T_PSTS
  uint64_t initial;
#endif
  // we want to 0 out the timestamps of the incoming transport
  // stream so that the on-demand shows the correct timestamps

  // see if we've set the initial timestamp
  // here, we want to check for a gross change in the psts
  if (fptr->have_ps_ts == 0 && fptr->have_dts == 0) return true;

  if (fptr->have_dts) {
    ps_ts = fptr->dts;
    if (ps_ts < start_psts) return false;
#ifdef DEBUG_MPEG2T_PSTS
    initial = fptr->dts;
#endif
    fptr->dts -= start_psts;
#ifdef DEBUG_MPEG2T_PSTS
    player_debug_message(" convert dts "U64" to "U64" "U64,
			 fptr->dts, start_psts, initial);
#endif
  } else {
    ps_ts = fptr->ps_ts;
    if (ps_ts < start_psts) return false;
#ifdef DEBUG_MPEG2T_PSTS
    initial = fptr->ps_ts;
#endif
    fptr->ps_ts -= start_psts;
#ifdef DEBUG_MPEG2T_PSTS
    player_debug_message(" convert psts "U64" to "U64" "U64,
			 fptr->ps_ts, start_psts, initial);
#endif
  }
  return true;
}
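
The effect of convert_psts is simply to re-base the incoming 90 kHz timestamps at zero and to reject frames that precede the chosen start point. A hedged sketch of the same logic on a bare struct (the type and field names are illustrative):

#include <cstdint>

struct raw_frame { int have_ps_ts; int have_dts; uint64_t ps_ts; uint64_t dts; };

// Returns false when the frame is older than the stream's starting timestamp.
static bool rebase_timestamps (raw_frame *f, uint64_t start_psts)
{
  if (f->have_ps_ts == 0 && f->have_dts == 0) return true; // nothing to convert
  uint64_t &t = f->have_dts ? f->dts : f->ps_ts;
  if (t < start_psts) return false;  // frame predates the start point - reject it
  t -= start_psts;                   // timestamps now begin at zero
  return true;
}
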
Example #7
void CQTVideoByteStream::play (uint64_t start)
{
  m_play_start_time = start;
  uint32_t ix;
  long frame_start;
  int duration;


  m_parent->lock_file_mutex();
  for (ix = 0; ix < m_frames_max; ix++) {
    if (quicktime_video_frame_time(m_parent->get_file(),
				   m_track, 
				   ix, 
				   &frame_start, 
				   &duration) != 0) {
      uint64_t cmp;
      cmp = frame_start + duration;
      cmp *= 1000;
      cmp /= m_time_scale;
      //player_debug_message("frame %d "U64, ix, cmp);
      if (cmp >= start) {
	player_debug_message("Searched through - frame %d is "U64, 
			     ix, start);
	break;
      }
    } else {
      m_parent->unlock_file_mutex();
      ix = (uint32_t)((start * m_frame_rate) / 1000);
      video_set_timebase(ix);
      return;
    }
  }
#if 0
  player_debug_message("qtime video frame " D64 , start);
#endif
  // we've got the position;
  m_parent->unlock_file_mutex();
  video_set_timebase(ix);
}
Example #8
void CAviVideoByteStream::play (uint64_t start)
{
  m_play_start_time = start;

  double time = UINT64_TO_DOUBLE(start);
  time *= m_frame_rate;
  time /= 1000;
#if 0
  player_debug_message("avi video frame " D64 , start);
#endif
  // we've got the position;
  video_set_timebase((uint32_t)time);
}
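
For reference, the conversion here is just frame = milliseconds * frame_rate / 1000; at 25 fps, for example, a start time of 4000 ms lands on frame 100. The same computation in isolation (an illustrative helper, not part of the player):

#include <cstdint>

static uint32_t msec_to_frame (uint64_t start_msec, double frame_rate)
{
  return (uint32_t)((start_msec * frame_rate) / 1000.0);
}
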
Example #9
void CAviAudioByteStream::play (uint64_t start)
{
  m_play_start_time = start;
  
#if 0  
  start *= m_frame_rate;
  start /= 1000 * m_samples_per_frame;
  player_debug_message("qtime audio frame " D64, start);
#endif
  // we've got the position;
  audio_set_timebase((uint32_t)start);
  m_play_start_time = start;
}
Example #10
bool CAviAudioByteStream::start_next_frame (uint8_t **buffer, 
					    uint32_t *buflen,
					    frame_timestamp_t *ts,
					    void **ud)
{
  int value;
  if (m_buffer_on < m_this_frame_size) {
    value = m_this_frame_size - m_buffer_on;
    memmove(m_buffer,
	    m_buffer + m_buffer_on,
	    m_this_frame_size - m_buffer_on);
    m_this_frame_size -= m_buffer_on;
  } else {
    value = 0;
    m_this_frame_size = 0;
  }
  m_buffer_on = 0;
  m_parent->lock_file_mutex();
  AVI_set_audio_position(m_parent->get_file(), m_file_pos);

  int ret;
  ret = AVI_read_audio(m_parent->get_file(), 
		       (char *)m_buffer + value, 
		       m_max_frame_size - m_this_frame_size);
  m_parent->unlock_file_mutex();

  //player_debug_message("return from avi read %d", ret);
  m_this_frame_size += ret;
  m_file_pos += ret;
  if (m_file_pos >= AVI_audio_bytes(m_parent->get_file())) {
    m_eof = 1;
  }

  *buffer = m_buffer;
  *buflen = m_this_frame_size;
#if 0
  uint64_t ret;
  ret = m_frame_on;
  ret *= m_samples_per_frame;
  ret *= 1000;
  ret /= m_frame_rate;
    player_debug_message("Start next frame "U64 " offset %u %u", 
			 ret, m_byte_on, m_this_frame_size);
  return (ret);
#endif
  ts->msec_timestamp = 0;
  ts->audio_freq_timestamp = 0;
  ts->audio_freq = AVI_audio_rate(m_parent->get_file());
  ts->timestamp_is_pts = false;
  return true;
}
Example #11
/**************************************************************************
 * Quicktime Audio stream functions
 **************************************************************************/
void CQTAudioByteStream::audio_set_timebase (long frame)
{
#ifdef DEBUG_QTIME_AUDIO_FRAME
  player_debug_message("Setting qtime audio timebase to frame %ld", frame);
#endif
  m_eof = 0;
  m_frame_on = frame;
  m_parent->lock_file_mutex();
  quicktime_set_audio_position(m_parent->get_file(), 
			       frame, 
			       m_track);
  m_parent->unlock_file_mutex();
  read_frame(0);
}
Example #12
bool CMpeg2fByteStream::start_next_frame (uint8_t **buffer, 
					  uint32_t *buflen,
					  frame_timestamp_t *ts,
					  void **ud)
{
  if (m_frame) {
    mpeg2t_free_frame(m_frame);
  }
  
  // see if there is a frame ready for this pid.  If not, 
  // request one
  m_frame = mpeg2t_get_es_list_head(m_es_pid);
  if (m_frame == NULL) {
    m_file->get_frame_for_pid(m_es_pid);
    m_frame = mpeg2t_get_es_list_head(m_es_pid);
    if (m_frame == NULL) {
      player_debug_message("%s no frame %d", m_name, m_file->eof());
      return false;
    }
  }
  // Convert the psts
  if (convert_psts(m_es_pid, m_frame, m_file->get_start_psts()) == false) {
    return false;
  }

  // convert the psts into a timestamp
  if (get_timestamp_for_frame(m_frame, ts) >= 0) {
    *buffer = m_frame->frame;
    *buflen = m_frame->frame_len;
#ifdef DEBUG_MPEG2T_FRAME
    player_debug_message("%s - len %d time "U64" ftype %d", 
			 m_name, *buflen, ts->msec_timestamp, m_frame->frame_type);
#endif
    return true;
  }
  return false;
}
Example #13
int CMpeg2fAudioByteStream::get_timestamp_for_frame (mpeg2t_frame_t *fptr,
						     frame_timestamp_t *ts)
{
  uint64_t pts_in_msec;
  // all ts for audio are stored in msec, not in timescale
#ifdef DEBUG_MPEG2T_PSTS
  player_debug_message("audio frame len %d have  dts %d ts "U64, 
		       fptr->frame_len, fptr->have_dts, fptr->dts);
  player_debug_message("audio frame len %d have psts %d ts "U64" %d %d", 
		       fptr->frame_len, fptr->have_ps_ts, fptr->ps_ts,
		       m_es_pid->sample_per_frame, 
		       m_es_pid->sample_freq);
#endif
  if (fptr->have_ps_ts != 0 || fptr->have_dts != 0) {
    m_timestamp_loaded = 1;
    pts_in_msec = fptr->have_dts ? fptr->dts : fptr->ps_ts;
    m_audio_last_freq_timestamp = 
      ((pts_in_msec * m_es_pid->sample_freq) / TO_U64(90000));
    ts->audio_freq_timestamp = m_audio_last_freq_timestamp;
    ts->audio_freq = m_es_pid->sample_freq;
    pts_in_msec *= TO_U64(1000);
    pts_in_msec /= TO_U64(90000);
    m_last_timestamp = pts_in_msec;
    ts->msec_timestamp = m_last_timestamp;
    m_frames_since_last_timestamp = 0;
    return 0;
  }

  if (m_timestamp_loaded == 0) return -1;
  if (m_es_pid->info_loaded == 0) return -1;

  ts->msec_timestamp = m_last_timestamp;
  ts->audio_freq_timestamp = m_audio_last_freq_timestamp;
  ts->audio_freq = m_es_pid->sample_freq;
  return 0;
}
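
Both conversions divide out the MPEG-2 90 kHz clock: the audio-frequency timestamp scales by sample_freq / 90000 and the millisecond timestamp by 1000 / 90000. For example, a PTS of 900000 (10 s) with a 48 kHz sample rate gives audio_freq_timestamp = 480000 and msec_timestamp = 10000. A minimal sketch of that arithmetic (names are illustrative):

#include <cstdint>

static void pts_90khz_to_timestamps (uint64_t pts_90khz, uint32_t sample_freq,
                                     uint64_t *freq_ts, uint64_t *msec_ts)
{
  *freq_ts = (pts_90khz * sample_freq) / UINT64_C(90000);    // e.g. 900000 -> 480000 at 48 kHz
  *msec_ts = (pts_90khz * UINT64_C(1000)) / UINT64_C(90000); // e.g. 900000 -> 10000 ms
}
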
Example #14
void CQTVideoByteStream::config (long num_frames, float frate, int time_scale)
{
  m_frames_max = num_frames;
  m_frame_rate = (uint32_t)frate;
  m_time_scale = time_scale;

  long start;
  int duration;
  // Set up max play time, based on the timing of the last frame.
  if (quicktime_video_frame_time(m_parent->get_file(),
				 m_track,
				 m_frames_max - 1,
				 &start,
				 &duration) != 0) {
    player_debug_message("video frame time - %d %ld %d", 
			 m_frames_max, start, duration);
    m_max_time = (start + duration);
    m_max_time /= m_time_scale;
  } else {
    m_max_time = m_frames_max;
    m_max_time /= m_frame_rate;
  }
  player_debug_message("Max time is %g", m_max_time);
}
Example #15
void CQTByteStreamBase::check_for_end_of_frame (void)
{
  if (m_byte_on >= m_this_frame_size) {
    uint32_t next_frame;
    next_frame = m_frame_in_buffer + 1;
#if 0
    player_debug_message("%s - next frame %d", 
			 m_name, 
			 next_frame);
#endif
    if (next_frame >= m_frames_max) {
      m_eof = 1;
    } else {
      read_frame(next_frame);
    }
  }
}
Example #16
/*
 * pause_all_media - do a spin loop until the sync thread indicates it's
 * paused.
 */
int CPlayerSession::pause_all_media (void) 
{
  int ret;
  CPlayerMedia *p;
  m_session_state = SESSION_PAUSED;
  if (session_control_is_aggregate()) {
    rtsp_command_t cmd;
    rtsp_decode_t *decode;

    memset(&cmd, 0, sizeof(rtsp_command_t));
    if (rtsp_send_aggregate_pause(m_rtsp_client,
				  m_session_control_url,
				  &cmd,
				  &decode) != 0) {
      player_debug_message("RTSP aggregate pause command failed");
      free_decode_response(decode);
      return (-1);
    }
    free_decode_response(decode);
  }
  p = m_my_media;
  while (p != NULL) {
    ret = p->do_pause();
    if (ret != 0) return (ret);
    p = p->get_next();
  }
  m_sync_pause_done = 0;
  send_sync_thread_a_message(MSG_PAUSE_SESSION);
#ifndef NEED_SDL_VIDEO_IN_MAIN_THREAD
  do {
#endif
    SDL_Delay(100);
#ifndef NEED_SDL_VIDEO_IN_MAIN_THREAD
  } while (m_sync_pause_done == 0);
#endif
  m_paused = 1;
  return (0);
}
Example #17
/*
 * C2ConsecIpPort::C2ConsecIpPort() - get 2 consecutive, even-odd, ip
 * port numbers
 */
C2ConsecIpPort::C2ConsecIpPort (CIpPort **global, in_port_t start_port)
{
  CIpPort *newone;
  m_first = m_second = NULL;
  in_port_t firstport, maxport;

  maxport = (in_port_t)~0;
  if (start_port == 0) {
    firstport = 1024;
    if (config.get_config_value(CONFIG_IPPORT_MIN) != UINT32_MAX) {
      firstport = config.get_config_value(CONFIG_IPPORT_MIN);
      if (config.get_config_value(CONFIG_IPPORT_MAX) != UINT32_MAX) {
	maxport = config.get_config_value(CONFIG_IPPORT_MAX);
	if (maxport <= firstport) {
	  player_error_message("IP port configuration error - %u %u - using 65535 as max port value", 
			       firstport, maxport);
	  maxport = (in_port_t)~0;
	}
      }
    }
  } else {
    firstport = start_port;
  }
  while (1) {
    // first, get an even port number.  If not even, save it in the
    // global queue.
    do {
      newone = new CIpPort(firstport, maxport);
      if (newone->valid() == 0)
	return;
      if ((newone->get_port_num() & 0x1) == 0x1) {
	newone->set_next(*global);
	*global = newone;
	firstport++;
      }
    } while ((newone->get_port_num() & 0x1) == 0x1);

    player_debug_message("First port is %d", newone->get_port_num());

    // Okay, save the first, get the 2nd.  If okay, just return
    m_first = newone;
    in_port_t next;
    next = m_first->get_port_num() + 1;
    m_second = new CIpPort(next, next);
    if ((m_second->valid() == 1) &&
	(m_second->get_port_num() == next)) {
      player_debug_message("Ip ports are %u %u", next - 1, next);
      return;
    } else {
      player_debug_message("Got port %d invalid %d", m_second->get_port_num(),
			   m_second->valid());
    }
    // Not okay - save both off in the global queue, and try again...
    m_first->set_next(*global);
    *global = m_first;
    m_first = NULL;
    firstport = m_second->get_port_num() + 1;
    m_second->set_next(*global);
    *global = m_second;
    m_second = NULL;
  }
}
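
The loop above encodes the usual RTP convention: the data port must be even and RTCP takes the next (odd) port, so odd or unbindable candidates are parked on the global list and the search moves on. The pairing rule by itself, with no socket handling (illustrative only):

#include <cstdint>

// True when (port, port + 1) can serve as an RTP/RTCP pair: RTP on the even
// port, RTCP on the immediately following odd one.
static bool is_rtp_rtcp_pair (uint16_t rtp_port)
{
  return (rtp_port & 0x1) == 0;
}
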
Example #18
/*
 * play_all_media - get all media to play
 */
int CPlayerSession::play_all_media (int start_from_begin, 
				    double start_time,
				    char *errmsg, 
				    uint32_t errlen)
{
  int ret;
  CPlayerMedia *p;

  if (m_set_end_time == 0) {
    range_desc_t *range;
    if (m_sdp_info && m_sdp_info->session_range.have_range != FALSE) {
      range = &m_sdp_info->session_range;
    } else {
      range = NULL;
      p = m_my_media;
      while (range == NULL && p != NULL) {
	media_desc_t *media;
	media = p->get_sdp_media_desc();
	if (media && media->media_range.have_range) {
	  range = &media->media_range;
	}
	p = p->get_next();
      }
    }
    if (range != NULL) {
      m_end_time = (uint64_t)(range->range_end * 1000.0);
      m_set_end_time = 1;
    }
  }
  p = m_my_media;
  m_session_state = SESSION_BUFFERING;
  if (m_paused == 1 && start_time == 0.0 && start_from_begin == FALSE) {
    /*
     * we were paused.  Continue.
     */
    m_play_start_time = m_current_time;
    start_time = UINT64_TO_DOUBLE(m_current_time);
    start_time /= 1000.0;
    player_debug_message("Restarting at " U64 ", %g", m_current_time, start_time);
  } else {
    /*
     * We might have been paused, but we're told to seek
     */
    // Indicate what time we're starting at for sync task.
    m_play_start_time = (uint64_t)(start_time * 1000.0);
  }
  m_paused = 0;

  send_sync_thread_a_message(MSG_START_SESSION);
  // If we're doing aggregate rtsp, send the play command...

  if (session_control_is_aggregate() &&
      m_dont_send_first_rtsp_play == 0) {
    char buffer[80];
    rtsp_command_t cmd;
    rtsp_decode_t *decode;

    memset(&cmd, 0, sizeof(rtsp_command_t));
    if (m_set_end_time != 0) {
      uint64_t stime = (uint64_t)(start_time * 1000.0);
      sprintf(buffer, "npt="U64"."U64"-"U64"."U64, 
	      stime / 1000, stime % 1000, m_end_time / 1000, m_end_time % 1000);
      cmd.range = buffer;
    }
    if (rtsp_send_aggregate_play(m_rtsp_client,
				 m_session_control_url,
				 &cmd,
				 &decode) != 0) {
      if (errmsg != NULL) {
	snprintf(errmsg, errlen, "RTSP Aggregate Play Error %s-%s", 
		 decode->retcode,
		 decode->retresp != NULL ? decode->retresp : "");
      }
      player_debug_message("RTSP aggregate play command failed");
      free_decode_response(decode);
      return (-1);
    }
    if (decode->rtp_info == NULL) {
      player_error_message("No rtp info field");
    } else {
      player_debug_message("rtp info is \'%s\'", decode->rtp_info);
    }
    int ret = process_rtsp_rtpinfo(decode->rtp_info, this, NULL);
    free_decode_response(decode);
    if (ret < 0) {
      if (errmsg != NULL) {
	snprintf(errmsg, errlen, "RTSP aggregate RtpInfo response failure");
      }
      player_debug_message("rtsp aggregate rtpinfo failed");
      return (-1);
    }
  }
  m_dont_send_first_rtsp_play = 0;

  while (p != NULL) {
    ret = p->do_play(start_time, errmsg, errlen);
    if (ret != 0) return (ret);
    p = p->get_next();
  }
  return (0);
}
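
The Range header above splits each millisecond time into whole seconds and a fractional part. A hedged sketch of just that formatting (the fraction is padded to three digits here so that, for example, 5 ms renders as ".005"):

#include <cstdio>
#include <cstdint>

// Format an RTSP npt range such as "npt=12.500-98.040" from millisecond values.
static void format_npt_range (char *buf, size_t buflen,
                              uint64_t start_msec, uint64_t end_msec)
{
  snprintf(buf, buflen, "npt=%llu.%03llu-%llu.%03llu",
           (unsigned long long)(start_msec / 1000),
           (unsigned long long)(start_msec % 1000),
           (unsigned long long)(end_msec / 1000),
           (unsigned long long)(end_msec % 1000));
}
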
Example #19
int CMpeg2fVideoByteStream::get_timestamp_for_frame (mpeg2t_frame_t *fptr,
						     frame_timestamp_t *ts)

{
#ifdef DEBUG_MPEG2T_PSTS
  if (fptr->have_dts) {
    player_debug_message("video frame len %d have  dts %d ts "U64,
			 fptr->frame_len, fptr->have_dts, fptr->dts);
  }
  if (fptr->have_ps_ts) {
    player_debug_message("video frame len %d have psts %d ts "U64, 
			 fptr->frame_len, fptr->have_ps_ts, fptr->ps_ts);
  }
#endif
#if 0
  if (m_es_pid->stream_type == MPEG2T_STREAM_H264) {
    if (fptr->have_dts || fptr->have_ps_ts) {
      if (fptr->have_dts)
	outts = fptr->dts;
      else
	outts = fptr->ps_ts;
      outts *= TO_U64(1000);
      outts /= TO_U64(90000); // get msec from 90000 timescale
      return 0;
    }
    return -1;
  }
  uint64_t ts;
  //  m_es_pid->frame_rate = 24;
  double value = 90000.0 / m_es_pid->frame_rate;
  uint64_t frame_time = (uint64_t)value;
  if (fptr->have_ps_ts == 0 && fptr->have_dts == 0) {
    // We don't have a timestamp on this - just increment from
    // the previous timestamp.
    if (m_timestamp_loaded == 0) return -1;
    if (m_es_pid->info_loaded == 0) return -1;

    outts = m_prev_ts + frame_time;
    m_have_prev_frame_type = 1;
    m_prev_frame_type = fptr->frame_type;
    m_prev_ts = outts;
    outts *= TO_U64(1000);
    outts /= TO_U64(90000); // get msec from 90000 timescale
    return 0;
  }
  m_timestamp_loaded = 1;
  if (fptr->have_dts != 0) {
    outts = fptr->dts;
  } else {
    ts = fptr->ps_ts;

    if (m_have_prev_frame_type) {
      if (fptr->frame_type == 3) {
	// B frame
	outts = ts;
      } else {
	outts = m_prev_ts + frame_time;
      }
    } else {
      if (fptr->frame_type == 1) {
	uint16_t temp_ref = MP4AV_Mpeg3PictHdrTempRef(fptr->frame + fptr->pict_header_offset);
	ts -= ((temp_ref + 1) * m_es_pid->tick_per_frame);
	outts = ts;
      } else {
	player_error_message( "no psts and no prev frame");
	outts = ts;
      }
    }
  }

  m_have_prev_frame_type = 1;
  m_prev_frame_type = fptr->frame_type;
  m_prev_ts = outts;

  outts *= TO_U64(1000);
  outts /= TO_U64(90000); // get msec from 90000 timescale
  
  return 0;
#endif
  ts->timestamp_is_pts = fptr->have_dts == false;
  uint64_t outts;
  if (fptr->have_dts)
    outts = fptr->dts;
  else
    outts = fptr->ps_ts;
  outts *= TO_U64(1000);
  outts /= TO_U64(90000); // get msec from 90000 timescale
  ts->msec_timestamp = outts;
  return 0;
}
Example #20
/*
 * Create the media for the avi file, and set up some session stuff.
 */
int create_media_for_avi_file (CPlayerSession *psptr, 
			       const char *name,
			       char *errmsg,
			       uint32_t errlen,
			       int have_audio_driver,
			       control_callback_vft_t *cc_vft)
{
  CAviFile *Avifile1 = NULL;
  avi_t *avi;
  CPlayerMedia *mptr;
  avi = AVI_open_input_file(name, 1);
  if (avi == NULL) {
    snprintf(errmsg, errlen, "%s", AVI_strerror());
    player_error_message("%s", AVI_strerror());
    return (-1);
  }

  int video_count = 1;
  codec_plugin_t *plugin;
  video_query_t vq;

  const char *codec_name = AVI_video_compressor(avi);
  player_debug_message("Trying avi video codec %s", codec_name);
  plugin = check_for_video_codec(STREAM_TYPE_AVI_FILE,
				 codec_name, 
				 NULL,
				 -1,
				 -1,
				 NULL,
				 0, 
				 &config);
  if (plugin == NULL) {
    video_count = 0;
  } else {
    vq.track_id = 1;
    vq.stream_type = STREAM_TYPE_AVI_FILE;
    vq.compressor = codec_name;
    vq.type = -1;
    vq.profile = -1;
    vq.fptr = NULL;
    vq.h = AVI_video_height(avi);
    vq.w = AVI_video_width(avi);
    vq.frame_rate = AVI_video_frame_rate(avi);
    vq.config = NULL;
    vq.config_len = 0;
    vq.enabled = 0;
    vq.reference = NULL;
  }

  int have_audio = 0;
  int audio_count = 0;
  audio_query_t aq;

  if (AVI_audio_bytes(avi) != 0) {
    have_audio = 1;
    plugin = check_for_audio_codec(STREAM_TYPE_AVI_FILE,
				   NULL,
				   NULL,
				   AVI_audio_format(avi), 
				   -1, 
				   NULL, 
				   0,
				   &config);
    if (plugin != NULL) {
      audio_count = 1;
      aq.track_id = 1;
      aq.stream_type = STREAM_TYPE_AVI_FILE;
      aq.compressor = NULL;
      aq.type = AVI_audio_format(avi);
      aq.profile = -1;
      aq.fptr = NULL;
      aq.sampling_freq = AVI_audio_rate(avi);
      aq.chans = AVI_audio_channels(avi);
      aq.config = NULL;
      aq.config_len = 0;
      aq.enabled = 0;
      aq.reference = NULL;
    }
  }

  if (cc_vft != NULL && cc_vft->media_list_query != NULL) {
    (cc_vft->media_list_query)(psptr, video_count, &vq, audio_count, &aq);
  } else {
    if (video_count != 0) vq.enabled = 1;
    if (audio_count != 0) aq.enabled = 1;
  }


  if ((video_count == 0 || vq.enabled == 0) && 
      (audio_count == 0 || aq.enabled == 0)) {
    snprintf(errmsg, errlen, "No audio or video tracks enabled or playable");
    AVI_close(avi);
    return -1;
  }
  
  Avifile1 = new CAviFile(name, avi, vq.enabled, audio_count);
  psptr->set_media_close_callback(close_avi_file, Avifile1);

  if (video_count != 0 && vq.enabled) {
    mptr = new CPlayerMedia(psptr);
    if (mptr == NULL) {
      return (-1);
    }
  
    video_info_t *vinfo = MALLOC_STRUCTURE(video_info_t);
    if (vinfo == NULL) 
      return (-1);
    vinfo->height = vq.h;
    vinfo->width = vq.w;
    player_debug_message("avi file h %d w %d frame rate %g", 
			 vinfo->height,
			 vinfo->width,
			 vq.frame_rate);

    plugin = check_for_video_codec(STREAM_TYPE_AVI_FILE,
				   codec_name, 
				   NULL,
				   -1,
				   -1,
				   NULL,
				   0,
				   &config);
    int ret;
    ret = mptr->create_video_plugin(plugin,
				    STREAM_TYPE_AVI_FILE,
				    codec_name,
				    -1,
				    -1,
				    NULL,
				    vinfo,
				    NULL,
				    0);
    if (ret < 0) {
      snprintf(errmsg, errlen, "Failed to create video plugin %s", 
	       codec_name);
      player_error_message("Failed to create plugin data");
      delete mptr;
      return -1;
    }
    CAviVideoByteStream *vbyte = new CAviVideoByteStream(Avifile1);
    if (vbyte == NULL) {
      delete mptr;
      return (-1);
    }
    vbyte->config(AVI_video_frames(avi), vq.frame_rate);
    ret = mptr->create(vbyte, TRUE, errmsg, errlen);
    if (ret != 0) {
      return (-1);
    }
  }
    
  int seekable = 1;
  if (have_audio_driver > 0 && audio_count > 0 && aq.enabled != 0) {
    plugin = check_for_audio_codec(STREAM_TYPE_AVI_FILE,
				   NULL,
				   NULL,
				   aq.type,
				   -1, 
				   NULL, 
				   0,
				   &config);
    CAviAudioByteStream *abyte;
    mptr = new CPlayerMedia(psptr);
    if (mptr == NULL) {
      return (-1);
    }
    audio_info_t *ainfo;
    ainfo = MALLOC_STRUCTURE(audio_info_t);
    ainfo->freq = aq.sampling_freq;
    ainfo->chans = aq.chans;
    ainfo->bitspersample = AVI_audio_bits(avi); 

  
    int ret;
    ret = mptr->create_audio_plugin(plugin, 
				    aq.stream_type,
				    aq.compressor,
				    aq.type,
				    aq.profile,
				    NULL, 
				    ainfo,
				    NULL, 
				    0);
    if (ret < 0) {
      delete mptr;
      player_error_message("Couldn't create audio from plugin %s", 
			   plugin->c_name);
      return -1;
    }
    abyte = new CAviAudioByteStream(Avifile1);

    ret = mptr->create(abyte, FALSE, errmsg, errlen);
    if (ret != 0) {
      return (-1);
    }
    seekable = 0;
  } 
  psptr->session_set_seekable(seekable);

  if (audio_count == 0 && have_audio != 0) {
    snprintf(errmsg, errlen, "Unknown Audio Codec in avi file ");
    return (1);
  }
  if (video_count != 1) {
    snprintf(errmsg, errlen, "Unknown Video Codec %s in avi file",
	     codec_name);
    return (1);
  }
  return (0);
}
Example #21
/*
 * Create - will determine pids and psts ranges in file.  Will also
 * loop through the file and determine CFilePosRec points at percentages
 */
int CMpeg2tFile::create (CPlayerSession *psptr)
{
  m_mpeg2t = create_mpeg2_transport();
  if (m_mpeg2t == NULL) {
    psptr->set_message("Couldn't create mpeg2 transport");
    fclose(m_ifile);
    return -1;
  }
  // nice, large buffers to process
  m_buffer_size_max = 188 * 2000;
  m_buffer = (uint8_t *)malloc(m_buffer_size_max);

  if (m_buffer == NULL) {
    psptr->set_message("Malloc error");
    return -1;
  }
  m_buffer[0] = MPEG2T_SYNC_BYTE;
  m_buffer_size = fread(&m_buffer[1], 1, m_buffer_size_max - 1, m_ifile) + 1;

  bool done = false;
  mpeg2t_pid_t *pidptr;
  uint32_t buflen_used;
  bool have_psts = false;
  uint64_t earliest_psts = 0;
  mpeg2t_es_t *es_pid;

  int olddebuglevel;
  olddebuglevel = config.get_config_value(CONFIG_MPEG2T_DEBUG);
  if (olddebuglevel != LOG_DEBUG)
    mpeg2t_set_loglevel(LOG_CRIT);
  m_mpeg2t->save_frames_at_start = 1;
  /*
   * We need to determine which PIDs are present, and try to establish
   * a starting psts.  We also want to establish what type of video and
   * audio are in the mix.  Note: if we try to run this on a file that
   * we don't understand the video, this could take a while, because the
   * info never gets loaded.
   */
  do {
    m_buffer_on = 0;
    while (m_buffer_on + 188 < m_buffer_size && done == false) {
      
      pidptr = mpeg2t_process_buffer(m_mpeg2t, 
				     &m_buffer[m_buffer_on],
				     m_buffer_size - m_buffer_on,
				     &buflen_used);
      m_buffer_on += buflen_used;
      if (pidptr != NULL && pidptr->pak_type == MPEG2T_ES_PAK) {
	es_pid = (mpeg2t_es_t *)pidptr;
	mpeg2t_frame_t *fptr;

	// determine earliest PS_TS
	while ((fptr = mpeg2t_get_es_list_head(es_pid)) != NULL) {
	  if (fptr->have_ps_ts != 0 || fptr->have_dts != 0) {
	    uint64_t ps_ts = 0;
	    bool store_psts = true;
	    if (fptr->have_dts != 0) {
	      ps_ts = fptr->dts;
	    } else {
	      if (es_pid->is_video == 1) { // mpeg2
		// video - make sure we get the first I frame, then we can
		// get the real timestamp
		if (fptr->frame_type != 1) {
		  store_psts = false;
		} else {
		  ps_ts = fptr->ps_ts;
		  uint16_t temp_ref = MP4AV_Mpeg3PictHdrTempRef(fptr->frame + fptr->pict_header_offset);
		  ps_ts -= ((temp_ref + 1) * es_pid->tick_per_frame);
		}
	      } else {
		ps_ts = fptr->ps_ts;
	      }
	    }
	    if (store_psts) {
	      // when we have the first psts for a ES_PID, turn off
	      // parsing frames for that PID.
	      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_NOTHING);
	      if (have_psts) {
		earliest_psts = MIN(earliest_psts, ps_ts);
	      } else {
		earliest_psts = ps_ts;
		have_psts = true;
	      }
	    }
	  }
	  mpeg2t_free_frame(fptr);
	}

	// Each time, search through and see if there are any ES_PIDs 
	// that have not returned a psts.  We're done when the info is
	// loaded for all the es pids.
	pidptr = m_mpeg2t->pas.pid.next_pid;
	bool finished = true;
	while (pidptr != NULL && finished) {
	  if (pidptr->pak_type == MPEG2T_ES_PAK) {
	    es_pid = (mpeg2t_es_t *)pidptr;
	    if (es_pid->info_loaded == 0) {
	      finished = false;
	    }
	  }
	  pidptr = pidptr->next_pid;
	}
	done = finished || have_psts;
      }
    }
    if (done == false) {
      m_buffer_size = fread(m_buffer, 1, m_buffer_size_max, m_ifile);
    }
  } while (m_buffer_size >= 188 && done == false);

  if (done == false) {
    psptr->set_message("Could not find information in TS");
    mpeg2t_set_loglevel(olddebuglevel);
    return -1;
  }

#ifdef DEBUG_MPEG2F_SEARCH
  mpeg2f_message(LOG_DEBUG, "initial psts is "U64, earliest_psts);
#endif
  m_start_psts = earliest_psts;

  // Now, we'll try to build a rough index for the file
  // enable psts reading for the pid
  for (pidptr = m_mpeg2t->pas.pid.next_pid; pidptr != NULL; pidptr = pidptr->next_pid) {
    if (pidptr->pak_type == MPEG2T_ES_PAK) {
      es_pid = (mpeg2t_es_t *)pidptr;
      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_REPORT_PSTS);
    }
  }
  m_file_record.record_point(0, earliest_psts, 0);
  fpos_t fpos;
  uint64_t end;
  uint64_t perc, cur;

  // find out the length of the file.
  struct stat filestat;
  if (fstat(fileno(m_ifile), &filestat) != 0) {
    return -1;
  }
  end = filestat.st_size;
  perc = end;
  // perc is what size of the file to skip through to get a rough
  // timetable.  We want to do 10% chunks, or 100Mb chunks, whichever is
  // less.
  while (perc > TO_U64(100000000)) {
    perc /= 2;
  }
  if (perc > (end / TO_U64(10))) {
    perc = end / TO_U64(10);
  }
  if (perc < (end / TO_U64(50))) {
    perc = end / TO_U64(50);
  }
#ifdef DEBUG_MPEG2F_SEARCH
  mpeg2f_message(LOG_DEBUG, "perc is "U64" "U64, perc, (perc * TO_U64(100)) / end );
#endif

  cur = perc;

  bool is_seekable = true;
  uint64_t last_psts, ts;
  last_psts = earliest_psts;

  // Now - skip to the next perc chunk, and try to find the next psts
  // we'll record this info.
  do {
#ifdef DEBUG_MPEG2F_SEARCH
    mpeg2f_message(LOG_DEBUG, "current "U64" end "U64, cur, end);
#endif
    VAR_TO_FPOS(fpos, cur);
    fsetpos(m_ifile, &fpos);
    done = false;
    uint64_t count = 0;
    m_buffer_on = 0;
    m_buffer_size = 0;
    do {
      if (m_buffer_on + 188 > m_buffer_size) {
	if (m_buffer_on < m_buffer_size) {
	  memmove(m_buffer, m_buffer + m_buffer_on, 
		  m_buffer_size - m_buffer_on);
	  m_buffer_on = m_buffer_size - m_buffer_on;
	} else {
	  m_buffer_on = 0;
	}
	m_buffer_size = fread(m_buffer + m_buffer_on, 
			      1, 
			      (188 * 10) - m_buffer_on, 
			      m_ifile);

	count += m_buffer_size - m_buffer_on;
	m_buffer_size += m_buffer_on;
	m_buffer_on = 0;
	if (m_buffer_size < 188) {
	  m_buffer_size = 0;
	  done = true;
	}
      }

      pidptr = mpeg2t_process_buffer(m_mpeg2t,
				     m_buffer + m_buffer_on, 
				     m_buffer_size - m_buffer_on, 
				     &buflen_used);
      m_buffer_on += buflen_used;
      if (pidptr != NULL && pidptr->pak_type == MPEG2T_ES_PAK) {
	es_pid = (mpeg2t_es_t *)pidptr;
	// If we have a psts, record it.
	// If it is less than the previous one, we've got a discontinuity, so
	// we can't seek.
	if (es_pid->have_ps_ts || es_pid->have_dts) {
	  ts = es_pid->have_ps_ts ? es_pid->ps_ts : es_pid->dts;
	  if (ts < last_psts) {
	    player_error_message("pid %x psts "U64" is less than prev record point "U64, 
				 es_pid->pid.pid, ts, last_psts);
	    cur = end;
	    is_seekable = false;
	  } else {
#ifdef DEBUG_MPEG2F_SEARCH
	    mpeg2f_message(LOG_DEBUG, "pid %x psts "U64" %d", 
			       pidptr->pid, ts, 
			       es_pid->is_video);
#endif
	    m_file_record.record_point(cur, ts, 0);
	  }
	  done = true;
	}
      }

    } while (done == false && count < perc / 2);
    cur += perc;

  } while (cur < end - (m_buffer_size_max * 2));

  //mpeg2f_message(LOG_DEBUG, "starting end search");
  // Now, we'll go to close to the end of the file, and look for a 
  // final PSTS.  This gives us a rough estimate of the elapsed time
  long seek_offset;
  seek_offset = 0;
  seek_offset -= (m_buffer_size_max) * 2;
  fseek(m_ifile, seek_offset, SEEK_END);
  m_buffer_on = m_buffer_size = 0;
  uint64_t max_psts;
  max_psts = m_start_psts;
  do {
    while (m_buffer_on + 188 <= m_buffer_size) {
      
      pidptr = mpeg2t_process_buffer(m_mpeg2t, 
				     &m_buffer[m_buffer_on],
				     m_buffer_size - m_buffer_on,
				     &buflen_used);
      m_buffer_on += buflen_used;
      if (pidptr != NULL && pidptr->pak_type == MPEG2T_ES_PAK) {
	es_pid = (mpeg2t_es_t *)pidptr;
	if (es_pid->have_ps_ts) {
	  es_pid->have_ps_ts = 0;
	  max_psts = MAX(es_pid->ps_ts, max_psts);
	} else if (es_pid->have_dts) {
	  es_pid->have_dts = 0;
	  max_psts = MAX(es_pid->dts, max_psts);
	}
      }
    }
    if (m_buffer_size > m_buffer_on) {
      memmove(m_buffer, m_buffer + m_buffer_on, m_buffer_size - m_buffer_on);
    }
    m_buffer_on = m_buffer_size - m_buffer_on;
    m_buffer_size = fread(m_buffer + m_buffer_on, 1, 
			  m_buffer_size_max - m_buffer_on, m_ifile);
    m_buffer_size += m_buffer_on;
    m_buffer_on = 0;
    if (m_buffer_size < 188) m_buffer_size = 0;
  } while (m_buffer_size > 188);
  m_last_psts = max_psts;
  // Calculate the rough max time; hopefully it will be greater than the
  // initial...
  m_max_time = max_psts;
  m_max_time -= m_start_psts;
  m_max_time /= 90000.0;
#ifdef DEBUG_MPEG2F_SEARCH
  player_debug_message("last psts is "U64" "U64" %g", max_psts,
		       (max_psts - m_start_psts) / TO_U64(90000),
		       m_max_time);
#endif
  mpeg2t_set_loglevel(olddebuglevel);

  if (is_seekable) {
    psptr->session_set_seekable(1);
  }
  m_ts_seeked_in_msec = UINT64_MAX;
  rewind(m_ifile);

  return 0;
}
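
The sizing of perc above reduces to a simple heuristic: halve the file size until it is at most about 100 MB, then clamp it between 2% and 10% of the file, so the rough index ends up with roughly 10 to 50 sample points. That heuristic on its own (an illustrative sketch):

#include <cstdint>

static uint64_t index_step_size (uint64_t file_size)
{
  uint64_t perc = file_size;
  while (perc > UINT64_C(100000000)) perc /= 2;      // cap each step at ~100 MB
  if (perc > file_size / 10) perc = file_size / 10;  // at least ~10 index points
  if (perc < file_size / 50) perc = file_size / 50;  // at most ~50 index points
  return perc;
}
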
Example #22
static int iso_decode (codec_data_t *ptr, 
		       frame_timestamp_t *ts, 
		       int from_rtp, 
		       int *sync_frame,
		       uint8_t *buffer,
		       uint32_t buflen,
		       void *userdata)
{
  Int iEof = 1;
  iso_decode_t *iso = (iso_decode_t *)ptr;
  uint32_t used = 0;

  if (buflen <= 4) return -1;

  //  iso_message(LOG_DEBUG, "iso", "frame %d", iso->m_total_frames);
  iso->m_total_frames++;
  buffer[buflen] = 0;
  buffer[buflen + 1] = 0;
  buffer[buflen + 2] = 1;

  switch (iso->m_decodeState) {
  case DECODE_STATE_VOL_SEARCH: {
    if (buffer[0] == 0 &&
	buffer[1] == 0 &&
	(buffer[2] & 0xfc) == 0x80 &&
	(buffer[3] & 0x03) == 0x02) {
      // we have the short header
      iso->m_short_header = 1;
      iso->m_pvodec->SetUpBitstreamBuffer((unsigned char *)buffer, buflen);
      iso->m_pvodec->video_plane_with_short_header();
      iso->m_pvodec->postVO_VOLHeadInit(iso->m_pvodec->getWidth(),
					iso->m_pvodec->getHeight(),
					&iso->m_bSpatialScalability);
      iso_message(LOG_INFO, mp4iso, "Decoding using short headers");
      iso->m_vft->video_configure(iso->m_ifptr, 
				  iso->m_pvodec->getWidth(),
				  iso->m_pvodec->getHeight(),
				  VIDEO_FORMAT_YUV,
				  calculate_aspect_ratio(iso));
      iso->m_decodeState = DECODE_STATE_NORMAL;
      try {
	iEof = iso->m_pvodec->h263_decode(FALSE);
      } catch (...) {
	iso_message(LOG_ERR, mp4iso, "Couldn't decode h263 in vol search");
      }
      break; 
    } else {
      uint8_t *volhdr = MP4AV_Mpeg4FindVol(buffer, buflen);
      if (volhdr != NULL) {
	used = volhdr - buffer;
	try {
	  iso->m_pvodec->SetUpBitstreamBuffer((unsigned char *)volhdr, buflen - used);
	  iso->m_pvodec->decodeVOLHead();
	  iso->m_pvodec->postVO_VOLHeadInit(iso->m_pvodec->getWidth(),
					    iso->m_pvodec->getHeight(),
					  &iso->m_bSpatialScalability);
	  iso_message(LOG_INFO, mp4iso, "Found VOL");
	
	  iso->m_vft->video_configure(iso->m_ifptr, 
				      iso->m_pvodec->getWidth(),
				      iso->m_pvodec->getHeight(),
				      VIDEO_FORMAT_YUV,
				      calculate_aspect_ratio(iso));
	
	  iso->m_decodeState = DECODE_STATE_WAIT_I;
	  used += iso->m_pvodec->get_used_bytes();
	} catch (int err) {
	  iso_message(LOG_DEBUG, mp4iso, "Caught exception in VOL search %d", err);
	  if (err == 1) used = buflen;
	  else used += iso->m_pvodec->get_used_bytes();
	}
      }
    }
    if (iso->m_decodeState != DECODE_STATE_WAIT_I) {
      if (iso->m_vinfo != NULL) {
	iso->m_pvodec->FakeOutVOVOLHead(iso->m_vinfo->height,
					iso->m_vinfo->width,
					30,
					&iso->m_bSpatialScalability);
	iso->m_vft->video_configure(iso->m_ifptr, 
				    iso->m_vinfo->width,
				    iso->m_vinfo->height,
				    VIDEO_FORMAT_YUV,
				    calculate_aspect_ratio(iso));

	iso->m_decodeState = DECODE_STATE_NORMAL;
      } 

      return used;
    }
    // else fall through
  }
  case DECODE_STATE_WAIT_I: {
    uint8_t *vophdr = MP4AV_Mpeg4FindVop(buffer, buflen);
    if (vophdr != NULL) {
      used = vophdr - buffer;
    }
    iso->m_pvodec->SetUpBitstreamBuffer((unsigned char *)buffer + used, buflen + 3 - used);
    try {
      iEof = iso->m_pvodec->decode(NULL, TRUE);
      if (iEof == -1) {
	iso->m_num_wait_i_frames++;
	return(iso->m_pvodec->get_used_bytes());
      }
      iso_message(LOG_DEBUG, mp4iso, "Back to normal decode");
      iso->m_decodeState = DECODE_STATE_NORMAL;
      iso->m_bCachedRefFrame = FALSE;
      iso->m_bCachedRefFrameCoded = FALSE;
      iso->m_cached_valid = FALSE;
      iso->m_cached_time = 0;
    } catch (int err) {
      if (err != 1)
	iso_message(LOG_DEBUG, mp4iso, 
		    "ts "U64",Caught exception in wait_i %d", 
		    ts->msec_timestamp, err);
      return (iso->m_pvodec->get_used_bytes());
      //return (-1);
    }
    break;
  }
  case DECODE_STATE_NORMAL:
    try {
      if (iso->m_short_header != 0) {
	iso->m_pvodec->SetUpBitstreamBuffer((unsigned char *)buffer, buflen + 3);
	iEof = iso->m_pvodec->h263_decode(TRUE);
      } else {
	uint8_t *vophdr = MP4AV_Mpeg4FindVop(buffer, buflen);
	if (vophdr != NULL && vophdr != buffer) {
	  iso_message(LOG_DEBUG, mp4iso, "Illegal code before VOP header");
	  used = vophdr - buffer;
	  buflen -= used;
	  buffer = vophdr;
	}
	iso->m_pvodec->SetUpBitstreamBuffer((unsigned char *)buffer, buflen + 3);
	iEof = iso->m_pvodec->decode(NULL, FALSE, FALSE);
      }
    } catch (int err) {
      // This is because sometimes, the decoder doesn't read all the bytes
      // it should out of the rtp packet.  The rtp bytestream does a read
      // and determines that we're trying to read across bytestreams.
      // If we get this, we don't want to change anything - just fall up
      // to the decoder thread so it gives us a new timestamp.
      if (err == 1) {
	// throw from running past end of frame
	return -1;
      }
      iso_message(LOG_DEBUG, mp4iso, 
		  "Mpeg4 ncaught %d -> waiting for I", err);
      iso->m_decodeState = DECODE_STATE_WAIT_I;
      return (iso->m_pvodec->get_used_bytes());
    } catch (...) {
      iso_message(LOG_DEBUG, mp4iso, 
		  "Mpeg4 ncaught -> waiting for I");
      iso->m_decodeState = DECODE_STATE_WAIT_I;
      //return (-1);
      return (iso->m_pvodec->get_used_bytes());
    }
    break;
  }

  /*
   * We've got a good frame.  See if we need to display it
   */
  const CVOPU8YUVBA *pvopcQuant = NULL;
  if (iso->m_pvodec->fSptUsage() == 1) {
    //player_debug_message("Sprite");
  }
  uint64_t displaytime = 0;
  int cached_ts = 0;
  if (iEof == EOF) {
    if (iso->m_bCachedRefFrame) {
      iso->m_bCachedRefFrame = FALSE;
      if (iso->m_bCachedRefFrameCoded) {
	pvopcQuant = iso->m_pvodec->pvopcRefQLater();
	displaytime = ts->msec_timestamp;
      }
    }
  } else {
#if 0
    iso_message(LOG_DEBUG, mp4iso, "frame "U64" type %d", 
		ts->msec_timestamp, iso->m_pvodec->vopmd().vopPredType);
#endif
    if (iso->m_pvodec->vopmd().vopPredType == BVOP) {
      if (iEof != FALSE) {
	pvopcQuant = iso->m_pvodec->pvopcReconCurr();
	displaytime = ts->msec_timestamp;
      } 
    } else {
      if (iso->m_bCachedRefFrame) {
	iso->m_bCachedRefFrame = FALSE;
	if (iso->m_bCachedRefFrameCoded) {
	  pvopcQuant = iso->m_pvodec->pvopcRefQPrev();
	  if (ts->timestamp_is_pts) {
	    int old_was_valid = iso->m_cached_valid;
	    displaytime = iso->m_cached_time;
	    cached_ts = 1;
	    // old time stamp wasn't valid - instead of calculating it
	    // ourselves, just punt on it.
	    if (old_was_valid == 0) {
	      return (iEof == EOF ? -1 : 0);
	    }
	  } else {
	    displaytime = ts->msec_timestamp;
	  }
	}
      }

      iso->m_cached_time = ts->msec_timestamp;
      iso->m_cached_valid = TRUE;
      iso->m_bCachedRefFrame = TRUE;
      iso->m_bCachedRefFrameCoded = (iEof != FALSE);
    }
  }

  if (pvopcQuant != NULL) {
#if 0
    player_debug_message("frame rtp_ts "U64" disp "U64" cached %d", 
			 ts->msec_timestamp, displaytime, cached_ts);
#endif
    /*
     * Get the information to the video sync structure
     */
    const uint8_t *y, *u, *v;
    int pixelw_y, pixelw_uv;
    pixelw_y =  pvopcQuant->getPlane(Y_PLANE)->where().width;
    pixelw_uv = pvopcQuant->getPlane(U_PLANE)->where().width;

    y = (const uint8_t *)pvopcQuant->getPlane(Y_PLANE)->pixels(0,0);
    u = (const uint8_t *)pvopcQuant->getPlane(U_PLANE)->pixels(0,0);
    v = (const uint8_t *)pvopcQuant->getPlane(V_PLANE)->pixels(0,0);
    iso->m_last_time = displaytime;
#if 0
    player_debug_message("Adding video at "U64" %d", displaytime,
			 iso->m_pvodec->vopmd().vopPredType);
#endif

    iso->m_vft->video_have_frame(iso->m_ifptr, 
				y, 
				u, 
				v, 
				pixelw_y, 
				pixelw_uv, 
				displaytime);
  } else {
    iso_message(LOG_DEBUG, mp4iso, "decode but no frame "U64, ts->msec_timestamp);
  }
  return (iso->m_pvodec->get_used_bytes() + used);
}
Example #23
/*
 * create_streaming_ondemand - create a session for streaming.  Create an
 * RTSP session with the server, get the SDP information from it.
 */
int CPlayerSession::create_streaming_ondemand (const char *url, 
					       char *errmsg,
					       uint32_t errlen, 
					       int use_tcp)
{
  rtsp_command_t cmd;
  rtsp_decode_t *decode;
  sdp_decode_info_t *sdpdecode;
  int dummy;
  int err;

  // streaming has seek capability (at least on demand)
  session_set_seekable(1);
  player_debug_message("Creating streaming %s", url);
  memset(&cmd, 0, sizeof(rtsp_command_t));

  /*
   * create RTSP session
   */
  if (use_tcp != 0) {
    m_rtsp_client = rtsp_create_client_for_rtp_tcp(url, &err);
  } else {
    m_rtsp_client = rtsp_create_client(url, &err);
  }
  if (m_rtsp_client == NULL) {
    snprintf(errmsg, errlen, "Failed to create RTSP client");
    player_error_message("Failed to create rtsp client - error %d", err);
    return (err);
  }
  m_rtp_over_rtsp = use_tcp;

  cmd.accept = "application/sdp";

  /*
   * Send the RTSP describe.  This should return SDP information about
   * the session.
   */
  int rtsp_resp;

  rtsp_resp = rtsp_send_describe(m_rtsp_client, &cmd, &decode);
  if (rtsp_resp != RTSP_RESPONSE_GOOD) {
    int retval;
    if (decode != NULL) {
      retval = (((decode->retcode[0] - '0') * 100) +
		((decode->retcode[1] - '0') * 10) +
		(decode->retcode[2] - '0'));
      snprintf(errmsg, errlen, "RTSP describe error %d %s", retval,
	       decode->retresp != NULL ? decode->retresp : "");
      free_decode_response(decode);
    } else {
      retval = -1;
      snprintf(errmsg, errlen, "RTSP return invalid %d", rtsp_resp);
    }
    player_error_message("Describe response not good\n");
    return (retval);
  }

  sdpdecode = set_sdp_decode_from_memory(decode->body);
  if (sdpdecode == NULL) {
    snprintf(errmsg, errlen, "Memory failure");
    player_error_message("Couldn't get sdp decode\n");
    free_decode_response(decode);
    return (-1);
  }

  /*
   * Decode the SDP information into structures we can use.
   */
  err = sdp_decode(sdpdecode, &m_sdp_info, &dummy);
  free(sdpdecode);
  if (err != 0) {
    snprintf(errmsg, errlen, "Couldn't decode session description %s",
	     decode->body);
    player_error_message("Couldn't decode sdp %s", decode->body);
    free_decode_response(decode);
    return (-1);
  }
  if (dummy != 1) {
    snprintf(errmsg, errlen, "Incorrect number of sessions in sdp decode %d",
	     dummy);
    player_error_message("%s", errmsg);
    free_decode_response(decode);
    return (-1);
  }

  /*
   * Make sure we can use the urls in the sdp info
   */
  if (decode->content_location != NULL) {
    // Note - we may have problems if the content location is not absolute.
    m_content_base = strdup(decode->content_location);
  } else if (decode->content_base != NULL) {
    m_content_base = strdup(decode->content_base);
  } else {
    int urllen = strlen(url);
    if (url[urllen - 1] != '/') {
      char *temp;
      temp = (char *)malloc(urllen + 2);
      strcpy(temp, url);
      strcat(temp, "/");
      m_content_base = temp;
    } else {
      m_content_base = strdup(url);
    }
  }

  convert_relative_urls_to_absolute(m_sdp_info,
				    m_content_base);

  if (m_sdp_info->control_string != NULL) {
    player_debug_message("setting control url to %s", m_sdp_info->control_string);
    set_session_control_url(m_sdp_info->control_string);
  }
  free_decode_response(decode);
  m_streaming = 1;
  m_streaming_ondemand = (get_range_from_sdp(m_sdp_info) != NULL);
  return (0);
}
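
The base-URL fallback above is: prefer Content-Location, then Content-Base, and otherwise use the request URL itself, appending a '/' when it lacks one. A hedged sketch of that last step (an illustrative helper, not the player's API):

#include <cstdlib>
#include <cstring>

// Return a malloc'd copy of url that is guaranteed to end in '/'.
static char *ensure_trailing_slash (const char *url)
{
  size_t len = strlen(url);
  int need_slash = (len == 0 || url[len - 1] != '/');
  char *out = (char *)malloc(len + (need_slash ? 2 : 1));
  memcpy(out, url, len);
  if (need_slash) out[len++] = '/';
  out[len] = '\0';
  return out;
}
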
Example #24
void CPlayerMedia::create_rtp_byte_stream (uint8_t rtp_pt,
					   uint64_t tps,
					   format_list_t *fmt)
{
  int codec;
  rtp_check_return_t plugin_ret;
  rtp_plugin_t *rtp_plugin;
  int stream_ondemand;
  rtp_plugin = NULL;
  plugin_ret = check_for_rtp_plugins(fmt, rtp_pt, &rtp_plugin, &config);
  
  stream_ondemand = 0;
  if (m_stream_ondemand == 1 &&
      get_range_from_media(m_media_info) != NULL) {
    // m_stream_ondemand == 1 means we're using RTSP, and having a range 
    // in the SDP means that we have an ondemand presentation; otherwise, we
    // want to treat it like a broadcast session, and use the RTCP.
    stream_ondemand = 1;
  }
  if (plugin_ret != RTP_PLUGIN_NO_MATCH) {
    switch (plugin_ret) {
    case RTP_PLUGIN_MATCH:
      player_debug_message("Starting rtp bytestream %s from plugin", 
			   rtp_plugin->name);
      m_rtp_byte_stream = new CPluginRtpByteStream(rtp_plugin,
						 fmt,
						 rtp_pt,
						 stream_ondemand,
						 tps,
						 &m_head,
						 &m_tail,
						 m_rtsp_base_seq_received,
						 m_rtp_base_seq,
						 m_rtsp_base_ts_received,
						 m_rtp_base_ts,
						 m_rtcp_received,
						 m_rtcp_ntp_frac,
						 m_rtcp_ntp_sec,
						 m_rtcp_rtp_ts);
      return;
    case RTP_PLUGIN_MATCH_USE_VIDEO_DEFAULT:
      // just fall through...
      break; 
    case RTP_PLUGIN_MATCH_USE_AUDIO_DEFAULT:
      m_rtp_byte_stream = 
	new CAudioRtpByteStream(rtp_pt, 
				fmt, 
				stream_ondemand,
				tps,
				&m_head,
				&m_tail,
				m_rtsp_base_seq_received,
				m_rtp_base_seq,
				m_rtsp_base_ts_received,
				m_rtp_base_ts,
				m_rtcp_received,
				m_rtcp_ntp_frac,
				m_rtcp_ntp_sec,
				m_rtcp_rtp_ts);
      if (m_rtp_byte_stream != NULL) {
	player_debug_message("Starting generic audio byte stream");
	return;
      }

    case RTP_PLUGIN_NO_MATCH:
    default:
      break;
    }
  } else {
    if (is_audio() == false && (rtp_pt == 32)) {
      codec = VIDEO_MPEG12;
      m_rtp_byte_stream = new CMpeg3RtpByteStream(rtp_pt,
						  fmt, 
						  stream_ondemand,
						  tps,
						  &m_head,
						  &m_tail,
						  m_rtsp_base_seq_received,
						  m_rtp_base_seq,
						  m_rtsp_base_ts_received,
						  m_rtp_base_ts,
						  m_rtcp_received,
						  m_rtcp_ntp_frac,
						  m_rtcp_ntp_sec,
						  m_rtcp_rtp_ts);
      if (m_rtp_byte_stream != NULL) {
	return;
      }
    } else {
      if (rtp_pt == 14) {
	codec = MPEG4IP_AUDIO_MP3;
      } else if (rtp_pt <= 23) {
	codec = MPEG4IP_AUDIO_GENERIC;
      }  else {
	if (fmt->rtpmap_name == NULL) return;

	codec = lookup_audio_codec_by_name(fmt->rtpmap_name);
	if (codec < 0) {
	  codec = MPEG4IP_AUDIO_NONE; // fall through everything to generic
	}
      }
      switch (codec) {
      case MPEG4IP_AUDIO_MP3: {
	m_rtp_byte_stream = 
	  new CAudioRtpByteStream(rtp_pt, fmt, 
				  stream_ondemand,
				  tps,
				  &m_head,
				  &m_tail,
				  m_rtsp_base_seq_received,
				  m_rtp_base_seq,
				  m_rtsp_base_ts_received,
				  m_rtp_base_ts,
				  m_rtcp_received,
				  m_rtcp_ntp_frac,
				  m_rtcp_ntp_sec,
				  m_rtcp_rtp_ts);
	if (m_rtp_byte_stream != NULL) {
	  m_rtp_byte_stream->set_skip_on_advance(4);
	  player_debug_message("Starting mp3 2250 audio byte stream");
	  return;
	}
      }
	break;
      case MPEG4IP_AUDIO_MP3_ROBUST:
	m_rtp_byte_stream = 
	  new CRfc3119RtpByteStream(rtp_pt, fmt, 
				    stream_ondemand,
				    tps,
				    &m_head,
				    &m_tail,
				    m_rtsp_base_seq_received,
				    m_rtp_base_seq,
				    m_rtsp_base_ts_received,
				    m_rtp_base_ts,
				    m_rtcp_received,
				    m_rtcp_ntp_frac,
				    m_rtcp_ntp_sec,
				    m_rtcp_rtp_ts);
	if (m_rtp_byte_stream != NULL) {
	  player_debug_message("Starting mpa robust byte stream");
	  return;
	}
	break;
      case MPEG4IP_AUDIO_GENERIC:
	m_rtp_byte_stream = 
	  new CAudioRtpByteStream(rtp_pt, fmt, 
				  stream_ondemand,
				  tps,
				  &m_head,
				  &m_tail,
				  m_rtsp_base_seq_received,
				  m_rtp_base_seq,
				  m_rtsp_base_ts_received,
				  m_rtp_base_ts,
				  m_rtcp_received,
				  m_rtcp_ntp_frac,
				  m_rtcp_ntp_sec,
				  m_rtcp_rtp_ts);
	if (m_rtp_byte_stream != NULL) {
	  player_debug_message("Starting generic audio byte stream");
	  return;
	}
      default:
	break;
      }
    }
  }
  if (m_rtp_byte_stream == NULL) 
    m_rtp_byte_stream = new CRtpByteStream(fmt->media->media,
					   fmt, 
					   rtp_pt,
					   stream_ondemand,
					   tps,
					   &m_head,
					   &m_tail,
					   m_rtsp_base_seq_received,
					   m_rtp_base_seq,
					   m_rtsp_base_ts_received,
					   m_rtp_base_ts,
					   m_rtcp_received,
					   m_rtcp_ntp_frac,
					   m_rtcp_ntp_sec,
					   m_rtcp_rtp_ts);
}
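
Stripped of the constructor plumbing, the selection above keys on the RTP payload type first (static assignments: 32 is MPEG-1/2 video, 14 is MPEG audio/MP3, other values up to 23 are treated as generic audio) and only consults the SDP rtpmap name for dynamic payload types. A condensed sketch of that decision (the enum and function are illustrative):

enum stream_kind {
  KIND_MPEG12_VIDEO,    // static payload type 32
  KIND_MP3_AUDIO,       // static payload type 14
  KIND_GENERIC_AUDIO,   // other static audio payload types (<= 23)
  KIND_BY_RTPMAP_NAME,  // dynamic payload type - look the codec up by name
  KIND_UNKNOWN          // dynamic payload type with no rtpmap name
};

static stream_kind classify_payload (bool is_audio, unsigned rtp_pt,
                                     const char *rtpmap_name)
{
  if (!is_audio && rtp_pt == 32) return KIND_MPEG12_VIDEO;
  if (rtp_pt == 14)              return KIND_MP3_AUDIO;
  if (rtp_pt <= 23)              return KIND_GENERIC_AUDIO;
  return rtpmap_name != NULL ? KIND_BY_RTPMAP_NAME : KIND_UNKNOWN;
}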