예제 #1
0
media_desc_t *create_text_sdp (CTextProfile *pConfig)
{
  // Build the SDP media description for the text stream: one media
  // entry with a single dynamic payload format (PT 98, 90 kHz clock).
  media_desc_t *media = MALLOC_STRUCTURE(media_desc_t);
  memset(media, 0, sizeof(*media));

  format_list_t *fmt = MALLOC_STRUCTURE(format_list_t);
  memset(fmt, 0, sizeof(*fmt));

  fmt->media = media;
  fmt->fmt = strdup("98");
  fmt->rtpmap_clock_rate = 90000;
  media->fmt_list = fmt;

  const char *encoding = pConfig->GetStringValue(CFG_TEXT_ENCODING);
  if (strcmp(encoding, TEXT_ENCODING_PLAIN) == 0) {
    // Plain text payload.
    fmt->rtpmap_name = strdup("x-plain-text");
    media->media = strdup("application");
  } else {
    // HREF payload; optionally carry the base URL as the fmtp parameter.
    fmt->rtpmap_name = strdup("X-HREF");
    media->media = strdup("control");
    const char *base_url = pConfig->GetStringValue(CFG_TEXT_HREF_BASE_URL);
    if (base_url != NULL) {
      size_t plen = strlen("base_url=") + strlen(base_url) + 1;
      char *param = (char *)malloc(plen);
      sprintf(param, "base_url=%s", base_url);
      fmt->fmt_param = param;
    }
  }

  return media;
}
예제 #2
0
media_desc_t *faac_create_audio_sdp (CAudioProfile *pConfig,
				     bool *mpeg4,
				     bool *isma_compliant,
				     uint8_t *audioProfile,
				     uint8_t **audioConfig,
				     uint32_t *audioConfigLen)
{
  // Build the SDP media description for the AAC audio stream.
  // Outputs (mpeg4, isma_compliant, audioProfile, audioConfig,
  // audioConfigLen) are filled in by faac_mp4_fileinfo() from the
  // encoder configuration; caller owns *audioConfig.
  media_desc_t *sdpMediaAudio;
  format_list_t *sdpMediaAudioFormat;
  rtpmap_desc_t *sdpAudioRtpMap;
  char audioFmtpBuf[512];

  faac_mp4_fileinfo(pConfig, mpeg4, isma_compliant, audioProfile, audioConfig,
 	    audioConfigLen, NULL);

  sdpMediaAudio = MALLOC_STRUCTURE(media_desc_t);
  memset(sdpMediaAudio, 0, sizeof(*sdpMediaAudio));

  sdpMediaAudioFormat = MALLOC_STRUCTURE(format_list_t);
  memset(sdpMediaAudioFormat, 0, sizeof(*sdpMediaAudioFormat));

  sdpMediaAudioFormat->media = sdpMediaAudio;
  sdpMediaAudioFormat->fmt = strdup("97");  // dynamic payload type

  sdpAudioRtpMap = MALLOC_STRUCTURE(rtpmap_desc_t);
  memset(sdpAudioRtpMap, 0, sizeof(*sdpAudioRtpMap));
  sdpAudioRtpMap->clock_rate = pConfig->GetIntegerValue(CFG_AUDIO_SAMPLE_RATE);

  // Base16 rendering of the AudioSpecificConfig; its length scales with
  // *audioConfigLen, so use snprintf below instead of sprintf to avoid
  // overflowing the fixed 512-byte fmtp buffer.
  char* sConfig = MP4BinaryToBase16(*audioConfig, *audioConfigLen);
  if(pConfig->GetBoolValue(CFG_RTP_RFC3016)) {
    // RFC 3016 LATM packing.
    sdpAudioRtpMap->encode_name = strdup("MP4A-LATM");
    snprintf(audioFmtpBuf, sizeof(audioFmtpBuf),
	     "profile-level-id=15;object=2;cpresent=0; config=%s ", sConfig);

  } else {
    // RFC 3640 mpeg4-generic AAC-hbr packing.
    sdp_add_string_to_list(&sdpMediaAudio->unparsed_a_lines, "a=mpeg4-esid:10");
    sdpAudioRtpMap->encode_name = strdup("mpeg4-generic");

    snprintf(audioFmtpBuf, sizeof(audioFmtpBuf),
	    "streamtype=5; profile-level-id=15; mode=AAC-hbr; config=%s; "
	    "SizeLength=13; IndexLength=3; IndexDeltaLength=3; Profile=1;",
	    sConfig); 
  }

  free(sConfig);
  sdpMediaAudioFormat->fmt_param = strdup(audioFmtpBuf);
  sdpMediaAudioFormat->rtpmap = sdpAudioRtpMap;
  sdpMediaAudio->fmt = sdpMediaAudioFormat;

  return sdpMediaAudio;
}
예제 #3
0
void CTextEncoder::AddRtpDestination (CMediaStream *stream,
				       bool disable_ts_offset, 
				       uint16_t max_ttl,
				       in_port_t srcPort)
{
  // Assemble an mp4live RTP parameter block from the stream's text
  // destination settings (and optional SRTP configuration), then hand
  // ownership of it to the common AddRtpDestInt() path.
  mp4live_rtp_params_t *mrtp;

  // NOTE(review): allocation result is not checked — consistent with the
  // rest of this file, but will fault on OOM.
  mrtp = MALLOC_STRUCTURE(mp4live_rtp_params_t);
  rtp_default_params(&mrtp->rtp_params);
  // Basic RTP transport: destination address/port from the stream config,
  // receive port from the caller-supplied source port.
  mrtp->rtp_params.rtp_addr = stream->GetStringValue(STREAM_TEXT_DEST_ADDR);
  mrtp->rtp_params.rtp_rx_port = srcPort;
  mrtp->rtp_params.rtp_tx_port = stream->GetIntegerValue(STREAM_TEXT_DEST_PORT);
  mrtp->rtp_params.rtp_ttl = max_ttl;
  mrtp->rtp_params.transmit_initial_rtcp = 1;
  mrtp->rtp_params.rtcp_addr = stream->GetStringValue(STREAM_TEXT_RTCP_DEST_ADDR);
  mrtp->rtp_params.rtcp_tx_port = stream->GetIntegerValue(STREAM_TEXT_RTCP_DEST_PORT);

  // SRTP: same key/salt is used for both transmit and receive directions.
  mrtp->use_srtp = stream->GetBoolValue(STREAM_TEXT_USE_SRTP);
  mrtp->srtp_params.enc_algo = 
    (srtp_enc_algos_t)stream->GetIntegerValue(STREAM_TEXT_SRTP_ENC_ALGO);
  mrtp->srtp_params.auth_algo = 
    (srtp_auth_algos_t)stream->GetIntegerValue(STREAM_TEXT_SRTP_AUTH_ALGO);
  mrtp->srtp_params.tx_key = stream->m_text_key;
  mrtp->srtp_params.tx_salt = stream->m_text_salt;
  mrtp->srtp_params.rx_key = stream->m_text_key;
  mrtp->srtp_params.rx_salt = stream->m_text_salt;
  mrtp->srtp_params.rtp_enc = stream->GetBoolValue(STREAM_TEXT_SRTP_RTP_ENC);
  mrtp->srtp_params.rtp_auth = stream->GetBoolValue(STREAM_TEXT_SRTP_RTP_AUTH);
  mrtp->srtp_params.rtcp_enc = stream->GetBoolValue(STREAM_TEXT_SRTP_RTCP_ENC);

  AddRtpDestInt(disable_ts_offset, mrtp);
}
예제 #4
0
// Read the next line from tptr->m_file into tptr->m_buffer, strip
// trailing whitespace, and remember the line's file offset the first
// time it is seen (so Prev/Start can seek back to it).
// Returns false at EOF, recording the last valid index in m_max_index.
static bool ReadNextLine (text_file_data_t *tptr)
{
    off_t start;
    start = ftello(tptr->m_file);
    if (fgets(tptr->m_buffer, PATH_MAX, tptr->m_file) == NULL) {
        tptr->m_max_index = tptr->m_index;
        return false;
    }
    size_t len = strlen(tptr->m_buffer);
    if (len > 0) {
        // Trim trailing whitespace.  Bug fix vs. original: the bounds
        // check now precedes the dereference, and a zero-length read no
        // longer forms a pointer before the start of the buffer (UB).
        // Cast to unsigned char: isspace() on a negative char is UB.
        char *end = tptr->m_buffer + len - 1;
        while (end > tptr->m_buffer && isspace((unsigned char)*end)) {
            *end = '\0';
            end--;
        }
    }
    debug_message("Read line %u %s", tptr->m_index, tptr->m_buffer);

    // Append an offset record only when this index hasn't been seen yet
    // (re-reads after a seek-back must not duplicate list entries).
    if (tptr->m_line_offset_tail == NULL ||
            tptr->m_line_offset_tail->index < tptr->m_index) {
        text_line_offset_t *tlptr = MALLOC_STRUCTURE(text_line_offset_t);
        tlptr->next_line = NULL;
        tlptr->index = tptr->m_index;
        tlptr->offset = start;
        if (tptr->m_line_offset_head == NULL) {
            tptr->m_line_offset_head = tptr->m_line_offset_tail = tlptr;
        } else {
            tptr->m_line_offset_tail->next_line = tlptr;
            tptr->m_line_offset_tail = tlptr;
        }
        debug_message("Add to end");
    }
    tptr->m_index++;
    return true;
}
예제 #5
0
/*
 * Isma rtp bytestream has a potential set of headers at the beginning
 * of each rtp frame.  This can interleave frames in different packets
 */
rtp_plugin_data_t *rfc3267_plugin_create (format_list_t *media_fmt,
					   uint8_t rtp_payload_type, 
					   rtp_vft_t *vft,
					   void *ifptr)
{
  // Create per-stream state for the RFC 3267 (AMR/AMR-WB) RTP plugin.
  rfc3267_data_t *amr = MALLOC_STRUCTURE(rfc3267_data_t);
  memset(amr, 0, sizeof(rfc3267_data_t));
  amr->m_vft = vft;
  amr->m_ifptr = ifptr;

  // Wideband vs. narrowband is decided by the rtpmap encode name.
  amr->m_amr_is_wb =
    (strcasecmp(media_fmt->rtpmap->encode_name, "AMR-WB") == 0);
#ifdef RFC3267_DUMP_OUTPUT_TO_FILE
  amr->m_outfile = fopen("raw.amr", "w");
#endif
  // RTP timestamp step per frame: 320 ticks for AMR-WB, 160 for AMR.
  amr->m_rtp_ts_add = amr->m_amr_is_wb ? 320 : 160;
  rfc3267_message(LOG_DEBUG, rfc3267rtp, "type %s ts add %u",
		  amr->m_amr_is_wb ? "AMR-WB" : "AMR", amr->m_rtp_ts_add);
  return &amr->plug;
}
예제 #6
0
// Fetch the next RTP packet for the H.261 stream and expose its payload
// as the next frame.  Fills *buffer/*buflen with the packet data,
// *userdata with a freshly allocated h261_rtp_userdata_t (loss flag and
// marker bit), and ts with the packet's timestamp in msec.
// Returns false when no packet is available.
static bool start_next_frame (rtp_plugin_data_t *pifptr, 
			      uint8_t **buffer, 
			      uint32_t *buflen,
			      frame_timestamp_t *ts,
			      void **userdata)
{
  h261_rtp_data_t *iptr = (h261_rtp_data_t *)pifptr;
  uint64_t timetick;
  h261_rtp_userdata_t *udata;

  // Release the previously delivered packet, if any.
  if (iptr->m_current_pak != NULL) {
    (iptr->m_vft->free_pak)(iptr->m_current_pak);
    iptr->m_current_pak = NULL;
  }

  iptr->m_current_pak = (iptr->m_vft->get_next_pak)(iptr->m_ifptr, 
						    NULL, 
						    1);
  if (iptr->m_current_pak == NULL) return false;

  // Bug fix: allocate userdata only after the NULL-pak check above;
  // the original allocated first and leaked it on that early return.
  udata = MALLOC_STRUCTURE(h261_rtp_userdata_t);
  if (udata == NULL) return false;

  // Detect sequence-number gaps (skip the check for the very first pak).
  udata->detected_loss = 0;
  if (iptr->m_first_pak != 0) {
    if (iptr->m_last_seq + 1 != iptr->m_current_pak->rtp_pak_seq) {
      udata->detected_loss = 1;
      h261_message(LOG_ERR, h261rtp, "RTP sequence should be %d is %d", 
		   iptr->m_last_seq + 1, iptr->m_current_pak->rtp_pak_seq);
    }
  }
  udata->m_bit_value = iptr->m_current_pak->rtp_pak_m;
  iptr->m_first_pak = 1;
  iptr->m_last_seq = iptr->m_current_pak->rtp_pak_seq;

  *buffer = iptr->m_current_pak->rtp_data;
  *buflen = iptr->m_current_pak->rtp_data_len;
  *userdata = udata;

#ifdef H261_RTP_DUMP_OUTPUT_TO_FILE
  if (*buffer != NULL) {
    fwrite(*buffer, *buflen,  1, iptr->m_outfile);
  }
#endif
  timetick = 
    iptr->m_vft->rtp_ts_to_msec(iptr->m_ifptr, 
				iptr->m_current_pak->rtp_pak_ts,
				iptr->m_current_pak->pd.rtp_pd_timestamp,
				0);
  // We're going to have to handle wrap better...
#ifdef DEBUG_H261
  h261_message(LOG_DEBUG, h261rtp, "start next frame %p %d ts %x "U64, 
	       *buffer, *buflen, iptr->m_current_pak->rtp_pak_ts, timetick);
#endif
  ts->msec_timestamp = timetick;
  ts->timestamp_is_pts = false;
  // Bug fix: the original returned (timetick), which reported failure
  // for a perfectly valid frame whose timestamp maps to 0 msec.
  return true;
}
// Allocate a new transaction-id list node with a freshly generated id.
// Returns NULL on allocation failure; caller owns the node.
transaction_id_t *transaction_id_create (void)
{
  transaction_id_t *node = MALLOC_STRUCTURE(transaction_id_t);
  if (node == NULL) return NULL;

  transaction_id_generate(node->tid);
  node->next_tid = NULL;
  return node;
}
예제 #8
0
// Hand the most recently encoded x264 frame to the caller.  The "buffer"
// returned is actually a heap-allocated h264_media_frame_t wrapping the
// vop buffer and NAL info (ownership transfers to the caller), with
// *pBufferLength set to 0 to mark the non-byte-buffer convention.
// Returns false when no encoded frame is pending or allocation fails.
bool CX264VideoEncoder::GetEncodedImage(
	u_int8_t** ppBuffer, u_int32_t* pBufferLength,
	Timestamp *dts, Timestamp *pts)
{
  if (m_vopBufferLength == 0) return false;

  h264_media_frame_t *mf = MALLOC_STRUCTURE(h264_media_frame_t);

  if (mf == NULL) {
    CHECK_AND_FREE(m_vopBuffer);
    m_vopBufferLength = 0;
    return false;
  }
  
  // Transfer ownership of the vop buffer and NAL table into the frame.
  mf->buffer = m_vopBuffer;
  mf->buffer_len = m_vopBufferLength;
  mf->nal_number = m_nal_num;
  mf->nal_bufs = m_nal_info;
  m_nal_info = NULL;
  m_vopBuffer = NULL;
  *ppBuffer = (uint8_t *)mf;
  *pBufferLength = 0;

#if 1
  // Prefer a PTS that matches one of the timestamps pushed on encode
  // (within m_frame_time); fall back to the encoder-derived value.
  Timestamp pts_try = m_pic_output.i_pts + m_pts_add;
  Timestamp closest_on_stack = m_push->Closest(pts_try, m_frame_time);
  if (closest_on_stack != 0) {
    *pts = closest_on_stack;
  } else {
    *pts = pts_try;
  }
  //  debug_message("try "U64" closest "U64, pts_try, closest_on_stack);
#else 
  *pts = m_pic_output.i_pts + m_pts_add;
#endif
  *dts = m_push->Pop();
  //  debug_message("dts "U64" pts "U64" "D64" type %u ", *dts, *pts, *pts - *dts, m_pic_output.i_type);
  // Clamp: PTS may never precede DTS, and a sub-6-tick gap is treated
  // as equal to avoid jitter from rounding.
  if (*dts > *pts) *pts = *dts;
  else if (*pts - *dts < 6) *pts = *dts;
#if 0
  if (*dts != *pts) {
    debug_message("PTS "U64" not DTS "U64, 
		  *pts, *dts);
  }
#endif
  // (Removed a redundant second m_vopBuffer = NULL; it was already
  // cleared above when ownership moved into mf.)
  m_vopBufferLength = 0;
  
  return true;
}
예제 #9
0
rtp_plugin_data_t *h264_rtp_plugin_create (format_list_t *media_fmt,
					   uint8_t rtp_payload_type, 
					   rtp_vft_t *vft,
					   void *ifptr)
{
  // Create zeroed per-stream state for the H.264 RTP plugin; vft/ifptr
  // give the plugin access back into the RTP core.
  h264_rtp_data_t *data = MALLOC_STRUCTURE(h264_rtp_data_t);
  memset(data, 0, sizeof(*data));
  data->m_vft = vft;
  data->m_ifptr = ifptr;

#ifdef H264_RTP_DUMP_OUTPUT_TO_FILE
  data->m_outfile = fopen("rtp.h264", "w");
#endif

  return &data->plug;
}
예제 #10
0
// Create and initialize an xvid decoder instance.  Decoder state starts
// in VO-search mode unless a VOL/VO header can be parsed up front from
// the SDP fmtp parameter or the supplied userdata.
static codec_data_t *xvid_create (const char *stream_type,
				  const char *compressor, 
				  int type, 
				  int profile,
				  format_list_t *media_fmt,
				  video_info_t *vinfo,
				  const uint8_t *userdata,
				  uint32_t ud_size,
				  video_vft_t *vft,
				  void *ifptr)
{
  xvid_codec_t *codec = MALLOC_STRUCTURE(xvid_codec_t);
  memset(codec, 0, sizeof(*codec));

  codec->m_vft = vft;
  codec->m_ifptr = ifptr;

  // Global xvid library initialization.
  xvid_gbl_init_t gbl_init;
  gbl_init.version = XVID_VERSION;
  gbl_init.cpu_flags = 0;
  xvid_global(NULL, 0, &gbl_init, NULL);

  codec->m_decodeState = XVID_STATE_VO_SEARCH;
  if (media_fmt != NULL && media_fmt->fmt_param != NULL) {
    // Try to decode a passed-in vovod header from the SDP.
    if (parse_vovod(codec, media_fmt->fmt_param, 1, 0) == 0) {
      codec->m_decodeState = XVID_STATE_WAIT_I;
    }
  } else if (userdata != NULL) {
    // Otherwise try raw userdata bytes.
    if (parse_vovod(codec, (char *)userdata, 0, ud_size) == 0) {
      codec->m_decodeState = XVID_STATE_WAIT_I;
    }
  }

  codec->m_vinfo = vinfo;
  codec->m_num_wait_i = 0;
  codec->m_num_wait_i_frames = 0;
  codec->m_total_frames = 0;
  xvid_message(LOG_DEBUG, "xvid", "created xvid");
  return (codec_data_t *)codec;
}
예제 #11
0
// Probe a file name for a raw CELP bitstream (".celp" extension) and, if
// it matches, create a codec instance with the first buffer-full of data
// read in.  Returns NULL when the name doesn't match or setup fails.
codec_data_t *celp_file_check (lib_message_func_t message,
			      const char *name, 
			      double *max, 
			      char *desc[4])
{
  celp_codec_t *celp;
  size_t len = strlen(name);
  // Bug fix: names shorter than ".celp" made (name + len - 5) point
  // before the string (UB); reject them explicitly.
  if (len < 5 || strcasecmp(name + len - 5, ".celp") != 0) {
    return (NULL);
  }

  celp = MALLOC_STRUCTURE(celp_codec_t);
  if (celp == NULL) return NULL;
  memset(celp, 0, sizeof(*celp));
  *max = 0;

  celp->m_buffer = (uint8_t *)malloc(MAX_READ_BUFFER);
  if (celp->m_buffer == NULL) {
    free(celp);
    return NULL;
  }
  celp->m_buffer_size_max = MAX_READ_BUFFER;
  celp->m_ifile = fopen(name, FOPEN_READ_BINARY);
  if (celp->m_ifile == NULL) {
    free(celp->m_buffer);  // bug fix: the read buffer was leaked here
    free(celp);
    return NULL;
  }
  //celp->m_output_frame_size = 1024;
  
  celp->m_buffer_size = fread(celp->m_buffer, 
			     1, 
			     celp->m_buffer_size_max, 
			     celp->m_ifile);

  // Bug fix: freq/chans were read uninitialized below (UB).  Zero-init
  // so the error path is taken deterministically; the header decode that
  // should populate them is still missing (see comment below).
  unsigned long freq = 0, chans = 0;

  // may want to actually decode the first frame...
  if (freq == 0) {
    message(LOG_ERR, celplib, "Couldn't determine CELP frame rate");
    celp_close((codec_data_t *)celp);
    return (NULL);
  } 
  celp->m_freq = freq;
  celp->m_chans = chans;
  celp->m_celp_inited = 1;
  celp->m_framecount = 0;
  return ((codec_data_t *)celp);
}
예제 #12
0
// Create an ISO MPEG-4 video decoder instance.  Starts in VOL-search
// mode unless a VOL header can be parsed from the SDP fmtp parameter or
// the supplied userdata, in which case it waits for the first I frame.
static codec_data_t *iso_create (const char *stream_type,
				 const char *compressor, 
				 int type, 
				 int profile, 
				 format_list_t *media_fmt,
				 video_info_t *vinfo,
				 const uint8_t *userdata,
				 uint32_t ud_size,
				 video_vft_t *vft,
				 void *ifptr)
{
  iso_decode_t *dec = MALLOC_STRUCTURE(iso_decode_t);
  if (dec == NULL) return NULL;
  memset(dec, 0, sizeof(*dec));
  dec->m_vft = vft;
  dec->m_ifptr = ifptr;

  dec->m_main_short_video_header = FALSE;
  dec->m_pvodec = new CVideoObjectDecoder();
  dec->m_decodeState = DECODE_STATE_VOL_SEARCH;
  if (media_fmt != NULL && media_fmt->fmt_param != NULL) {
    // Try to decode a passed-in vovod header from the SDP.
    if (parse_vovod(dec, media_fmt->fmt_param, 1, 0) == 1) {
      dec->m_decodeState = DECODE_STATE_WAIT_I;
    }
  } else if (userdata != NULL) {
    // Otherwise try raw userdata bytes.
    if (parse_vovod(dec, (const char *)userdata, 0, ud_size) == 1) {
      dec->m_decodeState = DECODE_STATE_WAIT_I;
    }
  }
  dec->m_vinfo = vinfo;

  dec->m_num_wait_i = 0;
  dec->m_num_wait_i_frames = 0;
  dec->m_total_frames = 0;
  return (codec_data_t *)dec;
}
예제 #13
0
// Byte stream for RFC 3119 (robust MP3 / "mparobust") RTP payloads.
// All RTP/RTCP bookkeeping is delegated to CRtpByteStreamBase; this
// constructor only initializes the ADU reassembly state.
CRfc3119RtpByteStream::CRfc3119RtpByteStream (unsigned int rtp_pt,
					      format_list_t *fmt,
					      int ondemand,
					      uint64_t tps,
					      rtp_packet **head, 
					      rtp_packet **tail,
					      int rtp_seq_set,
					      uint16_t rtp_base_seq,
					      int rtp_ts_set,
					      uint32_t rtp_base_ts,
					      int rtcp_received,
					      uint32_t ntp_frac,
					      uint32_t ntp_sec,
					      uint32_t rtp_ts) :
  CRtpByteStreamBase("mparobust", fmt, rtp_pt, ondemand, tps, head, tail, 
		     rtp_seq_set, rtp_base_seq, rtp_ts_set, rtp_base_ts,
		     rtcp_received, ntp_frac, ntp_sec, rtp_ts)
{
#ifdef ISMA_RTP_DUMP_OUTPUT_TO_FILE
  m_outfile = fopen("isma.aac", "w");
#endif
  // ADU list heads: free pool plus the deinterleave/ordered/pending
  // processing stages, all initially empty.
  m_adu_data_free = NULL;
  m_deinterleave_list = NULL;
  m_ordered_adu_list = NULL;
  m_pending_adu_list = NULL;

  // Pre-populate the free pool with 25 ADU nodes so the receive path
  // normally doesn't have to allocate.
  // NOTE(review): allocation results are unchecked, as elsewhere here.
  adu_data_t *p;
  for (int ix = 0; ix < 25; ix++) {
    p = MALLOC_STRUCTURE(adu_data_t);
    p->next_adu = m_adu_data_free;
    m_adu_data_free = p;
  }
  m_rtp_ts_add = 0;
  m_recvd_first_pak = 0;
  m_got_next_idx = 0;
  m_mp3_frame = NULL;
  m_mp3_frame_size = 0;
}
예제 #14
0
// Create an mpeg2dec (libmpeg2) decoder instance.
// Returns NULL on allocation failure — added for consistency with the
// sibling iso_create(), which checks its allocation.
static codec_data_t *mpeg2dec_create (const char *stream_type,
				      const char *compressor,
				      int type, 
				      int profile, 
				      format_list_t *media_fmt,
				      video_info_t *vinfo,
				      const uint8_t *userdata,
				      uint32_t ud_size,
				      video_vft_t *vft,
				      void *ifptr)
{
  mpeg2dec_codec_t *mpeg2dec;

  mpeg2dec = MALLOC_STRUCTURE(mpeg2dec_codec_t);
  if (mpeg2dec == NULL) return NULL;
  memset(mpeg2dec, 0, sizeof(*mpeg2dec));

  mpeg2dec->m_vft = vft;
  mpeg2dec->m_ifptr = ifptr;

  mpeg2dec->m_decoder = mpeg2_init();

  // Start "paused" so the first decode call performs its resume setup.
  mpeg2dec->m_did_pause = 1;
  return ((codec_data_t *)mpeg2dec);
}
예제 #15
0
// Build the "Text File Transmission" dialog.  When do_file is true the
// dialog shows the configured source file's name, loads its first line,
// and enables the Start/Prev/Next/End navigation buttons; otherwise the
// dialog is a plain line-entry sender with navigation disabled.
// Returns the top-level widget, or NULL if the source file can't be
// opened (bug fix below: the text_file_data_t is now freed on that
// error path instead of leaking).
GtkWidget *create_TextFileDialog (bool do_file)
{
    GtkWidget *TextFileDialog;
    GtkWidget *vbox42;
    GtkWidget *hbox105 = NULL;
    GtkWidget *label196 = NULL;
    GtkWidget *FileNameLabel = NULL;
    GtkWidget *LineEntry;
    GtkWidget *hbox111;
    GtkWidget *vbox43;
    GtkWidget *StartButton;
    GtkWidget *alignment32;
    GtkWidget *hbox112;
    GtkWidget *image38;
    GtkWidget *label204;
    GtkWidget *vbox44;
    GtkWidget *vbox45;
    GtkWidget *PrevButton;
    GtkWidget *alignment33;
    GtkWidget *hbox113;
    GtkWidget *image39;
    GtkWidget *label205;
    GtkWidget *label206;
    GtkWidget *vbox46;
    GtkWidget *NextButton;
    GtkWidget *alignment34;
    GtkWidget *hbox114;
    GtkWidget *label207;
    GtkWidget *image40;
    GtkWidget *vbox47;
    GtkWidget *EndButton;
    GtkWidget *alignment35;
    GtkWidget *hbox115;
    GtkWidget *label208;
    GtkWidget *image41;
    GtkWidget *label209;
    GtkWidget *vbox48;
    GtkWidget *SendButton;
    GtkWidget *alignment36;
    GtkWidget *hbox116;
    GtkWidget *statusbar2;
    GtkTooltips *tooltips;

    tooltips = gtk_tooltips_new();

    // Top-level window and main vertical box.
    TextFileDialog = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_window_set_title(GTK_WINDOW(TextFileDialog), _("Text File Transmission"));
    gtk_window_set_position(GTK_WINDOW(TextFileDialog), GTK_WIN_POS_CENTER);

    vbox42 = gtk_vbox_new(FALSE, 13);
    gtk_widget_show(vbox42);
    gtk_container_add(GTK_CONTAINER(TextFileDialog), vbox42);

    // Optional "File Name:" row, only in file mode.
    if (do_file) {
        hbox105 = gtk_hbox_new(FALSE, 0);
        gtk_widget_show(hbox105);
        gtk_box_pack_start(GTK_BOX(vbox42), hbox105, FALSE, FALSE, 0);

        label196 = gtk_label_new(_("File Name:"));
        gtk_widget_show(label196);
        gtk_box_pack_start(GTK_BOX(hbox105), label196, TRUE, TRUE, 0);
        gtk_misc_set_padding(GTK_MISC(label196), 0, 9);


        FileNameLabel = gtk_label_new("");
        gtk_widget_show(FileNameLabel);
        gtk_box_pack_start(GTK_BOX(hbox105), FileNameLabel, TRUE, TRUE, 0);
    }
    // Line entry used to edit/send the current text line.
    LineEntry = gtk_entry_new();
    gtk_widget_show(LineEntry);
    gtk_box_pack_start(GTK_BOX(vbox42), LineEntry, FALSE, FALSE, 0);

    // Button row: Start / Previous / Next / End / Send.
    hbox111 = gtk_hbox_new(FALSE, 0);
    gtk_widget_show(hbox111);
    gtk_box_pack_start(GTK_BOX(vbox42), hbox111, TRUE, TRUE, 0);

    vbox43 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox43);
    gtk_box_pack_start(GTK_BOX(hbox111), vbox43, FALSE, TRUE, 0);

    StartButton = gtk_button_new();
    gtk_widget_show(StartButton);
    gtk_box_pack_start(GTK_BOX(vbox43), StartButton, TRUE, FALSE, 0);
    gtk_tooltips_set_tip(tooltips, StartButton, _("Move to beginning of file"), NULL);

    alignment32 = gtk_alignment_new(0.5, 0.5, 0, 0);
    gtk_widget_show(alignment32);
    gtk_container_add(GTK_CONTAINER(StartButton), alignment32);

    hbox112 = gtk_hbox_new(FALSE, 2);
    gtk_widget_show(hbox112);
    gtk_container_add(GTK_CONTAINER(alignment32), hbox112);

    image38 = gtk_image_new_from_stock("gtk-goto-first", GTK_ICON_SIZE_BUTTON);
    gtk_widget_show(image38);
    gtk_box_pack_start(GTK_BOX(hbox112), image38, FALSE, FALSE, 0);

    label204 = gtk_label_new_with_mnemonic(_("Start"));
    gtk_widget_show(label204);
    gtk_box_pack_start(GTK_BOX(hbox112), label204, FALSE, FALSE, 0);

    vbox44 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox44);
    gtk_box_pack_start(GTK_BOX(hbox111), vbox44, FALSE, TRUE, 0);

    vbox45 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox45);
    gtk_box_pack_start(GTK_BOX(vbox44), vbox45, TRUE, FALSE, 0);

    PrevButton = gtk_button_new();
    gtk_widget_show(PrevButton);
    gtk_box_pack_start(GTK_BOX(vbox45), PrevButton, TRUE, FALSE, 0);
    gtk_tooltips_set_tip(tooltips, PrevButton, _("Move to previous entry"), NULL);

    alignment33 = gtk_alignment_new(0.5, 0.5, 0, 0);
    gtk_widget_show(alignment33);
    gtk_container_add(GTK_CONTAINER(PrevButton), alignment33);

    hbox113 = gtk_hbox_new(FALSE, 2);
    gtk_widget_show(hbox113);
    gtk_container_add(GTK_CONTAINER(alignment33), hbox113);

    image39 = gtk_image_new_from_stock("gtk-go-back", GTK_ICON_SIZE_BUTTON);
    gtk_widget_show(image39);
    gtk_box_pack_start(GTK_BOX(hbox113), image39, FALSE, FALSE, 0);

    label205 = gtk_label_new_with_mnemonic(_("Previous"));
    gtk_widget_show(label205);
    gtk_box_pack_start(GTK_BOX(hbox113), label205, FALSE, FALSE, 0);

    label206 = gtk_label_new("");
    gtk_widget_show(label206);
    gtk_box_pack_start(GTK_BOX(hbox111), label206, TRUE, TRUE, 11);

    vbox46 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox46);
    gtk_box_pack_start(GTK_BOX(hbox111), vbox46, TRUE, TRUE, 0);

    NextButton = gtk_button_new();
    gtk_widget_show(NextButton);
    gtk_box_pack_start(GTK_BOX(vbox46), NextButton, TRUE, FALSE, 0);
    gtk_tooltips_set_tip(tooltips, NextButton, _("Move to next entry"), NULL);

    alignment34 = gtk_alignment_new(0.5, 0.5, 0, 0);
    gtk_widget_show(alignment34);
    gtk_container_add(GTK_CONTAINER(NextButton), alignment34);

    hbox114 = gtk_hbox_new(FALSE, 2);
    gtk_widget_show(hbox114);
    gtk_container_add(GTK_CONTAINER(alignment34), hbox114);

    label207 = gtk_label_new_with_mnemonic(_("Next"));
    gtk_widget_show(label207);
    gtk_box_pack_start(GTK_BOX(hbox114), label207, FALSE, FALSE, 0);

    image40 = gtk_image_new_from_stock("gtk-go-forward", GTK_ICON_SIZE_BUTTON);
    gtk_widget_show(image40);
    gtk_box_pack_start(GTK_BOX(hbox114), image40, FALSE, FALSE, 0);

    vbox47 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox47);
    gtk_box_pack_start(GTK_BOX(hbox111), vbox47, TRUE, TRUE, 0);

    EndButton = gtk_button_new();
    gtk_widget_show(EndButton);
    gtk_box_pack_start(GTK_BOX(vbox47), EndButton, TRUE, FALSE, 0);
    gtk_tooltips_set_tip(tooltips, EndButton, _("Move to last entry in file"), NULL);

    alignment35 = gtk_alignment_new(0.5, 0.5, 0, 0);
    gtk_widget_show(alignment35);
    gtk_container_add(GTK_CONTAINER(EndButton), alignment35);

    hbox115 = gtk_hbox_new(FALSE, 2);
    gtk_widget_show(hbox115);
    gtk_container_add(GTK_CONTAINER(alignment35), hbox115);

    label208 = gtk_label_new_with_mnemonic(_("End"));
    gtk_widget_show(label208);
    gtk_box_pack_start(GTK_BOX(hbox115), label208, FALSE, FALSE, 0);

    image41 = gtk_image_new_from_stock("gtk-goto-last", GTK_ICON_SIZE_BUTTON);
    gtk_widget_show(image41);
    gtk_box_pack_start(GTK_BOX(hbox115), image41, FALSE, FALSE, 0);

    label209 = gtk_label_new("");
    gtk_widget_show(label209);
    gtk_box_pack_start(GTK_BOX(hbox111), label209, TRUE, TRUE, 26);

    vbox48 = gtk_vbox_new(FALSE, 0);
    gtk_widget_show(vbox48);
    gtk_box_pack_start(GTK_BOX(hbox111), vbox48, TRUE, FALSE, 0);

    SendButton = gtk_button_new();
    gtk_widget_show(SendButton);
    gtk_box_pack_start(GTK_BOX(vbox48), SendButton, TRUE, FALSE, 0);
    gtk_tooltips_set_tip(tooltips, SendButton, _("Transmit file"), NULL);

    alignment36 = gtk_alignment_new(0.5, 0.5, 0, 0);
    gtk_widget_show(alignment36);
    gtk_container_add(GTK_CONTAINER(SendButton), alignment36);

    hbox116 = gtk_hbox_new(FALSE, 2);
    gtk_widget_show(hbox116);
    gtk_container_add(GTK_CONTAINER(alignment36), hbox116);

    GtkWidget *label210 = gtk_label_new_with_mnemonic(_("Send"));
    gtk_widget_show(label210);
    gtk_box_pack_start(GTK_BOX(hbox116), label210, FALSE, FALSE, 0);

    GtkWidget *image42 = gtk_image_new_from_stock("gtk-ok", GTK_ICON_SIZE_BUTTON);
    gtk_widget_show(image42);
    gtk_box_pack_start(GTK_BOX(hbox116), image42, FALSE, FALSE, 0);

    statusbar2 = gtk_statusbar_new ();
    gtk_widget_show (statusbar2);
    gtk_box_pack_start (GTK_BOX (vbox42), statusbar2, FALSE, FALSE, 0);

    // Signal wiring; navigation buttons only do something in file mode.
    g_signal_connect((gpointer) TextFileDialog, "delete_event",
                     G_CALLBACK(on_TextFileDialog_delete_event),
                     NULL);
    g_signal_connect((gpointer) TextFileDialog, "destroy",
                     G_CALLBACK(on_TextFileDialog_destroy),
                     NULL);
    if (do_file) {
        g_signal_connect((gpointer) StartButton, "clicked",
                         G_CALLBACK(on_StartButton_clicked),
                         TextFileDialog);
        g_signal_connect((gpointer) PrevButton, "clicked",
                         G_CALLBACK(on_PrevButton_clicked),
                         TextFileDialog);
        g_signal_connect((gpointer) NextButton, "clicked",
                         G_CALLBACK(on_NextButton_clicked),
                         TextFileDialog);
        g_signal_connect((gpointer) EndButton, "clicked",
                         G_CALLBACK(on_EndButton_clicked),
                         TextFileDialog);
    }
    g_signal_connect((gpointer) SendButton, "clicked",
                     G_CALLBACK(on_SendButton_clicked),
                     TextFileDialog);
    g_signal_connect((gpointer)LineEntry, "activate",
                     G_CALLBACK(on_LineEntry_activate),
                     TextFileDialog);

    /* Store pointers to all widgets, for use by lookup_widget(). */
    GLADE_HOOKUP_OBJECT_NO_REF(TextFileDialog, TextFileDialog, "TextFileDialog");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox42, "vbox42");
    if (hbox105 != NULL) {
        GLADE_HOOKUP_OBJECT(TextFileDialog, hbox105, "hbox105");
        GLADE_HOOKUP_OBJECT(TextFileDialog, label196, "label196");
        GLADE_HOOKUP_OBJECT(TextFileDialog, FileNameLabel, "FileNameLabel");
    }
    GLADE_HOOKUP_OBJECT(TextFileDialog, LineEntry, "LineEntry");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox111, "hbox111");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox43, "vbox43");
    GLADE_HOOKUP_OBJECT(TextFileDialog, StartButton, "StartButton");
    GLADE_HOOKUP_OBJECT(TextFileDialog, alignment32, "alignment32");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox112, "hbox112");
    GLADE_HOOKUP_OBJECT(TextFileDialog, image38, "image38");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label204, "label204");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox44, "vbox44");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox45, "vbox45");
    GLADE_HOOKUP_OBJECT(TextFileDialog, PrevButton, "PrevButton");
    GLADE_HOOKUP_OBJECT(TextFileDialog, alignment33, "alignment33");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox113, "hbox113");
    GLADE_HOOKUP_OBJECT(TextFileDialog, image39, "image39");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label205, "label205");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label206, "label206");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox46, "vbox46");
    GLADE_HOOKUP_OBJECT(TextFileDialog, NextButton, "NextButton");
    GLADE_HOOKUP_OBJECT(TextFileDialog, alignment34, "alignment34");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox114, "hbox114");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label207, "label207");
    GLADE_HOOKUP_OBJECT(TextFileDialog, image40, "image40");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox47, "vbox47");
    GLADE_HOOKUP_OBJECT(TextFileDialog, EndButton, "EndButton");
    GLADE_HOOKUP_OBJECT(TextFileDialog, alignment35, "alignment35");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox115, "hbox115");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label208, "label208");
    GLADE_HOOKUP_OBJECT(TextFileDialog, image41, "image41");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label209, "label209");
    GLADE_HOOKUP_OBJECT(TextFileDialog, vbox48, "vbox48");
    GLADE_HOOKUP_OBJECT(TextFileDialog, SendButton, "SendButton");
    GLADE_HOOKUP_OBJECT(TextFileDialog, alignment36, "alignment36");
    GLADE_HOOKUP_OBJECT(TextFileDialog, hbox116, "hbox116");
    GLADE_HOOKUP_OBJECT(TextFileDialog, label210, "label210");
    GLADE_HOOKUP_OBJECT(TextFileDialog, image42, "image42");
    GLADE_HOOKUP_OBJECT_NO_REF(TextFileDialog, tooltips, "tooltips");
    GLADE_HOOKUP_OBJECT (TextFileDialog, statusbar2, "statusbar2");

    if (do_file) {
        // Attach the source-file reader state to the dialog.
        text_file_data_t *tptr = MALLOC_STRUCTURE(text_file_data_t);
        memset(tptr, 0, sizeof(*tptr));
        const char *fname =
            MyConfig->GetStringValue(CONFIG_TEXT_SOURCE_FILE_NAME);

        tptr->m_file = fopen(fname, "r");
        if (tptr->m_file == NULL) {
            char buffer[PATH_MAX];
            snprintf(buffer, PATH_MAX, "Can't open file %s", fname);
            ShowMessage("Can't open file",buffer);
            free(tptr);  // bug fix: tptr leaked on this error path
            gtk_widget_destroy(TextFileDialog);
            return NULL;
        }
        gtk_label_set_text(GTK_LABEL(FileNameLabel), fname);
        ReadNextLine(tptr);
        DisplayLineInBuffer(TextFileDialog, tptr);
        GLADE_HOOKUP_OBJECT_NO_REF(TextFileDialog, tptr, "TextFileData");
    } else {
        gtk_widget_set_sensitive(StartButton, false);
        gtk_widget_set_sensitive(PrevButton, false);
        gtk_widget_set_sensitive(NextButton, false);
        gtk_widget_set_sensitive(EndButton, false);
        GLADE_HOOKUP_OBJECT_NO_REF(TextFileDialog, NULL, "TextFileData");
    }

    // Accept drag-and-drop onto both the window and the line entry.
    gtk_drag_dest_set(TextFileDialog,
                      GTK_DEST_DEFAULT_ALL,
                      drop_types,
                      NUM_ELEMENTS_IN_ARRAY(drop_types),
                      GDK_ACTION_COPY);
    gtk_drag_dest_set(LineEntry,
                      GTK_DEST_DEFAULT_ALL,
                      drop_types,
                      NUM_ELEMENTS_IN_ARRAY(drop_types),
                      GDK_ACTION_COPY);

    g_signal_connect((gpointer)TextFileDialog, "drag_data_received",
                     G_CALLBACK(on_drag_data_received),
                     TextFileDialog);

    g_signal_connect((gpointer)LineEntry, "drag_data_received",
                     G_CALLBACK(on_drag_data_received_entry),
                     TextFileDialog);

    gtk_widget_show(TextFileDialog);

    // File mode immediately transmits the first line; entry mode just
    // focuses the input field.
    if (do_file) {
        gtk_widget_grab_focus(SendButton);
        on_SendButton_clicked(GTK_BUTTON(SendButton), TextFileDialog);
    } else {
        gtk_widget_grab_focus(LineEntry);
    }
    return TextFileDialog;
}
예제 #16
0
// Return the next encoded frame with its dts/pts.  For codecs that may
// reorder frames (MPEG-2, or MPEG-4 with B frames enabled) encoded
// frames are staged on a FIFO (m_pts_queue) so a frame's pts can be
// taken from a later frame's encode time; otherwise pts == dts ==
// the queued encode timestamp.  Returns false when nothing is ready
// to be handed out yet.  Ownership of *ppBuffer moves to the caller.
bool CFfmpegVideoEncoder::GetEncodedImage(
	u_int8_t** ppBuffer, u_int32_t* pBufferLength,
	Timestamp *dts, Timestamp *pts)
{
  bool ret = true;

  if (m_vopBufferLength == 0) {
    // return without clearing m_vopBuffer
    // this should only happen at beginning
    *dts = *pts = 0;
    *ppBuffer = NULL;
    *pBufferLength = 0;
    return false;
  }

  if (m_media_frame == MPEG2VIDEOFRAME ||
      (m_usingBFrames && m_media_frame == MPEG4VIDEOFRAME)) {
    // we need to handle b frames
    // create a pts queue element for this frame
    // NOTE(review): allocation is unchecked here, as elsewhere in file.
    pts_queue_t *pq = MALLOC_STRUCTURE(pts_queue_t);
    pq->next = NULL;
    pq->frameBuffer = m_vopBuffer;
    pq->frameBufferLen = m_vopBufferLength;
    // pict_type 3 — presumably FF_B_TYPE: B frames don't need a later
    // pts donated to them.  TODO confirm against the ffmpeg headers in use.
    pq->needs_pts = m_avctx->coded_frame->pict_type != 3;
    pq->encodeTime = m_push->Pop();
    
    ret = false;
    if (m_pts_queue == NULL) {
      // nothing on queue - put it on
      m_pts_queue = m_pts_queue_end = pq;
    } else {
      // we have something on the queue
      // if the first element on the queue does not need pts
      // or if the new encoded frame has the pts, we're going
      // to pull the element off the fifo
      if (m_pts_queue->needs_pts == false ||
	  pq->needs_pts) {
	// remove element from head
	*dts = m_pts_queue->encodeTime;
	if (m_pts_queue->needs_pts) {
	  // The head frame borrows the newest frame's encode time as pts.
	  *pts = pq->encodeTime;
	  //debug_message("dts "U64" pts "U64, *dts, *pts);
	} else {
	  *pts = *dts;
	  //debug_message("dts "U64, *dts);
	}
	*ppBuffer = m_pts_queue->frameBuffer;
	*pBufferLength = m_pts_queue->frameBufferLen;
	ret = true;
      } 
      // The new frame always joins the tail, whether or not we popped.
      m_pts_queue_end->next = pq;
      m_pts_queue_end = pq;
    }
    // If we have a good return value, pop the head off the pts queue
    if (ret) {
      pq = m_pts_queue;
      m_pts_queue = m_pts_queue->next;
      free(pq);
    } else {
      // otherwise, return nothing.
      *ppBuffer = NULL;
      *pBufferLength = 0;
      *pts = *dts = 0;
      ret = false;
    }
    // either way, return the vop
    m_vopBuffer = NULL;
    m_vopBufferLength = 0;
    return ret;
  } else {
    // pts == dts == encoding time.  Return.
    *ppBuffer = m_vopBuffer;
    *pBufferLength = m_vopBufferLength;
    *pts = *dts = m_push->Pop();
    m_vopBuffer = NULL;
    m_vopBufferLength = 0;
  }
  return true;
}
예제 #17
0
/*
 * CMpeg2tFile::create_video - set up playback for the first enabled
 * video track in the query list.
 *
 * Walks the vq[] array (video_offset entries); for the first enabled
 * entry it creates a CPlayerMedia, looks up the codec plugin, attaches
 * a CMpeg2fVideoByteStream, and tells the PID to save frames.  Every
 * other video PID is told to drop its frames.
 *
 * Returns 1 if a video media was created, 0 if no track was enabled,
 * -1 on error.  sdesc is incremented when a session description line
 * is added.
 */
int CMpeg2tFile::create_video (CPlayerSession *psptr,
			       mpeg2t_t *decoder,
			       video_query_t *vq,
			       uint video_offset,
			       int &sdesc)
{
  uint ix;
  CPlayerMedia *mptr;
  codec_plugin_t *plugin;
  int created = 0;

  // Loop through the vq structure, and set up a new player media
  for (ix = 0; ix < video_offset; ix++) {
    mpeg2t_pid_t *pidptr;
    mpeg2t_es_t *es_pid;
    pidptr = mpeg2t_lookup_pid(decoder,vq[ix].track_id);
    if (pidptr->pak_type != MPEG2T_ES_PAK) {
      mpeg2f_message(LOG_CRIT, "mpeg2t video type is not es pak - pid %x",
		     vq[ix].track_id);
      exit(1);
    }
    es_pid = (mpeg2t_es_t *)pidptr;
    if (vq[ix].enabled != 0 && created == 0) {
      created = 1;
      mptr = new CPlayerMedia(psptr, VIDEO_SYNC);
      if (mptr == NULL) {
	return (-1);
      }
      video_info_t *vinfo;
      vinfo = MALLOC_STRUCTURE(video_info_t);
      vinfo->height = vq[ix].h;
      vinfo->width = vq[ix].w;
      plugin = check_for_video_codec(STREAM_TYPE_MPEG2_TRANSPORT_STREAM,
				     NULL,
				     NULL,
				     vq[ix].type,
				     vq[ix].profile,
				     vq[ix].config, 
				     vq[ix].config_len,
				     &config);

      int ret = mptr->create_video_plugin(plugin, 
					  STREAM_TYPE_MPEG2_TRANSPORT_STREAM,
					  NULL,
					  vq[ix].type,
					  vq[ix].profile,
					  NULL, // sdp info
					  vinfo, // video info
					  vq[ix].config,
					  vq[ix].config_len);

      if (ret < 0) {
	mpeg2f_message(LOG_ERR, "Failed to create plugin data");
	psptr->set_message("Failed to start plugin");
	// the plugin did not take ownership of vinfo on failure -
	// free it here (matches the cleanup in the mpeg3 file handler)
	free(vinfo);
	delete mptr;
	return -1;
      }

      CMpeg2fVideoByteStream *vbyte;
      vbyte = new CMpeg2fVideoByteStream(this, es_pid);
      if (vbyte == NULL) {
	mpeg2f_message(LOG_CRIT, "failed to create byte stream");
	delete mptr;
	return (-1);
      }
      ret = mptr->create_media("video", vbyte, false);
      if (ret != 0) {
	mpeg2f_message(LOG_CRIT, "failed to create from file");
	return (-1);
      }
      // advertise the stream info (codec/size/rate) in the session UI
      if (es_pid->info_loaded) {
	char buffer[80];
	if (mpeg2t_write_stream_info(es_pid, buffer, 80) >= 0) {
	  psptr->set_session_desc(sdesc, buffer);
	  sdesc++;
	}
      }
      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_SAVE_FRAME);
    }  else {
      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_NOTHING);
    }
  }
  return created;
}
/*
 * Isma rtp bytestream has a potential set of headers at the beginning
 * of each rtp frame.  This can interleave frames in different packets.
 *
 * isma_rtp_plugin_create - allocate and initialize the plugin state:
 * optional ismacryp session (audio only), packet mutex, a free list of
 * frame-data nodes, and the AU-header bit sizes derived from the SDP
 * a=fmtp line.  Returns NULL on allocation failure or if the fmtp
 * line cannot be parsed.
 */
rtp_plugin_data_t *isma_rtp_plugin_create (format_list_t *media_fmt,
        uint8_t rtp_payload_type,
        rtp_vft_t *vft,
        void *ifptr)
{
    isma_enc_rtp_data_t *iptr;
    fmtp_parse_t *fmtp;

    iptr = MALLOC_STRUCTURE(isma_enc_rtp_data_t);
    if ( iptr == NULL )
        return NULL;

    memset(iptr, 0, sizeof(isma_enc_rtp_data_t));

    // an ismacryp session is only opened for audio media
    if (strcasecmp(media_fmt->media->media, "audio") == 0) {
        ismacrypInitSession(&(iptr->myEncSID), KeyTypeAudio);
    }

    iptr->m_vft = vft;
    iptr->m_ifptr = ifptr;

    iptr->m_rtp_packet_mutex = SDL_CreateMutex();
#ifdef ISMA_RTP_DUMP_OUTPUT_TO_FILE
    iptr->m_outfile = fopen("isma.aac", "w");
#endif
    iptr->m_frame_data_head = NULL;
    iptr->m_frame_data_on = NULL;
    iptr->m_frame_data_free = NULL;
    // pre-allocate a free list of frame data nodes; stop early if an
    // allocation fails - m_frame_data_max ends up as the count we got
    isma_frame_data_t *p;
    for (iptr->m_frame_data_max = 0; iptr->m_frame_data_max < 25; iptr->m_frame_data_max++) {
        p = (isma_frame_data_t *)malloc(sizeof(isma_frame_data_t));
        if (p == NULL)
            break;
        p->frame_data_next = iptr->m_frame_data_free;
        iptr->m_frame_data_free = p;
    }

    fmtp = parse_fmtp_for_mpeg4(media_fmt->fmt_param, iptr->m_vft->log_msg);
    if (fmtp == NULL) {
        // bad or missing a=fmtp line - unwind everything allocated above
        // instead of dereferencing a NULL parse result below.
        // NOTE(review): an ismacryp session opened for audio is not ended
        // here - confirm whether ismacrypEndSession is required.
        while (iptr->m_frame_data_free != NULL) {
            p = iptr->m_frame_data_free;
            iptr->m_frame_data_free = p->frame_data_next;
            free(p);
        }
        SDL_DestroyMutex(iptr->m_rtp_packet_mutex);
#ifdef ISMA_RTP_DUMP_OUTPUT_TO_FILE
        if (iptr->m_outfile != NULL)
            fclose(iptr->m_outfile);
#endif
        free(iptr);
        return NULL;
    }

    mpeg4_audio_config_t audio_config;
    decode_mpeg4_audio_config(fmtp->config_binary,
                              fmtp->config_binary_len,
                              &audio_config);
    // samples per frame: AAC is 1024 (or 960), otherwise the CELP value
    if (audio_object_type_is_aac(&audio_config)) {
        iptr->m_rtp_ts_add = audio_config.codec.aac.frame_len_1024 != 0 ? 1024 : 960;
    } else {
        iptr->m_rtp_ts_add = audio_config.codec.celp.samples_per_frame;
        isma_message(LOG_DEBUG, ismaencrtp, "celp spf is %d", iptr->m_rtp_ts_add);
    }
    // convert samples-per-frame into rtp clock ticks per frame
    // (assumes audio_config.frequency is non-zero - TODO confirm the
    // decoder guarantees this for valid config data)
    iptr->m_rtp_ts_add = (iptr->m_rtp_ts_add * media_fmt->rtpmap->clock_rate) /
                         audio_config.frequency;
    isma_message(LOG_DEBUG, ismaencrtp,
                 "Rtp ts add is %d (%d %d)", iptr->m_rtp_ts_add,
                 media_fmt->rtpmap->clock_rate,
                 audio_config.frequency);
    iptr->m_fmtp = fmtp;
    // minimum AU-header sizes in bits: first header carries the index,
    // subsequent headers carry the index delta
    iptr->m_min_first_header_bits = iptr->m_fmtp->size_length + iptr->m_fmtp->index_length;
    iptr->m_min_header_bits = iptr->m_fmtp->size_length + iptr->m_fmtp->index_delta_length;
    if (iptr->m_fmtp->CTS_delta_length > 0) {
        iptr->m_min_header_bits++;
        iptr->m_min_first_header_bits++;
    }
    if (iptr->m_fmtp->DTS_delta_length > 0) {
        iptr->m_min_header_bits++;
        iptr->m_min_first_header_bits++;
    }

    isma_message(LOG_DEBUG, ismaencrtp, "min headers are %d %d", iptr->m_min_first_header_bits,
                 iptr->m_min_header_bits);

    iptr->m_min_header_bits += iptr->m_fmtp->auxiliary_data_size_length;
    iptr->m_min_first_header_bits += iptr->m_fmtp->auxiliary_data_size_length;
    iptr->m_frag_reass_buffer = NULL;
    iptr->m_frag_reass_size_max = 0;
    return (&iptr->plug);
}
예제 #19
0
/*
 * create_mpeg3_video - create the player media for an mpeg program
 * stream video track.
 *
 * Builds the session description string, creates the video plugin and
 * bytestream.  Returns 1 on success, 0 if no plugin matches (not an
 * error), -1 on failure.  sdesc is incremented for the description used.
 */
static int create_mpeg3_video (video_query_t *vq,
                               mpeg2ps_t *vfile,
                               CPlayerSession *psptr,
                               int &sdesc)
{
    CPlayerMedia *mptr;
    codec_plugin_t *plugin;
    int ret;

    plugin = check_for_video_codec(STREAM_TYPE_MPEG_FILE,
                                   "mp2v",
                                   NULL,
                                   vq->type,
                                   -1,
                                   NULL,
                                   0,
                                   &config);
    if (plugin == NULL) {
        psptr->set_message("Can't find plugin for mpeg video");
        return 0;
    }
    mptr = new CPlayerMedia(psptr, VIDEO_SYNC);
    if (mptr == NULL) {
        psptr->set_message("Could not create video media");
        return -1;
    }
    video_info_t *vinfo;
    vinfo = MALLOC_STRUCTURE(video_info_t);
    vinfo->height = vq->h;
    vinfo->width = vq->w;

    char buffer[80];
    int bitrate;
    char *name = mpeg2ps_get_video_stream_name(vfile, vq->track_id);
    ret = snprintf(buffer, 80, "%s Video, %d x %d",
                   name,
                   vinfo->width, vinfo->height);
    free(name);
    // snprintf returns the would-be length; clamp it so the appends
    // below never index past the buffer or pass a negative size
    if (ret < 0 || ret >= 80) ret = 79;
    if (vq->frame_rate != 0.0) {
        ret += snprintf(buffer + ret, 80 - ret, ", %g", vq->frame_rate);
        if (ret >= 80) ret = 79;
    }
    bitrate =
        (int)(mpeg2ps_get_video_stream_bitrate(vfile, vq->track_id) / 1000.0);
    if (bitrate > 0) {
        snprintf(buffer + ret, 80 - ret, ", %d kbps", bitrate);
    }
    psptr->set_session_desc(sdesc, buffer);
    sdesc++;
    mpeg3f_message(LOG_DEBUG, "video stream h %d w %d fr %g bitr %d",
                   vinfo->height, vinfo->width, vq->frame_rate,
                   bitrate);
    ret = mptr->create_video_plugin(plugin, STREAM_TYPE_MPEG_FILE,
                                    vq->compressor,
                                    vq->type, vq->profile,
                                    NULL, vinfo, NULL, 0);
    if (ret < 0) {
        mpeg3f_message(LOG_ERR, "Failed to create video plugin");
        psptr->set_message("Failed to create video plugin");
        free(vinfo);
        // also release the media; the audio path already does this
        delete mptr;
        return -1;
    }
    CMpeg3VideoByteStream *vbyte;
    vbyte = new CMpeg3VideoByteStream(vfile, vq->track_id);
    if (vbyte == NULL) {
        psptr->set_message("Failed to create video bytestream");
        return -1;
    }
    ret = mptr->create_media("video", vbyte);
    if (ret != 0) {
        psptr->set_message("Couldn't create video media");
        return -1;
    }
    return 1;
}
예제 #20
0
/*
 * create_mpeg3_audio - create the player media for an mpeg program
 * stream audio track.
 *
 * Returns 1 on success, 0 if no plugin matches (not an error), -1 on
 * failure.  sdesc is incremented for the session description used.
 */
static int create_mpeg3_audio (audio_query_t * aq,
                               mpeg2ps_t *afile,
                               CPlayerSession *psptr,
                               int &sdesc)
{
    CPlayerMedia *mptr;
    codec_plugin_t *plugin;
    int ret;

    plugin = check_for_audio_codec(STREAM_TYPE_MPEG_FILE,
                                   NULL,
                                   NULL,
                                   aq->type,
                                   -1,
                                   NULL,
                                   0,
                                   &config);
    if (plugin == NULL) {
        psptr->set_message("Can't find plugin for mpeg audio format %s",
                           mpeg2ps_get_audio_stream_name(afile, aq->track_id));
        return 0;
    }
    mptr = new CPlayerMedia(psptr, AUDIO_SYNC);
    if (mptr == NULL) {
        // was "Could not create video media" - this is the audio path
        psptr->set_message("Could not create audio media");
        return -1;
    }
    audio_info_t *ainfo;
    ainfo = MALLOC_STRUCTURE(audio_info_t);
    ainfo->freq = aq->sampling_freq;
    ainfo->chans = aq->chans;
    ainfo->bitspersample = 16;

    char buffer[80];
    snprintf(buffer, 80, "%s Audio, %d, %d channels",
             mpeg2ps_get_audio_stream_name(afile, aq->track_id),
             ainfo->freq,
             ainfo->chans);
    psptr->set_session_desc(sdesc, buffer);
    sdesc++;

    ret = mptr->create_audio_plugin(plugin, aq->stream_type,
                                    aq->compressor,
                                    aq->type, aq->profile,
                                    NULL, ainfo, NULL, 0);
    if (ret < 0) {
        mpeg3f_message(LOG_ERR, "Failed to create audio plugin");
        psptr->set_message("Failed to create audio plugin");
        free(ainfo);
        delete mptr;
        return -1;
    }
    CMpeg3AudioByteStream *abyte;
    abyte = new CMpeg3AudioByteStream(afile, aq->track_id);
    if (abyte == NULL) {
        psptr->set_message("Failed to create audio bytestream");
        return -1;
    }
    ret = mptr->create_media("audio", abyte);
    if (ret != 0) {
        psptr->set_message("Couldn't create audio media");
        return -1;
    }
    return 1;
}
예제 #21
0
/*
 * Create the media for the avi file, and set up some session stuff.
 *
 * Probes the avi for one video and one audio track, lets the caller
 * (via cc_vft->media_list_query) choose which to enable, then builds
 * the player media, plugins and bytestreams for the enabled tracks.
 * Returns 0 on success, 1 for a playable-but-partial file (unknown
 * audio or video codec), -1 on hard failure.
 */
int create_media_for_avi_file (CPlayerSession *psptr, 
			       const char *name,
			       char *errmsg,
			       uint32_t errlen,
			       int have_audio_driver,
			       control_callback_vft_t *cc_vft)
{
  CAviFile *Avifile1 = NULL;
  avi_t *avi;
  CPlayerMedia *mptr;
  avi = AVI_open_input_file(name, 1);
  if (avi == NULL) {
    snprintf(errmsg, errlen, "%s", AVI_strerror());
    player_error_message("%s", AVI_strerror());
    return (-1);
  }

  int video_count = 1;
  codec_plugin_t *plugin;
  video_query_t vq;
  // vq.enabled is read below (CAviFile constructor) even when no video
  // plugin matches and the struct is otherwise left unfilled - make
  // sure it doesn't hold garbage
  vq.enabled = 0;

  const char *codec_name = AVI_video_compressor(avi);
  player_debug_message("Trying avi video codec %s", codec_name);
  plugin = check_for_video_codec(STREAM_TYPE_AVI_FILE,
				 codec_name, 
				 NULL,
				 -1,
				 -1,
				 NULL,
				 0, 
				 &config);
  if (plugin == NULL) {
    video_count = 0;
  } else {
    vq.track_id = 1;
    vq.stream_type = STREAM_TYPE_AVI_FILE;
    vq.compressor = codec_name;
    vq.type = -1;
    vq.profile = -1;
    vq.fptr = NULL;
    vq.h = AVI_video_height(avi);
    vq.w = AVI_video_width(avi);
    vq.frame_rate = AVI_video_frame_rate(avi);
    vq.config = NULL;
    vq.config_len = 0;
    vq.enabled = 0;
    vq.reference = NULL;
  }

  int have_audio = 0;
  int audio_count = 0;
  audio_query_t aq;
  aq.enabled = 0;

  if (AVI_audio_bytes(avi) != 0) {
    have_audio = 1;
    plugin = check_for_audio_codec(STREAM_TYPE_AVI_FILE,
				   NULL,
				   NULL,
				   AVI_audio_format(avi), 
				   -1, 
				   NULL, 
				   0,
				   &config);
    if (plugin != NULL) {
      audio_count = 1;
      aq.track_id = 1;
      aq.stream_type = STREAM_TYPE_AVI_FILE;
      aq.compressor = NULL;
      aq.type = AVI_audio_format(avi);
      aq.profile = -1;
      aq.fptr = NULL;
      aq.sampling_freq = AVI_audio_rate(avi);
      aq.chans = AVI_audio_channels(avi);
      aq.config = NULL;
      aq.config_len = 0;
      aq.enabled = 0;
      aq.reference = NULL;
    }
  }

  // let the UI choose which tracks to play; default to everything
  if (cc_vft != NULL && cc_vft->media_list_query != NULL) {
    (cc_vft->media_list_query)(psptr, video_count, &vq, audio_count, &aq);
  } else {
    if (video_count != 0) vq.enabled = 1;
    if (audio_count != 0) aq.enabled = 1;
  }


  if ((video_count == 0 || vq.enabled == 0) && 
      (audio_count == 0 || aq.enabled == 0)) {
    snprintf(errmsg, errlen, "No audio or video tracks enabled or playable");
    AVI_close(avi);
    return -1;
  }
  
  // Avifile1 ownership passes to the session via the close callback
  Avifile1 = new CAviFile(name, avi, vq.enabled, audio_count);
  psptr->set_media_close_callback(close_avi_file, Avifile1);

  if (video_count != 0 && vq.enabled) {
    mptr = new CPlayerMedia(psptr);
    if (mptr == NULL) {
      return (-1);
    }
  
    video_info_t *vinfo = MALLOC_STRUCTURE(video_info_t);
    if (vinfo == NULL) 
      return (-1);
    vinfo->height = vq.h;
    vinfo->width = vq.w;
    player_debug_message("avi file h %d w %d frame rate %g", 
			 vinfo->height,
			 vinfo->width,
			 vq.frame_rate);

    plugin = check_for_video_codec(STREAM_TYPE_AVI_FILE,
				   codec_name, 
				   NULL,
				   -1,
				   -1,
				   NULL,
				   0,
				   &config);
    int ret;
    ret = mptr->create_video_plugin(plugin,
				    STREAM_TYPE_AVI_FILE,
				    codec_name,
				    -1,
				    -1,
				    NULL,
				    vinfo,
				    NULL,
				    0);
    if (ret < 0) {
      snprintf(errmsg, errlen, "Failed to create video plugin %s", 
	       codec_name);
      player_error_message("Failed to create plugin data");
      // plugin did not take ownership of vinfo on failure
      free(vinfo);
      delete mptr;
      return -1;
    }
    CAviVideoByteStream *vbyte = new CAviVideoByteStream(Avifile1);
    if (vbyte == NULL) {
      delete mptr;
      return (-1);
    }
    vbyte->config(AVI_video_frames(avi), vq.frame_rate);
    // NOTE(review): mptr is not deleted if create fails - confirm
    // whether the session already owns it at this point
    ret = mptr->create(vbyte, TRUE, errmsg, errlen);
    if (ret != 0) {
      return (-1);
    }
  }
    
  int seekable = 1;
  if (have_audio_driver > 0 && audio_count > 0 && aq.enabled != 0) {
    plugin = check_for_audio_codec(STREAM_TYPE_AVI_FILE,
				   NULL,
				   NULL,
				   aq.type,
				   -1, 
				   NULL, 
				   0,
				   &config);
    CAviAudioByteStream *abyte;
    mptr = new CPlayerMedia(psptr);
    if (mptr == NULL) {
      return (-1);
    }
    audio_info_t *ainfo;
    ainfo = MALLOC_STRUCTURE(audio_info_t);
    ainfo->freq = aq.sampling_freq;
    ainfo->chans = aq.chans;
    ainfo->bitspersample = AVI_audio_bits(avi); 

  
    int ret;
    ret = mptr->create_audio_plugin(plugin, 
				    aq.stream_type,
				    aq.compressor,
				    aq.type,
				    aq.profile,
				    NULL, 
				    ainfo,
				    NULL, 
				    0);
    if (ret < 0) {
      free(ainfo);
      delete mptr;
      player_error_message("Couldn't create audio from plugin %s", 
			   plugin->c_name);
      return -1;
    }
    abyte = new CAviAudioByteStream(Avifile1);

    ret = mptr->create(abyte, FALSE, errmsg, errlen);
    if (ret != 0) {
      return (-1);
    }
    // audio from avi is not seekable with this bytestream
    seekable = 0;
  } 
  psptr->session_set_seekable(seekable);

  if (audio_count == 0 && have_audio != 0) {
    snprintf(errmsg, errlen, "Unknown Audio Codec in avi file ");
    return (1);
  }
  if (video_count != 1) {
    snprintf(errmsg, errlen, "Unknown Video Codec %s in avi file",
	     codec_name);
    return (1);
  }
  return (0);
}
예제 #22
0
/*
 * CMpeg2tFile::create_audio - set up playback for the first enabled
 * audio track in the query list.
 *
 * Mirrors create_video: walks aq[] (audio_offset entries), creates the
 * player media, plugin and bytestream for the first enabled track and
 * sets that PID to save frames; all other PIDs drop their frames.
 *
 * Returns 1 if an audio media was created, 0 if no track was enabled,
 * -1 on error.  sdesc is incremented when a session description line
 * is added.
 */
int CMpeg2tFile::create_audio (CPlayerSession *psptr,
			       mpeg2t_t *decoder,
			       audio_query_t *aq,
			       uint audio_offset,
			       int &sdesc)
{
  uint ix;
  CPlayerMedia *mptr;
  codec_plugin_t *plugin;
  int created = 0;

  for (ix = 0; ix < audio_offset; ix++) {
    mpeg2t_pid_t *pidptr;
    mpeg2t_es_t *es_pid;
    pidptr = mpeg2t_lookup_pid(decoder,aq[ix].track_id);
    if (pidptr->pak_type != MPEG2T_ES_PAK) {
      mpeg2f_message(LOG_CRIT, "mpeg2t video type is not es pak - pid %x",
		     aq[ix].track_id);
      exit(1);
    }
    es_pid = (mpeg2t_es_t *)pidptr;
    if (aq[ix].enabled != 0 && created == 0) {
      created = 1;
      mptr = new CPlayerMedia(psptr, AUDIO_SYNC);
      if (mptr == NULL) {
	return (-1);
      }
      audio_info_t *ainfo;
      ainfo = MALLOC_STRUCTURE(audio_info_t);
      ainfo->freq = aq[ix].sampling_freq;
      ainfo->chans = aq[ix].chans;
      ainfo->bitspersample = 0;
      plugin = check_for_audio_codec(STREAM_TYPE_MPEG2_TRANSPORT_STREAM,
				     NULL,
				     NULL,
				     aq[ix].type,
				     aq[ix].profile,
				     aq[ix].config, 
				     aq[ix].config_len,
				     &config);

      int ret = mptr->create_audio_plugin(plugin, 
					  STREAM_TYPE_MPEG2_TRANSPORT_STREAM,
					  NULL,
					  aq[ix].type,
					  aq[ix].profile,
					  NULL, // sdp info
					  ainfo, // audio info
					  aq[ix].config,
					  aq[ix].config_len);

      if (ret < 0) {
	mpeg2f_message(LOG_ERR, "Failed to create plugin data");
	psptr->set_message("Failed to start plugin");
	// the plugin did not take ownership of ainfo on failure -
	// free it here (matches the cleanup in the mpeg3 file handler)
	free(ainfo);
	delete mptr;
	return -1;
      }

      CMpeg2fAudioByteStream *abyte;
      abyte = new CMpeg2fAudioByteStream(this, es_pid);
      if (abyte == NULL) {
	mpeg2f_message(LOG_CRIT, "failed to create byte stream");
	delete mptr;
	return (-1);
      }
      ret = mptr->create_media("audio", abyte, false);
      if (ret != 0) {
	mpeg2f_message(LOG_CRIT, "failed to create from file");
	return (-1);
      }
      // advertise the stream info (codec/rate/channels) in the session UI
      if (es_pid->info_loaded) {
	char buffer[80];
	if (mpeg2t_write_stream_info(es_pid, buffer, 80) >= 0) {
	  psptr->set_session_desc(sdesc, buffer);
	  sdesc++;
	}
      }
      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_SAVE_FRAME);
    }  else {
      mpeg2t_set_frame_status(es_pid, MPEG2T_PID_NOTHING);
    }
  }
  return created;
}
예제 #23
0
/*
 * Mp4vCreator - import a raw MPEG-4 video elementary stream from inFile
 * into mp4File as a new video track, optionally ISMAcryp-encrypted.
 *
 * Phase 1 scans the stream for the configuration headers (VOSH, VO,
 * VOL) that precede the first VOP; those bytes become the track's ES
 * configuration.  Phase 2 loops over VOPs, assembling one sample per
 * VOP, writing the *previous* sample each time a new VOP starts (so
 * the last sample is written when EOF flags lastFrame).  If B-frames
 * were seen, a final pass sets rendering offsets (ctts) from the
 * frame list collected during phase 2.
 *
 * Returns the new track id, or MP4_INVALID_TRACK_ID on any error.
 * Relies on file-scope state: ProgName, Verbosity, Mp4TimeScale,
 * VideoFrameRate, VideoProfileLevel(Specified).
 */
MP4TrackId Mp4vCreator(MP4FileHandle mp4File, FILE* inFile, bool doEncrypt,
		       bool allowVariableFrameRate)
{
    bool rc;

    // double-size buffer: first half holds the sample being written,
    // second half accumulates the next one (see memmove below)
    u_int8_t sampleBuffer[256 * 1024 * 2];
    u_int8_t* pCurrentSample = sampleBuffer;
    u_int32_t maxSampleSize = sizeof(sampleBuffer) / 2;
    u_int32_t prevSampleSize = 0;

    // the current syntactical object
    // typically 1:1 with a sample
    // but not always, i.e. non-VOP's
    u_int8_t* pObj = pCurrentSample;
    u_int32_t objSize;
    u_int8_t objType;

    // the current sample
    MP4SampleId sampleId = 1;
    MP4Timestamp currentSampleTime = 0;

    // the last reference VOP
    MP4SampleId refVopId = 1;
    MP4Timestamp refVopTime = 0;

    // track configuration info - defaults used until the VOL is parsed
    u_int8_t videoProfileLevel = MPEG4_SP_L3;
    u_int8_t timeBits = 15;
    u_int16_t timeTicks = 30000;
    u_int16_t frameDuration = 3000;
    u_int16_t frameWidth = 320;
    u_int16_t frameHeight = 240;
    u_int32_t esConfigSize = 0;
    int vopType = 0;
    int prevVopType = 0;
    bool foundVOSH = false, foundVO = false, foundVOL = false;
    u_int32_t lastVopTimeIncrement = 0;
    bool variableFrameRate = false;
    bool lastFrame = false;
    bool haveBframes = false;
    // singly-linked list of (vopType, timestamp) per written sample,
    // used later to compute ctts rendering offsets
    mpeg4_frame_t *head = NULL, *tail = NULL;

    // start reading objects until we get the first VOP
    while (LoadNextObject(inFile, pObj, &objSize, &objType)) {
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif

        if (objType == MP4AV_MPEG4_VOSH_START) {
            MP4AV_Mpeg4ParseVosh(pObj, objSize,
                    &videoProfileLevel);
            foundVOSH = true;
        } else if (objType == MP4AV_MPEG4_VO_START) {
            foundVO = true;
        } else if (objType == MP4AV_MPEG4_VOL_START) {
            MP4AV_Mpeg4ParseVol(pObj, objSize,
                    &timeBits, &timeTicks, &frameDuration,
                    &frameWidth, &frameHeight);

            foundVOL = true;
#ifdef DEBUG_MP4V
            printf("ParseVol: timeBits %u timeTicks %u frameDuration %u\n",
                    timeBits, timeTicks, frameDuration);
#endif

        } else if (foundVOL == true || objType == MP4AV_MPEG4_VOP_START) {
            // everything buffered so far is the ES configuration
            esConfigSize = pObj - pCurrentSample;
            // ready to set up mp4 track
            break;
        }
        /* XXX why do we need this if ?
         * It looks like it will remove this object ... XXX */
	// It does.  On Purpose.  wmay 6/2004
        if (objType != MP4AV_MPEG4_USER_DATA_START) {
            pObj += objSize;
        }
    }

    if (foundVOSH == false) {
        fprintf(stderr,
                "%s: no VOSH header found in MPEG-4 video.\n"
                "This can cause problems with players other than mp4player. \n",
                ProgName);
    } else {
        if (VideoProfileLevelSpecified &&
                videoProfileLevel != VideoProfileLevel) {
            fprintf(stderr,
                    "%s: You have specified a different video profile level than was detected in the VOSH header\n"
                    "The level you specified was %d and %d was read from the VOSH\n",
                    ProgName, VideoProfileLevel, videoProfileLevel);
        }
    }
    if (foundVO == false) {
        fprintf(stderr,
                "%s: No VO header found in mpeg-4 video.\n"
                "This can cause problems with players other than mp4player\n",
                ProgName);
    }
    if (foundVOL == false) {
        // VOL is mandatory - it carries the timing info we need
        fprintf(stderr,
                "%s: fatal: No VOL header found in mpeg-4 video stream\n",
                ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    // convert frame duration to canonical time scale
    // note zero value for frame duration signals variable rate video
    if (timeTicks == 0) {
        timeTicks = 1;
    }
    u_int32_t mp4FrameDuration = 0;

    if (VideoFrameRate) {
      // user-specified rate wins
      mp4FrameDuration = (u_int32_t)(((double)Mp4TimeScale) / VideoFrameRate);    
    } else if (frameDuration) {
	  // derive the rate from the VOL timing fields
	  VideoFrameRate = frameDuration;
	  VideoFrameRate /= timeTicks;
	  mp4FrameDuration = (Mp4TimeScale * frameDuration) / timeTicks;
    } else {
      if (allowVariableFrameRate == false ) {
	fprintf(stderr,
		"%s: variable rate video stream signalled,"
		" please specify average frame rate with -r option\n"
		" or --variable-frame-rate argument\n",
		ProgName);
	return MP4_INVALID_TRACK_ID;
      }

        variableFrameRate = true;
    }

    ismacryp_session_id_t ismaCrypSId;
    // NOTE(review): icPp is never freed on any path below (including the
    // error returns) - confirm whether MP4AddEncVideoTrack takes
    // ownership; otherwise this leaks
    mp4v2_ismacrypParams *icPp =  (mp4v2_ismacrypParams *) malloc(sizeof(mp4v2_ismacrypParams));
    memset(icPp, 0, sizeof(mp4v2_ismacrypParams));


    // initialize ismacryp session if encrypting
    if (doEncrypt) {

        if (ismacrypInitSession(&ismaCrypSId,KeyTypeVideo) != 0) {
            fprintf(stderr, "%s: could not initialize the ISMAcryp session\n",
                    ProgName);
            return MP4_INVALID_TRACK_ID;
        }
        // pull all the scheme parameters the track atoms will need
        if (ismacrypGetScheme(ismaCrypSId, &(icPp->scheme_type)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme type. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetSchemeVersion(ismaCrypSId, &(icPp->scheme_version)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme ver. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKMSUri(ismaCrypSId, &(icPp->kms_uri)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp kms uri. sid %d\n",
                    ProgName, ismaCrypSId);
            CHECK_AND_FREE(icPp->kms_uri);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if ( ismacrypGetSelectiveEncryption(ismaCrypSId, &(icPp->selective_enc)) != ismacryp_rc_ok ) {
            fprintf(stderr, "%s: could not get ismacryp selec enc. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKeyIndicatorLength(ismaCrypSId, &(icPp->key_ind_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp key ind len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetIVLength(ismaCrypSId, &(icPp->iv_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp iv len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
    }

    // create the new video track
    MP4TrackId trackId;
    if (doEncrypt) {
        trackId =
            MP4AddEncVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    icPp,
                    MP4_MPEG4_VIDEO_TYPE);
    } else {
        trackId =
            MP4AddVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    MP4_MPEG4_VIDEO_TYPE);
    }

    if (trackId == MP4_INVALID_TRACK_ID) {
        fprintf(stderr,
                "%s: can't create video track\n", ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    if (VideoProfileLevelSpecified) {
        videoProfileLevel = VideoProfileLevel;
    }
    if (MP4GetNumberOfTracks(mp4File, MP4_VIDEO_TRACK_TYPE) == 1) {
        MP4SetVideoProfileLevel(mp4File, videoProfileLevel);
    }
    printf("es config size is %d\n", esConfigSize);
    if (esConfigSize) {
        MP4SetTrackESConfiguration(mp4File, trackId,
                pCurrentSample, esConfigSize);

        // move past ES config, so it doesn't go into first sample
        pCurrentSample += esConfigSize;
    }
    // Move the current frame to the beginning of the
    // buffer
    memmove(sampleBuffer, pCurrentSample, pObj - pCurrentSample + objSize);
    pObj = sampleBuffer + (pObj - pCurrentSample);
    pCurrentSample = sampleBuffer;
    MP4Timestamp prevFrameTimestamp = 0;

    // now process the rest of the video stream
    while ( true ) {
        if ( objType != MP4AV_MPEG4_VOP_START ) {
	  // keep it in the buffer until a VOP comes along
	  // Actually, do nothings, since we only want VOP
	  // headers in the stream - wmay 6/2004
	  //pObj += objSize;

        } else { // we have VOP
            u_int32_t sampleSize = (pObj + objSize) - pCurrentSample;

            vopType = MP4AV_Mpeg4GetVopType(pObj, objSize);

	    // remember this frame's type/timestamp for the ctts pass
	    mpeg4_frame_t *fr = MALLOC_STRUCTURE(mpeg4_frame_t);
	    if (head == NULL) {
	      head = tail = fr;
	    } else {
	      tail->next = fr;
	      tail = fr;
	    }
	    fr->vopType = vopType;
	    fr->frameTimestamp = currentSampleTime;
	    fr->next = NULL;
            if ( variableFrameRate ) {
                // variable frame rate:  recalculate "mp4FrameDuration"
                if ( lastFrame ) {
                    // last frame
                    mp4FrameDuration = Mp4TimeScale / timeTicks;
                } else {
                    // not the last frame
                    u_int32_t vopTimeIncrement;
                    MP4AV_Mpeg4ParseVop(pObj, objSize, &vopType, timeBits, timeTicks, &vopTimeIncrement);
                    u_int32_t vopTime = vopTimeIncrement - lastVopTimeIncrement;
                    mp4FrameDuration = (Mp4TimeScale * vopTime) / timeTicks;
                    lastVopTimeIncrement = vopTimeIncrement % timeTicks;
                }
	    }
            if ( prevSampleSize > 0 ) { // not the first time
                // fill sample data & length to write
                u_int8_t* sampleData2Write = NULL;
                u_int32_t sampleLen2Write = 0;
                if ( doEncrypt ) {
                    if ( ismacrypEncryptSampleAddHeader(ismaCrypSId,
                                sampleSize,
                                sampleBuffer,
                                &sampleLen2Write,
                                &sampleData2Write) != 0 ) {
                        fprintf(stderr,
                                "%s: can't encrypt video sample and add header %u\n",
                                ProgName, sampleId);
                    }
                } else {
                    sampleData2Write = sampleBuffer;
                    sampleLen2Write = prevSampleSize;
                }

		
            // fixed frame rate: recompute the timestamp from the sample
            // index to avoid accumulating rounding error in the duration
            if (variableFrameRate == false) {
	      double now_calc;
	      now_calc = sampleId;
	      now_calc *= Mp4TimeScale;
	      now_calc /= VideoFrameRate;
	      MP4Timestamp now_ts = (MP4Timestamp)now_calc;
	      mp4FrameDuration = now_ts - prevFrameTimestamp;
	      prevFrameTimestamp = now_ts;
	      currentSampleTime = now_ts;
	    }
                // Write the previous sample
                rc = MP4WriteSample(mp4File, trackId,
                        sampleData2Write, sampleLen2Write,
                        mp4FrameDuration, 0, prevVopType == VOP_TYPE_I);

                if ( doEncrypt && sampleData2Write ) {
                    // buffer allocated by encrypt function.
                    // must free it!
                    free(sampleData2Write);
                }

                if ( !rc ) {
                    fprintf(stderr,
                            "%s: can't write video frame %u\n",
                            ProgName, sampleId);
                    MP4DeleteTrack(mp4File, trackId);
                    return MP4_INVALID_TRACK_ID;
                }

                // deal with rendering time offsets
                // that can occur when B frames are being used
                // which is the case for all profiles except Simple Profile
		haveBframes |= (prevVopType == VOP_TYPE_B);

		if ( lastFrame ) {
		  // finish read frames
		  break;
		}
                sampleId++;
            } // not the first time

            currentSampleTime += mp4FrameDuration;

            // Move the current frame to the beginning of the
            // buffer
            memmove(sampleBuffer, pCurrentSample, sampleSize);
            prevSampleSize = sampleSize;
            prevVopType = vopType;
            // reset pointers
            pObj = pCurrentSample = sampleBuffer + sampleSize;
        } // we have VOP

        // load next object from bitstream
        if (!LoadNextObject(inFile, pObj, &objSize, &objType)) {
            // EOF: if the last object was a VOP, loop once more with
            // lastFrame set so the final sample gets written
            if (objType != MP4AV_MPEG4_VOP_START)
                break;
            lastFrame = true;
            objSize = 0;
            continue;
        }
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            MP4DeleteTrack(mp4File, trackId);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif
    }
    // rendering offsets only matter for profiles that allow B frames
    bool doRenderingOffset = false;
    switch (videoProfileLevel) {
    case MPEG4_SP_L0:
    case MPEG4_SP_L1:
    case MPEG4_SP_L2:
    case MPEG4_SP_L3:
      break;
    default:
      doRenderingOffset = true;
      break;
    }
   
    if (doRenderingOffset && haveBframes) {
      // only generate ctts (with rendering offset for I, P frames) when
      // we need one.  We saved all the frames types and timestamps above - 
      // we can't use MP4ReadSample, because the end frames might not have
      // been written 
      refVopId = 1;
      refVopTime = 0;
      MP4SampleId maxSamples = MP4GetTrackNumberOfSamples(mp4File, trackId);
      // start with sample 2 - we know the first one is a I frame
      // NOTE(review): assumes the frame list has at least maxSamples
      // nodes; fr would be NULL-dereferenced if it is shorter - confirm
      // the list and written-sample counts always match
      mpeg4_frame_t *fr = head->next; // skip the first one
      for (MP4SampleId ix = 2; ix <= maxSamples; ix++) {
	if (fr->vopType != VOP_TYPE_B) {
#ifdef DEBUG_MP4V_TS
            printf("sample %u %u renderingOffset "U64"\n",
		   refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
	  MP4SetSampleRenderingOffset(mp4File, trackId, refVopId, 
				      fr->frameTimestamp - refVopTime);
	  refVopId = ix;
	  refVopTime = fr->frameTimestamp;
	}
	fr = fr->next;
      }
      
#ifdef DEBUG_MP4V_TS
      printf("sample %u %u renderingOffset "U64"\n",
	     refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
      MP4SetSampleRenderingOffset(mp4File, trackId, refVopId, 
				  fr->frameTimestamp - refVopTime);
    }

    // release the frame list
    while (head != NULL) {
      tail = head->next;
      free(head);
      head = tail;
    }
    // terminate session if encrypting
    if (doEncrypt) {
        if (ismacrypEndSession(ismaCrypSId) != 0) {
            fprintf(stderr,
                    "%s: could not end the ISMAcryp session\n",
                    ProgName);
        }
    }

    return trackId;
}
Example #24
0
File: ffmpeg.cpp  Project: qmwd2006/mpeg4ip
/*
 * ffmpeg_create - allocate and initialize an ffmpeg-based video decoder.
 *
 * Looks up the codec from the stream/compressor description, allocates the
 * codec context and decode frame, extracts per-codec configuration
 * (H.264 sprop parameter sets, MPEG-4 VOL header, SVQ3 extradata), opens the
 * codec when enough configuration is known, and optionally pre-feeds the
 * configuration bytes through the decoder.
 *
 * Returns the new codec instance cast to codec_data_t*, or NULL if the
 * codec could not be opened (all allocations are released on that path).
 * Ownership: the returned object is freed by the matching close routine;
 * `userdata` is only freed here when this function itself allocated a
 * replacement buffer (sprop / fmtp config).
 */
static codec_data_t *ffmpeg_create (const char *stream_type,
                                    const char *compressor,
                                    int type,
                                    int profile,
                                    format_list_t *media_fmt,
                                    video_info_t *vinfo,
                                    const uint8_t *userdata,
                                    uint32_t ud_size,
                                    video_vft_t *vft,
                                    void *ifptr)
{
    ffmpeg_codec_t *ffmpeg;

    ffmpeg = MALLOC_STRUCTURE(ffmpeg_codec_t);
    memset(ffmpeg, 0, sizeof(*ffmpeg));

    ffmpeg->m_vft = vft;
    ffmpeg->m_ifptr = ifptr;
    avcodec_init();
    avcodec_register_all();
    av_log_set_level(AV_LOG_QUIET);

    ffmpeg->m_codecId = ffmpeg_find_codec(stream_type, compressor, type,
                                          profile, media_fmt, userdata, ud_size);

    // must have a codecID - we checked it earlier
    ffmpeg->m_codec = avcodec_find_decoder(ffmpeg->m_codecId);
    ffmpeg->m_c = avcodec_alloc_context();
    ffmpeg->m_picture = avcodec_alloc_frame();
    bool open_codec = true;      // open the codec before first decode call
    bool run_userdata = false;   // pre-feed config bytes through the decoder
    bool free_userdata = false;  // userdata was (re)allocated by us below

    switch (ffmpeg->m_codecId) {
    case CODEC_ID_MJPEG:
        break;
    case CODEC_ID_H264:
        // need to find height and width from the sprop parameter sets
        if (media_fmt != NULL && media_fmt->fmt_param != NULL) {
            userdata = h264_sdp_parse_sprop_param_sets(media_fmt->fmt_param,
                       &ud_size,
                       ffmpeg->m_vft->log_msg);
            if (userdata != NULL) free_userdata = true;
            ffmpeg_message(LOG_DEBUG, "ffmpeg", "sprop len %d", ud_size);
        }
        if (ud_size > 0) {
            ffmpeg_message(LOG_DEBUG, "ffmpeg", "userdata len %d", ud_size);
            open_codec = ffmpeg_find_h264_size(ffmpeg, userdata, ud_size);
            ffmpeg_message(LOG_DEBUG, "ffmpeg", "open codec is %d", open_codec);
            run_userdata = true;
        } else {
            open_codec = false;
        }
        break;
    case CODEC_ID_MPEG4: {
        fmtp_parse_t *fmtp = NULL;
        open_codec = false;
        if (media_fmt != NULL) {
            fmtp = parse_fmtp_for_mpeg4(media_fmt->fmt_param,
                                        ffmpeg->m_vft->log_msg);
            // BUGFIX: guard against a NULL parse result before dereferencing;
            // the free path below already checked, the use path did not.
            if (fmtp != NULL && fmtp->config_binary != NULL) {
                // take ownership of the config blob from the fmtp parse
                userdata = fmtp->config_binary;
                ud_size = fmtp->config_binary_len;
                fmtp->config_binary = NULL;
                free_userdata = true;
            }
        }

        if (ud_size > 0) {
            // parse the VOL header to learn the frame dimensions before open
            uint8_t *vol = MP4AV_Mpeg4FindVol((uint8_t *)userdata, ud_size);
            u_int8_t TimeBits;
            u_int16_t TimeTicks;
            u_int16_t FrameDuration;
            u_int16_t FrameWidth;
            u_int16_t FrameHeight;
            u_int8_t  aspectRatioDefine;
            u_int8_t  aspectRatioWidth;
            u_int8_t  aspectRatioHeight;
            if (vol) {
                if (MP4AV_Mpeg4ParseVol(vol,
                                        ud_size - (vol - userdata),
                                        &TimeBits,
                                        &TimeTicks,
                                        &FrameDuration,
                                        &FrameWidth,
                                        &FrameHeight,
                                        &aspectRatioDefine,
                                        &aspectRatioWidth,
                                        &aspectRatioHeight)) {
                    ffmpeg->m_c->width = FrameWidth;
                    ffmpeg->m_c->height = FrameHeight;
                    open_codec = true;
                    run_userdata = true;
                }
            }
        }
        if (fmtp != NULL) {
            free_fmtp_parse(fmtp);
        }
    }
    break;
    case CODEC_ID_SVQ3:
        // SVQ3 needs the raw sample description as extradata; not owned here
        ffmpeg->m_c->extradata = (void *)userdata;
        ffmpeg->m_c->extradata_size = ud_size;
        if (vinfo != NULL) {
            ffmpeg->m_c->width = vinfo->width;
            ffmpeg->m_c->height = vinfo->height;
        }
        break;
    default:
        break;
    }
    if (open_codec) {
        if (avcodec_open(ffmpeg->m_c, ffmpeg->m_codec) < 0) {
            ffmpeg_message(LOG_CRIT, "ffmpeg", "failed to open codec");
            // BUGFIX: release everything allocated above instead of leaking
            // the context, the frame, the codec struct, and (possibly) the
            // userdata buffer we allocated.
            if (free_userdata) {
                CHECK_AND_FREE(userdata);
            }
            av_free(ffmpeg->m_picture);
            av_free(ffmpeg->m_c);
            free(ffmpeg);
            return NULL;
        }
        ffmpeg_message(LOG_DEBUG, "ffmpeg", "pixel format is %d",
                       ffmpeg->m_c->pix_fmt);
        ffmpeg->m_codec_opened = true;
        if (run_userdata) {
            // prime the decoder with the configuration bytes
            uint32_t offset = 0;
            do {
                int got_picture;
                int bytes_used = avcodec_decode_video(ffmpeg->m_c,
                                                      ffmpeg->m_picture,
                                                      &got_picture,
                                                      (uint8_t *)userdata + offset,
                                                      ud_size - offset);
                // BUGFIX: a decode error returns < 0; adding it to the
                // unsigned offset would wrap and spin forever.
                if (bytes_used <= 0) break;
                offset += bytes_used;
            } while (offset < ud_size);
        }

    }

    if (free_userdata) {
        CHECK_AND_FREE(userdata);
    }
    ffmpeg->m_did_pause = 1;
    return ((codec_data_t *)ffmpeg);
}