/* Write the current encoded audio packet into the open ISOBMFF fragment
 * (legacy p_-prefixed DashCast variant).
 * Returns 0 on success, -1 on GPAC error.
 * Side effect: advances p_aoutf->dts by one codec frame, even on failure,
 * so the timeline stays aligned with the encoder output. */
int dc_gpac_audio_isom_write(AudioOutputFile * p_aoutf)
{
	GF_Err ret;

	/* point the reusable GF_ISOSample at the AVPacket payload — no copy */
	p_aoutf->p_sample->data = (char *) p_aoutf->packet.data;
	p_aoutf->p_sample->dataLength = p_aoutf->packet.size;
	p_aoutf->p_sample->DTS = p_aoutf->dts;
	/* every audio frame is a sync sample */
	p_aoutf->p_sample->IsRAP = 1;

	ret = gf_isom_fragment_add_sample(p_aoutf->p_isof, 1, p_aoutf->p_sample, 1,
			p_aoutf->p_codec_ctx->frame_size, 0, 0, 0);
	p_aoutf->dts += p_aoutf->p_codec_ctx->frame_size;
	if (ret != GF_OK) {
		fprintf(stderr, "%s: gf_isom_fragment_add_sample\n", gf_error_to_string(ret));
		return -1;
	}
	return 0;
}
/* Grab the current terminal framebuffer, PNG-encode it and write it to the
 * snapshot file named by validator_get_snapshot_name().
 * Returns the dump file name (allocated by the name helper); the snapshot
 * counter is incremented even when dumping or encoding fails. */
static char *validator_create_snapshot(GF_Validator *validator)
{
	GF_Err e;
	GF_VideoSurface fb;
	GF_Terminal *term = validator->term;
	char *dumpname;

	dumpname = validator_get_snapshot_name(validator, validator->is_recording, validator->snapshot_number);

	e = gf_term_get_screen_buffer(term, &fb);
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error dumping screen buffer %s\n", gf_error_to_string(e)));
	} else {
		/* worst-case RGB24 size for the PNG encoder output */
		u32 dst_size = fb.width*fb.height*3;
		char *dst = gf_malloc(sizeof(char)*dst_size);
		if (!dst) {
			/* FIX: previously dst was passed unchecked to gf_img_png_enc,
			 * crashing on allocation failure */
			GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Cannot allocate %u bytes for PNG encoding\n", dst_size));
		} else {
			e = gf_img_png_enc(fb.video_buffer, fb.width, fb.height, fb.pitch_y, fb.pixel_format, dst, &dst_size);
			if (e) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error encoding PNG %s\n", gf_error_to_string(e)));
			} else {
				FILE *png = gf_f64_open(dumpname, "wb");
				if (!png) {
					GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error writing file %s\n", dumpname));
				} else {
					gf_fwrite(dst, dst_size, 1, png);
					fclose(png);
					GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[Validator] Writing file %s\n", dumpname));
				}
			}
			gf_free(dst);
		}
		gf_term_release_screen_buffer(term, &fb);
	}
	validator->snapshot_number++;
	return dumpname;
}
/* Create and fully initialize an RTP channel towards <dest>:<port>.
 * On success *chan holds the new channel; on any failure *chan is reset to
 * NULL (previously it was left dangling after gf_rtp_del) and the GPAC error
 * is returned. */
GF_Err PNC_InitRTP(GF_RTPChannel **chan, char *dest, int port, unsigned short mtu_size)
{
	GF_Err res;
	GF_RTSPTransport tr;

	*chan = gf_rtp_new();
	if (!*chan) return GF_OUT_OF_MEM;

	res = gf_rtp_set_ports(*chan, 0);
	if (res) {
		fprintf(stderr, "Cannot set RTP ports: %s\n", gf_error_to_string(res));
		gf_rtp_del(*chan);
		*chan = NULL;
		return res;
	}

	/* FIX: zero the transport descriptor first — the sibling initializer
	 * (rtp_stream_init_channel) does this; without it, fields not assigned
	 * below (e.g. TTL) are read uninitialized by gf_rtp_setup_transport */
	memset(&tr, 0, sizeof(GF_RTSPTransport));
	tr.destination = dest;
	tr.IsUnicast = gf_sk_is_multicast_address(dest) ? 0 : 1;
	tr.Profile="RTP/AVP";
	tr.IsRecord = 0;
	tr.Append = 0;
	tr.source = "0.0.0.0";
	tr.SSRC = rand();
	tr.port_first = port;
	tr.port_last = port+1;
	if (tr.IsUnicast) {
		tr.client_port_first = port;
		tr.client_port_last = port+1;
	} else {
		/* multicast: source is the group address, no client ports */
		tr.source = dest;
		tr.client_port_first = 0;
		tr.client_port_last = 0;
	}

	res = gf_rtp_setup_transport(*chan, &tr, dest);
	if (res) {
		fprintf(stderr, "Cannot setup RTP transport %s\n", gf_error_to_string(res));
		gf_rtp_del(*chan);
		*chan = NULL;
		return res;
	}

	res = gf_rtp_initialize(*chan, 0, 1, mtu_size, 0, 0, NULL);
	if (res) {
		fprintf(stderr, "Cannot initialize RTP transport %s\n", gf_error_to_string(res));
		gf_rtp_del(*chan);
		*chan = NULL;
		return res;
	}
	return GF_OK;
}
/* Draw one frame of the 2D visual: init the draw phase, traverse the scene
 * graph (plus any extra scenes when this is the root visual), then flush the
 * dirty areas. Returns the result of visual_2d_terminate_draw, or 0 if the
 * init phase failed. Per-phase timings are collected unless logging is
 * compiled out. */
Bool visual_2d_draw_frame(GF_VisualManager *visual, GF_Node *root, GF_TraverseState *tr_state, Bool is_root_visual)
{
	GF_SceneGraph *sg;
	GF_Matrix2D backup;
	u32 i;
	Bool res;
	GF_Err e;
#ifndef GPAC_DISABLE_LOG
	u32 itime, time = gf_sys_clock();
#endif

	/* save the transform: traversal mutates it and it must be restored
	 * before terminate_draw and on the early-error path */
	gf_mx2d_copy(backup, tr_state->transform);
	visual->bounds_tracker_modif_flag = DRAWABLE_HAS_CHANGED;

	e = visual_2d_init_draw(visual, tr_state);
	if (e) {
		gf_mx2d_copy(tr_state->transform, backup);
		GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Visual2D] Cannot init draw phase: %s\n", gf_error_to_string(e)));
		return 0;
	}

#ifndef GPAC_DISABLE_LOG
	itime = gf_sys_clock();
	visual->compositor->traverse_setup_time = itime - time;
	time = itime;
#endif

	GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual2D] Traversing scene subtree (root node %s)\n", root ? gf_node_get_class_name(root) : "none"));

	if (is_root_visual) {
		gf_node_traverse(root, tr_state);
		/* root visual also traverses any extra (inline) scene graphs */
		i=0;
		while ((sg = (GF_SceneGraph*)gf_list_enum(visual->compositor->extra_scenes, &i))) {
			gf_sc_traverse_subscene(visual->compositor, root, sg, tr_state);
		}
	} else {
		gf_node_traverse(root, tr_state);
	}

#ifndef GPAC_DISABLE_LOG
	itime = gf_sys_clock();
	visual->compositor->traverse_and_direct_draw_time = itime - time;
	time = itime;
#endif

	gf_mx2d_copy(tr_state->transform, backup);
	res = visual_2d_terminate_draw(visual, tr_state);

#ifndef GPAC_DISABLE_LOG
	if (!tr_state->immediate_draw) {
		visual->compositor->indirect_draw_time = gf_sys_clock() - time;
	}
#endif

	return res;
}
// Open the MP4 file (progressive open tolerates a still-incomplete file)
// and wire up the reader and the default output pin.
GPACDemuxMP4Simple::GPACDemuxMP4Simple(std::string const& path)
	: reader(new ISOFileReader) {
	GF_ISOFile *isoFile = nullptr;
	u64 bytesMissing = 0;
	GF_Err err = gf_isom_open_progressive(path.c_str(), 0, 0, &isoFile, &bytesMissing);
	bool const opened = (err == GF_OK) || (err == GF_ISOM_INCOMPLETE_FILE);
	if (!opened || !isoFile) {
		throw error(format("Could not open file %s for reading (%s).", path, gf_error_to_string(err)));
	}
	reader->init(isoFile);
	output = addOutput(new OutputDefault);
}
/* Finalize and close the ISOBMFF file of a video output.
 * Returns 0 on success, -1 on GPAC error. */
int dc_gpac_video_isom_close(VideoOutputFile *video_output_file)
{
	GF_Err err = gf_isom_close(video_output_file->isof);
	if (err == GF_OK)
		return 0;
	GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_close\n", gf_error_to_string(err)));
	return -1;
}
/* Close the currently open DASH video segment and log its wall-clock end
 * time. Returns 0 on success, -1 on GPAC error. */
int dc_gpac_video_isom_close_seg(VideoOutputFile *video_output_file)
{
	GF_Err err = gf_isom_close_segment(video_output_file->isof,
			0, 0, 0, 0, 0, 0, 1,
			video_output_file->seg_marker, NULL, NULL);
	if (err != GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_close_segment\n", gf_error_to_string(err)));
		return -1;
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DashCast] Rep %s Closing segment at UTC "LLU" ms\n", video_output_file->rep_id, gf_net_get_utc() ));
	return 0;
}
/* Create and configure rtp->channel for streaming to <dest>:<port>.
 * path_mtu is the transport MTU including RTP header, ttl the (multicast)
 * TTL, ifce_addr an optional local interface address.
 * Returns GF_OK or the GPAC error. */
static GF_Err rtp_stream_init_channel(GF_RTPStreamer *rtp, u32 path_mtu, const char * dest, int port, int ttl, const char *ifce_addr)
{
	GF_RTSPTransport tr;
	GF_Err res;

	rtp->channel = gf_rtp_new();
	/* FIX: gf_rtp_new result was used unchecked */
	if (!rtp->channel) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Cannot allocate RTP channel\n"));
		return GF_OUT_OF_MEM;
	}
	/* FIX: return value was silently discarded */
	res = gf_rtp_set_ports(rtp->channel, 0);
	if (res != GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Cannot set RTP ports: %s\n", gf_error_to_string(res) ));
		return res;
	}

	memset(&tr, 0, sizeof(GF_RTSPTransport));
	tr.IsUnicast = gf_sk_is_multicast_address(dest) ? 0 : 1;
	tr.Profile="RTP/AVP";
	tr.destination = (char *)dest;
	tr.source = "0.0.0.0";
	tr.IsRecord = 0;
	tr.Append = 0;
	tr.SSRC = rand();
	tr.TTL = ttl;

	tr.port_first = port;
	tr.port_last = port+1;
	if (tr.IsUnicast) {
		tr.client_port_first = port;
		tr.client_port_last = port+1;
	} else {
		/* multicast: source is the group address */
		tr.source = (char *)dest;
	}

	res = gf_rtp_setup_transport(rtp->channel, &tr, dest);
	if (res !=0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Cannot setup RTP transport info: %s\n", gf_error_to_string(res) ));
		return res;
	}

	res = gf_rtp_initialize(rtp->channel, 0, 1, path_mtu, 0, 0, (char *)ifce_addr);
	if (res !=0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Cannot initialize RTP sockets: %s\n", gf_error_to_string(res) ));
		return res;
	}
	return GF_OK;
}
/* Finalize and close the ISOBMFF file of an audio output.
 * Returns 0 on success, -1 on GPAC error. */
int dc_gpac_audio_isom_close(AudioOutputFile *audio_output_file)
{
	GF_Err err = gf_isom_close(audio_output_file->isof);
	if (err == GF_OK)
		return 0;
	GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_close\n", gf_error_to_string(err)));
	return -1;
}
/* Packetizer callback: a complete RTP payload is ready in rtp->buffer.
 * Sends it on the channel and resets the payload accumulator for the next
 * packet. Send errors are logged but not propagated (cbk is the
 * GF_RTPStreamer passed at builder creation). */
static void rtp_stream_on_packet_done(void *cbk, GF_RTPHeader *header)
{
	GF_RTPStreamer *rtp = cbk;
	/* payload starts after the 12 bytes reserved for the RTP header at the
	 * front of the buffer */
	GF_Err e = gf_rtp_send_packet(rtp->channel, header, rtp->buffer+12, rtp->payload_len, 1);

#ifndef GPAC_DISABLE_LOG
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Error %s sending RTP packet\n", gf_error_to_string(e)));
	} else {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_RTP, ("RTP SN %u - TS %u - M %u - Size %u\n", header->SequenceNumber, header->TimeStamp, header->Marker, rtp->payload_len + 12));
	}
#endif
	/* ready for the next packet */
	rtp->payload_len = 0;
}
/* Close the currently open DASH audio segment and log its wall-clock end
 * time. Returns 0 on success, -1 on GPAC error. */
int dc_gpac_audio_isom_close_seg(AudioOutputFile *audio_output_file)
{
	GF_Err err = gf_isom_close_segment(audio_output_file->isof,
			0, 0,0, 0, 0, 0, 1,
			audio_output_file->seg_marker, NULL, NULL);
	if (err != GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_close_segment\n", gf_error_to_string(err)));
		return -1;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DashCast] Audio segment closed at "LLU"\n", gf_net_get_utc() ));
	return 0;
}
/* Finalize and close the ISOBMFF audio file (legacy p_-prefixed variant).
 * Returns 0 on success, -1 on GPAC error. */
int dc_gpac_audio_isom_close(AudioOutputFile * p_aoutf)
{
	GF_Err close_res = gf_isom_close(p_aoutf->p_isof);
	if (close_res == GF_OK)
		return 0;
	fprintf(stderr, "%s: gf_isom_close\n", gf_error_to_string(close_res));
	return -1;
}
/* Start a new DASH audio segment in <filename> and reset the running DTS.
 * Returns 0 on success, -1 on GPAC error. */
int dc_gpac_audio_isom_open_seg(AudioOutputFile *audio_output_file, char *filename)
{
	GF_Err err = gf_isom_start_segment(audio_output_file->isof, filename, GF_TRUE);
	if (err != GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_start_segment\n", gf_error_to_string(err)));
		return -1;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DashCast] Audio segment %s started at "LLU"\n", filename, gf_net_get_utc() ));

	/* new segment: timestamps restart from zero */
	audio_output_file->dts = 0;
	return 0;
}
/* Append the current encoded audio packet to the open ISOBMFF fragment.
 * Returns 0 on success, -1 on GPAC error. The running DTS is advanced by one
 * codec frame in both cases. */
int dc_gpac_audio_isom_write(AudioOutputFile *audio_output_file)
{
	GF_Err err;

	/* point the reusable sample at the encoded packet — no copy */
	audio_output_file->sample->data = (char *) audio_output_file->packet.data;
	audio_output_file->sample->dataLength = audio_output_file->packet.size;
	audio_output_file->sample->DTS = audio_output_file->dts;
	audio_output_file->sample->IsRAP = RAP;

	err = gf_isom_fragment_add_sample(audio_output_file->isof, 1,
			audio_output_file->sample, 1,
			audio_output_file->codec_ctx->frame_size, 0, 0, 0);
	audio_output_file->dts += audio_output_file->codec_ctx->frame_size;

	if (err == GF_OK)
		return 0;
	GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_fragment_add_sample\n", gf_error_to_string(err)));
	return -1;
}
/* Start a new ISOBMFF segment named psz_name and reset the running DTS
 * (legacy p_-prefixed variant). Returns 0 on success, -1 on GPAC error. */
int dc_gpac_audio_isom_open_seg(AudioOutputFile * p_aoutf, char * psz_name)
{
	GF_Err ret;

	ret = gf_isom_start_segment(p_aoutf->p_isof, psz_name);
	if (ret != GF_OK) {
		fprintf(stderr, "%s: gf_isom_start_segment\n", gf_error_to_string(ret));
		return -1;
	}

	/* new segment: timestamps restart from zero */
	p_aoutf->dts = 0;
	return 0;
}
/* Start a new DASH video segment in <filename> and log its wall-clock start
 * time. Returns 0 on success, -1 on GPAC error. */
int dc_gpac_video_isom_open_seg(VideoOutputFile *video_output_file, char *filename)
{
	GF_Err ret;

	ret = gf_isom_start_segment(video_output_file->isof, filename, 1);
	if (ret != GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_start_segment\n", gf_error_to_string(ret)));
		return -1;
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DashCast] Opening new segment %s at UTC "LLU" ms\n", filename, gf_net_get_utc() ));
	return 0;
}
/* GPAC terminal event callback: shows message/error events in a message box
 * and resizes the terminal on scene-size events. Always returns 0 (event not
 * consumed). */
Bool COsmo4AppView::EventProc(GF_Event *evt)
{
	TRect r;
#ifndef GPAC_GUI_ONLY
	switch (evt->type) {
	case GF_EVENT_MESSAGE:
		if (!evt->message.message) return 0;
		if (evt->message.error) {
			char err[1024];
			/* FIX: bound the formatted write — the original used sprintf
			 * into a fixed buffer */
			snprintf(err, sizeof(err), "Error: %s", gf_error_to_string(evt->message.error));
			MessageBox(evt->message.message, err);
		} else {
			MessageBox(evt->message.message, "Info");
		}
		break;
	case GF_EVENT_SCENE_SIZE:
		r = Rect();
		gf_term_set_size(m_term, r.Width(), r.Height());
		break;
	}
#endif
	return 0;
}
/* Create a scene engine from an in-memory scene description string.
 * The loader type is auto-detected from the text when load_type is 0
 * (SVG/SAF/XMT-A for XML-looking input, BT otherwise). width/height and
 * usePixelMetrics are only applied when the loaded context has no root OD.
 * Returns the engine, or NULL on load/setup failure (engine is torn down). */
GF_EXPORT
GF_SceneEngine *gf_seng_init_from_string(void *calling_object, char * inputContext, u32 load_type, u32 width, u32 height, Bool usePixelMetrics, char *dump_path)
{
	GF_SceneEngine *seng;
	GF_Err e = GF_OK;

	if (!inputContext) return NULL;

	GF_SAFEALLOC(seng, GF_SceneEngine)
	if (!seng) return NULL;

	seng->calling_object = calling_object;
	seng->dump_path = dump_path;

	/*Step 1: create context and load input*/
	seng->sg = gf_sg_new();
	seng->ctx = gf_sm_new(seng->sg);
	seng->owns_context = 1;
	memset(& seng->loader, 0, sizeof(GF_SceneLoader));
	seng->loader.ctx = seng->ctx;
	seng->loader.type = load_type;
	/*since we're encoding in BIFS we must get MPEG-4 nodes only*/
	seng->loader.flags = GF_SM_LOAD_MPEG4_STRICT;

	/* assign a loader type only if it was not requested (e.g. DIMS should not be overriden by SVG) */
	if (!seng->loader.type) {
		if (inputContext[0] == '<') {
			if (strstr(inputContext, "<svg ")) seng->loader.type = GF_SM_LOAD_SVG;
			else if (strstr(inputContext, "<saf ")) seng->loader.type = GF_SM_LOAD_XSR;
			else if (strstr(inputContext, "XMT-A") || strstr(inputContext, "X3D")) seng->loader.type = GF_SM_LOAD_XMTA;
		} else {
			seng->loader.type = GF_SM_LOAD_BT;
		}
	}

	e = gf_sm_load_string(&seng->loader, inputContext, 0);
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[SceneEngine] cannot load context from %s (error %s)\n", inputContext, gf_error_to_string(e)));
		goto exit;
	}

	/* no root OD in the context: take scene dimensions from the caller */
	if (!seng->ctx->root_od) {
		seng->ctx->is_pixel_metrics = usePixelMetrics;
		seng->ctx->scene_width = width;
		seng->ctx->scene_height = height;
	}

	e = gf_sm_live_setup(seng);
	if (e!=GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[SceneEngine] cannot init scene encoder for context (error %s)\n", gf_error_to_string(e)));
		goto exit;
	}
	return seng;

exit:
	gf_seng_terminate(seng);
	return NULL;
}
/*!
 * Drains the TS muxer and sends each 188-byte packet to the output UDP socket.
 * \param ts The TS muxer wrapper holding the muxer and the output UDP socket
 * \return GF_OK, or the socket error if a send failed
 */
static GF_Err sendTSMux(GF_AbstractTSMuxer * ts)
{
	u32 status;
	const char * pkt;
	GF_Err e;
	u32 padding, data;
	padding = data = 0;

	/* each call returns one 188-byte TS packet, or NULL when drained */
	while ( (NULL!= ( pkt = gf_m2ts_mux_process ( ts->muxer, &status )))) {
		/* track data vs padding byte counts for the summary log below */
		switch (status) {
		case GF_M2TS_STATE_IDLE:
			break;
		case GF_M2TS_STATE_DATA:
			data+=188;
			break;
		case GF_M2TS_STATE_PADDING:
			padding+=188;
			break;
		default:
			break;
		}
		if (ts->ts_output_udp_sk) {
			e = gf_sk_send ( ts->ts_output_udp_sk, pkt, 188);
			if ( e ) {
				GF_LOG ( GF_LOG_ERROR, GF_LOG_MODULE, ( "[AVRedirect] Unable to send TS data : %s\n", gf_error_to_string(e)) );
				return e;
			}
		}
	}
	if (data || padding)
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("[AVRedirect] : Sent TS data=%u/padding=%u\n", data, padding));
	return GF_OK;
}
/* Create a scene engine from a scene description file (inputContext is the
 * file name). The engine owns the scene graph and context it creates.
 * Returns the engine, or NULL on load/setup failure (engine is torn down). */
GF_EXPORT
GF_SceneEngine *gf_seng_init(void *calling_object, char * inputContext, u32 load_type, char *dump_path, Bool embed_resources)
{
	GF_SceneEngine *seng;
	GF_Err e = GF_OK;

	if (!inputContext) return NULL;

	GF_SAFEALLOC(seng, GF_SceneEngine)
	if (!seng) return NULL;

	seng->calling_object = calling_object;

	/*Step 1: create context and load input*/
	seng->sg = gf_sg_new();
	gf_sg_set_node_callback(seng->sg, gf_seng_on_node_modified);
	gf_sg_set_private(seng->sg, seng);
	seng->dump_path = dump_path;
	seng->ctx = gf_sm_new(seng->sg);
	seng->owns_context = 1;
	memset(&(seng->loader), 0, sizeof(GF_SceneLoader));
	seng->loader.ctx = seng->ctx;
	seng->loader.type = load_type;
	/*since we're encoding in BIFS we must get MPEG-4 nodes only*/
	seng->loader.flags = GF_SM_LOAD_MPEG4_STRICT;
	if (embed_resources) seng->loader.flags |= GF_SM_LOAD_EMBEDS_RES;

	seng->loader.fileName = inputContext;
	e = gf_sm_load_init(&(seng->loader));
	if (!e) e = gf_sm_load_run(&(seng->loader));

	/* NOTE(review): only e<0 is treated as failure — presumably positive
	 * GF_Err values (warnings) are deliberately tolerated here; confirm */
	if (e<0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[SceneEngine] Cannot load context from %s (error %s)\n", inputContext, gf_error_to_string(e)));
		goto exit;
	}

	e = gf_sm_live_setup(seng);
	if (e!=GF_OK) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[SceneEngine] cannot init scene encoder for context (error %s)\n", gf_error_to_string(e)));
		goto exit;
	}
	return seng;

exit:
	gf_seng_terminate(seng);
	return NULL;
}
/* Create a scene engine on top of an existing scene manager context.
 * The engine does NOT own the context (owns_context = 0); the caller keeps
 * responsibility for it. Returns the engine, or NULL on setup failure. */
GF_EXPORT
GF_SceneEngine *gf_seng_init_from_context(void *calling_object, GF_SceneManager *ctx, char *dump_path)
{
	GF_SceneEngine *engine;
	GF_Err err;

	if (!ctx) return NULL;

	GF_SAFEALLOC(engine, GF_SceneEngine)
	if (!engine) return NULL;

	engine->calling_object = calling_object;
	engine->dump_path = dump_path;

	/* reuse the caller's graph and context — nothing is created here */
	engine->sg = ctx->scene_graph;
	engine->ctx = ctx;
	engine->owns_context = 0;

	err = gf_sm_live_setup(engine);
	if (err == GF_OK) return engine;

	GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[SceneEngine] cannot init scene encoder for context (error %s)\n", gf_error_to_string(err)));
	gf_seng_terminate(engine);
	return NULL;
}
/* Create a fully configured RTP streamer for the given stream type / object
 * type indication: maps the (streamType, oti) pair to an RTP payload format,
 * builds the SL config, creates the packetizer and opens the RTP channel.
 * Returns NULL when the format is unsupported or any step fails.
 * FIXES vs original: the GF_SAFEALLOC result is NULL-checked before use, the
 * early "unsupported" returns no longer leak the streamer, and the payload
 * buffer allocation is checked. */
GF_EXPORT
GF_RTPStreamer *gf_rtp_streamer_new_extended(u32 streamType, u32 oti, u32 timeScale,
        const char *ip_dest, u16 port, u32 MTU, u8 TTL, const char *ifce_addr,
        u32 flags, char *dsi, u32 dsi_len,
        u32 PayloadType, u32 sample_rate, u32 nb_ch,
        Bool is_crypted, u32 IV_length, u32 KI_length,
        u32 MinSize, u32 MaxSize, u32 avgTS, u32 maxDTSDelta,
        u32 const_dur, u32 bandwidth, u32 max_ptime, u32 au_sn_len)
{
	GF_SLConfig slc;
	GF_RTPStreamer *stream;
	u32 rtp_type, default_rtp_rate;
	u8 OfficialPayloadType;
	u32 required_rate, force_dts_delta, PL_ID;
	char *mpeg4mode;
	Bool has_mpeg4_mapping;
	GF_Err e;

	if (!timeScale) timeScale = 1000;

	GF_SAFEALLOC(stream, GF_RTPStreamer);
	if (!stream) return NULL;

	/*by default NO PL signaled*/
	PL_ID = 0;
	OfficialPayloadType = 0;
	force_dts_delta = 0;
	mpeg4mode = NULL;
	required_rate = 0;
	nb_ch = 0;
	has_mpeg4_mapping = 1;
	rtp_type = 0;
	/*for max compatibility with QT*/
	default_rtp_rate = 90000;

	/*timed-text is a bit special, we support multiple stream descriptions & co*/
	switch (streamType) {
	case GF_STREAM_TEXT:
		if (oti!=GPAC_OTI_TEXT_MPEG4) {
			gf_free(stream);
			return NULL;
		}
		rtp_type = GF_RTP_PAYT_3GPP_TEXT;
		/*fixme - this works cos there's only one PL for text in mpeg4 at the current time*/
		PL_ID = 0x10;
		break;
	case GF_STREAM_AUDIO:
		required_rate = sample_rate;
		switch (oti) {
		/*AAC*/
		case GPAC_OTI_AUDIO_AAC_MPEG4:
		case GPAC_OTI_AUDIO_AAC_MPEG2_MP:
		case GPAC_OTI_AUDIO_AAC_MPEG2_LCP:
		case GPAC_OTI_AUDIO_AAC_MPEG2_SSRP:
			PL_ID = 0x01;
			mpeg4mode = "AAC";
			rtp_type = GF_RTP_PAYT_MPEG4;
			required_rate = sample_rate;
#ifndef GPAC_DISABLE_AV_PARSERS
			if (dsi) {
				GF_M4ADecSpecInfo a_cfg;
				gf_m4a_get_config(dsi, dsi_len, &a_cfg);
				nb_ch = a_cfg.nb_chan;
				sample_rate = a_cfg.base_sr;
				PL_ID = a_cfg.audioPL;
				switch (a_cfg.base_object_type) {
				case GF_M4A_AAC_MAIN:
				case GF_M4A_AAC_LC:
					if (flags & GP_RTP_PCK_USE_LATM_AAC) {
						rtp_type = GF_RTP_PAYT_LATM;
						break;
					}
					/*fallthrough*/
				case GF_M4A_AAC_SBR:
				case GF_M4A_AAC_PS:
				case GF_M4A_AAC_LTP:
				case GF_M4A_AAC_SCALABLE:
				case GF_M4A_ER_AAC_LC:
				case GF_M4A_ER_AAC_LTP:
				case GF_M4A_ER_AAC_SCALABLE:
					mpeg4mode = "AAC";
					break;
				case GF_M4A_CELP:
				case GF_M4A_ER_CELP:
					mpeg4mode = "CELP";
					break;
				}
			}
#endif
			break;
		/*MPEG1/2 audio*/
		case GPAC_OTI_AUDIO_MPEG2_PART3:
		case GPAC_OTI_AUDIO_MPEG1:
			if (!is_crypted) {
				rtp_type = GF_RTP_PAYT_MPEG12_AUDIO;
				/*use official RTP/AVP payload type*/
				OfficialPayloadType = 14;
				required_rate = 90000;
			}
			/*encrypted MP3 must be sent through MPEG-4 generic to signal all ISMACryp stuff*/
			else {
				rtp_type = GF_RTP_PAYT_MPEG4;
			}
			break;
		/*QCELP audio*/
		case GPAC_OTI_AUDIO_13K_VOICE:
			rtp_type = GF_RTP_PAYT_QCELP;
			OfficialPayloadType = 12;
			required_rate = 8000;
			nb_ch = 1;
			break;
		/*EVRC/SVM audio*/
		case GPAC_OTI_AUDIO_EVRC_VOICE:
		case GPAC_OTI_AUDIO_SMV_VOICE:
			rtp_type = GF_RTP_PAYT_EVRC_SMV;
			required_rate = 8000;
			nb_ch = 1;
		}
		break;
	case GF_STREAM_VISUAL:
		rtp_type = GF_RTP_PAYT_MPEG4;
		required_rate = default_rtp_rate;
		if (is_crypted) {
			/*that's another pain with ISMACryp, even if no B-frames the DTS is signaled...*/
			if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) force_dts_delta = 22;
			flags |= GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_SIGNAL_TS;
		}
		switch (oti) {
		/*ISO/IEC 14496-2*/
		case GPAC_OTI_VIDEO_MPEG4_PART2:
			PL_ID = 1;
#ifndef GPAC_DISABLE_AV_PARSERS
			if (dsi) {
				GF_M4VDecSpecInfo vhdr;
				gf_m4v_get_config(dsi, dsi_len, &vhdr);
				PL_ID = vhdr.VideoPL;
			}
#endif
			break;
		/*MPEG1/2 video*/
		case GPAC_OTI_VIDEO_MPEG1:
		case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
		case GPAC_OTI_VIDEO_MPEG2_MAIN:
		case GPAC_OTI_VIDEO_MPEG2_SNR:
		case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
		case GPAC_OTI_VIDEO_MPEG2_HIGH:
		case GPAC_OTI_VIDEO_MPEG2_422:
			if (!is_crypted) {
				rtp_type = GF_RTP_PAYT_MPEG12_VIDEO;
				OfficialPayloadType = 32;
			}
			break;
		/*AVC/H.264*/
		case GPAC_OTI_VIDEO_AVC:
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			rtp_type = GF_RTP_PAYT_H264_AVC;
			PL_ID = 0x0F;
			break;
		/*H264-SVC*/
		case GPAC_OTI_VIDEO_SVC:
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			rtp_type = GF_RTP_PAYT_H264_SVC;
			PL_ID = 0x0F;
			break;
		/*HEVC*/
		case GPAC_OTI_VIDEO_HEVC:
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			rtp_type = GF_RTP_PAYT_HEVC;
			PL_ID = 0x0F;
			break;
		}
		break;
	case GF_STREAM_SCENE:
	case GF_STREAM_OD:
		if (oti == GPAC_OTI_SCENE_DIMS) {
			rtp_type = GF_RTP_PAYT_3GPP_DIMS;
			has_mpeg4_mapping = 0;
		} else {
			rtp_type = GF_RTP_PAYT_MPEG4;
		}
		break;
	case GF_STREAM_4CC:
		switch (oti) {
		case GF_ISOM_SUBTYPE_3GP_H263:
			rtp_type = GF_RTP_PAYT_H263;
			required_rate = 90000;
			streamType = GF_STREAM_VISUAL;
			OfficialPayloadType = 34;
			/*not 100% compliant (short header is missing) but should still work*/
			oti = GPAC_OTI_VIDEO_MPEG4_PART2;
			PL_ID = 0x01;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR:
			required_rate = 8000;
			rtp_type = GF_RTP_PAYT_AMR;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR_WB:
			required_rate = 16000;
			rtp_type = GF_RTP_PAYT_AMR_WB;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_AC3:
			rtp_type = GF_RTP_PAYT_AC3;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 1;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_AVC3_H264:
		case GF_ISOM_SUBTYPE_AVC4_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			rtp_type = GF_RTP_PAYT_H264_AVC;
			streamType = GF_STREAM_VISUAL;
			oti = GPAC_OTI_VIDEO_AVC;
			PL_ID = 0x0F;
		}
		break;
		case GF_ISOM_SUBTYPE_3GP_QCELP:
			required_rate = 8000;
			rtp_type = GF_RTP_PAYT_QCELP;
			streamType = GF_STREAM_AUDIO;
			oti = GPAC_OTI_AUDIO_13K_VOICE;
			OfficialPayloadType = 12;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_EVRC:
		case GF_ISOM_SUBTYPE_3GP_SMV:
			required_rate = 8000;
			rtp_type = GF_RTP_PAYT_EVRC_SMV;
			streamType = GF_STREAM_AUDIO;
			oti = (oti==GF_ISOM_SUBTYPE_3GP_EVRC) ? GPAC_OTI_AUDIO_EVRC_VOICE : GPAC_OTI_AUDIO_SMV_VOICE;
			nb_ch = 1;
			break;
		}
		break;
	default:
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[RTP Packetizer] Unsupported stream type %x\n", streamType));
		gf_free(stream);
		return NULL;
	}

	/*not supported*/
	if (!rtp_type) {
		gf_free(stream);
		return NULL;
	}

	/*override hinter type if requested and possible*/
	if (has_mpeg4_mapping && (flags & GP_RTP_PCK_FORCE_MPEG4)) {
		rtp_type = GF_RTP_PAYT_MPEG4;
	}
	/*use static payload ID if enabled*/
	else if (OfficialPayloadType && (flags & GP_RTP_PCK_USE_STATIC_ID) ) {
		PayloadType = OfficialPayloadType;
	}

	/*systems carousel: we need at least IDX and RAP signaling*/
	if (flags & GP_RTP_PCK_SYSTEMS_CAROUSEL) {
		flags |= GP_RTP_PCK_SIGNAL_RAP;
	}

	/*update flags in MultiSL*/
	if (flags & GP_RTP_PCK_USE_MULTI) {
		if (MinSize != MaxSize) flags |= GP_RTP_PCK_SIGNAL_SIZE;
		if (!const_dur) flags |= GP_RTP_PCK_SIGNAL_TS;
	}

	/*default SL for RTP */
	memset(&slc, 0, sizeof(GF_SLConfig));
	slc.tag = GF_ODF_SLC_TAG;
	slc.useTimestampsFlag = 1;
	slc.timestampLength = 32;
	slc.timestampResolution = timeScale;

	/*override clockrate if set*/
	if (required_rate) {
		Double sc = required_rate;
		sc /= slc.timestampResolution;
		maxDTSDelta = (u32) (maxDTSDelta*sc);
		slc.timestampResolution = required_rate;
	}
	/*switch to RTP TS*/
	max_ptime = (u32) (max_ptime * slc.timestampResolution / 1000);

	slc.AUSeqNumLength = au_sn_len;
	slc.CUDuration = const_dur;

	if (flags & GP_RTP_PCK_SIGNAL_RAP) {
		slc.useRandomAccessPointFlag = 1;
	} else {
		slc.useRandomAccessPointFlag = 0;
		slc.hasRandomAccessUnitsOnlyFlag = 1;
	}

	stream->packetizer = gf_rtp_builder_new(rtp_type, &slc, flags, stream,
			rtp_stream_on_new_packet, rtp_stream_on_packet_done,
			NULL, rtp_stream_on_data);
	if (!stream->packetizer) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[RTP Packetizer] Failed to create packetizer\n"));
		gf_free(stream);
		return NULL;
	}

	gf_rtp_builder_init(stream->packetizer, PayloadType, MTU, max_ptime,
			streamType, oti, PL_ID, MinSize, MaxSize, avgTS, maxDTSDelta,
			IV_length, KI_length, mpeg4mode);

	if (force_dts_delta) stream->packetizer->slMap.DTSDeltaLength = force_dts_delta;

	e = rtp_stream_init_channel(stream, MTU + 12, ip_dest, port, TTL, ifce_addr);
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[RTP Packetizer] Failed to create RTP channel - error %s\n", gf_error_to_string(e) ));
		gf_free(stream);
		return NULL;
	}

	stream->ts_scale = slc.timestampResolution;
	stream->ts_scale /= timeScale;

	stream->buffer_alloc = MTU+12;
	stream->buffer = gf_malloc(sizeof(char) * stream->buffer_alloc);
	if (!stream->buffer) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[RTP Packetizer] Failed to allocate packet buffer\n"));
		gf_free(stream);
		return NULL;
	}
	return stream;
}
/* Input-service connect entry point of the MPD/DASH demuxer module: wires the
 * DASH-client IO callbacks, reads (and seeds with defaults) the [DASH] config
 * options, then creates and opens the DASH client for <url>.
 * NOTE: always returns GF_OK once parameters are valid — creation/open
 * failures are reported asynchronously via gf_term_on_connect. */
GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url)
{
	GF_MPD_In *mpdin = (GF_MPD_In*) plug->priv;
	const char *opt;
	GF_Err e;
	u32 max_cache_duration, auto_switch_count;
	GF_DASHInitialSelectionMode first_select_mode;
	Bool keep_files, disable_switching;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MPD_IN] Received Service Connection request (%p) from terminal for %s\n", serv, url));

	if (!mpdin|| !serv || !url) return GF_BAD_PARAM;

	mpdin->service = serv;

	/* hook up the DASH client IO abstraction to this module's callbacks */
	mpdin->dash_io.udta = mpdin;
	mpdin->dash_io.delete_cache_file = mpdin_dash_io_delete_cache_file;
	mpdin->dash_io.create = mpdin_dash_io_create;
	mpdin->dash_io.del = mpdin_dash_io_del;
	mpdin->dash_io.abort = mpdin_dash_io_abort;
	mpdin->dash_io.setup_from_url = mpdin_dash_io_setup_from_url;
	mpdin->dash_io.set_range = mpdin_dash_io_set_range;
	mpdin->dash_io.init = mpdin_dash_io_init;
	mpdin->dash_io.run = mpdin_dash_io_run;
	mpdin->dash_io.get_url = mpdin_dash_io_get_url;
	mpdin->dash_io.get_cache_name = mpdin_dash_io_get_cache_name;
	mpdin->dash_io.get_mime = mpdin_dash_io_get_mime;
	mpdin->dash_io.get_bytes_per_sec = mpdin_dash_io_get_bytes_per_sec;
	mpdin->dash_io.get_total_size = mpdin_dash_io_get_total_size;
	mpdin->dash_io.on_dash_event = mpdin_dash_io_on_dash_event;

	/* each option below: read it, and write the default back to the config
	 * file when absent so the user can discover/edit it */
	max_cache_duration = 30;
	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "MaxCacheDuration");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "MaxCacheDuration", "30");
	if (opt) max_cache_duration = atoi(opt);

	auto_switch_count = 0;
	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "AutoSwitchCount");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "AutoSwitchCount", "0");
	if (opt) auto_switch_count = atoi(opt);

	keep_files = 0;
	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "KeepFiles");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "KeepFiles", "no");
	if (opt && !strcmp(opt, "yes")) keep_files = 1;

	disable_switching = 0;
	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "DisableSwitching");
	if (opt && !strcmp(opt, "yes")) disable_switching = 1;

	first_select_mode = 0;
	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "StartRepresentation");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "StartRepresentation", "minBandwidth");
		opt = "minBandwidth";
	}
	if (opt && !strcmp(opt, "maxBandwidth")) first_select_mode = GF_DASH_SELECT_BANDWIDTH_HIGHEST;
	else if (opt && !strcmp(opt, "minQuality")) first_select_mode = GF_DASH_SELECT_QUALITY_LOWEST;
	else if (opt && !strcmp(opt, "maxQuality")) first_select_mode = GF_DASH_SELECT_QUALITY_HIGHEST;
	else first_select_mode = GF_DASH_SELECT_BANDWIDTH_LOWEST;

	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "MemoryStorage");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "MemoryStorage", "no");
	mpdin->memory_storage = (opt && !strcmp(opt, "yes")) ? 1 : 0;

	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "UseMaxResolution");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseMaxResolution", "yes");
	mpdin->use_max_res = (!opt || !strcmp(opt, "yes")) ? 1 : 0;

	opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "ImmediateSwitching");
	if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "ImmediateSwitching", "no");
	mpdin->immediate_switch = (opt && !strcmp(opt, "yes")) ? 1 : 0;

	mpdin->in_seek = 0;
	mpdin->previous_start_range = -1;

	mpdin->dash = gf_dash_new(&mpdin->dash_io, max_cache_duration, auto_switch_count, keep_files, disable_switching, first_select_mode);

	if (!mpdin->dash) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[MPD_IN] Error - cannot create DASH Client for %s\n", url));
		gf_term_on_connect(mpdin->service, NULL, GF_IO_ERR);
		return GF_OK;
	}

	/*dash thread starts at the end of gf_dash_open */
	e = gf_dash_open(mpdin->dash, url);
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[MPD_IN] Error - cannot initialize DASH Client for %s: %s\n", url, gf_error_to_string(e) ));
		gf_term_on_connect(mpdin->service, NULL, e);
		return GF_OK;
	}
	return GF_OK;
}
/* Service/channel connection acknowledgment callback invoked by input modules.
 * If netch is NULL this is a *service*-level ACK; otherwise it is a *channel*-level ACK.
 * On service error the service is torn down (only if still attached to an object);
 * on success pending channels are set up and the entry-point OD is created.
 * NOTE(review): 'term' comes from the GET_TERM() macro (declared elsewhere). */
static void term_on_connect(void *user_priv, GF_ClientService *service, LPNETCHANNEL netch, GF_Err err)
{
	GF_Channel *ch;
	GF_ObjectManager *root;
	GET_TERM();

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] %s connection ACK received from %s - %s\n", netch ? "Channel" : "Service", service->url, gf_error_to_string(err) ));

	root = service->owner;
	/* sanity: the owner OD must point back at this service */
	if (root && (root->net_service != service)) {
		gf_term_message(term, service->url, "Incompatible module type", GF_SERVICE_ERROR);
		return;
	}
	/*this is service connection*/
	if (!netch) {
		gf_term_service_media_event(service->owner, GF_EVENT_MEDIA_SETUP_DONE);
		if (err) {
			char msg[5000];
			snprintf(msg, sizeof(msg)-1, "Cannot open %s", service->url);
			gf_term_message(term, service->url, msg, err);
			gf_term_service_media_event(service->owner, GF_EVENT_ERROR);
			/*destroy service only if attached*/
			if (root) {
				gf_term_lock_media_queue(term, 1);
				service->ifce->CloseService(service->ifce);
				root->net_service = NULL;
				if (service->owner && service->nb_odm_users) service->nb_odm_users--;
				service->owner = NULL;
				/*depends on module: some module could forget to call gf_term_on_disconnect */
				if ( gf_list_del_item(term->net_services, service) >= 0) {
					/*and queue for destroy*/
					gf_list_add(term->net_services_to_remove, service);
				}
				gf_term_lock_media_queue(term, 0);
				if (!root->parentscene) {
					/* root object of the presentation: notify the user it is disconnected */
					GF_Event evt;
					evt.type = GF_EVENT_CONNECT;
					evt.connect.is_connected = 0;
					gf_term_send_event(term, &evt);
				} else {
					if (root->subscene) gf_scene_notify_event(root->subscene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, err);
					/*try to reinsert OD for VRML/X3D with multiple URLs:
					1- first remove from parent scene without destroying object, this will trigger a re-setup if other URLs are present
					2- then destroy object*/
					gf_scene_remove_object(root->parentscene, root, 0);
					gf_odm_disconnect(root, 1);
				}
				return;
			}
		}
		if (!root) {
			/*channel service connect: flush all channels waiting on this service*/
			u32 i;
			GF_ChannelSetup *cs;
			GF_List *ODs;
			if (!gf_list_count(term->channels_pending)) {
				return;
			}
			ODs = gf_list_new();
			gf_term_lock_net(term, 1);
			i=0;
			while ((cs = (GF_ChannelSetup*)gf_list_enum(term->channels_pending, &i))) {
				if (cs->ch->service != service) continue;
				/* remove-and-rewind pattern: compensate enumerator index after deletion */
				gf_list_rem(term->channels_pending, i-1);
				i--;
				/*even if error do setup (channel needs to be deleted)*/
				if (gf_odm_post_es_setup(cs->ch, cs->dec, err) == GF_OK) {
					if (cs->ch->odm && (gf_list_find(ODs, cs->ch->odm)==-1) ) gf_list_add(ODs, cs->ch->odm);
				}
				gf_free(cs);
			}
			gf_term_lock_net(term, 0);
			/*finally setup all ODs concerned (we do this later in case of scalability)*/
			while (gf_list_count(ODs)) {
				GF_ObjectManager *odm = (GF_ObjectManager*)gf_list_get(ODs, 0);
				gf_list_rem(ODs, 0);
				/*force re-setup*/
				gf_scene_setup_object(odm->parentscene, odm);
			}
			gf_list_del(ODs);
		} else {
			/*setup od*/
			gf_odm_setup_entry_point(root, service->url);
		}
		/*load cache if requested*/
		if (!err && term->enable_cache) {
			err = gf_term_service_cache_load(service);
			/*not a fatal error*/
			if (err) gf_term_message(term, "GPAC Cache", "Cannot load cache", err);
		}
		return;
	}
	/*this is channel connection*/
	ch = gf_term_get_channel(service, netch);
	if (!ch) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Terminal] Channel connection ACK error: channel not found\n"));
		return;
	}
	/*confirm channel connection even if error - this allow playback of objects even if not all streams are setup*/
	gf_term_lock_net(term, 1);
	gf_es_on_connect(ch);
	gf_term_lock_net(term, 0);

	/* a missing interaction stream is tolerated; anything else marks the ES unavailable */
	if (err && ((err!=GF_STREAM_NOT_FOUND) || (ch->esd->decoderConfig->streamType!=GF_STREAM_INTERACT))) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Terminal] Channel %d connection error: %s\n", ch->esd->ESID, gf_error_to_string(err) ));
		ch->es_state = GF_ESM_ES_UNAVAILABLE;
		/*		return;*/
	}

	if (ch->odm->mo) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Channel %d connected - %d objects opened\n", ch->esd->ESID, ch->odm->mo->num_open ));
	} else {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Channel %d connected - not attached to the scene\n", ch->esd->ESID));
	}
	/*Plays request are skiped until all channels are connected. We send a PLAY on the objecy in case
	1-OD user has requested a play
	2-this is a channel of the root OD */
	if ( (ch->odm->mo && ch->odm->mo->num_open)
		|| !ch->odm->parentscene
	) {
		gf_odm_start(ch->odm, 0);
	}
#if 0
	else if (ch->odm->codec && ch->odm->codec->ck && ch->odm->codec->ck->no_time_ctrl) {
		gf_odm_play(ch->odm);
	}
#endif
}
/* Creates (or reuses) a download session for a resource belonging to a service.
 * The URL is resolved against the service base URL; if a pre-opened "pending"
 * session already targets the same resource, it is reassigned to the caller
 * instead of opening a new connection. Returns NULL on failure. The returned
 * session is tracked in service->dnloads. */
GF_EXPORT
GF_DownloadSession *gf_term_download_new(GF_ClientService *service, const char *url, u32 flags, gf_dm_user_io user_io, void *cbk)
{
	GF_Err e;
	char *abs_url, *pending_url;
	GF_DownloadSession *dl_sess = NULL;

	if (!service) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_NETWORK, ("[HTTP] service is null, cannot create new download session for %s.\n", url));
		return NULL;
	}

	/* resolve against the service base; a NULL result means url was already absolute */
	abs_url = gf_url_concatenate(service->url, url);
	if (!abs_url) abs_url = gf_strdup(url);

	assert( service->term );

	/* normalize the pending session URL the same way (%20 escaping etc.) before comparing */
	pending_url = service->pending_service_session ? (char *) gf_dm_sess_get_original_resource_name(service->pending_service_session) : NULL;
	if (pending_url) pending_url = gf_url_concatenate(service->url, pending_url);

	if (pending_url && !strcmp(pending_url, abs_url)) {
		/* same resource: steal the pending session and re-arm it with the caller's callbacks */
		dl_sess = service->pending_service_session;
		service->pending_service_session = NULL;
		gf_dm_sess_reassign(dl_sess, flags, user_io, cbk);
	} else {
		dl_sess = gf_dm_sess_new(service->term->downloader, abs_url, flags, user_io, cbk, &e);
	}
	if (pending_url) gf_free(pending_url);

	if (!dl_sess) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_NETWORK, ("[HTTP] session could not be created for %s : %s. service url=%s, url=%s.\n", abs_url, gf_error_to_string(e), service->url, url));
		gf_free(abs_url);
		return NULL;
	}
	gf_free(abs_url);

	gf_dm_sess_set_private(dl_sess, service);
	gf_list_add(service->dnloads, dl_sess);
	return dl_sess;
}
/* Connects the FFMPEG demuxer to a service URL (local file or remote stream).
 * A "#video" / "#audio" URL fragment restricts the service to one media type.
 * Remote inputs are prefetched through the GPAC downloader and probed; local
 * files are opened directly. On success the streams are announced via
 * FFD_SetupObjects; on failure the connect is ACKed with the error code.
 *
 * Fixes vs previous revision:
 *  - szExt could overflow (strcpy of an arbitrary-length extension into char[20])
 *  - on _WIN32_WCE, res==0 (success) fell through to the default error case
 *  - the "no matching stream" error paths jumped to err_exit with e==GF_OK */
static GF_Err FFD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url)
{
	GF_Err e;
	s64 last_aud_pts;
	u32 i;
	s32 res;
	Bool is_local;
	const char *sOpt;
	char *ext, szName[1024];
	FFDemux *ffd = plug->priv;
	AVInputFormat *av_in = NULL;
	char szExt[20];

	if (ffd->ctx) return GF_SERVICE_ERROR;

	assert( url && strlen(url) < 1024);
	strcpy(szName, url);
	ext = strrchr(szName, '#');
	ffd->service_type = 0;
	e = GF_NOT_SUPPORTED;
	ffd->service = serv;

	/* URL fragment selects a single media type: #video or #audio */
	if (ext) {
		if (!stricmp(&ext[1], "video")) ffd->service_type = 1;
		else if (!stricmp(&ext[1], "audio")) ffd->service_type = 2;
		ext[0] = 0;
	}

	/*some extensions not supported by ffmpeg, overload input format*/
	ext = strrchr(szName, '.');
	/* bounded copy: an extension longer than szExt cannot match "cmp" anyway,
	   so treat it as absent instead of overflowing the buffer */
	szExt[0] = 0;
	if (ext && strlen(ext+1) < sizeof(szExt)) strcpy(szExt, ext+1);
	strlwr(szExt);
	if (!strcmp(szExt, "cmp")) av_in = av_find_input_format("m4v");

	is_local = (strnicmp(url, "file://", 7) && strstr(url, "://")) ? 0 : 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] opening file %s - local %d - av_in %08x\n", url, is_local, av_in));

	if (!is_local) {
		AVProbeData pd;
		/*setup wraper for FFMPEG I/O*/
		ffd->buffer_size = 8192;
		sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "FFMPEG", "IOBufferSize");
		if (sOpt) ffd->buffer_size = atoi(sOpt);
		ffd->buffer = gf_malloc(sizeof(char)*ffd->buffer_size);
#ifdef FFMPEG_DUMP_REMOTE
		ffd->outdbg = gf_f64_open("ffdeb.raw", "wb");
#endif
#ifdef USE_PRE_0_7
		init_put_byte(&ffd->io, ffd->buffer, ffd->buffer_size, 0, ffd, ff_url_read, NULL, NULL);
		ffd->io.is_streamed = 1;
#else
		ffd->io.seekable = 1;
#endif

		ffd->dnload = gf_service_download_new(ffd->service, url, GF_NETIO_SESSION_NOT_THREADED | GF_NETIO_SESSION_NOT_CACHED, NULL, ffd);
		if (!ffd->dnload) return GF_URL_ERROR;
		/* prefetch until EOS (small resource, fully cached) or probe buffer full.
		   NOTE(review): assumes ffd->buffer_used starts at 0 — confirm it is reset on reconnect */
		while (1) {
			u32 read;
			e = gf_dm_sess_fetch_data(ffd->dnload, ffd->buffer + ffd->buffer_used, ffd->buffer_size - ffd->buffer_used, &read);
			if (e==GF_EOS) break;
			/*we're sync!!*/
			if (e==GF_IP_NETWORK_EMPTY) continue;
			if (e) goto err_exit;
			ffd->buffer_used += read;
			if (ffd->buffer_used == ffd->buffer_size) break;
		}
		if (e==GF_EOS) {
			/* whole resource fits in cache: open it like a local file */
			const char *cache_file = gf_dm_sess_get_cache_name(ffd->dnload);
			res = open_file(&ffd->ctx, cache_file, av_in);
		} else {
			/* probe the prefetched bytes to identify the container format.
			   NOTE(review): only filename/buf/buf_size are set; newer libavformat
			   adds fields to AVProbeData (e.g. mime_type) — confirm they need no init
			   for the ffmpeg version in use */
			pd.filename = szName;
			pd.buf_size = ffd->buffer_used;
			pd.buf = (u8 *) ffd->buffer;
			av_in = av_probe_input_format(&pd, 1);
			if (!av_in) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] error probing file %s - probe start with %c %c %c %c\n", url, ffd->buffer[0], ffd->buffer[1], ffd->buffer[2], ffd->buffer[3]));
				return GF_NOT_SUPPORTED;
			}
			/*setup downloader*/
			av_in->flags |= AVFMT_NOFILE;
#ifdef USE_AVFORMAT_OPEN_INPUT
			/*commit ffmpeg 603b8bc2a109978c8499b06d2556f1433306eca7*/
			res = avformat_open_input(&ffd->ctx, szName, av_in, NULL);
#else
			res = av_open_input_stream(&ffd->ctx, &ffd->io, szName, av_in, NULL);
#endif
		}
	} else {
		res = open_file(&ffd->ctx, szName, av_in);
	}
	switch (res) {
	/* success case must be visible on all platforms (was guarded by
	   #ifndef _WIN32_WCE, making every open fail on WinCE) */
	case 0:
		e = GF_OK;
		break;
#ifndef _WIN32_WCE
	case AVERROR_IO:
		e = GF_URL_ERROR;
		goto err_exit;
	case AVERROR_INVALIDDATA:
		e = GF_NON_COMPLIANT_BITSTREAM;
		goto err_exit;
	case AVERROR_NOMEM:
		e = GF_OUT_OF_MEM;
		goto err_exit;
	case AVERROR_NOFMT:
		e = GF_NOT_SUPPORTED;
		goto err_exit;
#endif
	default:
		e = GF_SERVICE_ERROR;
		goto err_exit;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] looking for streams in %s - %d streams - type %s\n", ffd->ctx->filename, ffd->ctx->nb_streams, ffd->ctx->iformat->name));

	res = av_find_stream_info(ffd->ctx);
	if (res <0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] cannot locate streams - error %d\n", res));
		e = GF_NOT_SUPPORTED;
		goto err_exit;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] file %s opened - %d streams\n", url, ffd->ctx->nb_streams));

	/*figure out if we can use codecs or not*/
	ffd->audio_st = ffd->video_st = -1;
	for (i = 0; i < ffd->ctx->nb_streams; i++) {
		AVCodecContext *enc = ffd->ctx->streams[i]->codec;
		switch(enc->codec_type) {
		case AVMEDIA_TYPE_AUDIO:
			if ((ffd->audio_st<0) && (ffd->service_type!=1)) {
				ffd->audio_st = i;
				ffd->audio_tscale = ffd->ctx->streams[i]->time_base;
			}
			break;
		case AVMEDIA_TYPE_VIDEO:
			if ((ffd->video_st<0) && (ffd->service_type!=2)) {
				ffd->video_st = i;
				ffd->video_tscale = ffd->ctx->streams[i]->time_base;
			}
			break;
		default:
			break;
		}
	}
	/* set e before jumping: previously these paths reached err_exit with
	   e==GF_OK, acknowledging the failed connect as a success */
	if ((ffd->service_type==1) && (ffd->video_st<0)) {
		e = GF_NOT_SUPPORTED;
		goto err_exit;
	}
	if ((ffd->service_type==2) && (ffd->audio_st<0)) {
		e = GF_NOT_SUPPORTED;
		goto err_exit;
	}
	if ((ffd->video_st<0) && (ffd->audio_st<0)) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] No supported streams in file\n"));
		e = GF_NOT_SUPPORTED;
		goto err_exit;
	}

	sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "FFMPEG", "DataBufferMS");
	ffd->data_buffer_ms = 0;
	if (sOpt) ffd->data_buffer_ms = atoi(sOpt);
	if (!ffd->data_buffer_ms) ffd->data_buffer_ms = FFD_DATA_BUFFER;

	/*build seek*/
	if (is_local) {
		/*check we do have increasing pts. If not we can't rely on pts, we must skip SL
		we assume video pts is always present*/
		if (ffd->audio_st>=0) {
			last_aud_pts = 0;
			for (i=0; i<20; i++) {
				AVPacket pkt;
				pkt.stream_index = -1;
				if (av_read_frame(ffd->ctx, &pkt) <0) break;
				if (pkt.pts == AV_NOPTS_VALUE) pkt.pts = pkt.dts;
				if (pkt.stream_index==ffd->audio_st) last_aud_pts = pkt.pts;
				/* NOTE(review): pkt is never released (av_free_packet) — leaks up to
				   20 packets per connect; confirm against the ffmpeg version in use */
			}
			/* audio pts still below 10 time units after 20 packets: timing unusable */
			if (last_aud_pts*ffd->audio_tscale.den<10*ffd->audio_tscale.num) ffd->unreliable_audio_timing = 1;
		}
		/* probe seekability, then rewind by reopening since we consumed packets */
		ffd->seekable = (av_seek_frame(ffd->ctx, -1, 0, AVSEEK_FLAG_BACKWARD)<0) ? 0 : 1;
		if (!ffd->seekable) {
#ifndef FF_API_CLOSE_INPUT_FILE
			av_close_input_file(ffd->ctx);
#else
			avformat_close_input(&ffd->ctx);
#endif
			ffd->ctx = NULL;
			open_file(&ffd->ctx, szName, av_in);
			av_find_stream_info(ffd->ctx);
		}
	}

	/*let's go*/
	gf_service_connect_ack(serv, NULL, GF_OK);
	/*if (!ffd->service_type)*/ FFD_SetupObjects(ffd);
	ffd->service_type = 0;
	return GF_OK;

err_exit:
	GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] Error opening file %s: %s\n", url, gf_error_to_string(e)));
#ifndef FF_API_CLOSE_INPUT_FILE
	if (ffd->ctx) av_close_input_file(ffd->ctx);
#else
	if (ffd->ctx) avformat_close_input(&ffd->ctx);
#endif
	ffd->ctx = NULL;
	gf_service_connect_ack(serv, NULL, e);
	return GF_OK;
}
static int ff_url_read(void *h, unsigned char *buf, int size) { u32 retry = 10; u32 read; int full_size; FFDemux *ffd = (FFDemux *)h; full_size = 0; if (ffd->buffer_used) { if (ffd->buffer_used >= (u32) size) { ffd->buffer_used-=size; memcpy(ffd->buffer, ffd->buffer+size, sizeof(char)*ffd->buffer_used); #ifdef FFMPEG_DUMP_REMOTE if (ffd->outdbg) gf_fwrite(buf, size, 1, ffd->outdbg); #endif return size; } full_size += ffd->buffer_used; buf += ffd->buffer_used; size -= ffd->buffer_used; ffd->buffer_used = 0; } while (size) { GF_Err e = gf_dm_sess_fetch_data(ffd->dnload, buf, size, &read); if (e==GF_EOS) break; /*we're sync!!*/ if (e==GF_IP_NETWORK_EMPTY) { if (!retry) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] timeout fetching bytes from network\n") ); return -1; } retry --; gf_sleep(100); continue; } if (e) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] error fetching bytes from network: %s\n", gf_error_to_string(e) ) ); return -1; } full_size += read; if (read==size) break; size -= read; buf += read; } #ifdef FFMPEG_DUMP_REMOTE if (ffd->outdbg) gf_fwrite(ffd->buffer, full_size, 1, ffd->outdbg); #endif return full_size ? (int) full_size : -1; }
// -----------------------------------------------------------------------------
// COsmo4AppUi::HandleCommandL()
// Takes care of command handling.
// Dispatches Symbian UI menu/softkey commands to the playlist, the GPAC
// terminal (navigation, aspect ratio, collision options) and the config file.
// Compiled out entirely when GPAC_GUI_ONLY is defined.
// -----------------------------------------------------------------------------
//
void COsmo4AppUi::HandleCommandL( TInt aCommand )
{
	GF_Err e; /* only checked for the slide-navigation command below */
#ifndef GPAC_GUI_ONLY
	switch( aCommand ) {
	case EAknSoftkeyBack:
		/* Back closes the playlist view only when it is showing (view_mode==1) */
		if (view_mode==1) TogglePlaylist();
		break;
	case EEikCmdExit:
	case EAknSoftkeyExit:
		iAppView->Shutdown();
		Exit();
		break;

	/*PLAYLIST commands*/
	case EOsmo4PlayListAdd:
		iPlaylist->PlaylistAct(Osmo4PLAdd);
		break;
	case EOsmo4PlayListRem:
		iPlaylist->PlaylistAct(Osmo4PLRem);
		break;
	case EOsmo4PlayListMoveUp:
		iPlaylist->PlaylistAct(Osmo4PLMoveUp);
		break;
	case EOsmo4PlayListMoveDown:
		iPlaylist->PlaylistAct(Osmo4PLMoveDown);
		break;
	case EOsmo4PlayListClear:
		iPlaylist->PlaylistAct(Osmo4PLClear);
		break;
	case EOsmo4PlayListMode:
		iPlaylist->PlaylistAct(Osmo4PLToggleMode);
		break;
	case EOsmo4PlayListAllFiles:
		iPlaylist->PlaylistAct(Osmo4PLToggleAllFiles);
		break;

	/*FILE menu command*/
	case EOsmo4PlayListView:
		TogglePlaylist();
		break;
	case EOsmo4OpenURL:
		/* not implemented */
		break;
	case EOsmo4Fullscreen:
		/* not implemented */
		break;
	case EOsmo4ViewMaxSize:
	{
		/* toggle the status pane to maximize the rendering surface */
		CEikStatusPane* statusPane = StatusPane();
		if (statusPane->IsVisible()) statusPane->MakeVisible(EFalse);
		else statusPane->MakeVisible(ETrue);
	}
	break;

	/* aspect-ratio options, forwarded to the GPAC terminal */
	case EOsmo4AROriginal:
		gf_term_set_option(iAppView->m_term, GF_OPT_ASPECT_RATIO, GF_ASPECT_RATIO_KEEP);
		break;
	case EOsmo4ARFillScreen:
		gf_term_set_option(iAppView->m_term, GF_OPT_ASPECT_RATIO, GF_ASPECT_RATIO_FILL_SCREEN);
		break;
	case EOsmo4AR4_3:
		gf_term_set_option(iAppView->m_term, GF_OPT_ASPECT_RATIO, GF_ASPECT_RATIO_4_3);
		break;
	case EOsmo4AR16_9:
		gf_term_set_option(iAppView->m_term, GF_OPT_ASPECT_RATIO, GF_ASPECT_RATIO_16_9);
		break;

	/* navigation modes */
	case EOsmo4NavReset:
		gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION_TYPE, 0);
		break;
	case EOsmo4NavNone:
		gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION, GF_NAVIGATE_NONE);
		break;
	case EOsmo4NavSlide:
		/* only this navigation command checks the return code */
		e = gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION, GF_NAVIGATE_SLIDE);
		if (e) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("Cannot set navigation: %s", gf_error_to_string(e) ));
		}
		break;
	case EOsmo4NavWalk:
		gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION, GF_NAVIGATE_WALK);
		break;
	case EOsmo4NavFly:
		gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION, GF_NAVIGATE_FLY);
		break;
	case EOsmo4NavExamine:
		gf_term_set_option(iAppView->m_term, GF_OPT_NAVIGATION, GF_NAVIGATE_EXAMINE);
		break;
	case EOsmo4NavHeadlight:
		/* toggle current value */
		gf_term_set_option(iAppView->m_term, GF_OPT_HEADLIGHT, !gf_term_get_option(iAppView->m_term, GF_OPT_HEADLIGHT) );
		break;
	case EOsmo4CollideNone:
		gf_term_set_option(iAppView->m_term, GF_OPT_COLLISION, GF_COLLISION_NONE);
		break;
	case EOsmo4CollideSimple:
		gf_term_set_option(iAppView->m_term, GF_OPT_COLLISION, GF_COLLISION_NORMAL);
		break;
	case EOsmo4CollideDisp:
		gf_term_set_option(iAppView->m_term, GF_OPT_COLLISION, GF_COLLISION_DISPLACEMENT);
		break;
	case EOsmo4NavGravity:
		/* toggle current value */
		gf_term_set_option(iAppView->m_term, GF_OPT_GRAVITY, !gf_term_get_option(iAppView->m_term, GF_OPT_GRAVITY));
		break;
	case EOsmo4ViewRTI:
		iAppView->show_rti = !iAppView->show_rti;
		break;

	/* options persisted in the GPAC config file */
	case EOsmo4OptEnableLogs:
	{
		/* toggle between all@debug and all@error log levels, then reapply */
		const char *opt = gf_cfg_get_key(iAppView->m_user.config, "General", "Logs");
		if (opt && !stricmp(opt, "@debug")) {
			gf_cfg_set_key(iAppView->m_user.config, "General", "Logs", "all@error");
		} else {
			gf_cfg_set_key(iAppView->m_user.config, "General", "Logs", "all@debug");
		}
		iAppView->SetupLogs();
	}
	break;
	case EOsmo4OptOpenGL:
	{
		/* flip the config value and apply immediately to the running terminal */
		const char *opt = gf_cfg_get_key(iAppView->m_user.config, "Compositor", "ForceOpenGL");
		Bool use_gl = (opt && !strcmp(opt, "yes")) ? 1 : 0;
		gf_cfg_set_key(iAppView->m_user.config, "Compositor", "ForceOpenGL", use_gl ? "no" : "yes");
		gf_term_set_option(iAppView->m_term, GF_OPT_USE_OPENGL, !use_gl);
	}
	break;
	case EOsmo4OptDirectDraw:
	{
		const char *opt = gf_cfg_get_key(iAppView->m_user.config, "Compositor", "DirectDraw");
		Bool use_dd = (opt && !strcmp(opt, "yes")) ? 1 : 0;
		gf_cfg_set_key(iAppView->m_user.config, "Compositor", "DirectDraw", use_dd ? "no" : "yes");
		gf_term_set_option(iAppView->m_term, GF_OPT_DIRECT_DRAW, !use_dd);
	}
	break;
	case EOsmo4OptXMLProgressive:
	{
		const char *opt = gf_cfg_get_key(iAppView->m_user.config, "SAXLoader", "Progressive");
		Bool use_prog = (opt && !strcmp(opt, "yes")) ? 1 : 0;
		gf_cfg_set_key(iAppView->m_user.config, "SAXLoader", "Progressive", use_prog ? "no" : "yes");
		gf_cfg_set_key(iAppView->m_user.config, "SAXLoader", "MaxDuration", "100");
	}
	break;
	default:
		/* recent-files menu entries map onto a contiguous command-id range */
		if ((aCommand>=EOsmo4OpenRecentFirst) && (aCommand<=EOsmo4OpenRecentLast)) {
			const char *sOpt = gf_cfg_get_key_name(iAppView->m_user.config, "RecentFiles", aCommand - EOsmo4OpenRecentFirst);
			if (sOpt) iAppView->Connect(sOpt);
		} else {
			/* NOTE(review): "Unandled" typo lives in a user-visible runtime string */
			iAppView->MessageBox("Unandled command - panic", "Osmo4");
			Panic( EOsmo4Ui );
		}
		break;
	}
#endif
}
/* GPAC terminal event callback for the Windows/WinCE player.
 * Updates player-wide globals (Duration, CanSeek, is_connected, navigation_on,
 * playlist_act, ctrl_mod_down — declared elsewhere in this file) and drives the
 * UI layout. Returns 1 when the event was fully handled (navigation), 0 otherwise. */
Bool GPAC_EventProc(void *ptr, GF_Event *evt)
{
	switch (evt->type) {
	case GF_EVENT_DURATION:
		/* duration is reported in seconds; keep it in milliseconds */
		Duration = (u32) (evt->duration.duration*1000);
		CanSeek = evt->duration.can_seek;
		break;
	case GF_EVENT_MESSAGE:
	{
		if (!evt->message.message) return 0;
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONSOLE, ("%s: %s\n", evt->message.message, gf_error_to_string(evt->message.error)));
		//set_status((char *) evt->message.message);
	}
	break;
	case GF_EVENT_PROGRESS:
	{
		/* progress_type: 0 = buffering, 1 = download, 2 = import */
		char *szTitle = "";
		if (evt->progress.progress_type==0) szTitle = "Buffer ";
		else if (evt->progress.progress_type==1) szTitle = "Download ";
		else if (evt->progress.progress_type==2) szTitle = "Import ";
		cbk_on_progress(szTitle, evt->progress.done, evt->progress.total);
	}
	break;
	case GF_EVENT_SIZE:
		break;
	case GF_EVENT_RESOLUTION:
		/* display resolution changed: recompute and relayout */
		recompute_res(evt->size.width, evt->size.height);
		do_layout(1);
		break;
	case GF_EVENT_SCENE_SIZE:
		do_layout(1);
		break;
	case GF_EVENT_DBLCLICK:
		set_full_screen();
		return 0;
	case GF_EVENT_CONNECT:
		if (evt->connect.is_connected) {
			is_connected = 1;
			if (!backlight_off) set_backlight_state(1);
			refresh_recent_files();
			/* enable navigation UI only if the content supports it */
			navigation_on = (gf_term_get_option(term, GF_OPT_NAVIGATION)==GF_NAVIGATE_NONE) ? 0 : 1;
		} else {
			navigation_on = 0;
			is_connected = 0;
			Duration = 0;
		}
		break;
	case GF_EVENT_EOS:
		/* loop content longer than 2s back to the start */
		if (Duration>2000) gf_term_play_from_time(term, 0, 0);
		break;
	case GF_EVENT_QUIT:
		PostMessage(g_hwnd, WM_DESTROY, 0, 0);
		break;
	case GF_EVENT_KEYDOWN:
		switch (evt->key.key_code) {
		case GF_KEY_ENTER:
			if (full_screen) set_full_screen();
			break;
		case GF_KEY_1:
			/* key "1" emulates a sticky CTRL modifier: rewrite the event and
			   feed it back to the terminal as a CONTROL key down/up */
			ctrl_mod_down = !ctrl_mod_down;
			evt->key.key_code = GF_KEY_CONTROL;
			evt->type = ctrl_mod_down ? GF_EVENT_KEYDOWN : GF_EVENT_KEYUP;
			gf_term_user_event(term, evt);
			break;
		case GF_KEY_MEDIAPREVIOUSTRACK:
			playlist_act = 2;
			break;
		case GF_KEY_MEDIANEXTTRACK:
			playlist_act = 1;
			break;
		}
		break;
	case GF_EVENT_NAVIGATE:
		if (gf_term_is_supported_url(term, evt->navigate.to_url, 1, 1)) {
			/* GPAC can play this URL itself */
			gf_term_navigate_to(term, evt->navigate.to_url);
			return 1;
		} else {
			/* hand the URL off to Internet Explorer via ShellExecuteEx */
#ifdef _WIN32_WCE
			u16 dst[1024];
#endif
			SHELLEXECUTEINFO info;
			/*
			if (full_screen) gf_term_set_option(term, GF_OPT_FULLSCREEN, 0);
			full_screen = 0;
			*/
			memset(&info, 0, sizeof(SHELLEXECUTEINFO));
			info.cbSize = sizeof(SHELLEXECUTEINFO);
			info.lpVerb = _T("open");
			info.fMask = SEE_MASK_NOCLOSEPROCESS;
			info.lpFile = _T("iexplore");
#ifdef _WIN32_WCE
			/* WinCE shell APIs take wide strings: convert the UTF-8 URL */
			CE_CharToWide((char *) evt->navigate.to_url, dst);
			info.lpParameters = (LPCTSTR) dst;
#else
			info.lpParameters = evt->navigate.to_url;
#endif
			info.nShow = SW_SHOWNORMAL;
			ShellExecuteEx(&info);
		}
		return 1;
	}
	return 0;
}
/* Runs one media-manager simulation step: services first, then a round-robin
 * pass over all non-threaded decoders, giving each a priority-weighted slice
 * of the frame budget. Sets *nb_active_decs to the number of decoders called
 * and returns the unspent time (ms) out of term->frame_duration.
 * NOTE(review): guards mix GF_DISABLE_LOG and GPAC_DISABLE_LOG — if only
 * GPAC_DISABLE_LOG is the real build macro, the GF_DISABLE_LOG blocks are
 * always compiled in; confirm which symbol the build defines. */
static u32 MM_SimulationStep_Decoder(GF_Terminal *term, u32 *nb_active_decs)
{
	CodecEntry *ce;
	GF_Err e;
	u32 count, remain;
	u32 time_taken, time_slice, time_left;

#ifndef GF_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock();
#endif
	//	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Media Manager] Entering simultion step\n"));
	gf_term_handle_services(term);
#ifndef GF_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock() - term->compositor->networks_time;
#endif

#ifndef GF_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock();
#endif
	gf_mx_p(term->mm_mx);

	count = gf_list_count(term->codecs);
	time_left = term->frame_duration; /* total decode budget for this step, in ms */
	*nb_active_decs = 0;

	/* resume the round-robin where the previous step stopped */
	if (term->last_codec >= count) term->last_codec = 0;
	remain = count;
	/*this is ultra basic a nice scheduling system would be much better*/
	while (remain) {
		ce = (CodecEntry*)gf_list_get(term->codecs, term->last_codec);
		if (!ce) break;

		/* skip stopped codecs, codecs running on their own thread, and codecs
		   waiting for a composition-buffer resize */
		if (!(ce->flags & GF_MM_CE_RUNNING) || (ce->flags & GF_MM_CE_THREADED) || ce->dec->force_cb_resize) {
			remain--;
			if (!remain) break;
			term->last_codec = (term->last_codec + 1) % count;
			continue;
		}
		/* slice proportional to this codec's priority share of the remaining budget.
		   NOTE(review): divides by term->cumulated_priority — presumably non-zero
		   whenever a runnable codec exists; confirm */
		time_slice = ce->dec->Priority * time_left / term->cumulated_priority;
		if (ce->dec->PriorityBoost) time_slice *= 2;
		time_taken = gf_sys_clock();
		(*nb_active_decs) ++;
		e = gf_codec_process(ce->dec, time_slice);
		time_taken = gf_sys_clock() - time_taken;
		/*avoid signaling errors too often...*/
#ifndef GPAC_DISABLE_LOG
		if (e) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CODEC, ("[ODM%d] Decoding Error %s\n", ce->dec->odm->OD->objectDescriptorID, gf_error_to_string(e) ));
		} else {
			//GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Decode time slice %d ms out of %d ms\n", ce->dec->decio ? ce->dec->decio->module_name : "RAW", time_taken, time_left ));
		}
#endif
		if (ce->flags & GF_MM_CE_DISCARDED) {
			/* codec was flagged for removal during processing: drop it from the list.
			   NOTE(review): the subsequent (last_codec+1)%count also skips the entry
			   that shifted into this slot — confirm intended */
			gf_free(ce);
			gf_list_rem(term->codecs, term->last_codec);
			count--;
			if (!count) break;
		} else {
			/* boost is cleared once the composition buffer holds its minimum units */
			if (ce->dec->CB && (ce->dec->CB->UnitCount >= ce->dec->CB->Min)) ce->dec->PriorityBoost = 0;
		}
		term->last_codec = (term->last_codec + 1) % count;
		remain -= 1;
		/* stop as soon as the frame budget is exhausted */
		if (time_left > time_taken) {
			time_left -= time_taken;
			if (!remain) break;
		} else {
			time_left = 0;
			break;
		}
	}
	gf_mx_v(term->mm_mx);
#ifndef GF_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock() - term->compositor->decoders_time;
#endif
	return time_left;
}