/* Flushes the presentation in caller-driven mode: decodes and composes frames
   until the compositor reports a mature (stable) frame.
   Only valid when the terminal was created without a compositor thread
   (GF_TERM_NO_COMPOSITOR_THREAD), i.e. the caller owns the rendering loop;
   returns GF_BAD_PARAM otherwise. Returns GF_OK once flushed. */
GF_EXPORT
GF_Err gf_term_process_flush(GF_Terminal *term)
{
	u32 i;
	CodecEntry *ce;
	/* refuse if a dedicated compositor thread is running - it owns the draw loop */
	if (!(term->flags & GF_TERM_NO_COMPOSITOR_THREAD) ) return GF_BAD_PARAM;
	/*update till frame mature*/
	while (1) {
		/* when decoding is also caller-driven, pump network services and run
		   every decoder once under the media-manager mutex */
		if (term->flags & GF_TERM_NO_DECODER_THREAD) {
			gf_term_handle_services(term);
			gf_mx_p(term->mm_mx);
			i=0;
			while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
				/* 10000: decode time budget passed to the codec for this pass */
				gf_codec_process(ce->dec, 10000);
			}
			gf_mx_v(term->mm_mx);
		}
		/* draw_frame returns 0 when the frame is mature - flush complete */
		if (!gf_sc_draw_frame(term->compositor, NULL))
			break;
		/* with regulation enabled, do a single pass only */
		if (! (term->user->init_flags & GF_TERM_NO_REGULATION)) break;
	}
	return GF_OK;
}
/* Media-manager thread body. Depending on the terminal init flags this thread
   runs decoding (unless GF_TERM_NO_DECODER_THREAD), compositing (when
   GF_TERM_NO_VISUAL_THREAD folds the visual step into this thread), and frame
   regulation (sleeping away the unused frame budget). Loops until
   GF_TERM_RUNNING is cleared, then marks the terminal GF_TERM_DEAD. */
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	/* duties assigned to this thread, derived from the init flags */
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
	// GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		/* 'left' = time budget (ms) remaining in this frame period */
		u32 left;
		if (do_codec) left = MM_SimulationStep_Decoder(term);
		else left = term->frame_duration;

		if (do_scene) {
			/* subtract the compositing time from the remaining budget */
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor);
			time_taken = gf_sys_clock() - time_taken;
			if (left>time_taken) left -= time_taken;
			else left = 0;
		}
		/* sleep away whatever budget is left to hold the target frame rate */
		if (do_regulate) gf_sleep(left);
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
/* Runs one caller-driven simulation step (decode pass and/or frame draw,
   depending on which threads the terminal was created without) and returns the
   estimated time in ms until the next frame is due. Unless bench mode or
   GF_TERM_NO_REGULATION is set, also sleeps to regulate the frame rate. */
GF_EXPORT
u32 gf_term_process_step(GF_Terminal *term)
{
	u32 nb_decs=0;
	u32 time_taken = gf_sys_clock();

	/* decoding is caller-driven: run one decoder pass, count active decoders */
	if (term->flags & GF_TERM_NO_DECODER_THREAD) {
		MM_SimulationStep_Decoder(term, &nb_decs);
	}

	if (term->flags & GF_TERM_NO_COMPOSITOR_THREAD) {
		u32 ms_until_next;
		gf_sc_draw_frame(term->compositor, &ms_until_next);
		/* next frame is nearly due: zero the start timestamp so the elapsed
		   computation below overflows the frame duration and clamps the
		   remaining time to 0, scheduling the next step ASAP */
		if (ms_until_next<term->compositor->frame_duration/2) {
			time_taken=0;
		}
	}
	/* convert start timestamp into elapsed time, then into remaining budget */
	time_taken = gf_sys_clock() - time_taken;
	if (time_taken > term->compositor->frame_duration) {
		time_taken = 0;
	} else {
		time_taken = term->compositor->frame_duration - time_taken;
	}
	/* no regulation requested: report the remaining time and return at once */
	if (term->bench_mode || (term->user->init_flags & GF_TERM_NO_REGULATION)) return time_taken;

	/* only sleep when at least half the frame period is left; with active
	   decoders sleep minimally so composition memory can be refilled */
	if (2*time_taken >= term->compositor->frame_duration) {
		gf_sleep(nb_decs ? 1 : time_taken);
	}
	return time_taken;
}
void wxGPACPanel::Update() { if (m_term) { //gf_term_set_option(m_term, GF_OPT_PLAY_STATE, GF_STATE_STEP_PAUSE); gf_sc_invalidate(m_term->compositor, NULL); gf_sc_draw_frame(m_term->compositor); } }
/* Draws one compositor frame and returns how much of the caller's time
   budget (time_left, in ms) remains after the draw; 0 when the draw took
   longer than the budget. */
u32 MM_SimulationStep_Compositor(GF_Terminal *term, u32 time_left)
{
	u32 start, elapsed;

	start = gf_sys_clock();
	gf_sc_draw_frame(term->compositor);
	elapsed = gf_sys_clock() - start;

	return (time_left > elapsed) ? (time_left - elapsed) : 0;
}
/* Media-manager thread body (variant reporting the active-decoder count).
   Depending on init flags, this thread runs decoding, compositing and frame
   regulation. Loops until GF_TERM_RUNNING is cleared, then marks the terminal
   GF_TERM_DEAD. */
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	/* duties assigned to this thread, derived from the init flags */
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
	// GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		u32 nb_decs = 0;
		/* 'left' = time budget (ms) remaining in this frame period */
		u32 left = 0;
		if (do_codec) left = MM_SimulationStep_Decoder(term, &nb_decs);
		else left = term->frame_duration;

		if (do_scene) {
			u32 ms_until_next=0;
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor, &ms_until_next);
			time_taken = gf_sys_clock() - time_taken;
			/* next frame nearly due: drop the budget so we loop immediately */
			if (ms_until_next<term->frame_duration/2) {
				left = 0;
			} else if (left>time_taken) left -= time_taken;
			else left = 0;
		}
		if (do_regulate) {
			if (term->bench_mode) {
				/* bench mode: just yield, never throttle */
				gf_sleep(0);
			} else {
				if (left==term->frame_duration) {
					//if nothing was done during this pass but we have active decoder, just yield. We don't want to sleep since
					//composition memory could be released at any time. We should have a signal here, rather than a wait
					gf_sleep(nb_decs ? 0 : term->frame_duration/2);
				}
			}
		}
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
/* Flushes the presentation in caller-driven mode: decodes and composes until
   the frame is mature AND the media pipeline (media queue, pending audio,
   scene clocks) has drained. Only valid without a compositor thread
   (GF_TERM_NO_COMPOSITOR_THREAD); returns GF_BAD_PARAM otherwise. */
GF_EXPORT
GF_Err gf_term_process_flush(GF_Terminal *term)
{
	u32 i;
	CodecEntry *ce;
	/* refuse if a dedicated compositor thread is running - it owns the draw loop */
	if (!(term->flags & GF_TERM_NO_COMPOSITOR_THREAD) ) return GF_BAD_PARAM;
	/*update till frame mature*/
	while (1) {
		/* when decoding is also caller-driven, pump network services and run
		   every decoder once under the media-manager mutex */
		if (term->flags & GF_TERM_NO_DECODER_THREAD) {
			gf_term_handle_services(term);
			gf_mx_p(term->mm_mx);
			i=0;
			while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
				/* 10000: decode time budget passed to the codec for this pass */
				gf_codec_process(ce->dec, 10000);
			}
			gf_mx_v(term->mm_mx);
		}
		/* frame is mature; now make sure the rest of the pipeline is drained */
		if (!gf_sc_draw_frame(term->compositor, 1, NULL)) {
			if (!term->root_scene || !term->root_scene->root_od) break;
			/* media objects still queued for setup/play */
			if (gf_list_count(term->media_queue) ) continue;
			//wait for audio to be flushed
			if (gf_sc_check_audio_pending(term->compositor) ) continue;
			//force end of buffer
			if (gf_scene_check_clocks(term->root_scene->root_od->net_service, term->root_scene, 1)) break;
		}
		/* with regulation enabled, do a single pass only */
		if (! (term->user->init_flags & GF_TERM_NO_REGULATION)) break;
	}
	return GF_OK;
}
/* Runs one caller-driven simulation step (decode pass and/or frame draw,
   depending on which threads the terminal lacks) and returns the remaining
   frame budget in ms. Unless GF_TERM_NO_REGULATION is set, sleeps away that
   budget before returning to hold the target frame rate. */
GF_EXPORT
u32 gf_term_process_step(GF_Terminal *term)
{
	u32 elapsed, remaining;
	u32 start = gf_sys_clock();

	/* decoding is caller-driven: run one decoder pass */
	if (term->flags & GF_TERM_NO_DECODER_THREAD) {
		MM_SimulationStep_Decoder(term);
	}

	/* compositing is caller-driven: draw one frame */
	if (term->flags & GF_TERM_NO_COMPOSITOR_THREAD) {
		gf_sc_draw_frame(term->compositor);
	}

	/* remaining budget = frame duration minus time spent, clamped at 0 */
	elapsed = gf_sys_clock() - start;
	remaining = (elapsed > term->compositor->frame_duration)
	            ? 0
	            : (term->compositor->frame_duration - elapsed);

	if (term->user->init_flags & GF_TERM_NO_REGULATION)
		return remaining;

	gf_sleep(remaining);
	return remaining;
}
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time) { GF_User user; BIFSVID b2v; u16 es_id; Bool first_dump, needs_raw; u32 i, j, di, count, timescale, frameNum; u32 duration, cur_time; GF_VideoSurface fb; GF_Err e; char old_driv[1024]; const char *test; char config_path[GF_MAX_PATH]; avi_t *avi_out; Bool reset_fps; GF_ESD *esd; char comp[5]; char *conv_buf; memset(&user, 0, sizeof(GF_User)); if (szConfigFile && strlen(szConfigFile)) { user.config = gf_cfg_init(config_path, NULL); } else { user.config = gf_cfg_init(NULL, NULL); } if (!user.config) { fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path); return; } avi_out = NULL; conv_buf = NULL; esd = NULL; needs_raw = 0; test = gf_cfg_get_key(user.config, "General", "ModulesDirectory"); user.modules = gf_modules_new((const unsigned char *) test, user.config); strcpy(old_driv, "raw_out"); if (!gf_modules_get_count(user.modules)) { printf("Error: no modules found\n"); goto err_exit; } /*switch driver to raw_driver*/ test = gf_cfg_get_key(user.config, "Video", "DriverName"); if (test) strcpy(old_driv, test); test = gf_cfg_get_key(user.config, "Compositor", "RendererName"); /*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/ if (test && strstr(test, "2D")) { gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output"); needs_raw = 1; } needs_raw = 0; user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D; b2v.sr = gf_sc_new(&user, 0, NULL); gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0); b2v.sg = gf_sg_new(); gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v); gf_sg_set_init_callback(b2v.sg, node_init, &b2v); gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v); /*load config*/ gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1); b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0); if (needs_raw) { test = gf_cfg_get_key(user.config, 
"Video", "DriverName"); if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) { printf("couldn't load raw output driver (%s used)\n", test); goto err_exit; } } strcpy(config_path, ""); if (out_dir) { strcat(config_path, out_dir); if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\"); } strcat(config_path, rad_name); strcat(config_path, "_bifs"); if (!dump_type) { strcat(config_path, ".avi"); avi_out = AVI_open_output_file(config_path); comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0; if (!avi_out) goto err_exit; } for (i=0; i<gf_isom_get_track_count(file); i++) { esd = gf_isom_get_esd(file, i+1, 1); if (!esd) continue; if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break; gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; } if (!esd) { printf("no bifs track found\n"); goto err_exit; } b2v.duration = gf_isom_get_media_duration(file, i+1); timescale = gf_isom_get_media_timescale(file, i+1); es_id = (u16) gf_isom_get_track_id(file, i+1); e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication); if (e) { printf("BIFS init error %s\n", gf_error_to_string(e)); gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; goto err_exit; } if (dump_time>=0) dump_time = dump_time *1000 / timescale; gf_sc_set_scene(b2v.sr, b2v.sg); count = gf_isom_get_sample_count(file, i+1); reset_fps = 0; if (!fps) { fps = (Float) (count * timescale); fps /= (Double) (s64) b2v.duration; printf("Estimated BIFS FrameRate %g\n", fps); reset_fps = 1; } if (!width || !height) { gf_sg_get_scene_size_info(b2v.sg, &width, &height); } /*we work in RGB24, and we must make sure the pitch is %4*/ if ((width*3)%4) { printf("Adjusting width (%d) to have a stride multiple of 4\n", width); while ((width*3)%4) width--; } gf_sc_set_size(b2v.sr, width, height); gf_sc_draw_frame(b2v.sr); 
gf_sc_get_screen_buffer(b2v.sr, &fb); width = fb.width; height = fb.height; if (avi_out) { AVI_set_video(avi_out, width, height, fps, comp); conv_buf = gf_malloc(sizeof(char) * width * height * 3); } printf("Dumping at BIFS resolution %d x %d\n\n", width, height); gf_sc_release_screen_buffer(b2v.sr, &fb); cur_time = 0; duration = (u32)(timescale / fps); if (reset_fps) fps = 0; frameNum = 1; first_dump = 1; for (j=0; j<count; j++) { GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di); b2v.cts = samp->DTS + samp->CTS_Offset; /*apply command*/ gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0); gf_isom_sample_del(&samp); if ((frameID>=0) && (j<(u32)frameID)) continue; if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue; /*render frame*/ gf_sc_draw_frame(b2v.sr); /*needed for background2D !!*/ if (first_dump) { gf_sc_draw_frame(b2v.sr); first_dump = 0; } if (fps) { if (cur_time > b2v.cts) continue; while (1) { printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1); dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum); frameNum++; cur_time += duration; if (cur_time > b2v.cts) break; } } else { dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum); if (frameID>=0 || dump_time>=0) break; frameNum++; printf("dumped frame %d / %d\r", j+1, count); } } gf_odf_desc_del((GF_Descriptor *) esd); /*destroy everything*/ gf_bifs_decoder_del(b2v.bifs); gf_sg_del(b2v.sg); gf_sc_set_scene(b2v.sr, NULL); gf_sc_del(b2v.sr); err_exit: if (avi_out) AVI_close(avi_out); if (conv_buf) gf_free(conv_buf); if (user.modules) gf_modules_del(user.modules); if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv); gf_cfg_del(user.config); }
/*generates an intertwined bmp from a scene file with 5 different viewpoints*/ void bifs3d_viewpoints_merger(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time) { GF_User user; char out_path[GF_MAX_PATH]; char old_driv[1024]; BIFSVID b2v; Bool needs_raw; GF_Err e; GF_VideoSurface fb; unsigned char **rendered_frames; u32 nb_viewpoints = 5; u32 viewpoint_index; /* Configuration of the Rendering Capabilities */ { const char *test; char config_path[GF_MAX_PATH]; memset(&user, 0, sizeof(GF_User)); user.config = gf_cfg_init(szConfigFile, NULL); if (!user.config) { fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path); return; } test = gf_cfg_get_key(user.config, "General", "ModulesDirectory"); user.modules = gf_modules_new((const unsigned char *) test, user.config); strcpy(old_driv, "raw_out"); if (!gf_modules_get_count(user.modules)) { printf("Error: no modules found\n"); goto err_exit; } /*switch driver to raw_driver*/ test = gf_cfg_get_key(user.config, "Video", "DriverName"); if (test) strcpy(old_driv, test); needs_raw = 0; test = gf_cfg_get_key(user.config, "Compositor", "RendererName"); /*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/ if (test && strstr(test, "2D")) { gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output"); needs_raw = 1; } if (needs_raw) { test = gf_cfg_get_key(user.config, "Video", "DriverName"); if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) { printf("couldn't load raw output driver (%s used)\n", test); goto err_exit; } } } memset(&b2v, 0, sizeof(BIFSVID)); user.init_flags = GF_TERM_NO_AUDIO; /* Initialization of the compositor */ b2v.sr = gf_sc_new(&user, 0, NULL); gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0); /* Initialization of the scene graph */ b2v.sg = gf_sg_new(); gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v); 
gf_sg_set_init_callback(b2v.sg, node_init, &b2v); gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v); /*load config*/ gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1); { u32 di; u32 track_number; GF_ESD *esd; u16 es_id; b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0); for (track_number=0; track_number<gf_isom_get_track_count(file); track_number++) { esd = gf_isom_get_esd(file, track_number+1, 1); if (!esd) continue; if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break; gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; } if (!esd) { printf("no bifs track found\n"); goto err_exit; } es_id = (u16) gf_isom_get_track_id(file, track_number+1); e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication); if (e) { printf("BIFS init error %s\n", gf_error_to_string(e)); gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; goto err_exit; } { GF_ISOSample *samp = gf_isom_get_sample(file, track_number+1, 1, &di); b2v.cts = samp->DTS + samp->CTS_Offset; /*apply command*/ gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0); gf_isom_sample_del(&samp); } b2v.duration = gf_isom_get_media_duration(file, track_number+1); gf_odf_desc_del((GF_Descriptor *) esd); } gf_sc_set_scene(b2v.sr, b2v.sg); if (!width || !height) { gf_sg_get_scene_size_info(b2v.sg, &width, &height); } /*we work in RGB24, and we must make sure the pitch is %4*/ if ((width*3)%4) { printf("Adjusting width (%d) to have a stride multiple of 4\n", width); while ((width*3)%4) width--; } gf_sc_set_size(b2v.sr, width, height); gf_sc_get_screen_buffer(b2v.sr, &fb); width = fb.width; height = fb.height; gf_sc_release_screen_buffer(b2v.sr, &fb); GF_SAFEALLOC(rendered_frames, nb_viewpoints*sizeof(char *)); for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) { 
GF_SAFEALLOC(rendered_frames[viewpoint_index-1], fb.width*fb.height*3); gf_sc_set_viewpoint(b2v.sr, viewpoint_index, NULL); gf_sc_draw_frame(b2v.sr); /*needed for background2D !!*/ gf_sc_draw_frame(b2v.sr); strcpy(out_path, ""); if (out_dir) { strcat(out_path, out_dir); if (out_path[strlen(out_path)-1] != '\\') strcat(out_path, "\\"); } strcat(out_path, rad_name); strcat(out_path, "_view"); gf_sc_get_screen_buffer(b2v.sr, &fb); write_bmp(&fb, out_path, viewpoint_index); memcpy(rendered_frames[viewpoint_index-1], fb.video_buffer, fb.width*fb.height*3); gf_sc_release_screen_buffer(b2v.sr, &fb); } if (width != 800 || height != 480) { printf("Wrong scene dimension, cannot produce output\n"); goto err_exit; } else { u32 x, y; GF_VideoSurface out_fb; u32 bpp = 3; out_fb.width = 800; out_fb.height = 480; out_fb.pitch = 800*bpp; out_fb.pixel_format = GF_PIXEL_RGB_24; out_fb.is_hardware_memory = 0; GF_SAFEALLOC(out_fb.video_buffer, out_fb.pitch*out_fb.height) #if 1 for (y=0; y<out_fb.height; y++) { /*starting red pixel is R1, R5, R4, R3, R2, R1, R5, ... when increasing line num*/ u32 line_shift = (5-y) % 5; for (x=0; x<out_fb.width; x++) { u32 view_shift = (line_shift+bpp*x)%5; u32 offset = out_fb.pitch*y + x*bpp; /* red */ out_fb.video_buffer[offset] = rendered_frames[view_shift][offset]; /* green */ out_fb.video_buffer[offset+1] = rendered_frames[(view_shift+1)%5][offset+1]; /* blue */ out_fb.video_buffer[offset+2] = rendered_frames[(view_shift+2)%5][offset+2]; } } #else /*calibration*/ for (y=0; y<out_fb.height; y++) { u32 line_shift = (5- y%5) % 5; for (x=0; x<out_fb.width; x++) { u32 view_shift = (line_shift+bpp*x)%5; u32 offset = out_fb.pitch*y + x*bpp; out_fb.video_buffer[offset] = ((view_shift)%5 == 2) ? 0xFF : 0; out_fb.video_buffer[offset+1] = ((view_shift+1)%5 == 2) ? 0xFF : 0; out_fb.video_buffer[offset+2] = ((view_shift+2)%5 == 2) ? 
0xFF : 0; } } #endif write_bmp(&out_fb, "output", 0); } /*destroy everything*/ gf_bifs_decoder_del(b2v.bifs); gf_sg_del(b2v.sg); gf_sc_set_scene(b2v.sr, NULL); gf_sc_del(b2v.sr); err_exit: /* if (rendered_frames) { for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) { if (rendered_frames[viewpoint_index-1]) gf_free(rendered_frames[viewpoint_index-1]); } gf_free(rendered_frames); } if (output_merged_frame) gf_free(output_merged_frame); */ if (user.modules) gf_modules_del(user.modules); if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv); gf_cfg_del(user.config); }