Example #1
File: main.c Project: Bevara/GPAC
/*generates an interleaved BMP from a scene file rendered from 5 different viewpoints*/
void bifs3d_viewpoints_merger(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	char out_path[GF_MAX_PATH];
	char old_driv[1024];
	BIFSVID b2v;
	Bool needs_raw = 0;
	GF_Err e;
	GF_VideoSurface fb;
	unsigned char **rendered_frames;
	u32 nb_viewpoints = 5;
	u32 viewpoint_index;


	/* Configuration of the Rendering Capabilities */
	{
		const char *test;
		memset(&user, 0, sizeof(GF_User));
		user.config = gf_cfg_init(szConfigFile, NULL);

		if (!user.config) {
			fprintf(stdout, "Error: Configuration File \"%s\" not found\n", szConfigFile ? szConfigFile : GPAC_CFG_FILE);
			return;
		}

		test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
		user.modules = gf_modules_new((const unsigned char *) test, user.config);
		strcpy(old_driv, "raw_out");
		if (!gf_modules_get_count(user.modules)) {
			printf("Error: no modules found\n");
			goto err_exit;
		}

		/*switch driver to raw_driver*/
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (test) strcpy(old_driv, test);

		needs_raw = 0;
		test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
		/*since we only support RGB24 for MP42AVI, force the raw output driver when a 2D renderer is used*/
		if (test && strstr(test, "2D")) {
			gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
			needs_raw = 1;
		}
		if (needs_raw) {
			test = gf_cfg_get_key(user.config, "Video", "DriverName");
			if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
				printf("couldn't load raw output driver (%s used)\n", test);
				goto err_exit;
			}
		}
	}

	memset(&b2v, 0, sizeof(BIFSVID));
	user.init_flags = GF_TERM_NO_AUDIO;
	/* Initialization of the compositor */
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	/* Initialization of the scene graph */
	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

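	/*locate the BIFS scene track, configure the BIFS decoder from its ESD and apply the first access unit*/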
	{
		u32 di;
		u32 track_number;
		GF_ESD *esd;
		u16 es_id;
		b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

		for (track_number=0; track_number<gf_isom_get_track_count(file); track_number++) {
			esd = gf_isom_get_esd(file, track_number+1, 1);
			if (!esd) continue;
			if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
		}
		if (!esd) {
			printf("no bifs track found\n");
			goto err_exit;
		}

		es_id = (u16) gf_isom_get_track_id(file, track_number+1);
		e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
		if (e) {
			printf("BIFS init error %s\n", gf_error_to_string(e));
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
			goto err_exit;
		}

		{
			GF_ISOSample *samp = gf_isom_get_sample(file, track_number+1, 1, &di);
			b2v.cts = samp->DTS + samp->CTS_Offset;
			/*apply command*/
			gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
			gf_isom_sample_del(&samp);
		}

		b2v.duration = gf_isom_get_media_duration(file, track_number+1);

		gf_odf_desc_del((GF_Descriptor *) esd);

	}
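	/*attach the decoded scene graph to the compositor*/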
	gf_sc_set_scene(b2v.sr, b2v.sg);

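	/*if no output size was given, use the size declared in the scene*/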
	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24 and must make sure the stride (width*3) is a multiple of 4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}
	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	gf_sc_release_screen_buffer(b2v.sr, &fb);

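	/*render the scene once per viewpoint, dump each view as a BMP and keep its raw RGB24 buffer for the final interleaving*/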
	GF_SAFEALLOC(rendered_frames, nb_viewpoints*sizeof(char *));
	for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
		GF_SAFEALLOC(rendered_frames[viewpoint_index-1], fb.width*fb.height*3);
		gf_sc_set_viewpoint(b2v.sr, viewpoint_index, NULL);
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		gf_sc_draw_frame(b2v.sr);
		strcpy(out_path, "");
		if (out_dir) {
			strcat(out_path, out_dir);
			if (out_path[strlen(out_path)-1] != '\\') strcat(out_path, "\\");
		}
		strcat(out_path, rad_name);
		strcat(out_path, "_view");
		gf_sc_get_screen_buffer(b2v.sr, &fb);
		write_bmp(&fb, out_path, viewpoint_index);
		memcpy(rendered_frames[viewpoint_index-1], fb.video_buffer, fb.width*fb.height*3);
		gf_sc_release_screen_buffer(b2v.sr, &fb);
	}

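	/*interleave the 5 views into a single 800x480 RGB24 frame: each colour sub-pixel of an output pixel
	  comes from a different rendered view, the view index advancing with the sub-pixel position and
	  shifting by one on every line*/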
	if (width != 800 || height != 480) {
		printf("Wrong scene dimension, cannot produce output\n");
		goto err_exit;
	} else {
		u32 x, y;
		GF_VideoSurface out_fb;
		u32 bpp = 3;
		out_fb.width = 800;
		out_fb.height = 480;
		out_fb.pitch = 800*bpp;
		out_fb.pixel_format = GF_PIXEL_RGB_24;
		out_fb.is_hardware_memory = 0;
		GF_SAFEALLOC(out_fb.video_buffer, out_fb.pitch*out_fb.height);
#if 1
		for (y=0; y<out_fb.height; y++) {
			/*starting red pixel is R1, R5, R4, R3, R2, R1, R5, ... when increasing line num*/
			u32 line_shift = (5 - y%5) % 5;
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				/* red */
				out_fb.video_buffer[offset] = rendered_frames[view_shift][offset];
				/* green */
				out_fb.video_buffer[offset+1] = rendered_frames[(view_shift+1)%5][offset+1];
				/* blue */
				out_fb.video_buffer[offset+2] = rendered_frames[(view_shift+2)%5][offset+2];
			}
		}
#else
		/*calibration*/
		for (y=0; y<out_fb.height; y++) {
			u32 line_shift = (5- y%5) % 5;
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				out_fb.video_buffer[offset] = ((view_shift)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+1] = ((view_shift+1)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+2] = ((view_shift+2)%5 == 2) ? 0xFF : 0;
			}
		}
#endif
		write_bmp(&out_fb, "output", 0);
	}

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);



err_exit:
	/*	if (rendered_frames) {
			for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
				if (rendered_frames[viewpoint_index-1]) gf_free(rendered_frames[viewpoint_index-1]);
			}
			gf_free(rendered_frames);
		}
		if (output_merged_frame) gf_free(output_merged_frame);
	*/
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example #2
File: main.c Project: Bevara/GPAC
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	BIFSVID b2v;
	u16 es_id;
	Bool first_dump, needs_raw;
	u32 i, j, di, count, timescale, frameNum;
	u32 duration, cur_time;
	GF_VideoSurface fb;
	GF_Err e;
	char old_driv[1024];
	const char *test;
	char config_path[GF_MAX_PATH];
	avi_t *avi_out;
	Bool reset_fps;
	GF_ESD *esd;
	char comp[5];
	char *conv_buf;

	memset(&user, 0, sizeof(GF_User));
	if (szConfigFile && strlen(szConfigFile)) {
		user.config = gf_cfg_init(szConfigFile, NULL);
	} else {
		user.config = gf_cfg_init(NULL, NULL);
	}

	if (!user.config) {
		fprintf(stdout, "Error: Configuration File \"%s\" not found\n", szConfigFile ? szConfigFile : GPAC_CFG_FILE);
		return;
	}
	avi_out = NULL;
	conv_buf = NULL;
	esd = NULL;
	needs_raw = 0;
	test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
	user.modules = gf_modules_new((const unsigned char *) test, user.config);
	strcpy(old_driv, "raw_out");
	if (!gf_modules_get_count(user.modules)) {
		printf("Error: no modules found\n");
		goto err_exit;
	}

	/*switch driver to raw_driver*/
	test = gf_cfg_get_key(user.config, "Video", "DriverName");
	if (test) strcpy(old_driv, test);

	test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
	/*since we only support RGB24 for MP42AVI, force the raw output driver when a 2D renderer is used*/
	if (test && strstr(test, "2D")) {
		gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
		needs_raw = 1;
	}

	needs_raw = 0;
	user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D;
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

	if (needs_raw) {
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
			printf("couldn't load raw output driver (%s used)\n", test);
			goto err_exit;
		}
	}

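	/*build the output base name <out_dir>\<rad_name>_bifs and open the AVI container when dumping to AVI*/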
	strcpy(config_path, "");
	if (out_dir) {
		strcat(config_path, out_dir);
		if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\");
	}
	strcat(config_path, rad_name);
	strcat(config_path, "_bifs");
	if (!dump_type) {
		strcat(config_path, ".avi");
		avi_out = AVI_open_output_file(config_path);
		comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
		if (!avi_out) goto err_exit;
	}


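	/*locate the first non-dependent BIFS (scene description) track*/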
	for (i=0; i<gf_isom_get_track_count(file); i++) {
		esd = gf_isom_get_esd(file, i+1, 1);
		if (!esd) continue;
		if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	if (!esd) {
		printf("no bifs track found\n");
		goto err_exit;
	}

	b2v.duration = gf_isom_get_media_duration(file, i+1);
	timescale = gf_isom_get_media_timescale(file, i+1);
	es_id = (u16) gf_isom_get_track_id(file, i+1);
	e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
	if (e) {
		printf("BIFS init error %s\n", gf_error_to_string(e));
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
		goto err_exit;
	}
	if (dump_time>=0) dump_time = dump_time *1000 / timescale;

	gf_sc_set_scene(b2v.sr, b2v.sg);
	count = gf_isom_get_sample_count(file, i+1);

	reset_fps = 0;
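	/*no frame rate given: estimate it from the sample count and the media duration*/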
	if (!fps) {
		fps = (Float) (count * timescale);
		fps /= (Double) (s64) b2v.duration;
		printf("Estimated BIFS FrameRate %g\n", fps);
		reset_fps = 1;
	}

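	/*if no output size was given, use the size declared in the scene*/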
	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24 and must make sure the stride (width*3) is a multiple of 4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}

	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_draw_frame(b2v.sr);

	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	if (avi_out) {
		AVI_set_video(avi_out, width, height, fps, comp);
		conv_buf = gf_malloc(sizeof(char) * width * height * 3);
	}
	printf("Dumping at BIFS resolution %d x %d\n\n", width, height);
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	cur_time = 0;

	duration = (u32)(timescale / fps);
	if (reset_fps) fps = 0;

	frameNum = 1;
	first_dump = 1;
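	/*decode each BIFS access unit and render it; at a fixed fps the rendered frame is dumped repeatedly
	  until the dump clock catches up with the AU timestamp, otherwise one frame is dumped per AU*/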
	for (j=0; j<count; j++) {
		GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di);

		b2v.cts = samp->DTS + samp->CTS_Offset;
		/*apply command*/
		gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
		gf_isom_sample_del(&samp);

		if ((frameID>=0) && (j<(u32)frameID)) continue;
		if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue;
		/*render frame*/
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		if (first_dump) {
			gf_sc_draw_frame(b2v.sr);
			first_dump = 0;
		}

		if (fps) {
			if (cur_time > b2v.cts) continue;

			while (1) {
				printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1);
				dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum);
				frameNum++;
				cur_time += duration;
				if (cur_time > b2v.cts) break;
			}
		} else {
			dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum);
			if (frameID>=0 || dump_time>=0) break;
			frameNum++;
			printf("dumped frame %d / %d\r", j+1, count);
		}

	}
	gf_odf_desc_del((GF_Descriptor *) esd);

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);

err_exit:
	if (avi_out) AVI_close(avi_out);
	if (conv_buf) gf_free(conv_buf);
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example #3
static GF_Err CTXLoad_ProcessData(GF_SceneDecoder *plug, const char *inBuffer, u32 inBufferLength,
                                  u16 ES_ID, u32 stream_time, u32 mmlevel)
{
	GF_Err e = GF_OK;
	u32 i, j, k, nb_updates, last_rap=0;
	GF_AUContext *au;
	Bool can_delete_com;
	GF_StreamContext *sc;
	CTXLoadPriv *priv = (CTXLoadPriv *)plug->privateStack;

	/*something failed*/
	if (priv->load_flags==3) return GF_EOS;

	/*this signals main scene disconnection, destroy the context if needed*/
	assert(ES_ID);
	if (!priv->ctx) {
		e = CTXLoad_Setup((GF_BaseDecoder *)plug);
		if (e) return e;
	}


	if (stream_time==(u32)-1) {
		/*seek on root stream: destroy the context manager and reload it. We cannot seek on the main stream
		because commands may have changed node attributes/children and we don't track the initial values*/
		if (priv->load_flags && (priv->base_stream_id == ES_ID)) {
			if (priv->src) gf_fclose(priv->src);
			priv->src = NULL;
			gf_sm_load_done(&priv->load);
			priv->file_pos = 0;
			/*queue scene for detach*/
			gf_term_lock_media_queue(priv->scene->root_od->term, GF_TRUE);
			priv->scene->root_od->action_type = GF_ODM_ACTION_SCENE_RECONNECT;
			gf_list_add(priv->scene->root_od->term->media_queue, priv->scene->root_od);
			gf_term_lock_media_queue(priv->scene->root_od->term, GF_FALSE);

			return CTXLoad_Setup((GF_BaseDecoder *)plug);
		}
		i=0;
		while ((sc = (GF_StreamContext *)gf_list_enum(priv->ctx->streams, &i))) {
			/*not our stream*/
			if (!sc->in_root_od && (sc->ESID != ES_ID)) continue;
			/*not the base stream*/
			if (sc->in_root_od && (priv->base_stream_id != ES_ID)) continue;
			/*handle SWF media extraction*/
			if ((sc->streamType == GF_STREAM_OD) && (priv->load_flags==1)) continue;
			sc->last_au_time = 0;
		}
		return GF_OK;
	}

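	/*load_flags: 0 = nothing loaded yet, 1 = first frame loaded, 2 = context fully loaded, 3 = load failed*/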
	if (priv->load_flags != 2) {

		if (priv->progressive_support) {
			u32 entry_time;
			char file_buf[4096+1];
			if (!priv->src) {
				priv->src = gf_fopen(priv->file_name, "rb");
				if (!priv->src) return GF_URL_ERROR;
				priv->file_pos = 0;
			}
			priv->load.type = GF_SM_LOAD_XMTA;
			e = GF_OK;
			entry_time = gf_sys_clock();
			gf_fseek(priv->src, priv->file_pos, SEEK_SET);
			while (1) {
				u32 diff;
				s32 nb_read = (s32) fread(file_buf, 1, 4096, priv->src);
				if (nb_read<0) {
					return GF_IO_ERR;
				}
				file_buf[nb_read] = 0;
				if (!nb_read) {
					if (priv->file_pos==priv->file_size) {
						gf_fclose(priv->src);
						priv->src = NULL;
						priv->load_flags = 2;
						gf_sm_load_done(&priv->load);
						break;
					}
					break;
				}

				e = gf_sm_load_string(&priv->load, file_buf, GF_FALSE);
				priv->file_pos += nb_read;
				if (e) break;
				diff = gf_sys_clock() - entry_time;
				if (diff > priv->sax_max_duration) break;
			}
			if (!priv->scene->graph_attached) {
				gf_sg_set_scene_size_info(priv->scene->graph, priv->ctx->scene_width, priv->ctx->scene_height, priv->ctx->is_pixel_metrics);
				gf_scene_attach_to_compositor(priv->scene);

				CTXLoad_CheckStreams(priv);
			}
		}
		/*load first frame only*/
		else if (!priv->load_flags) {
			/*we need the whole file*/
			if (!CTXLoad_CheckDownload(priv)) return GF_OK;

			priv->load_flags = 1;
			e = gf_sm_load_init(&priv->load);
			if (!e) {
				CTXLoad_CheckStreams(priv);
				gf_sg_set_scene_size_info(priv->scene->graph, priv->ctx->scene_width, priv->ctx->scene_height, priv->ctx->is_pixel_metrics);
				/*VRML, override base clock*/
				if ((priv->load.type==GF_SM_LOAD_VRML) || (priv->load.type==GF_SM_LOAD_X3DV) || (priv->load.type==GF_SM_LOAD_X3D)) {
					/*override clock callback*/
					gf_sg_set_scene_time_callback(priv->scene->graph, CTXLoad_GetVRMLTime);
				}
			}
		}
		/*load the rest*/
		else {
			priv->load_flags = 2;
			e = gf_sm_load_run(&priv->load);
			gf_sm_load_done(&priv->load);
			/*in case this was not set in the first pass (XMT)*/
			gf_sg_set_scene_size_info(priv->scene->graph, priv->ctx->scene_width, priv->ctx->scene_height, priv->ctx->is_pixel_metrics);
		}

		if (e<0) {
			gf_sm_load_done(&priv->load);
			gf_sm_del(priv->ctx);
			priv->ctx = NULL;
			priv->load_flags = 3;
			return e;
		}

		/*figure out the duration of the root scene and take care of XMT timing*/
		if (priv->load_flags==2) {
			CTXLoad_CheckStreams(priv);
			if (!gf_list_count(priv->ctx->streams)) {
				gf_scene_attach_to_compositor(priv->scene);
			}
		}
	}

	nb_updates = 0;

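	/*apply all pending AUs of the stream contexts carried by this ES, up to the current stream time*/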
	i=0;
	while ((sc = (GF_StreamContext *)gf_list_enum(priv->ctx->streams, &i))) {
		/*not our stream*/
		if (!sc->in_root_od && (sc->ESID != ES_ID)) continue;
		/*not the base stream*/
		if (sc->in_root_od && (priv->base_stream_id != ES_ID)) continue;
		/*handle SWF media extraction*/
		if ((sc->streamType == GF_STREAM_OD) && (priv->load_flags==1)) continue;

		/*check for seek*/
		if (sc->last_au_time > 1 + stream_time) {
			sc->last_au_time = 0;
		}

		can_delete_com = GF_FALSE;
		if (sc->in_root_od && (priv->load_flags==2)) can_delete_com = GF_TRUE;

		/*we're in the right stream, apply update*/
		j=0;

		/*seek: restart from the last RAP access unit at or before the current stream time*/
		if (!sc->last_au_time) {
			while ((au = (GF_AUContext *)gf_list_enum(sc->AUs, &j))) {
				u32 au_time = (u32) (au->timing*1000/sc->timeScale);

				if (au_time > stream_time)
					break;
				if (au->flags & GF_SM_AU_RAP) last_rap = j-1;
			}
			j = last_rap;
		}

		while ((au = (GF_AUContext *)gf_list_enum(sc->AUs, &j))) {
			u32 au_time = (u32) (au->timing*1000/sc->timeScale);

			if (au_time + 1 <= sc->last_au_time) {
				/*remove first replace command*/
				if (can_delete_com && (sc->streamType==GF_STREAM_SCENE)) {
					while (gf_list_count(au->commands)) {
						GF_Command *com = (GF_Command *)gf_list_get(au->commands, 0);
						gf_list_rem(au->commands, 0);
						gf_sg_command_del(com);
					}
					j--;
					gf_list_rem(sc->AUs, j);
					gf_list_del(au->commands);
					gf_free(au);
				}
				continue;
			}

			if (au_time > stream_time) {
				nb_updates++;
				break;
			}

			if (sc->streamType == GF_STREAM_SCENE) {
				GF_Command *com;
				/*apply the commands*/
				k=0;
				while ((com = (GF_Command *)gf_list_enum(au->commands, &k))) {
					e = gf_sg_command_apply(priv->scene->graph, com, 0);
					if (e) break;
					/*remove commands on base layer*/
					if (can_delete_com) {
						k--;
						gf_list_rem(au->commands, k);
						gf_sg_command_del(com);
					}
				}
			}
			else if (sc->streamType == GF_STREAM_OD) {
				/*apply the commands*/
				while (gf_list_count(au->commands)) {
					Bool keep_com = GF_FALSE;
					GF_ODCom *com = (GF_ODCom *)gf_list_get(au->commands, 0);
					gf_list_rem(au->commands, 0);
					switch (com->tag) {
					case GF_ODF_OD_UPDATE_TAG:
					{
						GF_ODUpdate *odU = (GF_ODUpdate *)com;
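						/*for each OD in the update: locate its MuxInfo, import subtitle/text data when present,
						route scene or interaction streams back to this decoder, or remap the media to a gpac:// URL*/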
						while (gf_list_count(odU->objectDescriptors)) {
							GF_ESD *esd;
							char *remote;
							GF_MuxInfo *mux = NULL;
							GF_ObjectDescriptor *od = (GF_ObjectDescriptor *)gf_list_get(odU->objectDescriptors, 0);
							gf_list_rem(odU->objectDescriptors, 0);
							/*we can only work with single-stream ods*/
							esd = (GF_ESD*)gf_list_get(od->ESDescriptors, 0);
							if (!esd) {
								if (od->URLString) {
									ODS_SetupOD(priv->scene, od);
								} else {
									gf_odf_desc_del((GF_Descriptor *) od);
								}
								continue;
							}
							/*fix OCR dependencies*/
							if (CTXLoad_StreamInRootOD(priv->ctx->root_od, esd->OCRESID)) esd->OCRESID = priv->base_stream_id;

							/*forbidden if ESD*/
							if (od->URLString) {
								gf_odf_desc_del((GF_Descriptor *) od);
								continue;
							}
							/*look for MUX info*/
							k=0;
							while ((mux = (GF_MuxInfo*)gf_list_enum(esd->extensionDescriptors, &k))) {
								if (mux->tag == GF_ODF_MUXINFO_TAG) break;
								mux = NULL;
							}
							/*we need a mux if not animation stream*/
							if (!mux || !mux->file_name) {
								/*only animation streams are handled*/
								if (!esd->decoderConfig) {
									gf_odf_desc_del((GF_Descriptor *) od);
								} else if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
									/*set ST to private scene to make sure the stream will be redirected to us*/
									esd->decoderConfig->streamType = GF_STREAM_PRIVATE_SCENE;
									esd->dependsOnESID = priv->base_stream_id;
									ODS_SetupOD(priv->scene, od);
								} else if (esd->decoderConfig->streamType==GF_STREAM_INTERACT) {
									GF_UIConfig *cfg = (GF_UIConfig *) esd->decoderConfig->decoderSpecificInfo;
									gf_odf_encode_ui_config(cfg, &esd->decoderConfig->decoderSpecificInfo);
									gf_odf_desc_del((GF_Descriptor *) cfg);
									ODS_SetupOD(priv->scene, od);
								} else {
									gf_odf_desc_del((GF_Descriptor *) od);
								}
								continue;
							}
							//solve url before import
							if (mux->src_url) {
								char *res_url = gf_url_concatenate(mux->src_url, mux->file_name);
								if (res_url) {
									gf_free(mux->file_name);
									mux->file_name = res_url;
								}
								gf_free(mux->src_url);
								mux->src_url = NULL;
							}
							/*text import*/
							if (mux->textNode) {
#ifdef GPAC_DISABLE_MEDIA_IMPORT
								gf_odf_desc_del((GF_Descriptor *) od);
								continue;
#else
								e = gf_sm_import_bifs_subtitle(priv->ctx, esd, mux);
								if (e) {
									e = GF_OK;
									gf_odf_desc_del((GF_Descriptor *) od);
									continue;
								}
								/*set ST to private scene and dependency to base to make sure the stream will be redirected to us*/
								esd->decoderConfig->streamType = GF_STREAM_PRIVATE_SCENE;
								esd->dependsOnESID = priv->base_stream_id;
								ODS_SetupOD(priv->scene, od);
								continue;
#endif
							}

							/*soundstreams are a bit of a pain, they may be declared before any data gets written*/
							if (mux->delete_file) {
								FILE *t = gf_fopen(mux->file_name, "rb");
								if (!t) {
									keep_com = GF_TRUE;
									gf_list_insert(odU->objectDescriptors, od, 0);
									break;
								}
								gf_fclose(t);
							}
							/*remap to remote URL - warning, the URL has already been resolved according to the parent path*/
							remote = (char*)gf_malloc(sizeof(char) * (strlen("gpac://")+strlen(mux->file_name)+1) );
							strcpy(remote, "gpac://");
							strcat(remote, mux->file_name);
							k = od->objectDescriptorID;
							/*if files were created we'll have to clean up (swf import)*/
							if (mux->delete_file) gf_list_add(priv->files_to_delete, gf_strdup(remote));

							gf_odf_desc_del((GF_Descriptor *) od);
							od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
							od->URLString = remote;
							od->objectDescriptorID = k;
							ODS_SetupOD(priv->scene, od);
						}
						if (keep_com) break;
					}
					break;
					case GF_ODF_OD_REMOVE_TAG:
					{
						GF_ODRemove *odR = (GF_ODRemove*)com;
						for (k=0; k<odR->NbODs; k++) {
							GF_ObjectManager *odm = gf_scene_find_odm(priv->scene, odR->OD_ID[k]);
							if (odm) gf_odm_disconnect(odm, 1);
						}
					}
					break;
					default:
						break;
					}
					if (keep_com) {
						gf_list_insert(au->commands, com, 0);
						break;
					} else {
						gf_odf_com_del(&com);
					}
					if (e) break;
				}

			}
			sc->last_au_time = au_time + 1;
			/*attach graph to renderer*/
			if (!priv->scene->graph_attached)
				gf_scene_attach_to_compositor(priv->scene);
			if (e) return e;

			/*for root streams remove completed AUs (no longer needed)*/
			if (sc->in_root_od && !gf_list_count(au->commands) ) {
				j--;
				gf_list_rem(sc->AUs, j);
				gf_list_del(au->commands);
				gf_free(au);
			}
		}
	}
	if (e) return e;
	if ((priv->load_flags==2) && !nb_updates) return GF_EOS;
	return GF_OK;
}