Example #1
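TraverseBackground is the compositor callback for the MPEG-4/VRML Background node: the first traversal registers the node on the background stack, binds it if it is on top and starts its texture streams; later TRAVERSE_BINDABLE passes clear the visual with the sky color and draw the sky/ground domes and the six textured faces centered on the viewer.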
static void TraverseBackground(GF_Node *node, void *rs, Bool is_destroy)
{
	M_Background *bck;
	BackgroundStack *st;
	SFColor bcol;
	SFVec4f res;
	Fixed scale;
	Bool has_sky, has_ground, front_tx, back_tx, top_tx, bottom_tx, right_tx, left_tx;
	GF_Matrix mx;
	GF_Compositor *compositor;
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;

	if (is_destroy) {
		DestroyBackground(node);
		return;
	}

	gf_node_dirty_clear(node, 0);
	bck = (M_Background *)node;
	st = (BackgroundStack *) gf_node_get_private(node);
	compositor = (GF_Compositor*)st->compositor;


	/*may happen in get_bounds*/
	if (!tr_state->backgrounds) return;

	/*first traversal, bind if needed*/
	if (gf_list_find(tr_state->backgrounds, node) < 0) {
		gf_list_add(tr_state->backgrounds, node);
		assert(gf_list_find(st->reg_stacks, tr_state->backgrounds)==-1);
		gf_list_add(st->reg_stacks, tr_state->backgrounds);
		/*only bind if we're on top*/
		if (gf_list_get(tr_state->backgrounds, 0) == bck) {
			if (!bck->isBound) Bindable_SetIsBound(node, 1);
		}

		/*check streams*/
		if (back_use_texture(&bck->frontUrl) && !st->txh_front.is_open) gf_sc_texture_play(&st->txh_front, &bck->frontUrl);
		if (back_use_texture(&bck->bottomUrl) && !st->txh_bottom.is_open) gf_sc_texture_play(&st->txh_bottom, &bck->bottomUrl);
		if (back_use_texture(&bck->backUrl) && !st->txh_back.is_open) gf_sc_texture_play(&st->txh_back, &bck->backUrl);
		if (back_use_texture(&bck->topUrl) && !st->txh_top.is_open) gf_sc_texture_play(&st->txh_top, &bck->topUrl);
		if (back_use_texture(&bck->rightUrl) && !st->txh_right.is_open) gf_sc_texture_play(&st->txh_right, &bck->rightUrl);
		if (back_use_texture(&bck->leftUrl) && !st->txh_left.is_open) gf_sc_texture_play(&st->txh_left, &bck->leftUrl);

		/*in any case don't draw the first time (since the background could have been declared last)*/
		gf_sc_invalidate(st->compositor, NULL);
		return;
	}
	if (!bck->isBound) return;

	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) {
		if (tr_state->traversing_mode == TRAVERSE_SORT) {
			gf_mx_copy(st->current_mx, tr_state->model_matrix);
		}
		return;
	}

	front_tx = back_gf_sc_texture_enabled(&bck->frontUrl, &st->txh_front);
	back_tx = back_gf_sc_texture_enabled(&bck->backUrl, &st->txh_back);
	top_tx = back_gf_sc_texture_enabled(&bck->topUrl, &st->txh_top);
	bottom_tx = back_gf_sc_texture_enabled(&bck->bottomUrl, &st->txh_bottom);
	right_tx = back_gf_sc_texture_enabled(&bck->rightUrl, &st->txh_right);
	left_tx = back_gf_sc_texture_enabled(&bck->leftUrl, &st->txh_left);

	has_sky = ((bck->skyColor.count>1) && bck->skyAngle.count) ? 1 : 0;
	has_ground = ((bck->groundColor.count>1) && bck->groundAngle.count) ? 1 : 0;
	bcol.red = bcol.green = bcol.blue = 0;
	if (bck->skyColor.count) bcol = bck->skyColor.vals[0];

	/*if we are clearing the main visual, clear it entirely - ONLY IF NOT IN A LAYER*/
	if ((tr_state->visual == compositor->visual) && (tr_state->visual->back_stack == tr_state->backgrounds)) {
		visual_3d_clear(tr_state->visual, bcol, FIX_ONE);
		if (!has_sky && !has_ground && !front_tx && !back_tx && !top_tx && !bottom_tx && !left_tx && !right_tx) {
			return;
		}
	}

	/*undo translation*/
	res.x = res.y = res.z = 0;
	res.q = FIX_ONE;
	gf_mx_apply_vec_4x4(&tr_state->camera->unprojection, &res);
	assert(res.q);
	res.x = gf_divfix(res.x, res.q);
	res.y = gf_divfix(res.y, res.q);
	res.z = gf_divfix(res.z, res.q);
	/*NB: we don't support local rotation of the background ...*/

	/*enable background state (turn off all quality options)*/
	visual_3d_set_background_state(tr_state->visual, 1);

	if (has_sky) {
		GF_Matrix bck_mx;
		gf_mx_copy(bck_mx, tr_state->model_matrix);
		gf_mx_copy(tr_state->model_matrix, st->current_mx);

		if (!st->sky_mesh) {
			st->sky_mesh = new_mesh();
			back_build_dome(st->sky_mesh, &bck->skyAngle, &bck->skyColor, 0);
		}

		gf_mx_init(mx);
		gf_mx_add_translation(&mx, res.x, res.y, res.z);

		/*CHECKME - not sure why, but we need to scale less in fixed point, otherwise z-far clipping occurs - probably some
		rounding issues...*/
#ifdef GPAC_FIXED_POINT
		scale = (tr_state->camera->z_far/10)*8;
#else
		scale = 9*tr_state->camera->z_far/10;
#endif
		gf_mx_add_scale(&mx, scale, scale, scale);

		gf_mx_add_matrix(&tr_state->model_matrix, &mx);

		visual_3d_mesh_paint(tr_state, st->sky_mesh);

		gf_mx_copy(tr_state->model_matrix, bck_mx);
	}

	if (has_ground) {
		GF_Matrix bck_mx;
		gf_mx_copy(bck_mx, tr_state->model_matrix);
		gf_mx_copy(tr_state->model_matrix, st->current_mx);

		if (!st->ground_mesh) {
			st->ground_mesh = new_mesh();
			back_build_dome(st->ground_mesh, &bck->groundAngle, &bck->groundColor, 1);
		}

		gf_mx_init(mx);
		gf_mx_add_translation(&mx, res.x, res.y, res.z);
		/*cf above*/
#ifdef GPAC_FIXED_POINT
		scale = (tr_state->camera->z_far/100)*70;
#else
		scale = 85*tr_state->camera->z_far/100;
#endif
		gf_mx_add_scale(&mx, scale, -scale, scale);

		gf_mx_add_matrix(&tr_state->model_matrix, &mx);
		visual_3d_mesh_paint(tr_state, st->ground_mesh);
		gf_mx_copy(tr_state->model_matrix, bck_mx);
	}

	if (front_tx || back_tx || left_tx || right_tx || top_tx || bottom_tx) {
		GF_Matrix bck_mx;
		gf_mx_copy(bck_mx, tr_state->model_matrix);
		gf_mx_copy(tr_state->model_matrix, st->current_mx);

		gf_mx_init(mx);
		gf_mx_add_translation(&mx, res.x, res.y, res.z);
#ifdef GPAC_FIXED_POINT
		scale = (tr_state->camera->z_far/100)*99;
		gf_mx_add_scale(&mx, scale, scale, scale);
#else
		gf_mx_add_scale(&mx, tr_state->camera->z_far, tr_state->camera->z_far, tr_state->camera->z_far);
#endif
		visual_3d_enable_antialias(tr_state->visual, 1);

		gf_mx_add_matrix(&tr_state->model_matrix, &mx);

		if (front_tx) back_draw_texture(tr_state, &st->txh_front, st->front_mesh);
		if (back_tx) back_draw_texture(tr_state, &st->txh_back, st->back_mesh);
		if (top_tx) back_draw_texture(tr_state, &st->txh_top, st->top_mesh);
		if (bottom_tx) back_draw_texture(tr_state, &st->txh_bottom, st->bottom_mesh);
		if (left_tx) back_draw_texture(tr_state, &st->txh_left, st->left_mesh);
		if (right_tx) back_draw_texture(tr_state, &st->txh_right, st->right_mesh);

		gf_mx_copy(tr_state->model_matrix, bck_mx);
	}

	/*disable background state (restore quality options)*/
	visual_3d_set_background_state(tr_state->visual, 0);
}
Example #2
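gf_smil_timing_notify_time drives the SMIL timing state machine of a timed element: depending on the scene time it activates the element, handles repeat/freeze/end of the current interval, fires the corresponding beginEvent/endEvent/repeatEvent DOM events and checks whether a new interval allows a restart.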
/* Returns:
	0 if no rendering traversal is required,
	1 if a rendering traversal is required,
   -1 if the time node is a discard which has been deleted */
s32 gf_smil_timing_notify_time(SMIL_Timing_RTI *rti, Double scene_time)
{
	Fixed simple_time;
	s32 ret = 0;
	GF_DOM_Event evt;
	SMILTimingAttributesPointers *timingp = rti->timingp;

	if (!timingp) return 0;
	
	if (rti->scene_time == scene_time) return 0;
	rti->scene_time = scene_time;
	rti->cycle_number++;

	/* for fraction events, we indicate that the scene needs redraw */
	if (rti->evaluate_status == SMIL_TIMING_EVAL_FRACTION) 
		return 1;

	if (rti->evaluate_status == SMIL_TIMING_EVAL_DISCARD) {
		/* TODO: FIX ME discarding should send a begin event ? */
		/* -1 is a special case when the discard is evaluated */
		if (gf_smil_discard(rti, FLT2FIX(rti->scene_time))) return -1;
		else return 0;
	}

	gf_node_register(rti->timed_elt, NULL);

waiting_to_begin:
	if (rti->status == SMIL_STATUS_WAITING_TO_BEGIN) {
		if (rti->current_interval && scene_time >= rti->current_interval->begin) {			
			GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Activating\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
			rti->status = SMIL_STATUS_ACTIVE;

			memset(&evt, 0, sizeof(evt));
			evt.type = GF_EVENT_BEGIN_EVENT;
			evt.smil_event_time = rti->current_interval->begin;
			gf_dom_event_fire((GF_Node *)rti->timed_elt, NULL, &evt);

			if (rti->timed_elt->sgprivate->tag==TAG_SVG_conditional) {
				SVG_Element *e = (SVG_Element *)rti->timed_elt;
				/*activate conditional*/
				if (e->children) gf_node_render(e->children->node, NULL);
				rti->status = SMIL_STATUS_DONE;
			} else {
				gf_smil_reorder_anim(rti);
			}
		} else {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Evaluating (Not starting)\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
			ret = -2;
			goto exit;
		}
	}

	if (rti->status == SMIL_STATUS_ACTIVE) {
		u32 cur_id;

		if (rti->current_interval->active_duration >= 0 
			&& scene_time >= (rti->current_interval->begin + rti->current_interval->active_duration)) {

			GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Stopping \n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
			memset(&evt, 0, sizeof(evt));
			evt.type = GF_EVENT_END_EVENT;
			evt.smil_event_time = rti->current_interval->begin + rti->current_interval->active_duration;
			gf_dom_event_fire((GF_Node *)rti->timed_elt, NULL, &evt);

			ret = rti->postpone;

			if (timingp->fill && *timingp->fill == SMIL_FILL_FREEZE) {
				rti->status = SMIL_STATUS_FROZEN;
				rti->first_frozen = rti->cycle_number;
				rti->evaluate_status = SMIL_TIMING_EVAL_FREEZE;
				if (!rti->postpone) {
					Fixed simple_time = gf_smil_timing_get_normalized_simple_time(rti, scene_time);
					rti->evaluate(rti, simple_time, rti->evaluate_status);
				}
			} else {
				rti->status = SMIL_STATUS_DONE;
				rti->first_frozen = rti->cycle_number;
				rti->evaluate_status = SMIL_TIMING_EVAL_REMOVE;
				if (!rti->postpone) {
					Fixed simple_time = gf_smil_timing_get_normalized_simple_time(rti, scene_time);
					rti->evaluate(rti, simple_time, rti->evaluate_status);
				}
			}

		}
		/*special case for unspecified simpleDur with animations (not with media timed elements)*/
		else if (0 && rti->postpone 
			&& (rti->current_interval->simple_duration==-1) 
			&& (rti->current_interval->active_duration<=0) 
		) {
			ret = 1;
			rti->status = SMIL_STATUS_FROZEN;
			rti->first_frozen = rti->cycle_number;
			rti->evaluate_status = SMIL_TIMING_EVAL_FREEZE;
		} else { // the animation is still active 
			if (!timingp->restart || *timingp->restart == SMIL_RESTART_ALWAYS) {
				s32 interval_index;
				interval_index = gf_smil_timing_find_interval_index(rti, scene_time);
				
				if (interval_index >= 0 &&
					interval_index != rti->current_interval_index) {
					/* intervals are different, use the new one */
					rti->current_interval_index = interval_index;
					rti->current_interval = (SMIL_Interval*)gf_list_get(rti->intervals, rti->current_interval_index);
					
					/* reinsert the timed element at its proper place in the list
					   of timed elements in the scene graph */
					gf_smil_reorder_timing(rti);

					/* if this is an animation, reinsert it in the list of animations
					   that target this attribute, so that it is the last one */
					gf_smil_reorder_anim(rti);

					memset(&evt, 0, sizeof(evt));
					evt.type = GF_EVENT_BEGIN_EVENT;
					evt.smil_event_time = rti->current_interval->begin;
					gf_dom_event_fire((GF_Node *)rti->timed_elt, NULL, &evt);

				
				} 
			}

			ret = rti->postpone;
			
			cur_id = rti->current_interval->nb_iterations;
			simple_time = gf_smil_timing_get_normalized_simple_time(rti, scene_time);
			if (cur_id < rti->current_interval->nb_iterations) {
				memset(&evt, 0, sizeof(evt));
				evt.type = GF_EVENT_REPEAT_EVENT;
				evt.smil_event_time = rti->current_interval->begin + rti->current_interval->nb_iterations*rti->current_interval->simple_duration;
				evt.detail = rti->current_interval->nb_iterations;
				gf_dom_event_fire((GF_Node *)rti->timed_elt, NULL, &evt);

				GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Repeating\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
				rti->evaluate_status = SMIL_TIMING_EVAL_REPEAT;		
			} else {
				GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Updating\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
				rti->evaluate_status = SMIL_TIMING_EVAL_UPDATE;
			}

			if (!rti->postpone) {
				rti->evaluate(rti, simple_time, rti->evaluate_status);
			}	
		}
	}

	if ((rti->status == SMIL_STATUS_DONE) || (rti->status == SMIL_STATUS_FROZEN)) {
		if (!timingp->restart || *timingp->restart != SMIL_RESTART_NEVER) { 
			/* Check changes in begin or end attributes */
			s32 interval_index;

			GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[SMIL Timing   ] Time %f - Timed element %s - Checking for restart\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_name((GF_Node *)rti->timed_elt)));
			interval_index = gf_smil_timing_find_interval_index(rti, scene_time);
			if (interval_index >= 0 && interval_index != rti->current_interval_index) {
				/* intervals are different, use the new one */
				rti->current_interval_index = interval_index;
				rti->current_interval = (SMIL_Interval*)gf_list_get(rti->intervals, rti->current_interval_index);

				/* reinsert the timed element at its proper place in the list
				   of timed elements in the scene graph */
				gf_smil_reorder_timing(rti);

				rti->status = SMIL_STATUS_WAITING_TO_BEGIN;
				rti->evaluate_status = SMIL_TIMING_EVAL_NONE;
				goto waiting_to_begin;
			} 
		} else if ((rti->status == SMIL_STATUS_DONE) && 
			        timingp->restart && (*timingp->restart == SMIL_RESTART_NEVER)) {
			/* the timed element is done and cannot restart, we don't need to evaluate it anymore */
			GF_SceneGraph * sg = rti->timed_elt->sgprivate->scenegraph;
			while (sg->parent_scene) sg = sg->parent_scene;
			gf_list_del_item(sg->smil_timed_elements, rti);
		}
	}

exit:
	gf_node_unregister(rti->timed_elt, NULL);
	return ret;
}
Example #3
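ISOR_ServiceCommand is the command handler of the ISO media (MP4) reader input service: it answers service-level queries (iTunes metadata, audio presence, quality switching, proxy data flushing) and per-channel commands such as play, stop, duration, buffering, pixel aspect ratio, decoder-specific info and NALU extraction mode.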
GF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com)
{
	Double track_dur, media_dur;
	ISOMChannel *ch;
	ISOMReader *read;
	u32 count, i;

	if (!plug || !plug->priv || !com) return GF_SERVICE_ERROR;
	read = (ISOMReader *) plug->priv;
	if (read->disconnected) return GF_OK;

	if (com->command_type==GF_NET_SERVICE_INFO) {
		u32 tag_len;
		const char *tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_NAME, &tag, &tag_len)==GF_OK) com->info.name = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ARTIST, &tag, &tag_len)==GF_OK) com->info.artist = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_ALBUM, &tag, &tag_len)==GF_OK) com->info.album = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMMENT, &tag, &tag_len)==GF_OK) com->info.comment = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_TRACK, &tag, &tag_len)==GF_OK) {
			com->info.track_info = (((tag[2]<<8)|tag[3]) << 16) | ((tag[4]<<8)|tag[5]);
		}
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COMPOSER, &tag, &tag_len)==GF_OK) com->info.composer = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_WRITER, &tag, &tag_len)==GF_OK) com->info.writer = tag;
		if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_GENRE, &tag, &tag_len)==GF_OK) {
			if (tag[0]) {
				com->info.genre = 0;
			} else {
				com->info.genre = (tag[0]<<8) | tag[1];
			}
		}
		return GF_OK;
	}
	if (com->command_type==GF_NET_SERVICE_HAS_AUDIO) {
		u32 i, count;
		count = gf_isom_get_track_count(read->mov);
		for (i=0; i<count; i++) {
			if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_AUDIO) return GF_OK;
		}
		return GF_NOT_SUPPORTED;
	}

	if (com->command_type == GF_NET_SERVICE_QUALITY_SWITCH)
	{
		count = gf_list_count(read->channels);
		for (i = 0; i < count; i++)
		{
			ch = (ISOMChannel *)gf_list_get(read->channels, i);
			if (gf_isom_has_scalable_layer(read->mov)) {
				ch->next_track = gf_channel_switch_quality(ch, read->mov, com->switch_quality.up);
			}
		}
		return GF_OK;
	}
	if (com->command_type == GF_NET_SERVICE_PROXY_DATA_RECEIVE) {
		isor_flush_data(read, 1, com->proxy_data.is_chunk);
		return GF_OK;
	}
	if (com->command_type == GF_NET_SERVICE_FLUSH_DATA) {
		if (read->nb_playing && plug->query_proxy)
			isor_flush_data(read, 0, 0);
		return GF_OK;
	}
	if (com->command_type == GF_NET_SERVICE_CAN_REVERSE_PLAYBACK) 
		return GF_OK;

	if (!com->base.on_channel) return GF_NOT_SUPPORTED;

	ch = isor_get_channel(read, com->base.on_channel);
	if (!ch) return GF_STREAM_NOT_FOUND;

	switch (com->command_type) {
	case GF_NET_CHAN_SET_PADDING:
		if (!ch->track) return GF_OK;
		gf_isom_set_sample_padding(read->mov, ch->track, com->pad.padding_bytes);
		return GF_OK;
	case GF_NET_CHAN_SET_PULL:
		//we don't pull in DASH-based services, we flush as soon as we have a complete segment
#ifndef DASH_USE_PULL
		if (read->input->proxy_udta && !read->input->proxy_type)
			return GF_NOT_SUPPORTED;
#endif

		ch->is_pulling = 1;
		return GF_OK;
	case GF_NET_CHAN_INTERACTIVE:
		return GF_OK;
	case GF_NET_CHAN_BUFFER:
		//dash or HTTP, do rebuffer if not disabled
		if (plug->query_proxy) {
		} else if (read->dnload) {
			ch->buffer_min = com->buffer.min;
			ch->buffer_max = com->buffer.max;
		} else {
			com->buffer.max = com->buffer.min = 0;
		}
		return GF_OK;
	case GF_NET_CHAN_DURATION:
		if (!ch->track) {
			com->duration.duration = 0;
			return GF_OK;
		}
		ch->duration = gf_isom_get_track_duration(read->mov, ch->track);
		track_dur = (Double) (s64) ch->duration;
		track_dur /= read->time_scale;
		if (gf_isom_get_edit_segment_count(read->mov, ch->track)) {
			com->duration.duration = (Double) track_dur;
			ch->duration = (u32) (track_dur * ch->time_scale);
		} else {
			/*some files indicate a wrong TrackDuration, get the longest*/
			ch->duration = gf_isom_get_media_duration(read->mov, ch->track);
			media_dur = (Double) (s64) ch->duration;
			media_dur /= ch->time_scale;
			com->duration.duration = MAX(track_dur, media_dur);
		}
		return GF_OK;
	case GF_NET_CHAN_PLAY:

		gf_mx_p(read->segment_mutex);
		isor_reset_reader(ch);
		ch->speed = com->play.speed;
		read->reset_frag_state = 1;
		if (read->frag_type)
			read->frag_type = 1;
		gf_mx_v(read->segment_mutex);

		ch->start = ch->end = 0;
		if (com->play.speed>0) {
			if (com->play.start_range>=0) {
				ch->start = (u64) (s64) (com->play.start_range * ch->time_scale);
				ch->start = check_round(ch, ch->start, com->play.start_range, 1);
			}
			if (com->play.end_range >= com->play.start_range) {
				ch->end = (u64) (s64) (com->play.end_range*ch->time_scale);
				ch->end = check_round(ch, ch->end, com->play.end_range, 0);
			}
		} else if (com->play.speed<0) {
			Double end = com->play.end_range;
			if (end==-1) end = 0;
			ch->start = (u64) (s64) (com->play.start_range * ch->time_scale);
			if (end <= com->play.start_range)
				ch->end = (u64) (s64) (end  * ch->time_scale);
		}
		ch->is_playing = 1;
		if (com->play.dash_segment_switch) ch->wait_for_segment_switch = 1;
		GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[IsoMedia] Starting channel playback "LLD" to "LLD" (%g to %g)\n", ch->start, ch->end, com->play.start_range, com->play.end_range));

		//and check buffer level on play request
		isor_check_buffer_level(read);
		read->nb_playing++;
		return GF_OK;
	case GF_NET_CHAN_STOP:
		if (read->nb_playing) read->nb_playing--;
		isor_reset_reader(ch);
		return GF_OK;

	case GF_NET_CHAN_SET_SPEED:
		gf_mx_p(read->segment_mutex);
		ch->speed = com->play.speed;
		gf_mx_v(read->segment_mutex);
		return GF_OK;
	/*nothing to do on MP4 for channel config*/
	case GF_NET_CHAN_CONFIG:
		return GF_OK;
	case GF_NET_CHAN_GET_PIXEL_AR:
		return gf_isom_get_pixel_aspect_ratio(read->mov, ch->track, 1, &com->par.hSpacing, &com->par.vSpacing);
	case GF_NET_CHAN_GET_DSI:
	{
		/*it may happen that there are conflicting configs when using ESD URLs...*/
		GF_DecoderConfig *dcd = gf_isom_get_decoder_config(read->mov, ch->track, 1);
		com->get_dsi.dsi = NULL;
		com->get_dsi.dsi_len = 0;
		if (dcd) {
			if (dcd->decoderSpecificInfo) {
				com->get_dsi.dsi = dcd->decoderSpecificInfo->data;
				com->get_dsi.dsi_len = dcd->decoderSpecificInfo->dataLength;
				dcd->decoderSpecificInfo->data = NULL;
			}
			gf_odf_desc_del((GF_Descriptor *) dcd);
		}
		return GF_OK;
	}
	case GF_NET_CHAN_NALU_MODE:
		ch->nalu_extract_mode = GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG;
		ch->disable_seek = 1;
		//when this is set, we work in real scalable mode (eg N streams reassembled by the player) so only extract the layer. This will need refinements if we plan to support
		//several scalable layers ...
		if (com->nalu_mode.extract_mode==1) {
			ch->nalu_extract_mode |= GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG | GF_ISOM_NALU_EXTRACT_VDRD_FLAG | GF_ISOM_NALU_EXTRACT_LAYER_ONLY;
		}
		gf_isom_set_nalu_extract_mode(ch->owner->mov, ch->track, ch->nalu_extract_mode);
		break;
	default:
		break;
	}
	return GF_NOT_SUPPORTED;
}
Example #4
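gf_isom_extract_meta_item_extended extracts an item from a meta box: it resolves the item's location entry and extents, then copies the payload either into a memory buffer, into a caller-supplied dump file or into a file named after the item, and optionally returns the item's MIME type.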
GF_Err gf_isom_extract_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, u32 item_id, const char *dump_file_name, char **out_data, u32 *out_size, const char **out_mime )
{
    GF_BitStream *item_bs;
    char szPath[1024];
    GF_ItemExtentEntry *extent_entry;
    FILE *resource = NULL;
    u32 i, count;
    GF_ItemLocationEntry *location_entry;
    u32 item_num;
    char *item_name = NULL;

    GF_MetaBox *meta = gf_isom_get_meta(file, root_meta, track_num);
    if (!meta || !meta->item_infos || !meta->item_locations) return GF_BAD_PARAM;

    if (out_mime) *out_mime = NULL;

    item_num = gf_isom_get_meta_item_by_id(file, root_meta, track_num, item_id);
    if (item_num) {
        GF_ItemInfoEntryBox *item_entry = (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, item_num-1);
        item_name = item_entry->item_name;
        if (out_mime) *out_mime = item_entry->content_type;
    }

    location_entry = NULL;
    count = gf_list_count(meta->item_locations->location_entries);
    for (i=0; i<count; i++) {
        location_entry = (GF_ItemLocationEntry *)gf_list_get(meta->item_locations->location_entries, i);
        if (location_entry->item_ID == item_id) break;
        location_entry = NULL;
    }

    if (!location_entry) return GF_BAD_PARAM;
    /*FIXME*/
    if (location_entry->data_reference_index) {
        char *item_url = NULL, *item_urn = NULL;
        GF_Box *a = (GF_Box *)gf_list_get(meta->file_locations->dref->boxList, location_entry->data_reference_index-1);
        if (a->type==GF_ISOM_BOX_TYPE_URL) {
            item_url = ((GF_DataEntryURLBox*)a)->location;
        } else if (a->type==GF_ISOM_BOX_TYPE_URN) {
            item_url = ((GF_DataEntryURNBox*)a)->location;
            item_urn = ((GF_DataEntryURNBox*)a)->nameURN;
        }
        GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item already outside the ISO file at URL: %s, URN: %s\n", (item_url?item_url:"N/A"), (item_urn?item_urn:"N/A") ));
        return GF_OK;
    }

    /*don't extract self-reference item*/
    count = gf_list_count(location_entry->extent_entries);
    if (!location_entry->base_offset && (count==1)) {
        extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, 0);
        if (!extent_entry->extent_length
#ifndef GPAC_DISABLE_ISOM_WRITE
                && !extent_entry->original_extent_offset
#endif
           ) return GF_BAD_PARAM;
    }

    item_bs = NULL;

    if (out_data) {
        item_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
    } else if (dump_file_name) {
        strcpy(szPath, dump_file_name);
        resource = gf_f64_open(szPath, "wb");
        item_bs = gf_bs_from_file(resource, GF_BITSTREAM_WRITE);
    } else {
        if (item_name) strcpy(szPath, item_name);
        else sprintf(szPath, "item_id%02d", item_id);
        resource = gf_f64_open(szPath, "wb");
        item_bs = gf_bs_from_file(resource, GF_BITSTREAM_WRITE);
    }

    for (i=0; i<count; i++) {
        char buf_cache[4096];
        u64 remain;
        GF_ItemExtentEntry *extent_entry = (GF_ItemExtentEntry *)gf_list_get(location_entry->extent_entries, i);
        gf_bs_seek(file->movieFileMap->bs, location_entry->base_offset + extent_entry->extent_offset);

        remain = extent_entry->extent_length;
        while (remain) {
            u32 cache_size = (remain>4096) ? 4096 : (u32) remain;
            gf_bs_read_data(file->movieFileMap->bs, buf_cache, cache_size);
            gf_bs_write_data(item_bs, buf_cache, cache_size);
            remain -= cache_size;
        }
    }
    if (out_data) {
        gf_bs_get_content(item_bs, out_data, out_size);
    }
    if (resource) {
        fclose(resource);
    }
    gf_bs_del(item_bs);
    return GF_OK;
}
Example #5
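swf_svg_show_frame flushes one SWF frame to SVG: it sorts the display list by depth (since z-index is poorly supported in SVG/CSS), emits a use element per shape, wraps the frame in a display animation keyed on the frame time and hands the serialized SVG buffer to the caller via add_sample.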
static GF_Err swf_svg_show_frame(SWFReader *read)
{
	u32     i;
	u32     len;
	GF_List *sdl = gf_list_new(); // sorted display list

	/* sorting the display list because SVG/CSS z-index is not well supported */
	while (gf_list_count(read->display_list))
	{
		Bool        inserted = GF_FALSE;
		DispShape   *s;

		s = (DispShape *)gf_list_get(read->display_list, 0);
		gf_list_rem(read->display_list, 0);

		for (i = 0; i < gf_list_count(sdl); i++)
		{
			DispShape *s2 = (DispShape *)gf_list_get(sdl, i);
			if (s->depth < s2->depth)
			{
				gf_list_insert(sdl, s, i);
				inserted = GF_TRUE;
				break;
			}
		}
		if (!inserted)
		{
			gf_list_add(sdl, s);
		}
	}
	gf_list_del(read->display_list);
	read->display_list = sdl;

	/* dumping the display list */
	len = gf_list_count(read->display_list);
	for (i=0; i<len; i++)
	{
		DispShape   *s;
		s = (DispShape *)gf_list_get(read->display_list, i);
		swf_svg_print(read, "<use xlink:href=\"#S%d\" ", s->char_id);
		//swf_svg_print(read, "z-index=\"%d\" ", s->depth);
		swf_svg_print_matrix(read, &s->mat);
		swf_svg_print(read, "/>\n");
		read->empty_frame = GF_FALSE;
	}
	if (!read->empty_frame) {
		read->print_frame_header = GF_TRUE;
		read->frame_header_offset = 0;
		swf_svg_print(read, "<g display=\"none\">\n");
		swf_svg_print(read, "<animate id=\"frame%d_anim\" attributeName=\"display\" to=\"inline\" ", read->current_frame);
		swf_svg_print(read, "begin=\"%g\" ", 1.0*(read->current_frame)/read->frame_rate);
		if (read->current_frame+1 < read->frame_count) {
			swf_svg_print(read, "end=\"frame%d_anim.begin\" fill=\"remove\" ", (read->current_frame+1));
		} else {
			swf_svg_print(read, "fill=\"freeze\" ");
		}
		swf_svg_print(read, "/>\n");
		read->print_frame_header = GF_FALSE;

		swf_svg_print(read, "</g>\n");
	}
	read->add_sample(read->user, read->svg_data, read->svg_data_size, read->current_frame*1000/read->frame_rate, (read->current_frame == 0));
	gf_free(read->svg_data);
	read->svg_data = NULL;
	read->svg_data_size = 0;

	read->empty_frame = GF_TRUE;
	return GF_OK;
}
Example #6
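mediasensor_update_timing refreshes all MediaSensor nodes attached to an object manager: sensors covering the whole object get isActive, mediaDuration and mediaCurrentTime updates, while sensors with segment descriptors are activated on the segment covering the current time and report the media time relative to that segment's start.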
void mediasensor_update_timing(GF_ObjectManager *odm, Bool is_eos)
{
	GF_Segment *desc;
	u32 i, count, j, ms_count;
	Double time;
	ms_count = gf_list_count(odm->ms_stack);
	if (!ms_count) return;

	time = odm->media_current_time / 1000.0;
	//dirty hack to get timing of frame when very late (openhevc debug)
	if (odm->subscene && odm->subscene->dyn_ck && odm->subscene->dyn_ck->last_TS_rendered)
		time = odm->subscene->dyn_ck->last_TS_rendered / 1000.0;

	for (j=0; j<ms_count; j++) {
		MediaSensorStack *media_sens = (MediaSensorStack *)gf_list_get(odm->ms_stack, j);
		if (!media_sens->is_init) continue;
		count = gf_list_count(media_sens->seg);

		/*full object controlled*/
		if (!media_sens->active_seg && !count) {
			/*check for end of scene (MediaSensor on inline)*/
			if (odm->subscene && odm->subscene->duration) {
				GF_Clock *ck = gf_odm_get_media_clock(odm);
				if (ck->has_seen_eos && (1000*time>=(Double) (s64)odm->subscene->duration)) {
					if (media_sens->sensor->isActive) {
						/*force notification of time (notify the scene duration rather than the current clock)*/
						media_sens->sensor->mediaCurrentTime = (Double) odm->subscene->duration;
						media_sens->sensor->mediaCurrentTime /= 1000;
						gf_node_event_out((GF_Node *) media_sens->sensor, 1/*"mediaCurrentTime"*/);
						media_sens->sensor->isActive = 0;
						gf_node_event_out((GF_Node *) media_sens->sensor, 4/*"isActive"*/);

						GF_LOG(GF_LOG_DEBUG, GF_LOG_INTERACT, ("[ODM%d] Deactivating media sensor\n", odm->OD->objectDescriptorID));
					}
					continue;
				}
			}

			if (!is_eos && !media_sens->sensor->isActive) {
				media_sens->sensor->isActive = 1;
				gf_node_event_out((GF_Node *) media_sens->sensor, 4/*"isActive"*/);

				if (odm->subscene) {
					media_sens->sensor->mediaDuration = (Double) (s64)odm->subscene->duration;
				} else {
					media_sens->sensor->mediaDuration = (Double) (s64)odm->duration;
				}
				if (media_sens->sensor->mediaDuration)
					media_sens->sensor->mediaDuration /= 1000;
				else
					media_sens->sensor->mediaDuration = -FIX_ONE;

				gf_node_event_out((GF_Node *) media_sens->sensor, 3/*"mediaDuration"*/);
			}

			if (is_eos && media_sens->sensor->isActive) {
				if (media_sens->sensor->mediaDuration>=0) {
					media_sens->sensor->mediaCurrentTime = media_sens->sensor->mediaDuration;
				} else {
					media_sens->sensor->mediaCurrentTime = time;
				}
				gf_node_event_out((GF_Node *) media_sens->sensor, 1/*"mediaCurrentTime"*/);
				media_sens->sensor->isActive = 0;
				gf_node_event_out((GF_Node *) media_sens->sensor, 4/*"isActive"*/);
			} else {
				if (media_sens->sensor->isActive && (media_sens->sensor->mediaCurrentTime != time)) {
					media_sens->sensor->mediaCurrentTime = time;
					gf_node_event_out((GF_Node *) media_sens->sensor, 1/*"mediaCurrentTime"*/);
				}
			}
			continue;
		}

		/*locate segment*/
		for (i=media_sens->active_seg; i<count; i++) {
			desc = (GF_Segment*)gf_list_get(media_sens->seg, i);
			/*not controlled*/
			if (desc->startTime > time) {
				if (media_sens->sensor->isActive) {
					media_sens->sensor->isActive = 0;
					gf_node_event_out((GF_Node *) media_sens->sensor, 4/*"isActive"*/);

					GF_LOG(GF_LOG_DEBUG, GF_LOG_INTERACT, ("[ODM%d] Deactivating media sensor at time %g - segment %s\n", odm->OD->objectDescriptorID, time, desc->SegmentName));
				}
				continue;
			}
			if (desc->startTime + desc->Duration < time) continue;
			if (desc->startTime + desc->Duration == time) {
				continue;
			}
			/*segment switch, force activation (isActive TRUE sent at each segment)*/
			if (media_sens->active_seg != i) {
				media_sens->active_seg = i;
				media_sens->sensor->isActive = 0;
			}

			if (!media_sens->sensor->isActive) {
				media_sensor_activate_segment(media_sens, desc);

				GF_LOG(GF_LOG_DEBUG, GF_LOG_INTERACT, ("[ODM%d] Activating media sensor time %g - segment %s\n", odm->OD->objectDescriptorID, time, desc->SegmentName));
			}

			/*set media time - relative to segment start time*/
			time -= desc->startTime;
			if (media_sens->sensor->mediaCurrentTime != time) {
				media_sens->sensor->mediaCurrentTime = time;
				gf_node_event_out((GF_Node *) media_sens->sensor, 1/*"mediaCurrentTime"*/);
			}
			break;
		}
		if (i==count) {
			/*we're after last segment, deactivate*/
			if (media_sens->sensor->isActive) {
				media_sens->sensor->isActive = 0;
				gf_node_event_out((GF_Node *) media_sens->sensor, 4/*"isActive"*/);
				media_sens->active_seg = count;
				GF_LOG(GF_LOG_DEBUG, GF_LOG_INTERACT, ("[ODM%d] Deactivating media sensor at time %g: no more segments\n", odm->OD->objectDescriptorID, time));
			}
		}
	}
}
Example #7
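gf_term_add_codec registers a decoder with the terminal's media manager: codecs that want (or are forced to use) their own thread get a dedicated thread and mutex, while the others are inserted into the codec list sorted by decreasing priority, with audio placed before video at equal priority.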
void gf_term_add_codec(GF_Terminal *term, GF_Codec *codec)
{
	u32 i, count;
	Bool threaded;
	CodecEntry *cd;
	CodecEntry *ptr, *next;
	GF_CodecCapability cap;
	assert(codec);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Registering codec %s\n", codec->decio ? codec->decio->module_name : "RAW"));

	/*caution: the mutex can be grabbed by a decoder waiting for a mutex owned by the calling thread;
	this happens when several scene codecs are running concurrently and triggering play/pause on media*/
	gf_mx_p(term->mm_mx);

	cd = mm_get_codec(term->codecs, codec);
	if (cd) goto exit;

	GF_SAFEALLOC(cd, CodecEntry);
	cd->dec = codec;
	if (!cd->dec->Priority)
		cd->dec->Priority = 1;

	/*we force audio codecs to be threaded in free mode, so that we avoid waiting in the audio renderer if another decoder is locking the main mutex
	this can happen when the audio decoder is running late*/
	if (codec->type==GF_STREAM_AUDIO) {
		threaded = 1;
	} else {
		cap.CapCode = GF_CODEC_WANTS_THREAD;
		cap.cap.valueInt = 0;
		gf_codec_get_capability(codec, &cap);
		threaded = cap.cap.valueInt;
	}

	if (threaded) cd->flags |= GF_MM_CE_REQ_THREAD;


	if (term->flags & GF_TERM_MULTI_THREAD) {
		if ((codec->type==GF_STREAM_AUDIO) || (codec->type==GF_STREAM_VISUAL)) threaded = 1;
	} else if (term->flags & GF_TERM_SINGLE_THREAD) {
		threaded = 0;
	}
	if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA)
		threaded = 0;

	if (threaded) {
		cd->thread = gf_th_new(cd->dec->decio->module_name);
		cd->mx = gf_mx_new(cd->dec->decio->module_name);
		cd->flags |= GF_MM_CE_THREADED;
		gf_list_add(term->codecs, cd);
		goto exit;
	}

	//add codec 1- per priority 2- per type, audio being first
	//priorities inherits from Systems (5bits) so range from 0 to 31
	//we sort from MAX to MIN
	count = gf_list_count(term->codecs);
	for (i=0; i<count; i++) {
		ptr = (CodecEntry*)gf_list_get(term->codecs, i);
		if (ptr->flags & GF_MM_CE_THREADED) continue;

		//higher priority, continue
		if (ptr->dec->Priority > codec->Priority) continue;

		//same priority, put audio first
		if (ptr->dec->Priority == codec->Priority) {
			//we insert audio (0x05) before video (0x04)
			if (ptr->dec->type < codec->type) {
				gf_list_insert(term->codecs, cd, i);
				goto exit;
			}
			//same priority, same type: insert after
			if (ptr->dec->type == codec->type) {
				if (i+1==count) {
					gf_list_add(term->codecs, cd);
				} else {
					gf_list_insert(term->codecs, cd, i+1);
				}
				goto exit;
			}
			//we insert video (0x04) after audio (0x05) if next is not audio
			//last one
			if (i+1 == count) {
				gf_list_add(term->codecs, cd);
				goto exit;
			}
			next = (CodecEntry*)gf_list_get(term->codecs, i+1);
			//next codec is threaded or has a different priority: insert here
			if ((next->flags & GF_MM_CE_THREADED) || (next->dec->Priority != codec->Priority)) {
				gf_list_insert(term->codecs, cd, i+1);
				goto exit;
			}
			//same priority level and at least one after : continue
			continue;
		}
		gf_list_insert(term->codecs, cd, i);
		goto exit;
	}
	//if we got here, first in list
	gf_list_add(term->codecs, cd);

exit:
	gf_mx_v(term->mm_mx);
	return;
}
Example #8
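generateNodeImpl2 is part of the SVG code generator: for each 'SANI' element description it emits the C constructor (including default values for inherited properties and complex attribute types), the destructor and the attribute-access function.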
void generateNodeImpl2(FILE *output, SVGGenElement* svg_elt) 
{
	u32 i;	

	/* Constructor */
	fprintf(output, "void *gf_svg_sani_new_%s()\n{\n\tSVG_SANI_%sElement *p;\n", svg_elt->implementation_name,svg_elt->implementation_name);
	fprintf(output, "\tGF_SAFEALLOC(p, SVG_SANI_%sElement);\n\tif (!p) return NULL;\n\tgf_node_setup((GF_Node *)p, TAG_SVG_SANI_%s);\n\tgf_sg_parent_setup((GF_Node *) p);\n",svg_elt->implementation_name,svg_elt->implementation_name);

	fprintf(output, "\tgf_svg_sani_init_core((SVG_SANI_Element *)p);\n");		
	if (svg_elt->has_focus) {
		fprintf(output, "\tgf_svg_sani_init_focus((SVG_SANI_Element *)p);\n");		
	} 
	if (svg_elt->has_xlink) {
		fprintf(output, "\tgf_svg_sani_init_xlink((SVG_SANI_Element *)p);\n");		
	} 
	if (svg_elt->has_timing) {
		fprintf(output, "\tgf_svg_sani_init_timing((SVG_SANI_Element *)p);\n");		
	} 
	if (svg_elt->has_sync) {
		fprintf(output, "\tgf_svg_sani_init_sync((SVG_SANI_Element *)p);\n");		
	}
	if (svg_elt->has_animation){
		fprintf(output, "\tgf_svg_sani_init_anim((SVG_SANI_Element *)p);\n");		
	} 
	if (svg_elt->has_conditional) {
		fprintf(output, "\tgf_svg_sani_init_conditional((SVG_SANI_Element *)p);\n");		
	} 

	if (svg_elt->has_transform) {
		fprintf(output, "\tgf_mx2d_init(p->transform.mat);\n");
	} 

	if (!strcmp(svg_elt->implementation_name, "conditional")) {
		fprintf(output, "\tgf_svg_sa_init_lsr_conditional(&p->updates);\n");
		fprintf(output, "\tgf_svg_sani_init_timing((SVG_SANI_Element *)p);\n");		

	} 

	for (i = 0; i < gf_list_count(svg_elt->attributes); i++) {
		SVGGenAttribute *att = gf_list_get(svg_elt->attributes, i);
		
		/* forcing initialization of old-properties */
		if (!strcmp("audio-level", att->svg_name)) {
			fprintf(output, "\tp->audio_level.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->audio_level.value = FIX_ONE;\n");
		} else if (!strcmp("display", att->svg_name)) {
			fprintf(output, "\tp->display = SVG_DISPLAY_INLINE;\n");
		} else if (!strcmp("display-align", att->svg_name)) {
			fprintf(output, "\tp->display_align = SVG_DISPLAYALIGN_AUTO;\n");
		} else if (!strcmp("fill", att->svg_name)) {
			fprintf(output, "\tp->fill.type = SVG_PAINT_COLOR;\n");
			fprintf(output, "\tp->fill.color.type = SVG_COLOR_RGBCOLOR;\n");
		} else if (!strcmp("fill-opacity", att->svg_name)) {
			fprintf(output, "\tp->fill_opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->fill_opacity.value = FIX_ONE;\n");
		} else if (!strcmp("fill-rule", att->svg_name)) {
			fprintf(output, "\tp->fill_rule = SVG_FILLRULE_NONZERO;\n");
		} else if (!strcmp("font-family", att->svg_name)) {
			fprintf(output, "\tp->font_family.type = SVG_FONTFAMILY_VALUE;\n");
			fprintf(output, "\tp->font_family.value = strdup(\"Arial\");\n");
		} else if (!strcmp("font-size", att->svg_name)) {
			fprintf(output, "\tp->font_size.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->font_size.value = 12*FIX_ONE;\n");
		} else if (!strcmp("font-style", att->svg_name)) {
			fprintf(output, "\tp->font_style = SVG_FONTSTYLE_NORMAL;\n");
		} else if (!strcmp("font-variant", att->svg_name)) {
			fprintf(output, "\tp->font_variant = SVG_FONTVARIANT_NORMAL;\n");
		} else if (!strcmp("font-weight", att->svg_name)) {
			fprintf(output, "\tp->font_weight = SVG_FONTWEIGHT_NORMAL;\n");
		} else if (!strcmp("line-increment", att->svg_name)) {
			fprintf(output, "\tp->line_increment.type = SVG_NUMBER_AUTO;\n");
			fprintf(output, "\tp->line_increment.value = FIX_ONE;\n");
		} else if (!strcmp("opacity", att->svg_name)) {
			fprintf(output, "\tp->opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->opacity.value = FIX_ONE;\n");
		} else if (!strcmp("solid-color", att->svg_name)) {
			fprintf(output, "\tp->solid_color.type = SVG_PAINT_COLOR;\n");
			fprintf(output, "\tp->solid_color.color.type = SVG_COLOR_RGBCOLOR;\n");
		} else if (!strcmp("solid-opacity", att->svg_name)) {
			fprintf(output, "\tp->solid_opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->solid_opacity.value = FIX_ONE;\n");
		} else if (!strcmp("solid-color", att->svg_name)) {
			fprintf(output, "\tp->stop_color.type = SVG_PAINT_COLOR;\n");
			fprintf(output, "\tp->stop_color.color.type = SVG_COLOR_RGBCOLOR;\n");
		} else if (!strcmp("stop-opacity", att->svg_name)) {
			fprintf(output, "\tp->stop_opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->stop_opacity.value = FIX_ONE;\n");
		} else if (!strcmp("stroke", att->svg_name)) {
			fprintf(output, "\tp->stroke.type = SVG_PAINT_NONE;\n");
			fprintf(output, "\tp->stroke.color.type = SVG_COLOR_RGBCOLOR;\n");
		} else if (!strcmp("stroke-dasharray", att->svg_name)) {
			fprintf(output, "\tp->stroke_dasharray.type = SVG_STROKEDASHARRAY_NONE;\n");
		} else if (!strcmp("stroke-dashoffset", att->svg_name)) {
			fprintf(output, "\tp->stroke_dashoffset.type = SVG_NUMBER_VALUE;\n");
		} else if (!strcmp("stroke-linecap", att->svg_name)) {
			fprintf(output, "\tp->stroke_linecap = SVG_STROKELINECAP_BUTT;\n");
		} else if (!strcmp("stroke-linejoin", att->svg_name)) {
			fprintf(output, "\tp->stroke_linejoin = SVG_STROKELINEJOIN_MITER;\n");
		} else if (!strcmp("stroke-miterlimit", att->svg_name)) {
			fprintf(output, "\tp->stroke_miterlimit.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->stroke_miterlimit.value = 4*FIX_ONE;\n");
		} else if (!strcmp("stroke-opacity", att->svg_name)) {
			fprintf(output, "\tp->stroke_opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->stroke_opacity.value = FIX_ONE;\n");
		} else if (!strcmp("stroke-width", att->svg_name)) {
			fprintf(output, "\tp->stroke_width.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->stroke_width.value = FIX_ONE;\n");
		} else if (!strcmp("text-align", att->svg_name)) {
			fprintf(output, "\tp->text_align = SVG_TEXTALIGN_START;\n");
		} else if (!strcmp("text-anchor", att->svg_name)) {
			fprintf(output, "\tp->text_anchor = SVG_TEXTANCHOR_START;\n");
		} else if (!strcmp("vector-effect", att->svg_name)) {
			fprintf(output, "\tp->vector_effect = SVG_VECTOREFFECT_NONE;\n");
		} else if (!strcmp("viewport-fill", att->svg_name)) {
			fprintf(output, "\tp->viewport_fill.type = SVG_PAINT_NONE;\n");
		} else if (!strcmp("viewport-fill-opacity", att->svg_name)) {
			fprintf(output, "\tp->viewport_fill_opacity.type = SVG_NUMBER_VALUE;\n");
			fprintf(output, "\tp->viewport_fill_opacity.value = FIX_ONE;\n");
		} else if (!strcmp("visibility", att->svg_name)) {
			fprintf(output, "\tp->visibility = SVG_VISIBILITY_VISIBLE;\n");
		}

		/* Initialization of complex types */
		if ( !strcmp("SVG_Points", att->impl_type) || 
			 !strcmp("SVG_Coordinates", att->impl_type) ||
			 !strcmp("SMIL_KeyPoints", att->impl_type)) {
			fprintf(output, "\tp->%s = gf_list_new();\n", att->implementation_name);
		} else if (!strcmp("SVG_PathData", att->impl_type) && !strcmp(svg_elt->svg_name, "animateMotion")) {
			fprintf(output, "#ifdef USE_GF_PATH\n");
			fprintf(output, "\tgf_path_reset(&p->path);\n");
			fprintf(output, "#else\n");
			fprintf(output, "\tp->path.commands = gf_list_new();\n");
			fprintf(output, "\tp->path.points = gf_list_new();\n");
			fprintf(output, "#endif\n");
		} else if (!strcmp("SVG_PathData", att->impl_type)) {
			fprintf(output, "#ifdef USE_GF_PATH\n");
			fprintf(output, "\tgf_path_reset(&p->d);\n");
			fprintf(output, "#else\n");
			fprintf(output, "\tp->d.commands = gf_list_new();\n");
			fprintf(output, "\tp->d.points = gf_list_new();\n");
			fprintf(output, "#endif\n");
		} else if (!strcmp(att->svg_name, "lsr:enabled")) {
			fprintf(output, "\tp->lsr_enabled = 1;\n");
		} 
	}
	/*some default values*/
	if (!strcmp(svg_elt->svg_name, "svg")) {
		fprintf(output, "\tp->width.type = SVG_NUMBER_PERCENTAGE;\n");
		fprintf(output, "\tp->width.value = INT2FIX(100);\n");
		fprintf(output, "\tp->height.type = SVG_NUMBER_PERCENTAGE;\n");
		fprintf(output, "\tp->height.value = INT2FIX(100);\n");
	}
	else if (!strcmp(svg_elt->svg_name, "linearGradient")) {
		fprintf(output, "\tp->x2.value = FIX_ONE;\n");
		fprintf(output, "\tgf_mx2d_init(p->gradientTransform.mat);\n");
	}
	else if (!strcmp(svg_elt->svg_name, "radialGradient")) {
		fprintf(output, "\tp->cx.value = FIX_ONE/2;\n");
		fprintf(output, "\tp->cy.value = FIX_ONE/2;\n");
		fprintf(output, "\tp->r.value = FIX_ONE/2;\n");
		fprintf(output, "\tgf_mx2d_init(p->gradientTransform.mat);\n");
		fprintf(output, "\tp->fx.value = FIX_ONE/2;\n");
		fprintf(output, "\tp->fy.value = FIX_ONE/2;\n");
	}
	else if (!strcmp(svg_elt->svg_name, "video") || !strcmp(svg_elt->svg_name, "audio") || !strcmp(svg_elt->svg_name, "animation")) {
		fprintf(output, "\tp->timing->dur.type = SMIL_DURATION_MEDIA;\n");
	}
	fprintf(output, "\treturn p;\n}\n\n");

	/* Destructor */
	fprintf(output, "static void gf_svg_sani_%s_del(GF_Node *node)\n{\n", svg_elt->implementation_name);
	fprintf(output, "\tSVG_SANI_%sElement *p = (SVG_SANI_%sElement *)node;\n", svg_elt->implementation_name, svg_elt->implementation_name);
	fprintf(output, "\tgf_svg_sani_reset_base_element((SVG_SANI_Element *)p);\n");

	if (!strcmp(svg_elt->implementation_name, "conditional")) {
		fprintf(output, "\tgf_svg_sa_reset_lsr_conditional(&p->updates);\n");
	} 
	else if (!strcmp(svg_elt->implementation_name, "a")) {
		fprintf(output, "\tif (p->target) free(p->target);\n");
	} 

	for (i = 0; i < gf_list_count(svg_elt->attributes); i++) {
		SVGGenAttribute *att = gf_list_get(svg_elt->attributes, i);
		if (!strcmp("SMIL_KeyPoints", att->impl_type)) {
			fprintf(output, "\tgf_smil_delete_key_types(p->%s);\n", att->implementation_name);
		} else if (!strcmp("SVG_Coordinates", att->impl_type)) {
			fprintf(output, "\tgf_svg_delete_coordinates(p->%s);\n", att->implementation_name);
		} else if (!strcmp("SVG_Points", att->impl_type)) {
			fprintf(output, "\tgf_svg_delete_points(p->%s);\n", att->implementation_name);
		} else if (!strcmp("SVG_PathData", att->impl_type)) {
			if (!strcmp(svg_elt->svg_name, "animateMotion")) {
				fprintf(output, "\tgf_svg_reset_path(p->path);\n");
			} else {
				fprintf(output, "\tgf_svg_reset_path(p->d);\n");
			}
		} else if (!strcmp("XMLRI", att->impl_type)) {
			fprintf(output, "\tgf_svg_reset_iri(node->sgprivate->scenegraph, &p->%s);\n", att->implementation_name);
		} else if (!strcmp("SVG_FontFamily", att->impl_type)) {
			fprintf(output, "\tif (p->%s.value) free(p->%s.value);\n", att->implementation_name, att->implementation_name);
		} else if (!strcmp("SVG_String", att->impl_type) || !strcmp("SVG_ContentType", att->impl_type)) {
			fprintf(output, "\tfree(p->%s);\n", att->implementation_name);
		}
	}
	if (svg_elt->has_transform) {
		fprintf(output, "\tif (p->motionTransform) free(p->motionTransform);\n");
	} 

	fprintf(output, "\tgf_sg_parent_reset((GF_Node *) p);\n");
	fprintf(output, "\tgf_node_free((GF_Node *)p);\n");
	fprintf(output, "}\n\n");

	/* Attribute Access */
	fprintf(output, "static GF_Err gf_svg_sani_%s_get_attribute(GF_Node *node, GF_FieldInfo *info)\n{\n", svg_elt->implementation_name);
	fprintf(output, "\tswitch (info->fieldIndex) {\n");
	svg_elt->nb_atts = 0;
	svg_elt->nb_atts = generateCoreInfo(output, svg_elt, svg_elt->nb_atts);

	if (svg_elt->has_focus) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 4, "((SVG_SANI_Element *)node)->focus->", svg_elt->nb_atts);
	if (svg_elt->has_xlink) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 5, "((SVG_SANI_Element *)node)->xlink->", svg_elt->nb_atts);
	if (svg_elt->has_timing) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 6, "((SVG_SANI_Element *)node)->timing->", svg_elt->nb_atts);
	if (svg_elt->has_sync) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 7, "((SVG_SANI_Element *)node)->sync->", svg_elt->nb_atts);
	if (svg_elt->has_animation) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 8, "((SVG_SANI_Element *)node)->anim->", svg_elt->nb_atts);
	if (svg_elt->has_conditional) 
		svg_elt->nb_atts = generateGenericInfo(output, svg_elt, 9, "((SVG_SANI_Element *)node)->conditional->", svg_elt->nb_atts);
	if (svg_elt->has_transform) {
		svg_elt->nb_atts = generateTransformInfo2(output, svg_elt, svg_elt->nb_atts);
		svg_elt->nb_atts = generateMotionTransformInfo2(output, svg_elt, svg_elt->nb_atts);
	}
	if (svg_elt->has_xy) 
		svg_elt->nb_atts = generateXYInfo2(output, svg_elt, svg_elt->nb_atts);

	for (i = 0; i < gf_list_count(svg_elt->attributes); i++) {
		SVGGenAttribute *att = gf_list_get(svg_elt->attributes, i);
		generateAttributeInfo2(output, svg_elt->implementation_name, att, svg_elt->nb_atts++);
	}
	fprintf(output, "\t\tdefault: return GF_BAD_PARAM;\n\t}\n}\n\n");

}
Example #9
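generateSVGCode_V2 drives the SVG 'SANI' code generation: it writes the tag enumeration and element structure definitions to a header, then the per-element implementations plus the generic create/delete, attribute-count/info, name-lookup and transformability functions.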
void generateSVGCode_V2(GF_List *svg_elements) 
{
	FILE *output;
	u32 i;

	output = BeginFile(0);
	fprintf(output, "#include <gpac/scenegraph_svg.h>\n\n\n");
	fprintf(output, "/* Definition of SVG 2 Alternate element internal tags */\n");
	fprintf(output, "/* TAG names are made of \"TAG_SVG_SANI_\" + SVG element name (with - replaced by _) */\n");

	/* write all tags */
	fprintf(output, "enum {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		if (i == 0) {
			fprintf(output, "\tTAG_SVG_SANI_%s = GF_NODE_RANGE_FIRST_SVG_SANI", elt->implementation_name);
		} else {
			fprintf(output, ",\n\tTAG_SVG_SANI_%s", elt->implementation_name);
		}
	}
	
	fprintf(output, ",\n\t/*undefined elements (when parsing) use this tag*/\n\tTAG_SVG_SANI_UndefinedElement\n};\n\n");

	fprintf(output, "/******************************************\n");
 	fprintf(output, "*   SVG_SANI_ Elements structure definitions    *\n");
 	fprintf(output, "*******************************************/\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		generateNode2(output, elt);
	}
	EndFile(output, 0);

	output = BeginFile(1);
	fprintf(output, "#include <gpac/nodes_svg_sani.h>\n\n");

	fprintf(output, "#ifndef GPAC_DISABLE_SVG\n\n");
	fprintf(output, "#include <gpac/internal/scenegraph_dev.h>\n\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		generateNodeImpl2(output, elt);
	}

	fprintf(output, "SVG_SANI_Element *gf_svg_sani_create_node(u32 ElementTag)\n");
	fprintf(output, "{\n");
	fprintf(output, "\tswitch (ElementTag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\t\tcase TAG_SVG_SANI_%s: return (SVG_SANI_Element*) gf_svg_sani_new_%s();\n",elt->implementation_name,elt->implementation_name);
	}
	fprintf(output, "\t\tdefault: return NULL;\n\t}\n}\n\n");
	
	fprintf(output, "void gf_svg_sani_element_del(SVG_SANI_Element *elt)\n{\n");
	fprintf(output, "\tGF_Node *node = (GF_Node *)elt;\n");
	fprintf(output, "\tswitch (node->sgprivate->tag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\t\tcase TAG_SVG_SANI_%s: gf_svg_sani_%s_del(node); return;\n", elt->implementation_name, elt->implementation_name);
	}
	fprintf(output, "\t\tdefault: return;\n\t}\n}\n\n");

	fprintf(output, "u32 gf_svg_sani_get_attribute_count(GF_Node *node)\n{\n");
	fprintf(output, "\tswitch (node->sgprivate->tag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\t\tcase TAG_SVG_SANI_%s: return %i;\n", elt->implementation_name, elt->nb_atts);
	}
	fprintf(output, "\t\tdefault: return 0;\n\t}\n}\n\n");
	
	fprintf(output, "GF_Err gf_svg_sani_get_attribute_info(GF_Node *node, GF_FieldInfo *info)\n{\n");
	fprintf(output, "\tswitch (node->sgprivate->tag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\t\tcase TAG_SVG_SANI_%s: return gf_svg_sani_%s_get_attribute(node, info);\n", elt->implementation_name, elt->implementation_name);
	}
	fprintf(output, "\t\tdefault: return GF_BAD_PARAM;\n\t}\n}\n\n");

	fprintf(output, "u32 gf_svg_sani_type_by_class_name(const char *element_name)\n{\n\tif (!element_name) return TAG_UndefinedNode;\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\tif (!stricmp(element_name, \"%s\")) return TAG_SVG_SANI_%s;\n", elt->svg_name, elt->implementation_name);
	}
	fprintf(output, "\treturn TAG_UndefinedNode;\n}\n\n");

	fprintf(output, "const char *gf_svg_sani_get_element_name(u32 tag)\n{\n\tswitch(tag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\tcase TAG_SVG_SANI_%s: return \"%s\";\n", elt->implementation_name, elt->svg_name);
	}
	fprintf(output, "\tdefault: return \"UndefinedNode\";\n\t}\n}\n\n");

	fprintf(output, "Bool gf_svg_sani_is_element_transformable(u32 tag)\n{\n\tswitch(tag) {\n");
	for (i=0; i<gf_list_count(svg_elements); i++) {
		SVGGenElement *elt = (SVGGenElement *)gf_list_get(svg_elements, i);
		fprintf(output, "\tcase TAG_SVG_SANI_%s:", elt->implementation_name);
		if (elt->has_transform) fprintf(output, "return 1;\n");
		else fprintf(output, "return 0;\n");
	}
	fprintf(output, "\tdefault: return 0;\n\t}\n}\n");

	fprintf(output, "#endif /*GPAC_DISABLE_SVG*/\n\n");
	EndFile(output, 1); 

	generate_laser_tables(svg_elements);
}
Example #10
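validator_xvs_close closes the current validation script: in recording mode it stores the test file name in the 'file' attribute and serializes the script to disk, otherwise it writes the pass/fail result back into the XVL entry, then frees the DOM parser and resets the validator state.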
static void validator_xvs_close(GF_Validator *validator)
{
	if (validator->xvs_parser) {
		if (validator->is_recording) {
			FILE *xvs_fp;
			char *xvs_content;
			char filename[100];
			GF_XMLAttribute *att;
			GF_XMLAttribute *att_file = NULL;
			u32 att_index = 0;
			while (1) {
				att = gf_list_get(validator->xvs_node->attributes, att_index);
				if (!att) {
					break;
				} else if (!strcmp(att->name, "file")) {
					att_file = att;
				}
				att_index++;
			}

			if (!att_file) {
				GF_SAFEALLOC(att, GF_XMLAttribute);
				att->name = gf_strdup("file");
				gf_list_add(validator->xvs_node->attributes, att);
			} else {
				att = att_file;
				if (att->value) gf_free(att->value);
			}
			sprintf(filename, "%s%c%s", validator->test_base, GF_PATH_SEPARATOR, validator->test_filename);
			att->value = gf_strdup(filename);
			xvs_content = gf_xml_dom_serialize(validator->xvs_node, 0);
			xvs_fp = gf_f64_open(validator->xvs_filename, "wt");
			gf_fwrite(xvs_content, strlen(xvs_content), 1, xvs_fp);
			fclose(xvs_fp);
			gf_free(xvs_content);
		} else {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("[Validator] XVS Result : %s\n", (validator->xvs_result?"Success":"Failure")));
			if (validator->xvs_node_in_xvl) {
				GF_XMLAttribute *att;
				GF_XMLAttribute *att_result = NULL;
				u32 att_index = 0;
				while (1) {
					att = gf_list_get(validator->xvs_node_in_xvl->attributes, att_index);
					if (!att) {
						break;
					} else if (!strcmp(att->name, "result")) {
						att_result = att;
					}
					att_index++;
				}
				if (!att_result) {
					GF_SAFEALLOC(att_result, GF_XMLAttribute);
					att_result->name = gf_strdup("result");
					gf_list_add(validator->xvs_node_in_xvl->attributes, att_result);
				}
				if (att_result->value) gf_free(att_result->value);
				att_result->value = gf_strdup(validator->xvs_result ? "pass" : "fail");
			}
		}
		gf_xml_dom_del(validator->xvs_parser);
		validator->xvs_parser = NULL;
	}
	validator->xvs_node = NULL;
	validator->xvs_node_in_xvl = NULL;
	validator->xvs_filename = NULL;
	validator->test_filename = NULL;
	validator->ck = NULL;
	validator->xvs_event_index = 0;
	validator->snapshot_number = 0;
}
Example #11
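validator_load_event reads the next event from the validation script: it skips non-element nodes, recognizes snapshot requests, maps the element name to a DOM event type and fills the event fields (time, mouse button and position, wheel, modifiers, key identifier, unicode character) from the attributes.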
static Bool validator_load_event(GF_Validator *validator)
{
	GF_XMLNode *event_node;
	GF_XMLAttribute *att;
	u32 att_index;

	memset(&validator->next_event, 0, sizeof(GF_Event));
	validator->evt_loaded = 0;
	validator->next_event_snapshot = 0;

	if (!validator->xvs_node) return 0;

	while (1) {
		event_node = gf_list_get(validator->xvs_node->content, validator->xvs_event_index);
		if (!event_node) {
			return 0;
		} else if (event_node->type == GF_XML_NODE_TYPE) {
			validator->xvs_event_index++;
			break;
		} else {
			validator->xvs_event_index++;
		}
	}

	if (!strcmp(event_node->name, "snapshot")) {
		validator->next_event_snapshot = 1;
	} else {
		validator->next_event.type = gf_dom_event_type_by_name(event_node->name);
		if (validator->next_event.type == GF_EVENT_UNKNOWN) {
			return 1;
		}
	}

	att_index = 0;
	while (1) {
		att = gf_list_get(event_node->attributes, att_index);
		if (!att) break;
		if (!strcmp(att->name, "time")) {
			validator->next_time = atoi(att->value);
		} else if (!strcmp(att->name, "button")) {
			if (!strcmp(att->value, "Left")) {
				validator->next_event.mouse.button = 0;
			} else if (!strcmp(att->value, "Middle")) {
				validator->next_event.mouse.button = 1;
			} else if (!strcmp(att->value, "Right")) {
				validator->next_event.mouse.button = 2;
			}
		} else if (!strcmp(att->name, "x")) {
			validator->next_event.mouse.x = atoi(att->value);
		} else if (!strcmp(att->name, "y")) {
			validator->next_event.mouse.y = atoi(att->value);
		} else if (!strcmp(att->name, "wheel_pos")) {
			validator->next_event.mouse.wheel_pos = FLT2FIX(atof(att->value));
		} else if (!strcmp(att->name, "shift") && !strcmp(att->value, "true")) {
			validator->next_event.mouse.key_states |= GF_KEY_MOD_SHIFT;
		} else if (!strcmp(att->name, "alt") && !strcmp(att->value, "true")) {
			validator->next_event.mouse.key_states |= GF_KEY_MOD_ALT;
		} else if (!strcmp(att->name, "ctrl") && !strcmp(att->value, "true")) {
			validator->next_event.mouse.key_states |= GF_KEY_MOD_CTRL;
		} else if (!strcmp(att->name, "key_identifier")) {
			validator->next_event.key.key_code = gf_dom_get_key_type(att->value);
		} else if (!strcmp(att->name, "unicode-char")) {
			validator->next_event.character.unicode_char = atoi(att->value);
		}
		att_index++;
	}
	validator->evt_loaded = 1;
	return 1;
}
Example #12
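validator_xvs_open opens a validation script: it parses the XVS file (or creates an empty root node when recording), derives the test file name and base path from the 'file' attribute if they are not already known, and in recording mode discards any prerecorded interactions.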
static Bool validator_xvs_open(GF_Validator *validator)
{
	GF_Err e;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("[Validator] Opening Validation Script: %s\n", validator->xvs_filename));
	validator->snapshot_number = 0;
	validator->xvs_parser = gf_xml_dom_new();
	e = gf_xml_dom_parse(validator->xvs_parser, validator->xvs_filename, NULL, NULL);
	if (e != GF_OK) {
		if (validator->is_recording) {
			GF_SAFEALLOC(validator->xvs_node, GF_XMLNode);
			validator->xvs_node->name = gf_strdup("TestValidationScript");
			validator->xvs_node->attributes = gf_list_new();
			validator->xvs_node->content = gf_list_new();
		} else {
			gf_xml_dom_del(validator->xvs_parser);
			validator->xvs_parser = NULL;
			return 0;
		}
	} else {
		validator->xvs_node = gf_xml_dom_get_root(validator->xvs_parser);
	}
	/* Get the file name from the XVS if not found in the XVL */
	if (!validator->test_filename) {
		GF_XMLAttribute *att;
		GF_XMLAttribute *att_file;
		u32 att_index = 0;
		att_file = NULL;
		while (1) {
			att = gf_list_get(validator->xvs_node->attributes, att_index);
			if (!att) {
				break;
			} else if (!strcmp(att->name, "file")) {
				att_file = att;
			}
			att_index++;
		}
		if (!att_file) {
			gf_xml_dom_del(validator->xvs_parser);
			validator->xvs_parser = NULL;
			validator->xvs_node = NULL;
			return 0;
		} else {
			char *sep;
			sep = strrchr(att_file->value, GF_PATH_SEPARATOR);
			if (!sep) {
				validator->test_filename = att_file->value;
			} else {
				sep[0] = 0;
				validator->test_base = gf_strdup(att_file->value);
				sep[0] = GF_PATH_SEPARATOR;
				validator->test_filename = sep+1;
			}
		}
	}
	if (validator->is_recording) {
		GF_XMLNode *node;
		/* Removing prerecorded interactions */
		while (gf_list_count(validator->xvs_node->content)) {
			GF_XMLNode *child = (GF_XMLNode *)gf_list_last(validator->xvs_node->content);
			gf_list_rem_last(validator->xvs_node->content);
			gf_xml_dom_node_del(child);
		}
		/* adding an extra text node for line break in serialization */
		GF_SAFEALLOC(node, GF_XMLNode);
		node->type = GF_XML_TEXT_TYPE;
		node->name = gf_strdup("\n");
		gf_list_add(validator->xvs_node->content, node);
	} else {
		validator->xvs_result = 1;
	}
	return 1;
}
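Putting the two previous examples together, a validation script accepted by this function might be shaped roughly as follows; only the element and attribute names ("TestValidationScript", "file") are taken from the code, everything else is invented:

/* Hypothetical XVS skeleton:

   <TestValidationScript file="somedir/sometest.bt">
     <mousedown time="2000" x="120" y="80" button="Left"/>
     <snapshot time="3000"/>
   </TestValidationScript>
*/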
Example #13
static void svg_traverse_text(GF_Node *node, void *rs, Bool is_destroy)
{
	SVGPropertiesPointers backup_props;
	u32 backup_flags;
	GF_Matrix2D backup_matrix;
	GF_Matrix mx3d;
	GF_ChildNodeItem *child;
	DrawableContext *ctx;
	SVG_TextStack *st = (SVG_TextStack *)gf_node_get_private(node);
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	SVG_Element *text = (SVG_Element *)node;
	SVGAllAttributes atts;
	u32 i,imax;
	Fixed * lw;

	if (is_destroy) {
		drawable_del(st->drawable);
		svg_reset_text_stack(st);
		gf_list_del(st->spans);
		gf_free(st);
		return;
	}

	if (tr_state->traversing_mode==TRAVERSE_DRAW_2D) {
		svg_text_draw_2d(st, tr_state);
		return;
	}
	else if (tr_state->traversing_mode==TRAVERSE_GET_TEXT) {
		tr_state->text_parent = node;
		gf_font_spans_get_selection(node, st->spans, tr_state);
		/*and browse children*/
		child = ((GF_ParentNode *) text)->children;
		while (child) {
			switch  (gf_node_get_tag(child->node)) {
			case TAG_SVG_tspan:
				gf_node_traverse(child->node, tr_state); 
				break;
			}
			child = child->next;
		}
		tr_state->text_parent = NULL;
		return;
	}

	gf_svg_flatten_attributes(text, &atts);
	if (!compositor_svg_traverse_base(node, &atts, tr_state, &backup_props, &backup_flags))
		return;

	tr_state->in_svg_text++;
	tr_state->text_parent = node;

	if (tr_state->traversing_mode==TRAVERSE_PICK) {
		compositor_svg_apply_local_transformation(tr_state, &atts, &backup_matrix, &mx3d);
		if (*tr_state->svg_props->pointer_events!=SVG_POINTEREVENTS_NONE) 
			gf_font_spans_pick(node, st->spans, tr_state, &st->bounds, 1, st->drawable);

		/*and browse children*/
		child = ((GF_ParentNode *) text)->children;
		while (child) {
			switch  (gf_node_get_tag(child->node)) {
			case TAG_SVG_tspan:
				gf_node_traverse(child->node, tr_state); 
				break;
			}
			child = child->next;
		}
		memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
		compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx3d);
		tr_state->svg_flags = backup_flags;
		tr_state->text_parent = NULL;
		tr_state->in_svg_text--;
		return;
	}
	else if (tr_state->traversing_mode==TRAVERSE_GET_TEXT) {
		gf_font_spans_get_selection(node, st->spans, tr_state);
		memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
		tr_state->svg_flags = backup_flags;
		tr_state->text_parent = NULL;
		tr_state->in_svg_text--;
		return;
	}

	compositor_svg_apply_local_transformation(tr_state, &atts, &backup_matrix, &mx3d);

	if ( (st->prev_size != tr_state->svg_props->font_size->value) || 
		 (st->prev_flags != *tr_state->svg_props->font_style) || 
		 (st->prev_anchor != *tr_state->svg_props->text_anchor) ||
		 (gf_node_dirty_get(node) & (GF_SG_SVG_GEOMETRY_DIRTY | GF_SG_CHILD_DIRTY) ) 
		 || tr_state->visual->compositor->reset_fonts
	) {
		u32 mode;
		child = ((GF_ParentNode *) text)->children;

		svg_reset_text_stack(st);
		tr_state->text_end_x = 0;
		tr_state->text_end_y = 0;
		/*init the xml:space algo*/
		tr_state->last_char_type = 0;

		/*initialize x and y counters - stored at the traverse level for handling tspan & co*/
		if (atts.text_x) tr_state->count_x = gf_list_count(*atts.text_x);
		else tr_state->count_x=0;
		if (atts.text_y) tr_state->count_y = gf_list_count(*atts.text_y);
		else tr_state->count_y=0;
		if (atts.text_rotate) tr_state->count_rotate = gf_list_count(*atts.text_rotate);
		else tr_state->count_rotate=0;

		/*horizontal justifiers container*/
		tr_state->x_anchors = gf_list_new();

		/*compute length of all text blocks*/
		while (child) {
			svg_compute_text_width(child->node, &atts, tr_state);
			child=child->next;
		}

		/*apply justification of all blocks*/
		imax=gf_list_count(tr_state->x_anchors);
		for (i=0;i<imax;i++){
			lw=gf_list_get(tr_state->x_anchors, i);
			svg_apply_text_anchor(tr_state, lw);
		}

		/*re-initialize x and y counters for final compute*/
		if (atts.text_x) tr_state->count_x = gf_list_count(*atts.text_x);
		else tr_state->count_x=0;
		if (atts.text_y) tr_state->count_y = gf_list_count(*atts.text_y);
		else tr_state->count_y=0;
		if (atts.text_rotate) tr_state->count_rotate = gf_list_count(*atts.text_rotate);
		else tr_state->count_rotate=0;
		tr_state->idx_rotate = 0;
		tr_state->chunk_index = 0;

		/*initialize current text position*/
		if (!tr_state->text_end_x){
			SVG_Coordinate *xc = (atts.text_x ? (SVG_Coordinate *) gf_list_get(*atts.text_x, 0) : NULL);
			tr_state->text_end_x = (xc ? xc->value : 0);
		}
		if (!tr_state->text_end_y){
			SVG_Coordinate *yc = (atts.text_y ? (SVG_Coordinate *) gf_list_get(*atts.text_y, 0) : NULL);
			tr_state->text_end_y = (yc ? yc->value : 0);
		}

		/*pass x and y to children*/
		tr_state->text_x = atts.text_x;
		tr_state->text_y = atts.text_y;
		tr_state->text_rotate = atts.text_rotate;
		
		drawable_reset_path(st->drawable);
		
		/*switch to bounds mode, and recompute children*/
		mode = tr_state->traversing_mode;
		tr_state->traversing_mode = TRAVERSE_GET_BOUNDS;
		tr_state->last_char_type = 0;

		child = ((GF_ParentNode *) text)->children;
		while (child) {
			svg_traverse_text_block(child->node, &atts, tr_state, st->spans);
			child = child->next;
		}
		tr_state->traversing_mode = mode;
		gf_node_dirty_clear(node, 0);
		drawable_mark_modified(st->drawable, tr_state);
		st->prev_size = tr_state->svg_props->font_size->value;
		st->prev_flags = *tr_state->svg_props->font_style;
		st->prev_anchor = *tr_state->svg_props->text_anchor;

		while (gf_list_count(tr_state->x_anchors)) {
			Fixed *f = gf_list_last(tr_state->x_anchors);
			gf_list_rem_last(tr_state->x_anchors);
			gf_free(f);
		}
		gf_list_del(tr_state->x_anchors);
		tr_state->x_anchors = NULL;
	
		svg_update_bounds(st);
	} 

	if (tr_state->traversing_mode == TRAVERSE_GET_BOUNDS) {
		if (!compositor_svg_is_display_off(tr_state->svg_props))
			tr_state->bounds = st->bounds;

	} else if ((tr_state->traversing_mode == TRAVERSE_SORT) 
		&& !compositor_svg_is_display_off(tr_state->svg_props) 
		&& (*(tr_state->svg_props->visibility) != SVG_VISIBILITY_HIDDEN) 
		) {
		ctx = drawable_init_context_svg(st->drawable, tr_state);
		if (ctx) svg_finalize_sort(ctx, st, tr_state);

		/*and browse children*/
		child = ((GF_ParentNode *) text)->children;
		while (child) {
			switch  (gf_node_get_tag(child->node)) {
			case TAG_SVG_tspan:
				gf_node_traverse(child->node, tr_state); 
				break;
			case TAG_SVG_switch:
				gf_node_traverse(child->node, tr_state); 
				break;
			}
			child = child->next;
		}
	}
	tr_state->in_svg_text--;
	tr_state->text_parent = NULL;

	compositor_svg_restore_parent_transformation(tr_state, &backup_matrix, &mx3d);
	memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
	tr_state->svg_flags = backup_flags;
}
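The x/y/rotate counters above consume per-glyph coordinate lists before falling back to block layout; an illustrative SVG fragment of the kind of content this traversal handles (values invented, kept as a C comment):

/* Hypothetical SVG text content:

   <text x="10 40 70" y="100">abc<tspan>def</tspan></text>

   The three x values position the first three glyphs individually; once the
   coordinate lists are exhausted, the remaining glyphs are laid out as a
   single block starting at tr_state->text_end_x (see the next example). */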
Example #14
void svg_traverse_domtext(GF_Node *node, SVGAllAttributes *atts, GF_TraverseState *tr_state, GF_List *spans, GF_Node *anchor_node)
{
	GF_DOMText *dom_text = (GF_DOMText *)node;
	Fixed x, y;
	u32 i;
	Fixed x_anchor, *ptr;
	GF_Font *font;
	Fixed block_width;
	GF_FontManager *fm;
	GF_TextSpan *span;

	if (!dom_text->textContent) return;

	if (tr_state->in_svg_text_area) {
		svg_traverse_dom_text_area(node, atts, tr_state, spans);
		return;
	}

	fm = tr_state->visual->compositor->font_manager;
	if (!fm) return;

	font = svg_set_font(tr_state, fm);
	if (!font) return;
	if (font->not_loaded) {
		tr_state->visual->compositor->reset_fonts = 1;
		tr_state->visual->compositor->skip_flush = 1;
		gf_sc_next_frame_state(tr_state->visual->compositor, GF_SC_DRAW_FRAME);
		return;
	}


	span = svg_get_text_span(fm, font, tr_state->svg_props->font_size->value, (tr_state->count_x>1), (tr_state->count_y>1), tr_state->count_rotate, atts, dom_text->textContent, atts->xml_lang ? *atts->xml_lang : NULL, tr_state);
	if (!span) return;

	i=0;
	/*
	if character positions are given in the (x, y) attributes, use them.
	Otherwise add text at tr_state->text_end_x.
	*/
	while ((i<span->nb_glyphs)
		&& ( (tr_state->count_x>1) || (tr_state->count_y>1) )
	) {
		//get x and y positions
		if (tr_state->count_x==0) {
			x = tr_state->text_end_x;
		} else {
			SVG_Coordinate *xc = (SVG_Coordinate *) gf_list_get(*tr_state->text_x, tr_state->chunk_index);
			x = xc->value;
			(tr_state->count_x)--;
		}
		if (tr_state->count_y==0) {
			y = tr_state->text_end_y;
		} else {
			SVG_Coordinate *yc = (SVG_Coordinate *) gf_list_get(*tr_state->text_y, tr_state->chunk_index);
			y = yc->value;
			(tr_state->count_y)--;
		}
		

		/*apply x-anchor*/
		ptr = (Fixed *)gf_list_get(tr_state->x_anchors, tr_state->chunk_index);
		x_anchor = ptr ? *ptr : 0;
		if (span->dx) span->dx[i] = x_anchor + x;
		else if (!i) span->off_x = x_anchor + x;
		if (span->dy) span->dy[i] = y;
		else span->off_y = y;

		if (tr_state->count_rotate) {
			SVG_Coordinate *rc = (SVG_Coordinate *) gf_list_get(*tr_state->text_rotate, tr_state->idx_rotate);
			span->rot[i] = gf_mulfix(GF_PI/180, rc->value);
			if (tr_state->idx_rotate+1<tr_state->count_rotate) tr_state->idx_rotate++;
		}

		/*update last glyph position*/
		block_width = (span->glyphs[i] ? span->glyphs[i]->horiz_advance : font->max_advance_h) * span->font_scale;
		tr_state->text_end_x = x+block_width;
		tr_state->text_end_y = y;
		(tr_state->chunk_index)++;
		i++;
	}

	/* no more positions, add remaining glyphs as a block*/
	if (i<span->nb_glyphs) {
		Fixed offset;
		if ((tr_state->count_x==1) && tr_state->text_x) {
			SVG_Coordinate *xc = (SVG_Coordinate *) gf_list_get(*tr_state->text_x, tr_state->chunk_index);
			tr_state->text_end_x = xc->value;
			(tr_state->count_x)--;
		}
		if ((tr_state->count_y==1) && tr_state->text_y) {
			SVG_Coordinate *yc = (SVG_Coordinate *) gf_list_get(*tr_state->text_y, tr_state->chunk_index);
			tr_state->text_end_y = yc->value;
			(tr_state->count_y)--;
		}

		x = tr_state->text_end_x;
		y = tr_state->text_end_y;

		/*apply x anchor*/
		ptr = (Fixed *)gf_list_get(tr_state->x_anchors, tr_state->chunk_index);
		x_anchor = ptr ? *ptr : 0;

		offset = x_anchor + x - (span->dx ? span->dx[i] : span->off_x);

		if (!span->dx && (tr_state->text_x || x_anchor)) span->off_x = x_anchor + x;
		if (!span->dy && tr_state->text_y) span->off_y = y;

		block_width = 0;
		while (i<span->nb_glyphs) {

			if (span->rot) {
				SVG_Coordinate *rc = (SVG_Coordinate *) gf_list_get(*tr_state->text_rotate, tr_state->idx_rotate);
				span->rot[i] = gf_mulfix(GF_PI/180, rc->value);
				if (tr_state->idx_rotate+1<tr_state->count_rotate) tr_state->idx_rotate++;
			}
			if (span->dx) span->dx[i] = offset + block_width;
			if (span->dy) span->dy[i] = y;
			block_width += (span->glyphs[i] ? span->glyphs[i]->horiz_advance : font->max_advance_h) * span->font_scale;

			i++;
		}
		tr_state->text_end_x += block_width;
	}

	/*add span path to list of spans*/
	gf_list_add(spans, span);
	span->anchor = anchor_node;
}
Example #15
/* This function notifies the scene time to all the timed elements registered in the given scene graph.
   It returns whether at least one timed element is active. If no timed element is active, the scene has not
   changed from the timing point of view and no rendering refresh is needed, even if the time has changed.
   It uses an additional list of modified timed elements to re-notify any timed element that was modified
   by the begin/end/repeat of another timed element.
*/
Bool gf_smil_notify_timed_elements(GF_SceneGraph *sg)
{
	SMIL_Timing_RTI *rti;
	u32 active_count, i;
	s32 ret;
	Bool do_loop;
	if (!sg) return 0;

	active_count = 0;

	/*
		Note: whenever a timed element is active, we trigger a gf_node_dirty_parent_graph so that the parent graph
		is aware that some modifications may happen in the subtree. This is needed for cases where the subtree
		is in an offscreen surface, to force retraversing of the subtree and thus apply the animation.

	*/

	/* notify the new scene time to the registered timed elements
	   this might modify other timed elements or the element itself
	   in which case it will be added to the list of modified elements */
	i = 0;
	do_loop = 1;
	while(do_loop && (rti = (SMIL_Timing_RTI *)gf_list_enum(sg->smil_timed_elements, &i))) {
		ret = gf_smil_timing_notify_time(rti, gf_node_get_scene_time((GF_Node*)rti->timed_elt) );
		switch (ret) {
		case -1:
			/* special case for discard element
			   when a discard element is executed, it automatically removes itself from the list of timed elements
			   in the scene graph, so we need to fix the index i. */
			i--;
			break;
		case -2:
			/* special return value, -2 means that the tested timed element is waiting to begin
			   Assuming that the timed elements are sorted by begin order,
			   the next ones don't need to be checked */
			do_loop = 0;
			break;
		case -3:
			/* special case for animation elements which do not need to be notified anymore,
			   but which require a tree traversal */
			i--;
			active_count ++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 1:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 0:
		default:
			break;
		}
	}

	/* notify the timed elements which have been modified either since the previous frame (updates, scripts) or
	   because of the start/end/repeat of the previous notifications */
	while (gf_list_count(sg->modified_smil_timed_elements)) {
		/* first remove the modified smil timed element */
		rti = gf_list_get(sg->modified_smil_timed_elements, 0);
		gf_list_rem(sg->modified_smil_timed_elements, 0);

		/* then remove it in the list of non modified (if it was there) */
		gf_list_del_item(sg->smil_timed_elements, rti);

		/* then insert it at its right position (in the sorted list of timed elements) */
		gf_smil_timing_add_to_sg(sg, rti);

		/* finally again notify this timed element */
		rti->force_reevaluation = 1;
		ret = gf_smil_timing_notify_time(rti, gf_node_get_scene_time((GF_Node*)rti->timed_elt) );
		switch (ret) {
		case -1:
			break;
		case -2:
			break;
		case -3:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 1:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 0:
		default:
			break;
		}

	}
	return (active_count>0);
}
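A minimal caller sketch (assumed usage, not taken from the source): the function is meant to be polled once per simulation step, and a redraw is only worth scheduling when it reports at least one active timed element.

/* hypothetical caller, for illustration only */
static void simulation_step(GF_SceneGraph *sg)
{
	if (gf_smil_notify_timed_elements(sg)) {
		/* at least one timed element is active: its subtree may have changed,
		   so a recomposition/redraw should be scheduled (compositor-specific, omitted) */
	}
	/* otherwise nothing changed from the timing point of view, even if time advanced */
}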
Example #16
static GF_Err ODF_ProcessData(GF_SceneDecoder *plug, const char *inBuffer, u32 inBufferLength,
                              u16 ES_ID, u32 AU_time, u32 mmlevel)
{
	GF_Err e;
	GF_ODCom *com;
	GF_ODCodec *oddec;
	ODPriv *priv = (ODPriv *)plug->privateStack;

	oddec = gf_odf_codec_new();

	e = gf_odf_codec_set_au(oddec, inBuffer, inBufferLength);
	if (!e)  e = gf_odf_codec_decode(oddec);
	if (e) goto err_exit;

	//3- process all the commands in this AU, in order
	while (1) {
		com = gf_odf_codec_get_com(oddec);
		if (!com) break;

		//ok, we have a command
		switch (com->tag) {
		case GF_ODF_OD_UPDATE_TAG:
			e = ODS_ODUpdate(priv, (GF_ODUpdate *) com);
			break;
		case GF_ODF_OD_REMOVE_TAG:
			e = ODS_RemoveOD(priv, (GF_ODRemove *) com);
			break;
		case GF_ODF_ESD_UPDATE_TAG:
			e = ODS_UpdateESD(priv, (GF_ESDUpdate *)com);
			break;
		case GF_ODF_ESD_REMOVE_TAG:
			e = ODS_RemoveESD(priv, (GF_ESDRemove *)com);
			break;
		case GF_ODF_IPMP_UPDATE_TAG:
#if 0
		{
			GF_IPMPUpdate *ipmpU = (GF_IPMPUpdate *)com;
			while (gf_list_count(ipmpU->IPMPDescList)) {
				GF_IPMP_Descriptor *ipmp = gf_list_get(ipmpU->IPMPDescList, 0);
				gf_list_rem(ipmpU->IPMPDescList, 0);
				IS_UpdateIPMP(priv->scene, ipmp);
			}
			e = GF_OK;
		}
#else
		e = GF_OK;
#endif
		break;
		case GF_ODF_IPMP_REMOVE_TAG:
			e = GF_NOT_SUPPORTED;
			break;
		/*should NEVER exist outside the file format*/
		case GF_ODF_ESD_REMOVE_REF_TAG:
			e = GF_NON_COMPLIANT_BITSTREAM;
			break;
		default:
			if (com->tag >= GF_ODF_COM_ISO_BEGIN_TAG && com->tag <= GF_ODF_COM_ISO_END_TAG) {
				e = GF_ODF_FORBIDDEN_DESCRIPTOR;
			} else {
				/*we don't process user commands*/
				e = GF_OK;
			}
			break;
		}
		gf_odf_com_del(&com);
		if (e) break;
	}

err_exit:
	gf_odf_codec_del(oddec);
	return e;
}
Example #17
GF_EXPORT
GF_Err gf_isom_next_hint_packet(GF_ISOFile *the_file, u32 trackNumber, char **pck_data, u32 *pck_size, Bool *disposable, Bool *repeated, u32 *trans_ts, u32 *sample_num)
{
	GF_RTPPacket *pck;
	GF_Err e;
	GF_BitStream *bs;
	GF_TrackBox *trak, *ref_trak;
	GF_HintSampleEntryBox *entry;
	u32 i, count, ts;
	s32 cts_off;

	*pck_data = NULL;
	*pck_size = 0;
	if (trans_ts) *trans_ts = 0;
	if (disposable) *disposable = 0;
	if (repeated) *repeated = 0;
	if (sample_num) *sample_num = 0;

	trak = gf_isom_get_track_from_file(the_file, trackNumber);
	if (!trak) return GF_BAD_PARAM;
	e = Media_GetSampleDesc(trak->Media, 1, (GF_SampleEntryBox **) &entry, NULL);
	if (e) return e;
	switch (entry->type) {
	case GF_ISOM_BOX_TYPE_RTP_STSD:
		break;
	default:
		return GF_NOT_SUPPORTED;
	}

	if (!entry->hint_sample) {
		e = gf_isom_load_next_hint_sample(the_file, trackNumber, trak, entry);
		if (e) return e;
	}
	pck = (GF_RTPPacket *)gf_list_get(entry->hint_sample->packetTable, 0);
	gf_list_rem(entry->hint_sample->packetTable, 0);
	if (!pck) return GF_BAD_PARAM;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	/*write RTP header*/
	gf_bs_write_int(bs, 2, 2);	/*version*/
	gf_bs_write_int(bs, pck->P_bit, 1);	/*P bit*/
	gf_bs_write_int(bs, pck->X_bit, 1);	/*X bit*/
	gf_bs_write_int(bs, 0, 4);	/*CSRC count*/
	gf_bs_write_int(bs, pck->M_bit, 1);	/*M bit*/
	gf_bs_write_int(bs, pck->payloadType, 7);	/*payt*/
	gf_bs_write_u16(bs, entry->pck_sn);	/*seq num*/
	entry->pck_sn++;

	/*look for CTS offset in TLV*/
	cts_off = 0;
	count = gf_list_count(pck->TLV);
	for (i=0; i<count; i++) {
		GF_RTPOBox *rtpo = (GF_RTPOBox *)gf_list_get(pck->TLV, i);
		if (rtpo->type == GF_ISOM_BOX_TYPE_RTPO) {
			cts_off = rtpo->timeOffset;
			break;
		}
	}
	/*TS - TODO check TS wrapping*/
	ts = (u32) (entry->hint_sample->TransmissionTime + pck->relativeTransTime + entry->ts_offset + cts_off);
	gf_bs_write_u32(bs, ts );
	gf_bs_write_u32(bs, entry->ssrc);	/*SSRC*/

	/*then build all data*/
	count = gf_list_count(pck->DataTable);
	for (i=0; i<count; i++) {
		GF_GenericDTE *dte = (GF_GenericDTE *)gf_list_get(pck->DataTable, i);
		switch (dte->source) {
		/*empty*/
		case 0:
			break;
		/*immediate data*/
		case 1:
			gf_bs_write_data(bs, ((GF_ImmediateDTE *)dte)->data, ((GF_ImmediateDTE *)dte)->dataLength);
			break;
		/*sample data*/
		case 2:
		{
			GF_ISOSample *samp;
			GF_SampleDTE *sdte = (GF_SampleDTE *)dte;
			/*get track if not this one*/
			if (sdte->trackRefIndex != (s8) -1) {
				if (!entry->hint_ref || !entry->hint_ref->trackIDs) {
					gf_isom_hint_rtp_del(pck);
					gf_bs_del(bs);
					return GF_ISOM_INVALID_FILE;
				}
				ref_trak = gf_isom_get_track_from_id(trak->moov, entry->hint_ref->trackIDs[(u32)sdte->trackRefIndex]);
			} else {
				ref_trak = trak;
			}
			samp = gf_isom_get_data_sample(entry->hint_sample, ref_trak, sdte->sampleNumber);
			if (!samp) {
				gf_isom_hint_rtp_del(pck);
				gf_bs_del(bs);
				return GF_IO_ERR;
			}
			gf_bs_write_data(bs, samp->data + sdte->byteOffset, sdte->dataLength);
		}
			break;
		/*sample desc data - currently NOT SUPPORTED !!!*/
		case 3:
			break;
		}
	}
	if (trans_ts) *trans_ts = ts;
	if (disposable) *disposable = pck->B_bit;
	if (repeated) *repeated = pck->R_bit;
	if (sample_num) *sample_num = entry->cur_sample-1;

	gf_bs_get_content(bs, pck_data, pck_size);
	gf_bs_del(bs);
	gf_isom_hint_rtp_del(pck);
	if (!gf_list_count(entry->hint_sample->packetTable)) {
		gf_isom_hint_sample_del(entry->hint_sample);
		entry->hint_sample = NULL;
	}
	return GF_OK;
}
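A hedged usage sketch (the loop structure and cleanup are assumptions; the signature is the one shown above): packets are pulled one at a time until the call fails, and each returned buffer is assumed to be released with gf_free().

/* sketch only - error handling reduced to the bare minimum */
static void dump_hint_packets(GF_ISOFile *file, u32 hint_track)
{
	while (1) {
		char *pck_data = NULL;
		u32 pck_size = 0, trans_ts = 0, sample_num = 0;
		GF_Err e = gf_isom_next_hint_packet(file, hint_track, &pck_data, &pck_size,
		                                    NULL, NULL, &trans_ts, &sample_num);
		if (e || !pck_data) break;
		/* pck_data holds a complete RTP packet (12-byte header + payload) of pck_size bytes */
		gf_free(pck_data);
	}
}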
Example #18
GF_Err gf_webvtt_merge_cues(GF_WebVTTParser *parser, u64 start, GF_List *cues)
{
	GF_WebVTTSample *wsample;
	GF_WebVTTSample *prev_wsample;
	Bool            has_continuation_cue = GF_FALSE;

	assert(gf_list_count(parser->samples) <= 1);

	wsample = gf_webvtt_sample_new();
	wsample->start = start;

	prev_wsample = (GF_WebVTTSample *)gf_list_last(parser->samples);
	while (gf_list_count(cues)) {
		GF_WebVTTCue *cue = (GF_WebVTTCue *)gf_list_get(cues, 0);
		gf_list_rem(cues, 0);
		/* add the cue to the current sample */
		gf_list_add(wsample->cues, cue);
		/* update with the previous sample */
		if (prev_wsample) {
			Bool  found = GF_FALSE;
			while (!found && gf_list_count(prev_wsample->cues)) {
				GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(prev_wsample->cues, 0);
				gf_list_rem(prev_wsample->cues, 0);
				if (
				    ((!cue->id && !old_cue->id) || (old_cue->id && cue->id && !strcmp(old_cue->id, cue->id))) &&
				    ((!cue->settings && !old_cue->settings) || (old_cue->settings && cue->settings && !strcmp(old_cue->settings, cue->settings))) &&
				    ((!cue->text && !old_cue->text) || (old_cue->text && cue->text && !strcmp(old_cue->text, cue->text)))
				) {
					/* if it is the same cue, update its start with the initial start */
					cue->start = old_cue->start;
					has_continuation_cue = GF_TRUE;
					found = GF_TRUE;
					if (old_cue->pre_text) {
						cue->pre_text = old_cue->pre_text;
						old_cue->pre_text = NULL;
					}
					if (old_cue->post_text) {
						cue->post_text = old_cue->post_text;
						old_cue->post_text = NULL;
					}
				} else {
					/* finalize the end cue time */
					if (gf_webvtt_timestamp_is_zero(&old_cue->end)) {
						gf_webvtt_timestamp_set(&old_cue->end, wsample->start);
					}
					/* transfer the cue */
					if (!has_continuation_cue) {
						/* the cue can be safely serialized while keeping the order */
						parser->on_cue_read(parser->user, old_cue);
					} else {
						/* keep the cue in the current sample to respect cue start ordering */
						gf_list_add(wsample->cues, old_cue);
					}
				}
				/* delete the old cue */
				gf_webvtt_cue_del(old_cue);
			}
		}
	}
	/* No cue in the current sample */
	if (prev_wsample) {
		while (gf_list_count(prev_wsample->cues)) {
			GF_WebVTTCue *cue = (GF_WebVTTCue *)gf_list_get(prev_wsample->cues, 0);
			gf_list_rem(prev_wsample->cues, 0);
			/* finalize the end cue time */
			if (gf_webvtt_timestamp_is_zero(&cue->end)) {
				gf_webvtt_timestamp_set(&cue->end, wsample->start);
			}
			/* transfer the cue */
			if (!has_continuation_cue) {
				/* the cue can be safely serialized while keeping the order */
				parser->on_cue_read(parser->user, cue);
			} else {
				/* keep the cue in the current sample to respect cue start ordering */
				gf_list_add(wsample->cues, cue);
			}
			gf_webvtt_cue_del(cue);
		}
		gf_webvtt_sample_del(prev_wsample);
		gf_list_rem_last(parser->samples);
		prev_wsample = NULL;
	} else {
		/* nothing to do */
	}
	if (gf_list_count(wsample->cues)) {
		gf_list_add(parser->samples, wsample);
	} else {
		gf_webvtt_sample_del(wsample);
	}
	return GF_OK;
}
Example #19
GF_Err gf_isom_get_ttxt_esd(GF_MediaBox *mdia, GF_ESD **out_esd)
{
	GF_BitStream *bs;
	u32 count, i;
	Bool has_v_info;
	GF_List *sampleDesc;
	GF_ESD *esd;
	GF_TrackBox *tk;

	*out_esd = NULL;
	sampleDesc = mdia->information->sampleTable->SampleDescription->other_boxes;
	count = gf_list_count(sampleDesc);
	if (!count) return GF_ISOM_INVALID_MEDIA;

	esd = gf_odf_desc_esd_new(2);
	esd->decoderConfig->streamType = GF_STREAM_TEXT;
	esd->decoderConfig->objectTypeIndication = GPAC_OTI_TEXT_MPEG4;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);


	/*Base3GPPFormat*/
	gf_bs_write_u8(bs, 0x10);
	/*MPEGExtendedFormat*/
	gf_bs_write_u8(bs, 0x10);
	/*profileLevel*/
	gf_bs_write_u8(bs, 0x10);
	gf_bs_write_u24(bs, mdia->mediaHeader->timeScale);
	gf_bs_write_int(bs, 0, 1);	/*no alt formats*/
	gf_bs_write_int(bs, 2, 2);	/*only out-of-band sample desc*/
	gf_bs_write_int(bs, 1, 1);	/*we will write sample desc*/

	/*write v info if any visual track in this movie*/
	has_v_info = 0;
	i = 0;
	while ((tk = (GF_TrackBox*)gf_list_enum(mdia->mediaTrack->moov->trackList, &i))) {
		if (tk->Media->handler && (tk->Media->handler->handlerType == GF_ISOM_MEDIA_VISUAL)) {
			has_v_info = 1;
		}
	}
	gf_bs_write_int(bs, has_v_info, 1);

	gf_bs_write_int(bs, 0, 3);	/*reserved, spec doesn't say the values*/
	gf_bs_write_u8(bs, mdia->mediaTrack->Header->layer);
	gf_bs_write_u16(bs, mdia->mediaTrack->Header->width >> 16);
	gf_bs_write_u16(bs, mdia->mediaTrack->Header->height >> 16);

	/*write desc*/
	gf_bs_write_u8(bs, count);
	for (i = 0; i<count; i++) {
		GF_Tx3gSampleEntryBox *a;
		a = (GF_Tx3gSampleEntryBox *)gf_list_get(sampleDesc, i);
		if ((a->type != GF_ISOM_BOX_TYPE_TX3G) && (a->type != GF_ISOM_BOX_TYPE_TEXT)) continue;
		gf_isom_write_tx3g(a, bs, i + 1, SAMPLE_INDEX_OFFSET);
	}
	if (has_v_info) {
		u32 trans;
		/*which video shall we pick for MPEG-4, and how are the associations indicated in 3GP ???*/
		gf_bs_write_u16(bs, 0);
		gf_bs_write_u16(bs, 0);
		trans = mdia->mediaTrack->Header->matrix[6];
		trans >>= 16;
		gf_bs_write_u16(bs, trans);
		trans = mdia->mediaTrack->Header->matrix[7];
		trans >>= 16;
		gf_bs_write_u16(bs, trans);
	}

	gf_bs_get_content(bs, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
	gf_bs_del(bs);
	*out_esd = esd;
	return GF_OK;
}
Example #20
static GF_Err gf_webvtt_add_cue_to_samples(GF_WebVTTParser *parser, GF_List *samples, GF_WebVTTCue *cue)
{
	s32 i;
	u64 cue_start;
	u64 cue_end;
	u64 sample_end;

	sample_end = 0;
	cue_start = gf_webvtt_timestamp_get(&cue->start);
	cue_end   = gf_webvtt_timestamp_get(&cue->end);
	/* samples in the samples list are contiguous: sample(n)->start == sample(n-1)->end */
	for (i = 0; i < (s32)gf_list_count(samples); i++) {
		GF_WebVTTSample *sample;
		sample = (GF_WebVTTSample *)gf_list_get(samples, i);
		/* save the sample end in case there are no more samples to test */
		sample_end = sample->end;
		if (cue_start < sample->start)
		{
			/* cues must be ordered according to their start time, so drop the cue */
			/* TODO delete the cue */
			return GF_BAD_PARAM;
		}
		else if (cue_start == sample->start && cue_end == sample->end)
		{
			/* if the timing of the new cue matches the sample, no need to split, add the cue to the sample */
			gf_list_add(sample->cues, cue);
			/* the cue does not need to be processed further */
			return GF_OK;
		}
		else if (cue_start >= sample->end)
		{
			/* flush the current sample */
			gf_list_del_item(samples, sample);
			parser->on_sample_parsed(parser->user, sample);
			sample = NULL;
			i--;
			/* process the cue with next sample (if any) or create a new sample */
			continue;
		}
		else if (cue_start >= sample->start)
		{
			u32 j;
			if (cue_start > sample->start) {
				/* create a new sample, insert it after the current one */
				GF_WebVTTSample *new_sample = gf_webvtt_sample_new();
				new_sample->start = cue_start;
				new_sample->end = sample->end;
				gf_list_insert(samples, new_sample, i+1);
				/* split the cues from the old sample into the new one */
				for (j = 0; j < gf_list_count(sample->cues); j++) {
					GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, j);
					GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(old_cue, &cue->start);
					gf_list_add(new_sample->cues, new_cue);
				}
				/* adjust the end of the old sample and flush it */
				sample->end = cue_start;
				gf_list_del_item(samples, sample);
				parser->on_sample_parsed(parser->user, sample);
				sample = NULL;
				i--;
				/* process the cue again with this new sample */
				continue;
			}
			if (cue_end > sample->end) {
				/* the cue is longer than the sample, we split the cue, add one part to the current sample
				and reevaluate with the last part of the cue */
				GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, 0);
				GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(cue, &old_cue->end);
				gf_list_add(sample->cues, cue);
				cue = new_cue;
				cue_start = sample->end;
				/* cue_end unchanged */
				/* process the remaining part of the cue (i.e. the new cue) with the other samples */
				continue;
			} else { /* cue_end < sample->end */
				GF_WebVTTSample *new_sample = gf_webvtt_sample_new();
				new_sample->start = cue_end;
				new_sample->end   = sample->end;
				gf_list_insert(samples, new_sample, i+1);
				for (j = 0; j < gf_list_count(sample->cues); j++) {
					GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, j);
					GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(old_cue, &cue->end);
					gf_list_add(new_sample->cues, new_cue);
				}
				gf_list_add(sample->cues, cue);
				sample->end = new_sample->start;
				/* done with this cue */
				return GF_OK;
			}
		}
	}
	/* (a part of) the cue remains (was not overlapping) */
	if (cue_start > sample_end) {
		/* if the new cue start is greater than the last sample end,
		    create an empty sample to fill the gap, flush it */
		GF_WebVTTSample *esample = gf_webvtt_sample_new();
		esample->start = sample_end;
		esample->end   = cue_start;
		parser->on_sample_parsed(parser->user, esample);
	}
	/* if the cue has not been added to a sample, create a new sample for it */
	{
		GF_WebVTTSample *sample;
		sample = gf_webvtt_sample_new();
		gf_list_add(samples, sample);
		sample->start = cue_start;
		sample->end = cue_end;
		gf_list_add(sample->cues, cue);
	}
	return GF_OK;
}
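A worked illustration of the splitting above (all times invented): if the list currently holds one sample covering 0-5 s whose cue A also ends at 5 s, and a new cue B spanning 2-7 s arrives, the code flushes a 0-2 s sample with the first part of A, keeps a 2-5 s sample holding the rest of A plus the matching part of B, and appends a new 5-7 s sample with B's remainder once the loop is done.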
Example #21
static u32 MM_SimulationStep_Decoder(GF_Terminal *term, u32 *nb_active_decs)
{
	CodecEntry *ce;
	GF_Err e;
	u32 count, remain;
	u32 time_taken, time_slice, time_left;

#ifndef GPAC_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock();
#endif

//	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Media Manager] Entering simulation step\n"));
	gf_term_handle_services(term);

#ifndef GPAC_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock() - term->compositor->networks_time;
#endif

#ifndef GPAC_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock();
#endif
	gf_mx_p(term->mm_mx);

	count = gf_list_count(term->codecs);
	time_left = term->frame_duration;
	*nb_active_decs = 0;

	if (term->last_codec >= count) term->last_codec = 0;
	remain = count;
	/*this is ultra basic; a nicer scheduling system would be much better*/
	while (remain) {
		ce = (CodecEntry*)gf_list_get(term->codecs, term->last_codec);
		if (!ce) break;

		if (!(ce->flags & GF_MM_CE_RUNNING) || (ce->flags & GF_MM_CE_THREADED) || ce->dec->force_cb_resize) {
			remain--;
			if (!remain) break;
			term->last_codec = (term->last_codec + 1) % count;
			continue;
		}
		time_slice = ce->dec->Priority * time_left / term->cumulated_priority;
		if (ce->dec->PriorityBoost) time_slice *= 2;
		time_taken = gf_sys_clock();
		(*nb_active_decs) ++;
		e = gf_codec_process(ce->dec, time_slice);
		time_taken = gf_sys_clock() - time_taken;
		/*avoid signaling errors too often...*/
#ifndef GPAC_DISABLE_LOG
		if (e) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CODEC, ("[ODM%d] Decoding Error %s\n", ce->dec->odm->OD->objectDescriptorID, gf_error_to_string(e) ));
		} else {
			//GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Decode time slice %d ms out of %d ms\n", ce->dec->decio ? ce->dec->decio->module_name : "RAW", time_taken, time_left ));
		}
#endif
		if (ce->flags & GF_MM_CE_DISCARDED) {
			gf_free(ce);
			gf_list_rem(term->codecs, term->last_codec);
			count--;
			if (!count)
				break;
		} else {
			if (ce->dec->CB && (ce->dec->CB->UnitCount >= ce->dec->CB->Min)) ce->dec->PriorityBoost = 0;
		}
		term->last_codec = (term->last_codec + 1) % count;

		remain -= 1;
		if (time_left > time_taken) {
			time_left -= time_taken;
			if (!remain) break;
		} else {
			time_left = 0;
			break;
		}
	}
	gf_mx_v(term->mm_mx);
#ifndef GPAC_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock() - term->compositor->decoders_time;
#endif

	return time_left;
}
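To make the time-slice formula concrete (numbers invented): with frame_duration = 33 ms and two running codecs of Priority 1 and 2 (cumulated_priority = 3), the first codec visited gets 1*33/3 = 11 ms; if it actually consumes 10 ms, the second gets 2*23/3 = 15 ms of the remaining 23 ms, and a PriorityBoost would simply double that slice.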
Example #22
GF_Err gf_webvtt_parser_parse(GF_WebVTTParser *parser, u32 duration)
{
	char            szLine[2048];
	char            *sOK;
	u32             len;
	GF_Err          e;
	Bool            do_parse = GF_TRUE;
	GF_WebVTTCue    *cue = NULL;
	u32             start = 0;
	u32             end = 0;
	char            *prevLine = NULL;
	char            *header = NULL;
	u32             header_len = 0;
	Bool            had_marks = GF_FALSE;

	if (!parser) return GF_BAD_PARAM;
	if (parser->is_srt) {
		parser->on_header_parsed(parser->user, "WEBVTT\n");
	}
	while (do_parse) {
		sOK = gf_text_get_utf8_line(szLine, 2048, parser->vtt_in, parser->unicode_type);
		REM_TRAIL_MARKS(szLine, "\r\n")
		len = (u32) strlen(szLine);
		switch (parser->state) {
		case WEBVTT_PARSER_STATE_WAITING_SIGNATURE:
			if (!sOK || len < 6 || strnicmp(szLine, "WEBVTT", 6) || (len > 6 && szLine[6] != ' ' && szLine[6] != '\t')) {
				e = GF_CORRUPTED_DATA;
				parser->report_message(parser->user, e, "Bad WEBVTT file signature %s", szLine);
				goto exit;
			} else {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				header = gf_strdup(szLine);
				header_len = len;
				parser->state = WEBVTT_PARSER_STATE_WAITING_HEADER;
			}
			break; /* proceed to next line */
		case WEBVTT_PARSER_STATE_WAITING_HEADER:
			if (prevLine) {
				u32 prev_len = (u32) strlen(prevLine);
				header = (char *)gf_realloc(header, header_len + prev_len + 1);
				strcpy(header+header_len,prevLine);
				header_len += prev_len;
				gf_free(prevLine);
				prevLine = NULL;
			}
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->on_header_parsed(parser->user, header);
					/* continue to the next state without breaking */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* no break, continue to the next state*/
				} else {
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					break; /* proceed to next line */
				}
			} else {
				parser->on_header_parsed(parser->user, header);
				if (!sOK) {
					/* end of file, parsing is done */
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line means end of header */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					/* no break, continue to the next state*/
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE:
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* continue to the next state without breaking */
				} else {
					/* discard the previous line */
					/* should we do something with it ? callback ?*/
					if (prevLine) {
						gf_free(prevLine);
						prevLine = NULL;
					}
					/* save this new line */
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					/* stay in the same state */
					break;
				}
			} else {
				/* discard the previous line */
				/* should we do something with it ? callback ?*/
				if (prevLine) {
					gf_free(prevLine);
					prevLine = NULL;
				}
				if (!sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* remove empty lines and stay in the same state */
					break;
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP:
			if (sOK && len) {
				if (cue == NULL) {
					cue   = gf_webvtt_cue_new();
				}
				if (prevLine) {
					gf_webvtt_cue_add_property(cue, WEBVTT_ID, prevLine, (u32) strlen(prevLine));
					gf_free(prevLine);
					prevLine = NULL;
				}
				e = gf_webvtt_parser_parse_timings_settings(parser, cue, szLine, len);
				if (e) {
					if (cue) gf_webvtt_cue_del(cue);
					cue = NULL;
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
				} else {
					start = (u32)gf_webvtt_timestamp_get(&cue->start);
					end   = (u32)gf_webvtt_timestamp_get(&cue->end);
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD;
				}
			} else {
				/* not possible */
				assert(0);
			}
			break;
		case WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD:
			if (sOK && len) {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				gf_webvtt_cue_add_property(cue, WEBVTT_PAYLOAD, szLine, len);
				/* remain in the same state as a cue payload can have multiple lines */
				break;
			} else {
				/* end of the current cue */
				gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
				cue = NULL;

				gf_set_progress("Importing WebVTT", gf_ftell(parser->vtt_in), parser->file_size);
				if ((duration && (end >= duration)) || !sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line, move to next cue */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					break;
				}
			}
		}
		if (duration && (start >= duration)) {
			break;
		}
	}


	/* no more cues to come, flush everything */
	if (cue) {
		gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
		cue = NULL;
	}
	while (gf_list_count(parser->samples) > 0) {
		GF_WebVTTSample *sample = (GF_WebVTTSample *)gf_list_get(parser->samples, 0);
		parser->last_duration = sample->end - sample->start;
		gf_list_rem(parser->samples, 0);
		parser->on_sample_parsed(parser->user, sample);
	}
	gf_set_progress("Importing WebVTT", parser->file_size, parser->file_size);
	e = GF_OK;
exit:
	if (cue) gf_webvtt_cue_del(cue);
	if (prevLine) gf_free(prevLine);
	if (header) gf_free(header);
	return e;
}
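For reference, the parser states above map onto input shaped roughly like this (content invented, kept as a C comment so it can sit next to the code):

/* Illustrative WebVTT input:

   WEBVTT                            -> WAITING_SIGNATURE
   Kind: captions                    -> WAITING_HEADER, until a blank line
                                        or a line containing "-->"

   some-cue-id                       -> WAITING_CUE, kept as prevLine and later
                                        attached to the cue as its id
   00:00:01.000 --> 00:00:04.000     -> WAITING_CUE_TIMESTAMP
   first payload line                -> WAITING_CUE_PAYLOAD, until a blank
   second payload line                  line ends the cue
*/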
Example #23
GF_Err gf_isom_add_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, Bool self_reference, char *resource_path, const char *item_name, const char *mime_type, const char *content_encoding, const char *URL, const char *URN, char *data, u32 data_len)
{
    GF_Err e;
    GF_ItemLocationEntry *location_entry;
    GF_ItemInfoEntryBox *infe;
    GF_MetaBox *meta;
    u32 lastItemID = 0;

    if (!self_reference && !item_name && !resource_path) return GF_BAD_PARAM;
    e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE);
    if (e) return e;
    meta = gf_isom_get_meta(file, root_meta, track_num);
    if (!meta) {
        GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("Trying to add item, but missing meta box"));
        return GF_BAD_PARAM;
    }

    e = FlushCaptureMode(file);
    if (e) return e;

    /*check file exists */
    if (!URN && !URL && !self_reference && !data) {
        FILE *src = gf_f64_open(resource_path, "rb");
        if (!src) return GF_URL_ERROR;
        fclose(src);
    }

    if (meta->item_infos) {
        u32 i;
        u32 item_count = gf_list_count(meta->item_infos->item_infos);
        for (i = 0; i < item_count; i++) {
            GF_ItemInfoEntryBox *e= (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, i);
            if (e->item_ID > lastItemID) lastItemID = e->item_ID;
        }
    }

    infe = (GF_ItemInfoEntryBox *)infe_New();
    infe->item_ID = ++lastItemID;

    /*get relative name*/
    if (item_name) {
        infe->item_name = gf_strdup(item_name);
    } else if (resource_path) {
        if (strrchr(resource_path, GF_PATH_SEPARATOR)) {
            infe->item_name = gf_strdup(strrchr(resource_path, GF_PATH_SEPARATOR) + 1);
        } else {
            infe->item_name = gf_strdup(resource_path);
        }
    }

    if (mime_type) {
        infe->content_type = gf_strdup(mime_type);
    } else {
        infe->content_type = gf_strdup("application/octet-stream");
    }
    if (content_encoding) infe->content_encoding = gf_strdup(content_encoding);

    /*Creation of the ItemLocation */
    location_entry = (GF_ItemLocationEntry*)gf_malloc(sizeof(GF_ItemLocationEntry));
    if (!location_entry) {
        gf_isom_box_del((GF_Box *)infe);
        return GF_OUT_OF_MEM;
    }
    memset(location_entry, 0, sizeof(GF_ItemLocationEntry));
    location_entry->extent_entries = gf_list_new();

    /*Creates an mdat if it does not exist*/
    if (!file->mdat) {
        file->mdat = (GF_MediaDataBox *)mdat_New();
        gf_list_add(file->TopBoxes, file->mdat);
    }

    /*Create an ItemLocation Box if it does not exist*/
    if (!meta->item_locations) meta->item_locations = (GF_ItemLocationBox *)iloc_New();
    gf_list_add(meta->item_locations->location_entries, location_entry);
    location_entry->item_ID = lastItemID;

    if (!meta->item_infos) meta->item_infos = (GF_ItemInfoBox *) iinf_New();
    e = gf_list_add(meta->item_infos->item_infos, infe);
    if (e) return e;

    /*0: the current file*/
    location_entry->data_reference_index = 0;
    if (self_reference) {
        GF_ItemExtentEntry *entry;
        GF_SAFEALLOC(entry, GF_ItemExtentEntry);
        gf_list_add(location_entry->extent_entries, entry);
        if (!infe->item_name) infe->item_name = gf_strdup("");
        return GF_OK;
    }

    /*file not copied, just referenced*/
    if (URL || URN) {
        u32 dataRefIndex;
        if (!meta->file_locations) meta->file_locations = (GF_DataInformationBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_DINF);
        if (!meta->file_locations->dref) meta->file_locations->dref = (GF_DataReferenceBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_DREF);
        e = Media_FindDataRef(meta->file_locations->dref, (char *) URL, (char *) URN, &dataRefIndex);
        if (e) return e;
        if (!dataRefIndex) {
            e = Media_CreateDataRef(meta->file_locations->dref, (char *) URL, (char *) URN, &dataRefIndex);
            if (e) return e;
        }
        location_entry->data_reference_index = dataRefIndex;
    }

    /*capture mode, write to disk*/
    if ((file->openMode == GF_ISOM_OPEN_WRITE) && !location_entry->data_reference_index) {
        FILE *src;
        GF_ItemExtentEntry *entry;
        GF_SAFEALLOC(entry, GF_ItemExtentEntry);

        location_entry->base_offset = gf_bs_get_position(file->editFileMap->bs);

        /*update base offset size*/
        if (location_entry->base_offset>0xFFFFFFFF) meta->item_locations->base_offset_size = 8;
        else if (location_entry->base_offset && !meta->item_locations->base_offset_size) meta->item_locations->base_offset_size = 4;

        entry->extent_length = 0;
        entry->extent_offset = 0;
        gf_list_add(location_entry->extent_entries, entry);

        if (data) {
            gf_bs_write_data(file->editFileMap->bs, data, data_len);
            /*update length size*/
            if (entry->extent_length>0xFFFFFFFF) meta->item_locations->length_size = 8;
            else if (entry->extent_length && !meta->item_locations->length_size) meta->item_locations->length_size = 4;
        } else if (resource_path) {
            src = gf_f64_open(resource_path, "rb");
            if (src) {
                char cache_data[4096];
                u64 remain;
                gf_f64_seek(src, 0, SEEK_END);
                entry->extent_length = gf_f64_tell(src);
                gf_f64_seek(src, 0, SEEK_SET);

                remain = entry->extent_length;
                while (remain) {
                    u32 size_cache = (remain>4096) ? 4096 : (u32) remain;
                    size_cache = fread(cache_data, 1, size_cache, src);
                    gf_bs_write_data(file->editFileMap->bs, cache_data, size_cache);
                    remain -= size_cache;
                }
                fclose(src);

                /*update length size*/
                if (entry->extent_length>0xFFFFFFFF) meta->item_locations->length_size = 8;
                else if (entry->extent_length && !meta->item_locations->length_size) meta->item_locations->length_size = 4;
            }
        }
    }
    /*store full path for info*/
    else if (!location_entry->data_reference_index) {
        if (data) {
            infe->full_path = (char *) gf_malloc(sizeof(char) * data_len);
            memcpy(infe->full_path, data, sizeof(char) * data_len);
            infe->data_len = data_len;
        } else {
            infe->full_path = gf_strdup(resource_path);
            infe->data_len = 0;
        }
    }
    return GF_OK;
}
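A hedged call sketch (the file name and the use of the file-level meta are assumptions; the parameter order is the one in the signature above): adding a PNG resource as a new item of the root meta.

/* sketch only */
static GF_Err add_cover_item(GF_ISOFile *file)
{
	char path[] = "cover.png";  /* hypothetical resource; copied into the file in capture mode */
	return gf_isom_add_meta_item_extended(file,
		GF_TRUE,           /* root_meta: use the file-level meta */
		0,                 /* track_num: not used here (root meta) */
		GF_FALSE,          /* self_reference */
		path,              /* resource_path */
		"cover.png",       /* item_name */
		"image/png",       /* mime_type */
		NULL, NULL, NULL,  /* content_encoding, URL, URN */
		NULL, 0);          /* data, data_len: content is read from resource_path instead */
}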
Example #24
GF_Err gf_codec_get_capability(GF_Codec *codec, GF_CodecCapability *cap)
{
	cap->cap.valueInt = 0;
	if (codec->decio)
		return codec->decio->GetCapabilities(codec->decio, cap);

	if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA) {
		GF_BitStream *bs;
		u32 pf, w, h, stride=0, out_size, sr, nb_ch, bpp, ch_cfg, is_flipped = 0;
		GF_Channel *ch = gf_list_get(codec->odm->channels, 0);
		if (!ch || !ch->esd->decoderConfig->decoderSpecificInfo || !ch->esd->decoderConfig->decoderSpecificInfo->data) return 0;
		bs = gf_bs_new(ch->esd->decoderConfig->decoderSpecificInfo->data, ch->esd->decoderConfig->decoderSpecificInfo->dataLength, GF_BITSTREAM_READ);

		pf = w = h = sr = nb_ch = bpp = ch_cfg = 0;
		if (codec->type==GF_STREAM_VISUAL) {
			pf = gf_bs_read_u32(bs);
			w = gf_bs_read_u16(bs);
			h = gf_bs_read_u16(bs);
			out_size = gf_bs_read_u32(bs);
			stride = gf_bs_read_u32(bs);
            is_flipped = gf_bs_read_u8(bs);
		} else {
			sr = gf_bs_read_u32(bs);
			nb_ch = gf_bs_read_u16(bs);
			bpp = gf_bs_read_u16(bs);
			out_size = gf_bs_read_u32(bs);
			ch_cfg = gf_bs_read_u32(bs);
		}
		gf_bs_del(bs);
		switch (cap->CapCode) {
		case GF_CODEC_WIDTH:
			cap->cap.valueInt = w;
			return GF_OK;
		case GF_CODEC_HEIGHT:
			cap->cap.valueInt = h;
			return GF_OK;
		case GF_CODEC_STRIDE:
			cap->cap.valueInt = stride;
			return GF_OK;
		case GF_CODEC_PIXEL_FORMAT:
			cap->cap.valueInt = pf;
			return GF_OK;
        case GF_CODEC_FLIP:
            cap->cap.valueInt = is_flipped;
            return GF_OK;
		case GF_CODEC_OUTPUT_SIZE:
			cap->cap.valueInt = out_size;
			return GF_OK;
		case GF_CODEC_SAMPLERATE:
			cap->cap.valueInt = sr;
			return GF_OK;
		case GF_CODEC_NB_CHAN:
			cap->cap.valueInt = nb_ch;
			return GF_OK;
		case GF_CODEC_BITS_PER_SAMPLE:
			cap->cap.valueInt = bpp;
			return GF_OK;
		case GF_CODEC_CHANNEL_CONFIG:
			cap->cap.valueInt = ch_cfg;
			return GF_OK;
		case GF_CODEC_PAR:
			cap->cap.valueInt = 0;
			return GF_OK;
		case GF_CODEC_PADDING_BYTES:
			cap->cap.valueInt = 0;
			return GF_OK;
		case GF_CODEC_RESILIENT:
			cap->cap.valueInt = 1;
			return GF_OK;
		}
	}
	return GF_BAD_PARAM;
}
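The raw-media branch above implies a small packed layout for the decoder specific info; a sketch of the matching writer for a visual stream (the field order mirrors the reads above, the helper itself is an assumption):

/* sketch: pack the decoder specific info consumed by the raw-media branch above */
static void write_raw_visual_dsi(GF_BitStream *bs, u32 pixel_format, u32 width, u32 height,
                                 u32 output_size, u32 stride, u8 is_flipped)
{
	gf_bs_write_u32(bs, pixel_format);  /* read back as pf */
	gf_bs_write_u16(bs, width);         /* w */
	gf_bs_write_u16(bs, height);        /* h */
	gf_bs_write_u32(bs, output_size);   /* out_size */
	gf_bs_write_u32(bs, stride);        /* stride */
	gf_bs_write_u8(bs, is_flipped);     /* is_flipped */
}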
Example #25
GF_EXPORT
void gf_smil_timing_init_runtime_info(GF_Node *timed_elt)
{
	s32 interval_index;
	GF_SceneGraph *sg;
	SMIL_Timing_RTI *rti;
	SMILTimingAttributesPointers *timingp = NULL;
	u32 tag = gf_node_get_tag(timed_elt);

	if ((tag>=GF_NODE_RANGE_FIRST_SVG) && (tag<=GF_NODE_RANGE_LAST_SVG)) {
		SVGAllAttributes all_atts;
		SVGTimedAnimBaseElement *e = (SVGTimedAnimBaseElement *)timed_elt;
		gf_svg_flatten_attributes((SVG_Element *)e, &all_atts);
		e->timingp = malloc(sizeof(SMILTimingAttributesPointers));
		e->timingp->begin		= all_atts.begin;
		e->timingp->clipBegin	= all_atts.clipBegin;
		e->timingp->clipEnd		= all_atts.clipEnd;
		e->timingp->dur			= all_atts.dur;
		e->timingp->end			= all_atts.end;
		e->timingp->fill		= all_atts.smil_fill;
		e->timingp->max			= all_atts.max;
		e->timingp->min			= all_atts.min;
		e->timingp->repeatCount = all_atts.repeatCount;
		e->timingp->repeatDur	= all_atts.repeatDur;
		e->timingp->restart		= all_atts.restart;
		timingp = e->timingp;
	} 
#ifdef GPAC_ENABLE_SVG_SA
	else if ((tag>=GF_NODE_RANGE_FIRST_SVG_SA) && (tag<=GF_NODE_RANGE_LAST_SVG_SA)) {
		SVG_SA_Element *e = (SVG_SA_Element *)timed_elt;
		e->timingp = malloc(sizeof(SMILTimingAttributesPointers));
		e->timingp->begin		= &e->timing->begin;
		e->timingp->clipBegin	= &e->timing->clipBegin;
		e->timingp->clipEnd		= &e->timing->clipEnd;
		e->timingp->dur			= &e->timing->dur;
		e->timingp->end			= &e->timing->end;
		e->timingp->fill		= &e->timing->fill;
		e->timingp->max			= &e->timing->max;
		e->timingp->min			= &e->timing->min;
		e->timingp->repeatCount = &e->timing->repeatCount;
		e->timingp->repeatDur	= &e->timing->repeatDur;
		e->timingp->restart		= &e->timing->restart;
		timingp = e->timingp;
	}
#endif
#ifdef GPAC_ENABLE_SVG_SANI
	else if ((tag>=GF_NODE_RANGE_FIRST_SVG_SANI) && (tag<=GF_NODE_RANGE_LAST_SVG_SANI)) {
		SVG_SANI_Element *e = (SVG_SANI_Element *)timed_elt;
		e->timingp = malloc(sizeof(SMILTimingAttributesPointers));
		e->timingp->begin		= &e->timing->begin;
		e->timingp->clipBegin	= &e->timing->clipBegin;
		e->timingp->clipEnd		= &e->timing->clipEnd;
		e->timingp->dur			= &e->timing->dur;
		e->timingp->end			= &e->timing->end;
		e->timingp->fill		= &e->timing->fill;
		e->timingp->max			= &e->timing->max;
		e->timingp->min			= &e->timing->min;
		e->timingp->repeatCount = &e->timing->repeatCount;
		e->timingp->repeatDur	= &e->timing->repeatDur;
		e->timingp->restart		= &e->timing->restart;
		timingp = e->timingp;
	}
#endif
	else {
		return;
	}

	if (!timingp) return;

	GF_SAFEALLOC(rti, SMIL_Timing_RTI)
	timingp->runtime = rti;
	rti->timed_elt = timed_elt;
	rti->timingp = timingp;
	rti->status = SMIL_STATUS_WAITING_TO_BEGIN;
	rti->evaluate_status = SMIL_TIMING_EVAL_NONE;	
	rti->intervals = gf_list_new();
	rti->current_interval = NULL;
	rti->evaluate = gf_smil_timing_null_timed_function;
	rti->scene_time = -1;
	rti->media_duration = -1;

	gf_smil_timing_init_interval_list(rti);
	interval_index = gf_smil_timing_find_interval_index(rti, GF_MAX_DOUBLE);
	if (interval_index >= 0) {
		rti->current_interval_index = interval_index;
		rti->current_interval = (SMIL_Interval*)gf_list_get(rti->intervals, rti->current_interval_index);
	} 

	sg = timed_elt->sgprivate->scenegraph;
	while (sg->parent_scene) sg = sg->parent_scene;
	gf_smil_timing_add_to_sg(sg, rti);
}
Example #26
static void MediaDecoder_GetNextAU(GF_Codec *codec, GF_Channel **activeChannel, GF_DBUnit **nextAU)
{
	GF_Channel *ch;
	GF_DBUnit *AU;
	u32 count, minDTS, i;
	count = gf_list_count(codec->inChannels);
	*nextAU = NULL;
	*activeChannel = NULL;

	if (!count) return;

	minDTS = 0;

	/*browse from base to top layer*/
	for (i=0;i<count;i++) {
		ch = (GF_Channel*)gf_list_get(codec->inChannels, i);

		if ((codec->type==GF_STREAM_OCR) && ch->IsClockInit) {
			/*check duration - we assume that scalable OCR streams are just pure nonsense...*/
			if (ch->is_pulling && codec->odm->duration) {
				if (gf_clock_time(codec->ck) > codec->odm->duration)
					gf_es_on_eos(ch);
			}
			return;
		}

		AU = gf_es_get_au(ch);
		if (!AU) {
			if (! (*activeChannel)) *activeChannel = ch;
			continue;
		}

		/*aggregate all AUs with the same timestamp on the base AU and delete the upper layers*/
		if (! *nextAU) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d selected as first layer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS));
			*nextAU = AU;
			*activeChannel = ch;
			minDTS = AU->DTS;
		} else if (AU->DTS == minDTS) {
			GF_DBUnit *baseAU = *nextAU;
			assert(baseAU);
			baseAU->data = gf_realloc(baseAU->data, baseAU->dataLength + AU->dataLength);
			memcpy(baseAU->data + baseAU->dataLength , AU->data, AU->dataLength);
			baseAU->dataLength += AU->dataLength;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d reaggregated on base layer %d\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS, (*activeChannel)->esd->ESID));
			gf_es_drop_au(ch);
		} else {
			break;
		}
	}

	if (codec->is_reordering && *nextAU && codec->first_frame_dispatched) {
		if ((*activeChannel)->esd->slConfig->no_dts_signaling) {
			u32 CTS = (*nextAU)->CTS;
			/*reordering !!*/
			u32 prev_ts_diff;
			u32 diff = 0;
			if (codec->recomputed_cts && (codec->recomputed_cts > (*nextAU)->CTS)) {
				diff = codec->recomputed_cts - CTS;
			}

			prev_ts_diff = (CTS > codec->last_unit_cts) ? (CTS - codec->last_unit_cts) : (codec->last_unit_cts - CTS);
			if (!diff) diff = prev_ts_diff;
			else if (prev_ts_diff && (prev_ts_diff < diff) ) diff = prev_ts_diff;

			if (!codec->min_au_duration || (diff < codec->min_au_duration))
				codec->min_au_duration = diff;
		} else {
			codec->min_au_duration = 0;
			/*FIXME - we're breaking sync (couple of frames delay)*/
			(*nextAU)->CTS = (*nextAU)->DTS;
		}
	}
}
Example #27
Bool compositor_svg_evaluate_conditional(GF_Compositor *compositor, SVGAllAttributes *atts)
{
	u32 i, count;
	Bool found;
	const char *lang_3cc, *lang_2cc;

	/*process required features*/
	count = atts->requiredFeatures ? gf_list_count(*atts->requiredFeatures) : 0;
	for (i=0;i<count;i++) {
		char *feat = NULL;
		XMLRI *iri = gf_list_get(*atts->requiredFeatures, i);
		if (!iri->string) continue;

		if (!strnicmp(iri->string, "org.w3c.svg", 11)) {
			feat = iri->string+12;
			if (feat) {
				if (!stricmp(feat, "animation")) {}
				else if (!stricmp(feat, "dynamic")) {}
				/*no support for filters, clipping & co - SVG 1.0 featureStrings are badly designed*/
				else return 0;
			}
		}
		else if (!strnicmp(iri->string, "http://www.w3.org/TR/SVG11/feature", 34)) {
			feat = iri->string+35;
			if (feat) {
				Bool found = 0;
				u32 j, nbf;
				nbf  = sizeof(svg11_features) / sizeof(struct svg_11_feature);
				for (j=0; j<nbf; j++) {
					if (!strcmp(svg11_features[j].name, feat)) {
						found = 1;
						if (!svg11_features[j].supported) return 0;
						break;
					}
				}
				if (!found) return 0;
			}
		}
		else if (!strnicmp(iri->string, "http://www.w3.org/Graphics/SVG/feature/1.2/", 43)) {
			feat = iri->string+44;
			if (feat) {
				Bool found = 0;
				u32 j, nbf;
				nbf  = sizeof(svg12_features) / sizeof(struct svg_12_feature);
				for (j=0; j<nbf; j++) {
					if (!strcmp(svg12_features[j].name, feat)) {
						found = 1;
						if (!svg12_features[j].supported) return 0;
						break;
					}
				}
				if (!found) return 0;
			}
		}
		/*unrecognized feature*/
		else {
			return 0;
		}
	}

	/*process required extensions*/
	count = atts->requiredExtensions ? gf_list_count(*atts->requiredExtensions) : 0;
	if (count) return 0;

	/*process system language*/
	count = atts->systemLanguage ? gf_list_count(*atts->systemLanguage) : 0;
	if (count) {
		found = 0;
		lang_3cc = gf_cfg_get_key(compositor->user->config, "Systems", "Language3CC");
		if (!lang_3cc) lang_3cc = "und";
		lang_2cc = gf_cfg_get_key(compositor->user->config, "Systems", "Language2CC");
		if (!lang_2cc) lang_2cc = "un";
	} else {
		lang_3cc = "und";
		lang_2cc = "un";
		found = 1;
	}

	for (i=0;i<count;i++) {
		char *lang = gf_list_get(*atts->systemLanguage, i);
		/*3 char-code*/
		if (strlen(lang)==3) {
			if (!stricmp(lang, lang_3cc)) { found = 1; break; }
		}
		/*2 char-code, only check first 2 chars - TODO FIXME*/
		else if (!strnicmp(lang, lang_2cc, 2)) { found = 1; break; }
	}
	if (!found) return 0;

	/*process required formats*/
	count = atts->requiredFormats ? gf_list_count(*atts->requiredFormats) : 0;
	if (count) {
		for (i=0; i<count; i++) {
			const char *opt;
			char *mime = gf_list_get(*atts->requiredFormats, i);
			char *sep = strchr(mime, ';');
			if (sep) sep[0] = 0;
			opt = gf_cfg_get_key(compositor->user->config, "MimeTypes", mime);
			if (sep) sep[0] = ';';
			if (!opt) return 0;
		}
	}

	/*process required fonts*/
	count = atts->requiredFonts ? gf_list_count(*atts->requiredFonts) : 0;
	if (count) {
		for (i=0; i<count; i++) {
			char *font = gf_list_get(*atts->requiredFonts, i);
			if (gf_font_manager_set_font_ex(compositor->font_manager, &font, 1, 0, 1)==NULL)
				return 0;
		}
	}
	
	/*OK, we can render this one*/
	return 1;
}
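The attributes tested above are the standard SVG conditional-processing attributes; an illustrative fragment that would exercise this code (feature string and MIME type invented, kept as a C comment):

/* Hypothetical SVG conditional content:

   <switch>
     <video requiredFormats="video/mp4" systemLanguage="fr" .../>
     <image requiredFeatures="http://www.w3.org/Graphics/SVG/feature/1.2/#Image" .../>
     <text>fallback</text>
   </switch>

   Inside a <switch>, the first child whose conditional attributes all evaluate
   to true is the one rendered; requiredExtensions, requiredFonts and the other
   attributes above are checked the same way. */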
Example #28
/*special handling of decoders not using ESM*/
static GF_Err PrivateScene_Process(GF_Codec *codec, u32 TimeAvailable)
{
	u32 now;
	GF_Channel *ch;
	GF_Scene *scene_locked;
	GF_SceneDecoder *sdec = (GF_SceneDecoder *)codec->decio;
	GF_Err e = GF_OK;

	/*muting a systems codec means we don't decode until mute is off - the effect will likely be visible, but
	there is no other way to decode system AUs without modifying the content, which is what mute is about on visual streams...*/
	if (codec->Muted) return GF_OK;

	if (codec->Status == GF_ESM_CODEC_EOS) {
		gf_term_stop_codec(codec, 0);
		return GF_OK;
	}

	scene_locked = codec->odm->subscene ? codec->odm->subscene : codec->odm->parentscene;

	ch = (GF_Channel*)gf_list_get(codec->inChannels, 0);
	if (!ch) return GF_OK;
	/*init channel clock*/
	if (!ch->IsClockInit) {
		Bool started;
		/*signal seek*/
		if (!gf_mx_try_lock(scene_locked->root_od->term->compositor->mx)) return GF_OK;
		gf_es_init_dummy(ch);

		sdec->ProcessData(sdec, NULL, 0, ch->esd->ESID, -1, GF_CODEC_LEVEL_NORMAL);
		gf_mx_v(scene_locked->root_od->term->compositor->mx);
		started = gf_clock_is_started(ch->clock);
		/*be nice to the scene loader (which usually involves quite a bit of parsing): pause the clock
		while parsing*/
		gf_clock_pause(ch->clock);
		codec->last_unit_dts = 0;
		if (!started) return GF_OK;
	}

	codec->odm->current_time = codec->last_unit_cts = gf_clock_time(codec->ck);

	/*lock scene*/
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[PrivateDec] Codec %s Processing at %d\n", sdec->module_name , codec->odm->current_time));

	if (!gf_mx_try_lock(scene_locked->root_od->term->compositor->mx)) return GF_OK;
	now = gf_term_get_time(codec->odm->term);
	e = sdec->ProcessData(sdec, NULL, 0, ch->esd->ESID, codec->odm->current_time, GF_CODEC_LEVEL_NORMAL);
	now = gf_term_get_time(codec->odm->term) - now;
	codec->last_unit_dts ++;
	/*resume on error*/
	if (e && (codec->last_unit_dts<2) ) {
		gf_clock_resume(ch->clock);
		codec->last_unit_dts = 2;
	}
	/*resume clock on 2nd decode (we assume parsing is done in 2 steps, one for first frame display, one for complete parse)*/
	else if (codec->last_unit_dts==2) {
		gf_clock_resume(ch->clock);
	}

	codec_update_stats(codec, 0, now);

	gf_mx_v(scene_locked->root_od->term->compositor->mx);

	if (e==GF_EOS) {
		/*first end of stream, evaluate duration*/
		//if (!codec->odm->duration) gf_odm_set_duration(codec->odm, ch, codec->odm->current_time);
		gf_es_on_eos(ch);
		return GF_OK;
	}
	return e;
}
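
The clock handling above is easy to miss: the channel clock is paused when the dummy channel is initialised, and last_unit_dts is used as a decode counter to resume it either on error or on the second ProcessData call. A standalone sketch of that counter logic, using plain ints instead of the GPAC codec/clock objects (assumption: nothing else touches the counter between calls):

#include <stdio.h>

int main(void)
{
	int last_unit_dts = 0;	/*reset when the channel clock is initialised (clock paused)*/
	int clock_running = 0;
	int step;

	for (step=1; step<=3; step++) {
		int error = 0;	/*set to 1 to simulate a decode error*/
		last_unit_dts++;
		if (error && (last_unit_dts<2)) {
			clock_running = 1;	/*resume immediately on error*/
			last_unit_dts = 2;
		} else if (last_unit_dts==2) {
			clock_running = 1;	/*resume on 2nd decode - parsing assumed done*/
		}
		printf("decode %d: clock %s\n", step, clock_running ? "running" : "paused");
	}
	return 0;
}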
Example #29
0
void isor_check_buffer_level(ISOMReader *read)
{
	Double dld_time_remaining, mov_rate;
	GF_NetworkCommand com;
	u32 i, total, done, Bps;
	u64 dur;
	GF_NetIOStatus status;
	Bool do_buffer = GF_FALSE;
	if (!read->dnload) return;
	if (!read->mov) return;

	gf_dm_sess_get_stats(read->dnload, NULL, NULL, &total, &done, &Bps, &status);
	if (!Bps) return;


	gf_mx_p(read->segment_mutex);

	/*estimated download time left, in seconds: bytes remaining / download rate*/
	dld_time_remaining = total-done;
	dld_time_remaining /= Bps;

	//add 30 seconds of margin to smooth out bitrate variations
	dld_time_remaining += 30;

	/*average movie rate in bytes per second: file size / duration*/
	mov_rate = total;
	dur = gf_isom_get_duration(read->mov);
	if (dur) {
		mov_rate /= dur;
		mov_rate *= gf_isom_get_timescale(read->mov);
	}

	for (i=0; i<gf_list_count(read->channels); i++) {
		ISOMChannel *ch = gf_list_get(read->channels, i);
		Double time_remain_ch = (Double) gf_isom_get_media_duration(read->mov, ch->track);
		u32 buffer_level=0;
		if (total==done) {
			time_remain_ch = 0;
			do_buffer = GF_FALSE;
		} else if (ch->last_state == GF_EOS) {
			time_remain_ch = 0;
			do_buffer = GF_TRUE;
		} else {
			u64 data_offset;
			u32 di, sn = ch->sample_num ? ch->sample_num : 1;
			GF_ISOSample *samp = gf_isom_get_sample_info(read->mov, ch->track, sn, &di, &data_offset);
			if (!samp) continue;

			data_offset += samp->dataLength;

			//we only send buffer on/off based on remaining playback time in the channel
#if 0
			//we don't have enough data
			if (((data_offset + ch->buffer_min * mov_rate/1000 > done))) {
				do_buffer = GF_TRUE;
			}
			//we have enough buffer
			else if ((data_offset + ch->buffer_max * mov_rate/1000 <= done)) {
				do_buffer = GF_FALSE;
			}
#endif
			time_remain_ch -= (samp->DTS + samp->CTS_Offset);
			if (time_remain_ch<0) time_remain_ch=0;
			gf_isom_sample_del(&samp);

			time_remain_ch /= ch->time_scale;
			if (time_remain_ch && (time_remain_ch < dld_time_remaining)) {
				do_buffer = GF_TRUE;
				if (!read->remain_at_buffering_start || (read->remain_at_buffering_start < dld_time_remaining)) {
					buffer_level = 0;
					read->remain_at_buffering_start = dld_time_remaining;
				} else {
					buffer_level = (u32) (100 * (read->remain_at_buffering_start - dld_time_remaining) / (read->remain_at_buffering_start - time_remain_ch) );
				}
			} else {
				do_buffer = GF_FALSE;
			}
		}

		if (do_buffer != ch->buffering) {
			GF_LOG(GF_LOG_INFO, GF_LOG_NETWORK, ("[IsoMedia] Buffering %s at %d: %g sec still to download and %g sec still to play on track %d (movie rate %g - download rate %g kbps)\n", do_buffer ? "on" : "off", gf_sys_clock(), dld_time_remaining , time_remain_ch, ch->track_id, mov_rate*8/1000, Bps*8.0/1000));

			memset(&com, 0, sizeof(GF_NetworkCommand));
			com.command_type = do_buffer ? GF_NET_CHAN_PAUSE : GF_NET_CHAN_RESUME;
			com.buffer.on_channel = ch->channel;
			com.buffer.min = ch->buffer_min;
			com.buffer.max = ch->buffer_max;
			gf_service_command(read->service, &com, GF_OK);
			ch->buffering = do_buffer;
			read->buffering = do_buffer;
		} else if (ch->buffering) {
			memset(&com, 0, sizeof(GF_NetworkCommand));
			com.command_type = GF_NET_CHAN_BUFFER;
			com.buffer.on_channel = ch->channel;
			com.buffer.min = ch->buffer_min;
			com.buffer.max = ch->buffer_max;
			com.buffer.occupancy = buffer_level;
			gf_service_command(read->service, &com, GF_OK);
		}
	}
	gf_mx_v(read->segment_mutex);
}
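
The buffer_level expression above maps download progress onto a 0-100 occupancy value: 0 when buffering starts (dld_time_remaining equals remain_at_buffering_start) and 100 once the remaining download time has shrunk to the remaining playback time. A small self-contained sketch with made-up numbers, just to show the arithmetic:

#include <stdio.h>

int main(void)
{
	double remain_at_start = 40.0;	/*download time left when buffering started, in seconds*/
	double dld_remaining = 25.0;	/*download time left now*/
	double play_remaining = 10.0;	/*media time still to play on the channel*/

	unsigned int level = (unsigned int) (100 * (remain_at_start - dld_remaining)
	                                         / (remain_at_start - play_remaining));
	printf("buffer occupancy: %u%%\n", level);	/*prints 50 with these numbers*/
	return 0;
}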
Example #30
0
static void TraverseBackground2D(GF_Node *node, void *rs, Bool is_destroy)
{
	u32 col;
	BackgroundStatus *status;
	M_Background2D *bck;
	Background2DStack *stack = (Background2DStack *) gf_node_get_private(node);
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;

	if (is_destroy) {
		DestroyBackground2D(node);
		return;
	}

	bck = (M_Background2D *)node;

	/*special case for background in Layer2D: the background is seen as a regular drawable, so
	TRAVERSE_BINDABLE is not used*/
	switch (tr_state->traversing_mode) {
	case TRAVERSE_DRAW_2D:
		DrawBackground2D_2D(tr_state->ctx, tr_state);
		return;
	case TRAVERSE_PICK:
	case TRAVERSE_GET_BOUNDS:
		return;
	}

	/*first traverse, bound if needed*/
	if (gf_list_find(tr_state->backgrounds, node) < 0) {
		M_Background2D *top_bck = (M_Background2D *)node;
		gf_list_add(tr_state->backgrounds, node);
		assert(gf_list_find(stack->reg_stacks, tr_state->backgrounds)==-1);
		gf_list_add(stack->reg_stacks, tr_state->backgrounds);
		b2D_new_status(stack, bck);

		/*only bound if we're on top*/
		top_bck = gf_list_get(tr_state->backgrounds, 0);
		if (!bck->isBound) {
			if (top_bck== bck) {
				Bindable_SetIsBound(node, 1);
			} else if (!top_bck->isBound) {
				bck->set_bind = 1;
				bck->on_set_bind(node, NULL);
			}
		}
		/*open the stream if any*/
		if (back_use_texture(bck) && !stack->txh.is_open) gf_sc_texture_play(&stack->txh, &bck->url);
		/*in any case don't draw the first time (since the background could have been declared last)*/
		gf_sc_invalidate(stack->txh.compositor, NULL);
		return;
	}
	if (!bck->isBound) return;

	status = b2d_get_status(stack, tr_state->backgrounds);
	if (!status) return;

	if (gf_node_dirty_get(node)) {
		u32 i;
		
		stack->flags |= CTX_APP_DIRTY;
		gf_node_dirty_clear(node, 0);


		col = GF_COL_ARGB_FIXED(FIX_ONE, bck->backColor.red, bck->backColor.green, bck->backColor.blue);
		if (col != status->ctx.aspect.fill_color) {
			status->ctx.aspect.fill_color = col;
			stack->flags |= CTX_APP_DIRTY;
		}
		for (i=0; i<4;i++) {
			stack->col_tx[3*i] = FIX2INT(255 * bck->backColor.red);
			stack->col_tx[3*i+1] = FIX2INT(255 * bck->backColor.green);
			stack->col_tx[3*i+2] = FIX2INT(255 * bck->backColor.blue);
		}
	}

	if (back_use_texture(bck) ) {
#ifndef GPAC_DISABLE_3D
		if (stack->txh.compositor->hybrid_opengl && !tr_state->visual->offscreen && stack->hybgl_init) {
			stack->flags |= CTX_HYBOGL_NO_CLEAR;
		}
		stack->hybgl_init = 1;
#endif
		if (stack->txh.tx_io && !(status->ctx.flags & CTX_APP_DIRTY) && stack->txh.needs_refresh) {
			stack->flags |= CTX_TEXTURE_DIRTY;
		}
	}
	if (status->ctx.flags & CTX_BACKROUND_NOT_LAYER) {
		status->ctx.flags = stack->flags | CTX_BACKROUND_NOT_LAYER;
	} else {
		status->ctx.flags = stack->flags;
		if (tr_state->is_layer)
			status->ctx.flags &= ~CTX_BACKROUND_NOT_LAYER;
	}


	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) return;

	/*3D mode*/
#ifndef GPAC_DISABLE_3D
	if (tr_state->visual->type_3d) {
		DrawBackground2D_3D(bck, stack, tr_state);
	} else
#endif
		DrawBackground2D_2D(&status->ctx, tr_state);
}
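
For reference, the col_tx fill in the dirty branch above converts the node's fixed-point backColor (components in [0, FIX_ONE]) into 8-bit RGB triplets, one per quad corner. A tiny standalone sketch of that conversion, assuming the float build where Fixed is a plain float and FIX2INT truncates toward zero:

#include <stdio.h>

typedef float Fixed;
#define FIX2INT(a)	((int) (a))

int main(void)
{
	Fixed red = 0.25f, green = 0.5f, blue = 1.0f;	/*backColor components in [0,1]*/
	unsigned char col_tx[12];	/*4 corners x RGB*/
	int i;

	for (i=0; i<4; i++) {
		col_tx[3*i] = (unsigned char) FIX2INT(255 * red);
		col_tx[3*i+1] = (unsigned char) FIX2INT(255 * green);
		col_tx[3*i+2] = (unsigned char) FIX2INT(255 * blue);
	}
	printf("corner 0: %d %d %d\n", col_tx[0], col_tx[1], col_tx[2]);	/*63 127 255*/
	return 0;
}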