Example #1
static void animationstream_check_url(AnimationStreamStack *stack, M_AnimationStream *as)
{
    if (!stack->stream) {
        gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
        gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
        stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
        gf_sc_invalidate(stack->compositor, NULL);

        /*if the node is already active, start playing the new stream*/
        if (as->isActive) {
            gf_mo_play(stack->stream, 0, -1, 0);
            gf_mo_set_speed(stack->stream, as->speed);
        }
        return;
    }
    /*check change*/
    if (gf_mo_url_changed(stack->stream, &as->url)) {
        gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
        gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
        /*if changed while playing stop old source*/
        if (as->isActive) {
            gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
            gf_mo_stop(stack->stream);
        }
        gf_mo_unregister((GF_Node *)as, stack->stream);

        stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
        /*if changed while playing play new source*/
        if (as->isActive) {
            gf_mo_play(stack->stream, 0, -1, 0);
            gf_mo_set_speed(stack->stream, as->speed);
        }
        gf_sc_invalidate(stack->compositor, NULL);
    }
}
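This and the following examples share one pattern: after any state change that affects rendering, the compositor is told to rebuild the next frame. A minimal sketch of that pattern (MyNodeStack and my_node_changed are illustrative names, not GPAC symbols; passing NULL as the second argument, as almost every example here does, appears to request a full-scene redraw rather than a per-node invalidation):

static void my_node_changed(MyNodeStack *st)
{
    if (!st) return;
    /* ...update whatever cached state the node keeps... */

    /* NULL: invalidate the whole scene rather than a single node */
    gf_sc_invalidate(st->compositor, NULL);
}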
Example #2
static void TraverseFog(GF_Node *node, void *rs, Bool is_destroy)
{
	Fixed density, vrange;
	SFVec3f start, end;
	ViewStack *vp_st;
	M_Viewpoint *vp;
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_Fog *fog = (M_Fog *) node;
	ViewStack *st = (ViewStack *) gf_node_get_private(node);

	if (is_destroy) {
		DestroyViewStack(node);
		return;
	}

	if (!tr_state->fogs) return;

	/*first traverse, bound if needed*/
	if (gf_list_find(tr_state->fogs, node) < 0) {
		gf_list_add(tr_state->fogs, node);
		if (gf_list_get(tr_state->fogs, 0) == fog) {
			if (!fog->isBound) Bindable_SetIsBound(node, 1);
		}
		assert(gf_list_find(st->reg_stacks, tr_state->fogs)==-1);
		gf_list_add(st->reg_stacks, tr_state->fogs);

		gf_mx_copy(st->world_view_mx, tr_state->model_matrix);
		/*in any case don't draw the first time*/
		gf_sc_invalidate(tr_state->visual->compositor, NULL);
		return;
	}
	/*not evaluating, return*/
	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) {
		if ((tr_state->traversing_mode==TRAVERSE_SORT) || (tr_state->traversing_mode==TRAVERSE_GET_BOUNDS) )
			gf_mx_copy(st->world_view_mx, tr_state->model_matrix);
		return;
	}
	/*not bound*/
	if (!fog->isBound || !fog->visibilityRange) return;

	/*fog visibility is expressed in current bound VP so get its matrix*/
	vp = (M_Viewpoint*)gf_list_get(tr_state->viewpoints, 0);
	vp_st = NULL;
	if (vp && vp->isBound) vp_st = (ViewStack *) gf_node_get_private((GF_Node *)vp);

	start.x = start.y = start.z = 0;
	end.x = end.y = 0;
	end.z = fog->visibilityRange;
	if (vp_st) {
		gf_mx_apply_vec(&vp_st->world_view_mx, &start);
		gf_mx_apply_vec(&vp_st->world_view_mx, &end);
	}
	gf_mx_apply_vec(&st->world_view_mx, &start);
	gf_mx_apply_vec(&st->world_view_mx, &end);
	gf_vec_diff(end, end, start);
	vrange = gf_vec_len(end);

	density = gf_invfix(vrange);
	visual_3d_set_fog(tr_state->visual, fog->fogType.buffer, fog->color, density, vrange);
}
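A quick numeric check of the range computation (assuming gf_invfix is the fixed-point reciprocal): with fog->visibilityRange = 10 and an accumulated viewpoint/fog transform that applies a uniform scale of 2, the end - start vector has length vrange = 20 after transformation, so the density passed to visual_3d_set_fog is 1/20 = 0.05.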
Example #3
static void back_set_bind(GF_Node *node, GF_Route *route)
{
	BackgroundStack *st = (BackgroundStack *)gf_node_get_private(node);
	Bindable_OnSetBind(node, st->reg_stacks, NULL);
	/*and redraw scene*/
	gf_sc_invalidate(st->compositor, NULL);
}
Example #4
void compositor_background_modified(GF_Node *node)
{
	M_Background *bck = (M_Background *)node;
	BackgroundStack *st = (BackgroundStack *) gf_node_get_private(node);
	if (!st) return;

	if (!gf_sg_vrml_field_equal(&bck->skyColor, &st->sky_col, GF_SG_VRML_MFCOLOR)
	        || !gf_sg_vrml_field_equal(&bck->skyAngle, &st->sky_ang, GF_SG_VRML_MFFLOAT)
	   ) {

		if (st->sky_mesh) mesh_free(st->sky_mesh);
		st->sky_mesh = NULL;
		gf_sg_vrml_field_copy(&st->sky_col, &bck->skyColor, GF_SG_VRML_MFCOLOR);
		gf_sg_vrml_field_copy(&st->sky_ang, &bck->skyAngle, GF_SG_VRML_MFFLOAT);
	}
	if (!gf_sg_vrml_field_equal(&bck->groundColor, &st->ground_col, GF_SG_VRML_MFCOLOR)
	        || !gf_sg_vrml_field_equal(&bck->groundAngle, &st->ground_ang, GF_SG_VRML_MFFLOAT)
	   ) {

		if (st->ground_mesh) mesh_free(st->ground_mesh);
		st->ground_mesh = NULL;
		gf_sg_vrml_field_copy(&st->ground_col, &bck->groundColor, GF_SG_VRML_MFCOLOR);
		gf_sg_vrml_field_copy(&st->ground_ang, &bck->groundAngle, GF_SG_VRML_MFFLOAT);
	}

	back_check_gf_sc_texture_change(&st->txh_front, &bck->frontUrl);
	back_check_gf_sc_texture_change(&st->txh_back, &bck->backUrl);
	back_check_gf_sc_texture_change(&st->txh_top, &bck->topUrl);
	back_check_gf_sc_texture_change(&st->txh_bottom, &bck->bottomUrl);
	back_check_gf_sc_texture_change(&st->txh_left, &bck->leftUrl);
	back_check_gf_sc_texture_change(&st->txh_right, &bck->rightUrl);


	gf_sc_invalidate(st->compositor, NULL);
}
Example #5
void compositor_audioclip_modified(GF_Node *node)
{
	M_AudioClip *ac = (M_AudioClip *)node;
	AudioClipStack *st = (AudioClipStack *) gf_node_get_private(node);
	if (!st) return;

	st->failure = 0;

	/*MPEG4 spec is not clear about that, so this is not forbidden*/
	if (st->input.is_open) {
		if (gf_sc_audio_check_url(&st->input, &ac->url)) {
			gf_sc_audio_stop(&st->input);
			gf_sc_audio_open(&st->input, &ac->url, 0, -1);
			/*force unregister to resetup audio cfg*/
			gf_sc_audio_unregister(&st->input);
			gf_sc_invalidate(st->input.compositor, NULL);
		}
	}

	//update state if we're active
	if (ac->isActive) {
		audioclip_update_time(&st->time_handle);
		/*if we're no longer active, don't check for reactivation*/
		if (!ac->isActive) return;
	}

	/*make sure we are still registered*/
	if (!st->time_handle.is_registered && !st->time_handle.needs_unregister) 
		gf_sc_register_time_node(st->input.compositor, &st->time_handle);
	else
		st->time_handle.needs_unregister = 0;
}
Example #6
static void TraverseDepthViewPoint(GF_Node *node, void *rs, Bool is_destroy)
{
    if (!is_destroy && gf_node_dirty_get(node)) {
        GF_TraverseState *tr_state = (GF_TraverseState *) rs;
        GF_FieldInfo field;
        gf_node_dirty_clear(node, 0);

        tr_state->visual->depth_vp_position = 0;
        tr_state->visual->depth_vp_range = 0;
#ifndef GPAC_DISABLE_3D
        if (!tr_state->camera) return;
        tr_state->camera->flags |= CAM_IS_DIRTY;
#endif

        if (gf_node_get_field(node, 0, &field) != GF_OK) return;
        if (field.fieldType != GF_SG_VRML_SFBOOL) return;

        if ( *(SFBool *) field.far_ptr) {
            if (gf_node_get_field(node, 1, &field) != GF_OK) return;
            if (field.fieldType != GF_SG_VRML_SFFLOAT) return;
            tr_state->visual->depth_vp_position = *(SFFloat *) field.far_ptr;
            if (gf_node_get_field(node, 2, &field) != GF_OK) return;
            if (field.fieldType != GF_SG_VRML_SFFLOAT) return;
            tr_state->visual->depth_vp_range = *(SFFloat *) field.far_ptr;
        }
#ifndef GPAC_DISABLE_3D
        if (tr_state->layer3d) gf_node_dirty_set(tr_state->layer3d, GF_SG_NODE_DIRTY, 0);
#endif
        gf_sc_invalidate(tr_state->visual->compositor, NULL);
    }
}
Example #7
void compositor_audiosource_modified(GF_Node *node)
{
	M_AudioSource *as = (M_AudioSource *)node;
	AudioSourceStack *st = (AudioSourceStack *) gf_node_get_private(node);
	if (!st) return;

	/*MPEG4 spec is not clear about that, so this is not forbidden*/
	if (gf_sc_audio_check_url(&st->input, &as->url)) {
		if (st->input.is_open) gf_sc_audio_stop(&st->input);
		/*force unregister to resetup audio cfg*/
		gf_sc_audio_unregister(&st->input);
		gf_sc_invalidate(st->input.compositor, NULL);

		if (st->is_active) gf_sc_audio_open(&st->input, &as->url, 0, -1);
	}

	//update state if we're active
	if (st->is_active) {
		audiosource_update_time(&st->time_handle);
		if (!st->is_active) return;
	}

	/*make sure we are still registered*/
	if (!st->time_handle.is_registered && !st->time_handle.needs_unregister) 
		gf_sc_register_time_node(st->input.compositor, &st->time_handle);
	else
		st->time_handle.needs_unregister = 0;
}
Example #8
void wxGPACPanel::Update() 
{
	if (m_term) {
		//gf_term_set_option(m_term, GF_OPT_PLAY_STATE, GF_STATE_STEP_PAUSE);
		gf_sc_invalidate(m_term->compositor, NULL);
		gf_sc_draw_frame(m_term->compositor);
	}
}
Example #9
void compositor_3d_reset_camera(GF_Compositor *compositor)
{
	GF_Camera *cam = compositor_3d_get_camera(compositor);
	if (cam) {
		camera_reset_viewpoint(cam, GF_TRUE);
		gf_sc_invalidate(compositor, NULL);
	}
	if (compositor->active_layer) gf_node_dirty_set(compositor->active_layer, 0, GF_TRUE);
}
Example #10
static void audiobuffer_activate(AudioBufferStack *st, M_AudioBuffer *ab)
{
	ab->isActive = 1;
	gf_node_event_out_str((GF_Node *)ab, "isActive");
	/*re-traverse the whole graph to get the parent audio group*/
	gf_sc_invalidate(st->output.compositor, NULL);
	st->done = 0;
	st->read_pos = 0;
}
Example #11
static void audiosource_activate(AudioSourceStack *st, M_AudioSource *as)
{
	if (gf_sc_audio_open(&st->input, &as->url, 0, -1) != GF_OK)
		return;
	st->is_active = 1;
	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*traverse the whole graph to get the parent audio group*/
	gf_sc_invalidate(st->input.compositor, NULL);
}
Example #12
static char *gf_audio_input_fetch_frame(void *callback, u32 *size, u32 audio_delay_ms)
{
	char *frame;
	u32 obj_time, ts;
	s32 drift;
	Fixed speed;
	GF_AudioInput *ai = (GF_AudioInput *) callback;
	/*even if the stream is signaled as finished we must check it, because it may have been restarted by a mediaControl*/
	if (!ai->stream) return NULL;
	
	frame = gf_mo_fetch_data(ai->stream, 0, &ai->stream_finished, &ts, size, NULL, NULL);
	/*invalidate scene on end of stream to refresh audio graph*/
	if (ai->stream_finished) gf_sc_invalidate(ai->compositor, NULL);

	/*no more data or not enough data, reset syncro drift*/
	if (!frame) {
		GF_LOG(GF_LOG_INFO, GF_LOG_AUDIO, ("[Audio Input] No data in audio object (eos %d)\n", ai->stream_finished));
		gf_mo_adjust_clock(ai->stream, 0);
		*size = 0;
		return NULL;
	}
	ai->need_release = 1;

	speed = gf_mo_get_current_speed(ai->stream);

	gf_mo_get_object_time(ai->stream, &obj_time);
	obj_time += audio_delay_ms;
	drift = (s32)obj_time;
	drift -= (s32)ts;

#ifdef ENABLE_EARLY_FRAME_DETECTION
	/*too early (silence insertions), skip*/
	if (drift < 0) {
		GF_LOG(GF_LOG_INFO, GF_LOG_AUDIO, ("[Audio Input] audio too early of %d (CTS %u at OTB %u with audio delay %d ms)\n", drift + audio_delay_ms, ts, obj_time, audio_delay_ms));
		ai->need_release = 0;
		gf_mo_release_data(ai->stream, 0, 0);
		*size = 0;
		return NULL;
	}
#endif
	/*adjust drift*/
	if (audio_delay_ms) {
		s32 resync_delay = FIX2INT(speed * MAX_RESYNC_TIME);
		/*CU is way too late, discard and fetch a new one - this usually happens when media speed is greater than 1*/
		if (drift>resync_delay) {
			GF_LOG(GF_LOG_INFO, GF_LOG_AUDIO, ("[Audio Input] Audio data too late obj time %d - CTS %d - drift %d ms - resync forced\n", obj_time - audio_delay_ms, ts, drift));
			gf_mo_release_data(ai->stream, *size, 2);
			ai->need_release = 0;
			return gf_audio_input_fetch_frame(callback, size, audio_delay_ms);
		}
		GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[Audio Input] Audio clock: delay %d - obj time %d - CTS %d - adjust drift %d\n", audio_delay_ms, obj_time - audio_delay_ms, ts, drift));
		gf_mo_adjust_clock(ai->stream, drift);
	}
	return frame;
}
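To make the drift arithmetic concrete: with audio_delay_ms = 100, obj_time = 5000 ms and a frame CTS of ts = 5080 ms, drift = (5000 + 100) - 5080 = 20 ms, i.e. the frame is 20 ms late, so it is played and the clock is adjusted by that amount. A negative drift means the frame is early and, when ENABLE_EARLY_FRAME_DETECTION is defined, it is skipped; a drift larger than speed * MAX_RESYNC_TIME forces the resync branch that discards the frame and fetches the next one.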
Example #13
static void svg_a_set_view(GF_Node *handler, GF_Compositor *compositor, const char *url)
{
	gf_scene_set_fragment_uri(handler, url);
	/*force recompute viewbox of root SVG - FIXME in full this should be the parent svg*/
	gf_node_dirty_set(gf_sg_get_root_node(gf_node_get_graph(handler)), 0, 0);

	compositor->trans_x = compositor->trans_y = 0;
	compositor->rotation = 0;
	compositor->zoom = FIX_ONE;
	compositor_2d_set_user_transform(compositor, FIX_ONE, 0, 0, 0); 				
	gf_sc_invalidate(compositor, NULL);
}
Example #14
static void viewport_set_bind(GF_Node *node, GF_Route *route)
{
	GF_Compositor *rend = gf_sc_get_compositor(node);
	ViewStack *st = (ViewStack *) gf_node_get_private(node);
	Bindable_OnSetBind(node, st->reg_stacks, NULL);

	gf_sc_invalidate(rend, NULL);
	/*notify change of vp stack*/
	VPCHANGED(rend);
	/*and dirty ourselves to force frustum update*/
	gf_node_dirty_set(node, 0, 0);
}
Example #15
static Bool back_texture_enabled(M_Background2D *bck, GF_TextureHandler *txh)
{
	Bool use_texture = back_use_texture(bck);
	if (use_texture) {
		/*texture not ready*/
		if (!txh->tx_io) {
			use_texture = 0;
			gf_sc_invalidate(txh->compositor, NULL);
		}
		gf_sc_texture_set_blend_mode(txh, gf_sc_texture_is_transparent(txh) ? TX_REPLACE : TX_DECAL);
	}
	return use_texture;
}
Example #16
static void audioclip_activate(AudioClipStack *st, M_AudioClip *ac)
{
	if (gf_sc_audio_open(&st->input, &ac->url, 0, -1) != GF_OK) {
		st->failure = 1;
		return;
	}
	ac->isActive = 1;
	gf_node_event_out_str((GF_Node *)ac, "isActive");

	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*traverse the whole graph to get the parent audio group*/
	gf_sc_invalidate(st->input.compositor, NULL);
}
Example #17
GF_EXPORT
void gf_sc_audio_unregister(GF_AudioInput *ai)
{
	GF_AudioInterface *aifce = &ai->input_ifce;
	if (ai->filter) aifce = &ai->filter->input;

	if (ai->register_with_renderer) {
		ai->register_with_renderer = 0;
		gf_sc_ar_remove_src(ai->compositor->audio_renderer, aifce);
	} else {
		/*if used in a parent audio group, do a complete traverse to rebuild the group*/
		gf_sc_invalidate(ai->compositor, NULL);
	}
}
Example #18
static void animationstream_deactivate(AnimationStreamStack *stack, M_AnimationStream *as)
{
    if (as->isActive) {
        as->isActive = 0;
        gf_node_event_out((GF_Node*)as, 6/*"isActive"*/);
    }
    if (stack->stream) {
        if (gf_mo_url_changed(stack->stream, &as->url))
            gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
        gf_mo_stop(stack->stream);
    }
    stack->time_handle.needs_unregister = 1;
    gf_sc_invalidate(stack->compositor, NULL);
}
Example #19
void gf_term_on_node_modified(void *_is, GF_Node *node)
{
	GF_Scene *scene = (GF_Scene *)_is;
	if (!scene) return;
	if (!node) {
		gf_sc_invalidate(scene->root_od->term->compositor, NULL); 
		return;
	}
	
	switch (gf_node_get_tag(node)) {
#ifndef GPAC_DISABLE_VRML
	case TAG_MPEG4_Inline: 
#ifndef GPAC_DISABLE_X3D
	case TAG_X3D_Inline: 
#endif
		gf_inline_on_modified(node); 
		break;
	case TAG_MPEG4_MediaBuffer: 
		break;
	case TAG_MPEG4_MediaControl: 
		MC_Modified(node); 
		break;
	case TAG_MPEG4_MediaSensor: 
		MS_Modified(node); 
		break;
	case TAG_MPEG4_InputSensor: 
		InputSensorModified(node); 
		break;
	case TAG_MPEG4_Conditional: 
		break;
	case TAG_MPEG4_Storage: 
		break;
#endif
	default: gf_sc_invalidate(scene->root_od->term->compositor, node); break;
	}
}
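Note the two flavors of invalidation here: with no node given, gf_sc_invalidate is called with NULL to redraw the whole scene, while for node tags the terminal does not handle specially the node pointer itself is passed, presumably so the compositor can restrict the update to that object instead of forcing a full redraw.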
Example #20
static void SVG_Update_image(GF_TextureHandler *txh)
{	
	MFURL *txurl = &(((SVG_video_stack *)gf_node_get_private(txh->owner))->txurl);

	/*setup texture if needed*/
	if (!txh->is_open && txurl->count) {
		gf_sc_texture_play_from_to(txh, txurl, 0, -1, GF_FALSE, GF_FALSE);
	}

	gf_sc_texture_update_frame(txh, GF_FALSE);
	/*URL is present but not opened - redraw till fetch*/
	if (txh->stream && (!txh->tx_io || txh->needs_refresh) ) {
		/*mark all subtrees using this image as dirty*/
		gf_node_dirty_parents(txh->owner);
		gf_sc_invalidate(txh->compositor, NULL);
	}
}
Example #21
void Bindable_OnSetBind(GF_Node *bindable, GF_List *stack_list, GF_List *for_stack)
{
	u32 i;
	Bool on_top, is_bound, set_bind;
	GF_Node *node;
	GF_List *stack;

	set_bind = Bindable_GetSetBind(bindable);
	is_bound = Bindable_GetIsBound(bindable);

	if (!set_bind && !is_bound) return;
	if (set_bind && is_bound) return;

	i=0;
	while ((stack = (GF_List*)gf_list_enum(stack_list, &i))) {
		if (for_stack && (for_stack!=stack)) continue;

		on_top = (gf_list_get(stack, 0)==bindable) ? GF_TRUE : GF_FALSE;

		if (!set_bind) {
			if (is_bound) Bindable_SetIsBound(bindable, GF_FALSE);
			if (on_top && (gf_list_count(stack)>1)) {
				gf_list_rem(stack, 0);
				gf_list_add(stack, bindable);
				node = (GF_Node*)gf_list_get(stack, 0);
				Bindable_SetIsBound(node, GF_TRUE);
			}
		} else {
			if (!is_bound) Bindable_SetIsBound(bindable, GF_TRUE);
			if (!on_top) {
				/*push old top one down and unbind*/
				node = (GF_Node*)gf_list_get(stack, 0);
				Bindable_SetIsBound(node, GF_FALSE);
				/*insert new top*/
				gf_list_del_item(stack, bindable);
				gf_list_insert(stack, bindable, 0);
			}
		}
	}
	/*force invalidate of the bindable stack's owner*/
	gf_node_dirty_set(bindable, 0, GF_TRUE);
	/*and redraw scene*/
	gf_sc_invalidate(gf_sc_get_compositor(bindable), NULL);
}
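As a worked example of the stack handling: with a stack [A, B, C] where A is bound, a set_bind TRUE on C sets C's isBound, clears A's, and moves C to the front, giving [C, A, B]; a subsequent set_bind FALSE on C clears C's isBound, rotates it to the back ([A, B, C]) and re-binds the new front node A.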
Example #22
GF_EXPORT
void gf_sc_texture_stop(GF_TextureHandler *txh)
{
	if (!txh->is_open) return;
	/*release texture WITHOUT dropping frame*/
	if (txh->needs_release) {
		gf_mo_release_data(txh->stream, 0xFFFFFFFF, -1);
		txh->needs_release = 0;
	}
	gf_sc_invalidate(txh->compositor, NULL);
	if (gf_mo_stop(txh->stream)) {
		txh->data = NULL;
	}
	txh->is_open = 0;

	/*and deassociate object*/
	gf_mo_unregister(txh->owner, txh->stream);
	txh->stream = NULL;
}
Example #23
void compositor_background2d_modified(GF_Node *node)
{
	M_Background2D *bck = (M_Background2D *)node;
	Background2DStack *st = (Background2DStack *) gf_node_get_private(node);
	if (!st) return;

	/*dirty node and parents in order to trigger parent visual redraw*/
	gf_node_dirty_set(node, 0, 1);

	/*if open and changed, stop and play*/
	if (st->txh.is_open) {
		if (! gf_sc_texture_check_url_change(&st->txh, &bck->url)) return;
		gf_sc_texture_stop(&st->txh);
		gf_sc_texture_play(&st->txh, &bck->url);
		return;
	}
	/*if not open and changed play*/
	if (bck->url.count) 
		gf_sc_texture_play(&st->txh, &bck->url);
	gf_sc_invalidate(st->txh.compositor, NULL);
}
Example #24
GF_EXPORT
void gf_sc_audio_register(GF_AudioInput *ai, GF_TraverseState *tr_state)
{
	GF_AudioInterface *aifce;

	/*check interface is valid*/
	if (!ai->input_ifce.FetchFrame
		|| !ai->input_ifce.GetChannelVolume
		|| !ai->input_ifce.GetConfig
		|| !ai->input_ifce.GetSpeed
		|| !ai->input_ifce.IsMuted
		|| !ai->input_ifce.ReleaseFrame
		) return;

	aifce = &ai->input_ifce;
	if (ai->filter) aifce = &ai->filter->input;

	if (tr_state->audio_parent) {
		/*this assumes only one parent may use an audio node*/
		if (ai->register_with_parent) return;
		if (ai->register_with_renderer) {
			gf_sc_ar_remove_src(ai->compositor->audio_renderer, aifce);
			ai->register_with_renderer = 0;
		}
		tr_state->audio_parent->add_source(tr_state->audio_parent, ai);
		ai->register_with_parent = 1;
		ai->snd = tr_state->sound_holder;
	} else if (!ai->register_with_renderer) {
		
		if (ai->register_with_parent) {
			ai->register_with_parent = 0;
			/*if used in a parent audio group, do a complete traverse to rebuild the group*/
			gf_sc_invalidate(ai->compositor, NULL);
		}

		gf_sc_ar_add_src(ai->compositor->audio_renderer, aifce);
		ai->register_with_renderer = 1;
		ai->snd = tr_state->sound_holder;
	}
}
Example #25
static void movietexture_update(GF_TextureHandler *txh)
{
	M_MovieTexture *txnode = (M_MovieTexture *) txh->owner;
	MovieTextureStack *st = (MovieTextureStack *) gf_node_get_private(txh->owner);

	/*setup texture if needed*/
	if (!txh->is_open) return;
	if (!txnode->isActive && st->first_frame_fetched) return;

	/*when fetching the first frame disable resync*/
	gf_sc_texture_update_frame(txh, 0);

	if (txh->stream_finished) {
		if (movietexture_get_loop(st, txnode)) {
			gf_sc_texture_restart(txh);
		}
		/*if active deactivate*/
		else if (txnode->isActive && gf_mo_should_deactivate(st->txh.stream) ) {
			movietexture_deactivate(st, txnode);
		}
	}
	/*first frame is fetched*/
	if (!st->first_frame_fetched && (txh->needs_refresh) ) {
		st->first_frame_fetched = 1;
		txnode->duration_changed = gf_mo_get_duration(txh->stream);
		gf_node_event_out(txh->owner, 7/*"duration_changed"*/);
		/*stop stream if needed*/
		if (!txnode->isActive && txh->is_open) {
			gf_mo_pause(txh->stream);
			/*make sure the refresh flag is not cleared*/
			txh->needs_refresh = 1;
			gf_sc_invalidate(txh->compositor, NULL);
		}
	}
	if (txh->needs_refresh) {
		/*mark all subtrees using this image as dirty*/
		gf_node_dirty_parents(txh->owner);
	}
}
Example #26
static void camera_changed(GF_Compositor *compositor, GF_Camera *cam)
{
	cam->flags |= CAM_IS_DIRTY;
	gf_sc_invalidate(compositor, NULL);
	if (compositor->active_layer) gf_node_dirty_set(compositor->active_layer, 0, 1);
}
Example #27
static void fog_set_bind(GF_Node *node, GF_Route *route)
{
	ViewStack *st = (ViewStack *) gf_node_get_private(node);
	Bindable_OnSetBind(node, st->reg_stacks, NULL);
	gf_sc_invalidate(gf_sc_get_compositor(node), NULL);
}
Example #28
static void TraverseNavigationInfo(GF_Node *node, void *rs, Bool is_destroy)
{
	u32 i;
#ifndef GPAC_DISABLE_3D
	u32 nb_select_mode;
	SFVec3f start, end;
	Fixed scale;
	ViewStack *st = (ViewStack *) gf_node_get_private(node);
#endif
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_NavigationInfo *ni = (M_NavigationInfo *) node;

	if (is_destroy) {
		DestroyViewStack(node);
		return;
	}
#ifdef GPAC_DISABLE_3D

	/*FIXME: we only deal with one node, no bind stack for the time being*/
	for (i=0; i<ni->type.count; i++) {
		if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "NONE")) {
			tr_state->visual->compositor->navigation_disabled = 1;
		}
	}
#else

	if (!tr_state->navigations) return;

	/*first traverse, bound if needed*/
	if (gf_list_find(tr_state->navigations, node) < 0) {
		gf_list_add(tr_state->navigations, node);
		if (gf_list_get(tr_state->navigations, 0) == ni) {
			if (!ni->isBound) Bindable_SetIsBound(node, 1);
		}
		assert(gf_list_find(st->reg_stacks, tr_state->navigations)==-1);
		gf_list_add(st->reg_stacks, tr_state->navigations);
		gf_mx_copy(st->world_view_mx, tr_state->model_matrix);
		/*in any case don't draw the first time*/
		gf_sc_invalidate(tr_state->visual->compositor, NULL);
		return;
	}
	/*not bound*/
	if (!ni->isBound) return;
	/*not evaluating, return*/
	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) {
		if ((tr_state->traversing_mode==TRAVERSE_SORT) || (tr_state->traversing_mode==TRAVERSE_GET_BOUNDS) ) {
			if (!gf_mx_equal(&st->world_view_mx, &tr_state->model_matrix)) {
				gf_mx_copy(st->world_view_mx, tr_state->model_matrix);
				gf_node_dirty_set(node, 0, 0);
			}
		}
		return;
	}

	if (!gf_node_dirty_get(node)) return;
	gf_node_dirty_clear(node, 0);

	nb_select_mode = 0;
	tr_state->camera->navigation_flags = 0;
	tr_state->camera->navigate_mode = 0;
	for (i=0; i<ni->type.count; i++) {
		if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "ANY")) tr_state->camera->navigation_flags |= NAV_ANY;
		else {
			nb_select_mode++;
		}

		if (!tr_state->camera->navigate_mode) {
			if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "NONE")) tr_state->camera->navigate_mode = GF_NAVIGATE_NONE;
			else if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "WALK")) tr_state->camera->navigate_mode = GF_NAVIGATE_WALK;
			else if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "EXAMINE")) tr_state->camera->navigate_mode = GF_NAVIGATE_EXAMINE;
			else if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "FLY")) tr_state->camera->navigate_mode = GF_NAVIGATE_FLY;
			else if (ni->type.vals[i] && !stricmp(ni->type.vals[i], "VR")) tr_state->camera->navigate_mode = GF_NAVIGATE_VR;
		}
	}
	if (nb_select_mode>1) tr_state->camera->navigation_flags |= NAV_SELECTABLE;

	if (ni->headlight) tr_state->camera->navigation_flags |= NAV_HEADLIGHT;

	start.x = start.y = start.z = 0;
	end.x = end.y = 0;
	end.z = FIX_ONE;
	gf_mx_apply_vec(&st->world_view_mx, &start);
	gf_mx_apply_vec(&st->world_view_mx, &end);
	gf_vec_diff(end, end, start);
	scale = gf_vec_len(end);

	tr_state->camera->speed = gf_mulfix(scale, ni->speed);
	tr_state->camera->visibility = gf_mulfix(scale, ni->visibilityLimit);
	if (ni->avatarSize.count) tr_state->camera->avatar_size.x = gf_mulfix(scale, ni->avatarSize.vals[0]);
	if (ni->avatarSize.count>1) tr_state->camera->avatar_size.y = gf_mulfix(scale, ni->avatarSize.vals[1]);
	if (ni->avatarSize.count>2) tr_state->camera->avatar_size.z = gf_mulfix(scale, ni->avatarSize.vals[2]);

	if (0 && tr_state->pixel_metrics) {
		u32 s = MAX(tr_state->visual->width, tr_state->visual->height);
		s /= 2;
//		tr_state->camera->speed = ni->speed;
		tr_state->camera->visibility *= s;
		tr_state->camera->avatar_size.x *= s;
		tr_state->camera->avatar_size.y *= s;
		tr_state->camera->avatar_size.z *= s;
	}
#endif

}
Example #29
static void TraverseViewpoint(GF_Node *node, void *rs, Bool is_destroy)
{
	SFVec3f pos, v1, v2;
	SFRotation ori;
	GF_Matrix mx;
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_Viewpoint *vp = (M_Viewpoint*) node;
	ViewStack *st = (ViewStack *) gf_node_get_private(node);

	if (is_destroy) {
		DestroyViewStack(node);
		return;
	}
	/*may happen in get_bounds*/
	if (!tr_state->viewpoints) return;
//	if (!tr_state->camera->is_3D) return;

	/*first traverse, bound if needed*/
	if (gf_list_find(tr_state->viewpoints, node) < 0) {
		gf_list_add(tr_state->viewpoints, node);
		assert(gf_list_find(st->reg_stacks, tr_state->viewpoints)==-1);
		gf_list_add(st->reg_stacks, tr_state->viewpoints);

		if (gf_list_get(tr_state->viewpoints, 0) == vp) {
			if (!vp->isBound) Bindable_SetIsBound(node, 1);
		} else {
			if (gf_inline_is_default_viewpoint(node)) Bindable_SetSetBind(node, 1);
		}
		VPCHANGED(tr_state->visual->compositor);
		/*in any case don't draw the first time (since the viewport could have been declared last)*/
		if (tr_state->layer3d) gf_node_dirty_set(tr_state->layer3d, GF_SG_VRML_BINDABLE_DIRTY, 0);
		gf_sc_invalidate(tr_state->visual->compositor, NULL);
	}
	/*not evaluating vp, return*/
	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) {
		/*store model matrix if changed - NOTE: we always have a 1-frame delay between VP used and real world...
		we could remove this by pre-traversing the scene before applying vp, but that would mean 2 scene
		traversals*/
		if ((tr_state->traversing_mode==TRAVERSE_SORT) || (tr_state->traversing_mode==TRAVERSE_GET_BOUNDS) ) {
			if (!gf_mx_equal(&st->world_view_mx, &tr_state->model_matrix)) {
				gf_mx_copy(st->world_view_mx, tr_state->model_matrix);
				gf_node_dirty_set(node, 0, 0);
			}
		}
		return;
	}

	/*not bound or in 2D visual*/
	if (!vp->isBound || !tr_state->navigations) return;

	if (!gf_node_dirty_get(node)) return;
	gf_node_dirty_clear(node, 0);

	/*move to local system*/
	gf_mx_copy(mx, st->world_view_mx);
	gf_mx_add_translation(&mx, vp->position.x, vp->position.y, vp->position.z);
	gf_mx_add_rotation(&mx, vp->orientation.q, vp->orientation.x, vp->orientation.y, vp->orientation.z);
	gf_mx_decompose(&mx, &pos, &v1, &ori, &v2);
	/*get center*/
	v1.x = v1.y = v1.z = 0;
#ifndef GPAC_DISABLE_X3D
	/*X3D specifies examine center*/
	if (gf_node_get_tag(node)==TAG_X3D_Viewpoint) v1 = ((X_Viewpoint *)node)->centerOfRotation;
#endif
	gf_mx_apply_vec(&st->world_view_mx, &v1);
	/*set frustum param - animate only if not bound last frame and jump false*/
	visual_3d_viewpoint_change(tr_state, node, (!st->prev_was_bound && !vp->jump) ? 1 : 0, vp->fieldOfView, pos, ori, v1);
	st->prev_was_bound = 1;
}
Example #30
static void TraverseViewport(GF_Node *node, void *rs, Bool is_destroy)
{
	Fixed sx, sy, w, h, tx, ty;
#ifndef GPAC_DISABLE_3D
	GF_Matrix mx;
#endif
	GF_Matrix2D mat;
	GF_Rect rc, rc_bckup;
	ViewStack *st = (ViewStack *) gf_node_get_private(node);
	M_Viewport *vp = (M_Viewport *) node;
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;

	if (is_destroy) {
		DestroyViewStack(node);
		return;
	}

#ifndef GPAC_DISABLE_3D
	if (tr_state->visual->type_3d>1) return;
#endif

	/*first traverse, bound if needed*/
	if (gf_list_find(tr_state->viewpoints, node) < 0) {
		gf_list_add(tr_state->viewpoints, node);
		assert(gf_list_find(st->reg_stacks, tr_state->viewpoints)==-1);
		gf_list_add(st->reg_stacks, tr_state->viewpoints);

		if (gf_list_get(tr_state->viewpoints, 0) == vp) {
			if (!vp->isBound) Bindable_SetIsBound(node, 1);
		} else {
			if (gf_inline_is_default_viewpoint(node)) Bindable_SetSetBindEx(node, 1, tr_state->viewpoints);
		}
		VPCHANGED(tr_state->visual->compositor);
		/*in any case don't draw the first time (since the viewport could have been declared last)*/
		gf_sc_invalidate(tr_state->visual->compositor, NULL);
		return;
	}

	if (tr_state->traversing_mode != TRAVERSE_BINDABLE) return;
	if (!vp->isBound) return;

	if (gf_list_get(tr_state->viewpoints, 0) != vp)
		return;

#ifndef GPAC_DISABLE_3D
	if (tr_state->visual->type_3d) {
		w = tr_state->bbox.max_edge.x - tr_state->bbox.min_edge.x;
		h = tr_state->bbox.max_edge.y - tr_state->bbox.min_edge.y;
	} else
#endif
	{
		w = tr_state->bounds.width;
		h = tr_state->bounds.height;
	}
	if (!w || !h) return;


	/*if no parent this is the main viewport, don't update if not changed*/
//	if (!tr_state->is_layer && !gf_node_dirty_get(node)) return;

	gf_node_dirty_clear(node, 0);

	gf_mx2d_init(mat);
	gf_mx2d_add_translation(&mat, vp->position.x, vp->position.y);
	gf_mx2d_add_rotation(&mat, 0, 0, vp->orientation);

	//compute scaling ratio
	sx = (vp->size.x>=0) ? vp->size.x : w;
	sy = (vp->size.y>=0) ? vp->size.y : h;
	rc = gf_rect_center(sx, sy);
	rc_bckup = rc;

	switch (vp->fit) {
	/*covers all area and respect aspect ratio*/
	case 2:
		if (gf_divfix(rc.width, w) > gf_divfix(rc.height, h)) {
			rc.width = gf_muldiv(rc.width, h, rc.height);
			rc.height = h;
		} else {
			rc.height = gf_muldiv(rc.height , w, rc.width);
			rc.width = w;
		}
		break;
	/*fits inside the area and respect AR*/
	case 1:
		if (gf_divfix(rc.width, w)> gf_divfix(rc.height, h)) {
			rc.height = gf_muldiv(rc.height, w, rc.width);
			rc.width = w;
		} else {
			rc.width = gf_muldiv(rc.width, h, rc.height);
			rc.height = h;
		}
		break;
	/*fit entirely: nothing to change*/
	case 0:
		rc.width = w;
		rc.height = h;
		break;
	default:
		return;
	}
	sx = gf_divfix(rc.width, rc_bckup.width);
	sy = gf_divfix(rc.height, rc_bckup.height);

	/*viewport on root visual, remove compositor scale*/
	if (!tr_state->is_layer && (tr_state->visual->compositor->visual==tr_state->visual) ) {
		sx = gf_divfix(sx, tr_state->visual->compositor->scale_x);
		sy = gf_divfix(sy, tr_state->visual->compositor->scale_y);
	}

	rc.x = - rc.width/2;
	rc.y = rc.height/2;

	tx = ty = 0;
	if (vp->fit && vp->alignment.count) {
		/*left alignment*/
		if (vp->alignment.vals[0] == -1) tx = rc.width/2 - w/2;
		else if (vp->alignment.vals[0] == 1) tx = w/2 - rc.width/2;

		if (vp->alignment.count>1) {
			/*top-alignment*/
			if (vp->alignment.vals[1]==-1) ty = rc.height/2 - h/2;
			else if (vp->alignment.vals[1]==1) ty = h/2 - rc.height/2;
		}
	}

	gf_mx2d_init(mat);
	if (tr_state->pixel_metrics) {
		gf_mx2d_add_scale(&mat, sx, sy);
	} else {
		/*if we are not in pixelMetrics, undo the meterMetrics->pixelMetrics transformation*/
		gf_mx2d_add_scale(&mat, gf_divfix(sx, tr_state->min_hsize), gf_divfix(sy, tr_state->min_hsize) );
	}
	gf_mx2d_add_translation(&mat, tx, ty);

	gf_mx2d_add_translation(&mat, -gf_mulfix(vp->position.x,sx), -gf_mulfix(vp->position.y,sy) );
	gf_mx2d_add_rotation(&mat, 0, 0, vp->orientation);

	tr_state->bounds = rc;
	tr_state->bounds.x += tx;
	tr_state->bounds.y += ty;

#ifndef GPAC_DISABLE_3D
	if (tr_state->visual->type_3d) {
		/*in layers directly modify the model matrix*/
		if (tr_state->is_layer) {
			gf_mx_from_mx2d(&mx, &mat);
			gf_mx_add_matrix(&tr_state->model_matrix, &mx);
		}
		/*otherwise add to camera viewport matrix*/
		else {
			gf_mx_from_mx2d(&tr_state->camera->viewport, &mat);
			tr_state->camera->flags = (CAM_HAS_VIEWPORT | CAM_IS_DIRTY);
		}
	} else
#endif
		gf_mx2d_pre_multiply(&tr_state->transform, &mat);
}
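As a worked example of the fit logic: for a viewport of size 200x100 in a 400x400 rendering area, fit = 1 (fit inside, preserve aspect ratio) rescales rc to 400x200, fit = 2 (cover, preserve aspect ratio) rescales it to 800x400, and fit = 0 simply stretches it to 400x400; the scale factors sx and sy are then the ratios of the new rc to the backed-up rc_bckup (2 in the fit-inside case, 4 in the cover case), before the compositor-scale and alignment corrections are applied.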