Code Example #1
static void OnProximitySensor2D(SensorHandler *sh, Bool is_over, GF_Event *ev, RayHitInfo *hit_info)
{
	M_ProximitySensor2D *ps = (M_ProximitySensor2D *)sh->owner;
	Prox2DStack *stack = (Prox2DStack *) gf_node_get_private(sh->owner);
	
	assert(ps->enabled);
	
	if (is_over) {
		stack->last_time = gf_node_get_scene_time(sh->owner);
		if (prox2D_is_in_sensor(stack, ps, hit_info->local_point.x, hit_info->local_point.y)) {
			ps->position_changed.x = hit_info->local_point.x;
			ps->position_changed.y = hit_info->local_point.y;
			gf_node_event_out_str(sh->owner, "position_changed");

			if (!ps->isActive) {
				ps->isActive = 1;
				gf_node_event_out_str(sh->owner, "isActive");
				ps->enterTime = stack->last_time;
				gf_node_event_out_str(sh->owner, "enterTime");
			}
			return;
		}
	} 
	/*either we're not over the shape or we're not in sensor*/
	if (ps->isActive) {
		ps->exitTime = stack->last_time;
		gf_node_event_out_str(sh->owner, "exitTime");
		ps->isActive = 0;
		gf_node_event_out_str(sh->owner, "isActive");
	}
}
Code Example #2
static Bool Composite_CheckBindables(GF_Node *n, RenderEffect2D *eff, Bool force_check)
{
	GF_Node *btop;
	Bool ret = 0;
	M_CompositeTexture2D *c2d = (M_CompositeTexture2D *)n;
	if (force_check || gf_node_dirty_get(c2d->background)) { gf_node_render(c2d->background, eff); ret = 1; }
	btop = (GF_Node*)gf_list_get(eff->back_stack, 0);
	if (btop != c2d->background) {
		gf_node_unregister(c2d->background, n);
		gf_node_register(btop, n); 
		c2d->background = btop;
		gf_node_event_out_str(n, "background");
		ret = 1;
	}

	if (force_check || gf_node_dirty_get(c2d->viewport)) { gf_node_render(c2d->viewport, eff); ret = 1; }
	btop = (GF_Node*)gf_list_get(eff->view_stack, 0);
	if (btop != c2d->viewport) { 
		gf_node_unregister(c2d->viewport, n);
		gf_node_register(btop, n); 
		c2d->viewport = btop;
		gf_node_event_out_str(n, "viewport");
		ret = 1;
	}
	return ret;
}
Code Example #3
File: mpeg4_sensors.c  Project: jnorthrup/gpac
static Bool OnProximitySensor2D(GF_SensorHandler *sh, Bool is_over, Bool is_cancel, GF_Event *ev, GF_Compositor *compositor)
{
	M_ProximitySensor2D *ps = (M_ProximitySensor2D *)sh->sensor;
	Prox2DStack *stack = (Prox2DStack *) gf_node_get_private(sh->sensor);

	assert(ps->enabled);

	if (is_over) {
		stack->last_time = gf_node_get_scene_time(sh->sensor);
		if (is_cancel) return 0;
		if (prox2D_is_in_sensor(stack, ps, compositor->hit_local_point.x, compositor->hit_local_point.y)) {
			ps->position_changed.x = compositor->hit_local_point.x;
			ps->position_changed.y = compositor->hit_local_point.y;
			gf_node_event_out_str(sh->sensor, "position_changed");

			if (!ps->isActive) {
				ps->isActive = 1;
				gf_node_event_out_str(sh->sensor, "isActive");
				ps->enterTime = stack->last_time;
				gf_node_event_out_str(sh->sensor, "enterTime");
			}
			return 1;
		}
	}
	/*either we're not over the shape or we're not in sensor*/
	if (ps->isActive) {
		ps->exitTime = stack->last_time;
		gf_node_event_out_str(sh->sensor, "exitTime");
		ps->isActive = 0;
		gf_node_event_out_str(sh->sensor, "isActive");
		return 1;
	}
	return 0;
}
Code Example #4
static void OnPlaneSensor(SensorHandler *sh, Bool is_over, GF_Event *ev, RayHitInfo *hit_info)
{
	M_PlaneSensor *ps = (M_PlaneSensor *)sh->owner;
	PSStack *stack = (PSStack *) gf_node_get_private(sh->owner);
	
	
	if (ps->isActive && (!ps->enabled || ((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT)) )) {
		if (ps->autoOffset) {
			ps->offset = ps->translation_changed;
			gf_node_event_out_str(sh->owner, "offset");
		}
		ps->isActive = 0;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 0);
	}
	else if (!ps->isActive && (ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT)) {
		gf_mx_copy(stack->initial_matrix, hit_info->local_to_world);
		gf_vec_diff(stack->start_drag, hit_info->local_point, ps->offset);
		stack->tracker.normal.x = stack->tracker.normal.y = 0; stack->tracker.normal.z = FIX_ONE;
		stack->tracker.d = - gf_vec_dot(stack->start_drag, stack->tracker.normal);
		ps->isActive = 1;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 1);
	}
	else if (ps->isActive) {
		GF_Ray loc_ray;
		SFVec3f res;
		loc_ray = hit_info->world_ray;
		gf_mx_apply_ray(&stack->initial_matrix, &loc_ray);
		gf_plane_intersect_line(&stack->tracker, &loc_ray.orig, &loc_ray.dir, &res);
		ps->trackPoint_changed = res;
		gf_node_event_out_str(sh->owner, "trackPoint_changed");

		gf_vec_diff(res, res, stack->start_drag);
		/*clip*/
		if (ps->minPosition.x <= ps->maxPosition.x) {
			if (res.x < ps->minPosition.x) res.x = ps->minPosition.x;
			if (res.x > ps->maxPosition.x) res.x = ps->maxPosition.x;
		}
		if (ps->minPosition.y <= ps->maxPosition.y) {
			if (res.y < ps->minPosition.y) res.y = ps->minPosition.y;
			if (res.y > ps->maxPosition.y) res.y = ps->maxPosition.y;
		}
		ps->translation_changed = res;
		gf_node_event_out_str(sh->owner, "translation_changed");
	}
}
Code Example #5
static void OnPlaneSensor2D(SensorHandler *sh, Bool is_over, GF_Event *ev, RayHitInfo *hit_info)
{
	M_PlaneSensor2D *ps = (M_PlaneSensor2D *)sh->owner;
	PS2DStack *stack = (PS2DStack *) gf_node_get_private(sh->owner);

	if (ps->isActive && (!ps->enabled || ((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT)) )) {
		if (ps->autoOffset) {
			ps->offset = ps->translation_changed;
			gf_node_event_out_str(sh->owner, "offset");
		}
		ps->isActive = 0;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 0);
	} else if (!ps->isActive && (ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT)) {
		gf_mx_copy(stack->initial_matrix, hit_info->local_to_world);
		stack->start_drag.x = hit_info->local_point.x - ps->offset.x;
		stack->start_drag.y = hit_info->local_point.y - ps->offset.y;
		ps->isActive = 1;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 1);
	} else if (ps->isActive) {
		GF_Ray loc_ray;
		SFVec3f res;
		loc_ray = hit_info->world_ray;
		gf_mx_apply_ray(&stack->initial_matrix, &loc_ray);

		R3D_Get2DPlaneIntersection(&loc_ray, &res);
		ps->trackPoint_changed.x = res.x;
		ps->trackPoint_changed.y = res.y;
		gf_node_event_out_str(sh->owner, "trackPoint_changed");

		res.x -= stack->start_drag.x;
		res.y -= stack->start_drag.y;
		/*clip*/
		if (ps->minPosition.x <= ps->maxPosition.x) {
			if (res.x < ps->minPosition.x) res.x = ps->minPosition.x;
			if (res.x > ps->maxPosition.x) res.x = ps->maxPosition.x;
		}
		if (ps->minPosition.y <= ps->maxPosition.y) {
			if (res.y < ps->minPosition.y) res.y = ps->minPosition.y;
			if (res.y > ps->maxPosition.y) res.y = ps->maxPosition.y;
		}
		ps->translation_changed.x = res.x;
		ps->translation_changed.y = res.y;
		gf_node_event_out_str(sh->owner, "translation_changed");
	}
}
Code Example #6
File: mpeg4_audio.c  Project: golgol7777/gpac
static void audioclip_deactivate(AudioClipStack *st, M_AudioClip *ac)
{
	gf_sc_audio_stop(&st->input);
	ac->isActive = 0;
	gf_node_event_out_str((GF_Node *)ac, "isActive");

	st->time_handle.needs_unregister = 1;
}
Code Example #7
static void OnTouchSensor(SensorHandler *sh, Bool is_over, GF_Event *ev, RayHitInfo *hit_info)
{
	M_TouchSensor *ts = (M_TouchSensor *)sh->owner;
	TouchSensorStack *st = (TouchSensorStack *) gf_node_get_private(sh->owner);
	//assert(ts->enabled);

	/*isActive becomes false, send touch time*/
	if ((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT) && ts->isActive) {
		ts->touchTime = gf_node_get_scene_time(sh->owner);
		gf_node_event_out_str(sh->owner, "touchTime");
		ts->isActive = 0;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(st->compositor, 0);
	}
	if (is_over != ts->isOver) {
		ts->isOver = is_over;
		gf_node_event_out_str(sh->owner, "isOver");
	}
	if (!ts->isActive && (ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT)) {
		ts->isActive = 1;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(st->compositor, 1);
	}
	if (is_over) {
		ts->hitPoint_changed = hit_info->local_point;
		gf_node_event_out_str(sh->owner, "hitPoint_changed");
		ts->hitNormal_changed = hit_info->hit_normal;
		gf_node_event_out_str(sh->owner, "hitNormal_changed");
		ts->hitTexCoord_changed = hit_info->hit_texcoords;
		gf_node_event_out_str(sh->owner, "hitTexCoord_changed");
	}
}
Code Example #8
static void AS_Activate(AnimationStreamStack *stack, M_AnimationStream *as)
{
    AS_CheckURL(stack, as);
    as->isActive = 1;
    gf_node_event_out_str((GF_Node*)as, "isActive");

    gf_mo_play(stack->stream, 0, -1, 0);
    gf_mo_set_speed(stack->stream, as->speed);
}
Code Example #9
static
void ts_deactivate(TimeSensorStack *stack, M_TimeSensor *ts)
{
    ts->isActive = 0;
    gf_node_event_out_str((GF_Node *) ts, "isActive");
    assert(stack->time_handle.is_registered);
    stack->time_handle.needs_unregister = 1;
    stack->num_cycles = 0;
}
Code Example #10
File: mpeg4_audio.c  Project: golgol7777/gpac
static void audiobuffer_activate(AudioBufferStack *st, M_AudioBuffer *ab)
{
	ab->isActive = 1;
	gf_node_event_out_str((GF_Node *)ab, "isActive");
	/*rerender all graph to get parent audio group*/
	gf_sc_invalidate(st->output.compositor, NULL);
	st->done = 0;
	st->read_pos = 0;
}
Code Example #11
static void l3d_CheckBindables(GF_Node *n, GF_TraverseState *tr_state, Bool force_traverse)
{
	GF_Node *btop;
	u32 mode;
	M_Layer3D *l3d;
	l3d = (M_Layer3D *)n;

	mode = tr_state->traversing_mode;
	tr_state->traversing_mode = TRAVERSE_GET_BOUNDS;

	if (force_traverse) gf_node_traverse(l3d->background, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->backgrounds, 0);
	if (btop != l3d->background) { 
		gf_node_unregister(l3d->background, n);
		gf_node_register(btop, n); 
		l3d->background = btop;
		gf_node_event_out_str(n, "background");
	}
	if (force_traverse) gf_node_traverse(l3d->viewpoint, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->viewpoints, 0);
	if (btop != l3d->viewpoint) { 
		gf_node_unregister(l3d->viewpoint, n);
		gf_node_register(btop, n); 
		l3d->viewpoint = btop;
		gf_node_event_out_str(n, "viewpoint");
	}
	if (force_traverse) gf_node_traverse(l3d->navigationInfo, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->navigations, 0);
	if (btop != l3d->navigationInfo) { 
		gf_node_unregister(l3d->navigationInfo, n);
		gf_node_register(btop, n); 
		l3d->navigationInfo = btop;
		gf_node_event_out_str(n, "navigationInfo");
	}
	if (force_traverse) gf_node_traverse(l3d->fog, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->fogs, 0);
	if (btop != l3d->fog) { 
		gf_node_unregister(l3d->fog, n);
		gf_node_register(btop, n); 
		l3d->fog = btop;
		gf_node_event_out_str(n, "fog");
	}
	tr_state->traversing_mode = mode;
}
Code Example #12
static void OnDiscSensor(SensorHandler *sh, Bool is_over, GF_Event *ev, RayHitInfo *hit_info)
{
	M_DiscSensor *ds = (M_DiscSensor *)sh->owner;
	DiscSensorStack *stack = (DiscSensorStack *) gf_node_get_private(sh->owner);
	
	if (ds->isActive && (!ds->enabled || ((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT) ) ) ) {
		if (ds->autoOffset) {
			ds->offset = ds->rotation_changed;
			/*that's an exposedField*/
			gf_node_event_out_str(sh->owner, "offset");
		}
		ds->isActive = 0;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 0);
	}
	else if (!ds->isActive && (ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT)) {
		/*store inverse matrix*/
		gf_mx_copy(stack->initial_matrix, hit_info->local_to_world);
		stack->start_angle = gf_atan2(hit_info->local_point.y, hit_info->local_point.x);
		ds->isActive = 1;
		gf_node_event_out_str(sh->owner, "isActive");
		R3D_SetGrabbed(stack->compositor, 1);
	}
	else if (ds->isActive) {
		GF_Ray loc_ray;
		Fixed rot;
		SFVec3f res;
		loc_ray = hit_info->world_ray;
		gf_mx_apply_ray(&stack->initial_matrix, &loc_ray);
		R3D_Get2DPlaneIntersection(&loc_ray, &res);
	
		rot = gf_atan2(res.y, res.x) - stack->start_angle + ds->offset;
		if (ds->minAngle < ds->maxAngle) {
			/*FIXME this doesn't work properly*/
			if (rot < ds->minAngle) rot = ds->minAngle;
			if (rot > ds->maxAngle) rot = ds->maxAngle;
		}
		ds->rotation_changed = rot;
		gf_node_event_out_str(sh->owner, "rotation_changed");
	   	ds->trackPoint_changed.x = res.x;
	   	ds->trackPoint_changed.y = res.y;
		gf_node_event_out_str(sh->owner, "trackPoint_changed");
	}
}
Code Example #13
File: mpeg4_textures.c  Project: erelh/gpac
static void movietexture_activate(MovieTextureStack *stack, M_MovieTexture *mt, Double scene_time)
{
	mt->isActive = 1;
	gf_node_event_out_str((GF_Node*)mt, "isActive");
	if (!stack->txh.is_open) {
		scene_time -= mt->startTime;
		gf_sc_texture_play_from_to(&stack->txh, &mt->url, scene_time, -1, gf_mo_get_loop(stack->txh.stream, mt->loop), 0);
	}
	gf_mo_set_speed(stack->txh.stream, mt->speed);
}
Code Example #14
File: mpeg4_textures.c  Project: erelh/gpac
static void movietexture_deactivate(MovieTextureStack *stack, M_MovieTexture *mt)
{
	mt->isActive = 0;
	gf_node_event_out_str((GF_Node*)mt, "isActive");
	stack->time_handle.needs_unregister = 1;

	if (stack->txh.is_open) {
		gf_sc_texture_stop(&stack->txh);
	}
}
Code Example #15
File: mpeg4_sensors.c  Project: jnorthrup/gpac
void TraverseVisibilitySensor(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_VisibilitySensor *vs = (M_VisibilitySensor *)node;

	if (is_destroy || !vs->enabled) return;

	if (tr_state->traversing_mode==TRAVERSE_GET_BOUNDS) {
		/*work with a twice-bigger bbox to make sure we're notified when culled out*/
		gf_vec_add(tr_state->bbox.max_edge, vs->center, vs->size);
		gf_vec_diff(tr_state->bbox.min_edge, vs->center, vs->size);
		gf_bbox_refresh(&tr_state->bbox);

	} else if (tr_state->traversing_mode==TRAVERSE_SORT) {
		Bool visible;
		u32 cull_flag;
		GF_BBox bbox;
		SFVec3f s;
		s = gf_vec_scale(vs->size, FIX_ONE/2);
		/*cull with normal bbox*/
		gf_vec_add(bbox.max_edge, vs->center, s);
		gf_vec_diff(bbox.min_edge, vs->center, s);
		gf_bbox_refresh(&bbox);
		cull_flag = tr_state->cull_flag;
		tr_state->cull_flag = CULL_INTERSECTS;
		visible = visual_3d_node_cull(tr_state, &bbox, 0);
		tr_state->cull_flag = cull_flag;

		if (visible && !vs->isActive) {
			vs->isActive = 1;
			gf_node_event_out_str(node, "isActive");
			vs->enterTime = gf_node_get_scene_time(node);
			gf_node_event_out_str(node, "enterTime");
		}
		else if (!visible && vs->isActive) {
			vs->isActive = 0;
			gf_node_event_out_str(node, "isActive");
			vs->exitTime = gf_node_get_scene_time(node);
			gf_node_event_out_str(node, "exitTime");
		}
	}
}
Code Example #16
File: mpeg4_sensors.c  Project: jnorthrup/gpac
static Bool OnTouchSensor(GF_SensorHandler *sh, Bool is_over, Bool is_cancel, GF_Event *ev, GF_Compositor *compositor)
{
	Bool is_mouse = (ev->type<=GF_EVENT_MOUSEWHEEL);
	M_TouchSensor *ts = (M_TouchSensor *)sh->sensor;

	/*this is not specified in VRML, however we consider that a disabled sensor will not send deactivation events*/
	if (!ts->enabled) {
		if (ts->isActive) {
			sh->grabbed = 0;
		}
		return 0;
	}

	/*isActive becomes false, send touch time*/
	if (ts->isActive) {
		if (
			/*mouse*/ ((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT) )
		|| /*keyboard*/ ((ev->type==GF_EVENT_KEYUP) && (ev->key.key_code==GF_KEY_ENTER) )
		) {
			ts->touchTime = gf_node_get_scene_time(sh->sensor);
			if (!is_cancel) gf_node_event_out_str(sh->sensor, "touchTime");
			ts->isActive = 0;
			if (!is_cancel) gf_node_event_out_str(sh->sensor, "isActive");
			sh->grabbed = 0;
			return is_cancel ? 0 : 1;
		}
	}
	if (is_over != ts->isOver) {
		ts->isOver = is_over;
		if (!is_cancel) gf_node_event_out_str(sh->sensor, "isOver");
		return is_cancel ? 0 : 1;
	}
	if (!ts->isActive && is_over) {
		if (/*mouse*/ ((ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT))
			|| /*keyboard*/ ((ev->type==GF_EVENT_KEYDOWN) && (ev->key.key_code==GF_KEY_ENTER))
		) {
			ts->isActive = 1;
			gf_node_event_out_str(sh->sensor, "isActive");
			sh->grabbed = 1;
			return 1;
		}
		if (ev->type==GF_EVENT_MOUSEUP) return 0;
	}
	if (is_over && is_mouse) {
		/*THIS IS NOT CONFORMANT: the hitpoint should be in the touchsensor coordinate system, e.g. we
		should store the matrix from TS -> shape and apply that ...*/
		ts->hitPoint_changed = compositor->hit_local_point;
		gf_node_event_out_str(sh->sensor, "hitPoint_changed");
		ts->hitNormal_changed = compositor->hit_normal;
		gf_node_event_out_str(sh->sensor, "hitNormal_changed");
		ts->hitTexCoord_changed = compositor->hit_texcoords;
		gf_node_event_out_str(sh->sensor, "hitTexCoord_changed");
		return 1;
	}
	return 0;
}
Code Example #17
File: mpeg4_layer_2d.c  Project: golgol7777/gpac
static void l2d_CheckBindables(GF_Node *n, GF_TraverseState *tr_state, Bool force_traverse)
{
	GF_Node *btop;
	M_Layer2D *l2d;
	l2d = (M_Layer2D *)n;
	if (force_traverse) gf_node_traverse(l2d->background, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->backgrounds, 0);
	if (btop != l2d->background) { 
		gf_node_unregister(l2d->background, n);
		gf_node_register(btop, n); 
		l2d->background = btop;
		gf_node_event_out_str(n, "background");
	}
	if (force_traverse) gf_node_traverse(l2d->viewport, tr_state);
	btop = (GF_Node*)gf_list_get(tr_state->viewpoints, 0);
	if (btop != l2d->viewport) { 
		gf_node_unregister(l2d->viewport, n);
		gf_node_register(btop, n); 
		l2d->viewport = btop;
		gf_node_event_out_str(n, "viewport");
	}
}
Code Example #18
File: mpeg4_audio.c  Project: golgol7777/gpac
static void audioclip_activate(AudioClipStack *st, M_AudioClip *ac)
{
	if (gf_sc_audio_open(&st->input, &ac->url, 0, -1) != GF_OK) {
		st->failure = 1;
		return;
	}
	ac->isActive = 1;
	gf_node_event_out_str((GF_Node *)ac, "isActive");

	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*traverse all graph to get parent audio group*/
	gf_sc_invalidate(st->input.compositor, NULL);
}
Code Example #19
static void AS_Deactivate(AnimationStreamStack *stack, M_AnimationStream *as)
{
    if (as->isActive) {
        as->isActive = 0;
        gf_node_event_out_str((GF_Node*)as, "isActive");
    }
    if (stack->stream) {
        if (gf_mo_url_changed(stack->stream, &as->url))
            gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
        gf_mo_stop(stack->stream);
    }
    stack->time_handle.needs_unregister = 1;
    gf_sr_invalidate(stack->compositor, NULL);
}
Code Example #20
File: mpeg4_textures.c  Project: erelh/gpac
static void movietexture_update(GF_TextureHandler *txh)
{
	M_MovieTexture *txnode = (M_MovieTexture *) txh->owner;
	MovieTextureStack *st = (MovieTextureStack *) gf_node_get_private(txh->owner);

	/*setup texture if needed*/
	if (!txh->is_open) return;
	if (!txnode->isActive && st->first_frame_fetched) return;

	/*when fetching the first frame disable resync*/
	gf_sc_texture_update_frame(txh, !txnode->isActive);

	if (txh->stream_finished) {
		if (movietexture_get_loop(st, txnode)) {
			gf_sc_texture_restart(txh);
		}
		/*if active deactivate*/
		else if (txnode->isActive && gf_mo_should_deactivate(st->txh.stream) ) {
			movietexture_deactivate(st, txnode);
		}
	}
	/*first frame is fetched*/
	if (!st->first_frame_fetched && (txh->needs_refresh) ) {
		st->first_frame_fetched = 1;
		txnode->duration_changed = gf_mo_get_duration(txh->stream);
		gf_node_event_out_str(txh->owner, "duration_changed");
		/*stop stream if needed*/
		if (!txnode->isActive && txh->is_open) {
			gf_sc_texture_stop(txh);
			/*make sure the refresh flag is not cleared*/
			txh->needs_refresh = 1;
		}
	}
	if (txh->needs_refresh) {
		/*mark all subtrees using this image as dirty*/
		gf_node_dirty_parents(txh->owner);
		gf_sc_invalidate(txh->compositor, NULL);
	}
}
Code Example #21
File: mpeg4_audio.c  Project: golgol7777/gpac
static void audioclip_traverse(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_AudioClip *ac = (M_AudioClip *)node;
	AudioClipStack *st = (AudioClipStack *)gf_node_get_private(node);

	if (is_destroy) {
		gf_sc_audio_predestroy(&st->input);
		if (st->time_handle.is_registered) {
			gf_sc_unregister_time_node(st->input.compositor, &st->time_handle);
		}
		gf_free(st);
		return;
	}
	if (st->failure) return;

	/*check end of stream*/
	if (st->input.stream && st->input.stream_finished) {
		if (gf_mo_get_loop(st->input.stream, ac->loop)) {
			gf_sc_audio_restart(&st->input);
		} else if (ac->isActive && gf_mo_should_deactivate(st->input.stream)) {
			/*deactivate*/
			audioclip_deactivate(st, ac);
		}
	}
	if (ac->isActive) {
		gf_sc_audio_register(&st->input, (GF_TraverseState*)rs);
	}
	if (st->set_duration && st->input.stream) {
		ac->duration_changed = gf_mo_get_duration(st->input.stream);
		gf_node_event_out_str(node, "duration_changed");
		st->set_duration = 0;
	}

	/*store mute flag*/
	st->input.is_muted = tr_state->switched_off;
}
Code Example #22
static void Conditional_execute(M_Conditional *node)
{
	GF_Err e;
	char *buffer;
	u32 len;
	GF_BitStream *bs;
	GF_BifsDecoder *codec;
	GF_Proto *prevproto;
	GF_SceneGraph *prev_graph, *cur_graph;
	ConditionalStack *priv = (ConditionalStack*)gf_node_get_private((GF_Node*)node);
	if (!priv) return;

	/*set the codec working graph to the node one (to handle conditional in protos)*/
	prev_graph = priv->codec->current_graph;
	cur_graph = priv->codec->current_graph = gf_node_get_graph((GF_Node*)node);
	assert(priv->codec->current_graph);

	priv->codec->info = priv->info;
	prevproto = priv->codec->pCurrentProto;
	priv->codec->pCurrentProto = NULL;
	if (priv->codec->current_graph->pOwningProto) priv->codec->pCurrentProto = priv->codec->current_graph->pOwningProto->proto_interface;

	/*set isActive - to clarify in the specs*/
	node->isActive = 1;
	gf_node_event_out_str((GF_Node *)node, "isActive");
	if (!node->buffer.bufferSize) return;

	/*we may replace ourselves*/
	buffer = (char*)node->buffer.buffer;
	len = node->buffer.bufferSize;
	node->buffer.buffer = NULL;
	node->buffer.bufferSize = 0;
	bs = gf_bs_new(buffer, len, GF_BITSTREAM_READ);
	codec = priv->codec;
	codec->cts_offset = gf_node_get_scene_time((GF_Node*)node);
	/*a conditional may destroy/replace itself - to prevent that, protect node by a register/unregister ...*/
	gf_node_register((GF_Node*)node, NULL);
#ifdef GF_SELF_REPLACE_ENABLE
	/*and a conditional may destroy the entire scene!*/
	cur_graph->graph_has_been_reset = 0;
#endif
	e = gf_bifs_dec_command(codec, bs);
	gf_bs_del(bs);
#ifdef GF_SELF_REPLACE_ENABLE
	if (cur_graph->graph_has_been_reset) {
		return;
	}
#endif
	if (node->buffer.buffer) {
		free(buffer);
	} else {
		node->buffer.buffer = buffer;
		node->buffer.bufferSize = len;
	}
	//set isActive - to clarify in the specs
//	node->isActive = 0;
	gf_node_unregister((GF_Node*)node, NULL);
	codec->cts_offset = 0;
	codec->pCurrentProto = prevproto;
	codec->current_graph = prev_graph;
}
Code Example #23
File: input_sensor.c  Project: Kurtnoise/gpac
Bool gf_term_keyboard_input(GF_Terminal *term, u32 key_code, u32 hw_code, Bool isKeyUp)
{
	u32 i;
	GF_BitStream *bs;
	GF_SLHeader slh;
	char *buf;
#ifndef GPAC_DISABLE_X3D
	X_KeySensor *n;
#endif
	u32 buf_size;
	u32 actionKey = 0;
	u32 shiftKeyDown, controlKeyDown, altKeyDown;
	GF_Codec *cod;
	s32 keyPressed, keyReleased, actionKeyPressed, actionKeyReleased;

	if (!term || (!gf_list_count(term->input_streams) && !gf_list_count(term->x3d_sensors)) ) return 0;

	memset(&slh, 0, sizeof(GF_SLHeader));
	slh.accessUnitStartFlag = slh.accessUnitEndFlag = 1;
	slh.compositionTimeStampFlag = 1;
	/*cf above*/
	slh.compositionTimeStamp = 0;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);

	shiftKeyDown = controlKeyDown = altKeyDown = 0;
	keyPressed = keyReleased = actionKeyPressed = actionKeyReleased = 0;
	/*key-sensor codes*/
	switch (key_code) {
	case GF_KEY_F1: actionKey = 1; break;
	case GF_KEY_F2: actionKey = 2; break;
	case GF_KEY_F3: actionKey = 3; break;
	case GF_KEY_F4: actionKey = 4; break;
	case GF_KEY_F5: actionKey = 5; break;
	case GF_KEY_F6: actionKey = 6; break;
	case GF_KEY_F7: actionKey = 7; break;
	case GF_KEY_F8: actionKey = 8; break;
	case GF_KEY_F9: actionKey = 9; break;
	case GF_KEY_F10: actionKey = 10; break;
	case GF_KEY_F11: actionKey = 11; break;
	case GF_KEY_F12: actionKey = 12; break;
	case GF_KEY_HOME: actionKey = 13; break;
	case GF_KEY_END: actionKey = 14; break;
	case GF_KEY_PAGEUP: actionKey = 15; break;
	case GF_KEY_PAGEDOWN: actionKey = 16; break;
	case GF_KEY_UP: actionKey = 17; break;
	case GF_KEY_DOWN: actionKey = 18; break;
	case GF_KEY_LEFT: actionKey = 19; break;
	case GF_KEY_RIGHT: actionKey = 20; break;
	case GF_KEY_SHIFT:
		actionKey = 0;
		shiftKeyDown = isKeyUp ? 1 : 2;
		break;
	case GF_KEY_CONTROL:
		actionKey = 0;
		controlKeyDown = isKeyUp ? 1 : 2;
		break;
	case GF_KEY_ALT:
		actionKey = 0;
		altKeyDown = isKeyUp ? 1 : 2;
		break;

	default: actionKey = 0; break;
	}
	if (actionKey) {
		if (isKeyUp) 
			actionKeyReleased = actionKey;
		else 
			actionKeyPressed = actionKey;
	} else {
		/*handle numeric pad*/
		if ((key_code>=GF_KEY_0) && (key_code<=GF_KEY_9) ) {
			key_code = key_code + 0x30 - GF_KEY_0;
		} 
		else 
			key_code = hw_code;

		if (isKeyUp) keyReleased = key_code;
		else keyPressed = key_code;
	}

	gf_bs_write_int(bs, keyPressed ? 1 : 0, 1); 
	if (keyPressed) gf_bs_write_int(bs, keyPressed, 32);
	gf_bs_write_int(bs, keyReleased ? 1 : 0, 1); 
	if (keyReleased) gf_bs_write_int(bs, keyReleased, 32);
	gf_bs_write_int(bs, actionKeyPressed ? 1 : 0, 1); 
	if (actionKeyPressed) gf_bs_write_int(bs, actionKeyPressed, 32);
	gf_bs_write_int(bs, actionKeyReleased ? 1 : 0, 1); 
	if (actionKeyReleased) gf_bs_write_int(bs, actionKeyReleased, 32);
	gf_bs_write_int(bs, shiftKeyDown ? 1 : 0 , 1); 
	if (shiftKeyDown) gf_bs_write_int(bs, shiftKeyDown-1, 1);
	gf_bs_write_int(bs, controlKeyDown ? 1 : 0 , 1); 
	if (controlKeyDown) gf_bs_write_int(bs, controlKeyDown-1, 1);
	gf_bs_write_int(bs, altKeyDown ? 1 : 0 , 1); 
	if (altKeyDown) gf_bs_write_int(bs, altKeyDown-1, 1);

	gf_bs_align(bs);
	gf_bs_get_content(bs, &buf, &buf_size);
	gf_bs_del(bs);

	/*get all IS keySensor decoders and send frame*/
	i=0;
	while ((cod = (GF_Codec*)gf_list_enum(term->input_streams, &i))) {
		ISPriv *is = (ISPriv *)cod->decio->privateStack;
		if (is->type==IS_KeySensor) {
//			GF_Channel *ch = gf_list_get(cod->inChannels, 0);
//			gf_es_receive_sl_packet(ch->service, ch, buf, buf_size, &slh, GF_OK);

			IS_ProcessData((GF_SceneDecoder*)cod->decio, buf, buf_size, 0, 0, 0);
		}
	}
	gf_free(buf);
	
#ifndef GPAC_DISABLE_X3D
	i=0;
	while ((n = (X_KeySensor*)gf_list_enum(term->x3d_sensors, &i))) {
		u16 tc[2];
		u32 len;
		char szStr[10];
		const unsigned short *ptr;
		if (gf_node_get_tag((GF_Node*)n) != TAG_X3D_KeySensor) continue;
		if (!n->enabled) return 0;

		if (keyPressed) {
			if (n->keyPress.buffer) gf_free(n->keyPress.buffer);
			tc[0] = keyPressed; tc[1] = 0;
			ptr = tc;
			len = gf_utf8_wcstombs(szStr, 10, &ptr);
			n->keyPress.buffer = (char*)gf_malloc(sizeof(char) * (len+1));
			memcpy(n->keyPress.buffer, szStr, sizeof(char) * len);
			n->keyPress.buffer[len] = 0;
			gf_node_event_out_str((GF_Node *)n, "keyPress");
		}
		if (keyReleased) {
			if (n->keyRelease.buffer) gf_free(n->keyRelease.buffer);
			tc[0] = keyReleased; tc[1] = 0;
			ptr = tc;
			len = gf_utf8_wcstombs(szStr, 10, &ptr);
			n->keyRelease.buffer = (char*)gf_malloc(sizeof(char) * (len+1));
			memcpy(n->keyRelease.buffer, szStr, sizeof(char) * len);
			n->keyRelease.buffer[len] = 0;
			gf_node_event_out_str((GF_Node *)n, "keyRelease");
		}
		if (actionKeyPressed) {
			n->actionKeyPress = actionKeyPressed;
			gf_node_event_out_str((GF_Node *)n, "actionKeyPress");
		}
		if (actionKeyReleased) {
			n->actionKeyRelease = actionKeyReleased;
			gf_node_event_out_str((GF_Node *)n, "actionKeyRelease");
		}
		if (shiftKeyDown) {
			n->shiftKey = (shiftKeyDown-1) ? 1 : 0;
			gf_node_event_out_str((GF_Node *)n, "shiftKey");
		}
		if (controlKeyDown) {
			n->controlKey = (controlKeyDown-1) ? 1 : 0;
			gf_node_event_out_str((GF_Node *)n, "controlKey");
		}
		if (altKeyDown) {
			n->altKey= (altKeyDown-1) ? 1 : 0;
			gf_node_event_out_str((GF_Node *)n, "altKey");
		}
		if (keyPressed || actionKeyPressed || (shiftKeyDown-1) || (controlKeyDown-1) || (altKeyDown-1)) {
			if (!n->isActive) {
				n->isActive = 1;
				gf_node_event_out_str((GF_Node *)n, "isActive");
			}
		} else if (n->isActive) {
			n->isActive = 0;
			gf_node_event_out_str((GF_Node *)n, "isActive");
		}
	}
#endif
	/*with KeySensor, we don't know if the key will be consumed or not, assume it is*/
	return 1;
}
Code Example #24
static
void UpdateTimeSensor(GF_TimeNode *st)
{
    Double currentTime, cycleTime;
    Fixed newFraction;
    u32 inc;
    M_TimeSensor *TS = (M_TimeSensor *)st->obj;
    TimeSensorStack *stack = (TimeSensorStack *)gf_node_get_private(st->obj);

    if (! TS->enabled) {
        if (TS->isActive) {
            TS->cycleTime = gf_node_get_scene_time(st->obj);
            gf_node_event_out_str(st->obj, "cycleTime");
            ts_deactivate(stack, TS);
        }
        return;
    }

    if (stack->store_info) {
        stack->store_info = 0;
        stack->start_time = TS->startTime;
        stack->cycle_interval = TS->cycleInterval;
    }

    currentTime = gf_node_get_scene_time(st->obj);
    if (!TS->isActive) {
        if (currentTime < stack->start_time) return;
        /*special case: if we're greater than both start and stop time don't activate*/
        if (!TS->isActive && (TS->stopTime > stack->start_time) && (currentTime >= TS->stopTime)) {
            stack->time_handle.needs_unregister = 1;
            return;
        }
        if (stack->is_x3d && !TS->loop) {
            if (!stack->start_time) return;
            if (currentTime >= TS->startTime+stack->cycle_interval) return;
        }
    }

    cycleTime = currentTime - stack->start_time - stack->num_cycles * stack->cycle_interval;
    newFraction = FLT2FIX ( fmod(cycleTime, stack->cycle_interval) / stack->cycle_interval );

    if (TS->isActive) {
        TS->time = currentTime;
        gf_node_event_out_str(st->obj, "time");

        /*VRML:
        	"f = fmod( (now - startTime) , cycleInterval) / cycleInterval
        	if (f == 0.0 && now > startTime) fraction_changed = 1.0
        	else fraction_changed = f"
        */
        if (!newFraction && (currentTime > stack->start_time ) ) newFraction = FIX_ONE;

        /*check for deactivation - if so generate a fraction_changed AS IF NOW WAS EXACTLY STOPTIME*/
        if ((TS->stopTime > stack->start_time) && (currentTime >= TS->stopTime) ) {
            cycleTime = TS->stopTime - stack->start_time - stack->num_cycles * stack->cycle_interval;
            TS->fraction_changed = FLT2FIX( fmod(cycleTime, stack->cycle_interval) / stack->cycle_interval );
            /*cf above*/
            if (TS->fraction_changed < FIX_EPSILON) TS->fraction_changed = FIX_ONE;
            gf_node_event_out_str(st->obj, "fraction_changed");
            ts_deactivate(stack, TS);
            return;
        }
        if (! TS->loop) {
            if (cycleTime >= stack->cycle_interval) {
                TS->fraction_changed = FIX_ONE;
                gf_node_event_out_str(st->obj, "fraction_changed");
                ts_deactivate(stack, TS);
                return;
            }
        }
        TS->fraction_changed = newFraction;
        gf_node_event_out_str(st->obj, "fraction_changed");
    }

    /*we're (about to be) active: VRML:
    "A time-dependent node is inactive until its startTime is reached. When time now becomes greater than or
    equal to startTime, an isActive TRUE event is generated and the time-dependent node becomes active 	*/

    if (!TS->isActive) {
        st->needs_unregister = 0;
        TS->isActive = 1;
        gf_node_event_out_str(st->obj, "isActive");
        TS->cycleTime = currentTime;
        gf_node_event_out_str(st->obj, "cycleTime");
        TS->fraction_changed = newFraction;
        gf_node_event_out_str(st->obj, "fraction_changed");
    }

    //compute cycle time
    if (TS->loop && (cycleTime >= stack->cycle_interval) ) {
        inc = 1 + (u32) ( (cycleTime - stack->cycle_interval ) / stack->cycle_interval );
        stack->num_cycles += inc;
        cycleTime -= inc*stack->cycle_interval;
        TS->cycleTime = currentTime - cycleTime;
        gf_node_event_out_str(st->obj, "cycleTime");
    }
}
Code Example #25
File: mpeg4_audio.c  Project: golgol7777/gpac
static void audiobuffer_deactivate(AudioBufferStack *st, M_AudioBuffer *ab)
{
	ab->isActive = 0;
	gf_node_event_out_str((GF_Node *)ab, "isActive");
	st->time_handle.needs_unregister = 1;
}
Code Example #26
File: mpeg4_sensors.c  Project: jnorthrup/gpac
static Bool OnCylinderSensor(GF_SensorHandler *sh, Bool is_over, Bool is_cancel, GF_Event *ev, GF_Compositor *compositor)
{
	Bool is_mouse = (ev->type<=GF_EVENT_MOUSEWHEEL) ? 1 : 0;
	M_CylinderSensor *cs = (M_CylinderSensor *)sh->sensor;
	CylinderSensorStack *st = (CylinderSensorStack *) gf_node_get_private(sh->sensor);

	if (cs->isActive && (!cs->enabled
		|| /*mouse*/((ev->type==GF_EVENT_MOUSEUP) && (ev->mouse.button==GF_MOUSE_LEFT))
		|| /*keyboard*/(!is_mouse && (!is_over || ((ev->type==GF_EVENT_KEYDOWN) && (ev->key.key_code==GF_KEY_ENTER))))
	) ) {
		if (cs->autoOffset) {
			cs->offset = cs->rotation_changed.q;
			if (!is_cancel) gf_node_event_out_str(sh->sensor, "offset");
		}
		cs->isActive = 0;
		if (!is_cancel) gf_node_event_out_str(sh->sensor, "isActive");
		sh->grabbed = 0;
		return is_cancel ? 0 : 1;
	}
	else if (is_mouse) {
		if (!cs->isActive && (ev->type==GF_EVENT_MOUSEDOWN) && (ev->mouse.button==GF_MOUSE_LEFT)) {
			GF_Ray r;
			SFVec3f yaxis;
			Fixed acute, reva;
			SFVec3f bearing;

			gf_mx_copy(st->init_matrix, compositor->hit_world_to_local);
			/*get initial angle & check disk mode*/
			r = compositor->hit_world_ray;
			gf_vec_add(r.dir, r.orig, r.dir);
			gf_mx_apply_vec(&compositor->hit_world_to_local, &r.orig);
			gf_mx_apply_vec(&compositor->hit_world_to_local, &r.dir);
			gf_vec_diff(bearing, r.orig, r.dir);
			gf_vec_norm(&bearing);
			yaxis.x = yaxis.z = 0;
			yaxis.y = FIX_ONE;
			acute = gf_vec_dot(bearing, yaxis);
			if (acute < -FIX_ONE) acute = -FIX_ONE;
			else if (acute > FIX_ONE) acute = FIX_ONE;
			acute = gf_acos(acute);
			reva = ABS(GF_PI - acute);
			if (reva<acute) acute = reva;
			st->disk_mode = (acute < cs->diskAngle) ? 1 : 0;

			st->grab_start = compositor->hit_local_point;
			/*cos we're lazy*/
			st->yplane.d = 0; st->yplane.normal.x = st->yplane.normal.z = st->yplane.normal.y = 0;
			st->zplane = st->xplane = st->yplane;
			st->xplane.normal.x = FIX_ONE;
			st->yplane.normal.y = FIX_ONE;
			st->zplane.normal.z = FIX_ONE;

			cs->rotation_changed.x = 0;
			cs->rotation_changed.y = FIX_ONE;
			cs->rotation_changed.z = 0;

			cs->isActive = 1;
			gf_node_event_out_str(sh->sensor, "isActive");
			sh->grabbed = 1;
			return 1;
		}
		else if (cs->isActive) {
			GF_Ray r;
			Fixed radius, rot;
			SFVec3f dir1, dir2, cx;

			if (is_over) {
				cs->trackPoint_changed = compositor->hit_local_point;
				gf_node_event_out_str(sh->sensor, "trackPoint_changed");
			} else {
				GF_Plane project_to;
				r = compositor->hit_world_ray;
				gf_mx_apply_ray(&st->init_matrix, &r);

				/*no intersection, use intersection with "main" fronting plane*/
				if ( ABS(r.dir.z) > ABS(r.dir.y)) {
					if (ABS(r.dir.x) > ABS(r.dir.z)) project_to = st->xplane;
					else project_to = st->zplane;
				} else project_to = st->yplane;
				if (!gf_plane_intersect_line(&project_to, &r.orig, &r.dir, &compositor->hit_local_point)) return 0;
			}

  			dir1.x = compositor->hit_local_point.x; dir1.y = 0; dir1.z = compositor->hit_local_point.z;
			if (st->disk_mode) {
				radius = FIX_ONE;
			} else {
				radius = gf_vec_len(dir1);
			}
			gf_vec_norm(&dir1);
       		dir2.x = st->grab_start.x; dir2.y = 0; dir2.z = st->grab_start.z;
			gf_vec_norm(&dir2);
			cx = gf_vec_cross(dir2, dir1);
			gf_vec_norm(&cx);
			if (gf_vec_len(cx)<FIX_EPSILON) return 0;
			rot = gf_mulfix(radius, gf_acos(gf_vec_dot(dir2, dir1)) );
			if (fabs(cx.y + FIX_ONE) < FIX_EPSILON) rot = -rot;
			if (cs->autoOffset) rot += cs->offset;

			if (cs->minAngle < cs->maxAngle) {
				if (rot < cs->minAngle) rot = cs->minAngle;
				else if (rot > cs->maxAngle) rot = cs->maxAngle;
			}
			cs->rotation_changed.q = rot;
			gf_node_event_out_str(sh->sensor, "rotation_changed");
			return 1;
		}
	} else {
		if (!cs->isActive && is_over && (ev->type==GF_EVENT_KEYDOWN) && (ev->key.key_code==GF_KEY_ENTER)) {
			cs->isActive = 1;
			cs->rotation_changed.q = cs->offset;
			cs->rotation_changed.x = cs->rotation_changed.z = 0;
			cs->rotation_changed.y = FIX_ONE;
			gf_node_event_out_str(sh->sensor, "isActive");
			return 1;
		}
		else if (cs->isActive && (ev->type==GF_EVENT_KEYDOWN)) {
			SFFloat res;
			Fixed diff = (ev->key.flags & GF_KEY_MOD_SHIFT) ? GF_PI/8 : GF_PI/64;

			res = cs->rotation_changed.q;
			switch (ev->key.key_code) {
			case GF_KEY_LEFT: res -= diff; break;
			case GF_KEY_RIGHT: res += diff; break;
			case GF_KEY_HOME: res = cs->offset; break;
			default: return 0;
			}
			/*clip*/
			if (cs->minAngle <= cs->maxAngle) {
				if (res < cs->minAngle) res = cs->minAngle;
				if (res > cs->maxAngle) res = cs->maxAngle;
			}
			cs->rotation_changed.q = res;
			gf_node_event_out_str(sh->sensor, "rotation_changed");
			return 1;
		}
	}
	return 0;
}
Code Example #27
static Bool composite_do_bindable(GF_Node *n, GF_TraverseState *tr_state, Bool force_check)
{
	GF_Node *btop;
	Bool ret = 0;
	switch (gf_node_get_tag(n)) {
#ifndef GPAC_DISABLE_3D
	case TAG_MPEG4_CompositeTexture3D:
	{
		M_CompositeTexture3D*c3d = (M_CompositeTexture3D*)n;
		if (force_check || gf_node_dirty_get(c3d->background)) { gf_node_traverse(c3d->background, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->backgrounds, 0);
		if (btop != c3d->background) {
			gf_node_unregister(c3d->background, n);
			gf_node_register(btop, n);
			c3d->background = btop;
			gf_node_event_out_str(n, "background");
			ret = 1;
		}
		if (force_check || gf_node_dirty_get(c3d->viewpoint)) { gf_node_traverse(c3d->viewpoint, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->viewpoints, 0);
		if (btop != c3d->viewpoint) {
			gf_node_unregister(c3d->viewpoint, n);
			gf_node_register(btop, n);
			c3d->viewpoint = btop;
			gf_node_event_out_str(n, "viewpoint");
			ret = 1;
		}

		if (force_check || gf_node_dirty_get(c3d->fog)) { gf_node_traverse(c3d->fog, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->fogs, 0);
		if (btop != c3d->fog) {
			gf_node_unregister(c3d->fog, n);
			gf_node_register(btop, n);
			c3d->fog = btop;
			gf_node_event_out_str(n, "fog");
			ret = 1;
		}

		if (force_check || gf_node_dirty_get(c3d->navigationInfo)) { gf_node_traverse(c3d->navigationInfo, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->navigations, 0);
		if (btop != c3d->navigationInfo) {
			gf_node_unregister(c3d->navigationInfo, n);
			gf_node_register(btop, n);
			c3d->navigationInfo = btop;
			gf_node_event_out_str(n, "navigationInfo");
			ret = 1;
		}
		return ret;
	}
#endif
	case TAG_MPEG4_CompositeTexture2D:
	{
		M_CompositeTexture2D *c2d = (M_CompositeTexture2D*)n;
		if (force_check || gf_node_dirty_get(c2d->background)) { gf_node_traverse(c2d->background, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->backgrounds, 0);
		if (btop != c2d->background) {
			gf_node_unregister(c2d->background, n);
			gf_node_register(btop, n);
			c2d->background = btop;
			gf_node_event_out_str(n, "background");
			ret = 1;
		}

		if (force_check || gf_node_dirty_get(c2d->viewport)) { gf_node_traverse(c2d->viewport, tr_state); ret = 1; }
		btop = (GF_Node*)gf_list_get(tr_state->viewpoints, 0);
		if (btop != c2d->viewport) {
			gf_node_unregister(c2d->viewport, n);
			gf_node_register(btop, n);
			c2d->viewport = btop;
			gf_node_event_out_str(n, "viewport");
			ret = 1;
		}

		return ret;
	}
	}
	return 0;
}
Code Example #28
static void RenderLayer2D(GF_Node *node, void *rs, Bool is_destroy)
{
	u32 i;
	GF_List *prevback, *prevviews;
	GF_Rect clip;
	M_Viewport *vp;
	ChildGroup2D *cg;
	GF_Matrix2D gf_mx2d_bck;
	GroupingNode2D *parent_bck;
	DrawableContext *back_ctx;
	Bool bool_bck;
	DrawableContext *ctx;
	M_Background2D *back;
	M_Layer2D *l = (M_Layer2D *)node;
	Layer2DStack *l2D = (Layer2DStack *) gf_node_get_private(node);
	RenderEffect2D *eff;

	if (is_destroy) {
		DeleteGroupingNode2D((GroupingNode2D *)l2D);
		gf_list_del(l2D->backs);
		gf_list_del(l2D->views);
		free(l2D);
		return;
	}

	eff = (RenderEffect2D *) rs;
	gf_mx2d_copy(gf_mx2d_bck, eff->transform);
	parent_bck = eff->parent;
	eff->parent = (GroupingNode2D *) l2D;
	gf_mx2d_init(eff->transform);
	bool_bck = eff->draw_background;
	prevback = eff->back_stack;
	prevviews = eff->view_stack;
	eff->back_stack = l2D->backs;
	eff->view_stack = l2D->views;

	if (l2D->first) {
		/*render on back first to register with stack*/
		if (l->background) {
			eff->draw_background = 0;
			gf_node_render((GF_Node*) l->background, eff);
			group2d_reset_children((GroupingNode2D*) l2D);
			eff->draw_background = 1;
		}
		vp = (M_Viewport*)l->viewport;
		if (vp) {
			gf_list_add(l2D->views, vp);
			if (!vp->isBound) {
				vp->isBound = 1;
				gf_node_event_out_str((GF_Node*)vp, "isBound");
			}
		}
	}

	back = NULL;
	if (gf_list_count(l2D->backs) ) {
		back = (M_Background2D*)gf_list_get(l2D->backs, 0);
		if (!back->isBound) back = NULL;
	}
	vp = NULL;
	if (gf_list_count(l2D->views)) {
		vp = (M_Viewport*)gf_list_get(l2D->views, 0);
		if (!vp->isBound) vp = NULL;
	}
	if (!eff->is_pixel_metrics) gf_mx2d_add_scale(&eff->transform, eff->min_hsize, eff->min_hsize);
	l2D->clip = R2D_ClipperToPixelMetrics(eff, l->size);

	/*apply viewport*/
	if (vp) {
		clip = l2D->clip;
		vp_setup((GF_Node *) vp, eff, &clip);
	}


	back_ctx = NULL;
	if (back) {
		/*setup back size and render*/
		group2d_start_child((GroupingNode2D *)l2D);
		
		eff->draw_background = 1;
		ctx = b2D_GetContext(back, l2D->backs);
		ctx->bi->unclip = l2D->clip;
		ctx->bi->clip = gf_rect_pixelize(&ctx->bi->unclip);
		gf_mx2d_init(ctx->transform);
		gf_node_render((GF_Node *) back, eff);
		eff->draw_background = 0;
	
		/*we need a trick since we're not using a dedicated surface for layer rendering, 
		we emulate the back context: remove previous context and insert fake one*/
		if (!(eff->trav_flags & TF_RENDER_DIRECT) && (gf_list_count(l2D->groups)==1)) {
			ChildGroup2D *cg = (ChildGroup2D *)gf_list_get(l2D->groups, 0);
			back_ctx = VS2D_GetDrawableContext(eff->surface);
			gf_list_rem(cg->contexts, 0);
			gf_list_add(cg->contexts, back_ctx);
			back_ctx->h_texture = ctx->h_texture;
			back_ctx->flags = ctx->flags;
			back_ctx->flags &= ~CTX_IS_TRANSPARENT;
			back_ctx->flags |= CTX_IS_BACKGROUND;
			back_ctx->aspect = ctx->aspect;
			back_ctx->drawable = ctx->drawable;
			drawable_check_bounds(back_ctx, eff->surface);
			back_ctx->bi->clip = ctx->bi->clip;
			back_ctx->bi->unclip = ctx->bi->unclip;
		}
		group2d_end_child((GroupingNode2D *)l2D);
	}

	group2d_traverse((GroupingNode2D *)l2D, l->children, eff);
	/*restore effect*/
	eff->draw_background = bool_bck;
	gf_mx2d_copy(eff->transform, gf_mx2d_bck);
	eff->parent = parent_bck;
	eff->back_stack = prevback;
	eff->view_stack = prevviews;

	/*check bindables*/
	if (l2D->first) {
		Bool redraw = 0;
		l2D->first = 0;
		if (!back && gf_list_count(l2D->backs)) redraw = 1;
		if (!vp && gf_list_count(l2D->views) ) redraw = 1;

		/*we missed background or viewport (was not declared as bound during traversal, and is bound now)*/
		if (redraw) {
			group2d_reset_children((GroupingNode2D*)l2D);
			gf_sr_invalidate(l2D->compositor, NULL);
			return;
		}
	}

	i=0;
	while ((cg = (ChildGroup2D *)gf_list_enum(l2D->groups, &i))) {
		child2d_render_done(cg, eff, &l2D->clip);
	}
	group2d_reset_children((GroupingNode2D*)l2D);

	group2d_force_bounds(eff->parent, &l2D->clip);
}
Code Example #29
File: input_sensor.c  Project: Kurtnoise/gpac
void gf_term_string_input(GF_Terminal *term, u32 character)
{
	u32 i;
	GF_BitStream *bs;
	GF_SLHeader slh;
#ifndef GPAC_DISABLE_X3D
	X_StringSensor *n;
#endif
	GF_Codec *cod;
	char *buf;
	u32 buf_size;

	if (!character || !term) return;
	if (!gf_list_count(term->input_streams) && !gf_list_count(term->x3d_sensors)) return;

	memset(&slh, 0, sizeof(GF_SLHeader));
	slh.accessUnitStartFlag = slh.accessUnitEndFlag = 1;
	slh.compositionTimeStampFlag = 1;
	/*cf above*/
	slh.compositionTimeStamp = 0;

	/*get all IS StringSensor decoders and send frame*/
	i=0;
	while ((cod = (GF_Codec*)gf_list_enum(term->input_streams, &i))) {
		ISPriv *is = (ISPriv *)cod->decio->privateStack;
		if (is->type==IS_StringSensor) {

//			GF_Channel *ch = (GF_Channel *)gf_list_get(cod->inChannels, 0);
			is->enteredText[is->text_len] = character;
			is->text_len += 1;

			/*write empty DDF*/
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_int(bs, 0, 1);
			gf_bs_write_int(bs, 0, 1);
			gf_bs_align(bs);
			gf_bs_get_content(bs, &buf, &buf_size);
			gf_bs_del(bs);
			
//			gf_es_receive_sl_packet(ch->service, ch, buf, buf_size, &slh, GF_OK);
			IS_ProcessData((GF_SceneDecoder*)cod->decio, buf, buf_size, 0, 0, 0);
			
			gf_free(buf);
		}
	}


#ifndef GPAC_DISABLE_X3D
	/*get all X3D StringSensors*/
	i=0;
	while ((n = (X_StringSensor*)gf_list_enum(term->x3d_sensors, &i))) {
		StringSensorStack *st;
		char szStr[5000];
		const unsigned short *ptr;
		u32 len;
		if (gf_node_get_tag((GF_Node*)n) != TAG_X3D_StringSensor) continue;
		if (!n->enabled) continue;

		st = (StringSensorStack *) gf_node_get_private((GF_Node *)n);

		if (character=='\b') {
			if (n->deletionAllowed && st->text_len) {
				st->text_len -= 1;
				st->enteredText[st->text_len] = 0;
				ptr = st->enteredText;
				len = gf_utf8_wcstombs(szStr, 10, &ptr);
				if (n->enteredText.buffer) gf_free(n->enteredText.buffer);
				szStr[len] = 0;
				n->enteredText.buffer = gf_strdup(szStr);
				gf_node_event_out_str((GF_Node *)n, "enteredText");
			}
		} else if (character=='\r') {
			if (n->finalText.buffer) gf_free(n->finalText.buffer);
			n->finalText.buffer = n->enteredText.buffer;
			n->enteredText.buffer = gf_strdup("");
			st->text_len = 0;
			gf_node_event_out_str((GF_Node *)n, "enteredText");
			gf_node_event_out_str((GF_Node *)n, "finalText");
		} else {
			st->enteredText[st->text_len] = character;
			st->text_len += 1;
			st->enteredText[st->text_len] = 0;
			ptr = st->enteredText;
			len = gf_utf8_wcstombs(szStr, 10, &ptr);
			if (n->enteredText.buffer) gf_free(n->enteredText.buffer);
			szStr[len] = 0;
			n->enteredText.buffer = gf_strdup(szStr);
			gf_node_event_out_str((GF_Node *)n, "enteredText");
		}
	}
#endif
}
Code Example #30
File: bindable.c  Project: zsuo/gpac
void Bindable_SetIsBound(GF_Node *bindable, Bool val)
{
	Bool has_bind_time = GF_FALSE;
	if (!bindable) return;
	switch (gf_node_get_tag(bindable)) {
	case TAG_MPEG4_Background2D:
		if ( ((M_Background2D*)bindable)->isBound == val) return;
		((M_Background2D*)bindable)->isBound = val;
		break;
	case TAG_MPEG4_Viewport:
		if ( ((M_Viewport*)bindable)->isBound == val) return;
		((M_Viewport*)bindable)->isBound = val;
		((M_Viewport*)bindable)->bindTime = gf_node_get_scene_time(bindable);
		has_bind_time = GF_TRUE;
		break;
#ifndef GPAC_DISABLE_X3D
	case TAG_X3D_Background:
		if ( ((X_Background*)bindable)->isBound == val) return;
		((X_Background*)bindable)->isBound = val;
		((X_Background*)bindable)->bindTime = gf_node_get_scene_time(bindable);
		has_bind_time = GF_TRUE;
		break;
#endif
	case TAG_MPEG4_Background:
		if ( ((M_Background*)bindable)->isBound == val) return;
		((M_Background*)bindable)->isBound = val;
		break;
#ifndef GPAC_DISABLE_X3D
	case TAG_X3D_NavigationInfo:
		if ( ((X_NavigationInfo*)bindable)->isBound == val) return;
		((X_NavigationInfo*)bindable)->isBound = val;
		((X_NavigationInfo*)bindable)->bindTime = gf_node_get_scene_time(bindable);
		has_bind_time = GF_TRUE;
		break;
#endif
	case TAG_MPEG4_NavigationInfo:
		if ( ((M_NavigationInfo*)bindable)->isBound == val) return;
		((M_NavigationInfo*)bindable)->isBound = val;
		break;
	case TAG_MPEG4_Viewpoint:
#ifndef GPAC_DISABLE_X3D
	case TAG_X3D_Viewpoint:
#endif
		if ( ((M_Viewpoint*)bindable)->isBound == val) return;
		((M_Viewpoint*)bindable)->isBound = val;
		((M_Viewpoint*)bindable)->bindTime = gf_node_get_scene_time(bindable);
		has_bind_time = GF_TRUE;
		break;
#ifndef GPAC_DISABLE_X3D
	case TAG_X3D_Fog:
		if ( ((X_Fog*)bindable)->isBound == val) return;
		((X_Fog*)bindable)->isBound = val;
		((X_Fog*)bindable)->bindTime = gf_node_get_scene_time(bindable);
		has_bind_time = GF_TRUE;
		break;
#endif
	case TAG_MPEG4_Fog:
		if ( ((M_Fog*)bindable)->isBound == val) return;
		((M_Fog*)bindable)->isBound = val;
		break;
	default:
		return;
	}
	gf_node_event_out_str(bindable, "isBound");
	if (has_bind_time) gf_node_event_out_str(bindable, "bindTime");
	/*force invalidate of the bindable stack's owner*/
	gf_node_dirty_set(bindable, 0, GF_TRUE);
}
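
All of the examples above share the same two-step pattern: write the new value into the node's eventOut field, then call gf_node_event_out_str() with the field name so the scene graph dispatches the event along any routes. Below is a minimal sketch of that pattern; the helper name is made up and the use of M_TimeSensor is an arbitrary assumption for illustration, not code taken from any of the projects listed.

static void fire_is_active(GF_Node *node, Bool active)
{
	/*illustrative helper: node is assumed to be an M_TimeSensor*/
	M_TimeSensor *ts = (M_TimeSensor *)node;
	if (ts->isActive == active) return;       /*avoid firing redundant events*/
	ts->isActive = active;                    /*step 1: store the new value in the eventOut field*/
	gf_node_event_out_str(node, "isActive");  /*step 2: fire the event by its field name*/
}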