コード例 #1
0
ファイル: mpeg4_audio.c プロジェクト: golgol7777/gpac
void compositor_audiosource_modified(GF_Node *node)
{
	M_AudioSource *as = (M_AudioSource *)node;
	AudioSourceStack *st = (AudioSourceStack *) gf_node_get_private(node);

	if (!st) return;

	/*the URL may change while the node is live - the MPEG-4 spec is not clear
	on this, so it is not forbidden: stop, reconfigure and reopen the input*/
	if (gf_sc_audio_check_url(&st->input, &as->url)) {
		if (st->input.is_open)
			gf_sc_audio_stop(&st->input);
		/*force unregister so the audio config is set up again on reopen*/
		gf_sc_audio_unregister(&st->input);
		gf_sc_invalidate(st->input.compositor, NULL);

		if (st->is_active)
			gf_sc_audio_open(&st->input, &as->url, 0, -1);
	}

	/*refresh time state while active - the update itself may deactivate us*/
	if (st->is_active) {
		audiosource_update_time(&st->time_handle);
		if (!st->is_active) return;
	}

	/*keep the time node registered with the compositor*/
	if (st->time_handle.is_registered || st->time_handle.needs_unregister) {
		st->time_handle.needs_unregister = 0;
	} else {
		gf_sc_register_time_node(st->input.compositor, &st->time_handle);
	}
}
コード例 #2
0
ファイル: mpeg4_audio.c プロジェクト: golgol7777/gpac
void compositor_audioclip_modified(GF_Node *node)
{
	M_AudioClip *ac = (M_AudioClip *)node;
	AudioClipStack *st = (AudioClipStack *) gf_node_get_private(node);
	if (!st) return;

	/*reset any previous open failure so the clip may be retried*/
	st->failure = 0;

	/*MPEG4 spec is not clear about URL changes while playing, so this is not
	forbidden. Fix: the original condition read
	"st->input.is_open && st->input.is_open" - the same flag tested twice;
	a single test is sufficient*/
	if (st->input.is_open) {
		if (gf_sc_audio_check_url(&st->input, &ac->url)) {
			gf_sc_audio_stop(&st->input);
			gf_sc_audio_open(&st->input, &ac->url, 0, -1);
			/*force unregister to resetup audio cfg*/
			gf_sc_audio_unregister(&st->input);
			gf_sc_invalidate(st->input.compositor, NULL);
		}
	}

	/*update time state if we're active - the update may deactivate the clip*/
	if (ac->isActive) {
		audioclip_update_time(&st->time_handle);
		/*we're no longer active - don't check for reactivation*/
		if (!ac->isActive) return;
	}

	/*make sure we are still registered with the compositor time node list*/
	if (!st->time_handle.is_registered && !st->time_handle.needs_unregister)
		gf_sc_register_time_node(st->input.compositor, &st->time_handle);
	else
		st->time_handle.needs_unregister = 0;
}
コード例 #3
0
ファイル: mpeg4_audio.c プロジェクト: golgol7777/gpac
/*opens the AudioSource stream and marks the stack active on success*/
static void audiosource_activate(AudioSourceStack *st, M_AudioSource *as)
{
	/*failed to open the stream: stay inactive*/
	if (gf_sc_audio_open(&st->input, &as->url, 0, -1) != GF_OK) return;

	st->is_active = 1;
	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*invalidate the whole scene so traversal locates the parent audio group*/
	gf_sc_invalidate(st->input.compositor, NULL);
}
コード例 #4
0
ファイル: mpeg4_audio.c プロジェクト: golgol7777/gpac
/*opens the AudioClip stream; on failure records it in st->failure,
on success raises the isActive eventOut and starts playback*/
static void audioclip_activate(AudioClipStack *st, M_AudioClip *ac)
{
	if (gf_sc_audio_open(&st->input, &ac->url, 0, -1) != GF_OK) {
		st->failure = 1;
		return;
	}

	/*signal activation to the scene through the isActive eventOut*/
	ac->isActive = 1;
	gf_node_event_out_str((GF_Node *)ac, "isActive");

	gf_mo_set_speed(st->input.stream, st->input.speed);
	/*invalidate the whole scene so traversal locates the parent audio group*/
	gf_sc_invalidate(st->input.compositor, NULL);
}
コード例 #5
0
ファイル: svg_media.c プロジェクト: DmitrySigaev/gpac_hbbtv
/*Traversal callback for an SVG audio element.
  node: the SVG audio node; rs: the GF_TraverseState for this pass;
  is_destroy: set when the node is being destroyed; props: inherited SVG
  properties, or NULL when this call must flatten/compute them itself
  (in that case they are restored before returning).*/
static void svg_traverse_audio_ex(GF_Node *node, void *rs, Bool is_destroy, SVGPropertiesPointers *props)
{
	SVGAllAttributes all_atts;
	SVGPropertiesPointers backup_props;
	u32 backup_flags, restore;
	GF_TraverseState *tr_state = (GF_TraverseState*)rs;
	SVG_audio_stack *stack = (SVG_audio_stack *)gf_node_get_private(node);

	/*node teardown: release audio input, URL list and private stack*/
	if (is_destroy) {
		gf_sc_audio_predestroy(&stack->input);
		gf_sg_mfurl_del(stack->aurl);
		gf_free(stack);
		return;
	}
	/*re-register the active audio input for this traversal pass*/
	if (stack->is_active) {
		gf_sc_audio_register(&stack->input, (GF_TraverseState*)rs);
	}

	/*no inherited props supplied: compute them here and remember to restore*/
	restore = 0;
	if (!props) {
		restore = 1;
		gf_svg_flatten_attributes((SVG_Element *)node, &all_atts);
		if (!compositor_svg_traverse_base(node, &all_atts, (GF_TraverseState *)rs, &backup_props, &backup_flags))
			return;
		props = tr_state->svg_props;
	}

	/*xlink:href changed: stop current stream and reopen from the new URL*/
	if (gf_node_dirty_get(node) & GF_SG_SVG_XLINK_HREF_DIRTY) {
		SVGAllAttributes atts;
		Bool lock_timeline = GF_FALSE;
		if (stack->is_active) 
			gf_sc_audio_stop(&stack->input);

		/*clear previous error so the new URL gets a fresh attempt*/
		stack->is_error = GF_FALSE;
		
		gf_node_dirty_clear(node, GF_SG_SVG_XLINK_HREF_DIRTY);
		gf_term_get_mfurl_from_xlink(node, &(stack->aurl));

		gf_svg_flatten_attributes((SVG_Element*) node, &atts);
		/*syncBehavior="locked" ties the media timeline to the scene timeline*/
		if (atts.syncBehavior) lock_timeline = (*atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;

		/*open with clipBegin/clipEnd when present (defaults: 0.0 / -1.0 = full range)*/
		if (stack->aurl.count && (gf_sc_audio_open(&stack->input, &stack->aurl,
				atts.clipBegin ? (*atts.clipBegin) : 0.0,
				atts.clipEnd ? (*atts.clipEnd) : -1.0,
				lock_timeline) == GF_OK) 

		) {
			gf_mo_set_speed(stack->input.stream, FIX_ONE);
			stack->is_active = GF_TRUE;
		} else if (stack->is_active) {
			/*reopen failed: drop the previously registered input*/
			gf_sc_audio_unregister(&stack->input);
			stack->is_active = GF_FALSE;
		}
	}

	/*store mute flag: muted when the branch is switched off, display:none,
	or visibility:hidden*/
	stack->input.is_muted = GF_FALSE;
	if (tr_state->switched_off
		|| compositor_svg_is_display_off(props)
		|| (*(props->visibility) == SVG_VISIBILITY_HIDDEN) ) {
	
		stack->input.is_muted = GF_TRUE;
	}

	/*propagate the computed audio-level property to the mixer input*/
	stack->input.intensity = tr_state->svg_props->computed_audio_level;

	/*restore the caller's property context if we flattened it ourselves*/
	if (restore) {
		memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
		tr_state->svg_flags = backup_flags;
	}
}
コード例 #6
0
ファイル: svg_media.c プロジェクト: DmitrySigaev/gpac_hbbtv
/*SMIL timing evaluation callback for SVG audio.
  rti: the SMIL runtime timing instance; normalized_scene_time: unused here;
  status: one of the SMIL_TIMING_EVAL_* states; slave_audio: audio node driven
  by another element (e.g. the audio track of a video), or NULL when the timing
  element is the audio node itself; video: the owning video element, if any
  (its attributes then provide syncBehavior/clipBegin/clipEnd).*/
static void svg_audio_smil_evaluate_ex(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status, GF_Node *slave_audio, GF_Node *video)
{
	GF_Node *audio;
	SVG_audio_stack *stack;

	/*resolve the audio node: explicit slave, or the timing element itself*/
	audio = slave_audio;
	if (!audio) audio = gf_smil_get_element(rti);

	stack = (SVG_audio_stack *)gf_node_get_private(audio);
	
	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
		/*not playing and not previously failed: try to (re)open the stream*/
		if (!stack->is_active && !stack->is_error) { 
			if (stack->aurl.count) {
				SVGAllAttributes atts;
				Bool lock_timeline = GF_FALSE;
				/*attributes come from the video element when driving its audio track*/
				gf_svg_flatten_attributes((SVG_Element*) (video ? video : audio), &atts);

				/*syncBehavior="locked" ties the media timeline to the scene timeline*/
				if (atts.syncBehavior) lock_timeline = (*atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;

				/*open with clipBegin/clipEnd when present (defaults: 0.0 / -1.0 = full range)*/
				if (gf_sc_audio_open(&stack->input, &stack->aurl,
						atts.clipBegin ? (*atts.clipBegin) : 0.0,
						atts.clipEnd ? (*atts.clipEnd) : -1.0,
						lock_timeline) == GF_OK) 
				{
					gf_mo_set_speed(stack->input.stream, FIX_ONE);
					stack->is_active = GF_TRUE;
				} else {
					/*remember the failure so we don't retry every evaluation*/
					stack->is_error = GF_TRUE;
				}
			}
		}
		/*standalone audio finished with no explicit SMIL duration: feed the
		media duration back to the timing engine (fall back to the last frame
		time, in seconds, when the stream reports no duration)*/
		else if (!slave_audio && stack->input.stream_finished && (gf_smil_get_media_duration(rti) < 0) ) { 
			Double dur = gf_mo_get_duration(stack->input.stream);
			if (dur <= 0) {
				dur = gf_mo_get_last_frame_time(stack->input.stream);
				dur /= 1000;
			}
			gf_smil_set_media_duration(rti, dur);
		}
		break;
	case SMIL_TIMING_EVAL_REPEAT:
		/*restart playback from the beginning on each SMIL repeat cycle*/
		if (stack->is_active) 
			gf_sc_audio_restart(&stack->input);
		break;
	/*NOTE(review): FREEZE and REMOVE bodies are identical here; unlike
	DEACTIVATE they do not unregister the input - presumably intentional so
	the node can be reactivated without a full re-register, but worth confirming*/
	case SMIL_TIMING_EVAL_FREEZE:
		gf_sc_audio_stop(&stack->input);
		stack->is_active = GF_FALSE;
		break;
	case SMIL_TIMING_EVAL_REMOVE:
		gf_sc_audio_stop(&stack->input);
		stack->is_active = GF_FALSE;
		break;
	case SMIL_TIMING_EVAL_DEACTIVATE:
		/*full shutdown: stop and unregister from the audio mixer*/
		if (stack->is_active) {
			gf_sc_audio_stop(&stack->input);
			gf_sc_audio_unregister(&stack->input);
			stack->is_active = GF_FALSE;
		}
		break;
	}
}