/*
 * Resolves (or re-resolves) the MediaObject backing an AnimationStream node.
 *
 * Two situations are handled:
 *  - no stream attached yet: snapshot the node's url into current_url,
 *    register a MediaObject for it and invalidate the compositor;
 *  - the node's url changed since last resolution: stop and unregister the
 *    old MediaObject, then register a new one for the new url.
 * In both cases, if the node is currently active (as->isActive), playback is
 * (re)started on the freshly registered stream at the node's speed.
 *
 * NOTE(review): statement order matters here — the old stream must be
 * stopped before gf_mo_unregister, and gf_mo_register must complete before
 * gf_mo_play/gf_mo_set_speed are issued on stack->stream.
 */
static void animationstream_check_url(AnimationStreamStack *stack, M_AnimationStream *as)
{
	/* First resolution: no MediaObject attached to this node yet. */
	if (!stack->stream) {
		/* keep a private copy of the url used for change detection */
		gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
		gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
		stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
		gf_sc_invalidate(stack->compositor, NULL);
		/*if changed while playing trigger*/
		if (as->isActive) {
			gf_mo_play(stack->stream, 0, -1, 0);
			gf_mo_set_speed(stack->stream, as->speed);
		}
		return;
	}
	/*check change*/
	if (gf_mo_url_changed(stack->stream, &as->url)) {
		/* refresh the private url copy */
		gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
		gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
		/*if changed while playing stop old source*/
		if (as->isActive) {
			gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
			gf_mo_stop(stack->stream);
		}
		/* swap MediaObjects: release the old one, acquire the new one */
		gf_mo_unregister((GF_Node *)as, stack->stream);
		stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
		/*if changed while playing play new source*/
		if (as->isActive) {
			gf_mo_play(stack->stream, 0, -1, 0);
			gf_mo_set_speed(stack->stream, as->speed);
		}
		gf_sc_invalidate(stack->compositor, NULL);
	}
}
/*
 * Activates an MPEG-4 AudioSource node: opens its audio input and, on
 * success, marks it active, applies the stored speed and invalidates the
 * compositor so the traversal re-binds the input to its parent audio group.
 * On open failure the node is left untouched.
 */
static void audiosource_activate(AudioSourceStack *st, M_AudioSource *as)
{
	if (gf_sc_audio_open(&st->input, &as->url, 0, -1) == GF_OK) {
		st->is_active = 1;
		gf_mo_set_speed(st->input.stream, st->input.speed);
		/*traverse all graph to get parent audio group*/
		gf_sc_invalidate(st->input.compositor, NULL);
	}
}
/*
 * Activates an AnimationStream node: re-resolves its media object if the
 * url changed, raises the isActive eventOut, then starts playback at the
 * node's speed.
 *
 * NOTE(review): the eventOut is fired by field index (6) rather than by
 * name as the other activate handlers in this file do; per the inline
 * comment, index 6 is the "isActive" field — confirm against the
 * M_AnimationStream field table before touching this.
 */
static void animationstream_activate(AnimationStreamStack *stack, M_AnimationStream *as)
{
	animationstream_check_url(stack, as);
	as->isActive = 1;
	gf_node_event_out((GF_Node*)as, 6/*"isActive"*/);
	gf_mo_play(stack->stream, 0, -1, 0);
	gf_mo_set_speed(stack->stream, as->speed);
}
/*
 * Activates an AnimationStream node: checks/re-resolves its url, raises the
 * isActive eventOut by name, then starts playback at the node's speed.
 *
 * NOTE(review): this looks like a legacy-named duplicate of
 * animationstream_activate (old AS_* naming convention); it calls
 * AS_CheckURL, which is not defined in this view — presumably the old name
 * of animationstream_check_url. Verify whether both variants are still
 * needed.
 */
static void AS_Activate(AnimationStreamStack *stack, M_AnimationStream *as)
{
	AS_CheckURL(stack, as);
	as->isActive = 1;
	gf_node_event_out_str((GF_Node*)as, "isActive");
	gf_mo_play(stack->stream, 0, -1, 0);
	gf_mo_set_speed(stack->stream, as->speed);
}
/*
 * Activates a MovieTexture node: raises the isActive eventOut, opens the
 * texture stream if it is not already open (starting at the offset of the
 * current scene time from the node's startTime), and applies the node's
 * playback speed.
 */
static void movietexture_activate(MovieTextureStack *stack, M_MovieTexture *mt, Double scene_time)
{
	mt->isActive = 1;
	gf_node_event_out_str((GF_Node*)mt, "isActive");

	if (!stack->txh.is_open) {
		/* media-time offset from the node's scheduled start */
		Double start_offset = scene_time - mt->startTime;
		gf_sc_texture_play_from_to(&stack->txh, &mt->url, start_offset, -1,
		                           gf_mo_get_loop(stack->txh.stream, mt->loop), 0);
	}
	gf_mo_set_speed(stack->txh.stream, mt->speed);
}
/*
 * Activates an MPEG-4 AudioClip node: opens its audio input and, on
 * success, raises the isActive eventOut, applies the stored speed and
 * invalidates the compositor so the traversal re-binds the input to its
 * parent audio group. On open failure the stack's failure flag is set and
 * the node stays inactive.
 */
static void audioclip_activate(AudioClipStack *st, M_AudioClip *ac)
{
	if (gf_sc_audio_open(&st->input, &ac->url, 0, -1) == GF_OK) {
		ac->isActive = 1;
		gf_node_event_out_str((GF_Node *)ac, "isActive");
		gf_mo_set_speed(st->input.stream, st->input.speed);
		/*traverse all graph to get parent audio group*/
		gf_sc_invalidate(st->input.compositor, NULL);
	} else {
		st->failure = 1;
	}
}
/*
 * Traversal callback for an SVG <audio> element (also reused by other media
 * elements via the props argument — when props is non-NULL the caller has
 * already flattened/pushed the SVG properties).
 *
 * Responsibilities, in order:
 *  - on destroy: release the audio input, the url list and the stack;
 *  - register the audio input with the renderer while active;
 *  - if props is NULL, flatten this element's attributes and push the SVG
 *    property state (restored at the end via memcpy of the backup);
 *  - on xlink:href change: stop the current source, rebuild the url from
 *    the xlink, and reopen the input with clipBegin/clipEnd and the
 *    syncBehavior-derived timeline lock; failure to open deactivates;
 *  - compute the mute flag from switched_off/display/visibility and
 *    propagate the computed audio level as the input intensity.
 */
static void svg_traverse_audio_ex(GF_Node *node, void *rs, Bool is_destroy, SVGPropertiesPointers *props)
{
	SVGAllAttributes all_atts;
	SVGPropertiesPointers backup_props;
	u32 backup_flags, restore;
	GF_TraverseState *tr_state = (GF_TraverseState*)rs;
	SVG_audio_stack *stack = (SVG_audio_stack *)gf_node_get_private(node);

	/* node teardown: free everything owned by the stack */
	if (is_destroy) {
		gf_sc_audio_predestroy(&stack->input);
		gf_sg_mfurl_del(stack->aurl);
		gf_free(stack);
		return;
	}
	/* keep the audio input registered with the renderer while playing */
	if (stack->is_active) {
		gf_sc_audio_register(&stack->input, (GF_TraverseState*)rs);
	}

	/* props == NULL means we must push (and later restore) the SVG
	   property state ourselves */
	restore = 0;
	if (!props) {
		restore = 1;
		gf_svg_flatten_attributes((SVG_Element *)node, &all_atts);
		if (!compositor_svg_traverse_base(node, &all_atts, (GF_TraverseState *)rs, &backup_props, &backup_flags))
			return;
		props = tr_state->svg_props;
	}

	/* xlink:href changed: tear down the old source and open the new one */
	if (gf_node_dirty_get(node) & GF_SG_SVG_XLINK_HREF_DIRTY) {
		SVGAllAttributes atts;
		Bool lock_timeline = GF_FALSE;
		if (stack->is_active)
			gf_sc_audio_stop(&stack->input);
		stack->is_error = GF_FALSE;
		gf_node_dirty_clear(node, GF_SG_SVG_XLINK_HREF_DIRTY);
		gf_term_get_mfurl_from_xlink(node, &(stack->aurl));
		gf_svg_flatten_attributes((SVG_Element*) node, &atts);
		/* syncBehavior="locked" ties the media to the document timeline */
		if (atts.syncBehavior)
			lock_timeline = (*atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;
		if (stack->aurl.count && (gf_sc_audio_open(&stack->input, &stack->aurl,
			atts.clipBegin ? (*atts.clipBegin) : 0.0,
			atts.clipEnd ? (*atts.clipEnd) : -1.0,
			lock_timeline) == GF_OK) ) {
			gf_mo_set_speed(stack->input.stream, FIX_ONE);
			stack->is_active = GF_TRUE;
		} else if (stack->is_active) {
			/* new url unusable: drop the renderer registration */
			gf_sc_audio_unregister(&stack->input);
			stack->is_active = GF_FALSE;
		}
	}

	/*store mute flag*/
	stack->input.is_muted = GF_FALSE;
	if (tr_state->switched_off
		|| compositor_svg_is_display_off(props)
		|| (*(props->visibility) == SVG_VISIBILITY_HIDDEN) ) {
		stack->input.is_muted = GF_TRUE;
	}

	/* propagate the computed audio-level property as mixing intensity */
	stack->input.intensity = tr_state->svg_props->computed_audio_level;

	/* restore the SVG property state we pushed above */
	if (restore) {
		memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
		tr_state->svg_flags = backup_flags;
	}
}
/*
 * SMIL timing callback for an SVG audio element (or the audio track of a
 * video element when slave_audio is set).
 *
 * status drives the action:
 *  - UPDATE: lazily open the audio source the first time the timeline runs
 *    (honoring clipBegin/clipEnd and syncBehavior="locked"); once open, and
 *    when driving its own timing (no slave_audio), publish the media
 *    duration to SMIL when the stream finishes and no duration is known;
 *  - REPEAT: restart the source;
 *  - FREEZE / REMOVE: stop the source and mark it inactive;
 *  - DEACTIVATE: stop, unregister from the renderer and mark inactive.
 *
 * normalized_scene_time is part of the SMIL callback signature but unused
 * here.
 *
 * Fix: FREEZE and REMOVE had token-identical duplicated bodies — merged via
 * explicit case fallthrough; also added a default case to the switch.
 */
static void svg_audio_smil_evaluate_ex(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status, GF_Node *slave_audio, GF_Node *video)
{
	GF_Node *audio;
	SVG_audio_stack *stack;

	audio = slave_audio;
	if (!audio) audio = gf_smil_get_element(rti);
	stack = (SVG_audio_stack *)gf_node_get_private(audio);

	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
		if (!stack->is_active && !stack->is_error) {
			/* first evaluation with a usable url: open the source */
			if (stack->aurl.count) {
				SVGAllAttributes atts;
				Bool lock_timeline = GF_FALSE;
				/* clip/sync attributes come from the video element when
				   we are its slave audio track */
				gf_svg_flatten_attributes((SVG_Element*) (video ? video : audio), &atts);
				if (atts.syncBehavior)
					lock_timeline = (*atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;
				if (gf_sc_audio_open(&stack->input, &stack->aurl,
					atts.clipBegin ? (*atts.clipBegin) : 0.0,
					atts.clipEnd ? (*atts.clipEnd) : -1.0,
					lock_timeline) == GF_OK) {
					gf_mo_set_speed(stack->input.stream, FIX_ONE);
					stack->is_active = GF_TRUE;
				} else {
					/* remember the failure so we don't retry every tick */
					stack->is_error = GF_TRUE;
				}
			}
		}
		else if (!slave_audio && stack->input.stream_finished && (gf_smil_get_media_duration(rti) < 0) ) {
			/* stream ended and SMIL has no duration yet: publish it */
			Double dur = gf_mo_get_duration(stack->input.stream);
			if (dur <= 0) {
				/* no declared duration: fall back to last decoded frame
				   time, converted from ms to seconds */
				dur = gf_mo_get_last_frame_time(stack->input.stream);
				dur /= 1000;
			}
			gf_smil_set_media_duration(rti, dur);
		}
		break;
	case SMIL_TIMING_EVAL_REPEAT:
		if (stack->is_active)
			gf_sc_audio_restart(&stack->input);
		break;
	case SMIL_TIMING_EVAL_FREEZE:
		/* fallthrough: freeze and remove stop playback identically */
	case SMIL_TIMING_EVAL_REMOVE:
		gf_sc_audio_stop(&stack->input);
		stack->is_active = GF_FALSE;
		break;
	case SMIL_TIMING_EVAL_DEACTIVATE:
		if (stack->is_active) {
			gf_sc_audio_stop(&stack->input);
			gf_sc_audio_unregister(&stack->input);
			stack->is_active = GF_FALSE;
		}
		break;
	default:
		break;
	}
}