/*
 * SMIL timing callback for SVG updates elements: maps SMIL timing events
 * onto the playback state of the attached media resource.
 *
 * rti:                   SMIL runtime timing info of the evaluated element.
 * normalized_scene_time: unused here (fixed SMIL callback signature).
 * status:                one of the SMIL_TIMING_EVAL_* event codes.
 *
 * Fix: the original guarded stack->resource only on the play path but
 * passed it unguarded to gf_mo_is_done/gf_mo_set_flag/gf_mo_stop/
 * gf_mo_restart — NULL-deref risk when no resource is attached.
 * Guards are now applied consistently on every path.
 */
static void svg_updates_smil_evaluate(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status)
{
	SVG_updates_stack *stack = (SVG_updates_stack *)gf_node_get_private(gf_smil_get_element(rti));

	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
		if (!stack->is_open) {
			if (stack->resource)
				gf_mo_play(stack->resource, stack->clipBegin, stack->clipEnd, GF_FALSE);
			stack->is_open = GF_TRUE;
		}
		/* guard resource before probing it, same as on the play path above */
		else if (stack->resource && gf_mo_is_done(stack->resource) && (gf_smil_get_media_duration(rti) < 0)) {
			/* resource finished and no media duration resolved yet:
			   propagate the real duration to the SMIL timing engine */
			Double dur = gf_mo_get_duration(stack->resource);
			gf_smil_set_media_duration(rti, dur);
		}
		break;
	case SMIL_TIMING_EVAL_FREEZE:
	case SMIL_TIMING_EVAL_REMOVE:
		stack->is_open = GF_FALSE;
		if (stack->resource) {
			gf_mo_set_flag(stack->resource, GF_MO_DISPLAY_REMOVE, GF_TRUE);
			gf_mo_stop(stack->resource);
		}
		break;
	case SMIL_TIMING_EVAL_REPEAT:
		if (stack->resource)
			gf_mo_restart(stack->resource);
		break;
	}
}
/*
 * SMIL timing callback for SVG video elements: drives the video texture
 * (open / stop / restart) and mirrors each event to any slaved audio.
 *
 * rti:                   SMIL runtime timing info of the evaluated element.
 * normalized_scene_time: forwarded untouched to the audio evaluator.
 * status:                one of the SMIL_TIMING_EVAL_* event codes.
 */
static void svg_video_smil_evaluate(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status)
{
	SVG_video_stack *stack = (SVG_video_stack *)gf_node_get_private(gf_smil_get_element(rti));

	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
		if (!stack->txh.is_open) {
			/* not playing yet: start the texture if a media URL is set */
			if (stack->txurl.count)
				svg_play_texture((SVG_video_stack *)stack, NULL);
		} else if (stack->txh.stream_finished && (gf_smil_get_media_duration(rti) < 0)) {
			/* stream over and no duration resolved yet: take it from the
			   media object, falling back to the last frame time (ms -> s) */
			Double media_dur = gf_mo_get_duration(stack->txh.stream);
			if (media_dur <= 0) {
				media_dur = stack->txh.last_frame_time;
				media_dur /= 1000;
			}
			gf_smil_set_media_duration(rti, media_dur);
		}
		break;

	case SMIL_TIMING_EVAL_FREEZE:
	case SMIL_TIMING_EVAL_REMOVE:
		/* actual teardown happens asynchronously on the texture side */
		stack->stop_requested = GF_TRUE;
		break;

	case SMIL_TIMING_EVAL_REPEAT:
		gf_sc_texture_restart(&stack->txh);
		break;
	}

	/* forward the same timing event to the slaved audio pipeline, if any */
	if (stack->audio)
		svg_audio_smil_evaluate_ex(rti, normalized_scene_time, status, stack->audio, stack->txh.owner);
}
/*
 * Per-frame update of an MPEG-4 MovieTexture: fetches the next video
 * frame, handles looping / end-of-stream deactivation, and reports the
 * media duration once the first frame has been decoded.
 *
 * txh: texture handler owned by the MovieTexture node.
 */
static void movietexture_update(GF_TextureHandler *txh)
{
	M_MovieTexture *mt = (M_MovieTexture *)txh->owner;
	MovieTextureStack *stack = (MovieTextureStack *)gf_node_get_private(txh->owner);

	/* nothing to do while the texture is closed */
	if (!txh->is_open) return;
	/* inactive node with its first frame already shown: keep the frozen frame */
	if (!mt->isActive && stack->first_frame_fetched) return;

	/* fetch the next frame - resync is disabled while grabbing the first frame */
	gf_sc_texture_update_frame(txh, 0);

	if (txh->stream_finished) {
		if (movietexture_get_loop(stack, mt)) {
			gf_sc_texture_restart(txh);
		} else if (mt->isActive && gf_mo_should_deactivate(stack->txh.stream)) {
			/* non-looping stream ended: deactivate the node */
			movietexture_deactivate(stack, mt);
		}
	}

	if (!stack->first_frame_fetched && txh->needs_refresh) {
		/* first decoded frame just arrived: report the media duration */
		stack->first_frame_fetched = 1;
		mt->duration_changed = gf_mo_get_duration(txh->stream);
		gf_node_event_out(txh->owner, 7/*"duration_changed"*/);
		if (!mt->isActive && txh->is_open) {
			/* node not active: pause and hold on the first frame */
			gf_mo_pause(txh->stream);
			/* keep the refresh flag set so this frame still gets drawn */
			txh->needs_refresh = 1;
			gf_sc_invalidate(txh->compositor, NULL);
		}
	}

	if (txh->needs_refresh) {
		/* mark every subtree bound to this texture for redraw */
		gf_node_dirty_parents(txh->owner);
	}
}
/*
 * Scene-graph traversal callback for the MPEG-4 AudioClip node: handles
 * node destruction, end-of-stream looping/deactivation, audio-chain
 * registration, one-shot duration reporting and mute propagation.
 *
 * node:       the AudioClip node being traversed.
 * rs:         the traversal state (GF_TraverseState*).
 * is_destroy: GF_TRUE when the node is being destroyed.
 */
static void audioclip_traverse(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	M_AudioClip *clip = (M_AudioClip *)node;
	AudioClipStack *stack = (AudioClipStack *)gf_node_get_private(node);

	if (is_destroy) {
		/* teardown: release the audio input and the registered time node */
		gf_sc_audio_predestroy(&stack->input);
		if (stack->time_handle.is_registered)
			gf_sc_unregister_time_node(stack->input.compositor, &stack->time_handle);
		gf_free(stack);
		return;
	}

	if (stack->failure) return;

	/* end-of-stream handling: either loop or deactivate the node */
	if (stack->input.stream && stack->input.stream_finished) {
		if (gf_mo_get_loop(stack->input.stream, clip->loop)) {
			gf_sc_audio_restart(&stack->input);
		} else if (clip->isActive && gf_mo_should_deactivate(stack->input.stream)) {
			audioclip_deactivate(stack, clip);
		}
	}

	if (clip->isActive)
		gf_sc_audio_register(&stack->input, (GF_TraverseState *)rs);

	if (stack->set_duration && stack->input.stream) {
		/* report the media duration once it becomes known */
		clip->duration_changed = gf_mo_get_duration(stack->input.stream);
		gf_node_event_out_str(node, "duration_changed");
		stack->set_duration = 0;
	}

	/* propagate the mute state from the traversal context */
	stack->input.is_muted = tr_state->switched_off;
}
/*
 * SMIL timing callback for SVG audio, shared between standalone audio
 * elements and audio slaved to a video element.
 *
 * rti:                   SMIL runtime timing info of the evaluated element.
 * normalized_scene_time: unused here (fixed SMIL callback signature).
 * status:                one of the SMIL_TIMING_EVAL_* event codes.
 * slave_audio:           the audio node when driven by a video element,
 *                        NULL when the timed element is the audio itself.
 * video:                 the owning video node when slaved, else NULL;
 *                        its attributes (syncBehavior, clip times) win
 *                        over the audio element's own when present.
 *
 * Fix: the FREEZE and REMOVE cases were byte-for-byte duplicates —
 * merged into a single fallthrough case. Behavior is unchanged.
 */
static void svg_audio_smil_evaluate_ex(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status, GF_Node *slave_audio, GF_Node *video)
{
	GF_Node *audio;
	SVG_audio_stack *stack;

	audio = slave_audio;
	if (!audio) audio = gf_smil_get_element(rti);
	stack = (SVG_audio_stack *)gf_node_get_private(audio);

	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
		if (!stack->is_active && !stack->is_error) {
			if (stack->aurl.count) {
				SVGAllAttributes atts;
				Bool lock_timeline = GF_FALSE;
				/* when slaved, sync/clip attributes come from the video element */
				gf_svg_flatten_attributes((SVG_Element *)(video ? video : audio), &atts);
				if (atts.syncBehavior)
					lock_timeline = (*atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;
				if (gf_sc_audio_open(&stack->input, &stack->aurl,
				                     atts.clipBegin ? (*atts.clipBegin) : 0.0,
				                     atts.clipEnd ? (*atts.clipEnd) : -1.0,
				                     lock_timeline) == GF_OK) {
					gf_mo_set_speed(stack->input.stream, FIX_ONE);
					stack->is_active = GF_TRUE;
				} else {
					/* remember the failure so we don't retry on every update */
					stack->is_error = GF_TRUE;
				}
			}
		} else if (!slave_audio && stack->input.stream_finished && (gf_smil_get_media_duration(rti) < 0)) {
			/* standalone audio finished and no duration resolved yet:
			   take it from the media object, falling back to the last
			   frame time (ms -> s) */
			Double dur = gf_mo_get_duration(stack->input.stream);
			if (dur <= 0) {
				dur = gf_mo_get_last_frame_time(stack->input.stream);
				dur /= 1000;
			}
			gf_smil_set_media_duration(rti, dur);
		}
		break;
	case SMIL_TIMING_EVAL_REPEAT:
		if (stack->is_active)
			gf_sc_audio_restart(&stack->input);
		break;
	case SMIL_TIMING_EVAL_FREEZE:
		/* fallthrough - FREEZE and REMOVE share the same handling */
	case SMIL_TIMING_EVAL_REMOVE:
		gf_sc_audio_stop(&stack->input);
		stack->is_active = GF_FALSE;
		break;
	case SMIL_TIMING_EVAL_DEACTIVATE:
		if (stack->is_active) {
			gf_sc_audio_stop(&stack->input);
			gf_sc_audio_unregister(&stack->input);
			stack->is_active = GF_FALSE;
		}
		break;
	}
}