Code Example #1
File: audio_input.c Project: bigbensk/gpac
GF_EXPORT
GF_Err gf_sc_audio_open(GF_AudioInput *ai, MFURL *url, Double clipBegin, Double clipEnd, Bool lock_timeline)
{
	u32 i;
	if (ai->is_open) return GF_BAD_PARAM;

	/*get media object*/
	ai->stream = gf_mo_register(ai->owner, url, lock_timeline, 0);
	/*bad URL*/
	if (!ai->stream) return GF_NOT_SUPPORTED;

	/*request play*/
	gf_mo_play(ai->stream, clipBegin, clipEnd, 0);

	ai->stream_finished = 0;
	ai->is_open = 1;
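	/*clear the init flag so the audio configuration is queried again on the next get_config call*/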
	gf_mo_set_flag(ai->stream, GF_MO_IS_INIT, 0);

	if (ai->filter) gf_af_del(ai->filter);
	ai->filter = NULL;
	
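	/*look for a "#filter=" specifier in the URL list and create the corresponding audio filter*/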
	for (i=0; i<url->count; i++) {
		if (url->vals[i].url && !strnicmp(url->vals[i].url, "#filter=", 8)) {
			ai->filter = gf_af_new(ai->compositor, &ai->input_ifce, url->vals[i].url+8);
			if (ai->filter) 
				break;
		}
	}
	return GF_OK;
}
Code Example #2
File: mpeg4_animstream.c Project: HungMingWu/gpac
static void animationstream_check_url(AnimationStreamStack *stack, M_AnimationStream *as)
{
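    /*no stream attached yet: keep a copy of the URL and register the media object*/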
    if (!stack->stream) {
        gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
        gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
        stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
        gf_sc_invalidate(stack->compositor, NULL);

        /*if changed while playing trigger*/
        if (as->isActive) {
            gf_mo_play(stack->stream, 0, -1, 0);
            gf_mo_set_speed(stack->stream, as->speed);
        }
        return;
    }
    /*check change*/
    if (gf_mo_url_changed(stack->stream, &as->url)) {
        gf_sg_vrml_mf_reset(&stack->current_url, GF_SG_VRML_MFURL);
        gf_sg_vrml_field_copy(&stack->current_url, &as->url, GF_SG_VRML_MFURL);
        /*if changed while playing stop old source*/
        if (as->isActive) {
            gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
            gf_mo_stop(stack->stream);
        }
        gf_mo_unregister((GF_Node *)as, stack->stream);

        stack->stream = gf_mo_register((GF_Node *)as, &as->url, 0, 0);
        /*if changed while playing play new source*/
        if (as->isActive) {
            gf_mo_play(stack->stream, 0, -1, 0);
            gf_mo_set_speed(stack->stream, as->speed);
        }
        gf_sc_invalidate(stack->compositor, NULL);
    }
}
Code Example #3
File: texturing.c Project: bbshocking/gpac
static void setup_texture_object(GF_TextureHandler *txh, Bool private_media)
{
	if (!txh->tx_io) {
		gf_sc_texture_allocate(txh);
		if (!txh->tx_io) return;

		gf_mo_get_visual_info(txh->stream, &txh->width, &txh->height, &txh->stride, &txh->pixel_ar, &txh->pixelformat, &txh->is_flipped);

		if (private_media) {
			txh->transparent = 1;
			txh->pixelformat = GF_PIXEL_ARGB;
			txh->flags |= GF_SR_TEXTURE_PRIVATE_MEDIA;
		} else {
			txh->transparent = 0;
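			/*pixel formats that carry an alpha (or shape) component are drawn as transparent*/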
			switch (txh->pixelformat) {
			case GF_PIXEL_ALPHAGREY:
			case GF_PIXEL_ARGB:
			case GF_PIXEL_RGBA:
			case GF_PIXEL_YUVA:
			case GF_PIXEL_RGBDS:
				txh->transparent = 1;
				break;
			}
		}
		gf_mo_set_flag(txh->stream, GF_MO_IS_INIT, 1);
	}
}
Code Example #4
File: svg_media.c Project: DmitrySigaev/gpac_hbbtv
static void svg_traverse_updates(GF_Node *node, void *rs, Bool is_destroy)
{
	/*video stack is just an extension of image stack, type-casting is OK*/
	SVG_updates_stack *stack = (SVG_updates_stack*)gf_node_get_private(node);
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	SVGAllAttributes all_atts;
	SVGPropertiesPointers backup_props;
	u32 backup_flags, dirty_flags;

	if (is_destroy) {
		if (stack->resource) {
			if (stack->is_open) {
				gf_mo_set_flag(stack->resource, GF_MO_DISPLAY_REMOVE, GF_TRUE);
				gf_mo_stop(stack->resource);
			}
			gf_mo_unregister(node, stack->resource);
		}
		gf_free(stack);
		return;
	} 

	if (tr_state->traversing_mode!=TRAVERSE_SORT) return;

	/*flatten attributes and apply animations + inheritance*/
	gf_svg_flatten_attributes((SVG_Element *)node, &all_atts);
	if (!compositor_svg_traverse_base(node, &all_atts, (GF_TraverseState *)rs, &backup_props, &backup_flags))
		return;

	dirty_flags = gf_node_dirty_get(node);
	if (dirty_flags) {
		stack->clipBegin = all_atts.clipBegin ? *all_atts.clipBegin : 0;
		stack->clipEnd = all_atts.clipEnd ? *all_atts.clipEnd : -1;
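		/*xlink:href is dirty: rebuild the target URL and swap the media object if it changed*/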
		if (dirty_flags & GF_SG_SVG_XLINK_HREF_DIRTY) {
			GF_MediaObject *new_res;
			MFURL url;
			Bool lock_timeline=GF_FALSE;
			url.vals = NULL;
			url.count = 0;

			if (all_atts.syncBehavior) lock_timeline = (*all_atts.syncBehavior == SMIL_SYNCBEHAVIOR_LOCKED) ? GF_TRUE : GF_FALSE;

			gf_term_get_mfurl_from_xlink(node, &url);

			new_res = gf_mo_register(node, &url, lock_timeline, GF_FALSE);
			gf_sg_mfurl_del(url);
			
			if (stack->resource!=new_res) {
				if (stack->resource) {
					gf_mo_stop(stack->resource);
					gf_mo_unregister(node, stack->resource);
				}
				stack->resource = new_res;
				if (stack->resource && stack->is_open) gf_mo_play(stack->resource, stack->clipBegin, stack->clipEnd, GF_FALSE);
			}
		}
		gf_node_dirty_clear(node, 0);
	}
	memcpy(tr_state->svg_props, &backup_props, sizeof(SVGPropertiesPointers));
	tr_state->svg_flags = backup_flags;
}
Code Example #5
File: svg_media.c Project: DmitrySigaev/gpac_hbbtv
static void svg_updates_smil_evaluate(SMIL_Timing_RTI *rti, Fixed normalized_scene_time, u32 status)
{
	SVG_updates_stack *stack = (SVG_updates_stack *)gf_node_get_private(gf_smil_get_element(rti));

	switch (status) {
	case SMIL_TIMING_EVAL_UPDATE:
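		/*first update opens the resource; once the stream is done, its duration is pushed to the SMIL timing*/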
		if (!stack->is_open) {
			if (stack->resource) gf_mo_play(stack->resource, stack->clipBegin, stack->clipEnd, GF_FALSE);
			stack->is_open = GF_TRUE;
		}
		else if (gf_mo_is_done(stack->resource) && (gf_smil_get_media_duration(rti)<0) ) { 
			Double dur = gf_mo_get_duration(stack->resource);
			gf_smil_set_media_duration(rti, dur);
		}
		break;
	case SMIL_TIMING_EVAL_FREEZE:
	case SMIL_TIMING_EVAL_REMOVE:
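		/*freeze or removal closes playback and requests display removal*/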
		stack->is_open = GF_FALSE;
		gf_mo_set_flag(stack->resource, GF_MO_DISPLAY_REMOVE, GF_TRUE);
		gf_mo_stop(stack->resource);
		break;
	case SMIL_TIMING_EVAL_REPEAT:
		gf_mo_restart(stack->resource);
		break;
	}
}
Code Example #6
File: audio_input.c Project: bigbensk/gpac
static Bool gf_audio_input_get_config(GF_AudioInterface *aifc, Bool for_recf)
{
	GF_AudioInput *ai = (GF_AudioInput *) aifc->callback;
	if (!ai->stream) return 0;
	/*watchout for object reuse*/
	if (aifc->samplerate && (gf_mo_get_flags(ai->stream) & GF_MO_IS_INIT)) return 1;
	if (!for_recf) 
		return 0;

	gf_mo_get_audio_info(ai->stream, &aifc->samplerate, &aifc->bps , &aifc->chan, &aifc->ch_cfg);

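	/*a valid config (non-zero rate/channels/bps, known layout for multichannel) marks the object as initialized*/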
	if (aifc->samplerate * aifc->chan * aifc->bps && ((aifc->chan<=2) || aifc->ch_cfg))  {
		gf_mo_set_flag(ai->stream, GF_MO_IS_INIT, 1);
		return 1;
	}
	gf_mo_set_flag(ai->stream, GF_MO_IS_INIT, 0);
	return 0;
}
Code Example #7
File: mpeg4_animstream.c Project: HungMingWu/gpac
static void animationstream_deactivate(AnimationStreamStack *stack, M_AnimationStream *as)
{
    if (as->isActive) {
        as->isActive = 0;
        gf_node_event_out((GF_Node*)as, 6/*"isActive"*/);
    }
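    /*stop the stream; if its URL has changed, also flag it for display removal*/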
    if (stack->stream) {
        if (gf_mo_url_changed(stack->stream, &as->url))
            gf_mo_set_flag(stack->stream, GF_MO_DISPLAY_REMOVE, 1);
        gf_mo_stop(stack->stream);
    }
    stack->time_handle.needs_unregister = 1;
    gf_sc_invalidate(stack->compositor, NULL);
}
Code Example #8
File: mpeg4_animstream.c Project: HungMingWu/gpac
static void animationstream_destroy(GF_Node *node, void *rs, Bool is_destroy)
{
    if (is_destroy) {
        M_AnimationStream *as = (M_AnimationStream *)node;
        AnimationStreamStack *st = (AnimationStreamStack *) gf_node_get_private(node);

        if (st->time_handle.is_registered) {
            gf_sc_unregister_time_node(st->compositor, &st->time_handle);
        }
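        /*node destroyed while still active: remove from display and stop the stream*/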
        if (st->stream && as->isActive) {
            gf_mo_set_flag(st->stream, GF_MO_DISPLAY_REMOVE, 1);
            gf_mo_stop(st->stream);
        }
        gf_sg_vrml_mf_reset(&st->current_url, GF_SG_VRML_MFURL);
        gf_free(st);
    }
}