コード例 #1
0
ファイル: odf_dec.c プロジェクト: erelh/gpac
/* Creates a new OD (Object Descriptor) scene decoder interface.
 * Returns NULL on allocation failure. */
GF_BaseDecoder *NewODDec()
{
    GF_SceneDecoder *tmp;
    ODPriv *priv;

    GF_SAFEALLOC(tmp, GF_SceneDecoder);
    if (!tmp) return NULL;
    GF_SAFEALLOC(priv, ODPriv);
    if (!priv) {
        /* do not leak the interface when the private stack cannot be allocated */
        gf_free(tmp);
        return NULL;
    }

    tmp->privateStack = priv;
    tmp->AttachStream = ODF_AttachStream;
    tmp->DetachStream = ODF_DetachStream;
    tmp->GetCapabilities = ODF_GetCapabilities;
    tmp->SetCapabilities = ODF_SetCapabilities;
    tmp->ProcessData = ODF_ProcessData;
    tmp->AttachScene = ODF_AttachScene;
    tmp->CanHandleStream = ODF_CanHandleStream;

    GF_REGISTER_MODULE_INTERFACE(tmp, GF_SCENE_DECODER_INTERFACE, "GPAC OD Decoder", "gpac distribution")
    return (GF_BaseDecoder *) tmp;
}
コード例 #2
0
/* Initializes the linearGradient paint-server stack for an SVG (sani) node. */
void svg_sani_init_linearGradient(Render2D *sr, GF_Node *node)
{
	SVG_SANI_GradientStack *st;
	GF_SAFEALLOC(st, SVG_SANI_GradientStack);
	/* bail out on allocation failure instead of dereferencing NULL below */
	if (!st) return;

	gf_sr_texture_setup(&st->txh, sr->compositor, node);
	st->txh.update_texture_fcnt = svg_sani_UpdateLinearGradient;

	st->txh.compute_gradient_matrix = svg_sani_LG_ComputeMatrix;
	gf_node_set_private(node, st);
	gf_node_set_callback_function(node, svg_sani_render_PaintServer);
}
コード例 #3
0
ファイル: media_sensor.c プロジェクト: JamesLinus/gpac
/* Creates and attaches the MediaSensor node stack to the given node. */
void InitMediaSensor(GF_Scene *scene, GF_Node *node)
{
	MediaSensorStack *st;
	GF_SAFEALLOC(st, MediaSensorStack);
	/* abort initialization on OOM rather than crashing on st->parent */
	if (!st) return;

	st->parent = scene;
	st->sensor = (M_MediaSensor *)node;
	st->seg = gf_list_new();
	gf_node_set_callback_function(node, RenderMediaSensor);
	gf_node_set_private(node, st);

}
コード例 #4
0
ファイル: commands.c プロジェクト: bigbensk/gpac
GF_EXPORT
GF_Command *gf_sg_command_new(GF_SceneGraph *graph, u32 tag)
{
	/* Allocates a new scene-graph command of the given tag.
	 * Returns NULL on allocation failure. */
	GF_Command *com;
	GF_SAFEALLOC(com, GF_Command);
	if (com == NULL) return NULL;
	com->in_scene = graph;
	com->tag = tag;
	com->command_fields = gf_list_new();
	/* BIFS commands also carry a list of newly declared protos */
	if (tag < GF_SG_LAST_BIFS_COMMAND) {
		com->new_proto_list = gf_list_new();
	}
	return com;
}
コード例 #5
0
ファイル: hardcoded_protos.c プロジェクト: dragonlucian/gpac
/* Initializes the hardcoded Untransform proto: binds its traversal stack to the node. */
void compositor_init_untransform(GF_Compositor *compositor, GF_Node *node)
{
	Untransform tr;
	if (Untransform_GetNode(node, &tr)) {
		UntransformStack *stack;
		GF_SAFEALLOC(stack, UntransformStack);
		/* skip setup on OOM: stack->untr below would dereference NULL */
		if (!stack) return;
		gf_node_set_private(node, stack);
		gf_node_set_callback_function(node, TraverseUntransform);
		stack->untr = tr;
		gf_node_proto_set_grouping(node);
	}
}
コード例 #6
0
ファイル: laser_dec.c プロジェクト: jnorthrup/gpac
/* Creates a new LASeR scene decoder interface.
 * Returns NULL on allocation failure. */
GF_BaseDecoder *NewLSRDec()
{
	LSRPriv *priv;
	GF_SceneDecoder *tmp;

	GF_SAFEALLOC(tmp, GF_SceneDecoder);
	if (!tmp) return NULL;
	GF_SAFEALLOC(priv, LSRPriv);
	if (!priv) {
		/* do not leak the interface when the private stack cannot be allocated */
		gf_free(tmp);
		return NULL;
	}
	priv->codec = NULL;
	tmp->privateStack = priv;
	tmp->AttachStream = LSR_AttachStream;
	tmp->DetachStream = LSR_DetachStream;
	tmp->GetCapabilities = LSR_GetCapabilities;
	tmp->SetCapabilities = LSR_SetCapabilities;
	tmp->ProcessData = LSR_ProcessData;
	tmp->AttachScene = LSR_AttachScene;
	tmp->CanHandleStream = LSR_CanHandleStream;
	tmp->ReleaseScene = LSR_ReleaseScene;
	GF_REGISTER_MODULE_INTERFACE(tmp, GF_SCENE_DECODER_INTERFACE, "GPAC LASeR Decoder", "gpac distribution")
	return (GF_BaseDecoder *) tmp;
}
コード例 #7
0
ファイル: mpeg4_layer_2d.c プロジェクト: golgol7777/gpac
/* Initializes the MPEG-4 Layer2D node stack and traversal callback. */
void compositor_init_layer2d(GF_Compositor *compositor, GF_Node *node)
{
	Layer2DStack *stack;
	GF_SAFEALLOC(stack, Layer2DStack);
	/* abort on OOM before touching stack fields */
	if (!stack) return;

	stack->backs = gf_list_new();
	stack->views = gf_list_new();
	stack->first = 1;

	gf_node_set_private(node, stack);
	gf_node_set_callback_function(node, TraverseLayer2D);
}
コード例 #8
0
ファイル: opensvc_dec.c プロジェクト: noelove/GPAC-old
GF_BaseDecoder *NewOSVCDec()
{
	GF_MediaDecoder *ifcd;
	OSVCDec *dec;
	
	GF_SAFEALLOC(ifcd, GF_MediaDecoder);
	GF_SAFEALLOC(dec, OSVCDec);
	GF_REGISTER_MODULE_INTERFACE(ifcd, GF_MEDIA_DECODER_INTERFACE, "OpenSVC Decoder", "gpac distribution")

	ifcd->privateStack = dec;

	/*setup our own interface*/	
	ifcd->AttachStream = OSVC_AttachStream;
	ifcd->DetachStream = OSVC_DetachStream;
	ifcd->GetCapabilities = OSVC_GetCapabilities;
	ifcd->SetCapabilities = OSVC_SetCapabilities;
	ifcd->GetName = OSVC_GetCodecName;
	ifcd->CanHandleStream = OSVC_CanHandleStream;
	ifcd->ProcessData = OSVC_ProcessData;
	return (GF_BaseDecoder *) ifcd;
}
コード例 #9
0
ファイル: scene_manager.c プロジェクト: ARSekkat/gpac
GF_EXPORT
GF_SceneManager *gf_sm_new(GF_SceneGraph *graph)
{
	/* Creates a scene manager bound to the given scene graph.
	 * Returns NULL if graph is NULL or on allocation failure. */
	GF_SceneManager *sm;

	if (graph == NULL) return NULL;
	GF_SAFEALLOC(sm, GF_SceneManager);
	if (sm == NULL) return NULL;
	sm->scene_graph = graph;
	sm->streams = gf_list_new();
	return sm;
}
コード例 #10
0
ファイル: faad_dec.c プロジェクト: Acidburn0zzz/gpac
GF_BaseDecoder *NewFAADDec()
{
	GF_MediaDecoder *ifce;
	FAADDec *dec;

	GF_SAFEALLOC(ifce, GF_MediaDecoder);
	GF_SAFEALLOC(dec, FAADDec);
	GF_REGISTER_MODULE_INTERFACE(ifce, GF_MEDIA_DECODER_INTERFACE, "FAAD2 Decoder", "gpac distribution")

	ifce->privateStack = dec;

	/*setup our own interface*/
	ifce->AttachStream = FAAD_AttachStream;
	ifce->DetachStream = FAAD_DetachStream;
	ifce->GetCapabilities = FAAD_GetCapabilities;
	ifce->SetCapabilities = FAAD_SetCapabilities;
	ifce->ProcessData = FAAD_ProcessData;
	ifce->CanHandleStream = FAAD_CanHandleStream;
	ifce->GetName = FAAD_GetCodecName;
	return (GF_BaseDecoder *) ifce;
}
コード例 #11
0
ファイル: saf_in.c プロジェクト: Bevara/GPAC
GF_InputService *NewSAFReader()
{
	SAFIn *reader;
	GF_InputService *plug;
	GF_SAFEALLOC(plug, GF_InputService);
	GF_REGISTER_MODULE_INTERFACE(plug, GF_NET_CLIENT_INTERFACE, "GPAC SAF Reader", "gpac distribution")

	plug->RegisterMimeTypes = SAF_RegisterMimeTypes;
	plug->CanHandleURL = SAF_CanHandleURL;
	plug->ConnectService = SAF_ConnectService;
	plug->CloseService = SAF_CloseService;
	plug->GetServiceDescriptor = SAF_GetServiceDesc;
	plug->ConnectChannel = SAF_ConnectChannel;
	plug->DisconnectChannel = SAF_DisconnectChannel;
	plug->ServiceCommand = SAF_ServiceCommand;

	GF_SAFEALLOC(reader, SAFIn);
	reader->channels = gf_list_new();
	plug->priv = reader;
	return plug;
}
コード例 #12
0
ファイル: hardcoded_protos.c プロジェクト: jnorthrup/gpac
/* Initializes the hardcoded OffscreenGroup proto: binds its traversal stack to the node. */
void compositor_init_offscreen_group(GF_Compositor *compositor, GF_Node *node)
{
    OffscreenGroup og;
    if (OffscreenGroup_GetNode(node, &og)) {
        OffscreenGroupStack *stack;
        GF_SAFEALLOC(stack, OffscreenGroupStack);
        /* skip setup on OOM: stack->og below would dereference NULL */
        if (!stack) return;
        gf_node_set_private(node, stack);
        gf_node_set_callback_function(node, TraverseOffscreenGroup);
        stack->og = og;
        if (og.offscreen) stack->flags |= GROUP_IS_CACHED;
        gf_node_proto_set_grouping(node);
    }
}
コード例 #13
0
ファイル: hc_flash_shape.c プロジェクト: bigbensk/gpac
/* Initializes the hardcoded FlashShape proto: drawable, item list and traversal callback. */
void compositor_init_hc_flashshape(GF_Compositor *compositor, GF_Node *node)
{
	FSStack *stack;

	GF_SAFEALLOC(stack, FSStack);
	/* abort on OOM before touching stack fields */
	if (!stack) return;
	stack->drawable = drawable_new();
	stack->drawable->node = node;
	stack->drawable->flags = DRAWABLE_USE_TRAVERSE_DRAW;
	stack->items = gf_list_new();

	gf_node_set_private(node, stack);
	gf_node_set_callback_function(node, fs_traverse);
}
コード例 #14
0
ファイル: mpeg4_animstream.c プロジェクト: HungMingWu/gpac
/* Initializes the MPEG-4 AnimationStream node stack and registers its time node. */
void compositor_init_animationstream(GF_Compositor *compositor, GF_Node *node)
{
    AnimationStreamStack *st;
    GF_SAFEALLOC(st, AnimationStreamStack);
    /* abort on OOM before touching st fields */
    if (!st) return;
    st->compositor = compositor;
    st->time_handle.UpdateTimeNode = animationstream_update_time;
    st->time_handle.udta = node;

    gf_node_set_private(node, st);
    gf_node_set_callback_function(node, animationstream_destroy);

    gf_sc_register_time_node(compositor, &st->time_handle);
}
コード例 #15
0
ファイル: surface.c プロジェクト: golgol7777/gpac
GF_SURFACE evg_surface_new(GF_Raster2D *_dr, Bool center_coords)
{
	/* Allocates and initializes a new EVG software raster surface.
	 * Returns NULL on allocation failure. */
	EVGSurface *surf;
	GF_SAFEALLOC(surf, EVGSurface);
	if (!surf) return NULL;
	surf->center_coords = center_coords;
	surf->texture_filter = GF_TEXTURE_FILTER_DEFAULT;
	/* wire the FreeType-style raster params to this surface's outline */
	surf->ftparams.source = &surf->ftoutline;
	surf->ftparams.user = surf;
	surf->raster = evg_raster_new();
	return surf;
}
コード例 #16
0
ファイル: audio_filter.c プロジェクト: DmitrySigaev/gpac-sf
void *NewAudioFilter()
{
	FilterContext *ctx;
	GF_AudioFilter *mod;
	GF_SAFEALLOC(ctx, FilterContext);
	if(!ctx) return NULL;

	GF_SAFEALLOC(mod, GF_AudioFilter);
	if(!mod) {
		gf_free(ctx);
		return NULL;
	}
	mod->udta = ctx;
	mod->SetFilter = SetFilter;
	mod->Configure = Configure;
	mod->Process = ProcessIdentity;
	mod->SetOption = SetOption;
	mod->Reset = Reset;

	GF_REGISTER_MODULE_INTERFACE(mod, GF_AUDIO_FILTER_INTERFACE, "Sample Audio Filter", "gpac distribution");
	return mod;
}
コード例 #17
0
ファイル: sdp.c プロジェクト: golgol7777/gpac
GF_EXPORT
/* Creates a new SDP media description with empty attribute lists.
 * Returns NULL on allocation failure. */
GF_SDPMedia *gf_sdp_media_new()
{
	GF_SDPMedia *tmp;
	GF_SAFEALLOC(tmp, GF_SDPMedia);
	/* propagate OOM to the caller instead of crashing on tmp->FMTP */
	if (!tmp) return NULL;
	tmp->FMTP = gf_list_new();
	tmp->RTPMaps = gf_list_new();
	tmp->Attributes = gf_list_new();
	tmp->Connections = gf_list_new();
	tmp->Bandwidths = gf_list_new();
	tmp->Quality = -1;
	return tmp;
}
コード例 #18
0
ファイル: validator.c プロジェクト: DmitrySigaev/gpac-sf
/* Creates the GPAC test validator terminal extension.
 * Returns NULL on allocation failure. */
GF_TermExt *validator_new()
{
	GF_TermExt *dr;
	GF_Validator *validator;
	dr = (GF_TermExt*)gf_malloc(sizeof(GF_TermExt));
	if (!dr) return NULL;
	memset(dr, 0, sizeof(GF_TermExt));
	GF_REGISTER_MODULE_INTERFACE(dr, GF_TERM_EXT_INTERFACE, "GPAC Test Validator", "gpac distribution");

	GF_SAFEALLOC(validator, GF_Validator);
	if (!validator) {
		/* do not leak the extension when the validator state cannot be allocated */
		gf_free(dr);
		return NULL;
	}
	dr->process = validator_process;
	dr->udta = validator;
	return dr;
}
コード例 #19
0
ファイル: ui_rec.c プロジェクト: golgol7777/gpac
/* Creates the GPAC UI recorder terminal extension.
 * Returns NULL on allocation failure. */
GF_TermExt *uir_new()
{
	GF_TermExt *dr;
	GF_UIRecord *uir;
	dr = (GF_TermExt*)gf_malloc(sizeof(GF_TermExt));
	if (!dr) return NULL;
	memset(dr, 0, sizeof(GF_TermExt));
	GF_REGISTER_MODULE_INTERFACE(dr, GF_TERM_EXT_INTERFACE, "GPAC UI Recorder", "gpac distribution");

	GF_SAFEALLOC(uir, GF_UIRecord);
	if (!uir) {
		/* do not leak the extension when the recorder state cannot be allocated */
		gf_free(dr);
		return NULL;
	}
	dr->process = uir_process;
	dr->udta = uir;
	return dr;
}
コード例 #20
0
ファイル: scene_manager.c プロジェクト: Bevara/GPAC
GF_EXPORT
/* Returns the AU (access unit) matching the given timing on the stream, creating
 * and inserting a new one in timing order if none matches.
 * Returns NULL on allocation failure. */
GF_AUContext *gf_sm_stream_au_new(GF_StreamContext *stream, u64 timing, Double time_sec, Bool isRap)
{
    u32 i;
    GF_AUContext *tmp;
    u64 tmp_timing;

    /* prefer the explicit timestamp; otherwise derive one from the time in seconds */
    tmp_timing = timing ? timing : (u64) (time_sec*1000);
    if (stream->imp_exp_time >= tmp_timing) {
        /*look for existing AU*/
        i=0;
        while ((tmp = (GF_AUContext *)gf_list_enum(stream->AUs, &i))) {
            if (timing && (tmp->timing==timing)) return tmp;
            else if (time_sec && (tmp->timing_sec == time_sec)) return tmp;
            else if (!time_sec && !timing && !tmp->timing && !tmp->timing_sec) return tmp;
            /*insert AU*/
            else if ((time_sec && time_sec<tmp->timing_sec) || (timing && timing<tmp->timing)) {
                GF_SAFEALLOC(tmp, GF_AUContext);
                if (!tmp) return NULL;
                tmp->commands = gf_list_new();
                if (isRap) tmp->flags = GF_SM_AU_RAP;
                tmp->timing = timing;
                tmp->timing_sec = time_sec;
                tmp->owner = stream;
                gf_list_insert(stream->AUs, tmp, i-1);
                return tmp;
            }
        }
    }
    GF_SAFEALLOC(tmp, GF_AUContext);
    if (!tmp) return NULL;
    tmp->commands = gf_list_new();
    if (isRap) tmp->flags = GF_SM_AU_RAP;
    tmp->timing = timing;
    tmp->timing_sec = time_sec;
    tmp->owner = stream;
    if (stream->disable_aggregation) tmp->flags |= GF_SM_AU_NOT_AGGREGATED;
    gf_list_add(stream->AUs, tmp);
    stream->imp_exp_time = tmp_timing;
    return tmp;
}
コード例 #21
0
/* Initializes the AnimationStream node stack and registers its time node. */
void InitAnimationStream(GF_Renderer *sr, GF_Node *node)
{
    AnimationStreamStack *st;
    GF_SAFEALLOC(st, AnimationStreamStack);
    /* abort on OOM before handing st to the traversable setup */
    if (!st) return;
    gf_sr_traversable_setup(st, node, sr);
    st->time_handle.UpdateTimeNode = AS_UpdateTime;
    st->time_handle.obj = node;

    gf_node_set_private(node, st);
    gf_node_set_callback_function(node, DestroyAnimationStream);

    gf_sr_register_time_node(sr, &st->time_handle);
}
コード例 #22
0
ファイル: scene_stats.c プロジェクト: bigbensk/gpac
/* Updates per-node-type creation/use/deletion counters in the scene statistics,
 * creating the per-tag counter entry on first sight of a node type. */
static void StatNode(GF_SceneStatistics *stat, GF_Node *n, Bool isUsed, Bool isDelete, GF_Node *prev)
{
	u32 i;
	GF_NodeStats *ptr = NULL;
	if (!stat) return;

	if (n->sgprivate->tag == TAG_ProtoNode) {
#ifndef GPAC_DISABLE_VRML
		GF_ProtoInstance *pr = (GF_ProtoInstance *)n;
		i=0;
		while ((ptr = (GF_NodeStats *)gf_list_enum(stat->proto_stats, &i))) {
			if (pr->proto_interface->ID == ptr->tag) break;
			ptr = NULL;
		}
		if (!ptr) {
			GF_SAFEALLOC(ptr, GF_NodeStats);
			if (!ptr) return;
			ptr->tag = pr->proto_interface->ID;
			ptr->name = gf_sg_proto_get_class_name(pr->proto_interface);
			gf_list_add(stat->proto_stats, ptr);
		}
#endif
	} else {
		i=0;
		while ((ptr = (GF_NodeStats *)gf_list_enum(stat->node_stats, &i))) {
			if (n->sgprivate->tag == ptr->tag) break;
			ptr = NULL;
		}
		if (!ptr) {
			GF_SAFEALLOC(ptr, GF_NodeStats);
			if (!ptr) return;
			ptr->tag = n->sgprivate->tag;
			ptr->name = gf_node_get_class_name(n);
			gf_list_add(stat->node_stats, ptr);
		}
	}
	/* guard: ptr stays NULL for proto nodes when VRML support is compiled out */
	if (!ptr) return;
	if (isDelete) ptr->nb_del += n->sgprivate->num_instances;
	else if (isUsed) ptr->nb_used += 1;
	/*this is because the node passes twice in the stat, once on DumpNode and once in replaceALL*/
	else ptr->nb_created += prev ? (prev->sgprivate->num_instances - 1) : 1;
}
コード例 #23
0
ファイル: hardcoded_protos.c プロジェクト: jnorthrup/gpac
/* Initializes the hardcoded DepthGroup proto: binds its traversal stack to the node. */
void compositor_init_depth_group(GF_Compositor *compositor, GF_Node *node)
{
    DepthGroup dg;
    if (DepthGroup_GetNode(node, &dg)) {
        DepthGroupStack *stack;
        GF_SAFEALLOC(stack, DepthGroupStack);
        /* skip setup on OOM: stack->dg below would dereference NULL */
        if (!stack) return;
        gf_node_set_private(node, stack);
        gf_node_set_callback_function(node, TraverseDepthGroup);
        stack->dg = dg;
        gf_node_proto_set_grouping(node);
    } else GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor2D] Unable to initialize depth group  \n"));

}
コード例 #24
0
/* Initializes the RadialGradient texture stack for the given node. */
void R2D_InitRadialGradient(Render2D *sr, GF_Node *node)
{
	GradientStack *st;
	GF_SAFEALLOC(st, GradientStack);
	/* abort on OOM before handing st->txh to the texture setup */
	if (!st) return;

	gf_sr_texture_setup(&st->txh, sr->compositor, node);
	st->txh.update_texture_fcnt = UpdateRadialGradient;

	st->txh.compute_gradient_matrix = RG_ComputeMatrix;

	gf_node_set_private(node, st);
	gf_node_set_callback_function(node, DestroyRadialGradient);
}
コード例 #25
0
ファイル: mpeg4_background2d.c プロジェクト: bigbensk/gpac
/* Pushes a new background draw-status entry (transform, color, texture) onto the stack. */
static void b2D_new_status(Background2DStack *bck, M_Background2D*back)
{
	BackgroundStatus *status;

	GF_SAFEALLOC(status, BackgroundStatus);
	/* abort on OOM before initializing the status context */
	if (!status) return;
	gf_mx2d_init(status->ctx.transform);
	status->ctx.drawable = bck->drawable;
	status->ctx.flags = CTX_IS_BACKGROUND;
	status->ctx.bi = &status->bi;
	status->ctx.aspect.fill_color = GF_COL_ARGB_FIXED(FIX_ONE, back->backColor.red, back->backColor.green, back->backColor.blue);
	status->ctx.aspect.fill_texture = &bck->txh;
	gf_list_add(bck->status_stack, status);
}
コード例 #26
0
ファイル: osd.c プロジェクト: noelove/GPAC-old
/* Creates the on-screen display terminal extension.
 * Returns NULL on allocation failure.
 * NOTE(review): the registered name "OnSscreen" looks like a typo, but it is a
 * runtime identifier — left unchanged to avoid breaking module lookups. */
GF_TermExt *osd_new()
{
	GF_TermExt *dr;
	GF_OSD *osd;
	dr = (GF_TermExt*)gf_malloc(sizeof(GF_TermExt));
	if (!dr) return NULL;
	memset(dr, 0, sizeof(GF_TermExt));
	GF_REGISTER_MODULE_INTERFACE(dr, GF_TERM_EXT_INTERFACE, "GPAC OnSscreen Display", "gpac distribution");

	GF_SAFEALLOC(osd, GF_OSD);
	if (!osd) {
		/* do not leak the extension when the OSD state cannot be allocated */
		gf_free(dr);
		return NULL;
	}
	dr->process = osd_process;
	dr->udta = osd;
	return dr;
}
コード例 #27
0
ファイル: audio_mixer.c プロジェクト: HungMingWu/gpac
GF_EXPORT
void gf_mixer_add_input(GF_AudioMixer *am, GF_AudioInterface *src)
{
	MixerInput *in;
	if (gf_mixer_is_src_present(am, src)) return;
	gf_mixer_lock(am, GF_TRUE);
	GF_SAFEALLOC(in, MixerInput);
	in->src = src;
	gf_list_add(am->sources, in);
	am->must_reconfig = GF_TRUE;
	am->isEmpty = GF_FALSE;
	gf_mixer_lock(am, GF_FALSE);
}
コード例 #28
0
ファイル: xml_parser.c プロジェクト: noelove/GPAC-old
/* SAX callback: appends a text or CDATA child node to the current top-of-stack element. */
static void on_dom_text_content(void *cbk, const char *content, Bool is_cdata)
{
	GF_DOMParser *par = (GF_DOMParser *)cbk;
	GF_XMLNode *node;
	GF_XMLNode *last = (GF_XMLNode *)gf_list_last(par->stack);
	if (!last) return;
	assert(last->content);

	GF_SAFEALLOC(node, GF_XMLNode);
	/* drop the text on OOM rather than dereferencing NULL */
	if (!node) return;
	node->type = is_cdata ? GF_XML_CDATA_TYPE : GF_XML_TEXT_TYPE;
	node->name = gf_strdup(content);
	gf_list_add(last->content, node);
}
コード例 #29
0
ファイル: mpeg4_audio.c プロジェクト: golgol7777/gpac
/* Initializes the MPEG-4 AudioSource node stack and registers its time node. */
void compositor_init_audiosource(GF_Compositor *compositor, GF_Node *node)
{
	AudioSourceStack *st;
	GF_SAFEALLOC(st, AudioSourceStack);
	/* abort on OOM before handing st->input to the audio setup */
	if (!st) return;
	gf_sc_audio_setup(&st->input, compositor, node);

	st->time_handle.UpdateTimeNode = audiosource_update_time;
	st->time_handle.udta = node;

	gf_node_set_private(node, st);
	gf_node_set_callback_function(node, audiosource_traverse);
	gf_sc_register_time_node(compositor, &st->time_handle);
}
コード例 #30
0
ファイル: decoder.c プロジェクト: golgol7777/gpac
/* Creates a lightweight codec clone sharing the decoder interface of an existing codec.
 * Returns NULL if the source codec has no decoder interface or on allocation failure. */
GF_Codec *gf_codec_use_codec(GF_Codec *codec, GF_ObjectManager *odm)
{
	GF_Codec *tmp;
	if (!codec->decio) return NULL;
	GF_SAFEALLOC(tmp, GF_Codec);
	/* propagate OOM to the caller instead of crashing on tmp->type */
	if (!tmp) return NULL;
	tmp->type = codec->type;
	tmp->inChannels = gf_list_new();
	tmp->Status = GF_ESM_CODEC_STOP;
	tmp->odm = odm;
	/* mark the clone as a "use" codec so teardown does not destroy the shared decio */
	tmp->flags = codec->flags | GF_ESM_CODEC_IS_USE;
	tmp->decio = codec->decio;
	return tmp;
}