Example No. 1
void Playlist::OnReverseList()
{
	u32 count = gf_list_count(m_entries);
	u32 hcount = count / 2;
	count--;
	for (u32 i=0; i<hcount; i++) {
		PLEntry *ple1 = (PLEntry *) gf_list_get(m_entries, i);
		PLEntry *ple2 = (PLEntry *) gf_list_get(m_entries, count-i);
		gf_list_rem(m_entries, i);
		gf_list_insert(m_entries, ple2, i);
		gf_list_rem(m_entries, count-i);
		gf_list_insert(m_entries, ple1, count-i);
	}
	RefreshList();
}
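For comparison, here is a minimal hedged sketch of the same in-place reversal built on one removal and one head insertion per element; reverse_gf_list is a hypothetical helper using only the public GF_List calls shown above, not GPAC code.

#include <gpac/list.h>

/*hypothetical helper: reverse a GF_List in place by moving each element,
  from index 1 upward, to the head of the list*/
static void reverse_gf_list(GF_List *l)
{
	u32 i, count = gf_list_count(l);
	for (i=1; i<count; i++) {
		void *item = gf_list_get(l, i);
		gf_list_rem(l, i);
		gf_list_insert(l, item, 0);
	}
}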
Example No. 2
GF_EXPORT
GF_Err gf_cfg_insert_key(GF_Config *iniFile, const char *secName, const char *keyName, const char *keyValue, u32 index)
{
	u32 i;
	IniSection *sec;
	IniKey *key;

	if (!iniFile || !secName || !keyName || !keyValue) return GF_BAD_PARAM;

	i = 0;
	while ((sec = (IniSection *)gf_list_enum(iniFile->sections, &i))) {
		if (!strcmp(secName, sec->section_name)) break;
	}
	if (!sec) return GF_BAD_PARAM;

	i = 0;
	while ((key = (IniKey *)gf_list_enum(sec->keys, &i))) {
		if (!strcmp(key->name, keyName)) return GF_BAD_PARAM;
	}

	key = (IniKey *)gf_malloc(sizeof(IniKey));
	if (!key) return GF_OUT_OF_MEM;
	key->name = gf_strdup(keyName);
	key->value = gf_strdup(keyValue);
	gf_list_insert(sec->keys, key, index);
	iniFile->hasChanged = GF_TRUE;
	return GF_OK;
}
Example No. 3
GF_EXPORT
GF_AUContext *gf_sm_stream_au_new(GF_StreamContext *stream, u64 timing, Double time_sec, Bool isRap)
{
	u32 i;
	GF_AUContext *tmp;

	/*look for existing AU*/
	i=0;
	while ((tmp = (GF_AUContext *)gf_list_enum(stream->AUs, &i))) {
		if (timing && (tmp->timing==timing)) return tmp;
		else if (time_sec && (tmp->timing_sec == time_sec)) return tmp;
		else if (!time_sec && !timing && !tmp->timing && !tmp->timing_sec) return tmp;
		/*insert AU*/
		else if ((time_sec && time_sec<tmp->timing_sec) || (timing && timing<tmp->timing)) {
			tmp = (GF_AUContext *)malloc(sizeof(GF_AUContext));
			tmp->commands = gf_list_new();
			tmp->is_rap = isRap;
			tmp->timing = timing;
			tmp->timing_sec = time_sec;
			tmp->owner = stream;
			/*i was already advanced by gf_list_enum, so insert before the current AU to keep timing order*/
			gf_list_insert(stream->AUs, tmp, i-1);
			return tmp;
		}
	}
	tmp = (GF_AUContext *)malloc(sizeof(GF_AUContext));
	tmp->commands = gf_list_new();
	tmp->is_rap = isRap;
	tmp->timing = timing;
	tmp->timing_sec = time_sec;
	tmp->owner = stream;
	gf_list_add(stream->AUs, tmp);
	return tmp;
}
Example No. 4
/*guarantee the tr_state->candidate has the lowest delta value*/
static void group_cache_insert_entry(GF_Node *node, GroupingNode2D *group, GF_TraverseState *tr_state)
{
	u32 i, count;
	GF_List *cache_candidates = tr_state->visual->compositor->cached_groups;
	GroupingNode2D *current;

	current = NULL;
	count = gf_list_count(cache_candidates);
	for (i=0; i<count; i++) {
		current = gf_list_get(cache_candidates, i);
		/*if entry's priority is higher than our group, insert our group here*/
		if (current->priority >= group->priority) {
			gf_list_insert(cache_candidates, group, i);
			break;
		}
	}
	if (i==count)
		gf_list_add(cache_candidates, group);

	tr_state->visual->compositor->video_cache_current_size += group->cached_size;
	/*log the information*/
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CACHE, ("[CACHE]\tAdding object %s\tObjects: %d\tSlope: %g\tSize: %d\tTime: %d\n",
								gf_node_get_log_name(node),
								group->nb_objects,
								FIX2FLT(group->priority),
								group->cached_size,
								group->traverse_time));

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CACHE, ("[CACHE] Status (KB): Max: %d\tUsed: %d\tNb Groups: %d\n",
								tr_state->visual->compositor->video_cache_max_size,
								tr_state->visual->compositor->video_cache_current_size,
								gf_list_count(tr_state->visual->compositor->cached_groups)
								));
}
Example No. 5
void Playlist::Sort(u32 type)
{
	u32 i, j, smallest;
	if (gf_list_count(m_entries)<=1) return;

	for (i=0; i<gf_list_count(m_entries)-1; i++) {
		smallest = i;
		for (j=i+1; j<gf_list_count(m_entries); j++) {
			PLEntry *ple2 = (PLEntry *) gf_list_get(m_entries, smallest);
			PLEntry *ple1 = (PLEntry *) gf_list_get(m_entries, j);
			s32 test = 0;
			switch (type) {
			case 0:
				test = stricmp(ple1->m_url, ple2->m_url);
				break;
			case 1:
				test = stricmp(ple1->m_disp_name, ple2->m_disp_name);
				break;
			case 2:
				test = ple1->m_duration - ple2->m_duration;
				break;
			}
			if (test<0) smallest = j;
		}
		PLEntry *ple = (PLEntry *)gf_list_get(m_entries, smallest);
		gf_list_rem(m_entries, smallest);
		gf_list_insert(m_entries, ple, i);
	}
	m_cur_entry = -1;
	RefreshList();
}
Example No. 6
static void merge_nalus_list(GF_List  *src, GF_List *dst)
{
	u32 i, count = gf_list_count(src);
	for (i=0; i<count; i++) {
		void *p = gf_list_get(src, i);
		if (p) gf_list_insert(dst, p, 0);
	}
}
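Note that inserting every element at index 0, as merge_nalus_list does, prepends src to dst in reverse order; the hedged sketch below (hypothetical demo function, standard GF_List API only) illustrates the effect.

#include <gpac/list.h>

/*illustrative only: with src = [A, B, C] and dst = [X], inserting each src
  item at index 0 leaves dst = [C, B, A, X]*/
static void demo_prepend_reversed(void)
{
	char A[] = "A", B[] = "B", C[] = "C", X[] = "X";
	u32 i, count;
	GF_List *src = gf_list_new();
	GF_List *dst = gf_list_new();

	gf_list_add(src, A);
	gf_list_add(src, B);
	gf_list_add(src, C);
	gf_list_add(dst, X);

	count = gf_list_count(src);
	for (i=0; i<count; i++) {
		void *p = gf_list_get(src, i);
		if (p) gf_list_insert(dst, p, 0);
	}
	/*dst now holds C, B, A, X*/
	gf_list_del(src);
	gf_list_del(dst);
}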
Example No. 7
GF_Err gf_bifs_dec_qp_set(GF_BifsDecoder *codec, GF_Node *qp)
{
	assert(gf_node_get_tag(qp) == TAG_MPEG4_QuantizationParameter);

	/*if we have an active QP, push it into the stack*/
	if (codec->ActiveQP && ((GF_Node*)codec->ActiveQP != codec->scenegraph->global_qp) )
		gf_list_insert(codec->QPs, codec->ActiveQP, 0);

	codec->ActiveQP = (M_QuantizationParameter *)qp;
	return GF_OK;
}
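The call above uses gf_list_insert(list, item, 0) as a stack push; a hedged sketch of that LIFO idiom follows (stack_push/stack_pop are hypothetical helpers, not taken from the BIFS decoder).

#include <gpac/list.h>

/*hypothetical helpers: treat a GF_List as a LIFO stack, with the top at index 0*/
static void stack_push(GF_List *stack, void *item)
{
	gf_list_insert(stack, item, 0);
}

static void *stack_pop(GF_List *stack)
{
	void *top = gf_list_get(stack, 0);
	if (top) gf_list_rem(stack, 0);
	return top;
}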
Example No. 8
GF_EXPORT
void gf_sc_texture_setup(GF_TextureHandler *txh, GF_Compositor *compositor, GF_Node *owner)
{
	memset(txh, 0, sizeof(GF_TextureHandler));
	txh->owner = owner;
	txh->compositor = compositor;
	/*insert texture in reverse order, so that textures in sub documents/scenes are updated before parent ones*/
	if (gf_list_find(compositor->textures, txh)<0) 
		gf_list_insert(compositor->textures, txh, 0);
	if (!txh->update_texture_fcnt) txh->update_texture_fcnt = update_texture_void;
}
Example No. 9
File: hinting.c Project: erelh/gpac
GF_Err gf_isom_hint_pck_add_dte(u8 HintType, GF_HintPacket *ptr, GF_GenericDTE *dte, u8 AtBegin)
{
	switch (HintType) {
	case GF_ISMO_HINT_RTP:
		if (AtBegin)
			return gf_list_insert( ((GF_RTPPacket *)ptr)->DataTable, dte, 0);
		else
			return gf_list_add( ((GF_RTPPacket *)ptr)->DataTable, dte);

	default:
		return GF_NOT_SUPPORTED;
	}
}
Example No. 10
static GF_Err swf_svg_show_frame(SWFReader *read)
{
    u32     i;
    u32     len;
    GF_List *sdl = gf_list_new(); // sorted display list

    /* sorting the display list */
    while (gf_list_count(read->display_list))
    {
        Bool        inserted = 0;
        DispShape   *s;

        s = (DispShape *)gf_list_get(read->display_list, 0);
        gf_list_rem(read->display_list, 0);
        
        for (i = 0; i < gf_list_count(sdl); i++)
        {
            DispShape *s2 = (DispShape *)gf_list_get(sdl, i);
            if (s->depth < s2->depth) 
            {
                gf_list_insert(sdl, s, i);
                inserted = 1;
                break;
            }
        }
        if (!inserted)
        {
            gf_list_add(sdl, s);
        }
    }
    gf_list_del(read->display_list);
    read->display_list = sdl;

    /* dumping the display list */
    len = gf_list_count(read->display_list);
    for (i=0; i<len; i++)
    {
        DispShape   *s;
        s = (DispShape *)gf_list_get(read->display_list, i);
        fprintf(read->svg_output, "<use xlink:href=\"#S%d\" z-index=\"%d\" ", s->char_id, s->depth);
        swf_svg_print_matrix(read, &s->mat);
        fprintf(read->svg_output, "/>\n");
    }
    fprintf(read->svg_output, "</g>\n");

    fprintf(read->svg_output, "<g id=\"frame%d\" display=\"none\">\n",read->current_frame+1);
    fprintf(read->svg_output, "<animate attributeName=\"display\" to=\"inline\" begin=\"%f\" end=\"%f\" fill=\"%s\" restart=\"never\"/>\n", 
        1.0*(read->current_frame+1)/read->frame_rate, 1.0*(read->current_frame+2)/read->frame_rate,
        (((read->current_frame+1) <= (read->frame_count-1)) ? "remove" : "freeze"));
    return GF_OK;
}
Example No. 11
void V4StudioTree::OnEndDrag(wxTreeEvent& event)
{
    wxTreeItemId itemSrc = m_draggedItem, itemDst = event.GetItem(), dstParentItem = GetItemParent(itemDst);
    m_draggedItem = (wxTreeItemId)0l;

	V4StudioTreeItemData *srcData = (V4StudioTreeItemData *)GetItemData(itemSrc);
	GF_FieldInfo srcField;
	srcData->GetField(&srcField);
	// Removal of the src item from its parent field
	switch (srcField.fieldType) {
	case GF_SG_VRML_SFNODE:
		if (* (GF_Node **) srcField.far_ptr) {}
		break;
	case GF_SG_VRML_MFNODE:
		{
			GF_List *nodes = (* (GF_List **) srcField.far_ptr);
			gf_list_rem(nodes, srcData->GetPosition());
		}
		break;
	default:
		break;
	}

	GF_Node *srcNode = srcData->GetNode();
	GF_FieldInfo dstField;
	V4StudioTreeItemData *dstData = (V4StudioTreeItemData *)GetItemData(itemDst);
	dstData->GetField(&dstField);
	// Addition of the src item prior to the dest item
	switch (dstField.fieldType) {
	case GF_SG_VRML_SFNODE:
		if (* (GF_Node **) dstField.far_ptr) {}
		break;
	case GF_SG_VRML_MFNODE:
		{
			GF_List *nodes = (* (GF_List **) dstField.far_ptr);
			gf_list_insert(nodes, srcNode, dstData->GetPosition());
			gf_node_dirty_set(dstData->GetNode(), 0, 1);
		}
		break;
	default:
		break;
	}

	GF_Node *dstParentNode = dstData->GetNodeParent();
	
	Delete(itemSrc);
	AddNodesToItem(dstParentItem, srcNode, dstData->GetFieldIndex(), dstData->GetPosition());
	V4StudioFrame *mainFrame = (V4StudioFrame *)GetParent();
	mainFrame->UpdateSelection(srcNode,dstData->GetNodeParent());
	mainFrame->Update();
}
Example No. 12
GF_Err SetupWriters(MovieWriter *mw, GF_List *writers, u8 interleaving)
{
	u32 i, trackCount;
	TrackWriter *writer;
	GF_TrackBox *trak;
	GF_ISOFile *movie = mw->movie;

	mw->total_samples = mw->nb_done = 0;
	if (!movie->moov) return GF_OK;

	trackCount = gf_list_count(movie->moov->trackList);
	for (i = 0; i < trackCount; i++) {
		trak = gf_isom_get_track(movie->moov, i+1);
		
		GF_SAFEALLOC(writer, TrackWriter);
		if (!writer) goto exit;
		writer->sampleNumber = 1;
		writer->mdia = trak->Media;
		writer->timeScale = trak->Media->mediaHeader->timeScale;
		writer->isDone = 0;
		writer->DTSprev = 0;
		writer->chunkDur = 0;
		writer->stsc = (GF_SampleToChunkBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_STSC);
		if (trak->Media->information->sampleTable->ChunkOffset->type == GF_ISOM_BOX_TYPE_STCO) {
			writer->stco = gf_isom_box_new(GF_ISOM_BOX_TYPE_STCO);
		} else {
			writer->stco = gf_isom_box_new(GF_ISOM_BOX_TYPE_CO64);
		}
		/*stops from chunk escape*/
		if (interleaving) writer->mdia->information->sampleTable->MaxSamplePerChunk = 0;
		/*for progress, assume only one descIndex*/
		if (Media_IsSelfContained(writer->mdia, 1)) mw->total_samples += trak->Media->information->sampleTable->SampleSize->sampleCount;
		/*optimization for interleaving: put audio last (this can be overridden by priorities)*/
		if (movie->storageMode != GF_ISOM_STORE_INTERLEAVED) {
			gf_list_add(writers, writer);	
		} else {
			if (writer->mdia->information->InfoHeader && writer->mdia->information->InfoHeader->type == GF_ISOM_BOX_TYPE_SMHD) {
				gf_list_add(writers, writer);	
			} else {
				gf_list_insert(writers, writer, 0);	
			}
		}
	}
	return GF_OK;

exit:
	CleanWriters(writers);
	return GF_OUT_OF_MEM;
}
Example No. 13
/* To reduce the cost of notifying the time to all timed elements, we add to the scene graph
   only the timed elements which have a resolved current interval; other timed elements will be
   added at runtime when an event leads to the creation of a new interval.
   We also insert the new timed element in order of the current_interval begin value, to stop
   the notification of time when it is not necessary */
static Bool gf_smil_timing_add_to_sg(GF_SceneGraph *sg, SMIL_Timing_RTI *rti)
{
	if (rti->current_interval->begin != -1) {
		SMIL_Timing_RTI *cur_rti = NULL;
		u32 i;

		for (i = 0; i < gf_list_count(sg->smil_timed_elements); i++) {
			cur_rti = (SMIL_Timing_RTI *)gf_list_get(sg->smil_timed_elements, i);
			if (cur_rti->current_interval->begin > rti->current_interval->begin) break;
		}
		gf_list_insert(sg->smil_timed_elements, rti, i);
		return 1;
	}
	return 0;
}
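The loop above is an instance of the sorted-insert idiom that recurs throughout these examples: scan for the first entry with a larger key, insert before it, otherwise append. A compact hedged sketch of the idiom as a standalone helper follows (TimedItem and sorted_insert are hypothetical names, standard GF_List API only).

#include <gpac/list.h>

typedef struct {
	Double begin;   /*hypothetical sort key*/
} TimedItem;

/*insert item so that the list stays sorted by ascending begin value*/
static void sorted_insert(GF_List *l, TimedItem *item)
{
	u32 i, count = gf_list_count(l);
	for (i=0; i<count; i++) {
		TimedItem *cur = (TimedItem *)gf_list_get(l, i);
		if (cur->begin > item->begin) {
			gf_list_insert(l, item, i);
			return;
		}
	}
	gf_list_add(l, item);
}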
Example No. 14
/* To reduce the cost of notifying the time to each timed element, we add to the scene graph
   only the timed elements which have a resolved current interval; other timed elements will be
   added at runtime when an event leads to the creation of a new interval.
   We also insert the new timed element in order of the current_interval begin value, so that
   the time notification loop can stop earlier */
static void gf_smil_timing_add_to_sg(GF_SceneGraph *sg, SMIL_Timing_RTI *rti)
{
	if (rti->current_interval) {
		SMIL_Timing_RTI *cur_rti = NULL;
		u32 i, count;

		count = gf_list_count(sg->smil_timed_elements);
		for (i = 0; i < count; i++) {
			cur_rti = (SMIL_Timing_RTI *)gf_list_get(sg->smil_timed_elements, i);
			if (cur_rti->current_interval->begin > rti->current_interval->begin) break;
		}
		gf_list_insert(sg->smil_timed_elements, rti, i);
	}

}
Example No. 15
static void gf_mse_track_buffer_add_packet(GF_HTML_Track *track, GF_MSE_Packet *frame)
{
	u32 i, count;
	Bool inserted = GF_FALSE;

	gf_mx_p(track->buffer_mutex);
	/* TODO: improve insertion*/
	count = gf_list_count(track->buffer);
	for (i = 0; i < count; i++) {
		GF_MSE_Packet *next_frame =  (GF_MSE_Packet *)gf_list_get(track->buffer, i);
		if (frame->sl_header.decodingTimeStamp < next_frame->sl_header.decodingTimeStamp) {
			gf_list_insert(track->buffer, frame, i);
			/* if the frame had no duration, we can now tell its duration because of the next frame */
			if (!frame->sl_header.au_duration) {
				frame->sl_header.au_duration = (u32)(next_frame->sl_header.decodingTimeStamp - frame->sl_header.decodingTimeStamp);
				/* we need also to check the duration of the previous frame */
				if (i > 0) {
					GF_MSE_Packet *prev_frame =  (GF_MSE_Packet *)gf_list_get(track->buffer, i-1);
					/* we update the frame duration if the newly inserted frame modifies it */
					if (!prev_frame->sl_header.au_duration ||
					        prev_frame->sl_header.au_duration > frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp) {
						prev_frame->sl_header.au_duration = (u32)(frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp);
					}
				}
			}
			inserted = GF_TRUE;
			break;
		}
	}
	if (!inserted) {
		gf_list_add(track->buffer, frame);
		/* if the frame is inserted last, we cannot know its duration until a new frame is appended or unless the transport format carried it */
		count = gf_list_count(track->buffer);
		if (count > 1) {
			GF_MSE_Packet *prev_frame =  (GF_MSE_Packet *)gf_list_get(track->buffer, count-2);
			/* we update the frame duration if the newly inserted frame modifies it */
			if (!prev_frame->sl_header.au_duration ||
			        prev_frame->sl_header.au_duration > frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp) {
				prev_frame->sl_header.au_duration = (u32)(frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp);
			}
		}
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Adding frame with PTS %g s and duration %g s (%d frames in buffer)\n", TIMESCALE_TO_SECONDS(frame->sl_header.compositionTimeStamp), TIMESCALE_TO_SECONDS(frame->sl_header.au_duration), gf_list_count(track->buffer)));
	gf_mx_v(track->buffer_mutex);
}
Example No. 16
static void AddSDPLine(GF_List *list, char *sdp_text, Bool is_movie_sdp)
{
	const char *sdp_order;
	u32 i, count = gf_list_count(list);
	char fc = sdp_text[0];

	sdp_order = (is_movie_sdp) ? "vosiuepcbzkatr" : "micbka";
	for (i=0; i<count; i++) {
		char *l = (char *)gf_list_get(list, i);
		char *s1 = (char *)strchr(sdp_order, l[0]);
		char *s2 = (char *)strchr(sdp_order, fc);
		if (s1 && s2 && (strlen(s2)>strlen(s1))) {
			gf_list_insert(list, sdp_text, i);
			return;
		}
	}
	gf_list_add(list, sdp_text);
}
Example No. 17
GF_Err gf_isom_hint_pck_add_dte(GF_HintPacket *ptr, GF_GenericDTE *dte, u8 AtBegin)
{
	if (!ptr) return GF_BAD_PARAM;
	switch (ptr->hint_subtype) {
	case GF_ISOM_BOX_TYPE_RTP_STSD:
	case GF_ISOM_BOX_TYPE_SRTP_STSD:
	case GF_ISOM_BOX_TYPE_RRTP_STSD:
		if (AtBegin)
			return gf_list_insert( ((GF_RTPPacket *)ptr)->DataTable, dte, 0);
		else
			return gf_list_add( ((GF_RTPPacket *)ptr)->DataTable, dte);

	case GF_ISOM_BOX_TYPE_RTCP_STSD:
		return GF_BAD_PARAM;
	default:
		return GF_NOT_SUPPORTED;
	}
}
Example No. 18
File: bindable.c Project: zsuo/gpac
void Bindable_OnSetBind(GF_Node *bindable, GF_List *stack_list, GF_List *for_stack)
{
	u32 i;
	Bool on_top, is_bound, set_bind;
	GF_Node *node;
	GF_List *stack;

	set_bind = Bindable_GetSetBind(bindable);
	is_bound = Bindable_GetIsBound(bindable);

	if (!set_bind && !is_bound) return;
	if (set_bind && is_bound) return;

	i=0;
	while ((stack = (GF_List*)gf_list_enum(stack_list, &i))) {
		if (for_stack && (for_stack!=stack)) continue;

		on_top = (gf_list_get(stack, 0)==bindable) ? GF_TRUE : GF_FALSE;

		if (!set_bind) {
			if (is_bound) Bindable_SetIsBound(bindable, GF_FALSE);
			if (on_top && (gf_list_count(stack)>1)) {
				gf_list_rem(stack, 0);
				gf_list_add(stack, bindable);
				node = (GF_Node*)gf_list_get(stack, 0);
				Bindable_SetIsBound(node, GF_TRUE);
			}
		} else {
			if (!is_bound) Bindable_SetIsBound(bindable, GF_TRUE);
			if (!on_top) {
				/*push old top one down and unbind*/
				node = (GF_Node*)gf_list_get(stack, 0);
				Bindable_SetIsBound(node, GF_FALSE);
				/*insert new top*/
				gf_list_del_item(stack, bindable);
				gf_list_insert(stack, bindable, 0);
			}
		}
	}
	/*force invalidate of the bindable stack's owner*/
	gf_node_dirty_set(bindable, 0, GF_TRUE);
	/*and redraw scene*/
	gf_sc_invalidate(gf_sc_get_compositor(bindable), NULL);
}
Example No. 19
void Playlist::OnSelUp()
{
	s32 i;
	if (!m_FileList.GetSelectedCount()) return;
	POSITION pos = m_FileList.GetFirstSelectedItemPosition();
	int nItem = m_FileList.GetNextSelectedItem(pos);
	if (nItem==0) return;

	pos = m_FileList.GetFirstSelectedItemPosition();
	while (pos != NULL) {
		nItem = m_FileList.GetNextSelectedItem(pos);
		PLEntry *ple = (PLEntry *) m_FileList.GetItemData(nItem);
		i = gf_list_del_item(m_entries, ple);
		assert(i>=1);
		gf_list_insert(m_entries, ple, i-1);
		ple->m_bIsSelected = GF_TRUE;
	}
	RefreshList();
}
Example No. 20
void Playlist::OnSelDown()
{
	s32 i, nItem;
	if (!m_FileList.GetSelectedCount()) return;
	POSITION pos = m_FileList.GetFirstSelectedItemPosition();
	while (pos != NULL) nItem = m_FileList.GetNextSelectedItem(pos);

	if ((u32) nItem + 1 == gf_list_count(m_entries)) return;

	pos = m_FileList.GetFirstSelectedItemPosition();
	while (pos != NULL) {
		nItem = m_FileList.GetNextSelectedItem(pos);
		PLEntry *ple = (PLEntry *) m_FileList.GetItemData(nItem);
		i = gf_list_del_item(m_entries, ple);
		gf_list_insert(m_entries, ple, i+1);
		ple->m_bIsSelected = GF_TRUE;
	}
	RefreshList();
}
Example No. 21
void isor_emulate_chapters(GF_ISOFile *file, GF_InitialObjectDescriptor *iod)
{
	GF_Segment *prev_seg;
	u64 prev_start;
	u64 start;
	u32 i, count;
	if (!iod || gf_list_count(iod->OCIDescriptors)) return;
	count = gf_isom_get_chapter_count(file, 0);
	if (!count) return;

	prev_seg = NULL;
	start = prev_start = 0;
	for (i=0; i<count; i++) {
		const char *name;
		GF_Segment *seg;
		gf_isom_get_chapter(file, 0, i+1, &start, &name);
		seg = (GF_Segment *) gf_odf_desc_new(GF_ODF_SEGMENT_TAG);
		seg->startTime = (Double) (s64) start;
		seg->startTime /= 1000;
		seg->SegmentName = gf_strdup(name);
		gf_list_add(iod->OCIDescriptors, seg);
		if (prev_seg) {
			prev_seg->Duration = (Double) (s64) (start - prev_start);
			prev_seg->Duration /= 1000;
		} else if (start) {
			prev_seg = (GF_Segment *) gf_odf_desc_new(GF_ODF_SEGMENT_TAG);
			prev_seg->startTime = 0;
			prev_seg->Duration = (Double) (s64) (start);
			prev_seg->Duration /= 1000;
			gf_list_insert(iod->OCIDescriptors, prev_seg, 0);
		}
		prev_seg = seg;
		prev_start = start;
	}
	if (prev_seg) {
		start = 1000*gf_isom_get_duration(file);
		start /= gf_isom_get_timescale(file);
		if (start>prev_start) {
			prev_seg->Duration = (Double) (s64) (start - prev_start);
			prev_seg->Duration /= 1000;
		}
	}
}
Example No. 22
void wxPlaylist::OnSelUp(wxCommandEvent &WXUNUSED(event))
{
	s32 i;
	if (!m_FileList->GetSelectedItemCount()) return;
	long item = -1;
	item = m_FileList->GetNextItem(item, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED);
	if (item <= 0) return;

	item = -1;
	for (;;) {
		item = m_FileList->GetNextItem(item, wxLIST_NEXT_ALL, wxLIST_STATE_SELECTED);
		if (item == -1) break;
		PLEntry *ple = (PLEntry *) m_FileList->GetItemData(item);
		i = gf_list_del_item(m_entries, ple);
		assert(i>=1);
		gf_list_insert(m_entries, ple, i-1);
		ple->m_bIsSelected = 1;
	}
	RefreshList();
}
Example No. 23
GF_EXPORT
GF_AUContext *gf_sm_stream_au_new(GF_StreamContext *stream, u64 timing, Double time_sec, Bool isRap)
{
	u32 i;
	GF_AUContext *tmp;
	u64 tmp_timing;

	tmp_timing = timing ? timing : (u64) (time_sec*1000);
	if (stream->imp_exp_time >= tmp_timing) {
		/*look for existing AU*/
		i=0;
		while ((tmp = (GF_AUContext *)gf_list_enum(stream->AUs, &i))) {
			if (timing && (tmp->timing==timing)) return tmp;
			else if (time_sec && (tmp->timing_sec == time_sec)) return tmp;
			else if (!time_sec && !timing && !tmp->timing && !tmp->timing_sec) return tmp;
			/*insert AU*/
			else if ((time_sec && time_sec<tmp->timing_sec) || (timing && timing<tmp->timing)) {
				GF_SAFEALLOC(tmp, GF_AUContext);
				if (!tmp) return NULL;
				tmp->commands = gf_list_new();
				if (isRap) tmp->flags = GF_SM_AU_RAP;
				tmp->timing = timing;
				tmp->timing_sec = time_sec;
				tmp->owner = stream;
				gf_list_insert(stream->AUs, tmp, i-1);
				return tmp;
			}
		}
	}
	GF_SAFEALLOC(tmp, GF_AUContext);
	if (!tmp) return NULL;
	tmp->commands = gf_list_new();
	if (isRap) tmp->flags = GF_SM_AU_RAP;
	tmp->timing = timing;
	tmp->timing_sec = time_sec;
	tmp->owner = stream;
	if (stream->disable_aggregation) tmp->flags |= GF_SM_AU_NOT_AGGREGATED;
	gf_list_add(stream->AUs, tmp);
	stream->imp_exp_time = tmp_timing;
	return tmp;
}
Example No. 24
/* Inserts a new resolved time instant in the begin or end attribute.
   The insertion preserves the sorting and removes previous insertions
   that have become obsolete.
   WARNING: Only used for inserting time when an <a> element, whose target is a timed element, is activated. */
GF_EXPORT
void gf_smil_timing_insert_clock(GF_Node *elt, Bool is_end, Double clock)
{
	u32 i, count, found;
	SVGTimedAnimBaseElement *timed = (SVGTimedAnimBaseElement*)elt;
	SMIL_Time *begin;
	GF_List *l;
	GF_SAFEALLOC(begin, SMIL_Time);
	if (!begin) return;

	begin->type = GF_SMIL_TIME_EVENT_RESOLVED;
	begin->clock = clock;

	l = is_end ? *timed->timingp->end : *timed->timingp->begin;

	found = 0;
	count = gf_list_count(l);
	for (i=0; i<count; i++) {
		SMIL_Time *first = (SMIL_Time *)gf_list_get(l, i);
		/*remove past instantiations*/
		if ((first->type==GF_SMIL_TIME_EVENT_RESOLVED) && (first->clock < begin->clock)) {
			gf_list_rem(l, i);
			gf_free(first);
			i--;
			count--;
			continue;
		}
		if ( (first->type == GF_SMIL_TIME_INDEFINITE) 
			|| ( (first->type == GF_SMIL_TIME_CLOCK) && (first->clock > begin->clock) ) 
		) {
			gf_list_insert(l, begin, i);
			found = 1;
			break;
		}
	}
	if (!found) gf_list_add(l, begin);

	/* call gf_smil_timing_modified */
	gf_node_changed(elt, NULL);
}
Example No. 25
File: hinting.c Project: erelh/gpac
static GF_ISOSample *gf_isom_get_data_sample(GF_HintSample *hsamp, GF_TrackBox *trak, u32 sample_num)
{
	GF_ISOSample *samp;
	GF_HintDataCache *hdc;
	u32 i, count;
	count = gf_list_count(hsamp->sample_cache);
	for (i=0; i<count; i++) {
		hdc = (GF_HintDataCache *)gf_list_get(hsamp->sample_cache, i);
		if ((hdc->sample_num==sample_num) && (hdc->trak==trak)) return hdc->samp;
	}

	samp = gf_isom_sample_new();
	Media_GetSample(trak->Media, sample_num, &samp, &i, 0, NULL);
	if (!samp) return NULL;
	GF_SAFEALLOC(hdc, GF_HintDataCache);
	if (!hdc) return samp;
	hdc->samp = samp;
	hdc->sample_num = sample_num;
	hdc->trak = trak;
	/*we insert all new samples at the head, since they're more likely to be fetched next (except for audio
	interleaving and other multiplexing)*/
	gf_list_insert(hsamp->sample_cache, hdc, 0);
	return samp;
}
Example No. 26
GF_EXPORT
GF_Err gf_sm_aggregate(GF_SceneManager *ctx, u16 ESID)
{
    GF_Err e;
    u32 i, stream_count;
#ifndef GPAC_DISABLE_VRML
    u32 j;
    GF_AUContext *au;
    GF_Command *com;
#endif

    e = GF_OK;

#if DEBUG_RAP
    com_count = 0;
    stream_count = gf_list_count(ctx->streams);
    for (i=0; i<stream_count; i++) {
        GF_StreamContext *sc = (GF_StreamContext *)gf_list_get(ctx->streams, i);
        if (sc->streamType == GF_STREAM_SCENE) {
            au_count = gf_list_count(sc->AUs);
            for (j=0; j<au_count; j++) {
                au = (GF_AUContext *)gf_list_get(sc->AUs, j);
                com_count += gf_list_count(au->commands);
            }
        }
    }
    GF_LOG(GF_LOG_INFO, GF_LOG_SCENE, ("[SceneManager] Making RAP with %d commands\n", com_count));
#endif

    stream_count = gf_list_count(ctx->streams);
    for (i=0; i<stream_count; i++) {
        GF_AUContext *carousel_au;
        GF_List *carousel_commands;
        GF_StreamContext *aggregate_on_stream;
        GF_StreamContext *sc = (GF_StreamContext *)gf_list_get(ctx->streams, i);
        if (ESID && (sc->ESID!=ESID)) continue;

        /*locate the AU in which our commands will be aggregated*/
        carousel_au = NULL;
        carousel_commands = NULL;
        aggregate_on_stream = sc->aggregate_on_esid ? gf_sm_get_stream(ctx, sc->aggregate_on_esid) : NULL;
        if (aggregate_on_stream==sc) {
            carousel_commands = gf_list_new();
        } else if (aggregate_on_stream) {
            if (!gf_list_count(aggregate_on_stream->AUs)) {
                carousel_au = gf_sm_stream_au_new(aggregate_on_stream, 0, 0, 1);
            } else {
                /* assert we already performed aggregation */
                assert(gf_list_count(aggregate_on_stream->AUs)==1);
                carousel_au = gf_list_get(aggregate_on_stream->AUs, 0);
            }
            carousel_commands = carousel_au->commands;
        }
        /*TODO - do this as well for ODs*/
#ifndef GPAC_DISABLE_VRML
        if (sc->streamType == GF_STREAM_SCENE) {
            Bool has_modif = 0;
            /*we check for each stream if it is a base stream (SceneReplace ...) - several streams may carry RAPs if inline nodes are used*/
            Bool base_stream_found = 0;

            /*in DIMS we use an empty initial AU with no commands to signal the RAP*/
            if (sc->objectType == GPAC_OTI_SCENE_DIMS) base_stream_found = 1;

            /*apply all commands - this will also apply the SceneReplace*/
            while (gf_list_count(sc->AUs)) {
                u32 count;
                au = (GF_AUContext *) gf_list_get(sc->AUs, 0);
                gf_list_rem(sc->AUs, 0);

                /*AU not aggregated*/
                if (au->flags & GF_SM_AU_NOT_AGGREGATED) {
                    gf_sm_au_del(sc, au);
                    continue;
                }

                count = gf_list_count(au->commands);

                for (j=0; j<count; j++) {
                    u32 store=0;
                    com = gf_list_get(au->commands, j);
                    if (!base_stream_found) {
                        switch (com->tag) {
                        case GF_SG_SCENE_REPLACE:
                        case GF_SG_LSR_NEW_SCENE:
                        case GF_SG_LSR_REFRESH_SCENE:
                            base_stream_found = 1;
                            break;
                        }
                    }

                    /*aggregate the command*/

                    /*if stream doesn't carry a carousel or carries the base carousel (scene replace), always apply the command*/
                    if (base_stream_found || !sc->aggregate_on_esid) {
                        store = 0;
                    }
                    /*otherwise, check whether the command should be kept in this stream as is, or can be aggregated on this stream*/
                    else {
                        switch (com->tag) {
                        /*the following commands do not impact a sub-tree (e.g. do not deal with nodes), we cannot
                        aggregate them... */
                        case GF_SG_ROUTE_REPLACE:
                        case GF_SG_ROUTE_DELETE:
                        case GF_SG_ROUTE_INSERT:
                        case GF_SG_PROTO_INSERT:
                        case GF_SG_PROTO_DELETE:
                        case GF_SG_PROTO_DELETE_ALL:
                        case GF_SG_GLOBAL_QUANTIZER:
                        case GF_SG_LSR_RESTORE:
                        case GF_SG_LSR_SAVE:
                        case GF_SG_LSR_SEND_EVENT:
                        case GF_SG_LSR_CLEAN:
                            /*todo check in which category to put these commands*/
//						case GF_SG_LSR_ACTIVATE:
//						case GF_SG_LSR_DEACTIVATE:
                            store = 1;
                            break;
                        /*other commands:
                        	!!! we need to know if the target node of the command has been inserted in this stream !!!

                        This is a tedious task, for now we will consider the following cases:
                        	- locate a similar command in the stored list: remove the similar one and aggregate on stream
                        	- by default all AUs are stored if the stream is in aggregate mode - we should fix that by checking insertion points:
                        	 if a command applies to a node that has been inserted in this stream, we can aggregate, otherwise store
                        */
                        default:
                            /*check if we can directly store the command*/
                            assert(carousel_commands);
                            store = store_or_aggregate(sc, com, carousel_commands, &has_modif);
                            break;
                        }
                    }

                    switch (store) {
                    /*command has been merged with a previous command in carousel and needs to be destroyed*/
                    case 2:
                        gf_list_rem(au->commands, j);
                        j--;
                        count--;
                        gf_sg_command_del((GF_Command *)com);
                        break;
                    /*command shall be moved to carousel without being applied*/
                    case 1:
                        gf_list_insert(carousel_commands, com, 0);
                        gf_list_rem(au->commands, j);
                        j--;
                        count--;
                        break;
                    /*command can be applied*/
                    default:
                        e = gf_sg_command_apply(ctx->scene_graph, com, 0);
                        break;
                    }
                }
                gf_sm_au_del(sc, au);
            }

            /*and recreate scene replace*/
            if (base_stream_found) {
                au = gf_sm_stream_au_new(sc, 0, 0, 1);

                switch (sc->objectType) {
                case GPAC_OTI_SCENE_BIFS:
                case GPAC_OTI_SCENE_BIFS_V2:
                    com = gf_sg_command_new(ctx->scene_graph, GF_SG_SCENE_REPLACE);
                    break;
                case GPAC_OTI_SCENE_LASER:
                    com = gf_sg_command_new(ctx->scene_graph, GF_SG_LSR_NEW_SCENE);
                    break;
                case GPAC_OTI_SCENE_DIMS:
                /* We do not create a new command, empty AU is enough in DIMS*/
                default:
                    com = NULL;
                    break;
                }

                if (com) {
                    com->node = ctx->scene_graph->RootNode;
                    ctx->scene_graph->RootNode = NULL;
                    gf_list_del(com->new_proto_list);
                    com->new_proto_list = ctx->scene_graph->protos;
                    ctx->scene_graph->protos = NULL;
                    /*indicate the command is the aggregated scene graph, so that PROTOs and ROUTEs
                    are taken from the scenegraph when encoding*/
                    com->aggregated = 1;
                    gf_list_add(au->commands, com);
                }
            }
            /*update carousel flags of the AU*/
            else if (carousel_commands) {
                /*if the current stream carries its own carousel*/
                if (!carousel_au) {
                    carousel_au = gf_sm_stream_au_new(sc, 0, 0, 1);
                    gf_list_del(carousel_au->commands);
                    carousel_au->commands = carousel_commands;
                }
                carousel_au->flags |= GF_SM_AU_RAP | GF_SM_AU_CAROUSEL;
                if (has_modif) carousel_au->flags |= GF_SM_AU_MODIFIED;
            }
        }
#endif
    }
    return e;
}
Example No. 27
void isor_declare_objects(ISOMReader *read)
{
	GF_ObjectDescriptor *od;
	GF_ESD *esd;
	const char *tag;
	u32 i, count, ocr_es_id, tlen, base_track, j, track_id;
	Bool highest_stream;
	char *opt;
	Bool add_ps_lower = GF_TRUE;

	ocr_es_id = 0;
	opt = (char*) gf_modules_get_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS", "yes");
	} else if (!strcmp(opt, "no")) {
		add_ps_lower = GF_FALSE;
	}

	/*TODO check for alternate tracks*/
	count = gf_isom_get_track_count(read->mov);
	for (i=0; i<count; i++) {
		if (!gf_isom_is_track_enabled(read->mov, i+1)) continue;

		switch (gf_isom_get_media_type(read->mov, i+1)) {
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_TEXT:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_SUBPIC:
			break;
		default:
			continue;
		}

		/*we declare only the highest video track (i.e. the track we play)*/
		highest_stream = GF_TRUE;
		track_id = gf_isom_get_track_id(read->mov, i+1);
		for (j = 0; j < count; j++) {
			if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) {
				highest_stream = GF_FALSE;
				break;
			}
		}
		if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream)
			continue;
		esd = gf_media_map_esd(read->mov, i+1);
		if (esd) {
			gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track);
			esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE;
			/*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/
			/*so we add by default the SPS/PPS of the lower layers to this esd*/
			if (esd->has_ref_base && add_ps_lower) {
				u32 count, refIndex, ref_track, num_sps, num_pps, t;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				GF_AVCConfig *avccfg, *svccfg;

				count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL);
				for (refIndex = count; refIndex != 0; refIndex--) {
					gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track);
					avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1);
					svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1);
					if (avccfg) {
						num_sps = gf_list_count(avccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(avccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(avccfg);
					}
					if (svccfg) {
						num_sps = gf_list_count(svccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(svccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(svccfg);
					}
				}

				if (esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data);
				gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
				gf_odf_avc_cfg_del(cfg);
			}

			od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
			od->service_ifce = read->input;
			od->objectDescriptorID = 0;
			if (!ocr_es_id) ocr_es_id = esd->ESID;
			esd->OCRESID = ocr_es_id;
			gf_list_add(od->ESDescriptors, esd);
			if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
				send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
			} else {
				gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
			}
		}
	}
	/*if cover art, extract it in cache*/
	if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) {
		const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_term_get_service_interface(read->service), "General", "CacheDirectory");
		if (cdir) {
			char szName[GF_MAX_PATH];
			const char *sep;
			FILE *t;
			sep = strrchr(gf_isom_get_filename(read->mov), '\\');
			if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/');
			if (!sep) sep = gf_isom_get_filename(read->mov);

			if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) {
				sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			} else {
				sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			}

			t = gf_f64_open(szName, "wb");

			if (t) {
				Bool isom_contains_video = GF_FALSE;

				/*write cover data*/
				assert(!(tlen & 0x80000000));
				gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t);
				fclose(t);

				/*don't display cover art when video is present*/
				for (i=0; i<gf_isom_get_track_count(read->mov); i++) {
					if (!gf_isom_is_track_enabled(read->mov, i+1))
						continue;
					if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) {
						isom_contains_video = GF_TRUE;
						break;
					}
				}

				if (!isom_contains_video) {
					od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
					od->service_ifce = read->input;
					od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID;
					od->URLString = gf_strdup(szName);
					if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
						send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
					} else {
						gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
					}
				}
			}
		}
	}
	if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
		send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL);
	} else {
		gf_term_add_media(read->service, NULL, GF_FALSE);
	}
}
Example No. 28
void gf_term_add_codec(GF_Terminal *term, GF_Codec *codec)
{
	u32 i, count;
	Bool locked;
	Bool threaded;
	CodecEntry *cd;
	CodecEntry *ptr, *next;
	GF_CodecCapability cap;
	assert(codec);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Registering codec %s\n", codec->decio ? codec->decio->module_name : "RAW"));

	/*caution: the mutex can be grabbed by a decoder waiting for a mutex owned by the calling thread;
	this happens when several scene codecs are running concurrently and triggering play/pause on media*/
	locked = gf_mx_try_lock(term->mm_mx);

	cd = mm_get_codec(term->codecs, codec);
	if (cd) goto exit;

	GF_SAFEALLOC(cd, CodecEntry);
	if (!cd) goto exit;
	cd->dec = codec;
	if (!cd->dec->Priority)
		cd->dec->Priority = 1;

	/*we force audio codecs to be threaded in free mode, so that we avoid waiting in the audio renderer if another decoder is locking the main mutex;
	this can happen when the audio decoder is running late*/
	if (codec->type==GF_STREAM_AUDIO) {
		threaded = 1;
	} else {
		cap.CapCode = GF_CODEC_WANTS_THREAD;
		cap.cap.valueInt = 0;
		gf_codec_get_capability(codec, &cap);
		threaded = cap.cap.valueInt;
	}

	if (threaded) cd->flags |= GF_MM_CE_REQ_THREAD;


	if (term->flags & GF_TERM_MULTI_THREAD) {
		if ((codec->type==GF_STREAM_AUDIO) || (codec->type==GF_STREAM_VISUAL)) threaded = 1;
	} else if (term->flags & GF_TERM_SINGLE_THREAD) {
		threaded = 0;
	}
	if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA)
		threaded = 0;

	if (threaded) {
		cd->thread = gf_th_new(cd->dec->decio->module_name);
		cd->mx = gf_mx_new(cd->dec->decio->module_name);
		cd->flags |= GF_MM_CE_THREADED;
		gf_list_add(term->codecs, cd);
		goto exit;
	}

	//add codec 1- per priority 2- per type, audio being first
	//priorities inherits from Systems (5bits) so range from 0 to 31
	//we sort from MAX to MIN
	count = gf_list_count(term->codecs);
	for (i=0; i<count; i++) {
		ptr = (CodecEntry*)gf_list_get(term->codecs, i);
		if (ptr->flags & GF_MM_CE_THREADED) continue;

		//higher priority, continue
		if (ptr->dec->Priority > codec->Priority) continue;

		//same priority, put audio first
		if (ptr->dec->Priority == codec->Priority) {
			//we insert audio (0x05) before video (0x04)
			if (ptr->dec->type < codec->type) {
				gf_list_insert(term->codecs, cd, i);
				goto exit;
			}
			//same priority, same type: insert after
			if (ptr->dec->type == codec->type) {
				if (i+1==count) {
					gf_list_add(term->codecs, cd);
				} else {
					gf_list_insert(term->codecs, cd, i+1);
				}
				goto exit;
			}
			//we insert video (0x04) after audio (0x05) if next is not audio
			//last one
			if (i+1 == count) {
				gf_list_add(term->codecs, cd);
				goto exit;
			}
			next = (CodecEntry*)gf_list_get(term->codecs, i+1);
			//different priority level, insert
			if ((next->flags & GF_MM_CE_THREADED) || (next->dec->Priority != codec->Priority)) {
				gf_list_insert(term->codecs, cd, i+1);
				goto exit;
			}
			//same priority level and at least one after : continue
			continue;
		}
		gf_list_insert(term->codecs, cd, i);
		goto exit;
	}
	//if we got here, first in list
	gf_list_add(term->codecs, cd);

exit:
	if (locked) gf_mx_v(term->mm_mx);
	return;
}
Example No. 29
static GF_Err gf_webvtt_add_cue_to_samples(GF_WebVTTParser *parser, GF_List *samples, GF_WebVTTCue *cue)
{
	s32 i;
	u64 cue_start;
	u64 cue_end;
	u64 sample_end;

	sample_end = 0;
	cue_start = gf_webvtt_timestamp_get(&cue->start);
	cue_end   = gf_webvtt_timestamp_get(&cue->end);
	/* samples in the samples list are contiguous: sample(n)->start == sample(n-1)->end */
	for (i = 0; i < (s32)gf_list_count(samples); i++) {
		GF_WebVTTSample *sample;
		sample = (GF_WebVTTSample *)gf_list_get(samples, i);
		/* save the sample end in case there are no more samples to test */
		sample_end = sample->end;
		if (cue_start < sample->start)
		{
			/* cues must be ordered according to their start time, so drop the cue */
			/* TODO delete the cue */
			return GF_BAD_PARAM;
		}
		else if (cue_start == sample->start && cue_end == sample->end)
		{
			/* if the timing of the new cue matches the sample, no need to split, add the cue to the sample */
			gf_list_add(sample->cues, cue);
			/* the cue does not need to processed further */
			return GF_OK;
		}
		else if (cue_start >= sample->end)
		{
			/* flush the current sample */
			gf_list_del_item(samples, sample);
			parser->on_sample_parsed(parser->user, sample);
			sample = NULL;
			i--;
			/* process the cue with next sample (if any) or create a new sample */
			continue;
		}
		else if (cue_start >= sample->start)
		{
			u32 j;
			if (cue_start > sample->start) {
				/* create a new sample, insert it after the current one */
				GF_WebVTTSample *new_sample = gf_webvtt_sample_new();
				new_sample->start = cue_start;
				new_sample->end = sample->end;
				gf_list_insert(samples, new_sample, i+1);
				/* split the cues from the old sample into the new one */
				for (j = 0; j < gf_list_count(sample->cues); j++) {
					GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, j);
					GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(old_cue, &cue->start);
					gf_list_add(new_sample->cues, new_cue);
				}
				/* adjust the end of the old sample and flush it */
				sample->end = cue_start;
				gf_list_del_item(samples, sample);
				parser->on_sample_parsed(parser->user, sample);
				sample = NULL;
				i--;
				/* process the cue again with this new sample */
				continue;
			}
			if (cue_end > sample->end) {
				/* the cue is longer than the sample, we split the cue, add one part to the current sample
				and reevaluate with the last part of the cue */
				GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, 0);
				GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(cue, &old_cue->end);
				gf_list_add(sample->cues, cue);
				cue = new_cue;
				cue_start = sample->end;
				/* cue_end unchanged */
				/* process the remaining part of the cue (i.e. the new cue) with the other samples */
				continue;
			} else { /* cue_end < sample->end */
				GF_WebVTTSample *new_sample = gf_webvtt_sample_new();
				new_sample->start = cue_end;
				new_sample->end   = sample->end;
				gf_list_insert(samples, new_sample, i+1);
				for (j = 0; j < gf_list_count(sample->cues); j++) {
					GF_WebVTTCue *old_cue = (GF_WebVTTCue *)gf_list_get(sample->cues, j);
					GF_WebVTTCue *new_cue = gf_webvtt_cue_split_at(old_cue, &cue->end);
					gf_list_add(new_sample->cues, new_cue);
				}
				gf_list_add(sample->cues, cue);
				sample->end = new_sample->start;
				/* done with this cue */
				return GF_OK;
			}
		}
	}
	/* (a part of) the cue remains (was not overlapping) */
	if (cue_start > sample_end) {
		/* if the new cue start is greater than the last sample end,
		    create an empty sample to fill the gap, flush it */
		GF_WebVTTSample *esample = gf_webvtt_sample_new();
		esample->start = sample_end;
		esample->end   = cue_start;
		parser->on_sample_parsed(parser->user, esample);
	}
	/* if the cue has not been added to a sample, create a new sample for it */
	{
		GF_WebVTTSample *sample;
		sample = gf_webvtt_sample_new();
		gf_list_add(samples, sample);
		sample->start = cue_start;
		sample->end = cue_end;
		gf_list_add(sample->cues, cue);
	}
	return GF_OK;
}
Example No. 30
static GF_Err swf_svg_show_frame(SWFReader *read)
{
	u32     i;
	u32     len;
	GF_List *sdl = gf_list_new(); // sorted display list

	/* sorting the display list because SVG/CSS z-index is not well supported */
	while (gf_list_count(read->display_list))
	{
		Bool        inserted = GF_FALSE;
		DispShape   *s;

		s = (DispShape *)gf_list_get(read->display_list, 0);
		gf_list_rem(read->display_list, 0);

		for (i = 0; i < gf_list_count(sdl); i++)
		{
			DispShape *s2 = (DispShape *)gf_list_get(sdl, i);
			if (s->depth < s2->depth)
			{
				gf_list_insert(sdl, s, i);
				inserted = GF_TRUE;
				break;
			}
		}
		if (!inserted)
		{
			gf_list_add(sdl, s);
		}
	}
	gf_list_del(read->display_list);
	read->display_list = sdl;

	/* dumping the display list */
	len = gf_list_count(read->display_list);
	for (i=0; i<len; i++)
	{
		DispShape   *s;
		s = (DispShape *)gf_list_get(read->display_list, i);
		swf_svg_print(read, "<use xlink:href=\"#S%d\" ", s->char_id);
		//swf_svg_print(read, "z-index=\"%d\" ", s->depth);
		swf_svg_print_matrix(read, &s->mat);
		swf_svg_print(read, "/>\n");
		read->empty_frame = GF_FALSE;
	}
	if (!read->empty_frame) {
		read->print_frame_header = GF_TRUE;
		read->frame_header_offset = 0;
		swf_svg_print(read, "<g display=\"none\">\n");
		swf_svg_print(read, "<animate id=\"frame%d_anim\" attributeName=\"display\" to=\"inline\" ", read->current_frame);
		swf_svg_print(read, "begin=\"%g\" ", 1.0*(read->current_frame)/read->frame_rate);
		if (read->current_frame+1 < read->frame_count) {
			swf_svg_print(read, "end=\"frame%d_anim.begin\" fill=\"remove\" ", (read->current_frame+1));
		} else {
			swf_svg_print(read, "fill=\"freeze\" ");
		}
		swf_svg_print(read, "/>\n");
		read->print_frame_header = GF_FALSE;

		swf_svg_print(read, "</g>\n");
	}
	read->add_sample(read->user, read->svg_data, read->svg_data_size, read->current_frame*1000/read->frame_rate, (read->current_frame == 0));
	gf_free(read->svg_data);
	read->svg_data = NULL;
	read->svg_data_size = 0;

	read->empty_frame = GF_TRUE;
	return GF_OK;
}