Example #1
/* This function notifies the scene time to all the timed elements registered in the given scene graph.
   It returns whether at least one timed element is active. If no timed element is active, then from the timing
   point of view the scene has not changed and no rendering refresh is needed, even if the time has changed.
   It uses an additional list of modified timed elements to ensure that timed elements
   modified by the begin/end/repeat of another timed element are notified as well.
*/
Bool gf_smil_notify_timed_elements(GF_SceneGraph *sg)
{
	SMIL_Timing_RTI *rti;
	u32 active_count, i;
	s32 ret;
	Bool do_loop;
	if (!sg) return 0;

	active_count = 0;

	/*
		Note: whenever a timed element is active, we trigger gf_node_dirty_parent_graph so that the parent graph
		is aware that some modifications may happen in the subtree. This is needed when the subtree
		is in an offscreen surface, to force a retraversal of the subtree and thus apply the animation.
	*/
	
	/* notify the new scene time to the registered timed elements;
	   this might modify other timed elements or the element itself,
	   in which case it will be added to the list of modified elements */
	i = 0;
	do_loop = 1;
	while(do_loop && (rti = (SMIL_Timing_RTI *)gf_list_enum(sg->smil_timed_elements, &i))) {
		ret = gf_smil_timing_notify_time(rti, gf_node_get_scene_time((GF_Node*)rti->timed_elt) );
		switch (ret) {
		case -1:
			/* special case for the discard element:
			   when a discard element is executed, it automatically removes itself from the list of timed elements
			   in the scene graph, so we need to fix the index i. */
			i--;
			break;
		case -2:
			/* special return value, -2 means that the tested timed element is waiting to begin
			   Assuming that the timed elements are sorted by begin order, 
			   the next ones don't need to be checked */
			do_loop = 0;
			break;
		case -3:
			/* special case for animation elements which do not need to be notified anymore, 
			   but which require a tree traversal */
			i--;
			active_count ++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 1:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 0:
		default:
			break;
		}
	}

	/* notify the timed elements which have been modified, either since the previous frame (updates, scripts) or
	   by the begin/end/repeat handling of the previous notifications */
	while (gf_list_count(sg->modified_smil_timed_elements)) {
		/* first remove the modified smil timed element */
		rti = gf_list_get(sg->modified_smil_timed_elements, 0);
		gf_list_rem(sg->modified_smil_timed_elements, 0);

		/* then remove it from the list of non-modified elements (if it was there) */
		gf_list_del_item(sg->smil_timed_elements, rti);

		/* then insert it at its right position (in the sorted list of timed elements) */
		gf_smil_timing_add_to_sg(sg, rti);

		/* finally, notify this timed element again */
		rti->force_reevaluation = 1;
		ret = gf_smil_timing_notify_time(rti, gf_node_get_scene_time((GF_Node*)rti->timed_elt) );
		switch (ret) {
		case -1:
			break;
		case -2:
			break;
		case -3:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 1:
			active_count++;
			gf_node_dirty_parent_graph(rti->timed_elt);
			break;
		case 0:
		default:
			break;
		}

	}
	return (active_count>0);
}
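A minimal caller sketch (not from the GPAC sources; the frame-loop context and function name are assumptions): the Bool return value tells the caller whether any timed element is still active, i.e. whether a rendering refresh is needed after the scene time advanced.

static void frame_step(GF_SceneGraph *sg)
{
	/* assuming sg is a valid scene graph whose time was just advanced */
	Bool needs_refresh = gf_smil_notify_timed_elements(sg);
	if (needs_refresh) {
		/* at least one timed element is active: schedule a redraw */
	}
}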
Example #2
GF_Err gf_isom_parse_movie_boxes(GF_ISOFile *mov, u64 *bytesMissing, Bool progressive_mode)
{
    GF_Box *a;
    u64 totSize;
    GF_Err e = GF_OK;

    totSize = 0;


#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
    if (mov->single_moof_mode && mov->single_moof_state == 2) {
        return e;
    }

    /*restart from where we stopped last*/
    totSize = mov->current_top_box_start;
    gf_bs_seek(mov->movieFileMap->bs, mov->current_top_box_start);

#endif


    /*while we have some data, parse our boxes*/
    while (gf_bs_available(mov->movieFileMap->bs)) {
        *bytesMissing = 0;
#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
        mov->current_top_box_start = gf_bs_get_position(mov->movieFileMap->bs);
        GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[iso file] Current top box start before parsing %d\n", mov->current_top_box_start));
#endif

        e = gf_isom_parse_root_box(&a, mov->movieFileMap->bs, bytesMissing, progressive_mode);

        if (e >= 0) {
            e = GF_OK;
        } else if (e == GF_ISOM_INCOMPLETE_FILE) {
            /*our mdat is incomplete, which is only valid for READ ONLY files...*/
            if (mov->openMode != GF_ISOM_OPEN_READ) {
                GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Incomplete MDAT while file is not read-only\n"));
                return GF_ISOM_INVALID_FILE;
            }
            return e;
        } else {
            return e;
        }

        switch (a->type) {
        /*MOOV box*/
        case GF_ISOM_BOX_TYPE_MOOV:
            if (mov->moov) {
                GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Duplicate MOOV detected!\n"));
                return GF_ISOM_INVALID_FILE;
            }
            mov->moov = (GF_MovieBox *)a;
            /*set our pointer to the movie*/
            mov->moov->mov = mov;
#ifndef GPAC_DISABLE_ISOM_FRAGMENTS
            if (mov->moov->mvex) mov->moov->mvex->mov = mov;
#endif
            e = gf_list_add(mov->TopBoxes, a);
            if (e) {
                return e;
            }
            totSize += a->size;
            break;

        /*META box*/
        case GF_ISOM_BOX_TYPE_META:
            if (mov->meta) {
                GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Duplicate META detected!\n"));
                return GF_ISOM_INVALID_FILE;
            }
            mov->meta = (GF_MetaBox *)a;
            e = gf_list_add(mov->TopBoxes, a);
            if (e) {
                return e;
            }
            totSize += a->size;
            break;

        /*we only keep the MDAT in READ for dump purposes*/
        case GF_ISOM_BOX_TYPE_MDAT:
            totSize += a->size;
            if (mov->openMode == GF_ISOM_OPEN_READ) {
                if (!mov->mdat) {
                    mov->mdat = (GF_MediaDataBox *) a;
                    e = gf_list_add(mov->TopBoxes, mov->mdat);
                    if (e) {
                        return e;
                    }
                }
#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
                else if (mov->FragmentsFlags & GF_ISOM_FRAG_READ_DEBUG) gf_list_add(mov->TopBoxes, a);
#endif
                else gf_isom_box_del(a);
            }
            /*if we don't have any MDAT yet, create one (edit-write mode)
            We only work with one mdat, but we're putting it at the place
            of the first mdat found when opening a file for editing*/
            else if (!mov->mdat && (mov->openMode != GF_ISOM_OPEN_READ) && (mov->openMode != GF_ISOM_OPEN_CAT_FRAGMENTS)) {
                gf_isom_box_del(a);
                mov->mdat = (GF_MediaDataBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_MDAT);
                e = gf_list_add(mov->TopBoxes, mov->mdat);
                if (e) {
                    return e;
                }
            } else {
                gf_isom_box_del(a);
            }
            break;
        case GF_ISOM_BOX_TYPE_FTYP:
            /*ONE AND ONLY ONE FTYP*/
            if (mov->brand) {
                gf_isom_box_del(a);
                GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Duplicate FTYP detected!\n"));
                return GF_ISOM_INVALID_FILE;
            }
            mov->brand = (GF_FileTypeBox *)a;
            totSize += a->size;
            e = gf_list_add(mov->TopBoxes, a);
            break;

        case GF_ISOM_BOX_TYPE_PDIN:
            /*ONE AND ONLY ONE PDIN*/
            if (mov->pdin) {
                gf_isom_box_del(a);
                GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Duplicate PDIN detected!\n"));
                return GF_ISOM_INVALID_FILE;
            }
            mov->pdin = (GF_ProgressiveDownloadBox *) a;
            totSize += a->size;
            e = gf_list_add(mov->TopBoxes, a);
            break;


#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
        case GF_ISOM_BOX_TYPE_STYP:
        {
            u32 brand = ((GF_SegmentTypeBox *)a)->majorBrand;
            switch (brand) {
            case GF_4CC('s', 'i', 's', 'x'):
            case GF_4CC('r', 'i', 's', 'x'):
            case GF_4CC('s', 's', 's', 's'):
                mov->is_index_segment = GF_TRUE;
                break;
            default:
                break;
            }
        }
        /*fall-through*/

        case GF_ISOM_BOX_TYPE_SIDX:
            totSize += a->size;
            if (mov->FragmentsFlags & GF_ISOM_FRAG_READ_DEBUG) {
                e = gf_list_add(mov->TopBoxes, a);
            } else {
                gf_isom_box_del(a);
            }
            break;

        case GF_ISOM_BOX_TYPE_MOOF:
            if (!mov->moov) {
                GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] Movie fragment but no moov (yet) - possibly broken parsing!\n"));
            }
            if (mov->single_moof_mode) {
                mov->single_moof_state++;
                if (mov->single_moof_state > 1) {
                    gf_isom_box_del(a);
                    return GF_OK;
                }
            }
            ((GF_MovieFragmentBox *)a)->mov = mov;

            totSize += a->size;
            mov->moof = (GF_MovieFragmentBox *) a;
            /*read & debug: store at root level*/
            if (mov->FragmentsFlags & GF_ISOM_FRAG_READ_DEBUG) {
                u32 k;
                gf_list_add(mov->TopBoxes, a);
                /*also update pointers to trex for debug*/
                if (mov->moov) {
                    for (k=0; k<gf_list_count(mov->moof->TrackList); k++) {
                        GF_TrackFragmentBox *traf = gf_list_get(mov->moof->TrackList, k);
                        if (traf->tfhd) {
                            GF_TrackBox *trak = gf_isom_get_track_from_id(mov->moov, traf->tfhd->trackID);
                            u32 j=0;
                            while ((traf->trex = (GF_TrackExtendsBox*)gf_list_enum(mov->moov->mvex->TrackExList, &j))) {
                                if (traf->trex->trackID == traf->tfhd->trackID) {
                                    if (!traf->trex->track) traf->trex->track = trak;
                                    break;
                                }
                                traf->trex = NULL;
                            }
                        }
                        //we should only parse senc/psec when no saiz/saio is present, otherwise we fetch the info directly
                        if (traf->trex && traf->trex->track && (traf->piff_sample_encryption || traf->sample_encryption)) {
                            GF_TrackBox *trak = GetTrackbyID(mov->moov, traf->tfhd->trackID);
                            e = senc_Parse(mov->movieFileMap->bs, trak, traf, traf->piff_sample_encryption ? (GF_SampleEncryptionBox *) traf->piff_sample_encryption : traf->sample_encryption);
                        }
                    }
                }
            } else if (mov->openMode==GF_ISOM_OPEN_CAT_FRAGMENTS) {
                mov->NextMoofNumber = mov->moof->mfhd->sequence_number+1;
                mov->moof = NULL;
                gf_isom_box_del(a);
            } else {
                /*merge all info*/
                e = MergeFragment((GF_MovieFragmentBox *)a, mov);
                gf_isom_box_del(a);
            }
            break;
#endif
        case GF_4CC('j','P',' ',' '):
        {
            GF_UnknownBox *box = (GF_UnknownBox*)a;
            u8 *c = (u8 *) box->data;
            if ((box->dataSize==4)
                    && (GF_4CC(c[0],c[1],c[2],c[3])==(u32)0x0D0A870A))
                mov->is_jp2 = 1;

            gf_isom_box_del(a);
        }
        break;

        case GF_ISOM_BOX_TYPE_PRFT:
#ifndef GPAC_DISABLE_ISOM_FRAGMENTS
            if (!(mov->FragmentsFlags & GF_ISOM_FRAG_READ_DEBUG)) {
                //keep the last one read
                if (mov->last_producer_ref_time)
                    gf_isom_box_del(a);
                else
                    mov->last_producer_ref_time = (GF_ProducerReferenceTimeBox *)a;
                break;
            }
#endif
        //fallthrough

        default:
            totSize += a->size;
            e = gf_list_add(mov->TopBoxes, a);
            break;
        }

#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
        /*remember where we left off, in case we append a whole number of movie fragments*/
        mov->current_top_box_start = gf_bs_get_position(mov->movieFileMap->bs);
#endif
    }

    /*we need at least moov or meta*/
    if (!mov->moov && !mov->meta
#ifndef GPAC_DISABLE_ISOM_FRAGMENTS
            && !mov->moof && !mov->is_index_segment
#endif
       ) {
        return GF_ISOM_INCOMPLETE_FILE;
    }
    /*we MUST have movie header*/
    if (mov->moov && !mov->moov->mvhd) {
        GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Missing MVHD in MOOV!\n"));
        return GF_ISOM_INVALID_FILE;
    }
    /*we MUST have meta handler*/
    if (mov->meta && !mov->meta->handler) {
        GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Missing handler in META!\n"));
        return GF_ISOM_INVALID_FILE;
    }

#ifndef GPAC_DISABLE_ISOM_WRITE

    if (mov->moov) {
        /*set the default interleaving time*/
        mov->interleavingTime = mov->moov->mvhd->timeScale;

#ifndef	GPAC_DISABLE_ISOM_FRAGMENTS
        /*in edit mode with successfully loaded fragments, delete all fragment signaling since
        file is no longer fragmented*/
        if ((mov->openMode > GF_ISOM_OPEN_READ) && (mov->openMode != GF_ISOM_OPEN_CAT_FRAGMENTS) && mov->moov->mvex) {
            gf_isom_box_del((GF_Box *)mov->moov->mvex);
            mov->moov->mvex = NULL;
        }
#endif

    }

    //create a default mdat if none was found
    if (!mov->mdat && (mov->openMode != GF_ISOM_OPEN_READ) && (mov->openMode != GF_ISOM_OPEN_CAT_FRAGMENTS)) {
        mov->mdat = (GF_MediaDataBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_MDAT);
        e = gf_list_add(mov->TopBoxes, mov->mdat);
        if (e) return e;
    }
#endif /*GPAC_DISABLE_ISOM_WRITE*/

    return GF_OK;
}
Example #3
GF_Err gf_cache_delete_entry ( const DownloadedCacheEntry entry )
{
	if ( !entry )
		return GF_OK;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[CACHE] gf_cache_delete_entry:%d, entry=%p\n", __LINE__, entry));
	if (entry->writeFilePtr) {
		/* Cache should have been closed before; this is an abnormal situation */
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_delete_entry:%d, entry=%p, cache has not been closed properly\n", __LINE__, entry));
		fclose(entry->writeFilePtr);
	}
#ifdef ENABLE_WRITE_MX
	if (entry->write_mutex) {
		gf_mx_del(entry->write_mutex);
	}
#endif
	if (entry->file_exists && entry->deletableFilesOnDelete) {
		GF_LOG(GF_LOG_INFO, GF_LOG_NETWORK, ("[CACHE] url %s cleanup, deleting %s...\n", entry->url, entry->cache_filename));
		if (GF_OK != gf_delete_file(entry->cache_filename))
			GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_delete_entry:%d, failed to delete file %s\n", __LINE__, entry->cache_filename));
	}
#ifdef ENABLE_WRITE_MX
	entry->write_mutex = NULL;
#endif
	entry->write_session = NULL;
	entry->writeFilePtr = NULL;
	if (entry->serverETag)
		gf_free(entry->serverETag);
	entry->serverETag = NULL;

	if (entry->diskETag)
		gf_free(entry->diskETag);
	entry->diskETag = NULL;

	if (entry->serverLastModified)
		gf_free(entry->serverLastModified);
	entry->serverLastModified = NULL;

	if (entry->diskLastModified)
		gf_free(entry->diskLastModified);
	entry->diskLastModified = NULL;

	if ( entry->hash )
	{
		gf_free ( entry->hash );
		entry->hash = NULL;
	}
	if ( entry->url )
	{
		gf_free ( entry->url );
		entry->url = NULL;
	}
	if ( entry->mimeType )
	{
		gf_free ( entry->mimeType );
		entry->mimeType = NULL;
	}
	if (entry->mem_storage) {
		gf_free(entry->mem_storage);
	}

	if ( entry->cache_filename )
	{
		gf_free ( entry->cache_filename );
		entry->cache_filename = NULL;
	}
	if ( entry->properties )
	{
		char * propfile;
		if (entry->deletableFilesOnDelete)
			propfile = gf_cfg_get_filename(entry->properties);
		else
			propfile = NULL;
		gf_cfg_del ( entry->properties );
		entry->properties = NULL;
		if (propfile) {
			if (GF_OK !=  gf_delete_file( propfile ))
				GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_delete_entry:%d, failed to delete file %s\n", __LINE__, propfile));
			gf_free ( propfile );
		}
	}
	entry->dm = NULL;
	if (entry->sessions) {
		assert( gf_list_count(entry->sessions) == 0);
		gf_list_del(entry->sessions);
		entry->sessions = NULL;
	}

	gf_free (entry);
	return GF_OK;
}
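Note the defensive teardown pattern above: the function accepts a NULL entry, warns about and closes a write handle that should already have been closed, frees each owned string and immediately resets its pointer, and only deletes the on-disk cache and properties files when the entry is flagged as deletable.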
Example #4
File: saf.c Project: erelh/gpac
GF_Err gf_saf_mux_for_time(GF_SAFMuxer *mux, u32 time_ms, Bool force_end_of_session, char **out_data, u32 *out_size)
{
	u32 i, count, dlen;
	char *data;
	GF_SAFStream *str;
	GF_SAFSample*au;
	GF_BitStream *bs, *payload;

	*out_data = NULL;
	*out_size = 0;

	gf_mx_p(mux->mx);
	if (!force_end_of_session && (mux->state!=1)) {
		gf_mx_v(mux->mx);
		return GF_OK;
	}

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);

	count = gf_list_count(mux->streams);

	/*1: write all stream headers*/
	for (i=0; i<count; i++) {
		str = (GF_SAFStream *)gf_list_get(mux->streams, i);
		if (str->state & 1) continue;

		au = (GF_SAFSample *)gf_list_get(str->aus, 0);

		/*write stream declaration*/
		payload = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
		gf_bs_write_int(payload, str->remote_url ? SAF_REMOTE_STREAM_HEADER : SAF_STREAM_HEADER, 4);
		gf_bs_write_int(payload, str->stream_id, 12);

		gf_bs_write_u8(payload, str->object_type);
		gf_bs_write_u8(payload, str->stream_type);
		gf_bs_write_int(payload, str->ts_resolution, 24);
		gf_bs_write_u16(payload, str->buffersize_db);
		if (str->mime_type) {
			u32 len = (u32) strlen(str->mime_type);
			gf_bs_write_u16(payload, len);
			gf_bs_write_data(payload, str->mime_type, len);
		}
		if (str->remote_url) {
			u32 len = (u32) strlen(str->remote_url);
			gf_bs_write_u16(payload, len);
			gf_bs_write_data(payload, str->remote_url, len);
		}
		if (str->dsi) {
			gf_bs_write_data(payload, str->dsi, str->dsi_len);
		}

		gf_bs_get_content(payload, &data, &dlen);
		gf_bs_del(payload);

		/*write SAF packet header*/
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, 0, 15);
		gf_bs_write_int(bs, 0, 1);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, au ? au->ts : 0, 30);
		gf_bs_write_int(bs, dlen, 16);
		gf_bs_write_data(bs, data, dlen);
		gf_free(data);

		/*mark as signaled*/
		str->state |= 1;
	}

	/*2: write all pending AUs*/
	while (1) {
		GF_SAFStream *src = NULL;
		u32 mux_time = time_ms;

		for (i=0; i<count; i++) {
			str = (GF_SAFStream*)gf_list_get(mux->streams, i);
			au = (GF_SAFSample*)gf_list_get(str->aus, 0);
			if (au && (au->ts*1000 < mux_time*str->ts_resolution)) {
				mux_time = 1000*au->ts/str->ts_resolution;
				src = str;
			}
		}

		if (!src) break;

		au = (GF_SAFSample*)gf_list_get(src->aus, 0);
		gf_list_rem(src->aus, 0);

		/*write the access unit*/
		gf_bs_write_int(bs, au->is_rap ? 1 : 0, 1);
		gf_bs_write_int(bs, src->last_au_sn, 15);
		gf_bs_write_int(bs, 0, 1);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, au->ts, 30);
		gf_bs_write_u16(bs, 2+au->data_size);
		gf_bs_write_int(bs, SAF_ACCESS_UNIT, 4);
		gf_bs_write_int(bs, src->stream_id, 12);
		gf_bs_write_data(bs, au->data, au->data_size);

		src->last_au_sn ++;
		src->last_au_ts = au->ts;
		gf_free(au->data);
		gf_free(au);
	}

	/*3: write all end-of-stream packets*/
	for (i=0; i<count; i++) {
		str = (GF_SAFStream*)gf_list_get(mux->streams, i);
		/*only write end of stream for streams flagged for it (state & 2) and with no pending AUs*/
		if (!(str->state & 2)) continue;
		if (gf_list_count(str->aus)) continue;

		/*write end of stream*/
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, str->last_au_sn, 15);
		gf_bs_write_int(bs, 0, 1);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, str->last_au_ts, 30);
		gf_bs_write_int(bs, 2, 16);
		gf_bs_write_int(bs, SAF_END_OF_STREAM, 4);
		gf_bs_write_int(bs, str->stream_id, 12);

		/*remove stream*/
		gf_list_rem(mux->streams, i);
		i--;
		count--;
		saf_stream_del(str);
	}
	mux->state = 0;
	if (force_end_of_session) {
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, 0, 15);
		gf_bs_write_int(bs, 0, 1);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_int(bs, 0, 30);
		gf_bs_write_int(bs, 2, 16);
		gf_bs_write_int(bs, SAF_END_OF_SESSION, 4);
		gf_bs_write_int(bs, 0, 12);
		mux->state = 2;
	}
	gf_bs_get_content(bs, out_data, out_size);
	gf_bs_del(bs);
	gf_mx_v(mux->mx);
	return GF_OK;
}
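A hypothetical caller sketch (the muxer setup, clock_ms and ownership of the returned buffer are assumptions): the muxer is flushed at the current clock time and the resulting SAF packet buffer, if any, is consumed and released by the caller.

char *pck = NULL;
u32 pck_size = 0;
GF_Err e = gf_saf_mux_for_time(mux, clock_ms, GF_FALSE, &pck, &pck_size);
if ((e == GF_OK) && pck && pck_size) {
	/* send or store the SAF packets here */
	gf_free(pck); /* assuming the caller owns the returned buffer */
}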
Example #5
/*we have no choice but to always browse the children, since a source can be replaced by a new one
without the parent being modified. We just collect the sources and check them against the current mixer inputs
to decide whether to reset the mixer - the spec is not clear on whether rebuffering shall happen when a source is modified*/
static void audiobuffer_traverse(GF_Node *node, void *rs, Bool is_destroy)
{
	u32 j;
	Bool update_mixer;
	GF_ChildNodeItem *l;
	GF_AudioGroup *parent;
	AudioBufferStack *st = (AudioBufferStack *)gf_node_get_private(node);
	M_AudioBuffer *ab = (M_AudioBuffer *)node;
	GF_TraverseState*tr_state = (GF_TraverseState*) rs;

	if (is_destroy) {
		gf_sc_audio_unregister(&st->output);
		if (st->time_handle.is_registered) 
			gf_sc_unregister_time_node(st->output.compositor, &st->time_handle);

		gf_mixer_del(st->am);
		if (st->buffer) gf_free(st->buffer);
		gf_list_del(st->new_inputs);
		gf_free(st);
		return;
	}
	parent = tr_state->audio_parent;
	tr_state->audio_parent = (GF_AudioGroup *) st;
	l = ab->children;
	while (l) {
		gf_node_traverse(l->node, tr_state);
		l = l->next;
	}

	gf_mixer_lock(st->am, 1);

	/*if there are no new inputs, don't change the mixer config*/
	update_mixer = gf_list_count(st->new_inputs) ? 1 : 0;
	
	if (gf_mixer_get_src_count(st->am) == gf_list_count(st->new_inputs)) {
		u32 count = gf_list_count(st->new_inputs);
		update_mixer = 0;
		for (j=0; j<count; j++) {
			GF_AudioInput *cur = (GF_AudioInput *)gf_list_get(st->new_inputs, j);
			if (!gf_mixer_is_src_present(st->am, &cur->input_ifce)) {
				update_mixer = 1;
				break;
			}
		}
	}

	if (update_mixer) {
		gf_mixer_remove_all(st->am);
		gf_mixer_force_chanel_out(st->am, ab->numChan);
	}

	while (gf_list_count(st->new_inputs)) {
		GF_AudioInput *src = (GF_AudioInput *)gf_list_get(st->new_inputs, 0);
		gf_list_rem(st->new_inputs, 0);
		if (update_mixer) gf_mixer_add_input(st->am, &src->input_ifce);
	}

	gf_mixer_lock(st->am, 0);
	tr_state->audio_parent = parent;

	/*Note: the audio buffer is ALWAYS registered until destroyed, since buffer filling shall happen even when inactive*/
	if (!st->output.register_with_parent || !st->output.register_with_renderer) 
		gf_sc_audio_register(&st->output, tr_state);

	/*store mute flag*/
	st->is_muted = tr_state->switched_off;
}
Example #6
GF_EXPORT
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer) 
{	
	u32 size;
	u16 port;
	char mediaName[30], payloadName[30];
	char sdp[20000], sdpLine[10000];

	if (!out_sdp_buffer) return GF_BAD_PARAM;

	gf_rtp_builder_get_payload_name(rtp->packetizer, payloadName, mediaName);
	gf_rtp_get_ports(rtp->channel, &port, NULL);

	sprintf(sdp, "m=%s %d RTP/%s %d\n", mediaName, port, rtp->packetizer->slMap.IV_length ? "SAVP" : "AVP", rtp->packetizer->PayloadType);
	sprintf(sdpLine, "a=rtpmap:%d %s/%d\n", rtp->packetizer->PayloadType, payloadName, rtp->packetizer->sl_config.timestampResolution);
	strcat(sdp, sdpLine);
	if (ESID && (rtp->packetizer->rtp_payt != GF_RTP_PAYT_3GPP_DIMS)) {
		sprintf(sdpLine, "a=mpeg4-esid:%d\n", ESID);
		strcat(sdp, sdpLine);
	}

	if (width && height) {
		if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H263) {
			sprintf(sdpLine, "a=cliprect:0,0,%d,%d\n", height, width);
			strcat(sdp, sdpLine);
		}
		/*extensions for some mobile phones*/
		sprintf(sdpLine, "a=framesize:%d %d-%d\n", rtp->packetizer->PayloadType, width, height);
		strcat(sdp, sdpLine);
	}
		
	strcpy(sdpLine, "");

	/*AMR*/
	if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1\n", rtp->packetizer->PayloadType);
	}
	/*Text*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, isofile, isotrack);
		strcat(sdpLine, "\n");
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
	}
	/*H264/AVC*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC)) {
		GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;

		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				if (i) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_odf_avc_cfg_del(avcc);
			strcat(sdpLine, "\n");
		}
	}
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) {
#ifndef GPAC_DISABLE_HEVC
		GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL;
		if (hevcc) {
			u32 count, i, j, b64s;
			char b64[200];
			sprintf(sdpLine, "a=fmtp:%d", rtp->packetizer->PayloadType);
			count = gf_list_count(hevcc->param_array);
			for (i = 0; i < count; i++) {
				GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevcc->param_array, i);
				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					strcat(sdpLine, "; sprop-sps=");						
				} else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					strcat(sdpLine, "; sprop-pps=");
				} else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					strcat(sdpLine, "; sprop-vps=");
				}
				for (j = 0; j < gf_list_count(ar->nalus); j++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					if (j) strcat(sdpLine, ", ");
					strcat(sdpLine, b64);
				}
			}
			gf_odf_hevc_cfg_del(hevcc);
			strcat(sdpLine, "\n");
		}
#endif
	}
	/*MPEG-4 decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dsi, dsi_len);
		strcat(sdpLine, "\n");

		if (rtp->packetizer->slMap.IV_length && KMS_URI) {
			if (!strnicmp(KMS_URI, "(key)", 5) || !strnicmp(KMS_URI, "(ipmp)", 6) || !strnicmp(KMS_URI, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, KMS_URI);
			strcat(sdpLine, "\n");
		}
	}
	/*DIMS decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", rtp->packetizer->PayloadType, 10);
		if (rtp->packetizer->flags & GP_RTP_DIMS_COMPRESSED) {
			strcat(sdpLine, ";content-coding=deflate");
		}
		strcat(sdpLine, "\n");
	}
	/*MPEG-4 Audio LATM*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 

		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 

		/* audio-specific config  - PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
		if (dsi) gf_bs_write_data(bs, dsi, MIN(dsi_len, 2) ); 

		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 

		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size); 
		gf_free(config_bytes); 
		strcat(sdpLine, "\n");
	}

	strcat(sdp, sdpLine);

	size = (u32) strlen(sdp) + (*out_sdp_buffer ? (u32) strlen(*out_sdp_buffer) : 0) + 1;
	if ( !*out_sdp_buffer) {
		*out_sdp_buffer = gf_malloc(sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcpy(*out_sdp_buffer, sdp);
	} else {
		*out_sdp_buffer = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcat(*out_sdp_buffer, sdp);
	}
	return GF_OK;
} 
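A hypothetical usage sketch (streamer, ESID, dsi and dsi_len are assumptions; the ISO file, KMS URI and video size arguments are left empty, which is assumed acceptable for payloads that do not need them): the SDP media description is appended into a caller-owned buffer that must eventually be freed.

char *sdp = NULL;
GF_Err e = gf_rtp_streamer_append_sdp_extended(streamer, ESID, dsi, dsi_len, NULL, 0, NULL, 0, 0, &sdp);
if ((e == GF_OK) && sdp) {
	/* merge sdp into the session description */
	gf_free(sdp);
}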
Example #7
static u32 store_or_aggregate(GF_StreamContext *sc, GF_Command *com, GF_List *commands, Bool *has_modif)
{
#ifndef GPAC_DISABLE_VRML
    u32 i, count, j, nb_fields;
    GF_CommandField *field, *check_field;

    /*if our command deals with a node inserted in the commands list, apply command list*/
    if (node_in_commands_subtree(com->node, commands)) return 0;

    /*otherwise, check if we can substitute a previous command with this one*/
    count = gf_list_count(commands);
    for (i=0; i<count; i++) {
        GF_Command *check = gf_list_get(commands, i);

        if (sc->streamType == GF_STREAM_SCENE) {
            Bool check_index=0;
            Bool original_is_index = 0;
            Bool apply;
            switch (com->tag) {
            case GF_SG_INDEXED_REPLACE:
                check_index=1;
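                /*fall-through*/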
            case GF_SG_MULTIPLE_INDEXED_REPLACE:
            case GF_SG_FIELD_REPLACE:
            case GF_SG_MULTIPLE_REPLACE:
                if (check->node != com->node) break;
                /*we may aggregate an indexed insertion and a replace one*/
                if (check_index) {
                    if (check->tag == GF_SG_INDEXED_REPLACE) {}
                    else if (check->tag == GF_SG_INDEXED_INSERT) {
                        original_is_index = 1;
                    }
                    else {
                        break;
                    }
                } else {
                    if (check->tag != com->tag) break;
                }
                nb_fields = gf_list_count(com->command_fields);
                if (gf_list_count(check->command_fields) != nb_fields) break;
                apply=1;
                for (j=0; j<nb_fields; j++) {
                    field = gf_list_get(com->command_fields, j);
                    check_field = gf_list_get(check->command_fields, j);
                    if ((field->pos != check_field->pos) || (field->fieldIndex != check_field->fieldIndex)) {
                        apply=0;
                        break;
                    }
                }
                /*same target node+fields, destroy first command and store new one*/
                if (apply) {
                    /*if indexed, change command tag*/
                    if (original_is_index) com->tag = GF_SG_INDEXED_INSERT;

                    gf_sg_command_del((GF_Command *)check);
                    gf_list_rem(commands, i);
                    if (has_modif) *has_modif = 1;
                    return 1;
                }
                break;

            case GF_SG_NODE_REPLACE:
                if (check->tag != GF_SG_NODE_REPLACE) {
                    break;
                }
                /*TODO - THIS IS NOT SUPPORTED IN GPAC SINCE WE NEVER ALLOW FOR DUPLICATE NODE IDs IN THE SCENE !!!*/
                if (gf_node_get_id(check->node) != gf_node_get_id(com->node) ) {
                    break;
                }
                /*same node ID, destroy first command and store new one*/
                gf_sg_command_del((GF_Command *)check);
                gf_list_rem(commands, i);
                if (has_modif) *has_modif = 1;
                return 1;

            case GF_SG_INDEXED_DELETE:
                /*look for an indexed insert before the indexed delete with same target pos and node. If found, discard both commands!*/
                if (check->tag != GF_SG_INDEXED_INSERT) break;
                if (com->node != check->node) break;
                field = gf_list_get(com->command_fields, 0);
                check_field = gf_list_get(check->command_fields, 0);
                if (!field || !check_field) break;
                if (field->pos != check_field->pos) break;
                if (field->fieldIndex != check_field->fieldIndex) break;

                gf_sg_command_del((GF_Command *)check);
                gf_list_rem(commands, i);
                if (has_modif) *has_modif = 1;
                return 2;

            default:
                GF_LOG(GF_LOG_ERROR, GF_LOG_SCENE, ("[Scene Manager] Stream Aggregation not implemented for command - aggregating on main scene\n"));
                break;
            }
        }
    }
    /*the command modifies a stream other than the associated current carousel stream; we have to store it*/
    if (has_modif) *has_modif=1;
#endif
    return 1;
}
Example #8
GF_Err gf_bifs_dec_sf_field(GF_BifsDecoder * codec, GF_BitStream *bs, GF_Node *node, GF_FieldInfo *field, Bool is_mem_com)
{
	GF_Err e;
	GF_Node *new_node;
	u32 size, length, w, h, i;
	char *buffer;

	//blindly call unquantize; the return is GF_OK (field handled), an error, or GF_EOS (field not quantized)
	if (codec->ActiveQP && node) {
		e = gf_bifs_dec_unquant_field(codec, bs, node, field);
		if (e != GF_EOS) return e;
	}
	//not quantized, use normal scheme
	switch (field->fieldType) {
	case GF_SG_VRML_SFBOOL:
		* ((SFBool *) field->far_ptr) = (SFBool) gf_bs_read_int(bs, 1);
		break;
	case GF_SG_VRML_SFCOLOR:
		((SFColor *)field->far_ptr)->red = BD_ReadSFFloat(codec, bs);
		((SFColor *)field->far_ptr)->green = BD_ReadSFFloat(codec, bs);
		((SFColor *)field->far_ptr)->blue = BD_ReadSFFloat(codec, bs);
		break;
	case GF_SG_VRML_SFFLOAT:
		*((SFFloat *)field->far_ptr) = BD_ReadSFFloat(codec, bs);
		break;
	case GF_SG_VRML_SFINT32:
		*((SFInt32 *)field->far_ptr) = (s32) gf_bs_read_int(bs, 32);
		break;
	case GF_SG_VRML_SFTIME:
		*((SFTime *)field->far_ptr) = gf_bs_read_double(bs);
		if (node) BD_CheckSFTimeOffset(codec, node, field);
		break;
	case GF_SG_VRML_SFVEC2F:
		((SFVec2f *)field->far_ptr)->x = BD_ReadSFFloat(codec, bs);
		((SFVec2f *)field->far_ptr)->y = BD_ReadSFFloat(codec, bs);
		break;
	case GF_SG_VRML_SFVEC3F:
		((SFVec3f *)field->far_ptr)->x = BD_ReadSFFloat(codec, bs);
		((SFVec3f *)field->far_ptr)->y = BD_ReadSFFloat(codec, bs);
		((SFVec3f *)field->far_ptr)->z = BD_ReadSFFloat(codec, bs);
		break;
	case GF_SG_VRML_SFROTATION:
		((SFRotation *)field->far_ptr)->x = BD_ReadSFFloat(codec, bs);
		((SFRotation *)field->far_ptr)->y = BD_ReadSFFloat(codec, bs);
		((SFRotation *)field->far_ptr)->z = BD_ReadSFFloat(codec, bs);
		((SFRotation *)field->far_ptr)->q = BD_ReadSFFloat(codec, bs);
		break;
	case GF_SG_VRML_SFSTRING:
		size = gf_bs_read_int(bs, 5);
		length = gf_bs_read_int(bs, size);
		if (gf_bs_available(bs) < length) return GF_NON_COMPLIANT_BITSTREAM;

		if (node && (node->sgprivate->tag==TAG_MPEG4_CacheTexture) && (field->fieldIndex<=2)) {
			M_CacheTexture *ct = (M_CacheTexture *) node;
			ct->data_len = length;
			if (ct->data) gf_free(ct->data);
			ct->data = gf_malloc(sizeof(char)*length);
			gf_bs_read_data(bs, (char*)ct->data, length);
		} else if (node && (node->sgprivate->tag==TAG_MPEG4_BitWrapper) ) {
			M_BitWrapper *bw = (M_BitWrapper*) node;
			if (bw->buffer.buffer) gf_free(bw->buffer.buffer);
			bw->buffer_len = length;
			bw->buffer.buffer = gf_malloc(sizeof(char)*length);
			gf_bs_read_data(bs, (char*)bw->buffer.buffer, length);
		} else {
			if ( ((SFString *)field->far_ptr)->buffer ) gf_free( ((SFString *)field->far_ptr)->buffer);
			((SFString *)field->far_ptr)->buffer = (char *)gf_malloc(sizeof(char)*(length+1));
			memset(((SFString *)field->far_ptr)->buffer , 0, length+1);
			for (i=0; i<length; i++) {
				((SFString *)field->far_ptr)->buffer[i] = gf_bs_read_int(bs, 8);
			}
		}
		break;
	case GF_SG_VRML_SFURL:
	{
		SFURL *url = (SFURL *) field->far_ptr;
		size = gf_bs_read_int(bs, 1);
		if (size) {
			if (url->url) gf_free(url->url );
			url->url = NULL;
			length = gf_bs_read_int(bs, 10);
			url->OD_ID = length;
		} else {
			if ( url->OD_ID ) url->OD_ID = (u32) -1;
			size = gf_bs_read_int(bs, 5);
			length = gf_bs_read_int(bs, size);
			if (gf_bs_available(bs) < length) return GF_NON_COMPLIANT_BITSTREAM;
			buffer = NULL;
			if (length) {
				buffer = (char *)gf_malloc(sizeof(char)*(length+1));
				memset(buffer, 0, length+1);
				for (i=0; i<length; i++) buffer[i] = gf_bs_read_int(bs, 8);
			}
			if (url->url) gf_free( url->url);
			/*if URL is empty set it to NULL*/
			if (buffer && strlen(buffer)) {
				url->url = buffer;
			} else {
				gf_free(buffer);
				url->url = NULL;
			}
		}
	}
	break;
	case GF_SG_VRML_SFIMAGE:
		if (((SFImage *)field->far_ptr)->pixels) gf_free(((SFImage *)field->far_ptr)->pixels);
		w = gf_bs_read_int(bs, 12);
		h = gf_bs_read_int(bs, 12);
		length = gf_bs_read_int(bs, 2);

		if (length > 3) length = 3;
		length += 1;
		size = w * h * length;
		if (gf_bs_available(bs) < size) return GF_NON_COMPLIANT_BITSTREAM;
		((SFImage *)field->far_ptr)->width = w;
		((SFImage *)field->far_ptr)->height = h;
		((SFImage *)field->far_ptr)->numComponents = length;
		((SFImage *)field->far_ptr)->pixels = (unsigned char *)gf_malloc(sizeof(char)*size);
		//WARNING: Buffers are NOT ALIGNED IN THE BITSTREAM
		for (i=0; i<size; i++) {
			((SFImage *)field->far_ptr)->pixels[i] = gf_bs_read_int(bs, 8);
		}
		break;
	case GF_SG_VRML_SFCOMMANDBUFFER:
	{
		SFCommandBuffer *sfcb = (SFCommandBuffer *)field->far_ptr;
		if (sfcb->buffer) {
			gf_free(sfcb->buffer);
			sfcb->buffer = NULL;
		}
		while (gf_list_count(sfcb->commandList)) {
			GF_Command *com = (GF_Command*)gf_list_get(sfcb->commandList, 0);
			gf_list_rem(sfcb->commandList, 0);
			gf_sg_command_del(com);
		}

		size = gf_bs_read_int(bs, 5);
		length = gf_bs_read_int(bs, size);
		if (gf_bs_available(bs) < length) return GF_NON_COMPLIANT_BITSTREAM;

		sfcb->bufferSize = length;
		if (length) {
			sfcb->buffer = (unsigned char *)gf_malloc(sizeof(char)*(length));
			//WARNING Buffers are NOT ALIGNED IN THE BITSTREAM
			for (i=0; i<length; i++) {
				sfcb->buffer[i] = gf_bs_read_int(bs, 8);
			}
		}
		//notify the node - this is needed in case an enhancement layer replaces the buffer, in which case
		//the # ID Bits may change
		SFCommandBufferChanged(codec, node);

		/*
		 1 - memory mode, register command buffer for later parsing
		 2 - InputSensor only works on decompressed commands
		*/
		if (codec->dec_memory_mode || (node->sgprivate->tag==TAG_MPEG4_InputSensor)) {
			CommandBufferItem *cbi = (CommandBufferItem *)gf_malloc(sizeof(CommandBufferItem));
			cbi->node = node;
			cbi->cb = sfcb;
			gf_list_add(codec->command_buffers, cbi);
		}
	}
	break;
	case GF_SG_VRML_SFNODE:
		//for nodes the field ptr is a ptr to the field, which is a node ptr ;)
		new_node = gf_bifs_dec_node(codec, bs, field->NDTtype);
		if (new_node) {
			e = gf_node_register(new_node, is_mem_com ? NULL : node);
			if (e) return e;
		}
		//it may happen that new_node is NULL (this is valid for a proto declaration)
		*((GF_Node **) field->far_ptr) = new_node;
		break;
	case GF_SG_VRML_SFSCRIPT:
#ifdef GPAC_HAS_SPIDERMONKEY
		codec->LastError = SFScript_Parse(codec, (SFScript*)field->far_ptr, bs, node);
#else
		return GF_NOT_SUPPORTED;
#endif
		break;
	case GF_SG_VRML_SFATTRREF:
	{
		SFAttrRef *ar = (SFAttrRef *)field->far_ptr;
		u32 nodeID = 1 + gf_bs_read_int(bs, codec->info->config.NodeIDBits);
		ar->node = gf_sg_find_node(codec->current_graph, nodeID);
		if (!ar->node) {
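			/* node not found in the graph: leave the attribute reference unresolved */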

		} else {
			u32 nbBitsDEF = gf_get_bit_size(gf_node_get_num_fields_in_mode(ar->node, GF_SG_FIELD_CODING_DEF) - 1);
			u32 field_ref = gf_bs_read_int(bs, nbBitsDEF);
			codec->LastError = gf_bifs_get_field_index(ar->node, field_ref, GF_SG_FIELD_CODING_DEF, &ar->fieldIndex);
		}
	}
	break;
	default:
		return GF_NON_COMPLIANT_BITSTREAM;
	}
	return codec->LastError;
}
Example #9
u32 gf_mixer_get_src_count(GF_AudioMixer *am)
{
	return gf_list_count(am->sources);
}
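This accessor is what Example #5 relies on when comparing the mixer's current source count against the freshly collected inputs before deciding whether to rebuild the mixer configuration. A minimal sketch of that check (am and new_inputs are assumed to exist):

Bool mixer_src_count_changed(GF_AudioMixer *am, GF_List *new_inputs)
{
	/* a count mismatch alone is enough to require a mixer reconfiguration */
	return (gf_mixer_get_src_count(am) != gf_list_count(new_inputs)) ? 1 : 0;
}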
Example #10
Bool gf_sc_fit_world_to_screen(GF_Compositor *compositor)
{
	GF_TraverseState tr_state;
	SFVec3f pos, diff;
	Fixed dist, d;
	GF_Camera *cam;
	GF_Node *top;

#ifndef GPAC_DISABLE_VRML
//	if (gf_list_count(compositor->visual->back_stack)) return;
	if (gf_list_count(compositor->visual->view_stack)) return 0;
#endif

	gf_mx_p(compositor->mx);
	top = gf_sg_get_root_node(compositor->scene);
	if (!top) {
		gf_mx_v(compositor->mx);
		return 0;
	}
	memset(&tr_state, 0, sizeof(GF_TraverseState));
	gf_mx_init(tr_state.model_matrix);
	tr_state.traversing_mode = TRAVERSE_GET_BOUNDS;
	tr_state.visual = compositor->visual;
	gf_node_traverse(top, &tr_state);
	if (gf_node_dirty_get(top)) {
		tr_state.bbox.is_set = 0;
	}

	if (!tr_state.bbox.is_set) {
		gf_mx_v(compositor->mx);
		/*empty world ...*/
		if (tr_state.bbox.radius==-1) return 1;
		/*2D world with 3D camera forced*/
		if (tr_state.bounds.width&&tr_state.bounds.height) return 1;
		return 0;
	}

	cam = &compositor->visual->camera;

	cam->world_bbox = tr_state.bbox;
	/*fit is based on bounding sphere*/
	dist = gf_divfix(tr_state.bbox.radius, gf_sin(cam->fieldOfView/2) );
	gf_vec_diff(diff, cam->center, tr_state.bbox.center);
	/*do not update if the camera is outside the scene bounding sphere and already closer than the fitting distance*/
	if (gf_vec_len(diff) > tr_state.bbox.radius + cam->radius) {
		gf_vec_diff(diff, cam->vp_position, tr_state.bbox.center);
		d = gf_vec_len(diff);
		if (d<dist) {
			gf_mx_v(compositor->mx);
			return 1;
		}
	}

	diff = gf_vec_scale(camera_get_pos_dir(cam), dist);
	gf_vec_add(pos, tr_state.bbox.center, diff);
	diff = cam->position;
	camera_set_vectors(cam, pos, cam->vp_orientation, cam->fieldOfView);
	cam->position = diff;
	camera_move_to(cam, pos, cam->target, cam->up);
	cam->examine_center = tr_state.bbox.center;
	cam->flags |= CF_STORE_VP;
	if (cam->z_far < dist) cam->z_far = 10*dist;
	camera_changed(compositor, cam);
	gf_mx_v(compositor->mx);
	return 1;
}
Example #11
GF_Err SFScript_Parse(GF_BifsDecoder *codec, SFScript *script_field, GF_BitStream *bs, GF_Node *n)
{
	GF_Err e;
	u32 i, count, nbBits;
	char *ptr;
	ScriptParser parser;
	Bool has_fields = 0;
	e = GF_OK;
	if (gf_node_get_tag(n) != TAG_MPEG4_Script) return GF_NON_COMPLIANT_BITSTREAM;

	parser.codec = codec;
	parser.script = n;
	parser.bs = bs;
	parser.length = 500;
	parser.string = (char *) gf_malloc(sizeof(char)* parser.length);
	parser.string[0] = 0;
	parser.identifiers = gf_list_new();
	parser.new_line = (char *) (codec->dec_memory_mode ? "\n" : NULL);
	parser.indent = 0;

	//first parse fields

	if (gf_bs_read_int(bs, 1)) {
		//endFlag
		while (!gf_bs_read_int(bs, 1)){
			e = ParseScriptField(&parser);
			if (e) goto exit;
			else has_fields = 1;
		}
	} else {
		nbBits = gf_bs_read_int(bs, 4);
		count = gf_bs_read_int(bs, nbBits);
		for (i=0; i<count; i++) {
			e = ParseScriptField(&parser);
			if (e) goto exit;
			else has_fields = 1;
		}
	}
	//reserved
	gf_bs_read_int(bs, 1);
	//then parse
	SFS_AddString(&parser, "javascript:");
	SFS_AddString(&parser, parser.new_line);

	//hasFunction
	while (gf_bs_read_int(bs, 1)) {
		SFS_AddString(&parser, "function ");
		SFS_Identifier(&parser);
		SFS_Arguments(&parser, 0);
		SFS_Space(&parser);
		SFS_StatementBlock(&parser, 1);
		SFS_Line(&parser);
	}

	SFS_Line(&parser);

	if (script_field->script_text) gf_free(script_field->script_text);
	script_field->script_text = (unsigned char *) gf_strdup(parser.string);

exit:
	//clean up
	while (gf_list_count(parser.identifiers)) {
		ptr = (char *)gf_list_get(parser.identifiers, 0);
		gf_free(ptr);
		gf_list_rem(parser.identifiers, 0);
	}
	gf_list_del(parser.identifiers);
	if (parser.string) gf_free(parser.string);
	return e;
}
Example #12
static Bool validator_xvs_open(GF_Validator *validator)
{
	GF_Err e;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("[Validator] Opening Validation Script: %s\n", validator->xvs_filename));
	validator->snapshot_number = 0;
	validator->xvs_parser = gf_xml_dom_new();
	e = gf_xml_dom_parse(validator->xvs_parser, validator->xvs_filename, NULL, NULL);
	if (e != GF_OK) {
		if (validator->is_recording) {
			GF_SAFEALLOC(validator->xvs_node, GF_XMLNode);
			validator->xvs_node->name = gf_strdup("TestValidationScript");
			validator->xvs_node->attributes = gf_list_new();
			validator->xvs_node->content = gf_list_new();
		} else {
			gf_xml_dom_del(validator->xvs_parser);
			validator->xvs_parser = NULL;
			return 0;
		}
	} else {
		validator->xvs_node = gf_xml_dom_get_root(validator->xvs_parser);
	}
	/* Get the file name from the XVS if not found in the XVL */
	if (!validator->test_filename) {
		GF_XMLAttribute *att;
		GF_XMLAttribute *att_file;
		u32 att_index = 0;
		att_file = NULL;
		while (1) {
			att = gf_list_get(validator->xvs_node->attributes, att_index);
			if (!att) {
				break;
			} else if (!strcmp(att->name, "file")) {
				att_file = att;
			}
			att_index++;
		}

		if (!att_file) {
			gf_xml_dom_del(validator->xvs_parser);
			validator->xvs_parser = NULL;
			validator->xvs_node = NULL;
			return 0;
		} else {
			char *sep;
			sep = strrchr(att_file->value, GF_PATH_SEPARATOR);
			if (!sep) {
				validator->test_filename = att_file->value;
			} else {
				sep[0] = 0;
				validator->test_base = gf_strdup(att_file->value);
				sep[0] = GF_PATH_SEPARATOR;
				validator->test_filename = sep+1;
			}
		}
	}
	if (validator->is_recording) {
		GF_XMLNode *node;
		/* Removing prerecorded interactions */
		while (gf_list_count(validator->xvs_node->content)) {
			GF_XMLNode *child = (GF_XMLNode *)gf_list_last(validator->xvs_node->content);
			gf_list_rem_last(validator->xvs_node->content);
			gf_xml_dom_node_del(child);
		}
		/* adding an extra text node for line break in serialization */
		GF_SAFEALLOC(node, GF_XMLNode);
		node->type = GF_XML_TEXT_TYPE;
		node->name = gf_strdup("\n");
		gf_list_add(validator->xvs_node->content, node);
	} else {
		validator->xvs_result = 1;
	}
	return 1;
}
Example #13
GF_EXPORT
GF_Err gf_hinter_finalize(GF_ISOFile *file, u32 IOD_Profile, u32 bandwidth)
{
	u32 i, sceneT, odT, descIndex, size, size64;
	GF_InitialObjectDescriptor *iod;
	GF_SLConfig slc;
	GF_ESD *esd;
	GF_ISOSample *samp;
	Bool remove_ocr;
	char *buffer;
	char buf64[5000], sdpLine[2300];


	gf_isom_sdp_clean(file);

	if (bandwidth) {
		sprintf(buf64, "b=AS:%d", bandwidth);
		gf_isom_sdp_add_line(file, buf64);
	}
	//extended attribute for copyright
	sprintf(buf64, "a=x-copyright: %s", "MP4/3GP File hinted with GPAC " GPAC_FULL_VERSION " (C)2000-2005 - http://gpac.sourceforge.net");
	gf_isom_sdp_add_line(file, buf64);

	if (IOD_Profile == GF_SDP_IOD_NONE) return GF_OK;

	odT = sceneT = 0;
	for (i=0; i<gf_isom_get_track_count(file); i++) {
		if (!gf_isom_is_track_in_root_od(file, i+1)) continue;
		switch (gf_isom_get_media_type(file,i+1)) {
		case GF_ISOM_MEDIA_OD:
			odT = i+1;
			break;
		case GF_ISOM_MEDIA_SCENE:
			sceneT = i+1;
			break;
		}
	}
	remove_ocr = 0;
	if (IOD_Profile == GF_SDP_IOD_ISMA_STRICT) {
		IOD_Profile = GF_SDP_IOD_ISMA;
		remove_ocr = 1;
	}

	/*if we want ISMA-like IODs, we need at least BIFS*/
	if ( (IOD_Profile == GF_SDP_IOD_ISMA) && !sceneT ) return GF_BAD_PARAM;

	/*do NOT change PLs, we assume they are correct*/
	iod = (GF_InitialObjectDescriptor *) gf_isom_get_root_od(file);
	if (!iod) return GF_NOT_SUPPORTED;

	/*rewrite an IOD with a good SL config - embed data if possible*/
	if (IOD_Profile == GF_SDP_IOD_ISMA) {
		Bool is_ok = 1;
		while (gf_list_count(iod->ESDescriptors)) {
			esd = (GF_ESD*)gf_list_get(iod->ESDescriptors, 0);
			gf_odf_desc_del((GF_Descriptor *) esd);
			gf_list_rem(iod->ESDescriptors, 0);
		}


		/*get the OD ESD, and embed stream data if possible*/
		if (odT) {
			esd = gf_isom_get_esd(file, odT, 1);
			if (gf_isom_get_sample_count(file, odT)==1) {
				samp = gf_isom_get_sample(file, odT, 1, &descIndex);
				if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_OD)) {
					InitSL_NULL(&slc);
					slc.predefined = 0;
					slc.hasRandomAccessUnitsOnlyFlag = 1;
					slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, odT);	
					slc.OCRResolution = 1000;
					slc.startCTS = samp->DTS+samp->CTS_Offset;
					slc.startDTS = samp->DTS;
					//set the SL for future extraction
					gf_isom_set_extraction_slc(file, odT, 1, &slc);

					size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
					buf64[size64] = 0;
					sprintf(sdpLine, "data:application/mpeg4-od-au;base64,%s", buf64);

					esd->decoderConfig->avgBitrate = 0;
					esd->decoderConfig->bufferSizeDB = samp->dataLength;
					esd->decoderConfig->maxBitrate = 0;
					size64 = strlen(sdpLine)+1;
					esd->URLString = (char*)gf_malloc(sizeof(char) * size64);
					strcpy(esd->URLString, sdpLine);
				} else {
					GF_LOG(GF_LOG_WARNING, GF_LOG_RTP, ("[rtp hinter] OD sample too large to be embedded in IOD - ISMA disabled\n"));
					is_ok = 0;
				}
				gf_isom_sample_del(&samp);
			}
			if (remove_ocr) esd->OCRESID = 0;
			else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;
			
			//OK, add this to our IOD
			gf_list_add(iod->ESDescriptors, esd);
		}

		esd = gf_isom_get_esd(file, sceneT, 1);
		if (gf_isom_get_sample_count(file, sceneT)==1) {
			samp = gf_isom_get_sample(file, sceneT, 1, &descIndex);
			if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_SCENE)) {

				slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, sceneT);	
				slc.OCRResolution = 1000;
				slc.startCTS = samp->DTS+samp->CTS_Offset;
				slc.startDTS = samp->DTS;
				//set the SL for future extraction
				gf_isom_set_extraction_slc(file, sceneT, 1, &slc);
				//encode the sample in Base64
				size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
				buf64[size64] = 0;
				sprintf(sdpLine, "data:application/mpeg4-bifs-au;base64,%s", buf64);

				esd->decoderConfig->avgBitrate = 0;
				esd->decoderConfig->bufferSizeDB = samp->dataLength;
				esd->decoderConfig->maxBitrate = 0;
				esd->URLString = (char*)gf_malloc(sizeof(char) * (strlen(sdpLine)+1));
				strcpy(esd->URLString, sdpLine);
			} else {
				GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Scene description sample too large to be embedded in IOD - ISMA disabled\n"));
				is_ok = 0;
			}
			gf_isom_sample_del(&samp);
		}
		if (remove_ocr) esd->OCRESID = 0;
		else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;

		gf_list_add(iod->ESDescriptors, esd);

		if (is_ok) {
			u32 has_a, has_v, has_i_a, has_i_v;
			has_a = has_v = has_i_a = has_i_v = 0;
			for (i=0; i<gf_isom_get_track_count(file); i++) {
				esd = gf_isom_get_esd(file, i+1, 1);
				if (!esd) continue;
				if (esd->decoderConfig->streamType==GF_STREAM_VISUAL) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_MPEG4_PART2) has_i_v ++;
					else has_v++;
				} else if (esd->decoderConfig->streamType==GF_STREAM_AUDIO) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_AUDIO_AAC_MPEG4) has_i_a ++;
					else has_a++;
				}
				gf_odf_desc_del((GF_Descriptor *)esd);
			}
			/*at most one MPEG-4 visual and one MPEG-4 audio stream for ISMA compliance*/
			if (!has_v && !has_a && (has_i_v<=1) && (has_i_a<=1)) {
				sprintf(sdpLine, "a=isma-compliance:1,1.0,1");
				gf_isom_sdp_add_line(file, sdpLine);
			}
		}
	}

	//encode the IOD
	buffer = NULL;
	size = 0;
	gf_odf_desc_write((GF_Descriptor *) iod, &buffer, &size);
	gf_odf_desc_del((GF_Descriptor *)iod);

	//encode the IOD in Base64
	size64 = gf_base64_encode(buffer, size, buf64, 2000);
	buf64[size64] = 0;
	gf_free(buffer);

	sprintf(sdpLine, "a=mpeg4-iod:\"data:application/mpeg4-iod;base64,%s\"", buf64);
	gf_isom_sdp_add_line(file, sdpLine);

	return GF_OK;
}
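A hypothetical caller sketch (the profile and bandwidth values are illustrative): once every track has been hinted, the session-level SDP is finalized in one call, here requesting an ISMA-style IOD and advertising a 512 kbps session bandwidth.

GF_Err e = gf_hinter_finalize(file, GF_SDP_IOD_ISMA, 512);
if (e != GF_OK) {
	/* the IOD could not be built or embedded; handle the error */
}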
Example #14
GF_EXPORT
GF_Err gf_hinter_track_finalize(GF_RTPHinter *tkHint, Bool AddSystemInfo)
{
	u32 Width, Height;
	GF_ESD *esd;
	char sdpLine[20000];
	char mediaName[30], payloadName[30];

	Width = Height = 0;
	gf_isom_sdp_clean_track(tkHint->file, tkHint->TrackNum);
	if (gf_isom_get_media_type(tkHint->file, tkHint->TrackNum) == GF_ISOM_MEDIA_VISUAL)
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

	gf_rtp_builder_get_payload_name(tkHint->rtp_p, payloadName, mediaName);

	/*TODO- extract out of rtp_p for future live tools*/
	sprintf(sdpLine, "m=%s 0 RTP/%s %d", mediaName, tkHint->rtp_p->slMap.IV_length ? "SAVP" : "AVP", tkHint->rtp_p->PayloadType);
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	if (tkHint->bandwidth) {
		sprintf(sdpLine, "b=AS:%d", tkHint->bandwidth);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (tkHint->nb_chan) {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution, tkHint->nb_chan);
	} else {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution);
	}
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	/*control for MPEG-4*/
	if (AddSystemInfo) {
		sprintf(sdpLine, "a=mpeg4-esid:%d", gf_isom_get_track_id(tkHint->file, tkHint->TrackNum));
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*control for QTSS/DSS*/
	sprintf(sdpLine, "a=control:trackID=%d", gf_isom_get_track_id(tkHint->file, tkHint->HintTrack));
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);

	/*H263 extensions*/
	if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H263) {
		sprintf(sdpLine, "a=cliprect:0,0,%d,%d", Height, Width);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*AMR*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR) || (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1", tkHint->rtp_p->PayloadType);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*Text*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(tkHint->rtp_p, payloadName, sdpLine, tkHint->file, tkHint->TrackNum);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (tkHint->rtp_p->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d", tkHint->rtp_p->PayloadType, tkHint->rtp_p->auh_size*20);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*H264/AVC*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H264_AVC) {
		GF_AVCConfig *avcc = gf_isom_avc_config_get(tkHint->file, tkHint->TrackNum, 1);
		sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", tkHint->rtp_p->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
		if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
			u32 i, count, b64s;
			char b64[200];
			strcat(sdpLine, "; sprop-parameter-sets=");
			count = gf_list_count(avcc->sequenceParameterSets);
			for (i=0; i<count; i++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
				b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
				b64[b64s]=0;
				strcat(sdpLine, b64);
				if (i+1<count) strcat(sdpLine, ",");
			}
			if (i) strcat(sdpLine, ",");
			count = gf_list_count(avcc->pictureParameterSets);
			for (i=0; i<count; i++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
				b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
				b64[b64s]=0;
				strcat(sdpLine, b64);
				if (i+1<count) strcat(sdpLine, ",");
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
		gf_odf_avc_cfg_del(avcc);
	}
	/*MPEG-4 decoder config*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_MPEG4) {
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);

		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		} else {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, NULL, 0);
		}
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

		if (tkHint->rtp_p->slMap.IV_length) {
			const char *kms;
			gf_isom_get_ismacryp_info(tkHint->file, tkHint->TrackNum, 1, NULL, NULL, NULL, NULL, &kms, NULL, NULL, NULL);
			if (!strnicmp(kms, "(key)", 5) || !strnicmp(kms, "(ipmp)", 6) || !strnicmp(kms, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, kms);
		}

		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*MPEG-4 Audio LATM*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 
 
		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 
 
		/* audio-specific config */ 
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1); 
		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo) { 
			/*PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
			gf_bs_write_data(bs, esd->decoderConfig->decoderSpecificInfo->data, MIN(esd->decoderConfig->decoderSpecificInfo->dataLength, 2) ); 
		} 
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd); 
 
		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 
 
		gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, config_bytes, config_size); 
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine); 
		gf_free(config_bytes); 
	}
	/*3GPP DIMS*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) { 
		GF_DIMSDescription dims;
		char fmt[200];
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

		gf_isom_get_dims_description(tkHint->file, tkHint->TrackNum, 1, &dims);
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", tkHint->rtp_p->PayloadType, dims.profile);
		if (! dims.fullRequestHost) {
			strcat(sdpLine, ";useFullRequestHost=0");
			sprintf(fmt, ";pathComponents=%d", dims.pathComponents);
			strcat(sdpLine, fmt);
		}
		if (!dims.streamType) strcat(sdpLine, ";stream-type=secondary");
		if (dims.containsRedundant == 1) strcat(sdpLine, ";contains-redundant=main");
		else if (dims.containsRedundant == 2) strcat(sdpLine, ";contains-redundant=redundant");

		if (dims.textEncoding && strlen(dims.textEncoding)) {
			strcat(sdpLine, ";text-encoding=");
			strcat(sdpLine, dims.textEncoding);
		}
		if (dims.contentEncoding && strlen(dims.contentEncoding)) {
			strcat(sdpLine, ";content-coding=");
			strcat(sdpLine, dims.contentEncoding);
		}
		if (dims.content_script_types && strlen(dims.content_script_types) ) {
			strcat(sdpLine, ";content-script-types=");
			strcat(sdpLine, dims.content_script_types);
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*extensions for some mobile phones*/
	if (Width && Height) {
		sprintf(sdpLine, "a=framesize:%d %d-%d", tkHint->rtp_p->PayloadType, Width, Height);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}

	esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);
	if (esd && esd->decoderConfig && (esd->decoderConfig->rvc_config || esd->decoderConfig->predefined_rvc_config)) {
		if (esd->decoderConfig->predefined_rvc_config) {
			sprintf(sdpLine, "a=rvc-config-predef:%d", esd->decoderConfig->predefined_rvc_config);
		} else {
			/*temporary ...*/
			if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_avc.xml");
			} else {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_sp.xml");
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

	gf_isom_set_track_enabled(tkHint->file, tkHint->HintTrack, 1);
	return GF_OK;
}
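
A note on the H264/AVC branch above: sprop-parameter-sets is built by base64-encoding every SPS, then every PPS, and joining the results with commas. The same pattern can be isolated in a small helper; this is only a sketch reusing the calls shown in the example (gf_base64_encode, gf_list_get), and the helper name and the 200-byte scratch buffer are illustrative assumptions, not GPAC API.

/* Illustrative helper (not part of GPAC): append a comma-separated list of
   base64-encoded parameter sets to an SDP attribute line.
   Mirrors the SPS/PPS loops of the example above. */
static void append_param_sets_b64(char *sdpLine, GF_List *param_sets)
{
	u32 i, count = gf_list_count(param_sets);
	for (i=0; i<count; i++) {
		char b64[200];
		u32 b64s;
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(param_sets, i);
		/* encode the raw parameter set payload and NUL-terminate the result */
		b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
		b64[b64s] = 0;
		strcat(sdpLine, b64);
		/* comma between entries, none after the last one */
		if (i+1<count) strcat(sdpLine, ",");
	}
}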
Example #15
GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, Bool is_first_merge)
{
    u32 i, j, chunk_size;
    u64 base_offset, data_offset;
    u32 def_duration, DescIndex, def_size, def_flags;
    u32 duration, size, flags, cts_offset, prev_trun_data_offset;
    u8 pad, sync;
    u16 degr;
    GF_TrackFragmentRunBox *trun;
    GF_TrunEntry *ent;

    void stbl_AppendTime(GF_SampleTableBox *stbl, u32 duration);
    void stbl_AppendSize(GF_SampleTableBox *stbl, u32 size);
    void stbl_AppendChunk(GF_SampleTableBox *stbl, u64 offset);
    void stbl_AppendSampleToChunk(GF_SampleTableBox *stbl, u32 DescIndex, u32 samplesInChunk);
    void stbl_AppendCTSOffset(GF_SampleTableBox *stbl, u32 CTSOffset);
    void stbl_AppendRAP(GF_SampleTableBox *stbl, u8 isRap);
    void stbl_AppendPadding(GF_SampleTableBox *stbl, u8 padding);
    void stbl_AppendDegradation(GF_SampleTableBox *stbl, u16 DegradationPriority);

    if (trak->Header->trackID != traf->tfhd->trackID) return GF_OK;

    //setup all our defaults
    DescIndex = (traf->tfhd->flags & GF_ISOM_TRAF_SAMPLE_DESC) ? traf->tfhd->sample_desc_index : traf->trex->def_sample_desc_index;
    def_duration = (traf->tfhd->flags & GF_ISOM_TRAF_SAMPLE_DUR) ? traf->tfhd->def_sample_duration : traf->trex->def_sample_duration;
    def_size = (traf->tfhd->flags & GF_ISOM_TRAF_SAMPLE_SIZE) ? traf->tfhd->def_sample_size : traf->trex->def_sample_size;
    def_flags = (traf->tfhd->flags & GF_ISOM_TRAF_SAMPLE_FLAGS) ? traf->tfhd->def_sample_flags : traf->trex->def_sample_flags;

    //locate base offset
    base_offset = (traf->tfhd->flags & GF_ISOM_TRAF_BASE_OFFSET) ? traf->tfhd->base_data_offset : moof_offset;

    chunk_size = 0;
    prev_trun_data_offset = 0;

    /*in playback mode*/
    if (traf->tfdt && is_first_merge) {
#ifndef GPAC_DISABLE_LOG
        if (trak->sample_count_at_seg_start && (trak->dts_at_seg_start != traf->tfdt->baseMediaDecodeTime)) {
            s32 drift = (s32) ((s64)trak->dts_at_seg_start - (s64) traf->tfdt->baseMediaDecodeTime);
            if (drift<0) drift = -drift;
            if (drift > 1) {
                GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] Error: TFDT timing "LLD" different from track cumulated timing "LLD" - using tfdt\n", traf->tfdt->baseMediaDecodeTime, trak->dts_at_seg_start ));
            }
        }
#endif
        trak->dts_at_seg_start = traf->tfdt->baseMediaDecodeTime;
    }

    i=0;
    while ((trun = (GF_TrackFragmentRunBox *)gf_list_enum(traf->TrackRuns, &i))) {
        //merge the run
        for (j=0; j<trun->sample_count; j++) {
            ent = (GF_TrunEntry*)gf_list_get(trun->entries, j);
            size = def_size;
            duration = def_duration;
            flags = def_flags;

            if (ent) {
                if (trun->flags & GF_ISOM_TRUN_DURATION) duration = ent->Duration;
                if (trun->flags & GF_ISOM_TRUN_SIZE) size = ent->size;
                if (trun->flags & GF_ISOM_TRUN_FLAGS) {
                    flags = ent->flags;
                } else if (!j && (trun->flags & GF_ISOM_TRUN_FIRST_FLAG)) {
                    flags = trun->first_sample_flags;
                }
            }
            //add size first
            stbl_AppendSize(trak->Media->information->sampleTable, size);
            //then TS
            stbl_AppendTime(trak->Media->information->sampleTable, duration);
            //add chunk on first sample
            if (!j) {
                data_offset = base_offset;
                //aggregated offset if base-data-offset-present is not set AND if default-base-is-moof is not set
                if (!(traf->tfhd->flags & GF_ISOM_TRAF_BASE_OFFSET) && !(traf->tfhd->flags & GF_ISOM_MOOF_BASE_OFFSET) )
                    data_offset += chunk_size;

                if (trun->flags & GF_ISOM_TRUN_DATA_OFFSET) {
                    data_offset += trun->data_offset;
                    /*reset chunk size since data is now relative to this trun*/
                    chunk_size = 0;
                    /*remember this data offset for following trun*/
                    prev_trun_data_offset = trun->data_offset;
                } else {
                    /*data offset is previous chunk size plus previous offset of the trun*/
                    data_offset += prev_trun_data_offset;
                }
                stbl_AppendChunk(trak->Media->information->sampleTable, data_offset);
                //then sampleToChunk
                stbl_AppendSampleToChunk(trak->Media->information->sampleTable,
                                         DescIndex, trun->sample_count);
            }
            chunk_size += size;


            //CTS
            cts_offset = (trun->flags & GF_ISOM_TRUN_CTS_OFFSET) ? ent->CTS_Offset : 0;
            stbl_AppendCTSOffset(trak->Media->information->sampleTable, cts_offset);

            //flags
            sync = GF_ISOM_GET_FRAG_SYNC(flags);
            stbl_AppendRAP(trak->Media->information->sampleTable, sync);
            pad = GF_ISOM_GET_FRAG_PAD(flags);
            if (pad) stbl_AppendPadding(trak->Media->information->sampleTable, pad);
            degr = GF_ISOM_GET_FRAG_DEG(flags);
            if (degr) stbl_AppendDegradation(trak->Media->information->sampleTable, degr);
        }
    }
    /*merge sample groups*/
    if (traf->sampleGroups) {
        GF_List *groups;
        GF_List *groupDescs;
        if (!trak->Media->information->sampleTable->sampleGroups)
            trak->Media->information->sampleTable->sampleGroups = gf_list_new();

        if (!trak->Media->information->sampleTable->sampleGroupsDescription)
            trak->Media->information->sampleTable->sampleGroupsDescription = gf_list_new();

        groupDescs = trak->Media->information->sampleTable->sampleGroupsDescription;
        for (i=0; i<gf_list_count(traf->sampleGroupsDescription); i++) {
            GF_SampleGroupDescriptionBox *new_sgdesc = NULL;
            GF_SampleGroupDescriptionBox *sgdesc = gf_list_get(traf->sampleGroupsDescription, i);
            for (j=0; j<gf_list_count(groupDescs); j++) {
                new_sgdesc = gf_list_get(groupDescs, j);
                if (new_sgdesc->grouping_type==sgdesc->grouping_type) break;
                new_sgdesc = NULL;
            }
            /*new description, move it to our sample table*/
            if (!new_sgdesc) {
                gf_list_add(groupDescs, sgdesc);
                gf_list_rem(traf->sampleGroupsDescription, i);
                i--;
            }
            /*merge descriptions*/
            else {
                u32 idx = gf_list_count(new_sgdesc->group_descriptions);
                for (j=idx; j<gf_list_count(sgdesc->group_descriptions); j++) {
                    void *ptr = gf_list_get(sgdesc->group_descriptions, j);
                    if (ptr) {
                        gf_list_add(new_sgdesc->group_descriptions, ptr);
                        gf_list_rem(sgdesc->group_descriptions, j);
                        j--;
                    }
                }
            }
        }

        groups = trak->Media->information->sampleTable->sampleGroups;
        for (i=0; i<gf_list_count(traf->sampleGroups); i++) {
            GF_SampleGroupBox *stbl_group = NULL;
            GF_SampleGroupBox *frag_group = gf_list_get(traf->sampleGroups, i);


            for (j=0; j<gf_list_count(groups); j++) {
                stbl_group = gf_list_get(groups, j);
                if ((frag_group->grouping_type==stbl_group->grouping_type) && (frag_group->grouping_type_parameter==stbl_group->grouping_type_parameter))
                    break;
                stbl_group = NULL;
            }
            if (!stbl_group) {
                stbl_group = (GF_SampleGroupBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_SBGP);
                stbl_group->grouping_type = frag_group->grouping_type;
                stbl_group->grouping_type_parameter = frag_group->grouping_type_parameter;
                stbl_group->version = frag_group->version;
                gf_list_add(groups, stbl_group);
            }
            if (frag_group->entry_count && stbl_group->entry_count &&
                    (frag_group->sample_entries[0].group_description_index==stbl_group->sample_entries[stbl_group->entry_count-1].group_description_index)
               ) {
                stbl_group->sample_entries[stbl_group->entry_count - 1].sample_count += frag_group->sample_entries[0].sample_count;
                if (frag_group->entry_count>1) {
                    stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count - 1));
                    memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[1], sizeof(GF_SampleGroupEntry) * (frag_group->entry_count - 1));
                    stbl_group->entry_count += frag_group->entry_count - 1;
                }
            } else {
                stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count));
                memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[0], sizeof(GF_SampleGroupEntry) * frag_group->entry_count);
                stbl_group->entry_count += frag_group->entry_count;
            }
        }
    }
    return GF_OK;
}
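
The per-sample values resolved in the merge loop above follow a fixed precedence: the trun entry wins when the matching trun flag is set, otherwise the tfhd default applies when signaled, otherwise the trex default. A compact sketch of that resolution for the sample size, using the same flags and fields as the example (the helper itself is hypothetical):

/* Illustrative helper (not part of GPAC): sample size precedence used in MergeTrack,
   i.e. trun entry > tfhd default > trex default. */
static u32 resolve_sample_size(GF_TrackFragmentBox *traf, GF_TrackFragmentRunBox *trun, GF_TrunEntry *ent)
{
	/* fragment-level default: tfhd value if signaled, otherwise the trex default */
	u32 size = (traf->tfhd->flags & GF_ISOM_TRAF_SAMPLE_SIZE) ?
	           traf->tfhd->def_sample_size : traf->trex->def_sample_size;
	/* per-sample override carried in the trun entry */
	if (ent && (trun->flags & GF_ISOM_TRUN_SIZE)) size = ent->size;
	return size;
}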
Example #16
GF_EXPORT
Bool gf_mixer_reconfig(GF_AudioMixer *am)
{
	u32 i, count, numInit, max_sample_rate, max_channels, max_bps, cfg_changed, ch_cfg;
	gf_mixer_lock(am, GF_TRUE);
	if (am->isEmpty || !am->must_reconfig) {
		gf_mixer_lock(am, GF_FALSE);
		return GF_FALSE;
	}

	if (am->ar && am->ar->config_forced) {
		am->must_reconfig = GF_FALSE;
		gf_mixer_lock(am, GF_FALSE);
		return GF_FALSE;
	}

	numInit = 0;
	max_sample_rate = am->sample_rate;
	max_channels = am->nb_channels;
	max_bps = am->bits_per_sample;
	cfg_changed = 0;
	ch_cfg = 0;

	max_sample_rate = 0;

	count = gf_list_count(am->sources);
	assert(count);
	for (i=0; i<count; i++) {
		Bool has_cfg;
		MixerInput *in = (MixerInput *) gf_list_get(am->sources, i);
		has_cfg = in->src->GetConfig(in->src, GF_TRUE);
		if (has_cfg) {
			/*check same cfg...*/
			if (in->src->samplerate * in->src->chan * in->src->bps == 8*in->bytes_per_sec) {
				numInit++;
				continue;
			}
		} else continue;
		/*update out cfg*/
		if ((count==1) && (max_sample_rate != in->src->samplerate)) {
//			cfg_changed = 1;
			max_sample_rate = in->src->samplerate;
		} else if (max_sample_rate<in->src->samplerate) {
//			cfg_changed = 1;
			max_sample_rate = in->src->samplerate;
		}
		if ((count==1) && (max_bps!=in->src->bps)) {
			cfg_changed = 1;
			max_bps = in->src->bps;
		} else if (max_bps<in->src->bps) {
			cfg_changed = 1;
			max_bps = in->src->bps;
		}
		if (!am->force_channel_out) {
			if ((count==1) && (max_channels!=in->src->chan)) {
				cfg_changed = 1;
				max_channels = in->src->chan;
				if (in->src->chan>2) ch_cfg |= in->src->ch_cfg;
			} else if (max_channels < in->src->chan) {
				cfg_changed = 1;
				max_channels = in->src->chan;
				if (in->src->chan>2) ch_cfg |= in->src->ch_cfg;
			}
		}

		numInit++;
		in->bytes_per_sec = in->src->samplerate * in->src->chan * in->src->bps / 8;
		/*cfg has changed, we must reconfig everything*/
		if (cfg_changed || (max_sample_rate != am->sample_rate) ) {
			in->has_prev = GF_FALSE;
			memset(&in->last_channels, 0, sizeof(s16)*GF_SR_MAX_CHANNELS);
		}
	}

	if (cfg_changed || (max_sample_rate && (max_sample_rate != am->sample_rate)) ) {
		if (max_channels>2) {
			if (ch_cfg != am->channel_cfg) {
				/*recompute num channel based on all input channels*/
				max_channels = 0;
				if (ch_cfg & GF_AUDIO_CH_FRONT_LEFT) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_FRONT_RIGHT) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_FRONT_CENTER) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_LFE) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_BACK_LEFT) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_BACK_RIGHT) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_BACK_CENTER) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_SIDE_LEFT) max_channels ++;
				if (ch_cfg & GF_AUDIO_CH_SIDE_RIGHT) max_channels ++;
			}
		} else {
			ch_cfg = GF_AUDIO_CH_FRONT_LEFT;
			if (max_channels==2) ch_cfg |= GF_AUDIO_CH_FRONT_RIGHT;
		}
		gf_mixer_set_config(am, max_sample_rate, max_channels, max_bps, ch_cfg);
	}

	if (numInit == count) am->must_reconfig = GF_FALSE;
	if (am->ar) cfg_changed = 1;

	gf_mixer_lock(am, GF_FALSE);
	return cfg_changed;
}
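
When the aggregated channel configuration changes, the reconfiguration above recounts the output channels by testing each speaker-position flag OR-ed into ch_cfg, i.e. a population count over the known flags. The same logic written as a loop (a sketch only; the helper name is not part of the GPAC API):

/* Illustrative helper (not part of GPAC): count output channels from a channel-config bitmask. */
static u32 count_channels_from_cfg(u32 ch_cfg)
{
	u32 flags[] = {
		GF_AUDIO_CH_FRONT_LEFT, GF_AUDIO_CH_FRONT_RIGHT, GF_AUDIO_CH_FRONT_CENTER,
		GF_AUDIO_CH_LFE, GF_AUDIO_CH_BACK_LEFT, GF_AUDIO_CH_BACK_RIGHT,
		GF_AUDIO_CH_BACK_CENTER, GF_AUDIO_CH_SIDE_LEFT, GF_AUDIO_CH_SIDE_RIGHT
	};
	u32 i, nb_chan = 0;
	for (i=0; i<sizeof(flags)/sizeof(flags[0]); i++) {
		if (ch_cfg & flags[i]) nb_chan++;
	}
	return nb_chan;
}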
Example #17
GF_EXPORT
void gf_sg_vrml_field_clone(void *dest, void *orig, u32 field_type, GF_SceneGraph *inScene)
{
	u32 size, i, sf_type;
	void *dst_field, *orig_field;

	if (!dest || !orig) return;

	switch (field_type) {
	case GF_SG_VRML_SFBOOL:
		memcpy(dest, orig, sizeof(SFBool));
		break;
	case GF_SG_VRML_SFCOLOR:
		memcpy(dest, orig, sizeof(SFColor));
		break;
	case GF_SG_VRML_SFFLOAT:
		memcpy(dest, orig, sizeof(SFFloat));
		break;
	case GF_SG_VRML_SFINT32:
		memcpy(dest, orig, sizeof(SFInt32));
		break;
	case GF_SG_VRML_SFROTATION:
		memcpy(dest, orig, sizeof(SFRotation));
		break;
	case GF_SG_VRML_SFTIME:
		memcpy(dest, orig, sizeof(SFTime));
		break;
	case GF_SG_VRML_SFVEC2F:
		memcpy(dest, orig, sizeof(SFVec2f));
		break;
	case GF_SG_VRML_SFVEC3F:
		memcpy(dest, orig, sizeof(SFVec3f));
		break;
	case GF_SG_VRML_SFATTRREF:
		memcpy(dest, orig, sizeof(SFAttrRef));
		break;
	case GF_SG_VRML_SFSTRING:
		if ( ((SFString*)dest)->buffer) gf_free(((SFString*)dest)->buffer);
		if ( ((SFString*)orig)->buffer )
			((SFString*)dest)->buffer = gf_strdup(((SFString*)orig)->buffer);
		else
			((SFString*)dest)->buffer = NULL;
		break;
	case GF_SG_VRML_SFURL:
		if ( ((SFURL *)dest)->url ) gf_free( ((SFURL *)dest)->url );
		((SFURL *)dest)->OD_ID = ((SFURL *)orig)->OD_ID;
		if (((SFURL *)orig)->url)
			((SFURL *)dest)->url = gf_strdup(((SFURL *)orig)->url);
		else
			((SFURL *)dest)->url = NULL;
		break;
	case GF_SG_VRML_SFIMAGE:
		if (((SFImage *)dest)->pixels) gf_free(((SFImage *)dest)->pixels);
		((SFImage *)dest)->width = ((SFImage *)orig)->width;
		((SFImage *)dest)->height = ((SFImage *)orig)->height;
		((SFImage *)dest)->numComponents  = ((SFImage *)orig)->numComponents;
		size = ((SFImage *)dest)->width * ((SFImage *)dest)->height * ((SFImage *)dest)->numComponents;
		((SFImage *)dest)->pixels = (u8*)gf_malloc(sizeof(char)*size);
		memcpy(((SFImage *)dest)->pixels, ((SFImage *)orig)->pixels, sizeof(char)*size);
		break;
	case GF_SG_VRML_SFCOMMANDBUFFER:
	{
		SFCommandBuffer *cb_dst = (SFCommandBuffer *)dest;
		SFCommandBuffer *cb_src = (SFCommandBuffer *)orig;

		cb_dst->bufferSize = cb_src->bufferSize;
		if (cb_dst->bufferSize && !gf_list_count(cb_src->commandList) ) {
			cb_dst->buffer = (u8*)gf_realloc(cb_dst->buffer, sizeof(char)*cb_dst->bufferSize);
			memcpy(cb_dst->buffer, cb_src->buffer, sizeof(char)*cb_src->bufferSize);
		} else {
			u32 j, c2;
			if (cb_dst->buffer) gf_free(cb_dst->buffer);
			cb_dst->buffer = NULL;
			/*clone command list*/
			c2 = gf_list_count(cb_src->commandList);
			for (j=0; j<c2;j++) {
				GF_Command *sub_com = (GF_Command *)gf_list_get(cb_src->commandList, j);
				GF_Command *new_com = gf_sg_vrml_command_clone(sub_com, inScene, 0);
				gf_list_add(cb_dst->commandList, new_com);
			}
		}
	}
		break;

	/*simply copy text string*/
	case GF_SG_VRML_SFSCRIPT:
		if (((SFScript*)dest)->script_text) gf_free(((SFScript*)dest)->script_text);
		((SFScript*)dest)->script_text = NULL;
		if ( ((SFScript*)orig)->script_text)
			((SFScript *)dest)->script_text = (char *)gf_strdup( (char*) ((SFScript*)orig)->script_text );
		break;


	//simple MFFields, do a memcpy
	case GF_SG_VRML_MFBOOL:
	case GF_SG_VRML_MFFLOAT:
	case GF_SG_VRML_MFTIME:
	case GF_SG_VRML_MFINT32:
	case GF_SG_VRML_MFVEC3F:
	case GF_SG_VRML_MFVEC2F:
	case GF_SG_VRML_MFCOLOR:
	case GF_SG_VRML_MFROTATION:
	case GF_SG_VRML_MFATTRREF:
		size = gf_sg_vrml_get_sf_size(field_type) * ((GenMFField *)orig)->count;
		if (((GenMFField *)orig)->count != ((GenMFField *)dest)->count) {
			((GenMFField *)dest)->array = gf_realloc(((GenMFField *)dest)->array, size);
			((GenMFField *)dest)->count = ((GenMFField *)orig)->count;
		}
		memcpy(((GenMFField *)dest)->array, ((GenMFField *)orig)->array, size);
		break;
	//complex MFFields
	case GF_SG_VRML_MFSTRING:
	case GF_SG_VRML_MFIMAGE:
	case GF_SG_VRML_MFURL:
	case GF_SG_VRML_MFSCRIPT:
		size = ((GenMFField *)orig)->count;
		gf_sg_vrml_mf_reset(dest, field_type);
		gf_sg_vrml_mf_alloc(dest, field_type, size);
		sf_type = gf_sg_vrml_get_sf_type(field_type);
		//duplicate all items
		for (i=0; i<size; i++) {
			gf_sg_vrml_mf_get_item(dest, field_type, &dst_field, i);
			gf_sg_vrml_mf_get_item(orig, field_type, &orig_field, i);
			gf_sg_vrml_field_copy(dst_field, orig_field, sf_type);
		}
		break;
	}
}
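
A minimal usage sketch for the clone function above, duplicating an SFString field. In the code above the inScene argument is only consulted when cloning SFCommandBuffer contents, so for a plain string it can simply be the target scene graph; the surrounding setup and names here are illustrative assumptions.

/* Illustrative only: clone one SFString field with gf_sg_vrml_field_clone. */
void clone_sfstring_example(GF_SceneGraph *sg)
{
	SFString src, dst;
	memset(&src, 0, sizeof(SFString));
	memset(&dst, 0, sizeof(SFString));
	src.buffer = gf_strdup("hello world");

	/* dest must be a valid field of the same type; its previous buffer (here NULL) is freed */
	gf_sg_vrml_field_clone(&dst, &src, GF_SG_VRML_SFSTRING, sg);

	/* dst.buffer is now an independent copy of src.buffer */
	gf_free(dst.buffer);
	gf_free(src.buffer);
}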
Example #18
GF_Node *CloneNodeForEditing(GF_SceneGraph *inScene, GF_Node *orig) //, GF_Node *cloned_parent)
{
	u32 i, j, count;
	GF_Node *node, *child, *tmp;
	GF_List *list, *list2;
	GF_FieldInfo field_orig, field;

	/*this is not a mistake*/
	if (!orig) return NULL;

	/*check for DEF/USE
	if (orig->sgprivate->NodeID) {
		node = gf_sg_find_node(inScene, orig->sgprivate->NodeID);
		//node already created, USE
		if (node) {
			gf_node_register(node, cloned_parent);
			return node;
		}
	}
	*/
	/*create a node*/
/*
	if (orig->sgprivate->tag == TAG_MPEG4_ProtoNode) {
		proto_node = ((GF_ProtoInstance *)orig)->proto_interface;
		//create the instance but don't load the code -c we MUST wait for ISed routes to be cloned before
		node = gf_sg_proto_create_node(inScene, proto_node, (GF_ProtoInstance *) orig);
	} else {
*/
	node = gf_node_new(inScene, gf_node_get_tag(orig));
//	}

	count = gf_node_get_field_count(orig);

	/*copy each field*/
	for (i=0; i<count; i++) {
		gf_node_get_field(orig, i, &field_orig);

		/*get target ptr*/
		gf_node_get_field(node, i, &field);

		assert(field.eventType==field_orig.eventType);
		assert(field.fieldType==field_orig.fieldType);

		/*duplicate it*/
		switch (field.fieldType) {
		case GF_SG_VRML_SFNODE:
			child = CloneNodeForEditing(inScene, (GF_Node *) (* ((GF_Node **) field_orig.far_ptr)));//, node);
			*((GF_Node **) field.far_ptr) = child;
			break;
		case GF_SG_VRML_MFNODE:
			list = *( (GF_List **) field_orig.far_ptr);
			list2 = *( (GF_List **) field.far_ptr);

			for (j=0; j<gf_list_count(list); j++) {
				tmp = (GF_Node *)gf_list_get(list, j);
				child = CloneNodeForEditing(inScene, tmp);//, node);
				gf_list_add(list2, child);
			}
			break;
		default:
			gf_sg_vrml_field_copy(field.far_ptr, field_orig.far_ptr, field.fieldType);
			break;
		}
	}
	/*register node
	if (orig->sgprivate->NodeID) {
		Node_SetID(node, orig->sgprivate->NodeID);
		gf_node_register(node, cloned_parent);
	}*/

	/*init node before creating ISed routes so the eventIn handler are in place*/
	if (gf_node_get_tag(node) != TAG_ProtoNode) gf_node_init(node);

	return node;
}
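
A usage sketch for the editing clone above: it recurses over SFNode/MFNode fields and deep-copies every other field, so the returned tree is fully independent of the source. Registration and attachment are left to the caller, as hinted by the commented-out DEF/USE code; the wrapper below is illustrative only.

/* Illustrative only: duplicate a sub-tree for local editing in the same scene graph. */
GF_Node *duplicate_subtree(GF_SceneGraph *sg, GF_Node *orig)
{
	GF_Node *copy = CloneNodeForEditing(sg, orig);
	if (!copy) return NULL;
	/* keep the copy alive until it is attached somewhere (see the commented-out
	   gf_node_register calls in the function above) */
	gf_node_register(copy, NULL);
	return copy;
}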
Example #19
GF_EXPORT
u32 gf_modules_get_count(GF_ModuleManager *pm)
{
	if (!pm) return 0;
	return gf_list_count(pm->plug_list);
}
Example #20
static void term_on_media_add(void *user_priv, GF_ClientService *service, GF_Descriptor *media_desc, Bool no_scene_check)
{
	u32 i, min_od_id;
	GF_MediaObject *the_mo;
	GF_Scene *scene;
	GF_ObjectManager *odm, *root;
	GF_ObjectDescriptor *od;
	GET_TERM();

	root = service->owner;
	if (!root) {
	  GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] has no root, aborting!\n", service->url));
	  return;
	}
	if (root->flags & GF_ODM_DESTROYED) {
	  GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Service %s] root has been scheduled for destruction - aborting!\n", service->url));
	  return;
	}
	scene = root->subscene ? root->subscene : root->parentscene;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Service %s] %s\n", service->url, media_desc ? "Adding new media object" : "Regenerating scene graph"));
	if (!media_desc) {
		if (!no_scene_check) gf_scene_regenerate(scene);
		return;
	}

	switch (media_desc->tag) {
	case GF_ODF_OD_TAG:
	case GF_ODF_IOD_TAG:
		if (root && (root->net_service == service)) {
			od = (GF_ObjectDescriptor *) media_desc;
			break;
		}
	default:
		gf_odf_desc_del(media_desc);
		return;
	}

	gf_term_lock_net(term, 1);
	/*objects declared this way are not part of an OD stream and are considered dynamic*/
/*	od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID; */

	/*check if we have a mediaObject in the scene not attached and matching this object*/
	the_mo = NULL;
	odm = NULL;
	min_od_id = 0;
	for (i=0; i<gf_list_count(scene->scene_objects); i++) {
		char *frag, *ext;
		GF_ESD *esd;
		char *url;
		u32 match_esid = 0;
		GF_MediaObject *mo = gf_list_get(scene->scene_objects, i);

		if ((mo->OD_ID != GF_MEDIA_EXTERNAL_ID) && (min_od_id<mo->OD_ID))
			min_od_id = mo->OD_ID;

		if (!mo->odm) continue;
		/*if object is attached to a service, don't bother looking in a different one*/
		if (mo->odm->net_service && (mo->odm->net_service != service)) continue;

		/*already assigned object - this may happen since the compositor has no control over when objects are declared by the service,
		therefore opening file#video and file#audio may result in the objects being declared twice if the service doesn't
		keep track of declared objects*/
		if (mo->odm->OD) {
			if (od->objectDescriptorID && is_same_od(mo->odm->OD, od)) {
				/*reassign OD ID*/
				mo->OD_ID = od->objectDescriptorID;
				gf_odf_desc_del(media_desc);
				gf_term_lock_net(term, 0);
				return;
			}
			continue;
		}
		if (mo->OD_ID != GF_MEDIA_EXTERNAL_ID) {
			if (mo->OD_ID == od->objectDescriptorID) {
				the_mo = mo;
				odm = mo->odm;
				break;
			}
			continue;
		}
		if (!mo->URLs.count || !mo->URLs.vals[0].url) continue;

		frag = NULL;
		ext = strrchr(mo->URLs.vals[0].url, '#');
		if (ext) {
			frag = strchr(ext, '=');
			ext[0] = 0;
		}
		url = mo->URLs.vals[0].url;
		if (!strnicmp(url, "file://localhost", 16)) url += 16;
		else if (!strnicmp(url, "file://", 7)) url += 7;
		else if (!strnicmp(url, "gpac://", 7)) url += 7;
		else if (!strnicmp(url, "pid://", 6)) match_esid = atoi(url+6);

		if (!match_esid && !strstr(service->url, url)) {
			if (ext) ext[0] = '#';
			continue;
		}
		if (ext) ext[0] = '#';

		esd = gf_list_get(od->ESDescriptors, 0);
		if (match_esid && (esd->ESID != match_esid))
			continue;
		/*match type*/
		switch (esd->decoderConfig->streamType) {
		case GF_STREAM_VISUAL:
			if (mo->type != GF_MEDIA_OBJECT_VIDEO) continue;
			break;
		case GF_STREAM_AUDIO:
			if (mo->type != GF_MEDIA_OBJECT_AUDIO) continue;
			break;
		case GF_STREAM_PRIVATE_MEDIA:
			if ((mo->type != GF_MEDIA_OBJECT_AUDIO) && (mo->type != GF_MEDIA_OBJECT_VIDEO)) continue;
			break;
		case GF_STREAM_SCENE:
			if (mo->type != GF_MEDIA_OBJECT_UPDATES) continue;
			break;
		default:
			continue;
		}
		if (frag) {
			u32 frag_id = 0;
			u32 ID = od->objectDescriptorID;
			if (ID==GF_MEDIA_EXTERNAL_ID) ID = esd->ESID;
			frag++;
			frag_id = atoi(frag);
			if (ID!=frag_id) continue;
		}
		the_mo = mo;
		odm = mo->odm;
		break;
	}

	/*add a pass on scene->resources to check for min_od_id,
	otherwise another module may declare an object with ID 0 from
	another thread, which will assert (only one object with a given OD ID)*/
	for (i=0; i<gf_list_count(scene->resources); i++) {
		GF_ObjectManager *an_odm = gf_list_get(scene->resources, i);

		if (an_odm->OD && (an_odm->OD->objectDescriptorID != GF_MEDIA_EXTERNAL_ID) && (min_od_id < an_odm->OD->objectDescriptorID))
			min_od_id = an_odm->OD->objectDescriptorID;
	}

	if (!odm) {
		odm = gf_odm_new();
		odm->term = term;
		odm->parentscene = scene;
		gf_list_add(scene->resources, odm);
	}
	odm->OD = od;
	odm->mo = the_mo;
	odm->flags |= GF_ODM_NOT_IN_OD_STREAM;
	if (!od->objectDescriptorID) {
		od->objectDescriptorID = min_od_id + 1;
	}

	if (the_mo) the_mo->OD_ID = od->objectDescriptorID;
	if (!scene->selected_service_id)
		scene->selected_service_id = od->ServiceID;


	/*net is unlocked before setting up the object as this might trigger events going into JS and deadlock
	with the compositor*/
	gf_term_lock_net(term, 0);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] setup object - MO %08x\n", odm->OD->objectDescriptorID, odm->mo));
	gf_odm_setup_object(odm, service);

	/*OD inserted by service: resetup scene*/
	if (!no_scene_check && scene->is_dynamic_scene) gf_scene_regenerate(scene);
}
Example #21
GF_EXPORT
GF_Err gf_sm_aggregate(GF_SceneManager *ctx, u16 ESID)
{
    GF_Err e;
    u32 i, stream_count;
#ifndef GPAC_DISABLE_VRML
    u32 j;
    GF_AUContext *au;
    GF_Command *com;
#endif

    e = GF_OK;

#if DEBUG_RAP
    com_count = 0;
    stream_count = gf_list_count(ctx->streams);
    for (i=0; i<stream_count; i++) {
        GF_StreamContext *sc = (GF_StreamContext *)gf_list_get(ctx->streams, i);
        if (sc->streamType == GF_STREAM_SCENE) {
            au_count = gf_list_count(sc->AUs);
            for (j=0; j<au_count; j++) {
                au = (GF_AUContext *)gf_list_get(sc->AUs, j);
                com_count += gf_list_count(au->commands);
            }
        }
    }
    GF_LOG(GF_LOG_INFO, GF_LOG_SCENE, ("[SceneManager] Making RAP with %d commands\n", com_count));
#endif

    stream_count = gf_list_count(ctx->streams);
    for (i=0; i<stream_count; i++) {
        GF_AUContext *carousel_au;
        GF_List *carousel_commands;
        GF_StreamContext *aggregate_on_stream;
        GF_StreamContext *sc = (GF_StreamContext *)gf_list_get(ctx->streams, i);
        if (ESID && (sc->ESID!=ESID)) continue;

        /*locate the AU in which our commands will be aggregated*/
        carousel_au = NULL;
        carousel_commands = NULL;
        aggregate_on_stream = sc->aggregate_on_esid ? gf_sm_get_stream(ctx, sc->aggregate_on_esid) : NULL;
        if (aggregate_on_stream==sc) {
            carousel_commands = gf_list_new();
        } else if (aggregate_on_stream) {
            if (!gf_list_count(aggregate_on_stream->AUs)) {
                carousel_au = gf_sm_stream_au_new(aggregate_on_stream, 0, 0, 1);
            } else {
                /* assert we already performed aggregation */
                assert(gf_list_count(aggregate_on_stream->AUs)==1);
                carousel_au = gf_list_get(aggregate_on_stream->AUs, 0);
            }
            carousel_commands = carousel_au->commands;
        }
        /*TODO - do this as well for ODs*/
#ifndef GPAC_DISABLE_VRML
        if (sc->streamType == GF_STREAM_SCENE) {
            Bool has_modif = 0;
            /*we check for each stream if it is a base stream (SceneReplace ...) - several streams may carry RAPs if inline nodes are used*/
            Bool base_stream_found = 0;

            /*in DIMS we use an empty initial AU with no commands to signal the RAP*/
            if (sc->objectType == GPAC_OTI_SCENE_DIMS) base_stream_found = 1;

            /*apply all commands - this will also apply the SceneReplace*/
            while (gf_list_count(sc->AUs)) {
                u32 count;
                au = (GF_AUContext *) gf_list_get(sc->AUs, 0);
                gf_list_rem(sc->AUs, 0);

                /*AU not aggregated*/
                if (au->flags & GF_SM_AU_NOT_AGGREGATED) {
                    gf_sm_au_del(sc, au);
                    continue;
                }

                count = gf_list_count(au->commands);

                for (j=0; j<count; j++) {
                    u32 store=0;
                    com = gf_list_get(au->commands, j);
                    if (!base_stream_found) {
                        switch (com->tag) {
                        case GF_SG_SCENE_REPLACE:
                        case GF_SG_LSR_NEW_SCENE:
                        case GF_SG_LSR_REFRESH_SCENE:
                            base_stream_found = 1;
                            break;
                        }
                    }

                    /*aggregate the command*/

                    /*if stream doesn't carry a carousel or carries the base carousel (scene replace), always apply the command*/
                    if (base_stream_found || !sc->aggregate_on_esid) {
                        store = 0;
                    }
                    /*otherwise, check whether the command should be kept in this stream as is, or can be aggregated on this stream*/
                    else {
                        switch (com->tag) {
                        /*the following commands do not impact a sub-tree (eg do not deal with nodes), we cannot
                        aggregate them... */
                        case GF_SG_ROUTE_REPLACE:
                        case GF_SG_ROUTE_DELETE:
                        case GF_SG_ROUTE_INSERT:
                        case GF_SG_PROTO_INSERT:
                        case GF_SG_PROTO_DELETE:
                        case GF_SG_PROTO_DELETE_ALL:
                        case GF_SG_GLOBAL_QUANTIZER:
                        case GF_SG_LSR_RESTORE:
                        case GF_SG_LSR_SAVE:
                        case GF_SG_LSR_SEND_EVENT:
                        case GF_SG_LSR_CLEAN:
                            /*todo check in which category to put these commands*/
//						case GF_SG_LSR_ACTIVATE:
//						case GF_SG_LSR_DEACTIVATE:
                            store = 1;
                            break;
                        /*other commands:
                        	!!! we need to know if the target node of the command has been inserted in this stream !!!

                        This is a tedious task, for now we will consider the following cases:
                        	- locate a similar command in the stored list: remove the similar one and aggregate on stream
                        	- by default all AUs are stored if the stream is in aggregate mode - we should fix that by checking insertion points:
                        	 if a command applies to a node that has been inserted in this stream, we can aggregate, otherwise store
                        */
                        default:
                            /*check if we can directly store the command*/
                            assert(carousel_commands);
                            store = store_or_aggregate(sc, com, carousel_commands, &has_modif);
                            break;
                        }
                    }

                    switch (store) {
                    /*command has been merged with a previous command in carousel and needs to be destroyed*/
                    case 2:
                        gf_list_rem(au->commands, j);
                        j--;
                        count--;
                        gf_sg_command_del((GF_Command *)com);
                        break;
                    /*command shall be moved to carousel without being applied*/
                    case 1:
                        gf_list_insert(carousel_commands, com, 0);
                        gf_list_rem(au->commands, j);
                        j--;
                        count--;
                        break;
                    /*command can be applied*/
                    default:
                        e = gf_sg_command_apply(ctx->scene_graph, com, 0);
                        break;
                    }
                }
                gf_sm_au_del(sc, au);
            }

            /*and recreate scene replace*/
            if (base_stream_found) {
                au = gf_sm_stream_au_new(sc, 0, 0, 1);

                switch (sc->objectType) {
                case GPAC_OTI_SCENE_BIFS:
                case GPAC_OTI_SCENE_BIFS_V2:
                    com = gf_sg_command_new(ctx->scene_graph, GF_SG_SCENE_REPLACE);
                    break;
                case GPAC_OTI_SCENE_LASER:
                    com = gf_sg_command_new(ctx->scene_graph, GF_SG_LSR_NEW_SCENE);
                    break;
                case GPAC_OTI_SCENE_DIMS:
                /* We do not create a new command, empty AU is enough in DIMS*/
                default:
                    com = NULL;
                    break;
                }

                if (com) {
                    com->node = ctx->scene_graph->RootNode;
                    ctx->scene_graph->RootNode = NULL;
                    gf_list_del(com->new_proto_list);
                    com->new_proto_list = ctx->scene_graph->protos;
                    ctx->scene_graph->protos = NULL;
                    /*indicate the command is the aggregated scene graph, so that PROTOs and ROUTEs
                    are taken from the scenegraph when encoding*/
                    com->aggregated = 1;
                    gf_list_add(au->commands, com);
                }
            }
            /*update carousel flags of the AU*/
            else if (carousel_commands) {
                /*if current stream carries its own carousel*/
                if (!carousel_au) {
                    carousel_au = gf_sm_stream_au_new(sc, 0, 0, 1);
                    gf_list_del(carousel_au->commands);
                    carousel_au->commands = carousel_commands;
                }
                carousel_au->flags |= GF_SM_AU_RAP | GF_SM_AU_CAROUSEL;
                if (has_modif) carousel_au->flags |= GF_SM_AU_MODIFIED;
            }
        }
#endif
    }
    return e;
}
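
The ESID filter in the loop above is `if (ESID && (sc->ESID!=ESID)) continue;`, so passing 0 aggregates every stream of the context. A minimal calling sketch (the context is assumed to have been filled elsewhere, e.g. by a scene loader):

/* Illustrative only: rebuild the RAP/carousel state for all streams of a loaded context. */
GF_Err make_rap_for_all_streams(GF_SceneManager *ctx)
{
	/* ESID 0 means "no filter": every stream in ctx->streams is processed */
	return gf_sm_aggregate(ctx, 0);
}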
Example #22
static void term_on_command(void *user_priv, GF_ClientService *service, GF_NetworkCommand *com, GF_Err response)
{
	GF_Channel *ch;
	GET_TERM();

	if (com->command_type==GF_NET_BUFFER_QUERY) {
		GF_List *od_list;
		u32 i;
		GF_ObjectManager *odm;
		com->buffer.max = 0;
		com->buffer.min = com->buffer.occupancy = (u32) -1;
		if (!service->owner) {
			com->buffer.occupancy = 0;
			return;
		}

		/*browse all channels in the scene, running on this service, and get buffer info*/
		od_list = NULL;
		if (service->owner->subscene) {
			od_list = service->owner->subscene->resources;
		} else if (service->owner->parentscene) {
			od_list = service->owner->parentscene->resources;
		}
		if (!od_list) {
			com->buffer.occupancy = 0;
			return;
		}
		/*get exclusive access to media scheduler, to make sure ODs are not being
		manipulated*/
		gf_mx_p(term->mm_mx);
		if (!gf_list_count(od_list))
			GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM] No object manager found for the scene (URL: %s), buffer occupancy will remain unchanged\n", service->url));
		i=0;
		while ((odm = (GF_ObjectManager*)gf_list_enum(od_list, &i))) {
			u32 j, count;
			if (!odm->codec) continue;
			count = gf_list_count(odm->channels);
			for (j=0; j<count; j++) {
				GF_Channel *ch = (GF_Channel *)gf_list_get(odm->channels, j);
				if (ch->service != service) continue;
				if (ch->es_state != GF_ESM_ES_RUNNING) continue;
				if (/*!ch->MaxBuffer || */ch->dispatch_after_db || ch->bypass_sl_and_db || ch->IsEndOfStream) continue;
				//perform buffer management only on base layer - this is because we don't signal which ESs are on/off in the underlying service ...
				if (ch->esd->dependsOnESID) continue;
				if (ch->MaxBuffer>com->buffer.max) com->buffer.max = ch->MaxBuffer;
				if (ch->MinBuffer<com->buffer.min) com->buffer.min = ch->MinBuffer;
				if (ch->IsClockInit && (u32) ch->BufferTime  < com->buffer.occupancy) {
					/*if we don't have more units (compressed or not) than requested max for the composition memory, request more data*/
					if (odm->codec->CB->UnitCount + ch->AU_Count <= odm->codec->CB->Capacity) {
//						com->buffer.occupancy = 0;
						com->buffer.occupancy = ch->BufferTime;
					} else {
						com->buffer.occupancy = ch->BufferTime;
					}
				}
			}
		}
		gf_mx_v(term->mm_mx);
//		fprintf(stderr, "Buffer occupancy %d\n", com->buffer.occupancy);
		if (com->buffer.occupancy==(u32) -1) com->buffer.occupancy = 0;
		return;
	}
	if (com->command_type==GF_NET_SERVICE_INFO) {
		GF_Event evt;
		evt.type = GF_EVENT_METADATA;
		gf_term_send_event(term, &evt);
		return;
	}


	if (!com->base.on_channel) return;


	ch = gf_term_get_channel(service, com->base.on_channel);
	if (!ch) return;

	switch (com->command_type) {
	/*SL reconfiguration*/
	case GF_NET_CHAN_RECONFIG:
		gf_term_lock_net(term, 1);
		gf_es_reconfig_sl(ch, &com->cfg.sl_config, com->cfg.use_m2ts_sections);
		gf_term_lock_net(term, 0);
		return;
	/*time mapping (TS to media-time)*/
	case GF_NET_CHAN_MAP_TIME:

		if (ch->esd->dependsOnESID) {
			//ignore everything
		} else {
			u32 i;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: before mapping: seed TS %d - TS offset %d\n", ch->esd->ESID, ch->seed_ts, ch->ts_offset));
			ch->seed_ts = com->map_time.timestamp;
			ch->ts_offset = (u32) (com->map_time.media_time*1000);
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: mapping TS "LLD" to media time %f - current time %d\n", ch->esd->ESID, com->map_time.timestamp, com->map_time.media_time, gf_clock_time(ch->clock)));
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: after mapping: seed TS %d - TS offset %d\n", ch->esd->ESID, ch->seed_ts, ch->ts_offset));

			if (com->map_time.reset_buffers) {
				gf_es_reset_buffers(ch);
			}
			/*if we were reassembling an AU, do not perform clock init check when dispatching it since we computed its timestamps
			according to the previous clock origin*/
			else {
				gf_mx_p(ch->mx);
				ch->skip_time_check_for_pending = 1;
				gf_mx_v(ch->mx);
			}
			/*if the channel is the clock, force a re-init*/
			if (gf_es_owns_clock(ch)) {
				ch->IsClockInit = 0;
				gf_clock_reset(ch->clock);
			}
			else if (ch->odm->flags & GF_ODM_INHERIT_TIMELINE) {
				ch->IsClockInit = 0;
//				ch->ts_offset -= ch->seed_ts*1000/ch->ts_res;
			}

			for (i=0; i<gf_list_count(ch->odm->channels); i++) {
				GF_Channel *a_ch = gf_list_get(ch->odm->channels, i);
				if (ch==a_ch) continue;
				if (! a_ch->esd->dependsOnESID) continue;
				a_ch->seed_ts = ch->seed_ts;
				a_ch->IsClockInit = 0;
				a_ch->ts_offset = ch->ts_offset;
			}
		}
		break;
	/*duration changed*/
	case GF_NET_CHAN_DURATION:
		gf_odm_set_duration(ch->odm, ch, (u32) (1000*com->duration.duration));
		break;
	case GF_NET_CHAN_BUFFER_QUERY:
		if (ch->IsEndOfStream) {
			com->buffer.max = com->buffer.min = com->buffer.occupancy = 0;
		} else {
			com->buffer.max = ch->MaxBuffer;
			com->buffer.min = ch->MinBuffer;
			com->buffer.occupancy = ch->BufferTime;
		}
		break;
	case GF_NET_CHAN_DRM_CFG:
		gf_term_lock_net(term, 1);
		gf_es_config_drm(ch, &com->drm_cfg);
		gf_term_lock_net(term, 0);
		return;
	case GF_NET_CHAN_GET_ESD:
		gf_term_lock_net(term, 1);
		com->cache_esd.esd = ch->esd;
		com->cache_esd.is_iod_stream = (ch->odm->subscene /*&& (ch->odm->subscene->root_od==ch->odm)*/) ? 1 : 0;
		gf_term_lock_net(term, 0);
		return;
	default:
		return;
	}
}
Example #23
void visual_2d_setup_projection(GF_VisualManager *visual, GF_TraverseState *tr_state)
{
	GF_Rect rc;

	tr_state->visual = visual;
#ifndef GPAC_DISABLE_VRML
	tr_state->backgrounds = visual->back_stack;
	tr_state->viewpoints = visual->view_stack;
#endif

	/*setup clipper*/
	if (visual->center_coords) {
		if (!visual->offscreen) {
			if (visual->compositor->scalable_zoom)
				rc = gf_rect_center(INT2FIX(visual->compositor->display_width), INT2FIX(visual->compositor->display_height));
			else
				rc = gf_rect_center(INT2FIX(visual->compositor->output_width + 2*visual->compositor->vp_x), INT2FIX(visual->compositor->output_height + 2*visual->compositor->vp_y));
		} else {
			rc = gf_rect_center(INT2FIX(visual->width), INT2FIX(visual->height));
		}
	} else {
		rc.x = 0;
		rc.width = INT2FIX(visual->width);
		rc.y = rc.height = INT2FIX(visual->height);
	}
	/*set top-transform to pixelMetrics*/
	if (!tr_state->pixel_metrics) gf_mx2d_add_scale(&tr_state->transform, tr_state->min_hsize, tr_state->min_hsize);

	visual->surf_rect = gf_rect_pixelize(&rc);

//	GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual2D] output rectangle setup - width %d height %d\n", visual->surf_rect.width, visual->surf_rect.height));

	/*setup top clipper*/
	if (visual->center_coords) {
		rc = gf_rect_center(INT2FIX(visual->width), INT2FIX(visual->height));
	} else {
		rc.width = INT2FIX(visual->width);
		rc.height = INT2FIX(visual->height);
		rc.x = 0;
		rc.y = rc.height;
		if (visual->compositor->visual==visual) {
			rc.x += INT2FIX(visual->compositor->vp_x);
			rc.y += INT2FIX(visual->compositor->vp_y);
		}
	}

	/*setup viewport*/
#ifndef GPAC_DISABLE_VRML
	if (gf_list_count(visual->view_stack)) {
		tr_state->traversing_mode = TRAVERSE_BINDABLE;
		tr_state->bounds = rc;
		gf_node_traverse((GF_Node *) gf_list_get(visual->view_stack, 0), tr_state);
	}
#endif

#ifndef GPAC_DISABLE_3D
	gf_mx_init(tr_state->model_matrix);
#endif

	visual->top_clipper = gf_rect_pixelize(&rc);
	tr_state->clipper = rc;
//	GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual2D] Cliper setup - %d:%d@%dx%d\n", visual->top_clipper.x, visual->top_clipper.y, visual->top_clipper.width, visual->top_clipper.height));
}
Example #24
static void term_on_connect(void *user_priv, GF_ClientService *service, LPNETCHANNEL netch, GF_Err err)
{
	GF_Channel *ch;
	GF_ObjectManager *root;
	GET_TERM();

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] %s connection ACK received from %s - %s\n", netch ? "Channel" : "Service", service->url, gf_error_to_string(err) ));

	root = service->owner;
	if (root && (root->net_service != service)) {
		gf_term_message(term, service->url, "Incompatible module type", GF_SERVICE_ERROR);
		return;
	}
	/*this is service connection*/
	if (!netch) {
		gf_term_service_media_event(service->owner, GF_EVENT_MEDIA_SETUP_DONE);
		if (err) {
			char msg[5000];
			snprintf(msg, sizeof(msg)-1, "Cannot open %s", service->url);
			gf_term_message(term, service->url, msg, err);

			gf_term_service_media_event(service->owner, GF_EVENT_ERROR);

			/*destroy service only if attached*/
			if (root) {
				gf_term_lock_media_queue(term, 1);
				service->ifce->CloseService(service->ifce);
				root->net_service = NULL;
				if (service->owner && service->nb_odm_users) service->nb_odm_users--;
				service->owner = NULL;
				/*depends on module: some module could forget to call gf_term_on_disconnect */
				if ( gf_list_del_item(term->net_services, service) >= 0) {
					/*and queue for destroy*/
					gf_list_add(term->net_services_to_remove, service);
				}
				gf_term_lock_media_queue(term, 0);

				if (!root->parentscene) {
					GF_Event evt;
					evt.type = GF_EVENT_CONNECT;
					evt.connect.is_connected = 0;
					gf_term_send_event(term, &evt);
				} else {
					if (root->subscene) gf_scene_notify_event(root->subscene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, err);
					/*try to reinsert OD for VRML/X3D with multiple URLs:
					1- first remove from parent scene without destroying object, this will trigger a re-setup
					if other URLs are present
					2- then destroy object*/
					gf_scene_remove_object(root->parentscene, root, 0);
					gf_odm_disconnect(root, 1);
				}
				return;
			}
		}

		if (!root) {
			/*channel service connect*/
			u32 i;
			GF_ChannelSetup *cs;
			GF_List *ODs;

			if (!gf_list_count(term->channels_pending)) {
				return;
			}
			ODs = gf_list_new();
			gf_term_lock_net(term, 1);
			i=0;
			while ((cs = (GF_ChannelSetup*)gf_list_enum(term->channels_pending, &i))) {
				if (cs->ch->service != service) continue;
				gf_list_rem(term->channels_pending, i-1);
				i--;
				/*even if error do setup (channel needs to be deleted)*/
				if (gf_odm_post_es_setup(cs->ch, cs->dec, err) == GF_OK) {
					if (cs->ch->odm && (gf_list_find(ODs, cs->ch->odm)==-1) ) gf_list_add(ODs, cs->ch->odm);
				}
				gf_free(cs);
			}
			gf_term_lock_net(term, 0);
			/*finally setup all ODs concerned (we do this later in case of scalability)*/
			while (gf_list_count(ODs)) {
				GF_ObjectManager *odm = (GF_ObjectManager*)gf_list_get(ODs, 0);
				gf_list_rem(ODs, 0);
				/*force re-setup*/
				gf_scene_setup_object(odm->parentscene, odm);
			}
			gf_list_del(ODs);
		} else {
			/*setup od*/
			gf_odm_setup_entry_point(root, service->url);
		}
		/*load cache if requested*/
		if (!err && term->enable_cache) {
			err = gf_term_service_cache_load(service);
			/*not a fatal error*/
			if (err) gf_term_message(term, "GPAC Cache", "Cannot load cache", err);
		}
		return;
	}

	/*this is channel connection*/
	ch = gf_term_get_channel(service, netch);
	if (!ch) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Terminal] Channel connection ACK error: channel not found\n"));
		return;
	}

	/*confirm channel connection even if error - this allows playback of objects even if not all streams are setup*/
	gf_term_lock_net(term, 1);
	gf_es_on_connect(ch);
	gf_term_lock_net(term, 0);

	if (err && ((err!=GF_STREAM_NOT_FOUND) || (ch->esd->decoderConfig->streamType!=GF_STREAM_INTERACT))) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MEDIA, ("[Terminal] Channel %d connection error: %s\n", ch->esd->ESID, gf_error_to_string(err) ));
		ch->es_state = GF_ESM_ES_UNAVAILABLE;
/*		return;*/
	}

	if (ch->odm->mo) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Channel %d connected - %d objects opened\n", ch->esd->ESID, ch->odm->mo->num_open ));
	} else {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Channel %d connected - not attached to the scene\n", ch->esd->ESID));
	}
	/*Play requests are skipped until all channels are connected. We send a PLAY on the object in case
		1-OD user has requested a play
		2-this is a channel of the root OD
	*/
	if ( (ch->odm->mo && ch->odm->mo->num_open)
		|| !ch->odm->parentscene
	) {
		gf_odm_start(ch->odm, 0);
	}
#if 0
	else if (ch->odm->codec && ch->odm->codec->ck && ch->odm->codec->ck->no_time_ctrl) {
		gf_odm_play(ch->odm);
	}
#endif
}
Example #25
GF_Err gf_odf_codec_apply_com(GF_ODCodec *codec, GF_ODCom *command)
{
	GF_ODCom *com;
	GF_ODUpdate *odU, *odU_o;
	u32 i, count;
	count = gf_list_count(codec->CommandList);

	switch (command->tag) {
	case GF_ODF_OD_REMOVE_TAG:
		for (i = 0; i<count; i++) {
			com = (GF_ODCom *)gf_list_get(codec->CommandList, i);
			/*process OD updates*/
			if (com->tag == GF_ODF_OD_UPDATE_TAG) {
				u32 count, j, k;
				GF_ODRemove *odR = (GF_ODRemove *)command;
				odU = (GF_ODUpdate *)com;
				count = gf_list_count(odU->objectDescriptors);
				/*remove all descs*/
				for (k = 0; k<count; k++) {
					GF_ObjectDescriptor *od = (GF_ObjectDescriptor *)gf_list_get(odU->objectDescriptors, k);
					for (j = 0; j<odR->NbODs; j++) {
						if (od->objectDescriptorID == odR->OD_ID[j]) {
							gf_list_rem(odU->objectDescriptors, k);
							k--;
							count--;
							gf_odf_desc_del((GF_Descriptor *)od);
							break;
						}
					}
				}
				if (!gf_list_count(odU->objectDescriptors)) {
					gf_list_rem(codec->CommandList, i);
					i--;
					count--;
				}
			}
			/*process ESD updates*/
			else if (com->tag == GF_ODF_ESD_UPDATE_TAG) {
				u32 j;
				GF_ODRemove *odR = (GF_ODRemove *)command;
				GF_ESDUpdate *esdU = (GF_ESDUpdate*)com;
				for (j = 0; j<odR->NbODs; j++) {
					if (esdU->ODID == odR->OD_ID[j]) {
						gf_list_rem(codec->CommandList, i);
						i--;
						count--;
						gf_odf_com_del((GF_ODCom**)&esdU);
						break;
					}
				}
			}
		}
		return GF_OK;
	case GF_ODF_OD_UPDATE_TAG:
		odU_o = NULL;
		for (i = 0; i<count; i++) {
			odU_o = (GF_ODUpdate*)gf_list_get(codec->CommandList, i);
			/*process OD updates*/
			if (odU_o->tag == GF_ODF_OD_UPDATE_TAG) break;
			odU_o = NULL;
		}
		if (!odU_o) {
			odU_o = (GF_ODUpdate *)gf_odf_com_new(GF_ODF_OD_UPDATE_TAG);
			gf_list_add(codec->CommandList, odU_o);
		}
		odU = (GF_ODUpdate*)command;
		count = gf_list_count(odU->objectDescriptors);
		for (i = 0; i<count; i++) {
			Bool found = GF_FALSE;
			GF_ObjectDescriptor *od = (GF_ObjectDescriptor *)gf_list_get(odU->objectDescriptors, i);
			u32 j, count2 = gf_list_count(odU_o->objectDescriptors);
			for (j = 0; j<count2; j++) {
				GF_ObjectDescriptor *od2 = (GF_ObjectDescriptor *)gf_list_get(odU_o->objectDescriptors, j);
				if (od2->objectDescriptorID == od->objectDescriptorID) {
					found = GF_TRUE;
					break;
				}
			}
			if (!found) {
				GF_ObjectDescriptor *od_new;
				GF_Err e = gf_odf_desc_copy((GF_Descriptor*)od, (GF_Descriptor**)&od_new);
				if (e == GF_OK)
					gf_list_add(odU_o->objectDescriptors, od_new);
			}

		}
		return GF_OK;
	}
	return GF_NOT_SUPPORTED;
}
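
The function above folds one incoming OD command into the command list held by the codec so that the list always describes the current state: OD updates are merged per objectDescriptorID and OD/ESD removals prune earlier updates. A sketch of the calling pattern; the wrapper name is an assumption and only gf_odf_codec_apply_com is taken from the example.

/* Illustrative only: maintain an aggregated OD state by applying each newly
   received command to a long-lived "carousel" codec created elsewhere. */
GF_Err fold_od_command(GF_ODCodec *rap_codec, GF_ODCom *com)
{
	GF_Err e = gf_odf_codec_apply_com(rap_codec, com);
	/* GF_NOT_SUPPORTED means the command tag is neither an OD update nor an OD remove
	   and was left untouched by the aggregation above */
	return e;
}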
Example #26
GF_Err RTSP_WriteCommand(GF_RTSPSession *sess, GF_RTSPCommand *com, unsigned char *req_buffer, 
						 unsigned char **out_buffer, u32 *out_size)
{
	u32 i, cur_pos, size, count;
	char *buffer, temp[50];
	GF_RTSPTransport *trans;
	GF_X_Attribute *att;

	*out_buffer = NULL;

	size = RTSP_WRITE_STEPALLOC;
	buffer = (char *) gf_malloc(size);
	cur_pos = 0;

	//request
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, req_buffer);

	//then all headers
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept", com->Accept);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept-Encoding", com->Accept_Encoding);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept-Language", com->Accept_Language);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Authorization", com->Authorization);	
	if (com->Bandwidth) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Bandwidth: ");	
		RTSP_WRITE_INT(buffer, size, cur_pos, com->Bandwidth, 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}
	if (com->Blocksize) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Blocksize: ");
		RTSP_WRITE_INT(buffer, size, cur_pos, com->Blocksize, 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Cache-Control", com->Cache_Control);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Conference", com->Conference);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Connection", com->Connection);
	//if we have a body write the content length
	if (com->body) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Content-Length: ");
		RTSP_WRITE_INT(buffer, size, cur_pos, (u32) strlen(com->body), 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}
	//write the CSeq - use the SESSION CSeq
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "CSeq: ");
	RTSP_WRITE_INT(buffer, size, cur_pos, sess->CSeq, 0);
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	

	RTSP_WRITE_HEADER(buffer, size, cur_pos, "From", com->From);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Proxy-Authorization", com->Proxy_Authorization);	
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Proxy-Require", com->Proxy_Require);	

	//Range, only NPT
	if (com->Range && !com->Range->UseSMPTE) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Range: npt=");
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, com->Range->start);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");	
		if (com->Range->end > com->Range->start) {
			RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, com->Range->end);
		}
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}

	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Referer", com->Referer);	
	if (com->Scale != 0.0) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Scale: ");	
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, com->Scale);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Session", com->Session);	
	if (com->Speed != 0.0) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Speed: ");	
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, com->Speed);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");	
	}

	//transport info
	count = gf_list_count(com->Transports);
	if (count) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Transport: ");
		for (i=0; i<count; i++) {
			//line separator for headers
			if (i) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n ,");
			trans = (GF_RTSPTransport *) gf_list_get(com->Transports, i);

			//then write the structure
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->Profile);
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, (trans->IsUnicast ? ";unicast" : ";multicast"));
			if (trans->destination) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";destination=");
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->destination);
			}
			if (trans->source) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";source=");
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->source);
			}
			if (trans->IsRecord) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";mode=RECORD");
				if (trans->Append) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";append");
			}
			if (trans->IsInterleaved) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";interleaved=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->rtpID, 0);
				if (trans->rtcpID != trans->rtpID) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->rtcpID, 0);
				}
			}
			if (trans->port_first) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, (trans->IsUnicast ? ";server_port=" : ";port="));
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->port_first, 0);
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->port_last, 0);
			}
			if (/*trans->IsUnicast && */trans->client_port_first) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";client_port=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->client_port_first, 0);
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->client_port_last, 0);
			}
			//multicast specific
			if (!trans->IsUnicast) {
				if (trans->MulticastLayers) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";layers=");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->MulticastLayers, 0);
				}
				if (trans->TTL) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";ttl=");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->TTL, 0);
				}
			}
			if (trans->SSRC) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";ssrc=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->SSRC, 0);
			}
		}
		//done with transport
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "User-Agent", com->User_Agent);	

	//eXtensions
	count = gf_list_count(com->Xtensions);
	for (i=0; i<count; i++) {
		att = (GF_X_Attribute *) gf_list_get(com->Xtensions, i);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "x-");
		RTSP_WRITE_HEADER(buffer, size, cur_pos, att->Name, att->Value);	
	}

	//the end of header
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	//then body
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, com->body);
	//end of message? should not be needed, kept commented out for reference
//	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");

	*out_buffer = (unsigned char *)buffer;
	*out_size = (u32) strlen(buffer);
	return GF_OK;
}
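
The RTSP_WRITE_ALLOC_STR, RTSP_WRITE_HEADER, RTSP_WRITE_INT and RTSP_WRITE_FLOAT_WITHOUT_CHECK calls above are macros defined elsewhere in the library: they append text to a heap-allocated buffer, grow it when needed and advance the write position cur_pos. The sketch below re-implements that append-and-grow pattern with plain functions so the serialization logic can be read in isolation; rtsp_write_str, rtsp_write_header, RTSP_WRITE_STEP and the DESCRIBE request are illustrative only and are not GPAC API.

/* Minimal sketch of the buffer-growing pattern behind the RTSP_WRITE_* macros.
   Not the GPAC implementation; names and sizes are illustrative. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define RTSP_WRITE_STEP 256

/* append a string, reallocating the output buffer when needed */
static void rtsp_write_str(char **buf, unsigned *size, unsigned *pos, const char *str)
{
    unsigned len;
    if (!str) return;
    len = (unsigned) strlen(str);
    while (*pos + len + 1 > *size) {
        *size += RTSP_WRITE_STEP;
        *buf = (char *) realloc(*buf, *size);
    }
    memcpy(*buf + *pos, str, len);
    *pos += len;
    (*buf)[*pos] = 0;
}

/* append "Name: Value\r\n" only when the value is set, like RTSP_WRITE_HEADER */
static void rtsp_write_header(char **buf, unsigned *size, unsigned *pos,
                              const char *name, const char *value)
{
    if (!value) return;
    rtsp_write_str(buf, size, pos, name);
    rtsp_write_str(buf, size, pos, ": ");
    rtsp_write_str(buf, size, pos, value);
    rtsp_write_str(buf, size, pos, "\r\n");
}

int main(void)
{
    unsigned size = RTSP_WRITE_STEP, pos = 0;
    char *buf = (char *) malloc(size);
    buf[0] = 0;

    rtsp_write_str(&buf, &size, &pos, "DESCRIBE rtsp://example.com/stream RTSP/1.0\r\n");
    rtsp_write_header(&buf, &size, &pos, "CSeq", "1");
    rtsp_write_header(&buf, &size, &pos, "Accept", "application/sdp");
    rtsp_write_str(&buf, &size, &pos, "\r\n");

    fputs(buf, stdout);
    free(buf);
    return 0;
}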
Example #27
0
//WARNING: MOVIETIME IS EXPRESSED IN MEDIA TS
GF_Err GetMediaTime(GF_TrackBox *trak, Bool force_non_empty, u64 movieTime, u64 *MediaTime, s64 *SegmentStartTime, s64 *MediaOffset, u8 *useEdit, u64 *next_edit_start_plus_one)
{
#if 0
    GF_Err e;
    u32 sampleNumber, prevSampleNumber;
    u64 DTS, CTS, firstDTS;
#endif
    u32 i, count;
    Bool last_is_empty = 0;
    u64 time, lastSampleTime;
    s64 mtime;
    GF_EdtsEntry *ent;
    Double scale_ts;
    GF_SampleTableBox *stbl = trak->Media->information->sampleTable;

    if (next_edit_start_plus_one) *next_edit_start_plus_one = 0;
    *useEdit = 1;
    *MediaTime = 0;
    //no segment yet...
    *SegmentStartTime = -1;
    *MediaOffset = -1;
    if (!trak->moov->mvhd->timeScale || !trak->Media->mediaHeader->timeScale || !stbl->SampleSize) {
        return GF_ISOM_INVALID_FILE;
    }

    //no samples...
    if (!stbl->SampleSize->sampleCount) {
        lastSampleTime = 0;
    } else {
        lastSampleTime = trak->Media->mediaHeader->duration;
    }

    //No edits, 1 to 1 mapping
    if (! trak->editBox || !trak->editBox->editList) {
        *MediaTime = movieTime;
        //check this is in our media time line
        if ((*MediaTime > lastSampleTime)
#ifndef GPAC_DISABLE_ISOM_FRAGMENTS
                && !trak->moov->mov->moof
#endif
           ) {
            *MediaTime = lastSampleTime;
        }
        *useEdit = 0;
        return GF_OK;
    }
    //browse the edit list and get the time
    scale_ts = trak->Media->mediaHeader->timeScale;
    scale_ts /= trak->moov->mvhd->timeScale;

    time = 0;
    ent = NULL;
    count=gf_list_count(trak->editBox->editList->entryList);
    for (i=0; i<count; i++) {
        ent = (GF_EdtsEntry *)gf_list_get(trak->editBox->editList->entryList, i);
        if ( (time + ent->segmentDuration) * scale_ts > movieTime) {
            if (!force_non_empty || (ent->mediaTime >= 0)) {
                if (next_edit_start_plus_one) *next_edit_start_plus_one = 1 + (u64) ((time + ent->segmentDuration) * scale_ts);
                goto ent_found;
            }
        }
        time += ent->segmentDuration;
        last_is_empty = ent->segmentDuration ? 0 : 1;
    }

    if (last_is_empty) {
        ent = (GF_EdtsEntry *)gf_list_last(trak->editBox->editList->entryList);
        if (ent->mediaRate==1) {
            *MediaTime = movieTime + ent->mediaTime;
        } else {
            ent = (GF_EdtsEntry *)gf_list_get(trak->editBox->editList->entryList, 0);
            if (ent->mediaRate==-1) {
                u64 dur = (u64) (ent->segmentDuration * scale_ts);
                *MediaTime = (movieTime > dur) ? (movieTime-dur) : 0;
            }
        }
        *useEdit = 0;
        return GF_OK;
    }


    //we had nothing in the list (strange file but compliant...)
    //return the 1 to 1 mapped value of the last media sample
    if (!ent) {
        *MediaTime = movieTime;
        //check this is in our media time line
        if (*MediaTime > lastSampleTime) *MediaTime = lastSampleTime;
        *useEdit = 0;
        return GF_OK;
    }
    //request for a bigger time than what we can give: return the last sample (undefined behavior...)
    *MediaTime = lastSampleTime;
    return GF_OK;

ent_found:
    //OK, we found our entry, set the SegmentTime
    *SegmentStartTime = time;

    //we hit an empty edit, there's no media here...
    if (ent->mediaTime < 0) {
        *MediaTime = 0;
        return GF_OK;
    }
    //we request a dwell edit
    if (! ent->mediaRate) {
        *MediaTime = ent->mediaTime;
        //no media offset
        *MediaOffset = 0;
        *useEdit = 2;
        return GF_OK;
    }

    /*WARNING: this can be "-1" when doing searchForward mode (to prevent jumping to next entry)*/
    mtime = ent->mediaTime + movieTime - (time * trak->Media->mediaHeader->timeScale / trak->moov->mvhd->timeScale);
    if (mtime<0) mtime = 0;
    *MediaTime = (u64) mtime;
    *MediaOffset = ent->mediaTime;

#if 0
    //
    //Sanity check: is the requested time valid ? This is to cope with wrong EditLists
    //we have the translated time, but we need to make sure we have a sample at this time ...
    //we have to find a COMPOSITION time
    e = findEntryForTime(stbl, (u32) *MediaTime, 1, &sampleNumber, &prevSampleNumber);
    if (e) return e;

    //first case: our time is after the last sample DTS (it's a broken editList somehow)
    //set the media time to the last sample
    if (!sampleNumber && !prevSampleNumber) {
        *MediaTime = lastSampleTime;
        return GF_OK;
    }
    //get the appropriated sample
    if (!sampleNumber) sampleNumber = prevSampleNumber;

    stbl_GetSampleDTS(stbl->TimeToSample, sampleNumber, &DTS);
    CTS = 0;
    if (stbl->CompositionOffset) stbl_GetSampleCTS(stbl->CompositionOffset, sampleNumber, &CTS);

    //now get the entry sample (the entry time gives the CTS, and we need the DTS)
    e = findEntryForTime(stbl, (u32) ent->mediaTime, 0, &sampleNumber, &prevSampleNumber);
    if (e) return e;

    //oops, the mediaTime indicates a sample that is not in our media !
    if (!sampleNumber && !prevSampleNumber) {
        *MediaTime = lastSampleTime;
        return GF_ISOM_INVALID_FILE;
    }
    if (!sampleNumber) sampleNumber = prevSampleNumber;

    stbl_GetSampleDTS(stbl->TimeToSample, sampleNumber, &firstDTS);

    //and store the "time offset" of the desired sample in this segment
    //this is weird, used to rebuild the timeStamp when reading from the track, not the
    //media ...
    *MediaOffset = firstDTS;
#endif
    return GF_OK;
}
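
The core of the function above is the edit-list walk: accumulate segment durations until the requested time falls inside an edit, then map it into the media timeline as ent->mediaTime plus movieTime minus the rescaled segment start. The self-contained sketch below reproduces only that arithmetic (the movie time is expressed in the media timescale, as the warning at the top of the function states); it omits dwell edits, the empty-last-edit fallback and the segment/next-edit outputs. edit_entry_t and map_movie_to_media are illustrative names, not GPAC types.

/* Simplified sketch of the movie-time to media-time mapping performed above.
   Illustrative types and names only; not part of GPAC. */
#include <stdint.h>

typedef struct {
    uint64_t segment_duration;  /* expressed in the movie timescale */
    int64_t  media_time;        /* expressed in the media timescale, -1 for an empty edit */
} edit_entry_t;

/* movie_time is expressed in the media timescale, as in GetMediaTime above.
   Returns 0 on success, -1 when the time falls in an empty edit or past the list. */
static int map_movie_to_media(const edit_entry_t *edits, unsigned nb_edits,
                              uint64_t movie_time,
                              uint32_t movie_scale, uint32_t media_scale,
                              uint64_t *media_time)
{
    uint64_t time = 0; /* accumulated segment start, movie timescale */
    unsigned i;
    for (i = 0; i < nb_edits; i++) {
        double scaled_end = (double) (time + edits[i].segment_duration) * media_scale / movie_scale;
        if (scaled_end > (double) movie_time) {
            if (edits[i].media_time < 0) return -1; /* empty edit: no media here */
            /* same formula as above: media start of the edit plus the offset into it,
               with the accumulated segment start rescaled to media units */
            int64_t m = edits[i].media_time + (int64_t) movie_time
                        - (int64_t) (time * media_scale / movie_scale);
            *media_time = (m < 0) ? 0 : (uint64_t) m;
            return 0;
        }
        time += edits[i].segment_duration;
    }
    return -1; /* past the last edit */
}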
Example #28
0
void isor_declare_objects(ISOMReader *read)
{
	GF_ObjectDescriptor *od;
	GF_ESD *esd;
	const char *tag;
	u32 i, count, ocr_es_id, tlen, base_track, j, track_id;
	Bool highest_stream;
	char *opt;
	Bool add_ps_lower = GF_TRUE;

	ocr_es_id = 0;
	opt = (char*) gf_modules_get_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS", "yes");
	} else if (!strcmp(opt, "no")) {
		add_ps_lower = GF_FALSE;
	}

	/*TODO check for alternate tracks*/
	count = gf_isom_get_track_count(read->mov);
	for (i=0; i<count; i++) {
		if (!gf_isom_is_track_enabled(read->mov, i+1)) continue;

		switch (gf_isom_get_media_type(read->mov, i+1)) {
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_TEXT:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_SUBPIC:
			break;
		default:
			continue;
		}

		/*we declare only the highest video track (i.e. the track we play)*/
		highest_stream = GF_TRUE;
		track_id = gf_isom_get_track_id(read->mov, i+1);
		for (j = 0; j < count; j++) {
			if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) {
				highest_stream = GF_FALSE;
				break;
			}
		}
		if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream)
			continue;
		esd = gf_media_map_esd(read->mov, i+1);
		if (esd) {
			gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track);
			esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE;
			/*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/
			/*so we add by default the SPS/PPS of the lower layers to this esd*/
			if (esd->has_ref_base && add_ps_lower) {
				u32 count, refIndex, ref_track, num_sps, num_pps, t;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				GF_AVCConfig *avccfg, *svccfg;

				count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL);
				for (refIndex = count; refIndex != 0; refIndex--) {
					gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track);
					avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1);
					svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1);
					if (avccfg) {
						num_sps = gf_list_count(avccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(avccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(avccfg);
					}
					if (svccfg) {
						num_sps = gf_list_count(svccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(svccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(svccfg);
					}
				}

				if (esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data);
				gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
				gf_odf_avc_cfg_del(cfg);
			}

			od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
			od->service_ifce = read->input;
			od->objectDescriptorID = 0;
			if (!ocr_es_id) ocr_es_id = esd->ESID;
			esd->OCRESID = ocr_es_id;
			gf_list_add(od->ESDescriptors, esd);
			if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
				send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
			} else {
				gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
			}
		}
	}
	/*if cover art, extract it in cache*/
	if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) {
		const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_term_get_service_interface(read->service), "General", "CacheDirectory");
		if (cdir) {
			char szName[GF_MAX_PATH];
			const char *sep;
			FILE *t;
			sep = strrchr(gf_isom_get_filename(read->mov), '\\');
			if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/');
			if (!sep) sep = gf_isom_get_filename(read->mov);

			if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) {
				sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			} else {
				sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			}

			t = gf_f64_open(szName, "wb");

			if (t) {
				Bool isom_contains_video = GF_FALSE;

				/*write cover data*/
				assert(!(tlen & 0x80000000));
				gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t);
				fclose(t);

				/*don't display cover art when video is present*/
				for (i=0; i<gf_isom_get_track_count(read->mov); i++) {
					if (!gf_isom_is_track_enabled(read->mov, i+1))
						continue;
					if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) {
						isom_contains_video = GF_TRUE;
						break;
					}
				}

				if (!isom_contains_video) {
					od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
					od->service_ifce = read->input;
					od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID;
					od->URLString = gf_strdup(szName);
					if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
						send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
					} else {
						gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
					}
				}
			}
		}
	}
	if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
		send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL);
	} else {
		gf_term_add_media(read->service, NULL, GF_FALSE);
	}
}
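
The four nearly identical loops above deep-copy SPS/PPS slots from the AVC/SVC configurations of the referenced lower-layer tracks into the current decoder configuration, inserting them at the head of the list so that lower-layer parameter sets come first. A small helper could factor that copy. The sketch below uses GF_AVCConfigSlot, gf_list_count/gf_list_get/gf_list_insert and gf_malloc exactly as they appear in the snippet (and assumes the same headers); clone_param_sets itself is an illustrative name, not a GPAC function.

/* Hedged sketch: factor the repeated parameter-set deep copy seen above. */
static void clone_param_sets(GF_List *dst, GF_List *src)
{
    u32 t, nb = gf_list_count(src);
    for (t = 0; t < nb; t++) {
        GF_AVCConfigSlot *slc = (GF_AVCConfigSlot *) gf_list_get(src, t);
        GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *) gf_malloc(sizeof(GF_AVCConfigSlot));
        sl->id = slc->id;
        sl->size = slc->size;
        sl->data = (char *) gf_malloc(sizeof(char) * sl->size);
        memcpy(sl->data, slc->data, sizeof(char) * sl->size);
        /* insert at the head, matching the gf_list_insert(..., 0) calls above */
        gf_list_insert(dst, sl, 0);
    }
}

With such a helper, each of the loops above would reduce to a single call such as clone_param_sets(cfg->sequenceParameterSets, avccfg->sequenceParameterSets).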
Example #29
0
u32 gf_cache_get_sessions_count_for_cache_entry(const DownloadedCacheEntry entry)
{
	if (!entry)
		return 0;
	return gf_list_count(entry->sessions);
}
Example #30
0
static GF_Err RP_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com)
{
	RTPStream *ch;
	RTPClient *priv = (RTPClient *)plug->priv;


	if (com->command_type==GF_NET_SERVICE_HAS_AUDIO) {
		u32 i;
		for (i=0; i<gf_list_count(priv->channels); i++) {
			ch = gf_list_get(priv->channels, i);
			if (ch->depacketizer->sl_map.StreamType==GF_STREAM_AUDIO)
				return GF_OK;
		}
		return GF_NOT_SUPPORTED;
	}
	if (com->command_type==GF_NET_SERVICE_MIGRATION_INFO) {
		RP_SaveSessionState(priv);
		priv->session_migration=1;
		if (priv->session_state_data) {
			com->migrate.data = priv->session_state_data;
			com->migrate.data_len = (u32) strlen(priv->session_state_data);
			return GF_OK;
		}
		return GF_NOT_SUPPORTED;
	}

	/*ignore commands other than channels one*/
	if (!com->base.on_channel) {
		if (com->command_type==GF_NET_IS_CACHABLE) return GF_OK;
		return GF_NOT_SUPPORTED;
	}

	ch = RP_FindChannel(priv, com->base.on_channel, 0, NULL, 0);
	if (!ch) return GF_STREAM_NOT_FOUND;

	switch (com->command_type) {
	case GF_NET_CHAN_SET_PULL:
		if (ch->rtp_ch || ch->rtsp || !ch->control) return GF_NOT_SUPPORTED;
		/*embedded channels work in pull mode*/
		if (strstr(ch->control, "data:application/")) return GF_OK;
		return GF_NOT_SUPPORTED;
	case GF_NET_CHAN_INTERACTIVE:
		/*looks like pure RTP / multicast etc, not interactive*/
		if (!ch->control) return GF_NOT_SUPPORTED;
		/*emulated broadcast mode*/
		else if (ch->flags & RTP_FORCE_BROADCAST) return GF_NOT_SUPPORTED;
		/*regular rtsp mode*/
		else if (ch->flags & RTP_HAS_RANGE) return GF_OK;
		/*embedded data*/
		else if (strstr(ch->control, "application")) return GF_OK;
		return GF_NOT_SUPPORTED;
	case GF_NET_CHAN_BUFFER:
		if (!(ch->rtp_ch || ch->rtsp || !ch->control)) {
			com->buffer.max = com->buffer.min = 0;
		} else {
			const char *opt;
			/*amount of buffering in ms*/
			opt = gf_modules_get_option((GF_BaseInterface *)plug, "Network", "BufferLength");
			com->buffer.max = opt ? atoi(opt) : 1000;
			/*rebuffer low limit in ms - if the amount of buffering is less than this, rebuffering will never occur*/
			opt = gf_modules_get_option((GF_BaseInterface *)plug, "Network", "RebufferLength");
			if (opt) com->buffer.min = atoi(opt);
			else com->buffer.min = 500;
			if (com->buffer.min >= com->buffer.max ) com->buffer.min = 0;
		}
		return GF_OK;
	case GF_NET_CHAN_DURATION:
		com->duration.duration = (ch->flags & RTP_HAS_RANGE) ? (ch->range_end - ch->range_start) : 0;
		return GF_OK;
	/*RTP channel config is done upon connection, once the complete SL mapping is known;
	however we must store some info not carried in SDP*/
	case GF_NET_CHAN_CONFIG:
		if (com->cfg.frame_duration) ch->depacketizer->sl_hdr.au_duration = com->cfg.frame_duration;
		ch->ts_res = com->cfg.sl_config.timestampResolution;
		return GF_OK;

	case GF_NET_CHAN_PLAY:
		GF_LOG(GF_LOG_DEBUG, GF_LOG_RTP, ("[RTP] Processing play on channel @%08x - %s\n", ch, ch->rtsp ? "RTSP control" : "No control (RTP)" ));
		/*is this RTSP or direct RTP?*/
		ch->flags &= ~RTP_EOS;
		if (ch->rtsp) {
			if (ch->status==RTP_SessionResume) {
				const char *opt = gf_modules_get_option((GF_BaseInterface *) plug, "Streaming", "SessionMigrationPause");
				if (opt && !strcmp(opt, "yes")) {
					ch->status = RTP_Connected;
					com->play.start_range = ch->current_start;
				} else {
					ch->status = RTP_Running;
					return GF_OK;
				}
			}
			RP_UserCommand(ch->rtsp, ch, com);
		} else {
			ch->status = RTP_Running;
			if (ch->rtp_ch) {
				/*technically we shouldn't attempt to synchronize streams based on RTP, we should use RTCP. However it
				may happen that the RTCP traffic is absent...*/
				ch->check_rtp_time = RTP_SET_TIME_RTP;
				ch->rtcp_init = 0;
				gf_mx_p(priv->mx);
				RP_InitStream(ch, (ch->flags & RTP_CONNECTED) ? 1 : 0);
				gf_mx_v(priv->mx);
				gf_rtp_set_info_rtp(ch->rtp_ch, 0, 0, 0);
			} else {
				/*direct channel, store current start*/
				ch->current_start = com->play.start_range;
				ch->flags |= GF_RTP_NEW_AU;
				gf_rtp_depacketizer_reset(ch->depacketizer, 0);
			}
		}
		return GF_OK;
	case GF_NET_CHAN_STOP:
		/*is this RTSP or direct RTP?*/
		if (ch->rtsp) {
			if (! ch->owner->session_migration) {
				RP_UserCommand(ch->rtsp, ch, com);
			}
		} else {
			ch->status = RTP_Connected;
			ch->owner->last_ntp = 0;
		}
		ch->rtcp_init = 0;
		return GF_OK;
	case GF_NET_CHAN_SET_SPEED:
	case GF_NET_CHAN_PAUSE:
	case GF_NET_CHAN_RESUME:
		assert(ch->rtsp);
		RP_UserCommand(ch->rtsp, ch, com);
		return GF_OK;

	case GF_NET_CHAN_GET_DSI:
		if (ch->depacketizer && ch->depacketizer->sl_map.configSize) {
			com->get_dsi.dsi_len = ch->depacketizer->sl_map.configSize;
			com->get_dsi.dsi = (char*)gf_malloc(sizeof(char)*com->get_dsi.dsi_len);
			memcpy(com->get_dsi.dsi, ch->depacketizer->sl_map.config, sizeof(char)*com->get_dsi.dsi_len);
		} else {
			com->get_dsi.dsi = NULL;
			com->get_dsi.dsi_len = 0;
		}
		return GF_OK;


	case GF_NET_GET_STATS:
		memset(&com->net_stats, 0, sizeof(GF_NetComStats));
		if (ch->rtp_ch) {
			u32 time;
			Float bps;
			com->net_stats.pck_loss_percentage = gf_rtp_get_loss(ch->rtp_ch);
			if (ch->flags & RTP_INTERLEAVED) {
				com->net_stats.multiplex_port = gf_rtsp_get_session_port(ch->rtsp->session);
				com->net_stats.port = gf_rtp_get_low_interleave_id(ch->rtp_ch);
				com->net_stats.ctrl_port = gf_rtp_get_hight_interleave_id(ch->rtp_ch);
			} else {
				com->net_stats.multiplex_port = 0;
				gf_rtp_get_ports(ch->rtp_ch, &com->net_stats.port, &com->net_stats.ctrl_port);
			}
			if (ch->stat_stop_time) {
				time = ch->stat_stop_time - ch->stat_start_time;
			} else {
				time = gf_sys_clock() - ch->stat_start_time;
			}
			bps = 8.0f * ch->rtp_bytes; bps *= 1000; bps /= time; com->net_stats.bw_down = (u32) bps;
			bps = 8.0f * ch->rtcp_bytes; bps *= 1000; bps /= time; com->net_stats.ctrl_bw_down = (u32) bps;
			bps = 8.0f * gf_rtp_get_tcp_bytes_sent(ch->rtp_ch); bps *= 1000; bps /= time; com->net_stats.ctrl_bw_up = (u32) bps;
		}
		return GF_OK;
	}
	return GF_NOT_SUPPORTED;
}
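
In the GF_NET_GET_STATS branch above, the RTP/RTCP bandwidths are computed as 8 * bytes * 1000 / elapsed_ms. The self-contained helper below shows the same computation with an explicit guard against a zero elapsed time, which the snippet does not check when stats are requested immediately after the channel starts; bitrate_bps is an illustrative name, not GPAC API.

/* Sketch of the bandwidth computation used above, with a zero-time guard. */
#include <stdint.h>

static uint32_t bitrate_bps(uint64_t bytes, uint32_t elapsed_ms)
{
    double bps;
    if (!elapsed_ms) return 0;      /* stats requested before any time elapsed */
    bps = 8.0 * (double) bytes;     /* bits */
    bps *= 1000.0;                  /* elapsed time is in milliseconds */
    bps /= (double) elapsed_ms;
    return (uint32_t) bps;
}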