Example #1
GF_EXPORT
GF_Err gf_webvtt_dump_iso_track(GF_MediaExporter *dumper, char *szName, u32 track, Bool merge, Bool box_dump)
{
#ifdef GPAC_DISABLE_MEDIA_IMPORT
	return GF_NOT_SUPPORTED;
#else
	GF_Err  e;
	u32     i;
	u32     count;
	u32     timescale;
	FILE    *out;
	u32     di;
	u64     duration;
	GF_WebVTTParser *parser;

	out = szName ? gf_fopen(szName, "wt") : (dumper->dump_file ? dumper->dump_file : stdout);
	if (!out) return GF_IO_ERR;// gf_export_message(dumper, GF_IO_ERR, "Error opening %s for writing - check disk access & permissions", szName);

	parser = gf_webvtt_parser_new();
	parser->user = out;
	parser->on_cue_read = gf_webvtt_dump_cue;

	if (box_dump)
		fprintf(out, "<WebVTTTrack trackID=\"%d\">\n", gf_isom_get_track_id(dumper->file, track) );

	e = gf_webvtt_dump_header(out, dumper->file, track, box_dump, 1);
	if (e) goto exit;

	timescale = gf_isom_get_media_timescale(dumper->file, track);

	count = gf_isom_get_sample_count(dumper->file, track);
	for (i=0; i<count; i++) {
		GF_ISOSample *samp = gf_isom_get_sample(dumper->file, track, i+1, &di);
		if (!samp) {
			e = gf_isom_last_error(dumper->file);
			goto exit;
		}
		e = gf_webvtt_parse_iso_sample(parser, timescale, samp, merge, box_dump);
		if (e) {
			gf_isom_sample_del(&samp);
			goto exit;
		}
		gf_isom_sample_del(&samp);
	}
	duration = gf_isom_get_media_duration(dumper->file, track);
	gf_webvtt_parser_dump_finalize(parser, duration);

	if (box_dump)
		fprintf(out, "</WebVTTTrack>\n");

exit:
	gf_webvtt_parser_del(parser);
	if (szName) gf_fclose(out);
	return e;
#endif
}
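
A minimal caller sketch for the exporter above. It only fills the two GF_MediaExporter fields the function actually reads (file and dump_file); the file name, track number and flag values are placeholders, and error handling is trimmed.

static void dump_track_as_webvtt(const char *mp4_path, u32 track)
{
	GF_MediaExporter dumper;
	GF_Err e;
	memset(&dumper, 0, sizeof(GF_MediaExporter));
	/* only the fields read by gf_webvtt_dump_iso_track are set here */
	dumper.file = gf_isom_open(mp4_path, GF_ISOM_OPEN_READ, NULL);
	if (!dumper.file) return;
	dumper.dump_file = NULL; /* we pass an explicit output name instead */
	e = gf_webvtt_dump_iso_track(&dumper, "subs.vtt", track, GF_FALSE /*merge*/, GF_FALSE /*box_dump*/);
	if (e) fprintf(stderr, "WebVTT dump failed: %s\n", gf_error_to_string(e));
	gf_isom_delete(dumper.file);
}
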
Example #2
static GF_Err TTIn_ChannelGetSLP(GF_InputService *plug, LPNETCHANNEL channel, char **out_data_ptr, u32 *out_data_size, GF_SLHeader *out_sl_hdr, Bool *sl_compressed, GF_Err *out_reception_status, Bool *is_new_data)
{
    TTIn *tti = (TTIn *)plug->priv;

    *out_reception_status = GF_OK;
    *sl_compressed = 0;
    *is_new_data = 0;

    memset(&tti->sl_hdr, 0, sizeof(GF_SLHeader));
    tti->sl_hdr.randomAccessPointFlag = 1;
    tti->sl_hdr.compositionTimeStampFlag = 1;
    tti->sl_hdr.accessUnitStartFlag = tti->sl_hdr.accessUnitEndFlag = 1;

    /*fetching es data*/
    if (tti->ch == channel) {
        if (tti->samp_num>=gf_isom_get_sample_count(tti->mp4, tti->tt_track)) {
            *out_reception_status = GF_EOS;
            return GF_OK;
        }

        if (!tti->samp) {
            u32 di;
            if (tti->start_range) {
                *out_reception_status = gf_isom_get_sample_for_movie_time(tti->mp4, tti->tt_track, tti->start_range, &di, GF_ISOM_SEARCH_SYNC_BACKWARD, &tti->samp, &tti->samp_num);
                tti->start_range = 0;
            } else {
                tti->samp = gf_isom_get_sample(tti->mp4, tti->tt_track, tti->samp_num+1, &di);
            }
            if (!tti->samp) {
                *out_reception_status = GF_CORRUPTED_DATA;
                return GF_OK;
            }
            *is_new_data = 1;
        }
        tti->sl_hdr.compositionTimeStamp = tti->sl_hdr.decodingTimeStamp = tti->samp->DTS;
        *out_data_ptr = tti->samp->data;
        *out_data_size = tti->samp->dataLength;
        *out_sl_hdr = tti->sl_hdr;
        return GF_OK;
    }
    return GF_STREAM_NOT_FOUND;
}
Example #3
GF_Err gf_webvtt_dump_iso_track(GF_MediaExporter *dumper, char *szName, u32 track, Bool merge)
{
	GF_Err  e;
	u32     i;
	u32     count;
	u32     timescale;
	FILE    *out;
	u32     di;
	u64     duration;
	GF_WebVTTParser *parser;

	out = szName ? gf_fopen(szName, "wt") : stdout;
	if (!out) return GF_IO_ERR;// gf_export_message(dumper, GF_IO_ERR, "Error opening %s for writing - check disk access & permissions", szName);

	parser = gf_webvtt_parser_new();
	parser->user = out;
	parser->on_cue_read = gf_webvtt_dump_cue;

	e = gf_webvtt_dump_header(out, dumper->file, track, 1);
	if (e) goto exit;

	timescale = gf_isom_get_media_timescale(dumper->file, track);

	count = gf_isom_get_sample_count(dumper->file, track);
	for (i=0; i<count; i++) {
		GF_ISOSample *samp = gf_isom_get_sample(dumper->file, track, i+1, &di);
		if (!samp) {
			e = gf_isom_last_error(dumper->file);
			goto exit;
		}
		e = gf_webvtt_parse_iso_sample(parser, timescale, samp, merge);
		//gf_webvtt_dump_iso_sample(out, timescale, samp);
		gf_isom_sample_del(&samp);
		if (e) goto exit;
	}
	duration = gf_isom_get_media_duration(dumper->file, track);
	gf_webvtt_parser_dump_finalize(parser, duration);

exit:
	gf_webvtt_parser_del(parser);
	if (szName) gf_fclose(out);
	return e;
}
Example #4
GF_EXPORT
u32 gf_isom_find_od_for_track(GF_ISOFile *file, u32 track)
{
	u32 i, j, di, the_od_id;
	GF_TrackBox *od_tk;
	GF_TrackBox *tk = gf_isom_get_track_from_file(file, track);
	if (!tk) return 0;

	i = 0;
	while ((od_tk = (GF_TrackBox*)gf_list_enum(file->moov->trackList, &i))) {
		if (od_tk->Media->handler->handlerType != GF_ISOM_MEDIA_OD) continue;

		for (j = 0; j<od_tk->Media->information->sampleTable->SampleSize->sampleCount; j++) {
			GF_ISOSample *samp = gf_isom_get_sample(file, i, j + 1, &di);
			the_od_id = Media_FindOD_ID(od_tk->Media, samp, tk->Header->trackID);
			gf_isom_sample_del(&samp);
			if (the_od_id) return the_od_id;
		}
	}
	return 0;
}
Example #5
static void process_samples_from_track(GF_ISOFile *movie, u32 track_id, u32 *sample_index)
{
	u32 track_number;
	u32 sample_count;
	/* Error indicator */
	GF_Err e;
	/* Number of bytes required to finish the current ISO Box reading */
	u64 missing_bytes;

	track_number = gf_isom_get_track_by_id(movie, track_id);
	if (track_number == 0) {
		fprintf(stdout, "Could not find track ID=%u. Ignore segment.\n", track_id);
		return;
	}

	sample_count = gf_isom_get_sample_count(movie, track_number);
	while (*sample_index <= sample_count) {
		GF_ISOSample *iso_sample;
		u32 sample_description_index;

		iso_sample = gf_isom_get_sample(movie, track_number, *sample_index, &sample_description_index);
		if (iso_sample) {
			fprintf(stdout, "Found sample #%5d/%5d of length %8d, RAP: %d, DTS: "LLD", CTS: "LLD"\n", *sample_index, sample_count, iso_sample->dataLength, iso_sample->IsRAP, iso_sample->DTS, iso_sample->DTS+iso_sample->CTS_Offset);
			(*sample_index)++;

			/* Release the sample data, once you're done with it*/
			gf_isom_sample_del(&iso_sample);
		} else {
			e = gf_isom_last_error(movie);
			if (e == GF_ISOM_INCOMPLETE_FILE) {
				missing_bytes = gf_isom_get_missing_bytes(movie, track_number);
				fprintf(stdout, "Missing "LLU" bytes on input file\n", missing_bytes);
				gf_sleep(1000);
			}
		}
	}
}
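
For context, a hedged sketch of a caller for the helper above: it opens a finished file and walks every track, so the GF_ISOM_INCOMPLETE_FILE branch never triggers. The file name is a placeholder; the original tutorial drives this from a progressively refreshed movie instead.

static void dump_all_samples(const char *mp4_path)
{
	u32 i, sample_index;
	GF_ISOFile *movie = gf_isom_open(mp4_path, GF_ISOM_OPEN_READ, NULL);
	if (!movie) return;
	for (i = 0; i < gf_isom_get_track_count(movie); i++) {
		sample_index = 1; /* samples are numbered starting from 1 */
		process_samples_from_track(movie, gf_isom_get_track_id(movie, i+1), &sample_index);
	}
	gf_isom_delete(movie);
}
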
Example #6
static GF_Err gf_isom_load_next_hint_sample(GF_ISOFile *the_file, u32 trackNumber, GF_TrackBox *trak, GF_HintSampleEntryBox *entry)
{
	GF_BitStream *bs;
	u32 descIdx;
	GF_ISOSample *samp;

	if (!entry->cur_sample) return GF_BAD_PARAM;
	if (entry->cur_sample>trak->Media->information->sampleTable->SampleSize->sampleCount) return GF_EOS;

	samp = gf_isom_get_sample(the_file, trackNumber, entry->cur_sample, &descIdx);
	if (!samp) return GF_IO_ERR;
	entry->cur_sample++;

	if (entry->hint_sample) gf_isom_hint_sample_del(entry->hint_sample);

	bs = gf_bs_new(samp->data, samp->dataLength, GF_BITSTREAM_READ);
	entry->hint_sample = gf_isom_hint_sample_new(entry->type);
	gf_isom_hint_sample_read(entry->hint_sample, bs, samp->dataLength);
	gf_bs_del(bs);
	entry->hint_sample->TransmissionTime = samp->DTS;
	gf_isom_sample_del(&samp);
	entry->hint_sample->sample_cache = gf_list_new();
	return GF_OK;
}
Example #7
GF_EXPORT
GF_RTPHinter *gf_hinter_track_new(GF_ISOFile *file, u32 TrackNum, 
							u32 Path_MTU, u32 max_ptime, u32 default_rtp_rate, u32 flags, u8 PayloadID, 
							Bool copy_media, u32 InterleaveGroupID, u8 InterleaveGroupPriority, GF_Err *e)
{

	GF_SLConfig my_sl;
	u32 descIndex, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, maxDTSDelta;
	u8 OfficialPayloadID;
	u32 TrackMediaSubType, TrackMediaType, hintType, nbEdts, required_rate, force_dts_delta, avc_nalu_size, PL_ID, bandwidth, IV_length, KI_length;
	const char *url, *urn;
	char *mpeg4mode;
	Bool is_crypted, has_mpeg4_mapping;
	GF_RTPHinter *tmp;
	GF_ESD *esd;

	*e = GF_BAD_PARAM;
	if (!file || !TrackNum || !gf_isom_get_track_id(file, TrackNum)) return NULL;

	if (!gf_isom_get_sample_count(file, TrackNum)) {
		*e = GF_OK;
		return NULL;
	}
	*e = GF_NOT_SUPPORTED;
	nbEdts = gf_isom_get_edit_segment_count(file, TrackNum);
	if (nbEdts>1) {
		u64 et, sd, mt;
		u8 em;
		gf_isom_get_edit_segment(file, TrackNum, 1, &et, &sd, &mt, &em);
		if ((nbEdts>2) || (em!=GF_ISOM_EDIT_EMPTY)) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Cannot hint track with EditList\n"));
			return NULL;
		}
	}
	if (nbEdts) gf_isom_remove_edit_segments(file, TrackNum);

	if (!gf_isom_is_track_enabled(file, TrackNum)) return NULL;

	/*by default NO PL signaled*/
	PL_ID = 0;
	OfficialPayloadID = 0;
	force_dts_delta = 0;
	streamType = oti = 0;
	mpeg4mode = NULL;
	required_rate = 0;
	is_crypted = 0;
	IV_length = KI_length = 0;
	oti = 0;
	nb_ch = 0;
	avc_nalu_size = 0;
	has_mpeg4_mapping = 1;
	TrackMediaType = gf_isom_get_media_type(file, TrackNum);
	TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);
	
	/*for max compatibility with QT*/
	if (!default_rtp_rate) default_rtp_rate = 90000;

	/*timed-text is a bit special, we support multiple stream descriptions & co*/
	if ( (TrackMediaType==GF_ISOM_MEDIA_TEXT) || (TrackMediaType==GF_ISOM_MEDIA_SUBT)) {
		hintType = GF_RTP_PAYT_3GPP_TEXT;
		oti = GPAC_OTI_TEXT_MPEG4;
		streamType = GF_STREAM_TEXT;
		/*fixme - this works cos there's only one PL for text in mpeg4 at the current time*/
		PL_ID = 0x10;
	} else {
		if (gf_isom_get_sample_description_count(file, TrackNum) > 1) return NULL;

		TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);
		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP: 
			is_crypted = 1;
		case GF_ISOM_SUBTYPE_MPEG4:
			esd = gf_isom_get_esd(file, TrackNum, 1);
			hintType = GF_RTP_PAYT_MPEG4;
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;
				if (esd->URLString) hintType = 0;
				/*AAC*/
				if ((streamType==GF_STREAM_AUDIO) && esd->decoderConfig->decoderSpecificInfo
				/*(nb: we use mpeg4 for MPEG-2 AAC)*/
				&& ((oti==GPAC_OTI_AUDIO_AAC_MPEG4) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_MP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_LCP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_SSRP)) ) {

					u32 sample_rate;
					GF_M4ADecSpecInfo a_cfg;
					gf_m4a_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &a_cfg);
					nb_ch = a_cfg.nb_chan;
					sample_rate = a_cfg.base_sr;
					PL_ID = a_cfg.audioPL;
					switch (a_cfg.base_object_type) {
					case GF_M4A_AAC_MAIN:
					case GF_M4A_AAC_LC:
						if (flags & GP_RTP_PCK_USE_LATM_AAC) {
							hintType = GF_RTP_PAYT_LATM;
							break;
						}
					case GF_M4A_AAC_SBR:
					case GF_M4A_AAC_PS:
					case GF_M4A_AAC_LTP:
					case GF_M4A_AAC_SCALABLE:
					case GF_M4A_ER_AAC_LC:
					case GF_M4A_ER_AAC_LTP:
					case GF_M4A_ER_AAC_SCALABLE:
						mpeg4mode = "AAC";
						break;
					case GF_M4A_CELP:
					case GF_M4A_ER_CELP:
						mpeg4mode = "CELP";
						break;
					}
					required_rate = sample_rate;
				}
				/*MPEG1/2 audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_MPEG2_PART3) || (oti==GPAC_OTI_AUDIO_MPEG1))) {
					u32 sample_rate;
					if (!is_crypted) {
						GF_ISOSample *samp = gf_isom_get_sample(file, TrackNum, 1, NULL);
						u32 hdr = GF_4CC((u8)samp->data[0], (u8)samp->data[1], (u8)samp->data[2], (u8)samp->data[3]);
						nb_ch = gf_mp3_num_channels(hdr);
						sample_rate = gf_mp3_sampling_rate(hdr);
						gf_isom_sample_del(&samp);
						hintType = GF_RTP_PAYT_MPEG12_AUDIO;
						/*use official RTP/AVP payload type*/
						OfficialPayloadID = 14;
						required_rate = 90000;
					}
					/*encrypted MP3 must be sent through MPEG-4 generic to signal all ISMACryp stuff*/
					else {
						u8 bps;
						gf_isom_get_audio_info(file, TrackNum, 1, &sample_rate, &nb_ch, &bps);
						required_rate = sample_rate;
					}
				}
				/*QCELP audio*/
				else if ((streamType==GF_STREAM_AUDIO) && (oti==GPAC_OTI_AUDIO_13K_VOICE)) {
					hintType = GF_RTP_PAYT_QCELP;
					OfficialPayloadID = 12;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*EVRC/SVM audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_EVRC_VOICE) || (oti==GPAC_OTI_AUDIO_SMV_VOICE)) ) {
					hintType = GF_RTP_PAYT_EVRC_SMV;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*visual streams*/
				else if (streamType==GF_STREAM_VISUAL) {
					if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) {
						GF_M4VDecSpecInfo dsi;
						gf_m4v_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &dsi);
						PL_ID = dsi.VideoPL;
					}
					/*MPEG1/2 video*/
					if ( ((oti>=GPAC_OTI_VIDEO_MPEG2_SIMPLE) && (oti<=GPAC_OTI_VIDEO_MPEG2_422)) || (oti==GPAC_OTI_VIDEO_MPEG1)) {
						if (!is_crypted) {
							hintType = GF_RTP_PAYT_MPEG12_VIDEO;
							OfficialPayloadID = 32;
						}
					}
					/*for ISMA*/
					if (is_crypted) {
						/*that's another pain with ISMACryp, even if no B-frames the DTS is signaled...*/
						if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) force_dts_delta = 22;
						else if (oti==GPAC_OTI_VIDEO_AVC) {
							flags &= ~GP_RTP_PCK_USE_MULTI;
							force_dts_delta = 22;
						}
						flags |= GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_SIGNAL_TS;
					}

					required_rate = default_rtp_rate;
				}
				/*systems streams*/
				else if (gf_isom_has_sync_shadows(file, TrackNum) || gf_isom_has_sample_dependency(file, TrackNum)) {
					flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
			break;
		case GF_ISOM_SUBTYPE_3GP_H263:
			hintType = GF_RTP_PAYT_H263;
			required_rate = 90000;
			streamType = GF_STREAM_VISUAL;
			OfficialPayloadID = 34;
			/*not 100% compliant (short header is missing) but should still work*/
			oti = GPAC_OTI_VIDEO_MPEG4_PART2;
			PL_ID = 0x01;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_AMR;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR_WB:
			required_rate = 16000;
			hintType = GF_RTP_PAYT_AMR_WB;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc = gf_isom_avc_config_get(file, TrackNum, 1);
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			hintType = GF_RTP_PAYT_H264_AVC;
			streamType = GF_STREAM_VISUAL;
			avc_nalu_size = avcc->nal_unit_size;
			oti = GPAC_OTI_VIDEO_AVC;
			PL_ID = 0x0F;
			gf_odf_avc_cfg_del(avcc);
		}
			break;
		case GF_ISOM_SUBTYPE_3GP_QCELP:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_QCELP;
			streamType = GF_STREAM_AUDIO;
			oti = GPAC_OTI_AUDIO_13K_VOICE;
			OfficialPayloadID = 12;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_EVRC:
		case GF_ISOM_SUBTYPE_3GP_SMV:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_EVRC_SMV;
			streamType = GF_STREAM_AUDIO;
			oti = (TrackMediaSubType==GF_ISOM_SUBTYPE_3GP_EVRC) ? GPAC_OTI_AUDIO_EVRC_VOICE : GPAC_OTI_AUDIO_SMV_VOICE;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_DIMS:
			hintType = GF_RTP_PAYT_3GPP_DIMS;
			streamType = GF_STREAM_SCENE;
			break;
		case GF_ISOM_SUBTYPE_AC3:
			hintType = GF_RTP_PAYT_AC3;
			streamType = GF_STREAM_AUDIO;
			gf_isom_get_audio_info(file, TrackNum, 1, NULL, &nb_ch, NULL);
			break;
		default:
			/*ERROR*/
			hintType = 0;
			break;
		}
	}

	/*not hintable*/
	if (!hintType) return NULL;
	/*we only support self-contained files for hinting*/
	gf_isom_get_data_reference(file, TrackNum, 1, &url, &urn);
	if (url || urn) return NULL;
	
	*e = GF_OUT_OF_MEM;
	GF_SAFEALLOC(tmp, GF_RTPHinter);
	if (!tmp) return NULL;

	/*override hinter type if requested and possible*/
	if (has_mpeg4_mapping && (flags & GP_RTP_PCK_FORCE_MPEG4)) {
		hintType = GF_RTP_PAYT_MPEG4;
		avc_nalu_size = 0;
	}
	/*use static payload ID if enabled*/
	else if (OfficialPayloadID && (flags & GP_RTP_PCK_USE_STATIC_ID) ) {
		PayloadID = OfficialPayloadID;
	}

	tmp->file = file;
	tmp->TrackNum = TrackNum;
	tmp->avc_nalu_size = avc_nalu_size;
	tmp->nb_chan = nb_ch;

	/*spatial scalability check*/
	tmp->has_ctts = gf_isom_has_time_offset(file, TrackNum);

	/*get sample info*/
	gf_media_get_sample_average_infos(file, TrackNum, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

	/*systems carousel: we need at least IDX and RAP signaling*/
	if (flags & GP_RTP_PCK_SYSTEMS_CAROUSEL) {
		flags |= GP_RTP_PCK_SIGNAL_RAP;
	}

	/*update flags in MultiSL*/
	if (flags & GP_RTP_PCK_USE_MULTI) {
		if (MinSize != MaxSize) flags |= GP_RTP_PCK_SIGNAL_SIZE;
		if (!const_dur) flags |= GP_RTP_PCK_SIGNAL_TS;
	}
	if (tmp->has_ctts) flags |= GP_RTP_PCK_SIGNAL_TS;

	/*default SL for RTP */
	InitSL_RTP(&my_sl);

	my_sl.timestampResolution = gf_isom_get_media_timescale(file, TrackNum);
	/*override clockrate if set*/
	if (required_rate) {
		Double sc = required_rate;
		sc /= my_sl.timestampResolution;
		maxDTSDelta = (u32) (maxDTSDelta*sc);
		my_sl.timestampResolution = required_rate;
	}
	/*switch to RTP TS*/
	max_ptime = (u32) (max_ptime * my_sl.timestampResolution / 1000);

	my_sl.AUSeqNumLength = gf_get_bit_size(gf_isom_get_sample_count(file, TrackNum));
	my_sl.CUDuration = const_dur;

	if (gf_isom_has_sync_points(file, TrackNum)) {
		my_sl.useRandomAccessPointFlag = 1;
	} else {
		my_sl.useRandomAccessPointFlag = 0;
		my_sl.hasRandomAccessUnitsOnlyFlag = 1;
	}

	if (is_crypted) {
		Bool use_sel_enc;
		gf_isom_get_ismacryp_info(file, TrackNum, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
		if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
	}

	// in case a different timescale was provided
	tmp->OrigTimeScale = gf_isom_get_media_timescale(file, TrackNum);
	tmp->rtp_p = gf_rtp_builder_new(hintType, &my_sl, flags, tmp, 
								MP4T_OnNewPacket, MP4T_OnPacketDone, 
								/*if copy, no data ref*/
								copy_media ? NULL : MP4T_OnDataRef, 
								MP4T_OnData);

	//init the builder
	gf_rtp_builder_init(tmp->rtp_p, PayloadID, Path_MTU, max_ptime,
					   streamType, oti, PL_ID, MinSize, MaxSize, avgTS, maxDTSDelta, IV_length, KI_length, mpeg4mode);

	/*ISMA compliance is a pain...*/
	if (force_dts_delta) tmp->rtp_p->slMap.DTSDeltaLength = force_dts_delta;


	/*		Hint Track Setup	*/
	tmp->TrackID = gf_isom_get_track_id(file, TrackNum);
	tmp->HintID = tmp->TrackID + 65535;
	while (gf_isom_get_track_by_id(file, tmp->HintID)) tmp->HintID++;

	tmp->HintTrack = gf_isom_new_track(file, tmp->HintID, GF_ISOM_MEDIA_HINT, my_sl.timestampResolution);
	gf_isom_setup_hint_track(file, tmp->HintTrack, GF_ISOM_HINT_RTP);
	/*create a hint description*/
	gf_isom_new_hint_description(file, tmp->HintTrack, -1, -1, 0, &descIndex);
	gf_isom_rtp_set_timescale(file, tmp->HintTrack, descIndex, my_sl.timestampResolution);

	if (hintType==GF_RTP_PAYT_MPEG4) {
		tmp->rtp_p->slMap.ObjectTypeIndication = oti;
		/*set this SL for extraction.*/
		gf_isom_set_extraction_slc(file, TrackNum, 1, &my_sl);
	}
	tmp->bandwidth = bandwidth;

	/*set interleaving*/
	gf_isom_set_track_group(file, TrackNum, InterleaveGroupID);
	if (!copy_media) {
		/*if we don't copy data set hint track and media track in the same group*/
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID);
	} else {
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID + OFFSET_HINT_GROUP_ID);
	}
	/*use user-specified priority*/
	InterleaveGroupPriority*=2;
	gf_isom_set_track_priority_in_group(file, TrackNum, InterleaveGroupPriority+1);
	gf_isom_set_track_priority_in_group(file, tmp->HintTrack, InterleaveGroupPriority);

#if 0
	/*QT FF: not setting these flags = server uses a random offset*/
	gf_isom_rtp_set_time_offset(file, tmp->HintTrack, 1, 0);
	/*we don't use seq offset for maintenance purposes*/
	gf_isom_rtp_set_time_sequence_offset(file, tmp->HintTrack, 1, 0);
#endif
	*e = GF_OK;
	return tmp;
}
Example #8
/*import cubic QTVR to mp4*/
GF_Err gf_sm_load_init_qt(GF_SceneLoader *load)
{
	u32 i, di, w, h, tk, nb_samp;
	Bool has_qtvr;
	GF_ISOSample *samp;
	GF_ISOFile *src;
	GF_StreamContext *st;
	GF_AUContext *au;
	GF_Command *com;
	M_Background *back;
	M_NavigationInfo *ni;
	M_Group *gr;
	GF_ODUpdate *odU;
	GF_SceneGraph *sg;
	GF_ObjectDescriptor *od;
	GF_ESD *esd;

	if (!load->ctx) return GF_NOT_SUPPORTED;

	src = gf_isom_open(load->fileName, GF_ISOM_OPEN_READ, NULL);
	if (!src) return gf_qt_report(load, GF_URL_ERROR, "Opening file %s failed", load->fileName);

	w = h = tk = 0;
	nb_samp = 0;

	has_qtvr = 0;
	for (i=0; i<gf_isom_get_track_count(src); i++) {
		switch (gf_isom_get_media_type(src, i+1)) {
		case GF_ISOM_MEDIA_VISUAL:
			if (gf_isom_get_media_subtype(src, i+1, 1) == GF_4CC('j', 'p', 'e', 'g')) {
				GF_GenericSampleDescription *udesc = gf_isom_get_generic_sample_description(src, i+1, 1);
				if ((udesc->width>w) || (udesc->height>h)) {
					w = udesc->width;
					h = udesc->height;
					tk = i+1;
					nb_samp = gf_isom_get_sample_count(src, i+1);
				}
				if (udesc->extension_buf) gf_free(udesc->extension_buf);
				gf_free(udesc);
			}
			break;
		case GF_4CC('q','t','v','r'):
			has_qtvr = 1;
			break;
		}
	}
	if (!has_qtvr) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NOT_SUPPORTED, "QTVR not found - no conversion available for this QuickTime movie");
	}
	if (!tk) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NON_COMPLIANT_BITSTREAM, "No associated visual track with QTVR movie");
	}
	if (nb_samp!=6) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NOT_SUPPORTED, "Movie %s doesn't look like a Cubic QTVR - sorry...", load->fileName);
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_PARSER, ("QT: Importing Cubic QTVR Movie"));

	/*create scene*/
	sg = load->ctx->scene_graph;
	gr = (M_Group *) gf_node_new(sg, TAG_MPEG4_Group);
	gf_node_register((GF_Node *)gr, NULL);
	st = gf_sm_stream_new(load->ctx, 1, GF_STREAM_SCENE, 1);
	au = gf_sm_stream_au_new(st, 0, 0, 1);
	com = gf_sg_command_new(load->ctx->scene_graph, GF_SG_SCENE_REPLACE);
	gf_list_add(au->commands, com);
	com->node = (GF_Node *)gr;

	back = (M_Background *) gf_node_new(sg, TAG_MPEG4_Background);
	gf_node_list_add_child( &gr->children, (GF_Node*)back);
	gf_node_register((GF_Node *)back, (GF_Node *)gr);

	gf_sg_vrml_mf_alloc(&back->leftUrl, GF_SG_VRML_MFURL, 1);
	back->leftUrl.vals[0].OD_ID = 2;
	gf_sg_vrml_mf_alloc(&back->frontUrl, GF_SG_VRML_MFURL, 1);
	back->frontUrl.vals[0].OD_ID = 3;
	gf_sg_vrml_mf_alloc(&back->rightUrl, GF_SG_VRML_MFURL, 1);
	back->rightUrl.vals[0].OD_ID = 4;
	gf_sg_vrml_mf_alloc(&back->backUrl, GF_SG_VRML_MFURL, 1);
	back->backUrl.vals[0].OD_ID = 5;
	gf_sg_vrml_mf_alloc(&back->topUrl, GF_SG_VRML_MFURL, 1);
	back->topUrl.vals[0].OD_ID = 6;
	gf_sg_vrml_mf_alloc(&back->bottomUrl, GF_SG_VRML_MFURL, 1);
	back->bottomUrl.vals[0].OD_ID = 7;

	ni = (M_NavigationInfo *) gf_node_new(sg, TAG_MPEG4_NavigationInfo);
	gf_node_list_add_child(&gr->children, (GF_Node*)ni);
	gf_node_register((GF_Node *)ni, (GF_Node *)gr);
	gf_sg_vrml_mf_reset(&ni->type, GF_SG_VRML_MFSTRING);
	gf_sg_vrml_mf_alloc(&ni->type, GF_SG_VRML_MFSTRING, 1);
	ni->type.vals[0] = gf_strdup("QTVR");

	/*create ODs*/
	st = gf_sm_stream_new(load->ctx, 2, GF_STREAM_OD, 1);
	au = gf_sm_stream_au_new(st, 0, 0, 1);
	odU = (GF_ODUpdate*) gf_odf_com_new(GF_ODF_OD_UPDATE_TAG);
	gf_list_add(au->commands, odU);
	for (i=0; i<6; i++) {
		GF_MuxInfo *mi;
		FILE *img;
		char szName[1024];
		od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
		od->objectDescriptorID = 2+i;
		esd = gf_odf_desc_esd_new(2);
		esd->decoderConfig->streamType = GF_STREAM_VISUAL;
		esd->decoderConfig->objectTypeIndication = GPAC_OTI_IMAGE_JPEG;
		esd->ESID = 3+i;
		/*extract image and remember it*/
		mi = (GF_MuxInfo *) gf_odf_desc_new(GF_ODF_MUXINFO_TAG);
		gf_list_add(esd->extensionDescriptors, mi);
		mi->delete_file = 1;
		sprintf(szName, "%s_img%d.jpg", load->fileName, esd->ESID);
		mi->file_name = gf_strdup(szName);
		
		gf_list_add(od->ESDescriptors, esd);
		gf_list_add(odU->objectDescriptors, od);

		samp = gf_isom_get_sample(src, tk, i+1, &di);
		img = gf_f64_open(mi->file_name, "wb");
		fwrite(samp->data, samp->dataLength, 1, img);
		fclose(img);
		gf_isom_sample_del(&samp);
	}
	gf_isom_delete(src);
	return GF_OK;
}
Example #9
GF_EXPORT
GF_Err gf_isom_streamer_send_next_packet(GF_ISOMRTPStreamer *streamer, s32 send_ahead_delay, s32 max_sleep_time) 
{
	GF_Err e = GF_OK;
	GF_RTPTrack *track, *to_send;
	u32 time, duration;
	s32 diff;
	u64 min_ts, dts, cts;

	if (!streamer) return GF_BAD_PARAM;
	
	/*browse all sessions and locate most mature stream*/
	to_send = NULL;
	min_ts = (u64) -1;

	time = gf_sys_clock();

	/*init session timeline - all sessions are sync'ed for packet scheduling purposes*/
	if (!streamer->timelineOrigin)
		streamer->timelineOrigin = time*1000;

	track = streamer->stream;
	while (track) {
		/*load next AU*/
		if (!track->au) {
			if (track->current_au >= track->nb_aus) {
				Double scale;
				if (!streamer->loop) {
					track = track->next;
					continue;
				}
				/*increment ts offset*/
				scale = track->timescale/1000.0;
				track->ts_offset += (u32) (streamer->duration_ms * scale);
				track->microsec_ts_offset = (u32) (track->ts_offset*(1000000.0/track->timescale)) + streamer->timelineOrigin;
				track->current_au = 0;
			}

			track->au = gf_isom_get_sample(streamer->isom, track->track_num, track->current_au + 1, &track->sample_desc_index);
			track->current_au ++;
			if (track->au) {
				track->microsec_dts = (u64) (track->microsec_ts_scale * (s64) (track->au->DTS)) + track->microsec_ts_offset + streamer->timelineOrigin;
			}
		}

		/*check timing*/
		if (track->au) {
			if (min_ts > track->microsec_dts) {
				min_ts = track->microsec_dts;
				to_send = track;
			}
		}

		track = track->next;
	}

	/*no input data ...*/
	if( !to_send) return GF_EOS;
	min_ts /= 1000;

	if (max_sleep_time) {
		diff = ((u32) min_ts) - gf_sys_clock();	
		if (diff>max_sleep_time) 
			return GF_OK;
	}

	/*sleep until TS is mature*/
	while (1) {
		diff = ((u32) min_ts) - gf_sys_clock();
		
		if (diff > send_ahead_delay) {
			gf_sleep(1);
		} else {
			if (diff<10) {
				GF_LOG(GF_LOG_DEBUG, GF_LOG_RTP, ("WARNING: RTP session %s stream %d - sending packet %d ms too late\n", gf_isom_get_filename(streamer->isom), to_send->track_num, -diff));
			}
			break;
		}
	}

	/*send packets*/

	dts = to_send->au->DTS + to_send->ts_offset;
	cts = to_send->au->DTS + to_send->au->CTS_Offset + to_send->ts_offset;
	duration = gf_isom_get_sample_duration(streamer->isom, to_send->track_num, to_send->current_au);

	/*unpack nal units*/
	if (to_send->avc_nalu_size) {
		Bool au_start, au_end;
		u32 v, size;
		u32 remain = to_send->au->dataLength;
		char *ptr = to_send->au->data;

		au_start = 1;
		au_end = 0;
		while (remain) {
			size = 0;
			v = to_send->avc_nalu_size;
			while (v) {
				size |= (u8) *ptr;
				ptr++;
				remain--;
				v-=1;
				if (v) size<<=8;
			}
			remain -= size;
			au_end = remain ? 0 : 1;

			e = gf_rtp_streamer_send_data(to_send->rtp, ptr, size, to_send->au->dataLength, cts, dts, to_send->au->IsRAP, au_start, au_end, to_send->current_au, duration, to_send->sample_desc_index);
			ptr += size;
			au_start = 0;
		}
	} else {
		e = gf_rtp_streamer_send_data(to_send->rtp, to_send->au->data, to_send->au->dataLength, to_send->au->dataLength, cts, dts, to_send->au->IsRAP, 1, 1, to_send->current_au, duration, to_send->sample_desc_index);
	}
	/*delete sample*/
	gf_isom_sample_del(&to_send->au);

	return e;
}
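
The inner loop above re-implements a big-endian NALU length-prefix read by hand (Examples #10 and #14 repeat the same pattern). The same logic as a stand-alone helper, purely a sketch and not part of the GPAC API:

/* Read a big-endian NALU length prefix of nalu_size_len bytes (1..4),
   advancing *ptr and *remain; returns the announced NALU payload size. */
static u32 read_nalu_size(char **ptr, u32 *remain, u32 nalu_size_len)
{
	u32 size = 0;
	while (nalu_size_len && *remain) {
		size = (size << 8) | (u8) **ptr;
		(*ptr)++;
		(*remain)--;
		nalu_size_len--;
	}
	return size;
}
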
Example #10
GF_EXPORT
GF_Err gf_isom_streamer_send_next_packet(GF_ISOMRTPStreamer *streamer, s32 send_ahead_delay, s32 max_sleep_time)
{
	GF_Err e = GF_OK;
	GF_RTPTrack *track, *to_send;
	u32 time, duration;
	s32 diff;
	u64 min_ts, dts, cts;

	if (!streamer) return GF_BAD_PARAM;

	/*browse all sessions and locate most mature stream*/
	to_send = NULL;
	min_ts = (u64) -1;

	time = gf_sys_clock();

	/*init session timeline - all sessions are sync'ed for packet scheduling purposes*/
	if (!streamer->timelineOrigin) {
		streamer->timelineOrigin = time*1000;
		GF_LOG(GF_LOG_INFO, GF_LOG_RTP, ("[FileStreamer] RTP session %s initialized - time origin set to %d\n", gf_isom_get_filename(streamer->isom), time));
	}

	track = streamer->stream;
	while (track) {
		/*load next AU*/
		gf_isom_set_nalu_extract_mode(streamer->isom, track->track_num, GF_ISOM_NALU_EXTRACT_LAYER_ONLY);
		if (!track->au) {
			if (track->current_au >= track->nb_aus) {
				Double scale;
				if (!streamer->loop) {
					track = track->next;
					continue;
				}
				/*increment ts offset*/
				scale = track->timescale/1000.0;
				track->ts_offset += (u32) (streamer->duration_ms * scale);
				track->microsec_ts_offset = (u32) (track->ts_offset*(1000000.0/track->timescale)) + streamer->timelineOrigin;
				track->current_au = 0;
			}

			track->au = gf_isom_get_sample(streamer->isom, track->track_num, track->current_au + 1, &track->sample_desc_index);
			track->current_au ++;
			if (track->au) {
				track->microsec_dts = (u64) (track->microsec_ts_scale * (s64) (track->au->DTS)) + track->microsec_ts_offset + streamer->timelineOrigin;
			}
		}

		/*check timing*/
		if (track->au) {
			if (min_ts > track->microsec_dts) {
				min_ts = track->microsec_dts;
				to_send = track;
			}
		}

		track = track->next;
	}

	/*no input data ...*/
	if( !to_send) return GF_EOS;


	/*we are about to send scalable base: trigger RTCP reports with the same NTP. This avoids
	NTP drift due to system clock precision which could break sync decoding*/
	if (!streamer->first_RTCP_sent || (streamer->base_track && streamer->base_track==to_send->track_num)) {
		u32 ntp_sec, ntp_frac;
		/*force sending RTCP SR every RAP ? - not really compliant but we cannot perform scalable tuning otherwise*/
		u32 ntp_type = to_send->au->IsRAP ? 2 : 1;
		gf_net_get_ntp(&ntp_sec, &ntp_frac);
		track = streamer->stream;
		while (track) {
			u32 ts = (u32) (track->au->DTS + track->au->CTS_Offset + track->ts_offset);
			gf_rtp_streamer_send_rtcp(track->rtp, GF_TRUE, ts, ntp_type, ntp_sec, ntp_frac);
			track = track->next;
		}

		streamer->first_RTCP_sent = 1;
	}

	min_ts /= 1000;

	if (max_sleep_time) {
		diff = ((u32) min_ts) - gf_sys_clock();
		if (diff>max_sleep_time)
			return GF_OK;
	}


	/*sleep until TS is mature*/
	while (1) {
		diff = ((u32) min_ts) - gf_sys_clock();

		if (diff > send_ahead_delay) {
			gf_sleep(1);
		} else {
			if (diff<10) {
				GF_LOG(GF_LOG_DEBUG, GF_LOG_RTP, ("WARNING: RTP session %s stream %d - sending packet %d ms too late\n", gf_isom_get_filename(streamer->isom), to_send->track_num, -diff));
			}
			break;
		}
	}

	/*send packets*/
	dts = to_send->au->DTS + to_send->ts_offset;
	cts = to_send->au->DTS + to_send->au->CTS_Offset + to_send->ts_offset;
	duration = gf_isom_get_sample_duration(streamer->isom, to_send->track_num, to_send->current_au);

	GF_LOG(GF_LOG_INFO, GF_LOG_RTP, ("[FileStreamer] Sending RTP packets for track %d AU %d/%d DTS "LLU" - CTS "LLU" - RTP TS "LLU" - size %d - RAP %d\n", to_send->track_num, to_send->current_au, to_send->nb_aus, to_send->au->DTS, to_send->au->DTS+to_send->au->CTS_Offset, cts, to_send->au->dataLength, to_send->au->IsRAP ) );

	/*unpack nal units*/
	if (to_send->avc_nalu_size) {
		Bool au_start, au_end;
		u32 v, size;
		u32 remain = to_send->au->dataLength;
		char *ptr = to_send->au->data;

		au_start = 1;
		au_end = 0;
		while (remain) {
			size = 0;
			v = to_send->avc_nalu_size;
			while (v) {
				size |= (u8) *ptr;
				ptr++;
				remain--;
				v-=1;
				if (v) size<<=8;
			}
			if (remain < size) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Broken AVC nalu encapsulation: NALU size is %d but only %d bytes left in sample %d\n", size, remain, to_send->current_au));
				break;
			}
			remain -= size;
			au_end = remain ? 0 : 1;

			e = gf_rtp_streamer_send_data(to_send->rtp, ptr, size, to_send->au->dataLength, cts, dts, (to_send->au->IsRAP==RAP) ? 1 : 0, au_start, au_end, to_send->current_au, duration, to_send->sample_desc_index);
			ptr += size;
			au_start = 0;
		}
	} else {
		e = gf_rtp_streamer_send_data(to_send->rtp, to_send->au->data, to_send->au->dataLength, to_send->au->dataLength, cts, dts, (to_send->au->IsRAP==RAP) ? 1 : 0, 1, 1, to_send->current_au, duration, to_send->sample_desc_index);
	}
	/*delete sample*/
	gf_isom_sample_del(&to_send->au);

	return e;
}
Example #11
File: main.c Project: Bevara/GPAC
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	BIFSVID b2v;
	u16 es_id;
	Bool first_dump, needs_raw;
	u32 i, j, di, count, timescale, frameNum;
	u32 duration, cur_time;
	GF_VideoSurface fb;
	GF_Err e;
	char old_driv[1024];
	const char *test;
	char config_path[GF_MAX_PATH];
	avi_t *avi_out;
	Bool reset_fps;
	GF_ESD *esd;
	char comp[5];
	char *conv_buf;

	memset(&user, 0, sizeof(GF_User));
	if (szConfigFile && strlen(szConfigFile)) {
		user.config = gf_cfg_init(szConfigFile, NULL);
	} else {
		user.config = gf_cfg_init(NULL, NULL);
	}

	if (!user.config) {
		fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path);
		return;
	}
	avi_out = NULL;
	conv_buf = NULL;
	esd = NULL;
	needs_raw = 0;
	test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
	user.modules = gf_modules_new((const unsigned char *) test, user.config);
	strcpy(old_driv, "raw_out");
	if (!gf_modules_get_count(user.modules)) {
		printf("Error: no modules found\n");
		goto err_exit;
	}

	/*switch driver to raw_driver*/
	test = gf_cfg_get_key(user.config, "Video", "DriverName");
	if (test) strcpy(old_driv, test);

	test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
	/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
	if (test && strstr(test, "2D")) {
		gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
		needs_raw = 1;
	}

	needs_raw = 0;
	user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D;
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

	if (needs_raw) {
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
			printf("couldn't load raw output driver (%s used)\n", test);
			goto err_exit;
		}
	}

	strcpy(config_path, "");
	if (out_dir) {
		strcat(config_path, out_dir);
		if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\");
	}
	strcat(config_path, rad_name);
	strcat(config_path, "_bifs");
	if (!dump_type) {
		strcat(config_path, ".avi");
		avi_out = AVI_open_output_file(config_path);
		comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
		if (!avi_out) goto err_exit;
	}


	for (i=0; i<gf_isom_get_track_count(file); i++) {
		esd = gf_isom_get_esd(file, i+1, 1);
		if (!esd) continue;
		if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	if (!esd) {
		printf("no bifs track found\n");
		goto err_exit;
	}

	b2v.duration = gf_isom_get_media_duration(file, i+1);
	timescale = gf_isom_get_media_timescale(file, i+1);
	es_id = (u16) gf_isom_get_track_id(file, i+1);
	e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
	if (e) {
		printf("BIFS init error %s\n", gf_error_to_string(e));
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
		goto err_exit;
	}
	if (dump_time>=0) dump_time = dump_time *1000 / timescale;

	gf_sc_set_scene(b2v.sr, b2v.sg);
	count = gf_isom_get_sample_count(file, i+1);

	reset_fps = 0;
	if (!fps) {
		fps = (Float) (count * timescale);
		fps /= (Double) (s64) b2v.duration;
		printf("Estimated BIFS FrameRate %g\n", fps);
		reset_fps = 1;
	}

	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}

	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_draw_frame(b2v.sr);

	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	if (avi_out) {
		AVI_set_video(avi_out, width, height, fps, comp);
		conv_buf = gf_malloc(sizeof(char) * width * height * 3);
	}
	printf("Dumping at BIFS resolution %d x %d\n\n", width, height);
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	cur_time = 0;

	duration = (u32)(timescale / fps);
	if (reset_fps) fps = 0;

	frameNum = 1;
	first_dump = 1;
	for (j=0; j<count; j++) {
		GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di);

		b2v.cts = samp->DTS + samp->CTS_Offset;
		/*apply command*/
		gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
		gf_isom_sample_del(&samp);

		if ((frameID>=0) && (j<(u32)frameID)) continue;
		if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue;
		/*render frame*/
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		if (first_dump) {
			gf_sc_draw_frame(b2v.sr);
			first_dump = 0;
		}

		if (fps) {
			if (cur_time > b2v.cts) continue;

			while (1) {
				printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1);
				dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum);
				frameNum++;
				cur_time += duration;
				if (cur_time > b2v.cts) break;
			}
		} else {
			dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum);
			if (frameID>=0 || dump_time>=0) break;
			frameNum++;
			printf("dumped frame %d / %d\r", j+1, count);
		}

	}
	gf_odf_desc_del((GF_Descriptor *) esd);

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);

err_exit:
	if (avi_out) AVI_close(avi_out);
	if (conv_buf) gf_free(conv_buf);
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example #12
static u32 iso_progressive_read_thread(void *param)
{
	ISOProgressiveReader *reader = (ISOProgressiveReader *)param;
	u32 track_number;
	GF_ISOSample *iso_sample;
	u32 samples_processed;
	u32 sample_index;
	u32 sample_count;

	samples_processed = 0;
	sample_count = 0;
	track_number = 0;
	/* samples are numbered starting from 1 */
	sample_index = 1;

	while (reader->do_run == GF_TRUE) {

		/* we can only parse if there is a movie */
		if (reader->movie) {

			/* block the data input until we are done in the parsing */
			gf_mx_p(reader->mutex);

			/* get the track number we want */
			if (track_number == 0) {
				track_number = gf_isom_get_track_by_id(reader->movie, reader->track_id);
			}

			/* only if we have the track number can we try to get the sample data */
			if (track_number != 0) {
				u32 new_sample_count;
				u32 di; /*descriptor index*/

				/* let's see how many samples we have since the last parsed */
				new_sample_count = gf_isom_get_sample_count(reader->movie, track_number);
				if (new_sample_count > sample_count) {
					/* New samples have been added to the file */
					fprintf(stdout, "Found %d new samples (total: %d)\n", new_sample_count - sample_count, new_sample_count);
					if (sample_count == 0) {
						sample_count = new_sample_count;
					}
				}
				if (sample_count == 0) {
					/*let the reader push new data */
					gf_mx_v(reader->mutex);
					//gf_sleep(1000);
				} else {
					/* let's analyze the samples we have parsed so far one by one */
					iso_sample = gf_isom_get_sample(reader->movie, track_number, sample_index, &di);
					if (iso_sample) {
						/* if you want the sample description data, you can call:
						   GF_Descriptor *desc = gf_isom_get_decoder_config(reader->movie, reader->track_handle, di);
						*/

						samples_processed++;
						/*here we dump some sample info: samp->data, samp->dataLength, samp->isRAP, samp->DTS, samp->CTS_Offset */
						fprintf(stdout, "Found sample #%5d (#%5d) of length %8d, RAP: %d, DTS: "LLD", CTS: "LLD"\r", sample_index, samples_processed, iso_sample->dataLength, iso_sample->IsRAP, iso_sample->DTS, iso_sample->DTS+iso_sample->CTS_Offset);
						sample_index++;

						/*release the sample data, once you're done with it*/
						gf_isom_sample_del(&iso_sample);

						/* once we have read all the samples, we can release some data and force a reparse of the input buffer */
						if (sample_index > sample_count) {
							u64 new_buffer_start;
							u64 missing_bytes;

							fprintf(stdout, "\nReleasing unnecessary buffers\n");
							/* release internal structures associated with the samples read so far */
							gf_isom_reset_tables(reader->movie, GF_TRUE);

#if 1
							/* release the associated input data as well */
							gf_isom_reset_data_offset(reader->movie, &new_buffer_start);
							if (new_buffer_start) {
								u32 offset = (u32)new_buffer_start;
								memmove(reader->data, reader->data+offset, reader->data_size-offset);
								reader->valid_data_size -= offset;
							}
							sprintf(reader->data_url, "gmem://%d@%p", reader->valid_data_size, reader->data);
							gf_isom_refresh_fragmented(reader->movie, &missing_bytes, reader->data_url);
#endif

							/* update the sample count and sample index */
							sample_count = new_sample_count - sample_count;
							assert(sample_count == 0);
							sample_index = 1;
						}
					} else {
						GF_Err e = gf_isom_last_error(reader->movie);
						fprintf(stdout, "Could not get sample %s\r", gf_error_to_string(e));
					}
					/* and finally, let the data reader push more data */
					gf_mx_v(reader->mutex);
				}
			}
		} else {
			//gf_sleep(1);
		}
	}
	return 0;
}
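
The reader above prints DTS/CTS in media units. A small helper sketch for converting them to seconds, relying only on gf_isom_get_media_timescale already used in the other examples:

/* Convert a sample's composition time from media units to seconds. */
static Double sample_cts_in_seconds(GF_ISOFile *movie, u32 track_number, GF_ISOSample *iso_sample)
{
	u32 timescale = gf_isom_get_media_timescale(movie, track_number);
	if (!timescale) return 0;
	return ((Double) (s64) (iso_sample->DTS + iso_sample->CTS_Offset)) / timescale;
}
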
Example #13
static GF_Err gf_sm_load_run_isom(GF_SceneLoader *load)
{
	GF_Err e;
	FILE *logs;
	u32 i, j, di, nbBifs, nbLaser, nb_samp, samp_done, init_offset;
	GF_StreamContext *sc;
	GF_ESD *esd;
	GF_ODCodec *od_dec;
#ifndef GPAC_DISABLE_BIFS
	GF_BifsDecoder *bifs_dec;
#endif
#ifndef GPAC_DISABLE_LASER
	GF_LASeRCodec *lsr_dec;
#endif

	if (!load || !load->isom) return GF_BAD_PARAM;

	nbBifs = nbLaser = 0;
	e = GF_OK;
#ifndef GPAC_DISABLE_BIFS
	bifs_dec = gf_bifs_decoder_new(load->scene_graph, 1);
	gf_bifs_decoder_set_extraction_path(bifs_dec, load->localPath, load->fileName);
#endif
	od_dec = gf_odf_codec_new();
	logs = NULL;
#ifndef GPAC_DISABLE_LASER
	lsr_dec = gf_laser_decoder_new(load->scene_graph);
#endif
	esd = NULL;
	/*load each stream*/
	nb_samp = 0;
	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			break;
		default:
			break;
		}
	}
	samp_done = 1;
	gf_isom_text_set_streaming_mode(load->isom, 1);

	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			break;
		default:
			continue;
		}
		esd = gf_isom_get_esd(load->isom, i+1, 1);
		if (!esd) continue;


		if ((esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_AFX) ||
		        (esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_SYNTHESIZED_TEXTURE)
		   ) {
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			continue;
		}
		sc = gf_sm_stream_new(load->ctx, esd->ESID, esd->decoderConfig->streamType, esd->decoderConfig->objectTypeIndication);
		sc->streamType = esd->decoderConfig->streamType;
		sc->ESID = esd->ESID;
		sc->objectType = esd->decoderConfig->objectTypeIndication;
		sc->timeScale = gf_isom_get_media_timescale(load->isom, i+1);

		/*we still need to reconfig the BIFS*/
		if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
			/*BIFS*/
			if (esd->decoderConfig->objectTypeIndication<=2) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				if (!esd->decoderConfig->decoderSpecificInfo) {
					/* Hack for T-DMB non compliant streams */
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, NULL, 0, esd->decoderConfig->objectTypeIndication);
				} else {
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
				}
				if (e) goto exit;
				nbBifs++;
			}
#endif

#ifndef GPAC_DISABLE_LASER
			/*LASER*/
			if (esd->decoderConfig->objectTypeIndication==0x09) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				e = gf_laser_decoder_configure_stream(lsr_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				if (e) goto exit;
				nbLaser++;
			}
#endif
		}

		init_offset = 0;
		/*dump all AUs*/
		for (j=0; j<gf_isom_get_sample_count(load->isom, i+1); j++) {
			GF_AUContext *au;
			GF_ISOSample *samp = gf_isom_get_sample(load->isom, i+1, j+1, &di);
			if (!samp) {
				mp4_report(load, gf_isom_last_error(load->isom), "Unable to fetch sample %d from track ID %d - aborting track import", j+1, gf_isom_get_track_id(load->isom, i+1));
				break;
			}
			/*check if track has initial offset*/
			if (!j && gf_isom_get_edit_segment_count(load->isom, i+1)) {
				u64 EditTime, dur, mtime;
				u8 mode;
				gf_isom_get_edit_segment(load->isom, i+1, 1, &EditTime, &dur, &mtime, &mode);
				if (mode==GF_ISOM_EDIT_EMPTY) {
					init_offset = (u32) (dur * sc->timeScale / gf_isom_get_timescale(load->isom) );
				}
			}
			samp->DTS += init_offset;

			au = gf_sm_stream_au_new(sc, samp->DTS, ((Double)(s64) samp->DTS) / sc->timeScale, (samp->IsRAP==RAP) ? 1 : 0);

			if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
				if (esd->decoderConfig->objectTypeIndication<=2)
					e = gf_bifs_decode_command_list(bifs_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
#ifndef GPAC_DISABLE_LASER
				if (esd->decoderConfig->objectTypeIndication==0x09)
					e = gf_laser_decode_command_list(lsr_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
			} else {
				e = gf_odf_codec_set_au(od_dec, samp->data, samp->dataLength);
				if (!e) e = gf_odf_codec_decode(od_dec);
				if (!e) {
					while (1) {
						GF_ODCom *odc = gf_odf_codec_get_com(od_dec);
						if (!odc) break;
						/*update ESDs if any*/
						UpdateODCommand(load->isom, odc);
						gf_list_add(au->commands, odc);
					}
				}
			}
			gf_isom_sample_del(&samp);
			if (e) {
				mp4_report(load, gf_isom_last_error(load->isom), "decoding sample %d from track ID %d failed", j+1, gf_isom_get_track_id(load->isom, i+1));
				goto exit;
			}

			samp_done++;
			gf_set_progress("MP4 Loading", samp_done, nb_samp);
		}
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	gf_isom_text_set_streaming_mode(load->isom, 0);

exit:
#ifndef GPAC_DISABLE_BIFS
	gf_bifs_decoder_del(bifs_dec);
#endif
	gf_odf_codec_del(od_dec);
#ifndef GPAC_DISABLE_LASER
	gf_laser_decoder_del(lsr_dec);
#endif
	if (esd) gf_odf_desc_del((GF_Descriptor *) esd);
	if (logs) gf_fclose(logs);
	return e;
}
Example #14
GF_EXPORT
GF_Err gf_hinter_track_process(GF_RTPHinter *tkHint)
{
	GF_Err e;
	u32 i, descIndex, duration;
	u64 ts;
	u8 PadBits;
	Double ft;
	GF_ISOSample *samp;

	tkHint->HintSample = tkHint->RTPTime = 0;

	tkHint->TotalSample = gf_isom_get_sample_count(tkHint->file, tkHint->TrackNum);
	ft = tkHint->rtp_p->sl_config.timestampResolution;
	ft /= tkHint->OrigTimeScale;
	
	e = GF_OK;
	for (i=0; i<tkHint->TotalSample; i++) {
		samp = gf_isom_get_sample(tkHint->file, tkHint->TrackNum, i+1, &descIndex);
		if (!samp) return GF_IO_ERR;

		//setup SL
		tkHint->CurrentSample = i + 1;

		/*keep same AU indicator if sync shadow - TODO FIXME: this assumes shadows are placed interleaved with 
		the track content which is the case for GPAC scene carousel generation, but may not always be true*/
		if (samp->IsRAP==2) {
			tkHint->rtp_p->sl_header.AU_sequenceNumber -= 1;
			samp->IsRAP = 1;
		}

		ts = (u64) (ft * (s64) (samp->DTS+samp->CTS_Offset));
		tkHint->rtp_p->sl_header.compositionTimeStamp = ts;

		ts = (u64) (ft * (s64)(samp->DTS));
		tkHint->rtp_p->sl_header.decodingTimeStamp = ts;
		tkHint->rtp_p->sl_header.randomAccessPointFlag = samp->IsRAP;

		tkHint->base_offset_in_sample = 0;
		/*crypted*/
		if (tkHint->rtp_p->slMap.IV_length) {
			GF_ISMASample *s = gf_isom_get_ismacryp_sample(tkHint->file, tkHint->TrackNum, samp, descIndex);
			/*one byte taken for the selective_enc flag*/
			if (s->flags & GF_ISOM_ISMA_USE_SEL_ENC) tkHint->base_offset_in_sample += 1;
			if (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) tkHint->base_offset_in_sample += s->IV_length + s->KI_length;
			gf_free(samp->data);
			samp->data = s->data;
			samp->dataLength = s->dataLength;
			gp_rtp_builder_set_cryp_info(tkHint->rtp_p, s->IV, (char*)s->key_indicator, (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) ? 1 : 0);
			s->data = NULL;
			s->dataLength = 0;
			gf_isom_ismacryp_delete_sample(s);
		}

		if (tkHint->rtp_p->sl_config.usePaddingFlag) {
			gf_isom_get_sample_padding_bits(tkHint->file, tkHint->TrackNum, i+1, &PadBits);
			tkHint->rtp_p->sl_header.paddingBits = PadBits;
		} else {
			tkHint->rtp_p->sl_header.paddingBits = 0;
		}
		
		duration = gf_isom_get_sample_duration(tkHint->file, tkHint->TrackNum, i+1);
		ts = (u32) (ft * (s64) (duration));

		/*unpack nal units*/
		if (tkHint->avc_nalu_size) {
			u32 v, size;
			u32 remain = samp->dataLength;
			char *ptr = samp->data;

			tkHint->rtp_p->sl_header.accessUnitStartFlag = 1;
			tkHint->rtp_p->sl_header.accessUnitEndFlag = 0;
			while (remain) {
				size = 0;
				v = tkHint->avc_nalu_size;
				while (v) {
					size |= (u8) *ptr;
					ptr++;
					remain--;
					v-=1;
					if (v) size<<=8;
				}
				tkHint->base_offset_in_sample = samp->dataLength-remain;
				remain -= size;
				tkHint->rtp_p->sl_header.accessUnitEndFlag = remain ? 0 : 1;
				e = gf_rtp_builder_process(tkHint->rtp_p, ptr, size, (u8) !remain, samp->dataLength, duration, (u8) (descIndex + GF_RTP_TX3G_SIDX_OFFSET) );
				ptr += size;
				tkHint->rtp_p->sl_header.accessUnitStartFlag = 0;
			}
		} else {
			e = gf_rtp_builder_process(tkHint->rtp_p, samp->data, samp->dataLength, 1, samp->dataLength, duration, (u8) (descIndex + GF_RTP_TX3G_SIDX_OFFSET) );
		}
		tkHint->rtp_p->sl_header.packetSequenceNumber += 1;

		//signal some progress
		gf_set_progress("Hinting", tkHint->CurrentSample, tkHint->TotalSample);

		tkHint->rtp_p->sl_header.AU_sequenceNumber += 1;
		gf_isom_sample_del(&samp);

		if (e) return e;
	}

	//flush
	gf_rtp_builder_process(tkHint->rtp_p, NULL, 0, 1, 0, 0, 0);

	gf_isom_end_hint_sample(tkHint->file, tkHint->HintTrack, (u8) tkHint->SampleIsRAP);
	return GF_OK;
}
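
As a rough illustration of how the hinting entry points shown here fit together (gf_hinter_track_new from Example #7, gf_hinter_track_process above), a hedged sketch with placeholder MTU/payload values; hinter cleanup and gf_hinter_finalize (Example #16) are omitted:

static GF_Err hint_one_track(GF_ISOFile *file, u32 track_num)
{
	GF_Err e;
	GF_RTPHinter *hinter = gf_hinter_track_new(file, track_num,
		1460 /*Path_MTU*/, 0 /*max_ptime*/, 90000 /*default_rtp_rate*/,
		0 /*flags*/, 96 /*PayloadID*/, GF_FALSE /*copy_media*/,
		0 /*InterleaveGroupID*/, 0 /*InterleaveGroupPriority*/, &e);
	if (!hinter) return e;
	return gf_hinter_track_process(hinter);
}
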
Example #15
File: main.c Project: Bevara/GPAC
/*generates an intertwined bmp from a scene file with 5 different viewpoints*/
void bifs3d_viewpoints_merger(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	char out_path[GF_MAX_PATH];
	char old_driv[1024];
	BIFSVID b2v;
	Bool needs_raw;
	GF_Err e;
	GF_VideoSurface fb;
	unsigned char **rendered_frames;
	u32 nb_viewpoints = 5;
	u32 viewpoint_index;


	/* Configuration of the Rendering Capabilities */
	{
		const char *test;
		char config_path[GF_MAX_PATH];
		memset(&user, 0, sizeof(GF_User));
		user.config = gf_cfg_init(szConfigFile, NULL);

		if (!user.config) {
			fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path);
			return;
		}

		test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
		user.modules = gf_modules_new((const unsigned char *) test, user.config);
		strcpy(old_driv, "raw_out");
		if (!gf_modules_get_count(user.modules)) {
			printf("Error: no modules found\n");
			goto err_exit;
		}

		/*switch driver to raw_driver*/
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (test) strcpy(old_driv, test);

		needs_raw = 0;
		test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
		/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
		if (test && strstr(test, "2D")) {
			gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
			needs_raw = 1;
		}
		if (needs_raw) {
			test = gf_cfg_get_key(user.config, "Video", "DriverName");
			if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
				printf("couldn't load raw output driver (%s used)\n", test);
				goto err_exit;
			}
		}
	}

	memset(&b2v, 0, sizeof(BIFSVID));
	user.init_flags = GF_TERM_NO_AUDIO;
	/* Initialization of the compositor */
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	/* Initialization of the scene graph */
	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	{
		u32 di;
		u32 track_number;
		GF_ESD *esd;
		u16 es_id;
		b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

		for (track_number=0; track_number<gf_isom_get_track_count(file); track_number++) {
			esd = gf_isom_get_esd(file, track_number+1, 1);
			if (!esd) continue;
			if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
		}
		if (!esd) {
			printf("no bifs track found\n");
			goto err_exit;
		}

		es_id = (u16) gf_isom_get_track_id(file, track_number+1);
		e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
		if (e) {
			printf("BIFS init error %s\n", gf_error_to_string(e));
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
			goto err_exit;
		}

		{
			GF_ISOSample *samp = gf_isom_get_sample(file, track_number+1, 1, &di);
			b2v.cts = samp->DTS + samp->CTS_Offset;
			/*apply command*/
			gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
			gf_isom_sample_del(&samp);
		}

		b2v.duration = gf_isom_get_media_duration(file, track_number+1);

		gf_odf_desc_del((GF_Descriptor *) esd);

	}
	gf_sc_set_scene(b2v.sr, b2v.sg);

	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}
	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	GF_SAFEALLOC(rendered_frames, nb_viewpoints*sizeof(char *));
	for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
		GF_SAFEALLOC(rendered_frames[viewpoint_index-1], fb.width*fb.height*3);
		gf_sc_set_viewpoint(b2v.sr, viewpoint_index, NULL);
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		gf_sc_draw_frame(b2v.sr);
		strcpy(out_path, "");
		if (out_dir) {
			strcat(out_path, out_dir);
			if (out_path[strlen(out_path)-1] != '\\') strcat(out_path, "\\");
		}
		strcat(out_path, rad_name);
		strcat(out_path, "_view");
		gf_sc_get_screen_buffer(b2v.sr, &fb);
		write_bmp(&fb, out_path, viewpoint_index);
		memcpy(rendered_frames[viewpoint_index-1], fb.video_buffer, fb.width*fb.height*3);
		gf_sc_release_screen_buffer(b2v.sr, &fb);
	}

	if (width != 800 || height != 480) {
		printf("Wrong scene dimension, cannot produce output\n");
		goto err_exit;
	} else {
		u32 x, y;
		GF_VideoSurface out_fb;
		u32 bpp = 3;
		out_fb.width = 800;
		out_fb.height = 480;
		out_fb.pitch = 800*bpp;
		out_fb.pixel_format = GF_PIXEL_RGB_24;
		out_fb.is_hardware_memory = 0;
		GF_SAFEALLOC(out_fb.video_buffer, out_fb.pitch*out_fb.height);
#if 1
		for (y=0; y<out_fb.height; y++) {
			/*starting red pixel is R1, R5, R4, R3, R2, R1, R5, ... when increasing line num*/
			u32 line_shift = (5 - y%5) % 5; /*y%5 keeps the pattern period-5 and avoids unsigned wrap for y>5*/
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				/* red */
				out_fb.video_buffer[offset] = rendered_frames[view_shift][offset];
				/* green */
				out_fb.video_buffer[offset+1] = rendered_frames[(view_shift+1)%5][offset+1];
				/* blue */
				out_fb.video_buffer[offset+2] = rendered_frames[(view_shift+2)%5][offset+2];
			}
		}
#else
		/*calibration*/
		for (y=0; y<out_fb.height; y++) {
			u32 line_shift = (5- y%5) % 5;
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				out_fb.video_buffer[offset] = ((view_shift)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+1] = ((view_shift+1)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+2] = ((view_shift+2)%5 == 2) ? 0xFF : 0;
			}
		}
#endif
		write_bmp(&out_fb, "output", 0);
	}

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);



err_exit:
	/*	if (rendered_frames) {
			for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
				if (rendered_frames[viewpoint_index-1]) gf_free(rendered_frames[viewpoint_index-1]);
			}
			gf_free(rendered_frames);
		}
		if (output_merged_frame) gf_free(output_merged_frame);
	*/
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
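The interleaving loop above spreads the five rendered viewpoints across the RGB sub-pixels of the 800x480 output, shifting the pattern by one view per line and by three sub-pixels per column. A minimal standalone sketch of that mapping (view_for_subpixel is a hypothetical helper, not a GPAC function):

#include <stdio.h>

/* Returns which of the 5 views (0..4) feeds a given sub-pixel, mirroring the
 * interleaving loop above (bpp = 3, period-5 pattern in both x and y).
 * channel: 0 = red, 1 = green, 2 = blue. */
static unsigned int view_for_subpixel(unsigned int x, unsigned int y, unsigned int channel)
{
	unsigned int line_shift = (5 - y % 5) % 5;           /* shifts by one view per line    */
	unsigned int view_shift = (line_shift + 3 * x) % 5;  /* and by 3 sub-pixels per column */
	return (view_shift + channel) % 5;
}

int main(void)
{
	unsigned int x, y;
	/* red sub-pixel view indices for the first 10 columns of lines 0 and 1 */
	for (y = 0; y < 2; y++) {
		for (x = 0; x < 10; x++) printf("%u ", view_for_subpixel(x, y, 0));
		printf("\n");
	}
	return 0; /* prints "0 3 1 4 2 ..." then "4 2 0 3 1 ..." */
}

The first-column red sub-pixel thus cycles through views 0, 4, 3, 2, 1 down the lines, which is the R1, R5, R4, R3, R2 pattern noted in the comment of the loop above.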
Example #16
0
GF_EXPORT
GF_Err gf_hinter_finalize(GF_ISOFile *file, u32 IOD_Profile, u32 bandwidth)
{
	u32 i, sceneT, odT, descIndex, size, size64;
	GF_InitialObjectDescriptor *iod;
	GF_SLConfig slc;
	GF_ESD *esd;
	GF_ISOSample *samp;
	Bool remove_ocr;
	char *buffer;
	char buf64[5000], sdpLine[2300];


	gf_isom_sdp_clean(file);

	if (bandwidth) {
		sprintf(buf64, "b=AS:%d", bandwidth);
		gf_isom_sdp_add_line(file, buf64);
	}
	//extended attribute for copyright
	sprintf(buf64, "a=x-copyright: %s", "MP4/3GP File hinted with GPAC " GPAC_FULL_VERSION " (C)2000-2005 - http://gpac.sourceforge.net");
	gf_isom_sdp_add_line(file, buf64);

	if (IOD_Profile == GF_SDP_IOD_NONE) return GF_OK;

	odT = sceneT = 0;
	for (i=0; i<gf_isom_get_track_count(file); i++) {
		if (!gf_isom_is_track_in_root_od(file, i+1)) continue;
		switch (gf_isom_get_media_type(file,i+1)) {
		case GF_ISOM_MEDIA_OD:
			odT = i+1;
			break;
		case GF_ISOM_MEDIA_SCENE:
			sceneT = i+1;
			break;
		}
	}
	remove_ocr = 0;
	if (IOD_Profile == GF_SDP_IOD_ISMA_STRICT) {
		IOD_Profile = GF_SDP_IOD_ISMA;
		remove_ocr = 1;
	}

	/*if we want ISMA-like IODs, we need at least a BIFS track*/
	if ( (IOD_Profile == GF_SDP_IOD_ISMA) && !sceneT ) return GF_BAD_PARAM;

	/*do NOT change PLs, we assume they are correct*/
	iod = (GF_InitialObjectDescriptor *) gf_isom_get_root_od(file);
	if (!iod) return GF_NOT_SUPPORTED;

	/*rewrite an IOD with a good SL config - embed data if possible*/
	if (IOD_Profile == GF_SDP_IOD_ISMA) {
		Bool is_ok = 1;
		while (gf_list_count(iod->ESDescriptors)) {
			esd = (GF_ESD*)gf_list_get(iod->ESDescriptors, 0);
			gf_odf_desc_del((GF_Descriptor *) esd);
			gf_list_rem(iod->ESDescriptors, 0);
		}


		/*get the OD ESD and embed the stream data if possible*/
		if (odT) {
			esd = gf_isom_get_esd(file, odT, 1);
			if (gf_isom_get_sample_count(file, odT)==1) {
				samp = gf_isom_get_sample(file, odT, 1, &descIndex);
				if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_OD)) {
					InitSL_NULL(&slc);
					slc.predefined = 0;
					slc.hasRandomAccessUnitsOnlyFlag = 1;
					slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, odT);	
					slc.OCRResolution = 1000;
					slc.startCTS = samp->DTS+samp->CTS_Offset;
					slc.startDTS = samp->DTS;
					//set the SL for future extraction
					gf_isom_set_extraction_slc(file, odT, 1, &slc);

					size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
					buf64[size64] = 0;
					sprintf(sdpLine, "data:application/mpeg4-od-au;base64,%s", buf64);

					esd->decoderConfig->avgBitrate = 0;
					esd->decoderConfig->bufferSizeDB = samp->dataLength;
					esd->decoderConfig->maxBitrate = 0;
					size64 = strlen(sdpLine)+1;
					esd->URLString = (char*)gf_malloc(sizeof(char) * size64);
					strcpy(esd->URLString, sdpLine);
				} else {
					GF_LOG(GF_LOG_WARNING, GF_LOG_RTP, ("[rtp hinter] OD sample too large to be embedded in IOD - ISMA disabled\n"));
					is_ok = 0;
				}
				gf_isom_sample_del(&samp);
			}
			if (remove_ocr) esd->OCRESID = 0;
			else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;
			
			//OK, add this to our IOD
			gf_list_add(iod->ESDescriptors, esd);
		}

		esd = gf_isom_get_esd(file, sceneT, 1);
		if (gf_isom_get_sample_count(file, sceneT)==1) {
			samp = gf_isom_get_sample(file, sceneT, 1, &descIndex);
			if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_SCENE)) {

				slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, sceneT);	
				slc.OCRResolution = 1000;
				slc.startCTS = samp->DTS+samp->CTS_Offset;
				slc.startDTS = samp->DTS;
				//set the SL for future extraction
				gf_isom_set_extraction_slc(file, sceneT, 1, &slc);
				//base64-encode the sample
				size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
				buf64[size64] = 0;
				sprintf(sdpLine, "data:application/mpeg4-bifs-au;base64,%s", buf64);

				esd->decoderConfig->avgBitrate = 0;
				esd->decoderConfig->bufferSizeDB = samp->dataLength;
				esd->decoderConfig->maxBitrate = 0;
				esd->URLString = (char*)gf_malloc(sizeof(char) * (strlen(sdpLine)+1));
				strcpy(esd->URLString, sdpLine);
			} else {
				GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Scene description sample too large to be embedded in IOD - ISMA disabled\n"));
				is_ok = 0;
			}
			gf_isom_sample_del(&samp);
		}
		if (remove_ocr) esd->OCRESID = 0;
		else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;

		gf_list_add(iod->ESDescriptors, esd);

		if (is_ok) {
			u32 has_a, has_v, has_i_a, has_i_v;
			has_a = has_v = has_i_a = has_i_v = 0;
			for (i=0; i<gf_isom_get_track_count(file); i++) {
				esd = gf_isom_get_esd(file, i+1, 1);
				if (!esd) continue;
				if (esd->decoderConfig->streamType==GF_STREAM_VISUAL) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_MPEG4_PART2) has_i_v ++;
					else has_v++;
				} else if (esd->decoderConfig->streamType==GF_STREAM_AUDIO) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_AUDIO_AAC_MPEG4) has_i_a ++;
					else has_a++;
				}
				gf_odf_desc_del((GF_Descriptor *)esd);
			}
			/*only 1 MPEG-4 visual max and 1 MPEG-4 audio max for ISMA compliancy*/
			if (!has_v && !has_a && (has_i_v<=1) && (has_i_a<=1)) {
				sprintf(sdpLine, "a=isma-compliance:1,1.0,1");
				gf_isom_sdp_add_line(file, sdpLine);
			}
		}
	}

	//encode the IOD
	buffer = NULL;
	size = 0;
	gf_odf_desc_write((GF_Descriptor *) iod, &buffer, &size);
	gf_odf_desc_del((GF_Descriptor *)iod);

	//base64-encode the IOD
	size64 = gf_base64_encode(buffer, size, buf64, 2000);
	buf64[size64] = 0;
	gf_free(buffer);

	sprintf(sdpLine, "a=mpeg4-iod:\"data:application/mpeg4-iod;base64,%s\"", buf64);
	gf_isom_sdp_add_line(file, sdpLine);

	return GF_OK;
}
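A minimal calling sketch for the function above (finalize_hints and the bandwidth value are illustrative; gf_isom_open, gf_isom_close and gf_isom_delete are assumed from the general GPAC ISO-file API, and the file is assumed to already contain hint tracks):

#include <gpac/isomedia.h>
#include <gpac/media_tools.h>

static GF_Err finalize_hints(const char *path)
{
	GF_Err e;
	GF_ISOFile *file = gf_isom_open(path, GF_ISOM_OPEN_EDIT, NULL);
	if (!file) return GF_IO_ERR;

	/* write session-level SDP: a b=AS bandwidth of 512 and an ISMA-style IOD embedded as base64 */
	e = gf_hinter_finalize(file, GF_SDP_IOD_ISMA, 512);

	if (e) gf_isom_delete(file);   /* discard edits on error */
	else e = gf_isom_close(file);  /* write the updated movie */
	return e;
}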
Example #17
0
/* Rewrite mode:
 * mode = 0: playback
 * mode = 1: streaming
 */
GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 sampleNumber, GF_MPEGVisualSampleEntryBox *entry)
{
	Bool is_hevc = 0;
	GF_Err e = GF_OK;
	GF_ISOSample *ref_samp;
	GF_BitStream *src_bs, *ref_bs, *dst_bs;
	u64 offset;
	u32 ref_nalu_size, data_offset, data_length, copy_size, nal_size, max_size, di, nal_unit_size_field, cur_extract_mode, extractor_mode;
	Bool rewrite_ps, rewrite_start_codes;
	u8 ref_track_ID, ref_track_num;
	s8 sample_offset, nal_type;
	u32 nal_hdr;
	char *buffer;
	GF_ISOFile *file = mdia->mediaTrack->moov->mov;

	src_bs = ref_bs = dst_bs = NULL;
	ref_samp = NULL;
	buffer = NULL;
	rewrite_ps = (mdia->mediaTrack->extractor_mode & GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG) ? 1 : 0;
	if (! sample->IsRAP) rewrite_ps = 0;
	rewrite_start_codes = (mdia->mediaTrack->extractor_mode & GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG) ? 1 : 0;
	extractor_mode = mdia->mediaTrack->extractor_mode&0x0000FFFF;

	if (extractor_mode == GF_ISOM_NALU_EXTRACT_INSPECT) {
		if (!rewrite_ps && !rewrite_start_codes)
			return GF_OK;
	}

	if (!entry) return GF_BAD_PARAM;
	nal_unit_size_field = 0;
	/*if SVC, rewrite*/
	if (entry->svc_config && entry->svc_config->config) nal_unit_size_field = entry->svc_config->config->nal_unit_size;
	/*if MVC, rewrite*/

	/*otherwise do nothing*/
	else if (!rewrite_ps && !rewrite_start_codes) {
		return GF_OK;
	}

	if (!nal_unit_size_field) {
		if (entry->avc_config) nal_unit_size_field = entry->avc_config->config->nal_unit_size;
		else if (entry->hevc_config) {
			nal_unit_size_field = entry->hevc_config->config->nal_unit_size;
			is_hevc = 1;
		}
	}
	if (!nal_unit_size_field) return GF_ISOM_INVALID_FILE;
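	/* nal_unit_size_field is the byte length (1, 2 or 4) of the size prefix preceding
	   every NALU in the sample payload, as signalled in the AVC/SVC or HEVC decoder
	   configuration; it drives all NALU parsing and rewriting below */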

	dst_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	src_bs = gf_bs_new(sample->data, sample->dataLength, GF_BITSTREAM_READ);
	max_size = 4096;

	/*rewrite start code with NALU delim*/
	if (rewrite_start_codes) {
		gf_bs_write_int(dst_bs, 1, 32);
		if (is_hevc) {
			gf_bs_write_int(dst_bs, 0, 1);
			gf_bs_write_int(dst_bs, GF_HEVC_NALU_ACCESS_UNIT, 6);
			gf_bs_write_int(dst_bs, 0, 9);
			/*pic-type - by default we signal all slice types possible*/
			gf_bs_write_int(dst_bs, 2, 3);
			gf_bs_write_int(dst_bs, 0, 5);
		} else {
			gf_bs_write_int(dst_bs, (sample->data[0] & 0x60) | GF_AVC_NALU_ACCESS_UNIT, 8);
			gf_bs_write_int(dst_bs, 0xF0, 8); /*primary_pic_type=7 "any slice type" (111) + rbsp trailing bits (10000)*/
		}
	}

	if (rewrite_ps) {
		if (is_hevc) {
			u32 i, count;
			count = gf_list_count(entry->hevc_config->config->param_array);
			for (i=0; i<count; i++) {
				GF_HEVCParamArray *ar = gf_list_get(entry->hevc_config->config->param_array, i);
				rewrite_nalus_list(ar->nalus, dst_bs, rewrite_start_codes, nal_unit_size_field);
			}

			/*little optimization if we are not asked to rewrite start codes: copy over the sample*/
			if (!rewrite_start_codes) {
				gf_bs_write_data(dst_bs, sample->data, sample->dataLength);
				gf_free(sample->data);
				sample->data = NULL;
				gf_bs_get_content(dst_bs, &sample->data, &sample->dataLength);
				gf_bs_del(src_bs);
				gf_bs_del(dst_bs);
				return GF_OK;
			}
		} else {

			/*this is an SVC track: get all SPS/PPS from this track down to the base layer and rewrite them*/
			if (mdia->mediaTrack->has_base_layer) {
				u32 j;
				GF_List *nalu_sps = gf_list_new();
				GF_List *nalu_pps = gf_list_new();
				GF_TrackReferenceTypeBox *dpnd = NULL;
				Track_FindRef(mdia->mediaTrack, GF_ISOM_REF_SCAL, &dpnd);

#if 0
				/*get all upper layers with SCAL reference to this track*/
				for (j = 0; j < gf_isom_get_track_count(file); j++) {
					if (gf_isom_has_track_reference(file, j+1, GF_ISOM_REF_SCAL, mdia->mediaTrack->Header->trackID)) {
						u32 tkID;
						GF_TrackBox *base_track;
						GF_MPEGVisualSampleEntryBox *base_entry;
						gf_isom_get_reference_ID(file, j+1, GF_ISOM_REF_SCAL, 1, &tkID);

						base_track = GetTrackbyID(mdia->mediaTrack->moov, tkID);
						base_entry = base_track ? gf_list_get(base_track->Media->information->sampleTable->SampleDescription->other_boxes, 0) : NULL;
						if (base_entry)
							merge_nalus(base_entry, nalu_sps, nalu_pps);
					}
				}

#endif

				merge_nalus(entry, nalu_sps, nalu_pps);
				if (dpnd) {
					for (j=0; j<dpnd->trackIDCount; j++) {
						GF_TrackBox *base_track = GetTrackbyID(mdia->mediaTrack->moov, dpnd->trackIDs[j]);
						GF_MPEGVisualSampleEntryBox *base_entry = base_track ? gf_list_get(base_track->Media->information->sampleTable->SampleDescription->other_boxes, 0) : NULL;
						if (base_entry)
							merge_nalus(base_entry, nalu_sps, nalu_pps);
					}
				}

				//rewrite nalus
				rewrite_nalus_list(nalu_sps, dst_bs, rewrite_start_codes, nal_unit_size_field);
				rewrite_nalus_list(nalu_pps, dst_bs, rewrite_start_codes, nal_unit_size_field);

				gf_list_del(nalu_sps);
				gf_list_del(nalu_pps);
			} else {

				if (entry->avc_config) {
					rewrite_nalus_list(entry->avc_config->config->sequenceParameterSets, dst_bs, rewrite_start_codes, nal_unit_size_field);
					rewrite_nalus_list(entry->avc_config->config->pictureParameterSets, dst_bs, rewrite_start_codes, nal_unit_size_field);
					rewrite_nalus_list(entry->avc_config->config->sequenceParameterSetExtensions, dst_bs, rewrite_start_codes, nal_unit_size_field);
				}

				/*add svc config */
				if (entry->svc_config) {
					rewrite_nalus_list(entry->svc_config->config->sequenceParameterSets, dst_bs, rewrite_start_codes, nal_unit_size_field);
					rewrite_nalus_list(entry->svc_config->config->pictureParameterSets, dst_bs, rewrite_start_codes, nal_unit_size_field);
				}

				/*little optimization if we are not asked to rewrite extractors or start codes: copy over the sample*/
				if (!entry->svc_config && !rewrite_start_codes) {
					gf_bs_write_data(dst_bs, sample->data, sample->dataLength);
					gf_free(sample->data);
					sample->data = NULL;
					gf_bs_get_content(dst_bs, &sample->data, &sample->dataLength);
					gf_bs_del(src_bs);
					gf_bs_del(dst_bs);
					return GF_OK;
				}

			}
		}
	}

	buffer = (char *)gf_malloc(sizeof(char)*max_size);
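	/* walk every size-prefixed NALU of the payload: AU delimiters are skipped (one was
	   already emitted above when start codes are requested), extractors (type 31) are
	   resolved against the referenced track or skipped depending on extractor_mode, and
	   every other NALU is written out with either an Annex B start code or its size prefix */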

	while (gf_bs_available(src_bs))
	{
		nal_size = gf_bs_read_int(src_bs, 8*nal_unit_size_field);
		if (nal_size>max_size) {
			buffer = (char*) gf_realloc(buffer, sizeof(char)*nal_size);
			max_size = nal_size;
		}
		if (is_hevc) {
			nal_hdr = gf_bs_read_u16(src_bs);
			nal_type = (nal_hdr&0x7E00) >> 9;
		} else {
			nal_hdr = gf_bs_read_u8(src_bs);
			nal_type = nal_hdr & 0x1F;
		}

		if (is_hevc) {
			/*skip the AU delimiter in the source; one was already written above when start codes are requested*/
			if (nal_type==GF_HEVC_NALU_ACCESS_UNIT) {
				gf_bs_skip_bytes(src_bs, nal_size-2);
				continue;
			}

			/*rewrite nal*/
			gf_bs_read_data(src_bs, buffer, nal_size-2);
			if (rewrite_start_codes)
				gf_bs_write_u32(dst_bs, 1);
			else
				gf_bs_write_int(dst_bs, nal_size, 8*nal_unit_size_field);

			gf_bs_write_u16(dst_bs, nal_hdr);
			gf_bs_write_data(dst_bs, buffer, nal_size-2);

			continue;
		}

		/*skip the AU delimiter in the source; one was already written above when start codes are requested*/
		if (nal_type==GF_AVC_NALU_ACCESS_UNIT) {
			gf_bs_skip_bytes(src_bs, nal_size-1);
			continue;
		}

		//extractor NALU (type 31, ISO/IEC 14496-15)
		if (nal_type == 31) {
			switch (extractor_mode) {
			case 0:
				gf_bs_read_int(src_bs, 24); //3 bytes of NALUHeader in extractor
				ref_track_ID = gf_bs_read_u8(src_bs);
				sample_offset = (s8) gf_bs_read_int(src_bs, 8);
				data_offset = gf_bs_read_u32(src_bs);
				data_length = gf_bs_read_u32(src_bs);

				ref_track_num = gf_isom_get_track_by_id(file, ref_track_ID);
				if (!ref_track_num) {
					e = GF_BAD_PARAM;
					goto exit;
				}
				cur_extract_mode = gf_isom_get_nalu_extract_mode(file, ref_track_num);
				gf_isom_set_nalu_extract_mode(file, ref_track_num, GF_ISOM_NALU_EXTRACT_INSPECT);
				ref_samp = gf_isom_get_sample(file, ref_track_num, sampleNumber+sample_offset, &di);
				if (!ref_samp) {
					e = GF_IO_ERR;
					goto exit;
				}
				ref_bs = gf_bs_new(ref_samp->data, ref_samp->dataLength, GF_BITSTREAM_READ);
				offset = 0;
				while (gf_bs_available(ref_bs)) {
					if (gf_bs_get_position(ref_bs) < data_offset) {
						ref_nalu_size = gf_bs_read_int(ref_bs, 8*nal_unit_size_field);
						offset += ref_nalu_size + nal_unit_size_field;
						if ((offset > data_offset) || (offset >= gf_bs_get_size(ref_bs))) {
							e = GF_BAD_PARAM;
							goto exit;
						}

						e = gf_bs_seek(ref_bs, offset);
						if (e)
							goto exit;
						continue;
					}
					ref_nalu_size = gf_bs_read_int(ref_bs, 8*nal_unit_size_field);
					copy_size = data_length ? data_length : ref_nalu_size;
					assert(copy_size <= ref_nalu_size);
					nal_hdr = gf_bs_read_u8(ref_bs); //NAL header of the referenced NALU, copied to the output below
					if ((copy_size-1)>max_size) {
						buffer = (char*)gf_realloc(buffer, sizeof(char)*(copy_size-1));
						max_size = copy_size-1;
					}
					gf_bs_read_data(ref_bs, buffer, copy_size-1);

					if (rewrite_start_codes)
						gf_bs_write_u32(dst_bs, 1);
					else
						gf_bs_write_int(dst_bs, copy_size, 8*nal_unit_size_field);

					gf_bs_write_u8(dst_bs, nal_hdr);
					gf_bs_write_data(dst_bs, buffer, copy_size-1);
				}

				gf_isom_sample_del(&ref_samp);
				ref_samp = NULL;
				gf_bs_del(ref_bs);
				ref_bs = NULL;
				gf_isom_set_nalu_extract_mode(file, ref_track_num, cur_extract_mode);
				break;
			default:
				//skip to end of this NALU
				gf_bs_skip_bytes(src_bs, nal_size-1);
				continue;
			}
		} else {
			gf_bs_read_data(src_bs, buffer, nal_size-1);
			if (rewrite_start_codes)
				gf_bs_write_u32(dst_bs, 1);
			else
				gf_bs_write_int(dst_bs, nal_size, 8*nal_unit_size_field);

			gf_bs_write_u8(dst_bs, nal_hdr);
			gf_bs_write_data(dst_bs, buffer, nal_size-1);
		}
	}