Example #1
static void recompute_bitrate_mp4( GF_ISOFile *p_file, int i_track )
{
    u32 count, di, timescale, time_wnd, rate;
    u64 offset;
    Double br;
    GF_ESD *esd;

    esd = gf_isom_get_esd( p_file, i_track, 1 );
    if( !esd )
        return;

    esd->decoderConfig->avgBitrate = 0;
    esd->decoderConfig->maxBitrate = 0;
    rate = time_wnd = 0;

    timescale = gf_isom_get_media_timescale( p_file, i_track );
    count = gf_isom_get_sample_count( p_file, i_track );
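    /* Walk the samples once: accumulate total bytes for the average bitrate and
     * track the largest byte count seen over any one-second window of DTS
     * (one media timescale's worth) for the max bitrate. */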
    for( u32 i = 0; i < count; i++ )
    {
        GF_ISOSample *samp = gf_isom_get_sample_info( p_file, i_track, i+1, &di, &offset );
        if( !samp )
        {
            x264_cli_log( "mp4", X264_LOG_ERROR, "failure reading back frame %u\n", i );
            break;
        }

        if( esd->decoderConfig->bufferSizeDB < samp->dataLength )
            esd->decoderConfig->bufferSizeDB = samp->dataLength;

        esd->decoderConfig->avgBitrate += samp->dataLength;
        rate += samp->dataLength;
        if( samp->DTS > time_wnd + timescale )
        {
            if( rate > esd->decoderConfig->maxBitrate )
                esd->decoderConfig->maxBitrate = rate;
            time_wnd = samp->DTS;
            rate = 0;
        }

        gf_isom_sample_del( &samp );
    }

    br = (Double)(s64)gf_isom_get_media_duration( p_file, i_track );
    br /= timescale;
    esd->decoderConfig->avgBitrate = (u32)(esd->decoderConfig->avgBitrate / br);
    /*move to bps*/
    esd->decoderConfig->avgBitrate *= 8;
    esd->decoderConfig->maxBitrate *= 8;

    gf_isom_change_mpeg4_description( p_file, i_track, 1, esd );
    gf_odf_desc_del( (GF_Descriptor*)esd );
}
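
A minimal sketch of the calling context, assuming this helper sits in an x264-style MP4 muxer: the bitrate fields are refreshed after the last frame is written, just before the movie is finalized. The wrapper function name and the visual profile/level value are illustrative assumptions, not taken from the example above.

/* Hypothetical close path for the muxer (names and values are assumptions) */
static void close_mp4_output( GF_ISOFile *p_file, int i_track )
{
    recompute_bitrate_mp4( p_file, i_track );
    /* assumption: H.264 content, so signal an AVC visual profile/level */
    gf_isom_set_pl_indication( p_file, GF_ISOM_PL_VISUAL, 0x15 );
    gf_isom_set_storage_mode( p_file, GF_ISOM_STORE_FLAT );
    gf_isom_close( p_file );
}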
Example #2
GF_EXPORT
GF_Err gf_hinter_finalize(GF_ISOFile *file, u32 IOD_Profile, u32 bandwidth)
{
	u32 i, sceneT, odT, descIndex, size, size64;
	GF_InitialObjectDescriptor *iod;
	GF_SLConfig slc;
	GF_ESD *esd;
	GF_ISOSample *samp;
	Bool remove_ocr;
	char *buffer;
	char buf64[5000], sdpLine[2300];


	gf_isom_sdp_clean(file);

	if (bandwidth) {
		sprintf(buf64, "b=AS:%d", bandwidth);
		gf_isom_sdp_add_line(file, buf64);
	}
	//extended attribute for copyright
	sprintf(buf64, "a=x-copyright: %s", "MP4/3GP File hinted with GPAC " GPAC_FULL_VERSION " (C)2000-2005 - http://gpac.sourceforge.net");
	gf_isom_sdp_add_line(file, buf64);

	if (IOD_Profile == GF_SDP_IOD_NONE) return GF_OK;

	odT = sceneT = 0;
	for (i=0; i<gf_isom_get_track_count(file); i++) {
		if (!gf_isom_is_track_in_root_od(file, i+1)) continue;
		switch (gf_isom_get_media_type(file,i+1)) {
		case GF_ISOM_MEDIA_OD:
			odT = i+1;
			break;
		case GF_ISOM_MEDIA_SCENE:
			sceneT = i+1;
			break;
		}
	}
	remove_ocr = 0;
	if (IOD_Profile == GF_SDP_IOD_ISMA_STRICT) {
		IOD_Profile = GF_SDP_IOD_ISMA;
		remove_ocr = 1;
	}

	/*if we want ISMA like iods, we need at least BIFS */
	if ( (IOD_Profile == GF_SDP_IOD_ISMA) && !sceneT ) return GF_BAD_PARAM;

	/*do NOT change PLs, we assume they are correct*/
	iod = (GF_InitialObjectDescriptor *) gf_isom_get_root_od(file);
	if (!iod) return GF_NOT_SUPPORTED;

	/*rewrite an IOD with good SL config - embed data if possible*/
	if (IOD_Profile == GF_SDP_IOD_ISMA) {
		Bool is_ok = 1;
		while (gf_list_count(iod->ESDescriptors)) {
			esd = (GF_ESD*)gf_list_get(iod->ESDescriptors, 0);
			gf_odf_desc_del((GF_Descriptor *) esd);
			gf_list_rem(iod->ESDescriptors, 0);
		}


		/*get OD esd, and embed stream data if possible*/
		if (odT) {
			esd = gf_isom_get_esd(file, odT, 1);
			if (gf_isom_get_sample_count(file, odT)==1) {
				samp = gf_isom_get_sample(file, odT, 1, &descIndex);
				if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_OD)) {
					InitSL_NULL(&slc);
					slc.predefined = 0;
					slc.hasRandomAccessUnitsOnlyFlag = 1;
					slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, odT);	
					slc.OCRResolution = 1000;
					slc.startCTS = samp->DTS+samp->CTS_Offset;
					slc.startDTS = samp->DTS;
					//set the SL for future extraction
					gf_isom_set_extraction_slc(file, odT, 1, &slc);

					size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
					buf64[size64] = 0;
					sprintf(sdpLine, "data:application/mpeg4-od-au;base64,%s", buf64);

					esd->decoderConfig->avgBitrate = 0;
					esd->decoderConfig->bufferSizeDB = samp->dataLength;
					esd->decoderConfig->maxBitrate = 0;
					size64 = strlen(sdpLine)+1;
					esd->URLString = (char*)gf_malloc(sizeof(char) * size64);
					strcpy(esd->URLString, sdpLine);
				} else {
					GF_LOG(GF_LOG_WARNING, GF_LOG_RTP, ("[rtp hinter] OD sample too large to be embedded in IOD - ISMA disabled\n"));
					is_ok = 0;
				}
				gf_isom_sample_del(&samp);
			}
			if (remove_ocr) esd->OCRESID = 0;
			else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;
			
			//OK, add this to our IOD
			gf_list_add(iod->ESDescriptors, esd);
		}

		esd = gf_isom_get_esd(file, sceneT, 1);
		if (gf_isom_get_sample_count(file, sceneT)==1) {
			samp = gf_isom_get_sample(file, sceneT, 1, &descIndex);
			if (gf_hinter_can_embbed_data(samp->data, samp->dataLength, GF_STREAM_SCENE)) {

				slc.timeScale = slc.timestampResolution = gf_isom_get_media_timescale(file, sceneT);	
				slc.OCRResolution = 1000;
				slc.startCTS = samp->DTS+samp->CTS_Offset;
				slc.startDTS = samp->DTS;
				//set the SL for future extraction
				gf_isom_set_extraction_slc(file, sceneT, 1, &slc);
				//encode in Base64 the sample
				size64 = gf_base64_encode(samp->data, samp->dataLength, buf64, 2000);
				buf64[size64] = 0;
				sprintf(sdpLine, "data:application/mpeg4-bifs-au;base64,%s", buf64);

				esd->decoderConfig->avgBitrate = 0;
				esd->decoderConfig->bufferSizeDB = samp->dataLength;
				esd->decoderConfig->maxBitrate = 0;
				esd->URLString = (char*)gf_malloc(sizeof(char) * (strlen(sdpLine)+1));
				strcpy(esd->URLString, sdpLine);
			} else {
				GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Scene description sample too large to be embedded in IOD - ISMA disabled\n"));
				is_ok = 0;
			}
			gf_isom_sample_del(&samp);
		}
		if (remove_ocr) esd->OCRESID = 0;
		else if (esd->OCRESID == esd->ESID) esd->OCRESID = 0;

		gf_list_add(iod->ESDescriptors, esd);

		if (is_ok) {
			u32 has_a, has_v, has_i_a, has_i_v;
			has_a = has_v = has_i_a = has_i_v = 0;
			for (i=0; i<gf_isom_get_track_count(file); i++) {
				esd = gf_isom_get_esd(file, i+1, 1);
				if (!esd) continue;
				if (esd->decoderConfig->streamType==GF_STREAM_VISUAL) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_MPEG4_PART2) has_i_v ++;
					else has_v++;
				} else if (esd->decoderConfig->streamType==GF_STREAM_AUDIO) {
					if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_AUDIO_AAC_MPEG4) has_i_a ++;
					else has_a++;
				}
				gf_odf_desc_del((GF_Descriptor *)esd);
			}
			/*only 1 MPEG-4 visual max and 1 MPEG-4 audio max for ISMA compliancy*/
			if (!has_v && !has_a && (has_i_v<=1) && (has_i_a<=1)) {
				sprintf(sdpLine, "a=isma-compliance:1,1.0,1");
				gf_isom_sdp_add_line(file, sdpLine);
			}
		}
	}

	//encode the IOD
	buffer = NULL;
	size = 0;
	gf_odf_desc_write((GF_Descriptor *) iod, &buffer, &size);
	gf_odf_desc_del((GF_Descriptor *)iod);

	//encode in Base64 the iod
	size64 = gf_base64_encode(buffer, size, buf64, 2000);
	buf64[size64] = 0;
	gf_free(buffer);

	sprintf(sdpLine, "a=mpeg4-iod:\"data:application/mpeg4-iod;base64,%s\"", buf64);
	gf_isom_sdp_add_line(file, sdpLine);

	return GF_OK;
}
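
A short sketch of how this finalization step is usually driven once every media track has been hinted; the wrapper name, bandwidth value and choice of GF_SDP_IOD_ISMA are assumptions for illustration.

/* Sketch: after hinting all tracks, write the session-level SDP and the IOD */
GF_Err finish_hinting(GF_ISOFile *file, u32 total_bandwidth_kbps)
{
	/* GF_SDP_IOD_ISMA embeds the OD/BIFS access units in the IOD when they are small enough */
	return gf_hinter_finalize(file, GF_SDP_IOD_ISMA, total_bandwidth_kbps);
}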
Example #3
GF_EXPORT
GF_Err gf_hinter_track_finalize(GF_RTPHinter *tkHint, Bool AddSystemInfo)
{
	u32 Width, Height;
	GF_ESD *esd;
	char sdpLine[20000];
	char mediaName[30], payloadName[30];

	Width = Height = 0;
	gf_isom_sdp_clean_track(tkHint->file, tkHint->TrackNum);
	if (gf_isom_get_media_type(tkHint->file, tkHint->TrackNum) == GF_ISOM_MEDIA_VISUAL)
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

	gf_rtp_builder_get_payload_name(tkHint->rtp_p, payloadName, mediaName);

	/*TODO- extract out of rtp_p for future live tools*/
	sprintf(sdpLine, "m=%s 0 RTP/%s %d", mediaName, tkHint->rtp_p->slMap.IV_length ? "SAVP" : "AVP", tkHint->rtp_p->PayloadType);
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	if (tkHint->bandwidth) {
		sprintf(sdpLine, "b=AS:%d", tkHint->bandwidth);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (tkHint->nb_chan) {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution, tkHint->nb_chan);
	} else {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution);
	}
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	/*control for MPEG-4*/
	if (AddSystemInfo) {
		sprintf(sdpLine, "a=mpeg4-esid:%d", gf_isom_get_track_id(tkHint->file, tkHint->TrackNum));
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*control for QTSS/DSS*/
	sprintf(sdpLine, "a=control:trackID=%d", gf_isom_get_track_id(tkHint->file, tkHint->HintTrack));
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);

	/*H263 extensions*/
	if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H263) {
		sprintf(sdpLine, "a=cliprect:0,0,%d,%d", Height, Width);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*AMR*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR) || (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1", tkHint->rtp_p->PayloadType);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*Text*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(tkHint->rtp_p, payloadName, sdpLine, tkHint->file, tkHint->TrackNum);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (tkHint->rtp_p->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d", tkHint->rtp_p->PayloadType, tkHint->rtp_p->auh_size*20);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*H264/AVC*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H264_AVC) {
		GF_AVCConfig *avcc = gf_isom_avc_config_get(tkHint->file, tkHint->TrackNum, 1);
		sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", tkHint->rtp_p->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
		if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
			u32 i, count, b64s;
			char b64[200];
			strcat(sdpLine, "; sprop-parameter-sets=");
			count = gf_list_count(avcc->sequenceParameterSets);
			for (i=0; i<count; i++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
				b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
				b64[b64s]=0;
				strcat(sdpLine, b64);
				if (i+1<count) strcat(sdpLine, ",");
			}
			if (i) strcat(sdpLine, ",");
			count = gf_list_count(avcc->pictureParameterSets);
			for (i=0; i<count; i++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
				b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
				b64[b64s]=0;
				strcat(sdpLine, b64);
				if (i+1<count) strcat(sdpLine, ",");
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
		gf_odf_avc_cfg_del(avcc);
	}
	/*MPEG-4 decoder config*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_MPEG4) {
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);

		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		} else {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, NULL, 0);
		}
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

		if (tkHint->rtp_p->slMap.IV_length) {
			const char *kms;
			gf_isom_get_ismacryp_info(tkHint->file, tkHint->TrackNum, 1, NULL, NULL, NULL, NULL, &kms, NULL, NULL, NULL);
			if (!strnicmp(kms, "(key)", 5) || !strnicmp(kms, "(ipmp)", 6) || !strnicmp(kms, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, kms);
		}

		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*MPEG-4 Audio LATM*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 
 
		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 
 
		/* audio-specific config */ 
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1); 
		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo) { 
			/*PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
			gf_bs_write_data(bs, esd->decoderConfig->decoderSpecificInfo->data, MIN(esd->decoderConfig->decoderSpecificInfo->dataLength, 2) ); 
		} 
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd); 
 
		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 
 
		gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, config_bytes, config_size); 
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine); 
		gf_free(config_bytes); 
	}
	/*3GPP DIMS*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) { 
		GF_DIMSDescription dims;
		char fmt[200];
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

		gf_isom_get_dims_description(tkHint->file, tkHint->TrackNum, 1, &dims);
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", tkHint->rtp_p->PayloadType, dims.profile);
		if (! dims.fullRequestHost) {
			strcat(sdpLine, ";useFullRequestHost=0");
			sprintf(fmt, ";pathComponents=%d", dims.pathComponents);
			strcat(sdpLine, fmt);
		}
		if (!dims.streamType) strcat(sdpLine, ";stream-type=secondary");
		if (dims.containsRedundant == 1) strcat(sdpLine, ";contains-redundant=main");
		else if (dims.containsRedundant == 2) strcat(sdpLine, ";contains-redundant=redundant");

		if (dims.textEncoding && strlen(dims.textEncoding)) {
			strcat(sdpLine, ";text-encoding=");
			strcat(sdpLine, dims.textEncoding);
		}
		if (dims.contentEncoding && strlen(dims.contentEncoding)) {
			strcat(sdpLine, ";content-coding=");
			strcat(sdpLine, dims.contentEncoding);
		}
		if (dims.content_script_types && strlen(dims.content_script_types) ) {
			strcat(sdpLine, ";content-script-types=");
			strcat(sdpLine, dims.content_script_types);
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*extensions for some mobile phones*/
	if (Width && Height) {
		sprintf(sdpLine, "a=framesize:%d %d-%d", tkHint->rtp_p->PayloadType, Width, Height);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}

	esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);
	if (esd && esd->decoderConfig && (esd->decoderConfig->rvc_config || esd->decoderConfig->predefined_rvc_config)) {
		if (esd->decoderConfig->predefined_rvc_config) {
			sprintf(sdpLine, "a=rvc-config-predef:%d", esd->decoderConfig->predefined_rvc_config);
		} else {
			/*temporary ...*/
			if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_avc.xml");
			} else {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_sp.xml");
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

	gf_isom_set_track_enabled(tkHint->file, tkHint->HintTrack, 1);
	return GF_OK;
}
Example #4
GF_EXPORT
GF_RTPHinter *gf_hinter_track_new(GF_ISOFile *file, u32 TrackNum, 
							u32 Path_MTU, u32 max_ptime, u32 default_rtp_rate, u32 flags, u8 PayloadID, 
							Bool copy_media, u32 InterleaveGroupID, u8 InterleaveGroupPriority, GF_Err *e)
{

	GF_SLConfig my_sl;
	u32 descIndex, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, maxDTSDelta;
	u8 OfficialPayloadID;
	u32 TrackMediaSubType, TrackMediaType, hintType, nbEdts, required_rate, force_dts_delta, avc_nalu_size, PL_ID, bandwidth, IV_length, KI_length;
	const char *url, *urn;
	char *mpeg4mode;
	Bool is_crypted, has_mpeg4_mapping;
	GF_RTPHinter *tmp;
	GF_ESD *esd;

	*e = GF_BAD_PARAM;
	if (!file || !TrackNum || !gf_isom_get_track_id(file, TrackNum)) return NULL;

	if (!gf_isom_get_sample_count(file, TrackNum)) {
		*e = GF_OK;
		return NULL;
	}
	*e = GF_NOT_SUPPORTED;
	nbEdts = gf_isom_get_edit_segment_count(file, TrackNum);
	if (nbEdts>1) {
		u64 et, sd, mt;
		u8 em;
		gf_isom_get_edit_segment(file, TrackNum, 1, &et, &sd, &mt, &em);
		if ((nbEdts>2) || (em!=GF_ISOM_EDIT_EMPTY)) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Cannot hint track with an EditList\n"));
			return NULL;
		}
	}
	if (nbEdts) gf_isom_remove_edit_segments(file, TrackNum);

	if (!gf_isom_is_track_enabled(file, TrackNum)) return NULL;

	/*by default NO PL signaled*/
	PL_ID = 0;
	OfficialPayloadID = 0;
	force_dts_delta = 0;
	streamType = oti = 0;
	mpeg4mode = NULL;
	required_rate = 0;
	is_crypted = 0;
	IV_length = KI_length = 0;
	oti = 0;
	nb_ch = 0;
	avc_nalu_size = 0;
	has_mpeg4_mapping = 1;
	TrackMediaType = gf_isom_get_media_type(file, TrackNum);
	TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);
	
	/*for max compatibility with QT*/
	if (!default_rtp_rate) default_rtp_rate = 90000;

	/*timed-text is a bit special, we support multiple stream descriptions & co*/
	if ( (TrackMediaType==GF_ISOM_MEDIA_TEXT) || (TrackMediaType==GF_ISOM_MEDIA_SUBT)) {
		hintType = GF_RTP_PAYT_3GPP_TEXT;
		oti = GPAC_OTI_TEXT_MPEG4;
		streamType = GF_STREAM_TEXT;
		/*fixme - this works because there's only one PL for text in mpeg4 at the current time*/
		PL_ID = 0x10;
	} else {
		if (gf_isom_get_sample_description_count(file, TrackNum) > 1) return NULL;

		TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);
		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP: 
			is_crypted = 1;
		case GF_ISOM_SUBTYPE_MPEG4:
			esd = gf_isom_get_esd(file, TrackNum, 1);
			hintType = GF_RTP_PAYT_MPEG4;
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;
				if (esd->URLString) hintType = 0;
				/*AAC*/
				if ((streamType==GF_STREAM_AUDIO) && esd->decoderConfig->decoderSpecificInfo
				/*(nb: we use mpeg4 for MPEG-2 AAC)*/
				&& ((oti==GPAC_OTI_AUDIO_AAC_MPEG4) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_MP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_LCP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_SSRP)) ) {

					u32 sample_rate;
					GF_M4ADecSpecInfo a_cfg;
					gf_m4a_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &a_cfg);
					nb_ch = a_cfg.nb_chan;
					sample_rate = a_cfg.base_sr;
					PL_ID = a_cfg.audioPL;
					switch (a_cfg.base_object_type) {
					case GF_M4A_AAC_MAIN:
					case GF_M4A_AAC_LC:
						if (flags & GP_RTP_PCK_USE_LATM_AAC) {
							hintType = GF_RTP_PAYT_LATM;
							break;
						}
					case GF_M4A_AAC_SBR:
					case GF_M4A_AAC_PS:
					case GF_M4A_AAC_LTP:
					case GF_M4A_AAC_SCALABLE:
					case GF_M4A_ER_AAC_LC:
					case GF_M4A_ER_AAC_LTP:
					case GF_M4A_ER_AAC_SCALABLE:
						mpeg4mode = "AAC";
						break;
					case GF_M4A_CELP:
					case GF_M4A_ER_CELP:
						mpeg4mode = "CELP";
						break;
					}
					required_rate = sample_rate;
				}
				/*MPEG1/2 audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_MPEG2_PART3) || (oti==GPAC_OTI_AUDIO_MPEG1))) {
					u32 sample_rate;
					if (!is_crypted) {
						GF_ISOSample *samp = gf_isom_get_sample(file, TrackNum, 1, NULL);
						u32 hdr = GF_4CC((u8)samp->data[0], (u8)samp->data[1], (u8)samp->data[2], (u8)samp->data[3]);
						nb_ch = gf_mp3_num_channels(hdr);
						sample_rate = gf_mp3_sampling_rate(hdr);
						gf_isom_sample_del(&samp);
						hintType = GF_RTP_PAYT_MPEG12_AUDIO;
						/*use official RTP/AVP payload type*/
						OfficialPayloadID = 14;
						required_rate = 90000;
					}
					/*encrypted MP3 must be sent through MPEG-4 generic to signal all ISMACryp stuff*/
					else {
						u8 bps;
						gf_isom_get_audio_info(file, TrackNum, 1, &sample_rate, &nb_ch, &bps);
						required_rate = sample_rate;
					}
				}
				/*QCELP audio*/
				else if ((streamType==GF_STREAM_AUDIO) && (oti==GPAC_OTI_AUDIO_13K_VOICE)) {
					hintType = GF_RTP_PAYT_QCELP;
					OfficialPayloadID = 12;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*EVRC/SMV audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_EVRC_VOICE) || (oti==GPAC_OTI_AUDIO_SMV_VOICE)) ) {
					hintType = GF_RTP_PAYT_EVRC_SMV;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*visual streams*/
				else if (streamType==GF_STREAM_VISUAL) {
					if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) {
						GF_M4VDecSpecInfo dsi;
						gf_m4v_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &dsi);
						PL_ID = dsi.VideoPL;
					}
					/*MPEG1/2 video*/
					if ( ((oti>=GPAC_OTI_VIDEO_MPEG2_SIMPLE) && (oti<=GPAC_OTI_VIDEO_MPEG2_422)) || (oti==GPAC_OTI_VIDEO_MPEG1)) {
						if (!is_crypted) {
							hintType = GF_RTP_PAYT_MPEG12_VIDEO;
							OfficialPayloadID = 32;
						}
					}
					/*for ISMA*/
					if (is_crypted) {
						/*that's another pain with ISMACryp, even if no B-frames the DTS is signaled...*/
						if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) force_dts_delta = 22;
						else if (oti==GPAC_OTI_VIDEO_AVC) {
							flags &= ~GP_RTP_PCK_USE_MULTI;
							force_dts_delta = 22;
						}
						flags |= GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_SIGNAL_TS;
					}

					required_rate = default_rtp_rate;
				}
				/*systems streams*/
				else if (gf_isom_has_sync_shadows(file, TrackNum) || gf_isom_has_sample_dependency(file, TrackNum)) {
					flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
			break;
		case GF_ISOM_SUBTYPE_3GP_H263:
			hintType = GF_RTP_PAYT_H263;
			required_rate = 90000;
			streamType = GF_STREAM_VISUAL;
			OfficialPayloadID = 34;
			/*not 100% compliant (short header is missing) but should still work*/
			oti = GPAC_OTI_VIDEO_MPEG4_PART2;
			PL_ID = 0x01;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_AMR;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR_WB:
			required_rate = 16000;
			hintType = GF_RTP_PAYT_AMR_WB;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc = gf_isom_avc_config_get(file, TrackNum, 1);
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			hintType = GF_RTP_PAYT_H264_AVC;
			streamType = GF_STREAM_VISUAL;
			avc_nalu_size = avcc->nal_unit_size;
			oti = GPAC_OTI_VIDEO_AVC;
			PL_ID = 0x0F;
			gf_odf_avc_cfg_del(avcc);
		}
			break;
		case GF_ISOM_SUBTYPE_3GP_QCELP:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_QCELP;
			streamType = GF_STREAM_AUDIO;
			oti = GPAC_OTI_AUDIO_13K_VOICE;
			OfficialPayloadID = 12;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_EVRC:
		case GF_ISOM_SUBTYPE_3GP_SMV:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_EVRC_SMV;
			streamType = GF_STREAM_AUDIO;
			oti = (TrackMediaSubType==GF_ISOM_SUBTYPE_3GP_EVRC) ? GPAC_OTI_AUDIO_EVRC_VOICE : GPAC_OTI_AUDIO_SMV_VOICE;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_DIMS:
			hintType = GF_RTP_PAYT_3GPP_DIMS;
			streamType = GF_STREAM_SCENE;
			break;
		case GF_ISOM_SUBTYPE_AC3:
			hintType = GF_RTP_PAYT_AC3;
			streamType = GF_STREAM_AUDIO;
			gf_isom_get_audio_info(file, TrackNum, 1, NULL, &nb_ch, NULL);
			break;
		default:
			/*ERROR*/
			hintType = 0;
			break;
		}
	}

	/*not hintable*/
	if (!hintType) return NULL;
	/*we only support self-contained files for hinting*/
	gf_isom_get_data_reference(file, TrackNum, 1, &url, &urn);
	if (url || urn) return NULL;
	
	*e = GF_OUT_OF_MEM;
	GF_SAFEALLOC(tmp, GF_RTPHinter);
	if (!tmp) return NULL;

	/*override hinter type if requested and possible*/
	if (has_mpeg4_mapping && (flags & GP_RTP_PCK_FORCE_MPEG4)) {
		hintType = GF_RTP_PAYT_MPEG4;
		avc_nalu_size = 0;
	}
	/*use static payload ID if enabled*/
	else if (OfficialPayloadID && (flags & GP_RTP_PCK_USE_STATIC_ID) ) {
		PayloadID = OfficialPayloadID;
	}

	tmp->file = file;
	tmp->TrackNum = TrackNum;
	tmp->avc_nalu_size = avc_nalu_size;
	tmp->nb_chan = nb_ch;

	/*spatial scalability check*/
	tmp->has_ctts = gf_isom_has_time_offset(file, TrackNum);

	/*get sample info*/
	gf_media_get_sample_average_infos(file, TrackNum, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

	/*systems carousel: we need at least IDX and RAP signaling*/
	if (flags & GP_RTP_PCK_SYSTEMS_CAROUSEL) {
		flags |= GP_RTP_PCK_SIGNAL_RAP;
	}

	/*update flags in MultiSL*/
	if (flags & GP_RTP_PCK_USE_MULTI) {
		if (MinSize != MaxSize) flags |= GP_RTP_PCK_SIGNAL_SIZE;
		if (!const_dur) flags |= GP_RTP_PCK_SIGNAL_TS;
	}
	if (tmp->has_ctts) flags |= GP_RTP_PCK_SIGNAL_TS;

	/*default SL for RTP */
	InitSL_RTP(&my_sl);

	my_sl.timestampResolution = gf_isom_get_media_timescale(file, TrackNum);
	/*override clockrate if set*/
	if (required_rate) {
		Double sc = required_rate;
		sc /= my_sl.timestampResolution;
		maxDTSDelta = (u32) (maxDTSDelta*sc);
		my_sl.timestampResolution = required_rate;
	}
	/*switch to RTP TS*/
	max_ptime = (u32) (max_ptime * my_sl.timestampResolution / 1000);

	my_sl.AUSeqNumLength = gf_get_bit_size(gf_isom_get_sample_count(file, TrackNum));
	my_sl.CUDuration = const_dur;

	if (gf_isom_has_sync_points(file, TrackNum)) {
		my_sl.useRandomAccessPointFlag = 1;
	} else {
		my_sl.useRandomAccessPointFlag = 0;
		my_sl.hasRandomAccessUnitsOnlyFlag = 1;
	}

	if (is_crypted) {
		Bool use_sel_enc;
		gf_isom_get_ismacryp_info(file, TrackNum, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
		if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
	}

	// in case a different timescale was provided
	tmp->OrigTimeScale = gf_isom_get_media_timescale(file, TrackNum);
	tmp->rtp_p = gf_rtp_builder_new(hintType, &my_sl, flags, tmp, 
								MP4T_OnNewPacket, MP4T_OnPacketDone, 
								/*if copy, no data ref*/
								copy_media ? NULL : MP4T_OnDataRef, 
								MP4T_OnData);

	//init the builder
	gf_rtp_builder_init(tmp->rtp_p, PayloadID, Path_MTU, max_ptime,
					   streamType, oti, PL_ID, MinSize, MaxSize, avgTS, maxDTSDelta, IV_length, KI_length, mpeg4mode);

	/*ISMA compliance is a pain...*/
	if (force_dts_delta) tmp->rtp_p->slMap.DTSDeltaLength = force_dts_delta;


	/*		Hint Track Setup	*/
	tmp->TrackID = gf_isom_get_track_id(file, TrackNum);
	tmp->HintID = tmp->TrackID + 65535;
	while (gf_isom_get_track_by_id(file, tmp->HintID)) tmp->HintID++;

	tmp->HintTrack = gf_isom_new_track(file, tmp->HintID, GF_ISOM_MEDIA_HINT, my_sl.timestampResolution);
	gf_isom_setup_hint_track(file, tmp->HintTrack, GF_ISOM_HINT_RTP);
	/*create a hint description*/
	gf_isom_new_hint_description(file, tmp->HintTrack, -1, -1, 0, &descIndex);
	gf_isom_rtp_set_timescale(file, tmp->HintTrack, descIndex, my_sl.timestampResolution);

	if (hintType==GF_RTP_PAYT_MPEG4) {
		tmp->rtp_p->slMap.ObjectTypeIndication = oti;
		/*set this SL for extraction.*/
		gf_isom_set_extraction_slc(file, TrackNum, 1, &my_sl);
	}
	tmp->bandwidth = bandwidth;

	/*set interleaving*/
	gf_isom_set_track_group(file, TrackNum, InterleaveGroupID);
	if (!copy_media) {
		/*if we don't copy data set hint track and media track in the same group*/
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID);
	} else {
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID + OFFSET_HINT_GROUP_ID);
	}
	/*use user-specified priority*/
	InterleaveGroupPriority*=2;
	gf_isom_set_track_priority_in_group(file, TrackNum, InterleaveGroupPriority+1);
	gf_isom_set_track_priority_in_group(file, tmp->HintTrack, InterleaveGroupPriority);

#if 0
	/*QT FF: not setting these flags = server uses a random offset*/
	gf_isom_rtp_set_time_offset(file, tmp->HintTrack, 1, 0);
	/*we don't use the sequence offset for maintenance purposes*/
	gf_isom_rtp_set_time_sequence_offset(file, tmp->HintTrack, 1, 0);
#endif
	*e = GF_OK;
	return tmp;
}
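
A hedged sketch of the usual per-track workflow around this constructor: create the hinter, packetize the samples, write the track-level SDP, then release it. gf_hinter_track_process and gf_hinter_track_del are assumed to be the companion calls of this API, and the MTU, payload ID and flag values are illustrative only.

/* Sketch: hint one media track of an already-open movie (values are assumptions) */
GF_Err hint_one_track(GF_ISOFile *file, u32 track)
{
	GF_Err e;
	GF_RTPHinter *hinter = gf_hinter_track_new(file, track,
				1450 /*Path_MTU*/, 0 /*max_ptime*/, 0 /*default_rtp_rate -> 90000*/,
				GP_RTP_PCK_SIGNAL_RAP /*flags*/, 96 /*PayloadID*/,
				0 /*copy_media*/, 0 /*InterleaveGroupID*/, 0 /*InterleaveGroupPriority*/, &e);
	if (!hinter) return e;

	e = gf_hinter_track_process(hinter);              /*assumed: builds the hint samples*/
	if (!e) e = gf_hinter_track_finalize(hinter, 1);  /*writes the SDP lines - see Example #3*/
	gf_hinter_track_del(hinter);                      /*assumed companion destructor*/
	return e;
}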
Example #5
GF_EXPORT
GF_ISOMRTPStreamer *gf_isom_streamer_new(const char *file_name, const char *ip_dest, u16 port, Bool loop, Bool force_mpeg4, u32 path_mtu, u32 ttl, char *ifce_addr)
{
	GF_ISOMRTPStreamer *streamer;
	GF_Err e = GF_OK;
	const char *opt = NULL;
	/*GF_Config *configFile = NULL;	*/
	u32 i, max_ptime, au_sn_len;	
	u8 payt;			
	GF_ISOFile *file;
	GF_RTPTrack *track, *prev_track;
	u16 first_port;
	u32 nb_tracks;
	u32 sess_data_size;

	if (!ip_dest) ip_dest = "127.0.0.1";
	if (!port) port = 7000;
	if (!path_mtu) path_mtu = 1450;

	GF_SAFEALLOC(streamer, GF_ISOMRTPStreamer);
	streamer->dest_ip = gf_strdup(ip_dest);

	payt = 96;
	max_ptime = au_sn_len = 0;

	file = gf_isom_open(file_name, GF_ISOM_OPEN_READ, NULL);
	if (!file) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Error opening file %s: %s\n", file_name, gf_error_to_string(gf_isom_last_error(NULL))));
		return NULL;
	}

	streamer->isom = file;
	streamer->loop = loop;
	streamer->force_mpeg4_generic = force_mpeg4;
	first_port = port;

	sess_data_size = 0;
	prev_track = NULL;
	
	nb_tracks = gf_isom_get_track_count(streamer->isom);
	for (i=0; i<nb_tracks; i++) {
		u32 mediaSize, mediaDuration, flags, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, samplerate, maxDTSDelta, TrackMediaSubType, TrackMediaType, bandwidth, IV_length, KI_length, dsi_len;
		const char *url, *urn;
		char *dsi;
		Bool is_crypted;
		
		dsi_len = samplerate = streamType = oti = nb_ch = IV_length = KI_length = 0;
		is_crypted = 0;
		dsi = NULL;

		flags = 0;

		/*we only support self-contained files for hinting*/
		gf_isom_get_data_reference(streamer->isom, i+1, 1, &url, &urn);
		if (url || urn) continue;

		TrackMediaType = gf_isom_get_media_type(streamer->isom, i+1);
		TrackMediaSubType = gf_isom_get_media_subtype(streamer->isom, i+1, 1);

		switch (TrackMediaType) {
		case GF_ISOM_MEDIA_TEXT:
			break;
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_OD:
		case GF_ISOM_MEDIA_SCENE:
			if (gf_isom_get_sample_description_count(streamer->isom, i+1) > 1) continue;
			break;
		default:
			continue;
		}

		GF_SAFEALLOC(track, GF_RTPTrack);
		if (prev_track) prev_track->next = track;
		else streamer->stream = track;
		prev_track = track;

		track->track_num = i+1;

		track->nb_aus = gf_isom_get_sample_count(streamer->isom, track->track_num);
		track->timescale = gf_isom_get_media_timescale(streamer->isom, track->track_num);
		mediaDuration = (u32)(gf_isom_get_media_duration(streamer->isom, track->track_num)*1000/track->timescale); // ms
		mediaSize = (u32)gf_isom_get_media_data_size(streamer->isom, track->track_num);

		sess_data_size += mediaSize;
		if (mediaDuration > streamer->duration_ms) streamer->duration_ms = mediaDuration;

		track->port = check_next_port(streamer, first_port);
		first_port = track->port+2;

		/*init packetizer*/
		if (streamer->force_mpeg4_generic) flags = GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_FORCE_MPEG4;


		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP: 
			is_crypted = 1;
		case GF_ISOM_SUBTYPE_MPEG4:
		{	
			GF_ESD *esd = gf_isom_get_esd(streamer->isom, track->track_num, 1);
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;

				/*systems streams*/
				if (streamType==GF_STREAM_AUDIO) {
					gf_isom_get_audio_info(streamer->isom, track->track_num, 1, &samplerate, &nb_ch, NULL);
				}
				/*systems streams*/
				else if (streamType==GF_STREAM_SCENE) {
					if (gf_isom_has_sync_shadows(streamer->isom, track->track_num) || gf_isom_has_sample_dependency(streamer->isom, track->track_num))
						flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}
				
				if (esd->decoderConfig->decoderSpecificInfo) {
					dsi = esd->decoderConfig->decoderSpecificInfo->data;
					dsi_len = esd->decoderConfig->decoderSpecificInfo->dataLength;
					esd->decoderConfig->decoderSpecificInfo->data = NULL;
					esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
		}
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc = gf_isom_avc_config_get(streamer->isom, track->track_num, 1);
			track->avc_nalu_size = avcc->nal_unit_size;
			gf_odf_avc_cfg_del(avcc);
			streamType = GF_STREAM_VISUAL;
			oti = GPAC_OTI_VIDEO_AVC;
		}
			break;
		default:
			streamType = GF_STREAM_4CC;
			oti = TrackMediaSubType;
			break;
		}
		
		/*get sample info*/
		gf_media_get_sample_average_infos(streamer->isom, track->track_num, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

		if (is_crypted) {
			Bool use_sel_enc;
			gf_isom_get_ismacryp_info(streamer->isom, track->track_num, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
			if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
		}

		track->rtp = gf_rtp_streamer_new_extended(streamType, oti, track->timescale, 
									(char *) streamer->dest_ip, track->port, path_mtu, ttl, ifce_addr, 
									flags, dsi, dsi_len, 								 
									 payt, samplerate, nb_ch,
									 is_crypted, IV_length, KI_length,
									 MinSize, MaxSize, avgTS, maxDTSDelta, const_dur, bandwidth, max_ptime, au_sn_len);

		if (!track->rtp) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Could not initialize RTP streamer: %s\n", gf_error_to_string(e)));
			goto exit;
		}		

		payt++;
		track->microsec_ts_scale = 1000000;
		track->microsec_ts_scale /= gf_isom_get_media_timescale(streamer->isom, track->track_num);
	}
	return streamer;

exit:
	gf_free(streamer);
	return NULL;
} 
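
A usage sketch under the assumption that the streamer object is driven by companion calls of the same API; gf_isom_streamer_write_sdp, gf_isom_streamer_send_next_packet and gf_isom_streamer_del are assumed names and signatures and may differ between GPAC versions.

/* Sketch only: stream a file until the caller clears the flag (all companion calls are assumptions) */
void stream_file(const char *mp4_path, Bool *do_run)
{
	GF_ISOMRTPStreamer *streamer = gf_isom_streamer_new(mp4_path, "127.0.0.1", 7000,
				1 /*loop*/, 0 /*force_mpeg4*/, 1450 /*path_mtu*/, 1 /*ttl*/, NULL);
	if (!streamer) return;

	gf_isom_streamer_write_sdp(streamer, "session.sdp");   /*assumed helper*/
	while (*do_run) {
		/*assumed: sends the next due packet, sleeping up to 30 ms between sends*/
		gf_isom_streamer_send_next_packet(streamer, 0, 30);
	}
	gf_isom_streamer_del(streamer);                         /*assumed destructor*/
}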
Example #6
File: main.c Project: Bevara/GPAC
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	BIFSVID b2v;
	u16 es_id;
	Bool first_dump, needs_raw;
	u32 i, j, di, count, timescale, frameNum;
	u32 duration, cur_time;
	GF_VideoSurface fb;
	GF_Err e;
	char old_driv[1024];
	const char *test;
	char config_path[GF_MAX_PATH];
	avi_t *avi_out;
	Bool reset_fps;
	GF_ESD *esd;
	char comp[5];
	char *conv_buf;

	memset(&user, 0, sizeof(GF_User));
	if (szConfigFile && strlen(szConfigFile)) {
		user.config = gf_cfg_init(szConfigFile, NULL);
	} else {
		user.config = gf_cfg_init(NULL, NULL);
	}

	if (!user.config) {
		fprintf(stdout, "Error: Configuration File \"%s\" not found\n", szConfigFile ? szConfigFile : GPAC_CFG_FILE);
		return;
	}
	avi_out = NULL;
	conv_buf = NULL;
	esd = NULL;
	needs_raw = 0;
	test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
	user.modules = gf_modules_new((const unsigned char *) test, user.config);
	strcpy(old_driv, "raw_out");
	if (!gf_modules_get_count(user.modules)) {
		printf("Error: no modules found\n");
		goto err_exit;
	}

	/*switch driver to raw_driver*/
	test = gf_cfg_get_key(user.config, "Video", "DriverName");
	if (test) strcpy(old_driv, test);

	test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
	/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
	if (test && strstr(test, "2D")) {
		gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
		needs_raw = 1;
	}

	needs_raw = 0;
	user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D;
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

	if (needs_raw) {
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
			printf("couldn't load raw output driver (%s used)\n", test);
			goto err_exit;
		}
	}

	strcpy(config_path, "");
	if (out_dir) {
		strcat(config_path, out_dir);
		if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\");
	}
	strcat(config_path, rad_name);
	strcat(config_path, "_bifs");
	if (!dump_type) {
		strcat(config_path, ".avi");
		avi_out = AVI_open_output_file(config_path);
		comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
		if (!avi_out) goto err_exit;
	}


	for (i=0; i<gf_isom_get_track_count(file); i++) {
		esd = gf_isom_get_esd(file, i+1, 1);
		if (!esd) continue;
		if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	if (!esd) {
		printf("no bifs track found\n");
		goto err_exit;
	}

	b2v.duration = gf_isom_get_media_duration(file, i+1);
	timescale = gf_isom_get_media_timescale(file, i+1);
	es_id = (u16) gf_isom_get_track_id(file, i+1);
	e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
	if (e) {
		printf("BIFS init error %s\n", gf_error_to_string(e));
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
		goto err_exit;
	}
	if (dump_time>=0) dump_time = dump_time *1000 / timescale;

	gf_sc_set_scene(b2v.sr, b2v.sg);
	count = gf_isom_get_sample_count(file, i+1);

	reset_fps = 0;
	if (!fps) {
		fps = (Float) (count * timescale);
		fps /= (Double) (s64) b2v.duration;
		printf("Estimated BIFS FrameRate %g\n", fps);
		reset_fps = 1;
	}

	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}

	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_draw_frame(b2v.sr);

	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	if (avi_out) {
		AVI_set_video(avi_out, width, height, fps, comp);
		conv_buf = gf_malloc(sizeof(char) * width * height * 3);
	}
	printf("Dumping at BIFS resolution %d x %d\n\n", width, height);
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	cur_time = 0;

	duration = (u32)(timescale / fps);
	if (reset_fps) fps = 0;

	frameNum = 1;
	first_dump = 1;
	for (j=0; j<count; j++) {
		GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di);

		b2v.cts = samp->DTS + samp->CTS_Offset;
		/*apply command*/
		gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
		gf_isom_sample_del(&samp);

		if ((frameID>=0) && (j<(u32)frameID)) continue;
		if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue;
		/*render frame*/
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		if (first_dump) {
			gf_sc_draw_frame(b2v.sr);
			first_dump = 0;
		}

		if (fps) {
			if (cur_time > b2v.cts) continue;

			while (1) {
				printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1);
				dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum);
				frameNum++;
				cur_time += duration;
				if (cur_time > b2v.cts) break;
			}
		} else {
			dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum);
			if (frameID>=0 || dump_time>=0) break;
			frameNum++;
			printf("dumped frame %d / %d\r", j+1, count);
		}

	}
	gf_odf_desc_del((GF_Descriptor *) esd);

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);

err_exit:
	if (avi_out) AVI_close(avi_out);
	if (conv_buf) gf_free(conv_buf);
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example #7
File: main.c Project: Bevara/GPAC
/*generates an intertwined bmp from a scene file with 5 different viewpoints*/
void bifs3d_viewpoints_merger(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	char out_path[GF_MAX_PATH];
	char old_driv[1024];
	BIFSVID b2v;
	Bool needs_raw = 0;
	GF_Err e;
	GF_VideoSurface fb;
	unsigned char **rendered_frames;
	u32 nb_viewpoints = 5;
	u32 viewpoint_index;


	/* Configuration of the Rendering Capabilities */
	{
		const char *test;
		char config_path[GF_MAX_PATH];
		memset(&user, 0, sizeof(GF_User));
		user.config = gf_cfg_init(szConfigFile, NULL);

		if (!user.config) {
			fprintf(stdout, "Error: Configuration File \"%s\" not found\n", szConfigFile ? szConfigFile : GPAC_CFG_FILE);
			return;
		}

		test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
		user.modules = gf_modules_new((const unsigned char *) test, user.config);
		strcpy(old_driv, "raw_out");
		if (!gf_modules_get_count(user.modules)) {
			printf("Error: no modules found\n");
			goto err_exit;
		}

		/*switch driver to raw_driver*/
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (test) strcpy(old_driv, test);

		needs_raw = 0;
		test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
		/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
		if (test && strstr(test, "2D")) {
			gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
			needs_raw = 1;
		}
		if (needs_raw) {
			test = gf_cfg_get_key(user.config, "Video", "DriverName");
			if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
				printf("couldn't load raw output driver (%s used)\n", test);
				goto err_exit;
			}
		}
	}

	memset(&b2v, 0, sizeof(BIFSVID));
	user.init_flags = GF_TERM_NO_AUDIO;
	/* Initialization of the compositor */
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	/* Initialization of the scene graph */
	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	{
		u32 di;
		u32 track_number;
		GF_ESD *esd;
		u16 es_id;
		b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

		for (track_number=0; track_number<gf_isom_get_track_count(file); track_number++) {
			esd = gf_isom_get_esd(file, track_number+1, 1);
			if (!esd) continue;
			if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
		}
		if (!esd) {
			printf("no bifs track found\n");
			goto err_exit;
		}

		es_id = (u16) gf_isom_get_track_id(file, track_number+1);
		e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
		if (e) {
			printf("BIFS init error %s\n", gf_error_to_string(e));
			gf_odf_desc_del((GF_Descriptor *) esd);
			esd = NULL;
			goto err_exit;
		}

		{
			GF_ISOSample *samp = gf_isom_get_sample(file, track_number+1, 1, &di);
			b2v.cts = samp->DTS + samp->CTS_Offset;
			/*apply command*/
			gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
			gf_isom_sample_del(&samp);
		}

		b2v.duration = gf_isom_get_media_duration(file, track_number+1);

		gf_odf_desc_del((GF_Descriptor *) esd);

	}
	gf_sc_set_scene(b2v.sr, b2v.sg);

	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}
	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	GF_SAFEALLOC(rendered_frames, nb_viewpoints*sizeof(char *));
	for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
		GF_SAFEALLOC(rendered_frames[viewpoint_index-1], fb.width*fb.height*3);
		gf_sc_set_viewpoint(b2v.sr, viewpoint_index, NULL);
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		gf_sc_draw_frame(b2v.sr);
		strcpy(out_path, "");
		if (out_dir) {
			strcat(out_path, out_dir);
			if (out_path[strlen(out_path)-1] != '\\') strcat(out_path, "\\");
		}
		strcat(out_path, rad_name);
		strcat(out_path, "_view");
		gf_sc_get_screen_buffer(b2v.sr, &fb);
		write_bmp(&fb, out_path, viewpoint_index);
		memcpy(rendered_frames[viewpoint_index-1], fb.video_buffer, fb.width*fb.height*3);
		gf_sc_release_screen_buffer(b2v.sr, &fb);
	}

	if (width != 800 || height != 480) {
		printf("Wrong scene dimension, cannot produce output\n");
		goto err_exit;
	} else {
		u32 x, y;
		GF_VideoSurface out_fb;
		u32 bpp = 3;
		out_fb.width = 800;
		out_fb.height = 480;
		out_fb.pitch = 800*bpp;
		out_fb.pixel_format = GF_PIXEL_RGB_24;
		out_fb.is_hardware_memory = 0;
		GF_SAFEALLOC(out_fb.video_buffer, out_fb.pitch*out_fb.height);
#if 1
		for (y=0; y<out_fb.height; y++) {
			/*starting red pixel is R1, R5, R4, R3, R2, R1, R5, ... when increasing line num*/
			u32 line_shift = (5-y) % 5;
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				/* red */
				out_fb.video_buffer[offset] = rendered_frames[view_shift][offset];
				/* green */
				out_fb.video_buffer[offset+1] = rendered_frames[(view_shift+1)%5][offset+1];
				/* blue */
				out_fb.video_buffer[offset+2] = rendered_frames[(view_shift+2)%5][offset+2];
			}
		}
#else
		/*calibration*/
		for (y=0; y<out_fb.height; y++) {
			u32 line_shift = (5- y%5) % 5;
			for (x=0; x<out_fb.width; x++) {
				u32 view_shift = (line_shift+bpp*x)%5;
				u32 offset = out_fb.pitch*y + x*bpp;
				out_fb.video_buffer[offset] = ((view_shift)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+1] = ((view_shift+1)%5 == 2) ? 0xFF : 0;
				out_fb.video_buffer[offset+2] = ((view_shift+2)%5 == 2) ? 0xFF : 0;
			}
		}
#endif
		write_bmp(&out_fb, "output", 0);
	}

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);



err_exit:
	/*	if (rendered_frames) {
			for (viewpoint_index = 1; viewpoint_index <= nb_viewpoints; viewpoint_index++) {
				if (rendered_frames[viewpoint_index-1]) gf_free(rendered_frames[viewpoint_index-1]);
			}
			gf_free(rendered_frames);
		}
		if (output_merged_frame) gf_free(output_merged_frame);
	*/
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example #8
GF_Err gf_sm_load_init_isom(GF_SceneLoader *load)
{
	u32 i;
	GF_BIFSConfig *bc;
	GF_ESD *esd;
	GF_Err e;
	char *scene_msg = "MPEG-4 BIFS Scene Parsing";
	if (!load->isom) return GF_BAD_PARAM;

	/*load IOD*/
	load->ctx->root_od = (GF_ObjectDescriptor *) gf_isom_get_root_od(load->isom);
	if (!load->ctx->root_od) {
		e = gf_isom_last_error(load->isom);
		if (e) return e;
	} else if ((load->ctx->root_od->tag != GF_ODF_OD_TAG) && (load->ctx->root_od->tag != GF_ODF_IOD_TAG)) {
		gf_odf_desc_del((GF_Descriptor *) load->ctx->root_od);
		load->ctx->root_od = NULL;
	}

	esd = NULL;

	/*get root scene stream*/
	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		if (type != GF_ISOM_MEDIA_SCENE) continue;
		if (! gf_isom_is_track_in_root_od(load->isom, i+1) ) continue;
		esd = gf_isom_get_esd(load->isom, i+1, 1);

		if (esd && esd->URLString) {
			gf_odf_desc_del((GF_Descriptor *)esd);
			esd = NULL;
			continue;
		}

		/*make sure we load the root BIFS stream first*/
		if (esd && esd->dependsOnESID && (esd->dependsOnESID!=esd->ESID) ) {
			u32 track = gf_isom_get_track_by_id(load->isom, esd->dependsOnESID);
			if (gf_isom_get_media_type(load->isom, track) != GF_ISOM_MEDIA_OD) {
				gf_odf_desc_del((GF_Descriptor *)esd);
				esd = NULL;
				continue;
			}
		}
		if (!esd) continue;
		if (esd->decoderConfig->objectTypeIndication==0x09) scene_msg = "MPEG-4 LASeR Scene Parsing";
		break;
	}
	if (!esd) return GF_OK;

	e = GF_OK;
	GF_LOG(GF_LOG_INFO, GF_LOG_PARSER, ("%s\n", scene_msg));

	/*BIFS: update size & pixel metrics info*/
	if (esd->decoderConfig->objectTypeIndication<=2) {
		bc = gf_odf_get_bifs_config(esd->decoderConfig->decoderSpecificInfo, esd->decoderConfig->objectTypeIndication);
		if (!bc->elementaryMasks && bc->pixelWidth && bc->pixelHeight) {
			load->ctx->scene_width = bc->pixelWidth;
			load->ctx->scene_height = bc->pixelHeight;
			load->ctx->is_pixel_metrics = bc->pixelMetrics;
		}
		gf_odf_desc_del((GF_Descriptor *) bc);
		/*note we don't load the first BIFS AU to avoid storing the BIFS decoder, needed to properly handle quantization*/
	}
	/*LASeR*/
	else if (esd->decoderConfig->objectTypeIndication==0x09) {
		load->ctx->is_pixel_metrics = 1;
	}
	gf_odf_desc_del((GF_Descriptor *) esd);
	esd = NULL;

	load->process = gf_sm_load_run_isom;
	load->done = gf_sm_load_done_isom;
	load->suspend = gf_sm_isom_suspend;
	return GF_OK;
}
Example #9
static GF_Err gf_sm_load_run_isom(GF_SceneLoader *load)
{
	GF_Err e;
	FILE *logs;
	u32 i, j, di, nbBifs, nbLaser, nb_samp, samp_done, init_offset;
	GF_StreamContext *sc;
	GF_ESD *esd;
	GF_ODCodec *od_dec;
#ifndef GPAC_DISABLE_BIFS
	GF_BifsDecoder *bifs_dec;
#endif
#ifndef GPAC_DISABLE_LASER
	GF_LASeRCodec *lsr_dec;
#endif

	if (!load || !load->isom) return GF_BAD_PARAM;

	nbBifs = nbLaser = 0;
	e = GF_OK;
#ifndef GPAC_DISABLE_BIFS
	bifs_dec = gf_bifs_decoder_new(load->scene_graph, 1);
	gf_bifs_decoder_set_extraction_path(bifs_dec, load->localPath, load->fileName);
#endif
	od_dec = gf_odf_codec_new();
	logs = NULL;
#ifndef GPAC_DISABLE_LASER
	lsr_dec = gf_laser_decoder_new(load->scene_graph);
#endif
	esd = NULL;
	/*load each stream*/
	nb_samp = 0;
	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			break;
		default:
			break;
		}
	}
	samp_done = 1;
	gf_isom_text_set_streaming_mode(load->isom, 1);

	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			break;
		default:
			continue;
		}
		esd = gf_isom_get_esd(load->isom, i+1, 1);
		if (!esd) continue;


		if ((esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_AFX) ||
		        (esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_SYNTHESIZED_TEXTURE)
		   ) {
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			continue;
		}
		sc = gf_sm_stream_new(load->ctx, esd->ESID, esd->decoderConfig->streamType, esd->decoderConfig->objectTypeIndication);
		sc->streamType = esd->decoderConfig->streamType;
		sc->ESID = esd->ESID;
		sc->objectType = esd->decoderConfig->objectTypeIndication;
		sc->timeScale = gf_isom_get_media_timescale(load->isom, i+1);

		/*we still need to reconfig the BIFS*/
		if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
			/*BIFS*/
			if (esd->decoderConfig->objectTypeIndication<=2) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				if (!esd->decoderConfig->decoderSpecificInfo) {
					/* Hack for T-DMB non compliant streams */
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, NULL, 0, esd->decoderConfig->objectTypeIndication);
				} else {
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
				}
				if (e) goto exit;
				nbBifs++;
			}
#endif

#ifndef GPAC_DISABLE_LASER
			/*LASER*/
			if (esd->decoderConfig->objectTypeIndication==0x09) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				e = gf_laser_decoder_configure_stream(lsr_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				if (e) goto exit;
				nbLaser++;
			}
#endif
		}

		init_offset = 0;
		/*dump all AUs*/
		for (j=0; j<gf_isom_get_sample_count(load->isom, i+1); j++) {
			GF_AUContext *au;
			GF_ISOSample *samp = gf_isom_get_sample(load->isom, i+1, j+1, &di);
			if (!samp) {
				mp4_report(load, gf_isom_last_error(load->isom), "Unable to fetch sample %d from track ID %d - aborting track import", j+1, gf_isom_get_track_id(load->isom, i+1));
				break;
			}
			/*check if track has initial offset*/
			if (!j && gf_isom_get_edit_segment_count(load->isom, i+1)) {
				u64 EditTime, dur, mtime;
				u8 mode;
				gf_isom_get_edit_segment(load->isom, i+1, 1, &EditTime, &dur, &mtime, &mode);
				if (mode==GF_ISOM_EDIT_EMPTY) {
					init_offset = (u32) (dur * sc->timeScale / gf_isom_get_timescale(load->isom) );
				}
			}
			samp->DTS += init_offset;

			au = gf_sm_stream_au_new(sc, samp->DTS, ((Double)(s64) samp->DTS) / sc->timeScale, (samp->IsRAP==RAP) ? 1 : 0);

			if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
				if (esd->decoderConfig->objectTypeIndication<=2)
					e = gf_bifs_decode_command_list(bifs_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
#ifndef GPAC_DISABLE_LASER
				if (esd->decoderConfig->objectTypeIndication==0x09)
					e = gf_laser_decode_command_list(lsr_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
			} else {
				e = gf_odf_codec_set_au(od_dec, samp->data, samp->dataLength);
				if (!e) e = gf_odf_codec_decode(od_dec);
				if (!e) {
					while (1) {
						GF_ODCom *odc = gf_odf_codec_get_com(od_dec);
						if (!odc) break;
						/*update ESDs if any*/
						UpdateODCommand(load->isom, odc);
						gf_list_add(au->commands, odc);
					}
				}
			}
			gf_isom_sample_del(&samp);
			if (e) {
				mp4_report(load, gf_isom_last_error(load->isom), "decoding sample %d from track ID %d failed", j+1, gf_isom_get_track_id(load->isom, i+1));
				goto exit;
			}

			samp_done++;
			gf_set_progress("MP4 Loading", samp_done, nb_samp);
		}
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	gf_isom_text_set_streaming_mode(load->isom, 0);

exit:
#ifndef GPAC_DISABLE_BIFS
	gf_bifs_decoder_del(bifs_dec);
#endif
	gf_odf_codec_del(od_dec);
#ifndef GPAC_DISABLE_LASER
	gf_laser_decoder_del(lsr_dec);
#endif
	if (esd) gf_odf_desc_del((GF_Descriptor *) esd);
	if (logs) gf_fclose(logs);
	return e;
}
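
A hedged sketch of how these ISO-media loader callbacks are normally reached through the generic scene-manager front end; gf_sm_load_init, gf_sm_load_run and gf_sm_load_done are assumed to be the dispatching entry points, and error handling is abbreviated.

/* Sketch: load the scene (BIFS/LASeR/OD) of an opened MP4 into a scene manager */
GF_Err load_isom_scene(GF_ISOFile *mp4)
{
	GF_Err e;
	GF_SceneGraph *sg = gf_sg_new();
	GF_SceneManager *ctx = gf_sm_new(sg);
	GF_SceneLoader load;

	memset(&load, 0, sizeof(GF_SceneLoader));
	load.isom = mp4;            /*selects the ISO-media loader shown above*/
	load.ctx = ctx;
	load.scene_graph = sg;

	e = gf_sm_load_init(&load);        /*ends up in gf_sm_load_init_isom - Example #8*/
	if (!e) e = gf_sm_load_run(&load); /*drives gf_sm_load_run_isom - Example #9*/
	gf_sm_load_done(&load);

	gf_sm_del(ctx);
	gf_sg_del(sg);
	return e;
}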
Example #10
GF_Err ISOR_ConnectChannel(GF_InputService *plug, LPNETCHANNEL channel, const char *url, Bool upstream)
{
    u32 ESID;
    ISOMChannel *ch;
    GF_NetworkCommand com;
    u32 track;
    Bool is_esd_url;
    GF_Err e;
    ISOMReader *read;
    if (!plug || !plug->priv) return GF_SERVICE_ERROR;
    read = (ISOMReader *) plug->priv;

    track = 0;
    ch = NULL;
    is_esd_url = 0;
    e = GF_OK;
    if (upstream) {
        e = GF_ISOM_INVALID_FILE;
        goto exit;
    }
    if (!read->mov) return GF_SERVICE_ERROR;

    if (strstr(url, "ES_ID")) {
        sscanf(url, "ES_ID=%u", &ESID);
    } else {
        /*handle url like mypath/myfile.mp4#trackID*/
        char *track_id = strrchr(url, '.');
        if (track_id) {
            track_id = strchr(url, '#');
            if (track_id) track_id ++;
        }
        is_esd_url = 1;

        ESID = 0;
        /*if only one track ok*/
        if (gf_isom_get_track_count(read->mov)==1) ESID = gf_isom_get_track_id(read->mov, 1);
        else if (track_id) {
            ESID = atoi(track_id);
            track = gf_isom_get_track_by_id(read->mov, (u32) ESID);
            if (!track) ESID = 0;
        }

    }
    if (!ESID) {
        e = GF_NOT_SUPPORTED;
        goto exit;
    }

    /*a channel cannot be open twice, it has to be closed before - NOTE a track is NOT a channel and the user can open
    several times the same track as long as a dedicated channel is used*/
    ch = isor_get_channel(read, channel);
    if (ch) {
        e = GF_SERVICE_ERROR;
        goto exit;
    }
    track = gf_isom_get_track_by_id(read->mov, (u32) ESID);
    if (!track) {
        e = GF_STREAM_NOT_FOUND;
        goto exit;
    }

    GF_SAFEALLOC(ch, ISOMChannel);
    ch->owner = read;
    ch->channel = channel;
    gf_list_add(read->channels, ch);
    ch->track = track;
    switch (gf_isom_get_media_type(ch->owner->mov, ch->track)) {
    case GF_ISOM_MEDIA_OCR:
        ch->streamType = GF_STREAM_OCR;
        break;
    case GF_ISOM_MEDIA_SCENE:
        ch->streamType = GF_STREAM_SCENE;
        break;
    }

    ch->has_edit_list = gf_isom_get_edit_segment_count(ch->owner->mov, ch->track) ? 1 : 0;
    ch->has_rap = (gf_isom_has_sync_points(ch->owner->mov, ch->track)==1) ? 1 : 0;
    ch->time_scale = gf_isom_get_media_timescale(ch->owner->mov, ch->track);

exit:
    gf_term_on_connect(read->service, channel, e);
    /*if esd url reconfig SL layer*/
    if (!e && is_esd_url) {
        GF_ESD *esd;
        memset(&com, 0, sizeof(GF_NetworkCommand));
        com.base.on_channel = channel;
        com.command_type = GF_NET_CHAN_RECONFIG;
        esd = gf_isom_get_esd(read->mov, ch->track, 1);
        if (esd) {
            memcpy(&com.cfg.sl_config, esd->slConfig, sizeof(GF_SLConfig));
            gf_odf_desc_del((GF_Descriptor *)esd);
        } else {
            com.cfg.sl_config.tag = GF_ODF_SLC_TAG;
            com.cfg.sl_config.timestampLength = 32;
            com.cfg.sl_config.timestampResolution = ch->time_scale;
            com.cfg.sl_config.useRandomAccessPointFlag = 1;
        }
        gf_term_on_command(read->service, &com, GF_OK);
    }
    if (!e && track && gf_isom_is_track_encrypted(read->mov, track)) {
        memset(&com, 0, sizeof(GF_NetworkCommand));
        com.base.on_channel = channel;
        com.command_type = GF_NET_CHAN_DRM_CFG;
        ch->is_encrypted = 1;
        if (gf_isom_is_ismacryp_media(read->mov, track, 1)) {
            gf_isom_get_ismacryp_info(read->mov, track, 1, NULL, &com.drm_cfg.scheme_type, &com.drm_cfg.scheme_version, &com.drm_cfg.scheme_uri, &com.drm_cfg.kms_uri, NULL, NULL, NULL);
            gf_term_on_command(read->service, &com, GF_OK);
        } else if (gf_isom_is_omadrm_media(read->mov, track, 1)) {
            gf_isom_get_omadrm_info(read->mov, track, 1, NULL, &com.drm_cfg.scheme_type, &com.drm_cfg.scheme_version, &com.drm_cfg.contentID, &com.drm_cfg.kms_uri, &com.drm_cfg.oma_drm_textual_headers, &com.drm_cfg.oma_drm_textual_headers_len, NULL, &com.drm_cfg.oma_drm_crypt_type, NULL, NULL, NULL);

            gf_media_get_file_hash(gf_isom_get_filename(read->mov), com.drm_cfg.hash);
            gf_term_on_command(read->service, &com, GF_OK);

        }
    }
    return e;
}