Example 1
0
GF_Err hvcc_Read(GF_Box *s, GF_BitStream *bs)
{
	u64 pos;
	GF_HEVCConfigurationBox *ptr = (GF_HEVCConfigurationBox *)s;

	/*drop any previously parsed configuration before (re)reading*/
	if (ptr->config) gf_odf_hevc_cfg_del(ptr->config);

	pos = gf_bs_get_position(bs);
	ptr->config = gf_odf_hevc_cfg_read_bs(bs);
	/*subtract the bytes consumed by the configuration record from the remaining box size*/
	pos = gf_bs_get_position(bs) - pos;
	if (pos < ptr->size)
		ptr->size -= (u32) pos;

	return GF_OK;
}
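The position bookkeeping in hvcc_Read is the usual pattern for a box reader that hands part of its payload to a sub-parser: record the bitstream position, run the sub-parser, then charge the consumed bytes against the box's remaining size. A minimal sketch of that pattern on its own, with a made-up helper name and assuming only GPAC's bitstream API:

#include <gpac/bitstream.h>

/* Illustrative helper, not a GPAC API: returns how many bytes the given
 * sub-parser consumed, so a box reader can subtract them from its size. */
typedef void (*sub_parser)(GF_BitStream *bs);

static u64 bytes_consumed_by(GF_BitStream *bs, sub_parser parse)
{
	u64 start = gf_bs_get_position(bs);
	parse(bs);
	return gf_bs_get_position(bs) - start;
}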
Example 2
0
GF_Err gf_isom_hevc_config_update(GF_ISOFile *the_file, u32 trackNumber, u32 DescriptionIndex, GF_HEVCConfig *cfg)
{
	u32 i, array_incomplete;
	GF_TrackBox *trak;
	GF_Err e;
	GF_MPEGVisualSampleEntryBox *entry;

	e = CanAccessMovie(the_file, GF_ISOM_OPEN_WRITE);
	if (e) return e;
	trak = gf_isom_get_track_from_file(the_file, trackNumber);
	if (!trak || !trak->Media || !cfg || !DescriptionIndex) return GF_BAD_PARAM;
	entry = (GF_MPEGVisualSampleEntryBox *)gf_list_get(trak->Media->information->sampleTable->SampleDescription->other_boxes, DescriptionIndex-1);
	if (!entry) return GF_BAD_PARAM;
	switch (entry->type) {
	case GF_ISOM_BOX_TYPE_HVC1:
	case GF_ISOM_BOX_TYPE_HEV1:
		break;
	default:
		return GF_BAD_PARAM;
	}

	if (!entry->hevc_config) entry->hevc_config = (GF_HEVCConfigurationBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_HVCC);
	if (entry->hevc_config->config) gf_odf_hevc_cfg_del(entry->hevc_config->config);
	entry->hevc_config->config = HEVC_DuplicateConfig(cfg);

	array_incomplete = 0;
	for (i=0; i<gf_list_count(entry->hevc_config->config->param_array); i++) {
		GF_HEVCParamArray *ar = gf_list_get(entry->hevc_config->config->param_array, i);
		if (!ar->array_completeness) {
			array_incomplete = 1;
			break;
		}
	}
	entry->type = array_incomplete ? GF_ISOM_BOX_TYPE_HEV1 : GF_ISOM_BOX_TYPE_HVC1;

	HEVC_RewriteESDescriptor(entry);
	return GF_OK;
}
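A minimal caller sketch for this updater, assuming a local MP4 whose first track carries HEVC (the path, track number and description index are illustrative): read the current configuration, mark every parameter-set array complete, and write it back, which also triggers the hvc1/hev1 switch shown above.

#include <gpac/isomedia.h>
#include <gpac/mpeg4_odf.h>

/* Illustrative only: track 1 / sample description 1 are assumptions. */
static GF_Err mark_hevc_arrays_complete(const char *path)
{
	u32 i;
	GF_Err e;
	GF_ISOFile *mp4 = gf_isom_open(path, GF_ISOM_OPEN_EDIT, NULL);
	if (!mp4) return gf_isom_last_error(NULL);

	GF_HEVCConfig *cfg = gf_isom_hevc_config_get(mp4, 1, 1);
	if (!cfg) {
		gf_isom_delete(mp4);
		return GF_BAD_PARAM;
	}
	for (i=0; i<gf_list_count(cfg->param_array); i++) {
		GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i);
		ar->array_completeness = 1;
	}
	e = gf_isom_hevc_config_update(mp4, 1, 1, cfg);
	gf_odf_hevc_cfg_del(cfg);

	if (e) {
		gf_isom_delete(mp4);
		return e;
	}
	return gf_isom_close(mp4);
}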
Example 3
0
GF_EXPORT
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer) 
{	
	u32 size;
	u16 port;
	char mediaName[30], payloadName[30];
	char sdp[20000], sdpLine[10000];

	if (!out_sdp_buffer) return GF_BAD_PARAM;

	gf_rtp_builder_get_payload_name(rtp->packetizer, payloadName, mediaName);
	gf_rtp_get_ports(rtp->channel, &port, NULL);

	sprintf(sdp, "m=%s %d RTP/%s %d\n", mediaName, port, rtp->packetizer->slMap.IV_length ? "SAVP" : "AVP", rtp->packetizer->PayloadType);
	sprintf(sdpLine, "a=rtpmap:%d %s/%d\n", rtp->packetizer->PayloadType, payloadName, rtp->packetizer->sl_config.timestampResolution);
	strcat(sdp, sdpLine);
	if (ESID && (rtp->packetizer->rtp_payt != GF_RTP_PAYT_3GPP_DIMS)) {
		sprintf(sdpLine, "a=mpeg4-esid:%d\n", ESID);
		strcat(sdp, sdpLine);
	}

	if (width && height) {
		if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H263) {
			sprintf(sdpLine, "a=cliprect:0,0,%d,%d\n", height, width);
			strcat(sdp, sdpLine);
		}
		/*extensions for some mobile phones*/
		sprintf(sdpLine, "a=framesize:%d %d-%d\n", rtp->packetizer->PayloadType, width, height);
		strcat(sdp, sdpLine);
	}
		
	strcpy(sdpLine, "");

	/*AMR*/
	if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1\n", rtp->packetizer->PayloadType);
	}
	/*Text*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, isofile, isotrack);
		strcat(sdpLine, "\n");
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
	}
	/*H264/AVC*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC)) {
		GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;

		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				if (i) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_odf_avc_cfg_del(avcc);
			strcat(sdpLine, "\n");
		}
	}
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) {
#ifndef GPAC_DISABLE_HEVC
		GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL;
		if (hevcc) {
			u32 count, i, j, b64s;
			char b64[200];
			sprintf(sdpLine, "a=fmtp:%d", rtp->packetizer->PayloadType);
			count = gf_list_count(hevcc->param_array);
			for (i = 0; i < count; i++) {
				GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevcc->param_array, i);
				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					strcat(sdpLine, "; sprop-sps=");						
				} else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					strcat(sdpLine, "; sprop-pps=");
				} else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					strcat(sdpLine, "; sprop-vps=");
				}
				for (j = 0; j < gf_list_count(ar->nalus); j++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					if (j) strcat(sdpLine, ", ");
					strcat(sdpLine, b64);
				}
			}
			gf_odf_hevc_cfg_del(hevcc);
			strcat(sdpLine, "\n");
		}
#endif
	}
	/*MPEG-4 decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dsi, dsi_len);
		strcat(sdpLine, "\n");

		if (rtp->packetizer->slMap.IV_length && KMS_URI) {
			if (!strnicmp(KMS_URI, "(key)", 5) || !strnicmp(KMS_URI, "(ipmp)", 6) || !strnicmp(KMS_URI, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, KMS_URI);
			strcat(sdpLine, "\n");
		}
	}
	/*DIMS decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", rtp->packetizer->PayloadType, 10);
		if (rtp->packetizer->flags & GP_RTP_DIMS_COMPRESSED) {
			strcat(sdpLine, ";content-coding=deflate");
		}
		strcat(sdpLine, "\n");
	}
	/*MPEG-4 Audio LATM*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 

		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 

		/* audio-specific config  - PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
		if (dsi) gf_bs_write_data(bs, dsi, MIN(dsi_len, 2) ); 

		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 

		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size); 
		gf_free(config_bytes); 
		strcat(sdpLine, "\n");
	}

	strcat(sdp, sdpLine);

	size = (u32) strlen(sdp) + (*out_sdp_buffer ? (u32) strlen(*out_sdp_buffer) : 0) + 1;
	if ( !*out_sdp_buffer) {
		*out_sdp_buffer = gf_malloc(sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcpy(*out_sdp_buffer, sdp);
	} else {
		*out_sdp_buffer = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcat(*out_sdp_buffer, sdp);
	}
	return GF_OK;
} 
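Note that out_sdp_buffer works in append mode: when it points to NULL the SDP text is allocated, otherwise the new media description is concatenated onto the existing buffer. A hedged usage sketch, assuming rtp is a streamer already configured elsewhere (for instance via gf_rtp_streamer_new_extended) and that no decoder config, ISO file or KMS URI needs to be advertised:

#include <stdio.h>
#include <gpac/rtp_streamer.h>

/* Illustrative only: rtp is assumed to be an already configured streamer. */
static void dump_media_sdp(GF_RTPStreamer *rtp)
{
	char *sdp = NULL;
	GF_Err e = gf_rtp_streamer_append_sdp_extended(rtp, 0, NULL, 0, NULL, 0, NULL, 0, 0, &sdp);
	if ((e == GF_OK) && sdp) {
		fprintf(stdout, "%s", sdp);
		gf_free(sdp);
	}
}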
Example 4
0
GF_EXPORT
GF_ISOMRTPStreamer *gf_isom_streamer_new(const char *file_name, const char *ip_dest, u16 port, Bool loop, Bool force_mpeg4, u32 path_mtu, u32 ttl, char *ifce_addr)
{
	GF_ISOMRTPStreamer *streamer;
	GF_Err e = GF_OK;
	const char *opt = NULL;
	/*GF_Config *configFile = NULL;	*/
	u32 i, max_ptime, au_sn_len;
	u8 payt;
	GF_ISOFile *file;
	GF_RTPTrack *track, *prev_track;
	u16 first_port;
	u32 nb_tracks;
	u32 sess_data_size;
	u32 base_track;

	if (!ip_dest) ip_dest = "127.0.0.1";
	if (!port) port = 7000;
	if (!path_mtu) path_mtu = 1450;

	GF_SAFEALLOC(streamer, GF_ISOMRTPStreamer);
	if (!streamer) return NULL;
	streamer->dest_ip = gf_strdup(ip_dest);

	payt = 96;
	max_ptime = au_sn_len = 0;

	file = gf_isom_open(file_name, GF_ISOM_OPEN_READ, NULL);
	if (!file) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Error opening file %s: %s\n", file_name, gf_error_to_string(gf_isom_last_error(NULL))));
		gf_free(streamer->dest_ip);
		gf_free(streamer);
		return NULL;
	}

	streamer->isom = file;
	streamer->loop = loop;
	streamer->force_mpeg4_generic = force_mpeg4;
	first_port = port;

	sess_data_size = 0;
	prev_track = NULL;

	nb_tracks = gf_isom_get_track_count(streamer->isom);
	for (i=0; i<nb_tracks; i++) {
		u32 mediaSize, mediaDuration, flags, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, samplerate, maxDTSDelta, TrackMediaSubType, TrackMediaType, bandwidth, IV_length, KI_length, dsi_len;
		const char *url, *urn;
		char *dsi;
		Bool is_crypted;

		dsi_len = samplerate = streamType = oti = nb_ch = IV_length = KI_length = 0;
		is_crypted = 0;
		dsi = NULL;

		flags = 0;

		/*we only support self-contained files for hinting*/
		gf_isom_get_data_reference(streamer->isom, i+1, 1, &url, &urn);
		if (url || urn) continue;

		TrackMediaType = gf_isom_get_media_type(streamer->isom, i+1);
		TrackMediaSubType = gf_isom_get_media_subtype(streamer->isom, i+1, 1);

		switch (TrackMediaType) {
		case GF_ISOM_MEDIA_TEXT:
			break;
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_OD:
		case GF_ISOM_MEDIA_SCENE:
			if (gf_isom_get_sample_description_count(streamer->isom, i+1) > 1) continue;
			break;
		default:
			continue;
		}

		GF_SAFEALLOC(track, GF_RTPTrack);
		if (prev_track) prev_track->next = track;
		else streamer->stream = track;
		prev_track = track;

		track->track_num = i+1;

		track->nb_aus = gf_isom_get_sample_count(streamer->isom, track->track_num);
		track->timescale = gf_isom_get_media_timescale(streamer->isom, track->track_num);
		mediaDuration = (u32)(gf_isom_get_media_duration(streamer->isom, track->track_num)*1000/track->timescale); // ms
		mediaSize = (u32)gf_isom_get_media_data_size(streamer->isom, track->track_num);

		sess_data_size += mediaSize;
		if (mediaDuration > streamer->duration_ms) streamer->duration_ms = mediaDuration;

		track->port = check_next_port(streamer, first_port);
		first_port = track->port+2;

		/*init packetizer*/
		if (streamer->force_mpeg4_generic) flags = GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_FORCE_MPEG4;


		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP:
			is_crypted = 1;
			/*fall through: encrypted tracks use the same MPEG-4 systems path*/
		case GF_ISOM_SUBTYPE_MPEG4:
		{
			GF_ESD *esd = gf_isom_get_esd(streamer->isom, track->track_num, 1);
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;

				/*systems streams*/
				if (streamType==GF_STREAM_AUDIO) {
					gf_isom_get_audio_info(streamer->isom, track->track_num, 1, &samplerate, &nb_ch, NULL);
				}
				/*systems streams*/
				else if (streamType==GF_STREAM_SCENE) {
					if (gf_isom_has_sync_shadows(streamer->isom, track->track_num) || gf_isom_has_sample_dependency(streamer->isom, track->track_num))
						flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}

				if (esd->decoderConfig->decoderSpecificInfo) {
					dsi = esd->decoderConfig->decoderSpecificInfo->data;
					dsi_len = esd->decoderConfig->decoderSpecificInfo->dataLength;
					esd->decoderConfig->decoderSpecificInfo->data = NULL;
					esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
		}
		break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_AVC3_H264:
		case GF_ISOM_SUBTYPE_AVC4_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc, *svcc;
			avcc = gf_isom_avc_config_get(streamer->isom, track->track_num, 1);
			if (avcc)
			{
				track->avc_nalu_size = avcc->nal_unit_size;
				gf_odf_avc_cfg_del(avcc);
				streamType = GF_STREAM_VISUAL;
				oti = GPAC_OTI_VIDEO_AVC;
			}
			svcc = gf_isom_svc_config_get(streamer->isom, track->track_num, 1);
			if (svcc)
			{
				track->avc_nalu_size = svcc->nal_unit_size;
				gf_odf_avc_cfg_del(svcc);
				streamType = GF_STREAM_VISUAL;
				oti = GPAC_OTI_VIDEO_SVC;
			}
			break;
		}
		case GF_ISOM_SUBTYPE_HVC1:
		case GF_ISOM_SUBTYPE_HEV1:
		case GF_ISOM_SUBTYPE_HVC2:
		case GF_ISOM_SUBTYPE_HEV2:
		case GF_ISOM_SUBTYPE_SHC1:
		{
			GF_HEVCConfig *hevcc = NULL, *shvcc = NULL;
			hevcc = gf_isom_hevc_config_get(streamer->isom, track->track_num, 1);
			if (hevcc) {
				track->avc_nalu_size = hevcc->nal_unit_size;
				gf_odf_hevc_cfg_del(hevcc);
				streamType = GF_STREAM_VISUAL;
				oti = GPAC_OTI_VIDEO_HEVC;
			}
			shvcc = gf_isom_shvc_config_get(streamer->isom, track->track_num, 1);
			if (shvcc) {
				track->avc_nalu_size = shvcc->nal_unit_size;
				gf_odf_hevc_cfg_del(shvcc);
				streamType = GF_STREAM_VISUAL;
				oti = GPAC_OTI_VIDEO_SHVC;
			}
			flags |= GP_RTP_PCK_USE_MULTI;
			break;
		}
		default:
			streamType = GF_STREAM_4CC;
			oti = TrackMediaSubType;
			break;
		}

		/*get sample info*/
		gf_media_get_sample_average_infos(streamer->isom, track->track_num, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

		if (is_crypted) {
			Bool use_sel_enc;
			gf_isom_get_ismacryp_info(streamer->isom, track->track_num, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
			if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
		}

		track->rtp = gf_rtp_streamer_new_extended(streamType, oti, track->timescale,
		             (char *) streamer->dest_ip, track->port, path_mtu, ttl, ifce_addr,
		             flags, dsi, dsi_len,
		             payt, samplerate, nb_ch,
		             is_crypted, IV_length, KI_length,
		             MinSize, MaxSize, avgTS, maxDTSDelta, const_dur, bandwidth, max_ptime, au_sn_len);

		if (!track->rtp) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Could not initialize RTP streamer: %s\n", gf_error_to_string(e)));
			goto exit;
		}

		payt++;
		track->microsec_ts_scale = 1000000;
		track->microsec_ts_scale /= gf_isom_get_media_timescale(streamer->isom, track->track_num);

		/*does this stream have the decoding dependency ?*/
		gf_isom_get_reference(streamer->isom, track->track_num, GF_ISOM_REF_BASE, 1, &base_track);
		if (base_track)
			streamer->base_track = base_track;
	}

	/*if scalable coding is found, disable auto RTCP reports and send them ourselves*/
	if (streamer->base_track) {
		GF_RTPTrack *track = streamer->stream;
		while (track) {
			gf_rtp_streamer_disable_auto_rtcp(track->rtp);
			track = track->next;
		}
	}
	return streamer;

exit:
	gf_free(streamer);
	return NULL;
}
Example 5
0
static GF_Err HEVC_ConfigureStream(HEVCDec *ctx, GF_ESD *esd)
{
	u32 i, j;
	GF_HEVCConfig *cfg = NULL;
	ctx->ES_ID = esd->ESID;
	ctx->width = ctx->height = ctx->out_size = ctx->luma_bpp = ctx->chroma_bpp = ctx->chroma_format_idc = 0;

	ctx->nb_layers = 1;

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		HEVCState hevc;
		memset(&hevc, 0, sizeof(HEVCState));

		cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, GF_FALSE);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		ctx->nalu_size_length = cfg->nal_unit_size;

		for (i=0; i< gf_list_count(cfg->param_array); i++) {
			GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i);
			for (j=0; j< gf_list_count(ar->nalus); j++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
				s32 idx;
				u16 hdr = sl->data[0] << 8 | sl->data[1]; /*16-bit HEVC NAL unit header*/

				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					idx = gf_media_hevc_read_sps(sl->data, sl->size, &hevc);
					ctx->width = MAX(hevc.sps[idx].width, ctx->width);
					ctx->height = MAX(hevc.sps[idx].height, ctx->height);
					ctx->luma_bpp = MAX(hevc.sps[idx].bit_depth_luma, ctx->luma_bpp);
					ctx->chroma_bpp = MAX(hevc.sps[idx].bit_depth_chroma, ctx->chroma_bpp);
					ctx->chroma_format_idc  = hevc.sps[idx].chroma_format_idc;
					
					/*a non-zero nuh_layer_id (mask 0x1f8) signals an enhancement layer*/
					if (hdr & 0x1f8) {
						ctx->nb_layers ++;
					}
				}
				else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					gf_media_hevc_read_vps(sl->data, sl->size, &hevc);
				}
				else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					gf_media_hevc_read_pps(sl->data, sl->size, &hevc);
				}
			}
		}
		gf_odf_hevc_cfg_del(cfg);
	} else {
		ctx->nalu_size_length = 0;
	}

	ctx->openHevcHandle = libOpenHevcInit(ctx->nb_threads, ctx->threading_type);

#ifndef GPAC_DISABLE_LOG
	if (gf_log_tool_level_on(GF_LOG_CODEC, GF_LOG_DEBUG) ) {
		libOpenHevcSetDebugMode(ctx->openHevcHandle, 1);
	}
#endif


	if (esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 1/*ctx->nb_layers*/);
		libOpenHevcSetViewLayers(ctx->openHevcHandle, ctx->nb_layers-1);

		libOpenHevcCopyExtraData(ctx->openHevcHandle, (u8 *) esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
	} else {
		//hardcoded values: 2 layers max, display layer 0
		libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 1/*ctx->nb_layers*/);
		libOpenHevcSetViewLayers(ctx->openHevcHandle, 0/*ctx->nb_layers-1*/);
	}

	libOpenHevcStartDecoder(ctx->openHevcHandle);


	ctx->stride = ((ctx->luma_bpp==8) && (ctx->chroma_bpp==8)) ? ctx->width : ctx->width * 2;
	if ( ctx->chroma_format_idc  == 1) { // 4:2:0
		ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	}
	else if ( ctx->chroma_format_idc  == 2) { // 4:2:2
		ctx->out_size = ctx->stride * ctx->height * 2 ;
	}
	else if ( ctx->chroma_format_idc  == 3) { // 4:4:4
		ctx->out_size = ctx->stride * ctx->height * 3;
	}
	else {
		return GF_NOT_SUPPORTED;
	}

	if (ctx->output_as_8bit && (ctx->stride>ctx->width)) {
		ctx->stride /=2;
		ctx->out_size /= 2;
		ctx->chroma_bpp = ctx->luma_bpp = 8;
		ctx->conv_to_8bit = GF_TRUE;
		ctx->pack_mode = GF_FALSE;
	}
	ctx->dec_frames = 0;
	return GF_OK;
}
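The buffer sizing at the end of this function is plain planar-YUV arithmetic: one byte per luma sample for 8-bit content, two otherwise, then a chroma factor of 3/2, 2 or 3 depending on the sampling. The same computation restated as a standalone helper (names are illustrative):

#include <stdint.h>

/* Illustrative re-statement of the stride/out_size computation above. */
static uint32_t yuv_output_size(uint32_t width, uint32_t height,
                                uint32_t luma_bpp, uint32_t chroma_bpp,
                                uint32_t chroma_format_idc)
{
	uint32_t stride = ((luma_bpp == 8) && (chroma_bpp == 8)) ? width : width * 2;
	switch (chroma_format_idc) {
	case 1: return stride * height * 3 / 2; /* 4:2:0 */
	case 2: return stride * height * 2;     /* 4:2:2 */
	case 3: return stride * height * 3;     /* 4:4:4 */
	default: return 0;                      /* unsupported */
	}
}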
Example 6
0
GF_Err MCDec_InitHevcDecoder(MCDec *ctx) 
{
    u32 i, j;
    GF_HEVCConfig *cfg = NULL;

    ctx->ES_ID = ctx->esd->ESID;
    ctx->width = ctx->height = ctx->out_size = ctx->luma_bpp = ctx->chroma_bpp = ctx->chroma_format_idc = 0;

    if (ctx->esd->decoderConfig->decoderSpecificInfo && ctx->esd->decoderConfig->decoderSpecificInfo->data) {
        HEVCState hevc;
        memset(&hevc, 0, sizeof(HEVCState));

        cfg = gf_odf_hevc_cfg_read(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, GF_FALSE);
        if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
        ctx->nalu_size_length = cfg->nal_unit_size;

        for (i=0; i< gf_list_count(cfg->param_array); i++) {
            GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i);
            for (j=0; j< gf_list_count(ar->nalus); j++) {
                GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
                s32 idx;
                u16 hdr = sl->data[0] << 8 | sl->data[1];

                if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
                    idx = gf_media_hevc_read_sps(sl->data, sl->size, &hevc);
                    ctx->width = MAX(hevc.sps[idx].width, ctx->width);
                    ctx->height = MAX(hevc.sps[idx].height, ctx->height);
                    ctx->luma_bpp = MAX(hevc.sps[idx].bit_depth_luma, ctx->luma_bpp);
                    ctx->chroma_bpp = MAX(hevc.sps[idx].bit_depth_chroma, ctx->chroma_bpp);
                    ctx->chroma_format_idc  = hevc.sps[idx].chroma_format_idc;

                    ctx->sps = (char *) malloc(4 + sl->size);
                    ctx->sps_size = sl->size;
                    prependStartCode(sl->data, ctx->sps, &ctx->sps_size);
                }
                else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
                    gf_media_hevc_read_vps(sl->data, sl->size, &hevc);

                    ctx->vps = (char *) malloc(4 + sl->size);
                    ctx->vps_size = sl->size;
                    prependStartCode(sl->data, ctx->vps, &ctx->vps_size);
                }
                else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
                    gf_media_hevc_read_pps(sl->data, sl->size, &hevc);

                    ctx->pps = (char *) malloc(4 + sl->size);
                    ctx->pps_size = sl->size;
                    prependStartCode(sl->data, ctx->pps, &ctx->pps_size);          
                }
            }
        }
        gf_odf_hevc_cfg_del(cfg);
    } else {
        ctx->nalu_size_length = 0;
    }

    ctx->stride = ((ctx->luma_bpp==8) && (ctx->chroma_bpp==8)) ? ctx->width : ctx->width * 2;
    if ( ctx->chroma_format_idc  == 1) { // 4:2:0
        ctx->out_size = ctx->stride * ctx->height * 3 / 2;
    }
    else if ( ctx->chroma_format_idc  == 2) { // 4:2:2
        ctx->out_size = ctx->stride * ctx->height * 2 ;
    }
    else if ( ctx->chroma_format_idc  == 3) { // 4:4:4
        ctx->out_size = ctx->stride * ctx->height * 3;
    }
    else {
        return GF_NOT_SUPPORTED;
    }
    
    ctx->mime = "video/hevc";

    /* csd-0 for video/hevc carries the VPS + SPS + PPS, each already prefixed
       with a start code by prependStartCode() above */
    u32 csd0_size = ctx->sps_size + ctx->pps_size + ctx->vps_size;
    char *csd0 = (char *) malloc(csd0_size);
    if (!csd0) return GF_OUT_OF_MEM;

    u32 k;
    for (k = 0; k < csd0_size; k++) {
        if (k < ctx->vps_size) {
            csd0[k] = ctx->vps[k];
        }
        else if (k < ctx->vps_size + ctx->sps_size) {
            csd0[k] = ctx->sps[k - ctx->vps_size];
        }
        else {
            csd0[k] = ctx->pps[k - ctx->vps_size - ctx->sps_size];
        }
    }

    AMediaFormat_setBuffer(ctx->format, "csd-0", csd0, csd0_size);
    return GF_OK;
}
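prependStartCode is not shown in this example. From its call sites (a destination buffer of 4 + sl->size bytes and a size passed by pointer) it presumably writes a 4-byte Annex B start code followed by the NAL unit and grows the size accordingly; a hypothetical sketch consistent with that usage:

#include <string.h>
#include <gpac/tools.h>	/* u32 */

/* Hypothetical helper inferred from its call sites above: dst must hold at
 * least 4 + *size bytes; *size is updated to include the start code. */
static void prependStartCode(const char *nal, char *dst, u32 *size)
{
	dst[0] = 0x00;
	dst[1] = 0x00;
	dst[2] = 0x00;
	dst[3] = 0x01;
	memcpy(dst + 4, nal, *size);
	*size += 4;
}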
Example 7
0
static GF_Err dc_gpac_video_write_config(VideoOutputFile *video_output_file, u32 *di, u32 track) {
	GF_Err ret;
	if (video_output_file->codec_ctx->codec_id == CODEC_ID_H264) {
		GF_AVCConfig *avccfg;
		avccfg = gf_odf_avc_cfg_new();
		if (!avccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create AVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = avc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, avccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse AVC/H264 SPS/PPS\n"));
			gf_odf_avc_cfg_del(avccfg);
			return ret;
		}

		ret = gf_isom_avc_config_new(video_output_file->isof, track, avccfg, NULL, NULL, di);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_config_new\n", gf_error_to_string(ret)));
			gf_odf_avc_cfg_del(avccfg);
			return ret;
		}

		gf_odf_avc_cfg_del(avccfg);

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_avc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	} else if (!strcmp(video_output_file->codec_ctx->codec->name, "libx265")) { //FIXME CODEC_ID_HEVC would break on old releases
		GF_HEVCConfig *hevccfg = gf_odf_hevc_cfg_new();
		if (!hevccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create HEVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = hevc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, hevccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse HEVC/H265 SPS/PPS\n"));
			gf_odf_hevc_cfg_del(hevccfg);
			return ret;
		}

		ret = gf_isom_hevc_config_new(video_output_file->isof, track, hevccfg, NULL, NULL, di);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_config_new\n", gf_error_to_string(ret)));
			gf_odf_hevc_cfg_del(hevccfg);
			return ret;
		}

		gf_odf_hevc_cfg_del(hevccfg);

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_hevc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	}

	return GF_OK;
}
Example 8
0
GF_Err AVC_HEVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd)
{
	if (!avc->bitrate) avc->bitrate = (GF_MPEG4BitRateBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_BTRT);
	if (avc->descr) gf_isom_box_del((GF_Box *) avc->descr);
	avc->descr = NULL;
	avc->bitrate->avgBitrate = esd->decoderConfig->avgBitrate;
	avc->bitrate->maxBitrate = esd->decoderConfig->maxBitrate;
	avc->bitrate->bufferSizeDB = esd->decoderConfig->bufferSizeDB;

	if (gf_list_count(esd->IPIDataSet)
		|| gf_list_count(esd->IPMPDescriptorPointers)
		|| esd->langDesc
		|| gf_list_count(esd->extensionDescriptors)
		|| esd->ipiPtr || esd->qos || esd->RegDescriptor) {

		avc->descr = (GF_MPEG4ExtensionDescriptorsBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_M4DS);
		if (esd->RegDescriptor) { gf_list_add(avc->descr->descriptors, esd->RegDescriptor); esd->RegDescriptor = NULL; }
		if (esd->qos) { gf_list_add(avc->descr->descriptors, esd->qos); esd->qos = NULL; }
		if (esd->ipiPtr) { gf_list_add(avc->descr->descriptors, esd->ipiPtr); esd->ipiPtr= NULL; }

		while (gf_list_count(esd->IPIDataSet)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPIDataSet, 0);
			gf_list_rem(esd->IPIDataSet, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		while (gf_list_count(esd->IPMPDescriptorPointers)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPMPDescriptorPointers, 0);
			gf_list_rem(esd->IPMPDescriptorPointers, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		if (esd->langDesc) {
			gf_list_add(avc->descr->descriptors, esd->langDesc);
			esd->langDesc = NULL;
		}
		while (gf_list_count(esd->extensionDescriptors)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->extensionDescriptors, 0);
			gf_list_rem(esd->extensionDescriptors, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
	}

	/*update GF_AVCConfig*/
	if (!avc->svc_config) {
		if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC) {
			if (!avc->hevc_config) avc->hevc_config = (GF_HEVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_HVCC);
			if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
				if (avc->hevc_config->config) gf_odf_hevc_cfg_del(avc->hevc_config->config);
				avc->hevc_config->config = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			}
		} else {
			if (!avc->avc_config) avc->avc_config = (GF_AVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC);
			if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
				if (avc->avc_config->config) gf_odf_avc_cfg_del(avc->avc_config->config);
				avc->avc_config->config = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			}
		}

	}
	gf_odf_desc_del((GF_Descriptor *)esd);
	if (avc->hevc_config) {
		HEVC_RewriteESDescriptor(avc);
	} else {
		AVC_RewriteESDescriptor(avc);
	}
	return GF_OK;
}
Example 9
0
void hvcc_del(GF_Box *s)
{
	GF_HEVCConfigurationBox *ptr = (GF_HEVCConfigurationBox*)s;
	if (ptr->config) gf_odf_hevc_cfg_del(ptr->config);
	gf_free(ptr);
}
Example 10
0
GF_EXPORT
GF_HEVCConfig *gf_odf_hevc_cfg_read_bs(GF_BitStream *bs, Bool is_shvc)
{
	u32 i, count;
	GF_HEVCConfig *cfg = gf_odf_hevc_cfg_new();

	cfg->is_shvc = is_shvc;

	cfg->configurationVersion = gf_bs_read_int(bs, 8);

	if (!is_shvc) {
		cfg->profile_space = gf_bs_read_int(bs, 2);
		cfg->tier_flag = gf_bs_read_int(bs, 1);
		cfg->profile_idc = gf_bs_read_int(bs, 5);
		cfg->general_profile_compatibility_flags = gf_bs_read_int(bs, 32);

		cfg->progressive_source_flag = gf_bs_read_int(bs, 1);
		cfg->interlaced_source_flag = gf_bs_read_int(bs, 1);
		cfg->non_packed_constraint_flag = gf_bs_read_int(bs, 1);
		cfg->frame_only_constraint_flag = gf_bs_read_int(bs, 1);
		/*only lowest 44 bits used*/
		cfg->constraint_indicator_flags = gf_bs_read_long_int(bs, 44);
		cfg->level_idc = gf_bs_read_int(bs, 8);
	}

	gf_bs_read_int(bs, 4); //reserved
	cfg->min_spatial_segmentation_idc = gf_bs_read_int(bs, 12);

	gf_bs_read_int(bs, 6);//reserved
	cfg->parallelismType = gf_bs_read_int(bs, 2);

	if (!is_shvc) {
		gf_bs_read_int(bs, 6);
		cfg->chromaFormat = gf_bs_read_int(bs, 2);
		gf_bs_read_int(bs, 5);
		cfg->luma_bit_depth = gf_bs_read_int(bs, 3) + 8;
		gf_bs_read_int(bs, 5);
		cfg->chroma_bit_depth = gf_bs_read_int(bs, 3) + 8;
		cfg->avgFrameRate = gf_bs_read_int(bs, 16);
	}

	if (!is_shvc)
		cfg->constantFrameRate = gf_bs_read_int(bs, 2);
	else
		gf_bs_read_int(bs, 2); //reserved

	cfg->numTemporalLayers = gf_bs_read_int(bs, 3);
	cfg->temporalIdNested = gf_bs_read_int(bs, 1);

	cfg->nal_unit_size = 1 + gf_bs_read_int(bs, 2);

	count = gf_bs_read_int(bs, 8);
	for (i=0; i<count; i++) {
		u32 nalucount, j;
		GF_HEVCParamArray *ar;
		GF_SAFEALLOC(ar, GF_HEVCParamArray);
		if (!ar) {
			gf_odf_hevc_cfg_del(cfg);
			return NULL;
		}
		ar->nalus = gf_list_new();
		gf_list_add(cfg->param_array, ar);

		ar->array_completeness = gf_bs_read_int(bs, 1);
		gf_bs_read_int(bs, 1);
		ar->type = gf_bs_read_int(bs, 6);
		nalucount = gf_bs_read_int(bs, 16);
		for (j=0; j<nalucount; j++) {
			GF_AVCConfigSlot *sl;
			GF_SAFEALLOC(sl, GF_AVCConfigSlot );
			if (!sl) {
				gf_odf_hevc_cfg_del(cfg);
				return NULL;
			}

			sl->size = gf_bs_read_int(bs, 16);

			sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
			gf_bs_read_data(bs, sl->data, sl->size);
			gf_list_add(ar->nalus, sl);
		}
	}
	return cfg;
}
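Callers that hold the hvcC payload in memory, as several of the examples above do, presumably reach this function through a read bitstream bound to that buffer. A minimal sketch of that pattern (the buffer is assumed to come from elsewhere):

#include <gpac/bitstream.h>
#include <gpac/mpeg4_odf.h>

/* Illustrative only: data/size point to an hvcC payload obtained elsewhere. */
static GF_HEVCConfig *parse_hvcc_payload(char *data, u32 size)
{
	GF_BitStream *bs = gf_bs_new(data, size, GF_BITSTREAM_READ);
	GF_HEVCConfig *cfg = bs ? gf_odf_hevc_cfg_read_bs(bs, GF_FALSE) : NULL;
	if (bs) gf_bs_del(bs);
	return cfg; /* release with gf_odf_hevc_cfg_del() */
}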