Example 1
static u32 OSVC_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd, u8 PL)
{
	if (StreamType != GF_STREAM_VISUAL) return GF_CODEC_NOT_SUPPORTED;

	/*media type query*/
	if (!esd) return GF_CODEC_STREAM_TYPE_SUPPORTED;

	switch (esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC:
		if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			Bool is_svc = 0;
			u32 i, count;
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			if (!cfg) return GF_CODEC_NOT_SUPPORTED;

			/*decode all NALUs*/
			count = gf_list_count(cfg->sequenceParameterSets);
			for (i=0; i<count; i++) {
				GF_AVCConfigSlot *slc = gf_list_get(cfg->sequenceParameterSets, i);
				u8 nal_type = slc->data[0] & 0x1F;

				if (nal_type==GF_AVC_NALU_SVC_SUBSEQ_PARAM) {
					is_svc = 1;
					break;
				}
			}
			gf_odf_avc_cfg_del(cfg);
			return is_svc ? GF_CODEC_SUPPORTED : GF_CODEC_MAYBE_SUPPORTED;
		}
		return GF_CODEC_MAYBE_SUPPORTED;
	}
	return GF_CODEC_NOT_SUPPORTED;
}
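
The check above keys off the 5-bit nal_unit_type carried in the first byte of each parameter set: per ISO/IEC 14496-10 the NAL unit header is 1 bit forbidden_zero_bit, 2 bits nal_ref_idc and 5 bits nal_unit_type, with type 7 a regular SPS and type 15 a subset SPS (SVC). A minimal stand-alone sketch of that extraction, using made-up header bytes rather than data from a real stream:

#include <stdio.h>

/* Sketch only: extract the 5-bit nal_unit_type from an H.264 NAL header byte.
   0x67 -> nal_ref_idc=3, type=7 (SPS); 0x6F -> nal_ref_idc=3, type=15 (subset SPS). */
int main(void)
{
	unsigned char sps_hdr = 0x67;
	unsigned char subset_sps_hdr = 0x6F;
	printf("type of 0x67: %u\n", sps_hdr & 0x1F);        /* prints 7  */
	printf("type of 0x6F: %u\n", subset_sps_hdr & 0x1F); /* prints 15 */
	return 0;
}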
Example 2
static u32 VTBDec_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd, u8 PL)
{
	if (StreamType != GF_STREAM_VISUAL) return GF_CODEC_NOT_SUPPORTED;
	/*media type query*/
	if (!esd) return GF_CODEC_STREAM_TYPE_SUPPORTED;

	switch (esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC:
		if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			Bool cp_ok = GF_TRUE;
			if (!cfg) return GF_CODEC_NOT_SUPPORTED;
			if (!cfg->chroma_format) {
				GF_AVCConfigSlot *s = gf_list_get(cfg->sequenceParameterSets, 0);
				if (s) {
					AVCState avc;
					s32 idx;
					memset(&avc, 0, sizeof(AVCState));
					avc.sps_active_idx = -1;
					idx = gf_media_avc_read_sps(s->data, s->size, &avc, 0, NULL);
					/*only use the parsed SPS if parsing succeeded*/
					if (idx >= 0) {
						cfg->chroma_format = avc.sps[idx].chroma_format;
						cfg->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
						cfg->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
					}
				}
			}
			if ((cfg->chroma_bit_depth>8) || (cfg->luma_bit_depth > 8) || (cfg->chroma_format>1)) {
				cp_ok = GF_FALSE;
			}
			gf_odf_avc_cfg_del(cfg);
			if (!cp_ok) return GF_CODEC_PROFILE_NOT_SUPPORTED;
		}
		return GF_CODEC_SUPPORTED * 2;

	case GPAC_OTI_VIDEO_MPEG4_PART2:
		return GF_CODEC_SUPPORTED * 2;

	case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
	case GPAC_OTI_VIDEO_MPEG2_MAIN:
	case GPAC_OTI_VIDEO_MPEG2_SNR:
	case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
	case GPAC_OTI_VIDEO_MPEG2_HIGH:
	case GPAC_OTI_VIDEO_MPEG2_422:
		//not supported on iphone
#ifdef GPAC_IPHONE
		return GF_CODEC_NOT_SUPPORTED;
#else
		return GF_CODEC_SUPPORTED * 2;
#endif

	//cannot make it work on ios and OSX version seems buggy (wrong frame output order)
//	case GPAC_OTI_VIDEO_MPEG1:
//		return GF_CODEC_SUPPORTED * 2;

	case GPAC_OTI_MEDIA_GENERIC:
		if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->dataLength) {
			char *dsi = esd->decoderConfig->decoderSpecificInfo->data;
			if (!strnicmp(dsi, "s263", 4)) return GF_CODEC_SUPPORTED*2;
		}
	}
	return GF_CODEC_NOT_SUPPORTED;
}
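
The rejection test in the AVC branch above maps directly onto the SPS fields it reads: anything other than 8-bit 4:2:0 (or monochrome) content is declined. As a reminder of the chroma_format_idc values involved (ISO/IEC 14496-10), a small illustrative enum, not a definition from the GPAC headers:

/* Illustrative only, not a GPAC definition: chroma_format_idc values from the AVC SPS.
   The branch above keeps 0 and 1 and returns GF_CODEC_PROFILE_NOT_SUPPORTED otherwise. */
enum {
	AVC_CHROMA_MONOCHROME = 0,
	AVC_CHROMA_420        = 1,
	AVC_CHROMA_422        = 2,
	AVC_CHROMA_444        = 3
};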
Example 3
static GF_Err VTBDec_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	GF_Err e;
	VTBDec *ctx = (VTBDec *)ifcg->privateStack;
	ctx->esd = esd;

	//check AVC config
	if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) {
		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			ctx->is_annex_b = GF_TRUE;
			ctx->width=ctx->height=128;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_YV12;
			return GF_OK;
		} else {
			GF_AVCConfigSlot *slc;
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
			slc = gf_list_get(cfg->sequenceParameterSets, 0);
			if (slc) {
				ctx->sps = slc->data;
				ctx->sps_size = slc->size;
			}
			slc = gf_list_get(cfg->pictureParameterSets, 0);
			if (slc) {
				ctx->pps = slc->data;
				ctx->pps_size = slc->size;
			}
			
			if (ctx->sps && ctx->pps) {
				e = VTBDec_InitDecoder(ctx, GF_FALSE);
			} else {
				ctx->nalu_size_length = cfg->nal_unit_size;
				e = GF_OK;
			}
			gf_odf_avc_cfg_del(cfg);
			return e;
		}
	}

	//check VOSH config
	if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_MPEG4_PART2) {
		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			ctx->width=ctx->height=128;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_YV12;
			//todo, we will have to collect the vosh
		} else {
			return VTBDec_InitDecoder(ctx, GF_FALSE);
		}
	}

	return VTBDec_InitDecoder(ctx, GF_FALSE);
}
Example 4
GF_Err AVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd)
{
	if (!avc->bitrate) avc->bitrate = (GF_MPEG4BitRateBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_BTRT);
	if (avc->descr) gf_isom_box_del((GF_Box *) avc->descr);
	avc->descr = NULL;
	avc->bitrate->avgBitrate = esd->decoderConfig->avgBitrate;
	avc->bitrate->maxBitrate = esd->decoderConfig->maxBitrate;
	avc->bitrate->bufferSizeDB = esd->decoderConfig->bufferSizeDB;

	if (gf_list_count(esd->IPIDataSet)
		|| gf_list_count(esd->IPMPDescriptorPointers)
		|| esd->langDesc
		|| gf_list_count(esd->extensionDescriptors)
		|| esd->ipiPtr || esd->qos || esd->RegDescriptor) {

		avc->descr = (GF_MPEG4ExtensionDescriptorsBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_M4DS);
		if (esd->RegDescriptor) { gf_list_add(avc->descr->descriptors, esd->RegDescriptor); esd->RegDescriptor = NULL; }
		if (esd->qos) { gf_list_add(avc->descr->descriptors, esd->qos); esd->qos = NULL; }
		if (esd->ipiPtr) { gf_list_add(avc->descr->descriptors, esd->ipiPtr); esd->ipiPtr= NULL; }

		while (gf_list_count(esd->IPIDataSet)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPIDataSet, 0);
			gf_list_rem(esd->IPIDataSet, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		while (gf_list_count(esd->IPMPDescriptorPointers)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPMPDescriptorPointers, 0);
			gf_list_rem(esd->IPMPDescriptorPointers, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		if (esd->langDesc) {
			gf_list_add(avc->descr->descriptors, esd->langDesc);
			esd->langDesc = NULL;
		}
		while (gf_list_count(esd->extensionDescriptors)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->extensionDescriptors, 0);
			gf_list_rem(esd->extensionDescriptors, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
	}

	/*update GF_AVCConfig*/
	if (!avc->svc_config) {
		if (!avc->avc_config) avc->avc_config = (GF_AVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC);
		if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			if (avc->avc_config->config) gf_odf_avc_cfg_del(avc->avc_config->config);
			avc->avc_config->config = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		}
		gf_odf_desc_del((GF_Descriptor *)esd);
	}
	AVC_RewriteESDescriptor(avc);
	return GF_OK;
}
Example 5
GF_EXPORT
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer) 
{	
	u32 size;
	u16 port;
	char mediaName[30], payloadName[30];
	char sdp[20000], sdpLine[10000];

	if (!out_sdp_buffer) return GF_BAD_PARAM;

	gf_rtp_builder_get_payload_name(rtp->packetizer, payloadName, mediaName);
	gf_rtp_get_ports(rtp->channel, &port, NULL);

	sprintf(sdp, "m=%s %d RTP/%s %d\n", mediaName, port, rtp->packetizer->slMap.IV_length ? "SAVP" : "AVP", rtp->packetizer->PayloadType);
	sprintf(sdpLine, "a=rtpmap:%d %s/%d\n", rtp->packetizer->PayloadType, payloadName, rtp->packetizer->sl_config.timestampResolution);
	strcat(sdp, sdpLine);
    if (ESID && (rtp->packetizer->rtp_payt != GF_RTP_PAYT_3GPP_DIMS)) {
		sprintf(sdpLine, "a=mpeg4-esid:%d\n", ESID);
		strcat(sdp, sdpLine);		
	}

	if (width && height) {
		if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H263) {
			sprintf(sdpLine, "a=cliprect:0,0,%d,%d\n", height, width);
			strcat(sdp, sdpLine);
		}
		/*extensions for some mobile phones*/
		sprintf(sdpLine, "a=framesize:%d %d-%d\n", rtp->packetizer->PayloadType, width, height);
		strcat(sdp, sdpLine);
	}
		
	strcpy(sdpLine, "");

	/*AMR*/
	if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1\n", rtp->packetizer->PayloadType);
	}
	/*Text*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, isofile, isotrack);
		strcat(sdpLine, "\n");
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
	}
	/*H264/AVC*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC)) {
		GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;

		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				if (i) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_odf_avc_cfg_del(avcc);
			strcat(sdpLine, "\n");
		}
	}
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) {
#ifndef GPAC_DISABLE_HEVC
		GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL;
		if (hevcc) {
			u32 count, i, j, b64s;
			char b64[200];
			sprintf(sdpLine, "a=fmtp:%d", rtp->packetizer->PayloadType);
			count = gf_list_count(hevcc->param_array);
			for (i = 0; i < count; i++) {
				GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevcc->param_array, i);
				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					strcat(sdpLine, "; sprop-sps=");						
				} else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					strcat(sdpLine, "; sprop-pps=");
				} else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					strcat(sdpLine, "; sprop-vps=");
				}
				for (j = 0; j < gf_list_count(ar->nalus); j++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					if (j) strcat(sdpLine, ", ");
					strcat(sdpLine, b64);
				}
			}
			gf_odf_hevc_cfg_del(hevcc);
			strcat(sdpLine, "\n");
		}
#endif
	}
	/*MPEG-4 decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dsi, dsi_len);
		strcat(sdpLine, "\n");

		if (rtp->packetizer->slMap.IV_length && KMS_URI) {
			if (!strnicmp(KMS_URI, "(key)", 5) || !strnicmp(KMS_URI, "(ipmp)", 6) || !strnicmp(KMS_URI, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, KMS_URI);
			strcat(sdpLine, "\n");
		}
	}
    /*DIMS decoder config*/
    else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
        sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", rtp->packetizer->PayloadType, 10);
        if (rtp->packetizer->flags & GP_RTP_DIMS_COMPRESSED) {
            strcat(sdpLine, ";content-coding=deflate");
        }
		strcat(sdpLine, "\n");
    }
	/*MPEG-4 Audio LATM*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) { 
		GF_BitStream *bs; 
		char *config_bytes; 
		u32 config_size; 

		/* form config string */ 
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE); 
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */ 
		gf_bs_write_int(bs, 1, 1); /* all streams same time */ 
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */ 
		gf_bs_write_int(bs, 0, 4); /* numPrograms */ 
		gf_bs_write_int(bs, 0, 3); /* numLayer */ 

		/* audio-specific config  - PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
		if (dsi) gf_bs_write_data(bs, dsi, MIN(dsi_len, 2) ); 

		/* other data */ 
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */ 
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */ 
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */ 
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */ 
		gf_bs_get_content(bs, &config_bytes, &config_size); 
		gf_bs_del(bs); 

		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size); 
		gf_free(config_bytes); 
		strcat(sdpLine, "\n");
	}

	strcat(sdp, sdpLine);

	size = (u32) strlen(sdp) + (*out_sdp_buffer ? (u32) strlen(*out_sdp_buffer) : 0) + 1;
	if ( !*out_sdp_buffer) {
		*out_sdp_buffer = gf_malloc(sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcpy(*out_sdp_buffer, sdp);
	} else {
		*out_sdp_buffer = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcat(*out_sdp_buffer, sdp);
	}
	return GF_OK;
} 
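
For reference, the H.264/AVC branch above produces an RFC 6184 style fmtp attribute: profile-level-id is the hex dump of the first three bytes of the AVC decoder configuration, and sprop-parameter-sets lists the base64-encoded SPS NAL units followed by the PPS NAL units, separated by commas. An illustrative example of the resulting line, with a made-up payload type and made-up base64 values:

/* Illustrative only: shape of the line built by the GF_RTP_PAYT_H264_AVC branch above.
   The payload type (96) and the base64 parameter sets are invented, not real stream data. */
static const char *example_avc_fmtp =
	"a=fmtp:96 profile-level-id=42C01E; packetization-mode=1; "
	"sprop-parameter-sets=Z0LAHtkA8A==,aM4yyA==\n";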
Example 6
void isor_declare_objects(ISOMReader *read)
{
	GF_ObjectDescriptor *od;
	GF_ESD *esd;
	const char *tag;
	u32 i, count, ocr_es_id, tlen, base_track, j, track_id;
	Bool highest_stream;
	char *opt;
	Bool add_ps_lower = GF_TRUE;

	ocr_es_id = 0;
	opt = (char*) gf_modules_get_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS", "yes");
	} else if (!strcmp(opt, "no")) {
		add_ps_lower = GF_FALSE;
	}

	/*TODO check for alternate tracks*/
	count = gf_isom_get_track_count(read->mov);
	for (i=0; i<count; i++) {
		if (!gf_isom_is_track_enabled(read->mov, i+1)) continue;

		switch (gf_isom_get_media_type(read->mov, i+1)) {
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_TEXT:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_SUBPIC:
			break;
		default:
			continue;
		}

		/*we declare only the highest video track (i.e the track we play)*/
		highest_stream = GF_TRUE;
		track_id = gf_isom_get_track_id(read->mov, i+1);
		for (j = 0; j < count; j++) {
			if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) {
				highest_stream = GF_FALSE;
				break;
			}
		}
		if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream)
			continue;
		esd = gf_media_map_esd(read->mov, i+1);
		if (esd) {
			gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track);
			esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE;
			/*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/
			/*so we add by default the SPS/PPS of the lower layers to this esd*/
			if (esd->has_ref_base && add_ps_lower) {
				u32 count, refIndex, ref_track, num_sps, num_pps, t;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				GF_AVCConfig *avccfg, *svccfg;

				count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL);
				for (refIndex = count; refIndex != 0; refIndex--) {
					gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track);
					avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1);
					svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1);
					if (avccfg) {
						num_sps = gf_list_count(avccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(avccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(avccfg);
					}
					if (svccfg) {
						num_sps = gf_list_count(svccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(svccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(svccfg);
					}
				}

				if (esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data);
				gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
				gf_odf_avc_cfg_del(cfg);
			}

			od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
			od->service_ifce = read->input;
			od->objectDescriptorID = 0;
			if (!ocr_es_id) ocr_es_id = esd->ESID;
			esd->OCRESID = ocr_es_id;
			gf_list_add(od->ESDescriptors, esd);
			if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
				send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
			} else {
				gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
			}
		}
	}
	/*if cover art, extract it in cache*/
	if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) {
		const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_term_get_service_interface(read->service), "General", "CacheDirectory");
		if (cdir) {
			char szName[GF_MAX_PATH];
			const char *sep;
			FILE *t;
			sep = strrchr(gf_isom_get_filename(read->mov), '\\');
			if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/');
			if (!sep) sep = gf_isom_get_filename(read->mov);

			if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) {
				sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			} else {
				sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			}

			t = gf_f64_open(szName, "wb");

			if (t) {
				Bool isom_contains_video = GF_FALSE;

				/*write cover data*/
				assert(!(tlen & 0x80000000));
				gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t);
				fclose(t);

				/*don't display cover art when video is present*/
				for (i=0; i<gf_isom_get_track_count(read->mov); i++) {
					if (!gf_isom_is_track_enabled(read->mov, i+1))
						continue;
					if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) {
						isom_contains_video = GF_TRUE;
						break;
					}
				}

				if (!isom_contains_video) {
					od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
					od->service_ifce = read->input;
					od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID;
					od->URLString = gf_strdup(szName);
					if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
						send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
					} else {
						gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
					}
				}
			}
		}
	}
	if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
		send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL);
	} else {
		gf_term_add_media(read->service, NULL, GF_FALSE);
	}
}
Example 7
static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	u32 i, count;
	s32 res;
	OPENSVCFRAME Picture;
	int Layer[4];
	OSVCDec *ctx = (OSVCDec*) ifcg->privateStack;

	/*todo: we should check base layer of this stream is indeed our base layer*/
	if (!ctx->ES_ID) {
		ctx->ES_ID = esd->ESID;
		ctx->width = ctx->height = ctx->out_size = 0;
		if (!esd->dependsOnESID) ctx->baseES_ID = esd->ESID;
	}

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		if (!esd->dependsOnESID) {
			ctx->nalu_size_length = cfg->nal_unit_size;
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
		}

		/*decode all NALUs*/
		count = gf_list_count(cfg->sequenceParameterSets);
		SetCommandLayer(Layer, 255, 0, &res, 0);//bufindex can be reset without pb
		for (i=0; i<count; i++) {
			u32 w=0, h=0, sid;
			s32 par_n=0, par_d=0;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->sequenceParameterSets, i);

#ifndef GPAC_DISABLE_AV_PARSERS
			gf_avc_get_sps_info(slc->data, slc->size, &sid, &w, &h, &par_n, &par_d);
#endif
			/*by default use the base layer*/
			if (!i) {
				if ((ctx->width<w) || (ctx->height<h)) {
					ctx->width = w;
					ctx->height = h;
					if ( ((s32)par_n>0) && ((s32)par_d>0) )
						ctx->pixel_ar = (par_n<<16) | par_d;
				}
			}
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: SPS id=\"%d\" code=\"%d\" size=\"%d\"\n", slc->id, slc->data[0] & 0x1F, slc->size));
		}

		count = gf_list_count(cfg->pictureParameterSets);
		for (i=0; i<count; i++) {
			u32 sps_id, pps_id;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->pictureParameterSets, i);
			gf_avc_get_pps_info(slc->data, slc->size, &pps_id, &sps_id);
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: PPS id=\"%d\" code=\"%d\" size=\"%d\" sps_id=\"%d\"\n", pps_id, slc->data[0] & 0x1F, slc->size, sps_id));
		}
		ctx->state_found = 1;
		gf_odf_avc_cfg_del(cfg);
	} else {
		if (ctx->nalu_size_length) {
			return GF_NOT_SUPPORTED;
		}
		ctx->nalu_size_length = 0;
		if (!esd->dependsOnESID) {
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
		}
		ctx->pixel_ar = (1<<16) | 1;
	}
	ctx->stride = ctx->width + 32;
	ctx->CurrentDqId = ctx->MaxDqId = 0;
	ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	return GF_OK;
}
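
The pixel_ar fields above pack the sample aspect ratio as a 16.16 value, numerator in the high 16 bits and denominator in the low 16 bits, which is why a bitwise OR is required rather than a logical one. A minimal sketch of the pack/unpack round trip, independent of the GPAC API:

#include <stdio.h>

/* Sketch, not GPAC code: pack/unpack a pixel aspect ratio as 16.16,
   numerator in the high 16 bits, denominator in the low 16 bits. */
static unsigned int pack_par(unsigned int par_n, unsigned int par_d)
{
	return (par_n << 16) | (par_d & 0xFFFF);
}

int main(void)
{
	unsigned int par = pack_par(4, 3);
	printf("num=%u den=%u\n", par >> 16, par & 0xFFFF); /* prints num=4 den=3 */
	return 0;
}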
Example 8
static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	u32 i, count;
	s32 res;
	OPENSVCFRAME Picture;
	int Layer[4];
	OSVCDec *ctx = (OSVCDec*) ifcg->privateStack;

	/*not supported in this version*/
	if (esd->dependsOnESID) return GF_NOT_SUPPORTED;
	
	ctx->ES_ID = esd->ESID;
	ctx->width = ctx->height = ctx->out_size = 0;
	
	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		ctx->nalu_size_length = cfg->nal_unit_size;
		if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;

		/*decode all NALUs*/
		count = gf_list_count(cfg->sequenceParameterSets);
        SetCommandLayer(Layer, 255, 0, &i, 0);//bufindex can be reset without pb
		for (i=0; i<count; i++) {
			u32 w, h, par_n, par_d;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->sequenceParameterSets, i);

			gf_avc_get_sps_info(slc->data, slc->size, &slc->id, &w, &h, &par_n, &par_d);
			/*by default use the base layer*/
			if (!i) {
				if ((ctx->width<w) || (ctx->height<h)) {
					ctx->width = w;
					ctx->height = h;
					if ( ((s32)par_n>0) && ((s32)par_d>0) )
						ctx->pixel_ar = (par_n<<16) | par_d;
				}
			}
			res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res));
			}
		}

		count = gf_list_count(cfg->pictureParameterSets);
		for (i=0; i<count; i++) {
			GF_AVCConfigSlot *slc = gf_list_get(cfg->pictureParameterSets, i);
			res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res));
			}
		}

		gf_odf_avc_cfg_del(cfg);
	} else {
		ctx->nalu_size_length = 0;
		if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
	}
	ctx->stride = ctx->width + 32;
	ctx->layer = 0;
	ctx->CurrDqId = ctx->layer;
	ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	return GF_OK;
}
Example 9
static GF_Err MCDec_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
    MCDec *ctx = (MCDec *)ifcg->privateStack;
    GF_Err e;
    ctx->esd = esd;

    //check AVC config
    if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_AVC) {
		ctx->SPSs = gf_list_new();
		ctx->PPSs = gf_list_new();
		ctx->mime = "video/avc";
		ctx->avc.sps_active_idx = -1;
		ctx->active_sps = ctx->active_pps = -1;
        if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
            ctx->width=ctx->height=128;
            ctx->out_size = ctx->width*ctx->height*3/2;
            ctx->pix_fmt = GF_PIXEL_NV12;
            return GF_OK;
        } else {
			u32 i;
			GF_AVCConfigSlot *slc;
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
			for (i = 0; i<gf_list_count(cfg->sequenceParameterSets); i++) {
				slc = gf_list_get(cfg->sequenceParameterSets, i);
				slc->id = -1;
				MCDec_RegisterParameterSet(ctx, slc->data, slc->size, GF_TRUE);
			}

			for (i = 0; i<gf_list_count(cfg->pictureParameterSets); i++) {
				slc = gf_list_get(cfg->pictureParameterSets, i);
				slc->id = -1;
				MCDec_RegisterParameterSet(ctx, slc->data, slc->size, GF_FALSE);
			}

			slc = gf_list_get(ctx->SPSs, 0);
			if (slc) ctx->active_sps = slc->id;

			slc = gf_list_get(ctx->PPSs, 0);
			if (slc) ctx->active_pps = slc->id;
			
			ctx->nalu_size_length = cfg->nal_unit_size;
		
			
			if (gf_list_count(ctx->SPSs) && gf_list_count(ctx->PPSs)) {
				e = MCDec_InitDecoder(ctx);
			}
			else {
				e = GF_OK;
			}
			gf_odf_avc_cfg_del(cfg);
			return e;
		}
    }

    if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_MPEG4_PART2) {
        if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
            ctx->width=ctx->height=128;
            ctx->out_size = ctx->width*ctx->height*3/2;
            ctx->pix_fmt = GF_PIXEL_NV12;
        } else {
            GF_M4VDecSpecInfo vcfg;            
            gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
            ctx->width = vcfg.width;
            ctx->height = vcfg.height;
            ctx->out_size = ctx->width*ctx->height*3/2;
            ctx->pix_fmt = GF_PIXEL_NV12;

            return MCDec_InitDecoder(ctx);
        }
    }
  
    if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_HEVC) {
        ctx->esd= esd;
        if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
            ctx->width=ctx->height=128;
            ctx->out_size = ctx->width*ctx->height*3/2;
            ctx->pix_fmt = GF_PIXEL_NV12;
        } else {
            return MCDec_InitDecoder(ctx);
        }
    }

    return MCDec_InitDecoder(ctx);
}
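
Several of these decoders keep cfg->nal_unit_size as ctx->nalu_size_length: in ISOBMFF AVC tracks (ISO/IEC 14496-15) each sample is a sequence of NAL units, each preceded by a big-endian length field of that many bytes (typically 1, 2 or 4). A stand-alone sketch of walking such a sample, offered as an illustration rather than GPAC code:

#include <stdio.h>

/* Sketch only: walk a length-prefixed AVC sample buffer (ISO/IEC 14496-15),
   reading the big-endian length of nalu_size_length bytes before each NAL unit
   and printing the NAL unit type. */
static void walk_avc_sample(const unsigned char *buf, unsigned int size, unsigned int nalu_size_length)
{
	unsigned int pos = 0;
	while (pos + nalu_size_length <= size) {
		unsigned int i, nal_size = 0;
		for (i = 0; i < nalu_size_length; i++)
			nal_size = (nal_size << 8) | buf[pos + i];
		pos += nalu_size_length;
		if (!nal_size || pos + nal_size > size) break; /* malformed sample */
		printf("NAL type %u, %u bytes\n", buf[pos] & 0x1F, nal_size);
		pos += nal_size;
	}
}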