Example #1
GF_EXPORT
GF_AVCConfig *gf_odf_avc_cfg_read(char *dsi, u32 dsi_size)
{
	u32 i, count;
	GF_AVCConfig *avcc = gf_odf_avc_cfg_new();
	GF_BitStream *bs = gf_bs_new(dsi, dsi_size, GF_BITSTREAM_READ);
	avcc->configurationVersion = gf_bs_read_int(bs, 8);
	avcc->AVCProfileIndication = gf_bs_read_int(bs, 8);
	avcc->profile_compatibility = gf_bs_read_int(bs, 8);
	avcc->AVCLevelIndication = gf_bs_read_int(bs, 8);
	gf_bs_read_int(bs, 6); /*reserved*/
	avcc->nal_unit_size = 1 + gf_bs_read_int(bs, 2); /*lengthSizeMinusOne*/
	gf_bs_read_int(bs, 3); /*reserved*/
	count = gf_bs_read_int(bs, 5); /*numOfSequenceParameterSets*/
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_int(bs, 16);
		sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(avcc->sequenceParameterSets, sl);
	}
	count = gf_bs_read_int(bs, 8);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_int(bs, 16);
		sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(avcc->pictureParameterSets, sl);
	}
	gf_bs_del(bs);
	return avcc;
}
Example #2
GF_Err avcc_Read(GF_Box *s, GF_BitStream *bs)
{
	u32 i, count;
	GF_AVCConfigurationBox *ptr = (GF_AVCConfigurationBox *)s;

	if (ptr->config) gf_odf_avc_cfg_del(ptr->config);
	ptr->config = gf_odf_avc_cfg_new();
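	/* parse the avcC box payload into the freshly allocated AVCConfig */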
	ptr->config->configurationVersion = gf_bs_read_u8(bs);
	ptr->config->AVCProfileIndication = gf_bs_read_u8(bs);
	ptr->config->profile_compatibility = gf_bs_read_u8(bs);
	ptr->config->AVCLevelIndication = gf_bs_read_u8(bs);
	gf_bs_read_int(bs, 6);
	ptr->config->nal_unit_size = 1 + gf_bs_read_int(bs, 2);
	gf_bs_read_int(bs, 3);
	count = gf_bs_read_int(bs, 5);

	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *) malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->sequenceParameterSets, sl);
	}

	count = gf_bs_read_u8(bs);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->pictureParameterSets, sl);
	}
	return GF_OK;
}
Example #3
static GF_AVCConfig *AVC_DuplicateConfig(GF_AVCConfig *cfg)
{
	u32 i, count;
	GF_AVCConfigSlot *p1, *p2;
	GF_AVCConfig *cfg_new = gf_odf_avc_cfg_new();
	cfg_new->AVCLevelIndication = cfg->AVCLevelIndication;
	cfg_new->AVCProfileIndication = cfg->AVCProfileIndication;
	cfg_new->configurationVersion = cfg->configurationVersion;
	cfg_new->nal_unit_size = cfg->nal_unit_size;
	cfg_new->profile_compatibility = cfg->profile_compatibility;

	count = gf_list_count(cfg->sequenceParameterSets);
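	/* deep-copy each SPS entry: duplicate both the slot and its payload */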
	for (i=0; i<count; i++) {
		p1 = (GF_AVCConfigSlot*)gf_list_get(cfg->sequenceParameterSets, i);
		p2 = (GF_AVCConfigSlot*)malloc(sizeof(GF_AVCConfigSlot));
		p2->size = p1->size;
		p2->data = (char *)malloc(sizeof(char)*p1->size);
		memcpy(p2->data, p1->data, sizeof(char)*p1->size);
		gf_list_add(cfg_new->sequenceParameterSets, p2);
	}

	count = gf_list_count(cfg->pictureParameterSets);
	for (i=0; i<count; i++) {
		p1 = (GF_AVCConfigSlot*)gf_list_get(cfg->pictureParameterSets, i);
		p2 = (GF_AVCConfigSlot*)malloc(sizeof(GF_AVCConfigSlot));
		p2->size = p1->size;
		p2->data = (char*)malloc(sizeof(char)*p1->size);
		memcpy(p2->data, p1->data, sizeof(char)*p1->size);
		gf_list_add(cfg_new->pictureParameterSets, p2);
	}
	return cfg_new;	
}
Example #4
static int set_param( hnd_t handle, x264_param_t *p_param )
{
    mp4_hnd_t *p_mp4 = handle;

    p_mp4->i_delay_frames = p_param->i_bframe ? (p_param->i_bframe_pyramid ? 2 : 1) : 0;
    p_mp4->i_dts_compress_multiplier = p_mp4->b_dts_compress * p_mp4->i_delay_frames + 1;

    p_mp4->i_time_res = (uint64_t)p_param->i_timebase_den * p_mp4->i_dts_compress_multiplier;
    p_mp4->i_time_inc = (uint64_t)p_param->i_timebase_num * p_mp4->i_dts_compress_multiplier;
    FAIL_IF_ERR( p_mp4->i_time_res > UINT32_MAX, "mp4", "MP4 media timescale %"PRIu64" exceeds maximum\n", p_mp4->i_time_res )

    p_mp4->i_track = gf_isom_new_track( p_mp4->p_file, 0, GF_ISOM_MEDIA_VISUAL,
                                        p_mp4->i_time_res );

    p_mp4->p_config = gf_odf_avc_cfg_new();
    gf_isom_avc_config_new( p_mp4->p_file, p_mp4->i_track, p_mp4->p_config,
                            NULL, NULL, &p_mp4->i_descidx );

    gf_isom_set_track_enabled( p_mp4->p_file, p_mp4->i_track, 1 );

    gf_isom_set_visual_info( p_mp4->p_file, p_mp4->i_track, p_mp4->i_descidx,
                             p_param->i_width, p_param->i_height );

    if( p_param->vui.i_sar_width && p_param->vui.i_sar_height )
    {
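        /* stretch the 16.16 fixed-point track layout to honour the sample aspect ratio from the VUI */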
        uint64_t dw = p_param->i_width << 16;
        uint64_t dh = p_param->i_height << 16;
        double sar = (double)p_param->vui.i_sar_width / p_param->vui.i_sar_height;
        if( sar > 1.0 )
            dw *= sar;
        else
            dh /= sar;
        gf_isom_set_pixel_aspect_ratio( p_mp4->p_file, p_mp4->i_track, p_mp4->i_descidx, p_param->vui.i_sar_width, p_param->vui.i_sar_height );
        gf_isom_set_track_layout_info( p_mp4->p_file, p_mp4->i_track, dw, dh, 0, 0, 0 );
    }

    p_mp4->i_data_size = p_param->i_width * p_param->i_height * 3 / 2;
    p_mp4->p_sample->data = malloc( p_mp4->i_data_size );
    if( !p_mp4->p_sample->data )
    {
        p_mp4->i_data_size = 0;
        return -1;
    }

    return 0;
}
Example #5
//the first byte of SPS/PPS is 0x67 or 0x68
bool EasyMP4Writer::WriteH264SPSandPPS(unsigned char*sps,int spslen,unsigned char*pps,int ppslen,int width,int height)
{	

	p_videosample=gf_isom_sample_new();
	p_videosample->data=(char*)malloc(1024*1024);


	p_config=gf_odf_avc_cfg_new();	
	gf_isom_avc_config_new(p_file,m_videtrackid,p_config,NULL,NULL,&i_videodescidx);
	gf_isom_set_visual_info(p_file,m_videtrackid,i_videodescidx,width,height);

	GF_AVCConfigSlot m_slotsps={0};
	GF_AVCConfigSlot m_slotpps={0};
	
	p_config->configurationVersion = 1;
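	// profile_idc, compatibility flags and level_idc are bytes 1-3 of the SPS NAL (after the 0x67 header byte)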
	p_config->AVCProfileIndication = sps[1];
	p_config->profile_compatibility = sps[2];
	p_config->AVCLevelIndication = sps[3];
	
	m_slotsps.size=spslen;
	m_slotsps.data=(char*)malloc(spslen);
	memcpy(m_slotsps.data,sps,spslen);	
	gf_list_add(p_config->sequenceParameterSets,&m_slotsps);
	
	m_slotpps.size=ppslen;
	m_slotpps.data=(char*)malloc(ppslen);
	memcpy(m_slotpps.data,pps,ppslen);
	gf_list_add(p_config->pictureParameterSets,&m_slotpps);
	
	gf_isom_avc_config_update(p_file,m_videtrackid,1,p_config);

	free(m_slotsps.data);
	free(m_slotpps.data);

	return true;
}
Example #6
static GF_Err VTBDec_InitDecoder(VTBDec *ctx, Bool force_dsi_rewrite)
{
	CFMutableDictionaryRef dec_dsi, dec_type;
	CFMutableDictionaryRef dsi;
	VTDecompressionOutputCallbackRecord cbacks;
    CFDictionaryRef buffer_attribs;
    OSStatus status;
	OSType kColorSpace;
	
	CFDataRef data = NULL;
	char *dsi_data=NULL;
	u32 dsi_data_size=0;
	
    dec_dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
	
	kColorSpace = kCVPixelFormatType_420YpCbCr8Planar;
	ctx->pix_fmt = GF_PIXEL_YV12;
	
	switch (ctx->esd->decoderConfig->objectTypeIndication) {
    case GPAC_OTI_VIDEO_AVC :
		if (ctx->sps && ctx->pps) {
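			/* parse the in-band SPS to recover resolution, aspect ratio, chroma format and bit depths */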
			AVCState avc;
			s32 idx;
			memset(&avc, 0, sizeof(AVCState));
			avc.sps_active_idx = -1;

			idx = gf_media_avc_read_sps(ctx->sps, ctx->sps_size, &avc, 0, NULL);

			ctx->vtb_type = kCMVideoCodecType_H264;
			assert(ctx->sps);
			ctx->width = avc.sps[idx].width;
			ctx->height = avc.sps[idx].height;
			if (avc.sps[idx].vui.par_num && avc.sps[idx].vui.par_den) {
				ctx->pixel_ar = avc.sps[idx].vui.par_num;
				ctx->pixel_ar <<= 16;
				ctx->pixel_ar |= avc.sps[idx].vui.par_den;
			}
			ctx->chroma_format = avc.sps[idx].chroma_format;
			ctx->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
			ctx->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
		
			switch (ctx->chroma_format) {
			case 2:
				//422 decoding doesn't seem supported ...
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_422YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV422_10;
				} else {
					kColorSpace = kCVPixelFormatType_422YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV422;
				}
				break;
			case 3:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_444YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV444_10;
				} else {
					kColorSpace = kCVPixelFormatType_444YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV444;
				}
				break;
			default:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
					ctx->pix_fmt = GF_PIXEL_YV12_10;
				}
				break;
			}
		
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || force_dsi_rewrite || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
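				/* no usable decoder specific info: rebuild an avcC record from the in-band SPS/PPS */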
				GF_AVCConfigSlot *slc_s, *slc_p;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_new();
				cfg->configurationVersion = 1;
				cfg->profile_compatibility = avc.sps[idx].prof_compat;
				cfg->AVCProfileIndication = avc.sps[idx].profile_idc;
				cfg->AVCLevelIndication = avc.sps[idx].level_idc;
				cfg->chroma_format = avc.sps[idx].chroma_format;
				cfg->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
				cfg->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				cfg->nal_unit_size = 4;
				
				GF_SAFEALLOC(slc_s, GF_AVCConfigSlot);
				slc_s->data = ctx->sps;
				slc_s->size = ctx->sps_size;
				gf_list_add(cfg->sequenceParameterSets, slc_s);
				
				GF_SAFEALLOC(slc_p, GF_AVCConfigSlot);
				slc_p->data = ctx->pps;
				slc_p->size = ctx->pps_size;
				gf_list_add(cfg->pictureParameterSets , slc_p);
				
				gf_odf_avc_cfg_write(cfg, &dsi_data, &dsi_data_size);
				slc_s->data = slc_p->data = NULL;
				gf_odf_avc_cfg_del((cfg));
			} else {
				dsi_data = ctx->esd->decoderConfig->decoderSpecificInfo->data;
				dsi_data_size = ctx->esd->decoderConfig->decoderSpecificInfo->dataLength;
			}
			
			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, dsi_data, dsi_data_size);
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("avcC"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);
		
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				gf_free(ctx->sps);
				ctx->sps = NULL;
				gf_free(ctx->pps);
				ctx->pps = NULL;
				gf_free(dsi_data);
			}
		}
        break;
	case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
	case GPAC_OTI_VIDEO_MPEG2_MAIN:
	case GPAC_OTI_VIDEO_MPEG2_SNR:
	case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
	case GPAC_OTI_VIDEO_MPEG2_HIGH:
	case GPAC_OTI_VIDEO_MPEG2_422:
        ctx->vtb_type = kCMVideoCodecType_MPEG2Video;
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
        break;
		
	case GPAC_OTI_VIDEO_MPEG1:
		ctx->vtb_type = kCMVideoCodecType_MPEG1Video;
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
		break;
    case GPAC_OTI_VIDEO_MPEG4_PART2 :
	{
		Bool reset_dsi = GF_FALSE;
		ctx->vtb_type = kCMVideoCodecType_MPEG4Video;
		if (!ctx->esd->decoderConfig->decoderSpecificInfo) {
			ctx->esd->decoderConfig->decoderSpecificInfo = (GF_DefaultDescriptor *) gf_odf_desc_new(GF_ODF_DSI_TAG);
		}

		if (!ctx->esd->decoderConfig->decoderSpecificInfo->data) {
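			/* no DSI provided: temporarily use the in-band VOS header as decoder specific info */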
			reset_dsi = GF_TRUE;
			ctx->esd->decoderConfig->decoderSpecificInfo->data = ctx->vosh;
			ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = ctx->vosh_size;
		}
		
		if (ctx->esd->decoderConfig->decoderSpecificInfo->data) {
			GF_M4VDecSpecInfo vcfg;
			GF_BitStream *bs;
			
			gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
			ctx->width = vcfg.width;
			ctx->height = vcfg.height;
			if (ctx->esd->slConfig) {
				ctx->esd->slConfig->predefined  = 2;
			}
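			/* serialize the ESD behind a 4-byte zero prefix; the result is attached below as the 'esds' extension */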
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, 0);
			gf_odf_desc_write_bs((GF_Descriptor *) ctx->esd, bs);
			gf_bs_get_content(bs, &dsi_data, &dsi_data_size);
			gf_bs_del(bs);
			
			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, dsi_data, dsi_data_size);
			gf_free(dsi_data);
			
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("esds"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);
			
			if (reset_dsi) {
				ctx->esd->decoderConfig->decoderSpecificInfo->data = NULL;
				ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
			}
			ctx->skip_mpeg4_vosh = GF_FALSE;
		} else {
			ctx->skip_mpeg4_vosh = GF_TRUE;
			return GF_OK;
		}
        break;
    }
	case GPAC_OTI_MEDIA_GENERIC:
		if (ctx->esd->decoderConfig->decoderSpecificInfo && ctx->esd->decoderConfig->decoderSpecificInfo->dataLength) {
			char *dsi = ctx->esd->decoderConfig->decoderSpecificInfo->data;
			if (ctx->esd->decoderConfig->decoderSpecificInfo->dataLength<8) return GF_NON_COMPLIANT_BITSTREAM;
			if (strnicmp(dsi, "s263", 4)) return GF_NOT_SUPPORTED;
			
			ctx->width = ((u8) dsi[4]); ctx->width<<=8; ctx->width |= ((u8) dsi[5]);
			ctx->height = ((u8) dsi[6]); ctx->height<<=8; ctx->height |= ((u8) dsi[7]);
			ctx->vtb_type = kCMVideoCodecType_H263;
		}
		break;
		
	default :
		return GF_NOT_SUPPORTED;
    }

	if (! ctx->width || !ctx->height) return GF_NOT_SUPPORTED;

    status = CMVideoFormatDescriptionCreate(kCFAllocatorDefault, ctx->vtb_type, ctx->width, ctx->height, dec_dsi, &ctx->fmt_desc);

    if (!ctx->fmt_desc) {
		if (dec_dsi) CFRelease(dec_dsi);
        return GF_NON_COMPLIANT_BITSTREAM;
    }
	buffer_attribs = VTBDec_CreateBufferAttributes(ctx->width, ctx->height, kColorSpace);
	
	cbacks.decompressionOutputCallback = VTBDec_on_frame;
    cbacks.decompressionOutputRefCon   = ctx;

    dec_type = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(dec_type, kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder, kCFBooleanTrue);
	ctx->is_hardware = GF_TRUE;

    status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, dec_type, NULL, &cbacks, &ctx->vtb_session);
	//if HW decoder not available, try soft one
	if (status) {
		status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, NULL, buffer_attribs, &cbacks, &ctx->vtb_session);
		ctx->is_hardware = GF_FALSE;
	}
	
	if (dec_dsi)
		CFRelease(dec_dsi);
	if (dec_type)
		CFRelease(dec_type);
    if (buffer_attribs)
        CFRelease(buffer_attribs);

    switch (status) {
    case kVTVideoDecoderNotAvailableNowErr:
    case kVTVideoDecoderUnsupportedDataFormatErr:
        return GF_NOT_SUPPORTED;
    case kVTVideoDecoderMalfunctionErr:
        return GF_IO_ERR;
    case kVTVideoDecoderBadDataErr :
        return GF_BAD_PARAM;

	case kVTPixelTransferNotSupportedErr:
	case kVTCouldNotFindVideoDecoderErr:
		return GF_NOT_SUPPORTED;
    case 0:
        break;
    default:
        return GF_SERVICE_ERROR;
    }
	
	//good to go !
	if (ctx->pix_fmt == GF_PIXEL_YUV422) {
		ctx->out_size = ctx->width*ctx->height*2;
	} else if (ctx->pix_fmt == GF_PIXEL_YUV444) {
		ctx->out_size = ctx->width*ctx->height*3;
	} else {
		// (ctx->pix_fmt == GF_PIXEL_YV12)
		ctx->out_size = ctx->width*ctx->height*3/2;
	}
	if (ctx->luma_bit_depth>8) {
		ctx->out_size *= 2;
	}
	
	return GF_OK;
}
Example #7
static GF_Err dc_gpac_video_write_config(VideoOutputFile *video_output_file, u32 *di, u32 track) {
	GF_Err ret;
	if (video_output_file->codec_ctx->codec_id == CODEC_ID_H264) {
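		/* H.264: build an avcC sample description from the encoder extradata (SPS/PPS) */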
		GF_AVCConfig *avccfg;
		avccfg = gf_odf_avc_cfg_new();
		if (!avccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create AVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = avc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, avccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse AVC/H264 SPS/PPS\n"));
			gf_odf_avc_cfg_del(avccfg);
			return ret;
		}

		ret = gf_isom_avc_config_new(video_output_file->isof, track, avccfg, NULL, NULL, di);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_config_new\n", gf_error_to_string(ret)));
			return ret;
		}

		gf_odf_avc_cfg_del(avccfg);

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_avc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	} else if (!strcmp(video_output_file->codec_ctx->codec->name, "libx265")) { //FIXME CODEC_ID_HEVC would break on old releases
		GF_HEVCConfig *hevccfg = gf_odf_hevc_cfg_new();
		if (!hevccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create HEVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = hevc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, hevccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse HEVC/H265 SPS/PPS\n"));
			gf_odf_hevc_cfg_del(hevccfg);
			return ret;
		}

		ret = gf_isom_hevc_config_new(video_output_file->isof, track, hevccfg, NULL, NULL, di);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_config_new\n", gf_error_to_string(ret)));
			return ret;
		}

		gf_odf_hevc_cfg_del(hevccfg);

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_hevc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	}

	return GF_OK;
}
Example #8
GF_Err avcc_Read(GF_Box *s, GF_BitStream *bs)
{
	u32 i, count;
	GF_AVCConfigurationBox *ptr = (GF_AVCConfigurationBox *)s;

	if (ptr->config) gf_odf_avc_cfg_del(ptr->config);
	ptr->config = gf_odf_avc_cfg_new();
	ptr->config->configurationVersion = gf_bs_read_u8(bs);
	ptr->config->AVCProfileIndication = gf_bs_read_u8(bs);
	ptr->config->profile_compatibility = gf_bs_read_u8(bs);
	ptr->config->AVCLevelIndication = gf_bs_read_u8(bs);
	if (ptr->type==GF_ISOM_BOX_TYPE_AVCC) {
		gf_bs_read_int(bs, 6);
	} else {
		ptr->config->complete_representation = gf_bs_read_int(bs, 1);
		gf_bs_read_int(bs, 5);
	}
	ptr->config->nal_unit_size = 1 + gf_bs_read_int(bs, 2);
	gf_bs_read_int(bs, 3);
	count = gf_bs_read_int(bs, 5);

	ptr->size -= 7; //including 2nd count

	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *) gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->sequenceParameterSets, sl);
		ptr->size -= 2+sl->size;
	}

	count = gf_bs_read_u8(bs);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->pictureParameterSets, sl);
		ptr->size -= 2+sl->size;
	}

	if (ptr->type==GF_ISOM_BOX_TYPE_AVCC) {
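		/* High profiles append chroma format, bit depths and optional SPS extensions to the avcC record */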

		switch (ptr->config->AVCProfileIndication) {
		case 100:
		case 110:
		case 122:
		case 144:
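			/* if the extension bytes are absent, derive chroma format and bit depths from the first SPS */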
			if (!ptr->size) {
#ifndef GPAC_DISABLE_AV_PARSERS
				AVCState avc;
				s32 idx, vui_flag_pos;
				GF_AVCConfigSlot *sl = gf_list_get(ptr->config->sequenceParameterSets, 0);
				idx = gf_media_avc_read_sps(sl->data, sl->size, &avc, 0, &vui_flag_pos);
				if (idx>=0) {
					ptr->config->chroma_format = avc.sps[idx].chroma_format;
					ptr->config->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
					ptr->config->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				}
#else
				/*set default values ...*/
				ptr->config->chroma_format = 1;
				ptr->config->luma_bit_depth = 8;
				ptr->config->chroma_bit_depth = 8;
#endif
				return GF_OK;
			}
			gf_bs_read_int(bs, 6);
			ptr->config->chroma_format = gf_bs_read_int(bs, 2);
			gf_bs_read_int(bs, 5);
			ptr->config->luma_bit_depth = 8 + gf_bs_read_int(bs, 3);
			gf_bs_read_int(bs, 5);
			ptr->config->chroma_bit_depth = 8 + gf_bs_read_int(bs, 3);

			count = gf_bs_read_int(bs, 8);
			ptr->size -= 4;
			if (count) {
				ptr->config->sequenceParameterSetExtensions = gf_list_new();
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
					sl->size = gf_bs_read_u16(bs);
					sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
					gf_bs_read_data(bs, sl->data, sl->size);
					gf_list_add(ptr->config->sequenceParameterSetExtensions, sl);
					ptr->size -= sl->size + 2;
				}
			}
			break;
		}
	}
	return GF_OK;
}