/*Transfers ownership of \p esd into the AVC sample entry: bitrate info goes to a 'btrt'
box, non-core descriptors to an 'm4ds' box, and the decoder specific info is re-parsed
into the avcC config. The ESD is always destroyed before returning (fix: it used to leak
when an SVC config was present).*/
GF_Err AVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd)
{
	if (!avc->bitrate) avc->bitrate = (GF_MPEG4BitRateBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_BTRT);
	/*fix: allocation was dereferenced without a check*/
	if (!avc->bitrate) {
		gf_odf_desc_del((GF_Descriptor *)esd);
		return GF_OUT_OF_MEM;
	}
	if (avc->descr) gf_isom_box_del((GF_Box *) avc->descr);
	avc->descr = NULL;
	avc->bitrate->avgBitrate = esd->decoderConfig->avgBitrate;
	avc->bitrate->maxBitrate = esd->decoderConfig->maxBitrate;
	avc->bitrate->bufferSizeDB = esd->decoderConfig->bufferSizeDB;

	/*move all descriptors that have no avcC equivalent into an MPEG-4 extension box*/
	if (gf_list_count(esd->IPIDataSet)
		|| gf_list_count(esd->IPMPDescriptorPointers) || esd->langDesc
		|| gf_list_count(esd->extensionDescriptors)
		|| esd->ipiPtr || esd->qos || esd->RegDescriptor) {

		avc->descr = (GF_MPEG4ExtensionDescriptorsBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_M4DS);
		if (!avc->descr) {
			gf_odf_desc_del((GF_Descriptor *)esd);
			return GF_OUT_OF_MEM;
		}
		/*each transfer NULLs the source field so gf_odf_desc_del(esd) won't double-free*/
		if (esd->RegDescriptor) {
			gf_list_add(avc->descr->descriptors, esd->RegDescriptor);
			esd->RegDescriptor = NULL;
		}
		if (esd->qos) {
			gf_list_add(avc->descr->descriptors, esd->qos);
			esd->qos = NULL;
		}
		if (esd->ipiPtr) {
			gf_list_add(avc->descr->descriptors, esd->ipiPtr);
			esd->ipiPtr = NULL;
		}
		while (gf_list_count(esd->IPIDataSet)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPIDataSet, 0);
			gf_list_rem(esd->IPIDataSet, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		while (gf_list_count(esd->IPMPDescriptorPointers)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->IPMPDescriptorPointers, 0);
			gf_list_rem(esd->IPMPDescriptorPointers, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
		if (esd->langDesc) {
			gf_list_add(avc->descr->descriptors, esd->langDesc);
			esd->langDesc = NULL;
		}
		while (gf_list_count(esd->extensionDescriptors)) {
			GF_Descriptor *desc = (GF_Descriptor *)gf_list_get(esd->extensionDescriptors, 0);
			gf_list_rem(esd->extensionDescriptors, 0);
			gf_list_add(avc->descr->descriptors, desc);
		}
	}

	/*update GF_AVCConfig - only when this entry is not SVC*/
	if (!avc->svc_config) {
		if (!avc->avc_config) avc->avc_config = (GF_AVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC);
		if (!avc->avc_config) {
			gf_odf_desc_del((GF_Descriptor *)esd);
			return GF_OUT_OF_MEM;
		}
		if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			if (avc->avc_config->config) gf_odf_avc_cfg_del(avc->avc_config->config);
			avc->avc_config->config = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		}
	}
	/*fix: the ESD was only freed in the non-SVC branch, leaking it otherwise*/
	gf_odf_desc_del((GF_Descriptor *)esd);
	AVC_RewriteESDescriptor(avc);
	return GF_OK;
}
/*Replaces the AVC decoder configuration of an 'avc1' sample description.
\param the_file movie opened in write mode
\param trackNumber 1-based track index
\param DescriptionIndex 1-based sample description index
\param cfg new configuration, duplicated (caller keeps ownership)
\return GF_BAD_PARAM on invalid args/entry type, GF_OUT_OF_MEM on duplication failure*/
GF_Err gf_isom_avc_config_update(GF_ISOFile *the_file, u32 trackNumber, u32 DescriptionIndex, GF_AVCConfig *cfg)
{
	GF_TrackBox *trak;
	GF_Err e;
	GF_MPEGVisualSampleEntryBox *entry;

	e = CanAccessMovie(the_file, GF_ISOM_OPEN_WRITE);
	if (e) return e;
	trak = gf_isom_get_track_from_file(the_file, trackNumber);
	if (!trak || !trak->Media || !cfg || !DescriptionIndex) return GF_BAD_PARAM;
	entry = (GF_MPEGVisualSampleEntryBox *)gf_list_get(trak->Media->information->sampleTable->SampleDescription->boxList, DescriptionIndex-1);
	if (!entry) return GF_BAD_PARAM;
	if (entry->type != GF_ISOM_BOX_TYPE_AVC1) return GF_BAD_PARAM;
	/*fix: entry->avc_config was dereferenced without a NULL check; a malformed file
	can carry an avc1 entry without its avcC child*/
	if (!entry->avc_config) return GF_ISOM_INVALID_FILE;

	if (entry->avc_config->config) gf_odf_avc_cfg_del(entry->avc_config->config);
	entry->avc_config->config = AVC_DuplicateConfig(cfg);
	/*fix: duplication failure used to go unnoticed*/
	if (!entry->avc_config->config) return GF_OUT_OF_MEM;
	AVC_RewriteESDescriptor(entry);
	return GF_OK;
}
/*Parses an AVCDecoderConfigurationRecord (ISO/IEC 14496-15) from an 'avcC' (or SVC
variant) box payload into ptr->config.
Fixes: allocation results are now checked, and each parameter-set length is validated
against the remaining bitstream so a truncated/hostile box cannot trigger oversized
allocations or reads past the payload.*/
GF_Err avcc_Read(GF_Box *s, GF_BitStream *bs)
{
	u32 i, count;
	GF_AVCConfigurationBox *ptr = (GF_AVCConfigurationBox *)s;

	if (ptr->config) gf_odf_avc_cfg_del(ptr->config);
	ptr->config = gf_odf_avc_cfg_new();
	if (!ptr->config) return GF_OUT_OF_MEM;

	ptr->config->configurationVersion = gf_bs_read_u8(bs);
	ptr->config->AVCProfileIndication = gf_bs_read_u8(bs);
	ptr->config->profile_compatibility = gf_bs_read_u8(bs);
	ptr->config->AVCLevelIndication = gf_bs_read_u8(bs);
	if (ptr->type==GF_ISOM_BOX_TYPE_AVCC) {
		gf_bs_read_int(bs, 6);	/*reserved '111111'*/
	} else {
		/*SVC variants carry a complete_representation flag*/
		ptr->config->complete_representation = gf_bs_read_int(bs, 1);
		gf_bs_read_int(bs, 5);
	}
	ptr->config->nal_unit_size = 1 + gf_bs_read_int(bs, 2);
	gf_bs_read_int(bs, 3);	/*reserved '111'*/

	/*sequence parameter sets*/
	count = gf_bs_read_int(bs, 5);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl;
		u32 size = gf_bs_read_u16(bs);
		if (gf_bs_available(bs) < size) return GF_ISOM_INVALID_FILE;
		sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		if (!sl) return GF_OUT_OF_MEM;
		sl->size = size;
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		if (!sl->data) {
			gf_free(sl);
			return GF_OUT_OF_MEM;
		}
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->sequenceParameterSets, sl);
	}

	/*picture parameter sets*/
	count = gf_bs_read_u8(bs);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl;
		u32 size = gf_bs_read_u16(bs);
		if (gf_bs_available(bs) < size) return GF_ISOM_INVALID_FILE;
		sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		if (!sl) return GF_OUT_OF_MEM;
		sl->size = size;
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		if (!sl->data) {
			gf_free(sl);
			return GF_OUT_OF_MEM;
		}
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->pictureParameterSets, sl);
	}
	return GF_OK;
}
/*Capability probe for the OpenSVC decoder module.
Returns GF_CODEC_SUPPORTED only when the stream is (or references) an SVC layer,
GF_CODEC_MAYBE_SUPPORTED for plain AVC (another decoder may be preferred), and
GF_CODEC_NOT_SUPPORTED for anything that is not AVC/SVC visual.*/
static u32 OSVC_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd, u8 PL)
{
	u32 idx, nb_sps;
	Bool found_svc;
	GF_AVCConfig *avc_cfg;

	if (StreamType != GF_STREAM_VISUAL) return GF_CODEC_NOT_SUPPORTED;

	/*media type query*/
	if (!esd) return GF_CODEC_STREAM_TYPE_SUPPORTED;

	switch (esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC:
	case GPAC_OTI_VIDEO_SVC:
		break;
	default:
		return GF_CODEC_NOT_SUPPORTED;
	}

	/*no decoder config to inspect: fall back on the scalability dependency flag*/
	if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data)
		return esd->has_ref_base ? GF_CODEC_SUPPORTED : GF_CODEC_MAYBE_SUPPORTED;

	avc_cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
	if (!avc_cfg) return GF_CODEC_NOT_SUPPORTED;

	found_svc = esd->has_ref_base ? 1 : 0;
	/*scan the SPS list for a subset (SVC) sequence parameter set*/
	nb_sps = gf_list_count(avc_cfg->sequenceParameterSets);
	for (idx=0; idx<nb_sps; idx++) {
		GF_AVCConfigSlot *sps = gf_list_get(avc_cfg->sequenceParameterSets, idx);
		if ((sps->data[0] & 0x1F) == GF_AVC_NALU_SVC_SUBSEQ_PARAM) {
			found_svc = 1;
			break;
		}
	}
	gf_odf_avc_cfg_del(avc_cfg);
	return found_svc ? GF_CODEC_SUPPORTED : GF_CODEC_MAYBE_SUPPORTED;
}
/*Writes the per-track SDP description of a finished hint track: media line, rtpmap,
control attributes and payload-specific fmtp lines.
Fixes:
 - DIMS: "content-script-types" appended dims.contentEncoding instead of
   dims.content_script_types (copy-paste bug);
 - AVC: sprop-parameter-sets emitted a trailing comma when the config had SPS but no PPS;
 - NULL guards on gf_isom_avc_config_get() and on the ISMACryp kms URI.*/
GF_EXPORT
GF_Err gf_hinter_track_finalize(GF_RTPHinter *tkHint, Bool AddSystemInfo)
{
	u32 Width, Height;
	GF_ESD *esd;
	char sdpLine[20000];
	char mediaName[30], payloadName[30];

	Width = Height = 0;
	gf_isom_sdp_clean_track(tkHint->file, tkHint->TrackNum);
	if (gf_isom_get_media_type(tkHint->file, tkHint->TrackNum) == GF_ISOM_MEDIA_VISUAL)
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

	gf_rtp_builder_get_payload_name(tkHint->rtp_p, payloadName, mediaName);

	/*TODO- extract out of rtp_p for future live tools*/
	sprintf(sdpLine, "m=%s 0 RTP/%s %d", mediaName, tkHint->rtp_p->slMap.IV_length ? "SAVP" : "AVP", tkHint->rtp_p->PayloadType);
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	if (tkHint->bandwidth) {
		sprintf(sdpLine, "b=AS:%d", tkHint->bandwidth);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (tkHint->nb_chan) {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution, tkHint->nb_chan);
	} else {
		sprintf(sdpLine, "a=rtpmap:%d %s/%d", tkHint->rtp_p->PayloadType, payloadName, tkHint->rtp_p->sl_config.timestampResolution);
	}
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);

	/*control for MPEG-4*/
	if (AddSystemInfo) {
		sprintf(sdpLine, "a=mpeg4-esid:%d", gf_isom_get_track_id(tkHint->file, tkHint->TrackNum));
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*control for QTSS/DSS*/
	sprintf(sdpLine, "a=control:trackID=%d", gf_isom_get_track_id(tkHint->file, tkHint->HintTrack));
	gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);

	/*H263 extensions*/
	if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H263) {
		sprintf(sdpLine, "a=cliprect:0,0,%d,%d", Height, Width);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*AMR*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR) || (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1", tkHint->rtp_p->PayloadType);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*Text*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(tkHint->rtp_p, payloadName, sdpLine, tkHint->file, tkHint->TrackNum);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (tkHint->rtp_p->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d", tkHint->rtp_p->PayloadType, tkHint->rtp_p->auh_size*20);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*H264/AVC*/
	else if (tkHint->rtp_p->rtp_payt == GF_RTP_PAYT_H264_AVC) {
		GF_AVCConfig *avcc = gf_isom_avc_config_get(tkHint->file, tkHint->TrackNum, 1);
		/*fix: config may be missing on malformed files - skip the fmtp line then*/
		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", tkHint->rtp_p->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				/*fix: only emit the SPS/PPS separator when PPS follow (no trailing comma)*/
				if (i && gf_list_count(avcc->pictureParameterSets)) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s]=0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
			gf_odf_avc_cfg_del(avcc);
		}
	}
	/*MPEG-4 decoder config*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_MPEG4) {
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);
		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		} else {
			gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, NULL, 0);
		}
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

		if (tkHint->rtp_p->slMap.IV_length) {
			const char *kms;
			gf_isom_get_ismacryp_info(tkHint->file, tkHint->TrackNum, 1, NULL, NULL, NULL, NULL, &kms, NULL, NULL, NULL);
			/*fix: kms may be NULL on malformed ISMACryp info*/
			if (kms) {
				if (!strnicmp(kms, "(key)", 5) || !strnicmp(kms, "(ipmp)", 6) || !strnicmp(kms, "(uri)", 5)) {
					strcat(sdpLine, "; ISMACrypKey=");
				} else {
					strcat(sdpLine, "; ISMACrypKey=(uri)");
				}
				strcat(sdpLine, kms);
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*MPEG-4 Audio LATM*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_LATM) {
		GF_BitStream *bs;
		char *config_bytes;
		u32 config_size;

		/* form config string */
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE);
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */
		gf_bs_write_int(bs, 1, 1); /* all streams same time */
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */
		gf_bs_write_int(bs, 0, 4); /* numPrograms */
		gf_bs_write_int(bs, 0, 3); /* numLayer */

		/* audio-specific config */
		esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);
		if (esd && esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo) {
			/*PacketVideo patch: don't signal SBR and PS stuff, not allowed in LATM with audioMuxVersion=0*/
			gf_bs_write_data(bs, esd->decoderConfig->decoderSpecificInfo->data, MIN(esd->decoderConfig->decoderSpecificInfo->dataLength, 2) );
		}
		if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

		/* other data */
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */
		gf_bs_get_content(bs, &config_bytes, &config_size);
		gf_bs_del(bs);

		gf_rtp_builder_format_sdp(tkHint->rtp_p, payloadName, sdpLine, config_bytes, config_size);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
		gf_free(config_bytes);
	}
	/*3GPP DIMS*/
	else if (tkHint->rtp_p->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
		GF_DIMSDescription dims;
		char fmt[200];
		gf_isom_get_visual_info(tkHint->file, tkHint->TrackNum, 1, &Width, &Height);

		gf_isom_get_dims_description(tkHint->file, tkHint->TrackNum, 1, &dims);
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", tkHint->rtp_p->PayloadType, dims.profile);
		if (! dims.fullRequestHost) {
			strcat(sdpLine, ";useFullRequestHost=0");
			sprintf(fmt, ";pathComponents=%d", dims.pathComponents);
			strcat(sdpLine, fmt);
		}
		if (!dims.streamType) strcat(sdpLine, ";stream-type=secondary");
		if (dims.containsRedundant == 1) strcat(sdpLine, ";contains-redundant=main");
		else if (dims.containsRedundant == 2) strcat(sdpLine, ";contains-redundant=redundant");
		if (dims.textEncoding && strlen(dims.textEncoding)) {
			strcat(sdpLine, ";text-encoding=");
			strcat(sdpLine, dims.textEncoding);
		}
		if (dims.contentEncoding && strlen(dims.contentEncoding)) {
			strcat(sdpLine, ";content-coding=");
			strcat(sdpLine, dims.contentEncoding);
		}
		if (dims.content_script_types && strlen(dims.content_script_types) ) {
			strcat(sdpLine, ";content-script-types=");
			/*fix: was appending dims.contentEncoding (copy-paste bug)*/
			strcat(sdpLine, dims.content_script_types);
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	/*extensions for some mobile phones*/
	if (Width && Height) {
		sprintf(sdpLine, "a=framesize:%d %d-%d", tkHint->rtp_p->PayloadType, Width, Height);
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}

	esd = gf_isom_get_esd(tkHint->file, tkHint->TrackNum, 1);
	if (esd && esd->decoderConfig && (esd->decoderConfig->rvc_config || esd->decoderConfig->predefined_rvc_config)) {
		if (esd->decoderConfig->predefined_rvc_config) {
			sprintf(sdpLine, "a=rvc-config-predef:%d", esd->decoderConfig->predefined_rvc_config);
		} else {
			/*temporary ...*/
			if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_avc.xml");
			} else {
				sprintf(sdpLine, "a=rvc-config:%s", "http://download.tsi.telecom-paristech.fr/gpac/RVC/rvc_config_sp.xml");
			}
		}
		gf_isom_sdp_add_track_line(tkHint->file, tkHint->HintTrack, sdpLine);
	}
	if (esd) gf_odf_desc_del((GF_Descriptor *)esd);

	gf_isom_set_track_enabled(tkHint->file, tkHint->HintTrack, 1);
	return GF_OK;
}
/*Creates an RTP hinter for one media track: inspects the track's media subtype to pick
an RTP payload format, builds the SL config/packetizer, creates the hint track and sets
interleaving. Returns NULL with *e set on failure (*e==GF_OK with NULL means "empty
track, nothing to hint"). On success the caller owns the returned hinter.*/
GF_EXPORT
GF_RTPHinter *gf_hinter_track_new(GF_ISOFile *file, u32 TrackNum,
								  u32 Path_MTU, u32 max_ptime, u32 default_rtp_rate, u32 flags, u8 PayloadID,
								  Bool copy_media, u32 InterleaveGroupID, u8 InterleaveGroupPriority, GF_Err *e)
{
	GF_SLConfig my_sl;
	u32 descIndex, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, maxDTSDelta;
	u8 OfficialPayloadID;
	u32 TrackMediaSubType, TrackMediaType, hintType, nbEdts, required_rate, force_dts_delta, avc_nalu_size, PL_ID, bandwidth, IV_length, KI_length;
	const char *url, *urn;
	char *mpeg4mode;
	Bool is_crypted, has_mpeg4_mapping;
	GF_RTPHinter *tmp;
	GF_ESD *esd;

	*e = GF_BAD_PARAM;
	if (!file || !TrackNum || !gf_isom_get_track_id(file, TrackNum)) return NULL;
	/*empty track: not an error, but nothing to do*/
	if (!gf_isom_get_sample_count(file, TrackNum)) { *e = GF_OK; return NULL; }
	*e = GF_NOT_SUPPORTED;
	/*only a single leading empty edit is tolerated; any other edit list cannot be hinted*/
	nbEdts = gf_isom_get_edit_segment_count(file, TrackNum);
	if (nbEdts>1) {
		u64 et, sd, mt;
		u8 em;
		gf_isom_get_edit_segment(file, TrackNum, 1, &et, &sd, &mt, &em);
		if ((nbEdts>2) || (em!=GF_ISOM_EDIT_EMPTY)) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[rtp hinter] Cannot hint track whith EditList\n"));
			return NULL;
		}
	}
	if (nbEdts) gf_isom_remove_edit_segments(file, TrackNum);
	if (!gf_isom_is_track_enabled(file, TrackNum)) return NULL;

	/*by default NO PL signaled*/
	PL_ID = 0;
	OfficialPayloadID = 0;
	force_dts_delta = 0;
	streamType = oti = 0;
	mpeg4mode = NULL;
	required_rate = 0;
	is_crypted = 0;
	IV_length = KI_length = 0;
	oti = 0;
	nb_ch = 0;
	avc_nalu_size = 0;
	has_mpeg4_mapping = 1;
	TrackMediaType = gf_isom_get_media_type(file, TrackNum);
	TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);

	/*for max compatibility with QT*/
	if (!default_rtp_rate) default_rtp_rate = 90000;

	/*timed-text is a bit special, we support multiple stream descriptions & co*/
	if ( (TrackMediaType==GF_ISOM_MEDIA_TEXT) || (TrackMediaType==GF_ISOM_MEDIA_SUBT)) {
		hintType = GF_RTP_PAYT_3GPP_TEXT;
		oti = GPAC_OTI_TEXT_MPEG4;
		streamType = GF_STREAM_TEXT;
		/*fixme - this works cos there's only one PL for text in mpeg4 at the current time*/
		PL_ID = 0x10;
	} else {
		if (gf_isom_get_sample_description_count(file, TrackNum) > 1) return NULL;
		TrackMediaSubType = gf_isom_get_media_subtype(file, TrackNum, 1);
		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP:
			is_crypted = 1;
			/*fallthrough: ISMACryp-protected MPEG-4 uses the same ESD-driven path*/
		case GF_ISOM_SUBTYPE_MPEG4:
			esd = gf_isom_get_esd(file, TrackNum, 1);
			hintType = GF_RTP_PAYT_MPEG4;
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;
				/*remote stream: cannot hint*/
				if (esd->URLString) hintType = 0;
				/*AAC*/
				if ((streamType==GF_STREAM_AUDIO) && esd->decoderConfig->decoderSpecificInfo
					/*(nb: we use mpeg4 for MPEG-2 AAC)*/
					/*NOTE(review): GPAC_OTI_AUDIO_AAC_MPEG4 is tested twice below - one of
					the two terms is probably meant to be a different OTI; confirm against upstream*/
					&& ((oti==GPAC_OTI_AUDIO_AAC_MPEG4) || (oti==GPAC_OTI_AUDIO_AAC_MPEG4) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_MP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_LCP) || (oti==GPAC_OTI_AUDIO_AAC_MPEG2_SSRP)) ) {
					u32 sample_rate;
					GF_M4ADecSpecInfo a_cfg;
					gf_m4a_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &a_cfg);
					nb_ch = a_cfg.nb_chan;
					sample_rate = a_cfg.base_sr;
					PL_ID = a_cfg.audioPL;
					switch (a_cfg.base_object_type) {
					case GF_M4A_AAC_MAIN:
					case GF_M4A_AAC_LC:
						if (flags & GP_RTP_PCK_USE_LATM_AAC) {
							hintType = GF_RTP_PAYT_LATM;
							break;
						}
						/*fallthrough: non-LATM MAIN/LC is packetized as mpeg4-generic "AAC"*/
					case GF_M4A_AAC_SBR:
					case GF_M4A_AAC_PS:
					case GF_M4A_AAC_LTP:
					case GF_M4A_AAC_SCALABLE:
					case GF_M4A_ER_AAC_LC:
					case GF_M4A_ER_AAC_LTP:
					case GF_M4A_ER_AAC_SCALABLE:
						mpeg4mode = "AAC";
						break;
					case GF_M4A_CELP:
					case GF_M4A_ER_CELP:
						mpeg4mode = "CELP";
						break;
					}
					required_rate = sample_rate;
				}
				/*MPEG1/2 audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_MPEG2_PART3) || (oti==GPAC_OTI_AUDIO_MPEG1))) {
					u32 sample_rate;
					if (!is_crypted) {
						/*peek the first frame header for channel count / sample rate*/
						GF_ISOSample *samp = gf_isom_get_sample(file, TrackNum, 1, NULL);
						u32 hdr = GF_4CC((u8)samp->data[0], (u8)samp->data[1], (u8)samp->data[2], (u8)samp->data[3]);
						nb_ch = gf_mp3_num_channels(hdr);
						sample_rate = gf_mp3_sampling_rate(hdr);
						gf_isom_sample_del(&samp);
						hintType = GF_RTP_PAYT_MPEG12_AUDIO;
						/*use official RTP/AVP payload type*/
						OfficialPayloadID = 14;
						required_rate = 90000;
					}
					/*encrypted MP3 must be sent through MPEG-4 generic to signal all ISMACryp stuff*/
					else {
						u8 bps;
						gf_isom_get_audio_info(file, TrackNum, 1, &sample_rate, &nb_ch, &bps);
						required_rate = sample_rate;
					}
				}
				/*QCELP audio*/
				else if ((streamType==GF_STREAM_AUDIO) && (oti==GPAC_OTI_AUDIO_13K_VOICE)) {
					hintType = GF_RTP_PAYT_QCELP;
					OfficialPayloadID = 12;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*EVRC/SVM audio*/
				else if ((streamType==GF_STREAM_AUDIO) && ((oti==GPAC_OTI_AUDIO_EVRC_VOICE) || (oti==GPAC_OTI_AUDIO_SMV_VOICE)) ) {
					hintType = GF_RTP_PAYT_EVRC_SMV;
					required_rate = 8000;
					streamType = GF_STREAM_AUDIO;
					nb_ch = 1;
				}
				/*visual streams*/
				else if (streamType==GF_STREAM_VISUAL) {
					if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) {
						GF_M4VDecSpecInfo dsi;
						gf_m4v_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &dsi);
						PL_ID = dsi.VideoPL;
					}
					/*MPEG1/2 video*/
					if ( ((oti>=GPAC_OTI_VIDEO_MPEG2_SIMPLE) && (oti<=GPAC_OTI_VIDEO_MPEG2_422)) || (oti==GPAC_OTI_VIDEO_MPEG1)) {
						if (!is_crypted) {
							hintType = GF_RTP_PAYT_MPEG12_VIDEO;
							OfficialPayloadID = 32;
						}
					}
					/*for ISMA*/
					if (is_crypted) {
						/*that's another pain with ISMACryp, even if no B-frames the DTS is signaled...*/
						if (oti==GPAC_OTI_VIDEO_MPEG4_PART2) force_dts_delta = 22;
						else if (oti==GPAC_OTI_VIDEO_AVC) {
							flags &= ~GP_RTP_PCK_USE_MULTI;
							force_dts_delta = 22;
						}
						flags |= GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_SIGNAL_TS;
					}
					required_rate = default_rtp_rate;
				}
				/*systems streams*/
				else if (gf_isom_has_sync_shadows(file, TrackNum) || gf_isom_has_sample_dependency(file, TrackNum)) {
					flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
			break;
		case GF_ISOM_SUBTYPE_3GP_H263:
			hintType = GF_RTP_PAYT_H263;
			required_rate = 90000;
			streamType = GF_STREAM_VISUAL;
			OfficialPayloadID = 34;
			/*not 100% compliant (short header is missing) but should still work*/
			oti = GPAC_OTI_VIDEO_MPEG4_PART2;
			PL_ID = 0x01;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_AMR;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_AMR_WB:
			required_rate = 16000;
			hintType = GF_RTP_PAYT_AMR_WB;
			streamType = GF_STREAM_AUDIO;
			has_mpeg4_mapping = 0;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc = gf_isom_avc_config_get(file, TrackNum, 1);
			required_rate = 90000;	/* "90 kHz clock rate MUST be used"*/
			hintType = GF_RTP_PAYT_H264_AVC;
			streamType = GF_STREAM_VISUAL;
			avc_nalu_size = avcc->nal_unit_size;
			oti = GPAC_OTI_VIDEO_AVC;
			PL_ID = 0x0F;
			gf_odf_avc_cfg_del(avcc);
		}
			break;
		case GF_ISOM_SUBTYPE_3GP_QCELP:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_QCELP;
			streamType = GF_STREAM_AUDIO;
			oti = GPAC_OTI_AUDIO_13K_VOICE;
			OfficialPayloadID = 12;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_EVRC:
		case GF_ISOM_SUBTYPE_3GP_SMV:
			required_rate = 8000;
			hintType = GF_RTP_PAYT_EVRC_SMV;
			streamType = GF_STREAM_AUDIO;
			oti = (TrackMediaSubType==GF_ISOM_SUBTYPE_3GP_EVRC) ? GPAC_OTI_AUDIO_EVRC_VOICE : GPAC_OTI_AUDIO_SMV_VOICE;
			nb_ch = 1;
			break;
		case GF_ISOM_SUBTYPE_3GP_DIMS:
			hintType = GF_RTP_PAYT_3GPP_DIMS;
			streamType = GF_STREAM_SCENE;
			break;
		case GF_ISOM_SUBTYPE_AC3:
			hintType = GF_RTP_PAYT_AC3;
			streamType = GF_STREAM_AUDIO;
			gf_isom_get_audio_info(file, TrackNum, 1, NULL, &nb_ch, NULL);
			break;
		default:
			/*ERROR*/
			hintType = 0;
			break;
		}
	}

	/*not hintable*/
	if (!hintType) return NULL;
	/*we only support self-contained files for hinting*/
	gf_isom_get_data_reference(file, TrackNum, 1, &url, &urn);
	if (url || urn) return NULL;

	*e = GF_OUT_OF_MEM;
	GF_SAFEALLOC(tmp, GF_RTPHinter);
	if (!tmp) return NULL;

	/*override hinter type if requested and possible*/
	if (has_mpeg4_mapping && (flags & GP_RTP_PCK_FORCE_MPEG4)) {
		hintType = GF_RTP_PAYT_MPEG4;
		avc_nalu_size = 0;
	}
	/*use static payload ID if enabled*/
	else if (OfficialPayloadID && (flags & GP_RTP_PCK_USE_STATIC_ID) ) {
		PayloadID = OfficialPayloadID;
	}

	tmp->file = file;
	tmp->TrackNum = TrackNum;
	tmp->avc_nalu_size = avc_nalu_size;
	tmp->nb_chan = nb_ch;
	/*spatial scalability check*/
	tmp->has_ctts = gf_isom_has_time_offset(file, TrackNum);

	/*get sample info*/
	gf_media_get_sample_average_infos(file, TrackNum, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

	/*systems carousel: we need at least IDX and RAP signaling*/
	if (flags & GP_RTP_PCK_SYSTEMS_CAROUSEL) {
		flags |= GP_RTP_PCK_SIGNAL_RAP;
	}

	/*update flags in MultiSL*/
	if (flags & GP_RTP_PCK_USE_MULTI) {
		if (MinSize != MaxSize) flags |= GP_RTP_PCK_SIGNAL_SIZE;
		if (!const_dur) flags |= GP_RTP_PCK_SIGNAL_TS;
	}
	/*composition offsets present: timestamps must always be signaled*/
	if (tmp->has_ctts) flags |= GP_RTP_PCK_SIGNAL_TS;

	/*default SL for RTP */
	InitSL_RTP(&my_sl);
	my_sl.timestampResolution = gf_isom_get_media_timescale(file, TrackNum);
	/*override clockrate if set*/
	if (required_rate) {
		Double sc = required_rate;
		sc /= my_sl.timestampResolution;
		maxDTSDelta = (u32) (maxDTSDelta*sc);
		my_sl.timestampResolution = required_rate;
	}
	/*switch to RTP TS*/
	max_ptime = (u32) (max_ptime * my_sl.timestampResolution / 1000);

	my_sl.AUSeqNumLength = gf_get_bit_size(gf_isom_get_sample_count(file, TrackNum));
	my_sl.CUDuration = const_dur;

	if (gf_isom_has_sync_points(file, TrackNum)) {
		my_sl.useRandomAccessPointFlag = 1;
	} else {
		my_sl.useRandomAccessPointFlag = 0;
		my_sl.hasRandomAccessUnitsOnlyFlag = 1;
	}

	if (is_crypted) {
		Bool use_sel_enc;
		gf_isom_get_ismacryp_info(file, TrackNum, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
		if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
	}

	// in case a different timescale was provided
	tmp->OrigTimeScale = gf_isom_get_media_timescale(file, TrackNum);
	tmp->rtp_p = gf_rtp_builder_new(hintType, &my_sl, flags, tmp,
									MP4T_OnNewPacket, MP4T_OnPacketDone,
									/*if copy, no data ref*/
									copy_media ? NULL : MP4T_OnDataRef,
									MP4T_OnData);

	//init the builder
	gf_rtp_builder_init(tmp->rtp_p, PayloadID, Path_MTU, max_ptime,
						streamType, oti, PL_ID, MinSize, MaxSize, avgTS, maxDTSDelta, IV_length, KI_length, mpeg4mode);

	/*ISMA compliance is a pain...*/
	if (force_dts_delta) tmp->rtp_p->slMap.DTSDeltaLength = force_dts_delta;

	/*		Hint Track Setup	*/
	tmp->TrackID = gf_isom_get_track_id(file, TrackNum);
	/*pick a hint track ID that does not collide with existing tracks*/
	tmp->HintID = tmp->TrackID + 65535;
	while (gf_isom_get_track_by_id(file, tmp->HintID)) tmp->HintID++;

	tmp->HintTrack = gf_isom_new_track(file, tmp->HintID, GF_ISOM_MEDIA_HINT, my_sl.timestampResolution);
	gf_isom_setup_hint_track(file, tmp->HintTrack, GF_ISOM_HINT_RTP);
	/*create a hint description*/
	gf_isom_new_hint_description(file, tmp->HintTrack, -1, -1, 0, &descIndex);
	gf_isom_rtp_set_timescale(file, tmp->HintTrack, descIndex, my_sl.timestampResolution);

	if (hintType==GF_RTP_PAYT_MPEG4) {
		tmp->rtp_p->slMap.ObjectTypeIndication = oti;
		/*set this SL for extraction.*/
		gf_isom_set_extraction_slc(file, TrackNum, 1, &my_sl);
	}
	tmp->bandwidth = bandwidth;

	/*set interleaving*/
	gf_isom_set_track_group(file, TrackNum, InterleaveGroupID);
	if (!copy_media) {
		/*if we don't copy data set hint track and media track in the same group*/
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID);
	} else {
		gf_isom_set_track_group(file, tmp->HintTrack, InterleaveGroupID + OFFSET_HINT_GROUP_ID);
	}
	/*use user-secified priority*/
	InterleaveGroupPriority*=2;
	gf_isom_set_track_priority_in_group(file, TrackNum, InterleaveGroupPriority+1);
	gf_isom_set_track_priority_in_group(file, tmp->HintTrack, InterleaveGroupPriority);

#if 0
	/*QT FF: not setting these flags = server uses a random offset*/
	gf_isom_rtp_set_time_offset(file, tmp->HintTrack, 1, 0);
	/*we don't use seq offset for maintainance pruposes*/
	gf_isom_rtp_set_time_sequence_offset(file, tmp->HintTrack, 1, 0);
#endif
	*e = GF_OK;
	return tmp;
}
/*Creates an RTP streamer over an ISO file: one GF_RTPTrack (with its own packetizer and
port pair) per hintable track. Returns NULL on failure.
Fixes:
 - the "cannot open file" log printed the never-assigned 'opt' instead of file_name;
 - the streamer, its dest_ip copy, the opened movie and already-created tracks were
   leaked on every failure path;
 - GF_SAFEALLOC result is now checked before use.*/
GF_EXPORT
GF_ISOMRTPStreamer *gf_isom_streamer_new(const char *file_name, const char *ip_dest, u16 port, Bool loop, Bool force_mpeg4, u32 path_mtu, u32 ttl, char *ifce_addr)
{
	GF_ISOMRTPStreamer *streamer;
	GF_Err e = GF_OK;
	u32 i, max_ptime, au_sn_len;
	u8 payt;
	GF_ISOFile *file;
	GF_RTPTrack *track, *prev_track;
	u16 first_port;
	u32 nb_tracks;
	u32 sess_data_size;

	if (!ip_dest) ip_dest = "127.0.0.1";
	if (!port) port = 7000;
	if (!path_mtu) path_mtu = 1450;

	GF_SAFEALLOC(streamer, GF_ISOMRTPStreamer);
	if (!streamer) return NULL;
	streamer->dest_ip = gf_strdup(ip_dest);

	payt = 96;
	max_ptime = au_sn_len = 0;

	file = gf_isom_open(file_name, GF_ISOM_OPEN_READ, NULL);
	if (!file) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Error opening file %s: %s\n", file_name, gf_error_to_string(gf_isom_last_error(NULL))));
		gf_free(streamer->dest_ip);
		gf_free(streamer);
		return NULL;
	}

	streamer->isom = file;
	streamer->loop = loop;
	streamer->force_mpeg4_generic = force_mpeg4;
	first_port = port;
	sess_data_size = 0;
	prev_track = NULL;

	nb_tracks = gf_isom_get_track_count(streamer->isom);
	for (i=0; i<nb_tracks; i++) {
		u32 mediaSize, mediaDuration, flags, MinSize, MaxSize, avgTS, streamType, oti, const_dur, nb_ch, samplerate, maxDTSDelta, TrackMediaSubType, TrackMediaType, bandwidth, IV_length, KI_length, dsi_len;
		const char *url, *urn;
		char *dsi;
		Bool is_crypted;

		dsi_len = samplerate = streamType = oti = nb_ch = IV_length = KI_length = 0;
		is_crypted = 0;
		dsi = NULL;
		flags = 0;

		/*we only support self-contained files for hinting*/
		gf_isom_get_data_reference(streamer->isom, i+1, 1, &url, &urn);
		if (url || urn) continue;

		TrackMediaType = gf_isom_get_media_type(streamer->isom, i+1);
		TrackMediaSubType = gf_isom_get_media_subtype(streamer->isom, i+1, 1);
		switch (TrackMediaType) {
		case GF_ISOM_MEDIA_TEXT:
			break;
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_OD:
		case GF_ISOM_MEDIA_SCENE:
			if (gf_isom_get_sample_description_count(streamer->isom, i+1) > 1) continue;
			break;
		default:
			continue;
		}

		GF_SAFEALLOC(track, GF_RTPTrack);
		if (!track) goto exit;
		if (prev_track) prev_track->next = track;
		else streamer->stream = track;
		prev_track = track;

		track->track_num = i+1;
		track->nb_aus = gf_isom_get_sample_count(streamer->isom, track->track_num);
		track->timescale = gf_isom_get_media_timescale(streamer->isom, track->track_num);
		mediaDuration = (u32)(gf_isom_get_media_duration(streamer->isom, track->track_num)*1000/track->timescale); // ms
		mediaSize = (u32)gf_isom_get_media_data_size(streamer->isom, track->track_num);

		sess_data_size += mediaSize;
		if (mediaDuration > streamer->duration_ms) streamer->duration_ms = mediaDuration;

		track->port = check_next_port(streamer, first_port);
		first_port = track->port+2;

		/*init packetizer*/
		if (streamer->force_mpeg4_generic) flags = GP_RTP_PCK_SIGNAL_RAP | GP_RTP_PCK_FORCE_MPEG4;

		switch (TrackMediaSubType) {
		case GF_ISOM_SUBTYPE_MPEG4_CRYP:
			is_crypted = 1;
			/*fallthrough*/
		case GF_ISOM_SUBTYPE_MPEG4:
		{
			GF_ESD *esd = gf_isom_get_esd(streamer->isom, track->track_num, 1);
			if (esd) {
				streamType = esd->decoderConfig->streamType;
				oti = esd->decoderConfig->objectTypeIndication;

				/*audio: get channel count and sample rate*/
				if (streamType==GF_STREAM_AUDIO) {
					gf_isom_get_audio_info(streamer->isom, track->track_num, 1, &samplerate, &nb_ch, NULL);
				}
				/*systems streams*/
				else if (streamType==GF_STREAM_SCENE) {
					if (gf_isom_has_sync_shadows(streamer->isom, track->track_num) || gf_isom_has_sample_dependency(streamer->isom, track->track_num))
						flags |= GP_RTP_PCK_SYSTEMS_CAROUSEL;
				}
				/*steal the decoder specific info from the ESD before deleting it;
				NOTE(review): dsi is never freed after gf_rtp_streamer_new_extended -
				verify whether the streamer takes ownership before adding a free here*/
				if (esd->decoderConfig->decoderSpecificInfo) {
					dsi = esd->decoderConfig->decoderSpecificInfo->data;
					dsi_len = esd->decoderConfig->decoderSpecificInfo->dataLength;
					esd->decoderConfig->decoderSpecificInfo->data = NULL;
					esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
				}
				gf_odf_desc_del((GF_Descriptor*)esd);
			}
		}
			break;
		case GF_ISOM_SUBTYPE_AVC_H264:
		case GF_ISOM_SUBTYPE_AVC2_H264:
		case GF_ISOM_SUBTYPE_SVC_H264:
		{
			GF_AVCConfig *avcc = gf_isom_avc_config_get(streamer->isom, track->track_num, 1);
			track->avc_nalu_size = avcc->nal_unit_size;
			gf_odf_avc_cfg_del(avcc);
			streamType = GF_STREAM_VISUAL;
			oti = GPAC_OTI_VIDEO_AVC;
		}
			break;
		default:
			streamType = GF_STREAM_4CC;
			oti = TrackMediaSubType;
			break;
		}

		/*get sample info*/
		gf_media_get_sample_average_infos(streamer->isom, track->track_num, &MinSize, &MaxSize, &avgTS, &maxDTSDelta, &const_dur, &bandwidth);

		if (is_crypted) {
			Bool use_sel_enc;
			gf_isom_get_ismacryp_info(streamer->isom, track->track_num, 1, NULL, NULL, NULL, NULL, NULL, &use_sel_enc, &IV_length, &KI_length);
			if (use_sel_enc) flags |= GP_RTP_PCK_SELECTIVE_ENCRYPTION;
		}

		track->rtp = gf_rtp_streamer_new_extended(streamType, oti, track->timescale,
					 (char *) streamer->dest_ip, track->port, path_mtu, ttl, ifce_addr,
					 flags, dsi, dsi_len,
					 payt, samplerate, nb_ch,
					 is_crypted, IV_length, KI_length,
					 MinSize, MaxSize, avgTS, maxDTSDelta, const_dur, bandwidth, max_ptime, au_sn_len);

		if (!track->rtp) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("Could not initialize RTP streamer: %s\n", gf_error_to_string(e)));
			goto exit;
		}

		payt++;
		track->microsec_ts_scale = 1000000;
		track->microsec_ts_scale /= gf_isom_get_media_timescale(streamer->isom, track->track_num);
	}
	return streamer;

exit:
	/*fix: release everything acquired so far instead of leaking it*/
	track = streamer->stream;
	while (track) {
		GF_RTPTrack *next = track->next;
		if (track->rtp) gf_rtp_streamer_del(track->rtp);
		gf_free(track);
		track = next;
	}
	gf_isom_close(streamer->isom);
	gf_free(streamer->dest_ip);
	gf_free(streamer);
	return NULL;
}
/*Attaches an AVC/SVC elementary stream to the OpenSVC decoder.
 * Parses the AVCDecoderConfigurationRecord from the decoder specific info (if any),
 * initializes the decoder for the base layer and feeds all SPS/PPS NAL units to it,
 * recording the base-layer dimensions and pixel aspect ratio.
 * Returns GF_NON_COMPLIANT_BITSTREAM if the config cannot be parsed,
 * GF_IO_ERR if the decoder cannot be created, GF_NOT_SUPPORTED for in-band
 * config on an enhancement-only setup, GF_OK otherwise.*/
static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	u32 i, count;
	s32 res;
	OPENSVCFRAME Picture;
	int Layer[4];
	OSVCDec *ctx = (OSVCDec*) ifcg->privateStack;

	/*todo: we should check base layer of this stream is indeed our base layer*/
	if (!ctx->ES_ID) {
		ctx->ES_ID = esd->ESID;
		ctx->width = ctx->height = ctx->out_size = 0;
		if (!esd->dependsOnESID) ctx->baseES_ID = esd->ESID;
	}

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		if (!esd->dependsOnESID) {
			ctx->nalu_size_length = cfg->nal_unit_size;
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) {
				/*fix: don't leak the parsed config when the decoder cannot be created*/
				gf_odf_avc_cfg_del(cfg);
				return GF_IO_ERR;
			}
		}

		/*decode all NALUs*/
		count = gf_list_count(cfg->sequenceParameterSets);
		SetCommandLayer(Layer, 255, 0, &res, 0);//bufindex can be reset without pb
		for (i=0; i<count; i++) {
			u32 w=0, h=0, sid;
			s32 par_n=0, par_d=0;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->sequenceParameterSets, i);
#ifndef GPAC_DISABLE_AV_PARSERS
			gf_avc_get_sps_info(slc->data, slc->size, &sid, &w, &h, &par_n, &par_d);
#endif
			/*by default use the base layer*/
			if (!i) {
				if ((ctx->width<w) || (ctx->height<h)) {
					ctx->width = w;
					ctx->height = h;
					/*fix: pack PAR as num<<16 | den with bitwise OR - the original logical || always produced 1*/
					if ( ((s32)par_n>0) && ((s32)par_d>0) )
						ctx->pixel_ar = (par_n<<16) | par_d;
				}
			}
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: SPS id=\"%d\" code=\"%d\" size=\"%d\"\n", slc->id, slc->data[0] & 0x1F, slc->size));
		}

		count = gf_list_count(cfg->pictureParameterSets);
		for (i=0; i<count; i++) {
			u32 sps_id, pps_id;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->pictureParameterSets, i);
			gf_avc_get_pps_info(slc->data, slc->size, &pps_id, &sps_id);
			res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res));
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[SVC Decoder] Attach: PPS id=\"%d\" code=\"%d\" size=\"%d\" sps_id=\"%d\"\n", pps_id, slc->data[0] & 0x1F, slc->size, sps_id));
		}
		ctx->state_found = 1;
		gf_odf_avc_cfg_del(cfg);
	} else {
		if (ctx->nalu_size_length) {
			return GF_NOT_SUPPORTED;
		}
		ctx->nalu_size_length = 0;
		if (!esd->dependsOnESID) {
			if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
		}
		/*fix: square pixels 1:1 packed as 1<<16 | 1 (was logical ||, yielding 1)*/
		ctx->pixel_ar = (1<<16) | 1;
	}
	ctx->stride = ctx->width + 32;
	ctx->CurrentDqId = ctx->MaxDqId = 0;
	/*YUV 4:2:0 output, one line of padding on each side of the luma plane*/
	ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	return GF_OK;
}
/*Attaches an AVC/SVC elementary stream to the OpenSVC decoder (base layer only;
 * streams depending on another ES are rejected in this version).
 * Parses the decoder config, initializes the codec and pushes all SPS/PPS to it,
 * recording the stream dimensions and pixel aspect ratio.
 * Returns GF_NOT_SUPPORTED for dependent streams, GF_NON_COMPLIANT_BITSTREAM on
 * unparsable config, GF_IO_ERR on decoder init failure, GF_OK otherwise.*/
static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	u32 i, count;
	s32 res;
	OPENSVCFRAME Picture;
	int Layer[4];
	OSVCDec *ctx = (OSVCDec*) ifcg->privateStack;

	/*not supported in this version*/
	if (esd->dependsOnESID) return GF_NOT_SUPPORTED;

	ctx->ES_ID = esd->ESID;
	ctx->width = ctx->height = ctx->out_size = 0;

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		ctx->nalu_size_length = cfg->nal_unit_size;
		if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) {
			/*fix: don't leak the parsed config when the decoder cannot be created*/
			gf_odf_avc_cfg_del(cfg);
			return GF_IO_ERR;
		}

		/*decode all NALUs*/
		count = gf_list_count(cfg->sequenceParameterSets);
		SetCommandLayer(Layer, 255, 0, &i, 0);//bufindex can be reset without pb
		for (i=0; i<count; i++) {
			u32 w, h, par_n, par_d;
			GF_AVCConfigSlot *slc = gf_list_get(cfg->sequenceParameterSets, i);
			gf_avc_get_sps_info(slc->data, slc->size, &slc->id, &w, &h, &par_n, &par_d);
			/*by default use the base layer*/
			if (!i) {
				if ((ctx->width<w) || (ctx->height<h)) {
					ctx->width = w;
					ctx->height = h;
					/*fix: pack PAR as num<<16 | den with bitwise OR - the original logical || always produced 1*/
					if ( ((s32)par_n>0) && ((s32)par_d>0) )
						ctx->pixel_ar = (par_n<<16) | par_d;
				}
			}
			res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res));
			}
		}

		count = gf_list_count(cfg->pictureParameterSets);
		for (i=0; i<count; i++) {
			GF_AVCConfigSlot *slc = gf_list_get(cfg->pictureParameterSets, i);
			res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer);
			if (res<0) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res));
			}
		}
		gf_odf_avc_cfg_del(cfg);
	} else {
		/*no out-of-band config: wait for in-band parameter sets*/
		ctx->nalu_size_length = 0;
		if (SVCDecoder_init(&ctx->codec) == SVC_STATUS_ERROR) return GF_IO_ERR;
	}
	ctx->stride = ctx->width + 32;
	ctx->layer = 0;
	ctx->CurrDqId = ctx->layer;
	/*YUV 4:2:0 output on the padded stride*/
	ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	return GF_OK;
}
/*Attaches a stream to the MediaCodec decoder wrapper.
 * For AVC: registers all SPS/PPS from the decoder config and initializes the
 * decoder once at least one SPS and one PPS are known; with no config, a dummy
 * 128x128 NV12 output is set up until in-band parameter sets arrive.
 * For MPEG-4 part 2 and HEVC: reads the dimensions (when available) and
 * initializes the decoder.
 * Returns GF_NON_COMPLIANT_BITSTREAM on broken AVC config, otherwise the
 * result of MCDec_InitDecoder (or GF_OK when init is deferred).*/
static GF_Err MCDec_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd)
{
	GF_Err e;
	MCDec *ctx = (MCDec *)ifcg->privateStack;
	ctx->esd = esd;

	//check AVC config
	if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_AVC) {
		ctx->SPSs = gf_list_new();
		ctx->PPSs = gf_list_new();
		ctx->mime = "video/avc";
		ctx->avc.sps_active_idx = -1;
		ctx->active_sps = ctx->active_pps = -1;

		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			/*no out-of-band config: dummy output until in-band SPS/PPS are seen*/
			ctx->width=ctx->height=128;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_NV12;
			return GF_OK;
		} else {
			u32 i;
			GF_AVCConfigSlot *slc;
			GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
			/*fix: gf_odf_avc_cfg_read can fail on a broken config - was dereferenced unchecked*/
			if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;

			for (i = 0; i<gf_list_count(cfg->sequenceParameterSets); i++) {
				slc = gf_list_get(cfg->sequenceParameterSets, i);
				slc->id = -1;
				MCDec_RegisterParameterSet(ctx, slc->data, slc->size, GF_TRUE);
			}
			for (i = 0; i<gf_list_count(cfg->pictureParameterSets); i++) {
				slc = gf_list_get(cfg->pictureParameterSets, i);
				slc->id = -1;
				MCDec_RegisterParameterSet(ctx, slc->data, slc->size, GF_FALSE);
			}

			/*first registered SPS/PPS become the active ones*/
			slc = gf_list_get(ctx->SPSs, 0);
			if (slc) ctx->active_sps = slc->id;
			slc = gf_list_get(ctx->PPSs, 0);
			if (slc) ctx->active_pps = slc->id;

			ctx->nalu_size_length = cfg->nal_unit_size;
			if (gf_list_count(ctx->SPSs) && gf_list_count(ctx->PPSs)) {
				e = MCDec_InitDecoder(ctx);
			} else {
				e = GF_OK;
			}
			gf_odf_avc_cfg_del(cfg);
			return e;
		}
	}

	if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_MPEG4_PART2) {
		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			/*no config: dummy output; falls through to the final init call below*/
			ctx->width=ctx->height=128;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_NV12;
		} else {
			GF_M4VDecSpecInfo vcfg;
			gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
			ctx->width = vcfg.width;
			ctx->height = vcfg.height;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_NV12;
			return MCDec_InitDecoder(ctx);
		}
	}

	if (esd->decoderConfig->objectTypeIndication == GPAC_OTI_VIDEO_HEVC) {
		ctx->esd= esd;
		if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data) {
			/*no config: dummy output; falls through to the final init call below*/
			ctx->width=ctx->height=128;
			ctx->out_size = ctx->width*ctx->height*3/2;
			ctx->pix_fmt = GF_PIXEL_NV12;
		} else {
			return MCDec_InitDecoder(ctx);
		}
	}
	/*NOTE(review): unlike the AVC no-DSI path (which returns GF_OK), MPEG-4/HEVC
	 with no DSI still init the decoder here - confirm this asymmetry is intended*/
	return MCDec_InitDecoder(ctx);
}
/*Creates the VideoToolbox decompression session for the stream described by
 * ctx->esd: builds the codec-specific sample-description extensions ("avcC"
 * or "esds"), creates a CMVideoFormatDescription, then a decompression
 * session (hardware first, software fallback) and computes the output
 * buffer size from the negotiated pixel format.
 * @param ctx              decoder context (esd, sps/pps/vosh must be set as needed)
 * @param force_dsi_rewrite when set, the avcC is rebuilt from ctx->sps/ctx->pps
 *                          even if a decoder specific info is present
 * @return GF_OK, GF_NOT_SUPPORTED / GF_NON_COMPLIANT_BITSTREAM / GF_IO_ERR /
 *         GF_BAD_PARAM / GF_SERVICE_ERROR depending on the failure.
 * NOTE(review): left byte-identical - CF object ownership (Create/CFRelease
 * pairing) and the DSI rewrite ordering are too intertwined to restyle safely.*/
static GF_Err VTBDec_InitDecoder(VTBDec *ctx, Bool force_dsi_rewrite)
{
	CFMutableDictionaryRef dec_dsi, dec_type;
	CFMutableDictionaryRef dsi;
	VTDecompressionOutputCallbackRecord cbacks;
	CFDictionaryRef buffer_attribs;
	OSStatus status;
	OSType kColorSpace;
	CFDataRef data = NULL;
	char *dsi_data=NULL;
	u32 dsi_data_size=0;

	dec_dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

	/*default output format: 8-bit planar 4:2:0*/
	kColorSpace = kCVPixelFormatType_420YpCbCr8Planar;
	ctx->pix_fmt = GF_PIXEL_YV12;

	switch (ctx->esd->decoderConfig->objectTypeIndication) {
	case GPAC_OTI_VIDEO_AVC :
		if (ctx->sps && ctx->pps) {
			AVCState avc;
			s32 idx;
			memset(&avc, 0, sizeof(AVCState));
			avc.sps_active_idx = -1;
			/*parse the SPS to learn dimensions, PAR and bit depths*/
			idx = gf_media_avc_read_sps(ctx->sps, ctx->sps_size, &avc, 0, NULL);
			ctx->vtb_type = kCMVideoCodecType_H264;
			assert(ctx->sps);
			ctx->width = avc.sps[idx].width;
			ctx->height = avc.sps[idx].height;
			if (avc.sps[idx].vui.par_num && avc.sps[idx].vui.par_den) {
				/*pixel AR packed as num<<16 | den*/
				ctx->pixel_ar = avc.sps[idx].vui.par_num;
				ctx->pixel_ar <<= 16;
				ctx->pixel_ar |= avc.sps[idx].vui.par_den;
			}
			ctx->chroma_format = avc.sps[idx].chroma_format;
			ctx->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
			ctx->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;

			/*map chroma format + bit depth to a CoreVideo pixel format*/
			switch (ctx->chroma_format) {
			case 2:
				//422 decoding doesn't seem supported ...
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_422YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV422_10;
				} else {
					kColorSpace = kCVPixelFormatType_422YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV422;
				}
				break;
			case 3:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_444YpCbCr10;
					ctx->pix_fmt = GF_PIXEL_YUV444_10;
				} else {
					kColorSpace = kCVPixelFormatType_444YpCbCr8;
					ctx->pix_fmt = GF_PIXEL_YUV444;
				}
				break;
			default:
				if (ctx->luma_bit_depth>8) {
					kColorSpace = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
					ctx->pix_fmt = GF_PIXEL_YV12_10;
				}
				break;
			}

			/*no usable DSI (or caller forces it): rebuild an avcC from the cached SPS/PPS*/
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || force_dsi_rewrite || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				GF_AVCConfigSlot *slc_s, *slc_p;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_new();
				cfg->configurationVersion = 1;
				cfg->profile_compatibility = avc.sps[idx].prof_compat;
				cfg->AVCProfileIndication = avc.sps[idx].profile_idc;
				cfg->AVCLevelIndication = avc.sps[idx].level_idc;
				cfg->chroma_format = avc.sps[idx].chroma_format;
				cfg->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
				cfg->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				cfg->nal_unit_size = 4;

				GF_SAFEALLOC(slc_s, GF_AVCConfigSlot);
				slc_s->data = ctx->sps;
				slc_s->size = ctx->sps_size;
				gf_list_add(cfg->sequenceParameterSets, slc_s);

				GF_SAFEALLOC(slc_p, GF_AVCConfigSlot);
				slc_p->data = ctx->pps;
				slc_p->size = ctx->pps_size;
				gf_list_add(cfg->pictureParameterSets , slc_p);

				gf_odf_avc_cfg_write(cfg, &dsi_data, &dsi_data_size);
				/*the slots borrow ctx->sps/ctx->pps - detach before deleting the config*/
				slc_s->data = slc_p->data = NULL;
				gf_odf_avc_cfg_del((cfg));
			} else {
				dsi_data = ctx->esd->decoderConfig->decoderSpecificInfo->data;
				dsi_data_size = ctx->esd->decoderConfig->decoderSpecificInfo->dataLength;
			}

			/*attach the avcC as a sample-description extension atom*/
			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, dsi_data, dsi_data_size);
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("avcC"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);

			/*when the avcC was rebuilt locally, dsi_data was allocated here - free it along with the cached parameter sets*/
			if (!ctx->esd->decoderConfig->decoderSpecificInfo || !ctx->esd->decoderConfig->decoderSpecificInfo->data) {
				gf_free(ctx->sps);
				ctx->sps = NULL;
				gf_free(ctx->pps);
				ctx->pps = NULL;
				gf_free(dsi_data);
			}
		}
		break;

	case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
	case GPAC_OTI_VIDEO_MPEG2_MAIN:
	case GPAC_OTI_VIDEO_MPEG2_SNR:
	case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
	case GPAC_OTI_VIDEO_MPEG2_HIGH:
	case GPAC_OTI_VIDEO_MPEG2_422:
		ctx->vtb_type = kCMVideoCodecType_MPEG2Video;
		/*dimensions unknown yet: defer session creation to a later call*/
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
		break;

	case GPAC_OTI_VIDEO_MPEG1:
		ctx->vtb_type = kCMVideoCodecType_MPEG1Video;
		if (!ctx->width || !ctx->height) {
			ctx->init_mpeg12 = GF_TRUE;
			return GF_OK;
		}
		ctx->init_mpeg12 = GF_FALSE;
		break;

	case GPAC_OTI_VIDEO_MPEG4_PART2 :
	{
		Bool reset_dsi = GF_FALSE;
		ctx->vtb_type = kCMVideoCodecType_MPEG4Video;
		if (!ctx->esd->decoderConfig->decoderSpecificInfo) {
			ctx->esd->decoderConfig->decoderSpecificInfo = (GF_DefaultDescriptor *) gf_odf_desc_new(GF_ODF_DSI_TAG);
		}
		/*no DSI: temporarily borrow the cached VOS header (restored below)*/
		if (!ctx->esd->decoderConfig->decoderSpecificInfo->data) {
			reset_dsi = GF_TRUE;
			ctx->esd->decoderConfig->decoderSpecificInfo->data = ctx->vosh;
			ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = ctx->vosh_size;
		}

		if (ctx->esd->decoderConfig->decoderSpecificInfo->data) {
			GF_M4VDecSpecInfo vcfg;
			GF_BitStream *bs;

			gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
			ctx->width = vcfg.width;
			ctx->height = vcfg.height;
			if (ctx->esd->slConfig) {
				ctx->esd->slConfig->predefined = 2;
			}

			/*serialize the ESD (with a 4-byte zero prefix) as the "esds" extension atom*/
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, 0);
			gf_odf_desc_write_bs((GF_Descriptor *) ctx->esd, bs);
			gf_bs_get_content(bs, &dsi_data, &dsi_data_size);
			gf_bs_del(bs);

			dsi = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
			data = CFDataCreate(kCFAllocatorDefault, dsi_data, dsi_data_size);
			gf_free(dsi_data);
			if (data) {
				CFDictionarySetValue(dsi, CFSTR("esds"), data);
				CFDictionarySetValue(dec_dsi, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, dsi);
				CFRelease(data);
			}
			CFRelease(dsi);

			/*detach the borrowed VOS header so it is not freed with the ESD*/
			if (reset_dsi) {
				ctx->esd->decoderConfig->decoderSpecificInfo->data = NULL;
				ctx->esd->decoderConfig->decoderSpecificInfo->dataLength = 0;
			}
			ctx->skip_mpeg4_vosh = GF_FALSE;
		} else {
			ctx->skip_mpeg4_vosh = GF_TRUE;
			return GF_OK;
		}
		break;
	}

	case GPAC_OTI_MEDIA_GENERIC:
		/*generic media: only "s263" (H.263) DSIs are handled; width/height are
		 big-endian u16s at offsets 4 and 6 of the DSI*/
		if (ctx->esd->decoderConfig->decoderSpecificInfo && ctx->esd->decoderConfig->decoderSpecificInfo->dataLength) {
			char *dsi = ctx->esd->decoderConfig->decoderSpecificInfo->data;
			if (ctx->esd->decoderConfig->decoderSpecificInfo->dataLength<8) return GF_NON_COMPLIANT_BITSTREAM;
			if (strnicmp(dsi, "s263", 4)) return GF_NOT_SUPPORTED;
			ctx->width = ((u8) dsi[4]);
			ctx->width<<=8;
			ctx->width |= ((u8) dsi[5]);
			ctx->height = ((u8) dsi[6]);
			ctx->height<<=8;
			ctx->height |= ((u8) dsi[7]);
			ctx->vtb_type = kCMVideoCodecType_H263;
		}
		break;

	default :
		return GF_NOT_SUPPORTED;
	}

	if (! ctx->width || !ctx->height) return GF_NOT_SUPPORTED;

	status = CMVideoFormatDescriptionCreate(kCFAllocatorDefault, ctx->vtb_type, ctx->width, ctx->height, dec_dsi, &ctx->fmt_desc);
	if (!ctx->fmt_desc) {
		if (dec_dsi) CFRelease(dec_dsi);
		return GF_NON_COMPLIANT_BITSTREAM;
	}

	buffer_attribs = VTBDec_CreateBufferAttributes(ctx->width, ctx->height, kColorSpace);
	cbacks.decompressionOutputCallback = VTBDec_on_frame;
	cbacks.decompressionOutputRefCon = ctx;

	/*first try a hardware-accelerated session*/
	dec_type = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
	CFDictionarySetValue(dec_type, kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder, kCFBooleanTrue);
	ctx->is_hardware = GF_TRUE;

	status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, dec_type, NULL, &cbacks, &ctx->vtb_session);
	//if HW decoder not available, try soft one
	if (status) {
		status = VTDecompressionSessionCreate(NULL, ctx->fmt_desc, NULL, buffer_attribs, &cbacks, &ctx->vtb_session);
		ctx->is_hardware = GF_FALSE;
	}

	if (dec_dsi) CFRelease(dec_dsi);
	if (dec_type) CFRelease(dec_type);
	if (buffer_attribs) CFRelease(buffer_attribs);

	/*map VideoToolbox errors to GPAC errors*/
	switch (status) {
	case kVTVideoDecoderNotAvailableNowErr:
	case kVTVideoDecoderUnsupportedDataFormatErr:
		return GF_NOT_SUPPORTED;
	case kVTVideoDecoderMalfunctionErr:
		return GF_IO_ERR;
	case kVTVideoDecoderBadDataErr :
		return GF_BAD_PARAM;
	case kVTPixelTransferNotSupportedErr:
	case kVTCouldNotFindVideoDecoderErr:
		return GF_NOT_SUPPORTED;
	case 0:
		break;
	default:
		return GF_SERVICE_ERROR;
	}

	//good to go !
	if (ctx->pix_fmt == GF_PIXEL_YUV422) {
		ctx->out_size = ctx->width*ctx->height*2;
	} else if (ctx->pix_fmt == GF_PIXEL_YUV444) {
		ctx->out_size = ctx->width*ctx->height*3;
	} else {
		// (ctx->pix_fmt == GF_PIXEL_YV12)
		ctx->out_size = ctx->width*ctx->height*3/2;
	}
	if (ctx->luma_bit_depth>8) {
		ctx->out_size *= 2;
	}
	return GF_OK;
}
/*Writes the video decoder configuration (avcC for H.264, hvcC for H.265) of
 * the encoder's extradata into the ISO file, creating the sample description.
 * @param video_output_file output context (codec_ctx->extradata must be set)
 * @param di               receives the created sample description index
 * @param track            destination track number
 * @return GF_OK, GF_OUT_OF_MEM, or the error from parsing/writing the config.*/
static GF_Err dc_gpac_video_write_config(VideoOutputFile *video_output_file, u32 *di, u32 track)
{
	GF_Err ret;
	if (video_output_file->codec_ctx->codec_id == CODEC_ID_H264) {
		GF_AVCConfig *avccfg;
		avccfg = gf_odf_avc_cfg_new();
		if (!avccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create AVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = avc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, avccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse AVC/H264 SPS/PPS\n"));
			gf_odf_avc_cfg_del(avccfg);
			return ret;
		}

		ret = gf_isom_avc_config_new(video_output_file->isof, track, avccfg, NULL, NULL, di);
		/*fix: release the config on the error path too (was leaked)*/
		gf_odf_avc_cfg_del(avccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_config_new\n", gf_error_to_string(ret)));
			return ret;
		}

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_avc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_avc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	} else if (!strcmp(video_output_file->codec_ctx->codec->name, "libx265")) { //FIXME CODEC_ID_HEVC would break on old releases
		GF_HEVCConfig *hevccfg = gf_odf_hevc_cfg_new();
		if (!hevccfg) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot create HEVCConfig\n"));
			return GF_OUT_OF_MEM;
		}

		ret = hevc_import_ffextradata(video_output_file->codec_ctx->extradata, video_output_file->codec_ctx->extradata_size, hevccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot parse HEVC/H265 SPS/PPS\n"));
			gf_odf_hevc_cfg_del(hevccfg);
			return ret;
		}

		ret = gf_isom_hevc_config_new(video_output_file->isof, track, hevccfg, NULL, NULL, di);
		/*fix: release the config on the error path too (was leaked)*/
		gf_odf_hevc_cfg_del(hevccfg);
		if (ret != GF_OK) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_config_new\n", gf_error_to_string(ret)));
			return ret;
		}

		//inband SPS/PPS
		if (video_output_file->muxer_type == GPAC_INIT_VIDEO_MUXER_AVC3) {
			ret = gf_isom_hevc_set_inband_config(video_output_file->isof, track, 1);
			if (ret != GF_OK) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("%s: gf_isom_hevc_set_inband_config\n", gf_error_to_string(ret)));
				return ret;
			}
		}
	}
	return GF_OK;
}
/*Updates the AVC/SVC configuration of a visual sample description.
 * @param the_file        movie, must be writable
 * @param trackNumber     1-based track index
 * @param DescriptionIndex 1-based sample description index
 * @param cfg             new configuration (required for op_type 0-2)
 * @param op_type         0: replace avcC; 1: replace svcC; 2: replace svcC and
 *                        drop avcC; 3: strip all SPS/PPS from avcC and switch
 *                        the entry to in-band config (avc1->avc3, avc2->avc4)
 * @return GF_BAD_PARAM on invalid track/description/entry type, GF_OK otherwise.
 * NOTE(review): left byte-identical - the box-type transitions per op_type are
 * spec-sensitive and not safe to restructure without the full box model in view.*/
static GF_Err gf_isom_avc_config_update_ex(GF_ISOFile *the_file, u32 trackNumber, u32 DescriptionIndex, GF_AVCConfig *cfg, u32 op_type)
{
	GF_TrackBox *trak;
	GF_Err e;
	GF_MPEGVisualSampleEntryBox *entry;

	e = CanAccessMovie(the_file, GF_ISOM_OPEN_WRITE);
	if (e) return e;
	trak = gf_isom_get_track_from_file(the_file, trackNumber);
	if (!trak || !trak->Media || !DescriptionIndex) return GF_BAD_PARAM;
	entry = (GF_MPEGVisualSampleEntryBox *)gf_list_get(trak->Media->information->sampleTable->SampleDescription->other_boxes, DescriptionIndex-1);
	if (!entry) return GF_BAD_PARAM;

	/*only AVC/SVC visual sample entries can be updated here*/
	switch (entry->type) {
	case GF_ISOM_BOX_TYPE_AVC1:
	case GF_ISOM_BOX_TYPE_AVC2:
	case GF_ISOM_BOX_TYPE_AVC3:
	case GF_ISOM_BOX_TYPE_AVC4:
	case GF_ISOM_BOX_TYPE_SVC1:
		break;
	default:
		return GF_BAD_PARAM;
	}

	switch (op_type) {
	/*AVCC replacement*/
	case 0:
		if (!cfg) return GF_BAD_PARAM;
		if (!entry->avc_config) entry->avc_config = (GF_AVCConfigurationBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC);
		if (entry->avc_config->config) gf_odf_avc_cfg_del(entry->avc_config->config);
		entry->avc_config->config = AVC_DuplicateConfig(cfg);
		entry->type = GF_ISOM_BOX_TYPE_AVC1;
		break;
	/*SVCC replacement*/
	case 1:
		if (!cfg) return GF_BAD_PARAM;
		if (!entry->svc_config) entry->svc_config = (GF_AVCConfigurationBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_SVCC);
		if (entry->svc_config->config) gf_odf_avc_cfg_del(entry->svc_config->config);
		entry->svc_config->config = AVC_DuplicateConfig(cfg);
		/*NOTE(review): this case keeps the entry as avc1 even though it writes
		 the svc_config, while case 2 switches to svc1 - confirm intended*/
		entry->type = GF_ISOM_BOX_TYPE_AVC1;
		break;
	/*SVCC replacement and AVC removal*/
	case 2:
		if (!cfg) return GF_BAD_PARAM;
		if (entry->avc_config) {
			gf_isom_box_del((GF_Box*)entry->avc_config);
			entry->avc_config = NULL;
		}
		if (!entry->svc_config) entry->svc_config = (GF_AVCConfigurationBox*)gf_isom_box_new(GF_ISOM_BOX_TYPE_SVCC);
		if (entry->svc_config->config) gf_odf_avc_cfg_del(entry->svc_config->config);
		entry->svc_config->config = AVC_DuplicateConfig(cfg);
		entry->type = GF_ISOM_BOX_TYPE_SVC1;
		break;
	/*AVCC removal and switch to avc3*/
	case 3:
		if (!entry->avc_config || !entry->avc_config->config) return GF_BAD_PARAM;
		if (entry->svc_config) {
			gf_isom_box_del((GF_Box*)entry->svc_config);
			entry->svc_config = NULL;
		}
		/*drop every SPS/PPS from the avcC: parameter sets are carried in-band for avc3/avc4*/
		while (gf_list_count(entry->avc_config->config->sequenceParameterSets)) {
			GF_AVCConfigSlot *sl = gf_list_get(entry->avc_config->config->sequenceParameterSets, 0);
			gf_list_rem(entry->avc_config->config->sequenceParameterSets, 0);
			if (sl->data) gf_free(sl->data);
			gf_free(sl);
		}
		while (gf_list_count(entry->avc_config->config->pictureParameterSets)) {
			GF_AVCConfigSlot *sl = gf_list_get(entry->avc_config->config->pictureParameterSets, 0);
			gf_list_rem(entry->avc_config->config->pictureParameterSets, 0);
			if (sl->data) gf_free(sl->data);
			gf_free(sl);
		}
		if (entry->type == GF_ISOM_BOX_TYPE_AVC1) entry->type = GF_ISOM_BOX_TYPE_AVC3;
		else if (entry->type == GF_ISOM_BOX_TYPE_AVC2) entry->type = GF_ISOM_BOX_TYPE_AVC4;
		break;
	}
	AVC_RewriteESDescriptor(entry);
	return GF_OK;
}
/*Parses the payload of an avcC/svcC box into a GF_AVCConfig.
 * Reads the AVCDecoderConfigurationRecord header, SPS and PPS arrays, and for
 * High profiles of avcC also the chroma/bit-depth fields (derived by parsing
 * the first SPS when those trailing fields are absent).
 * @param s  the configuration box being read
 * @param bs input bitstream positioned at the record start
 * @return GF_OK (parse errors in the SPS fallback leave defaults in place)*/
GF_Err avcc_Read(GF_Box *s, GF_BitStream *bs)
{
	u32 i, count;
	GF_AVCConfigurationBox *ptr = (GF_AVCConfigurationBox *)s;

	if (ptr->config) gf_odf_avc_cfg_del(ptr->config);
	ptr->config = gf_odf_avc_cfg_new();
	ptr->config->configurationVersion = gf_bs_read_u8(bs);
	ptr->config->AVCProfileIndication = gf_bs_read_u8(bs);
	ptr->config->profile_compatibility = gf_bs_read_u8(bs);
	ptr->config->AVCLevelIndication = gf_bs_read_u8(bs);
	if (ptr->type==GF_ISOM_BOX_TYPE_AVCC) {
		gf_bs_read_int(bs, 6);
	} else {
		/*svcC carries a complete_representation flag in the reserved bits*/
		ptr->config->complete_representation = gf_bs_read_int(bs, 1);
		gf_bs_read_int(bs, 5);
	}
	ptr->config->nal_unit_size = 1 + gf_bs_read_int(bs, 2);
	gf_bs_read_int(bs, 3);
	count = gf_bs_read_int(bs, 5);

	ptr->size -= 7; //including 2nd count
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *) gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->sequenceParameterSets, sl);
		ptr->size -= 2+sl->size;
	}

	count = gf_bs_read_u8(bs);
	for (i=0; i<count; i++) {
		GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
		sl->size = gf_bs_read_u16(bs);
		sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
		gf_bs_read_data(bs, sl->data, sl->size);
		gf_list_add(ptr->config->pictureParameterSets, sl);
		ptr->size -= 2+sl->size;
	}

	if (ptr->type==GF_ISOM_BOX_TYPE_AVCC) {
		switch (ptr->config->AVCProfileIndication) {
		case 100:
		case 110:
		case 122:
		case 144:
			if (!ptr->size) {
				/*trailing High-profile fields absent: recover them from the first SPS*/
#ifndef GPAC_DISABLE_AV_PARSERS
				AVCState avc;
				s32 idx, vui_flag_pos;
				GF_AVCConfigSlot *sl = gf_list_get(ptr->config->sequenceParameterSets, 0);
				/*fix: zero the parser state before use (was read uninitialized)
				 and guard against an empty SPS list (was a NULL deref)*/
				memset(&avc, 0, sizeof(AVCState));
				avc.sps_active_idx = -1;
				idx = sl ? gf_media_avc_read_sps(sl->data, sl->size, &avc, 0, &vui_flag_pos) : -1;
				if (idx>=0) {
					ptr->config->chroma_format = avc.sps[idx].chroma_format;
					ptr->config->luma_bit_depth = 8 + avc.sps[idx].luma_bit_depth_m8;
					ptr->config->chroma_bit_depth = 8 + avc.sps[idx].chroma_bit_depth_m8;
				}
#else
				/*set default values ...*/
				ptr->config->chroma_format = 1;
				ptr->config->luma_bit_depth = 8;
				ptr->config->chroma_bit_depth = 8;
#endif
				return GF_OK;
			}
			gf_bs_read_int(bs, 6);
			ptr->config->chroma_format = gf_bs_read_int(bs, 2);
			gf_bs_read_int(bs, 5);
			ptr->config->luma_bit_depth = 8 + gf_bs_read_int(bs, 3);
			gf_bs_read_int(bs, 5);
			ptr->config->chroma_bit_depth = 8 + gf_bs_read_int(bs, 3);
			count = gf_bs_read_int(bs, 8);
			ptr->size -= 4;
			if (count) {
				ptr->config->sequenceParameterSetExtensions = gf_list_new();
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_malloc(sizeof(GF_AVCConfigSlot));
					sl->size = gf_bs_read_u16(bs);
					sl->data = (char *)gf_malloc(sizeof(char) * sl->size);
					gf_bs_read_data(bs, sl->data, sl->size);
					gf_list_add(ptr->config->sequenceParameterSetExtensions, sl);
					ptr->size -= sl->size + 2;
				}
			}
			break;
		}
	}
	return GF_OK;
}
void isor_declare_objects(ISOMReader *read) { GF_ObjectDescriptor *od; GF_ESD *esd; const char *tag; u32 i, count, ocr_es_id, tlen, base_track, j, track_id; Bool highest_stream; char *opt; Bool add_ps_lower = GF_TRUE; ocr_es_id = 0; opt = (char*) gf_modules_get_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS"); if (!opt) { gf_modules_set_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS", "yes"); } else if (!strcmp(opt, "no")) { add_ps_lower = GF_FALSE; } /*TODO check for alternate tracks*/ count = gf_isom_get_track_count(read->mov); for (i=0; i<count; i++) { if (!gf_isom_is_track_enabled(read->mov, i+1)) continue; switch (gf_isom_get_media_type(read->mov, i+1)) { case GF_ISOM_MEDIA_AUDIO: case GF_ISOM_MEDIA_VISUAL: case GF_ISOM_MEDIA_TEXT: case GF_ISOM_MEDIA_SUBT: case GF_ISOM_MEDIA_SCENE: case GF_ISOM_MEDIA_SUBPIC: break; default: continue; } /*we declare only the highest video track (i.e the track we play)*/ highest_stream = GF_TRUE; track_id = gf_isom_get_track_id(read->mov, i+1); for (j = 0; j < count; j++) { if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) { highest_stream = GF_FALSE; break; } } if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream) continue; esd = gf_media_map_esd(read->mov, i+1); if (esd) { gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track); esd->has_ref_base = base_track ? 
GF_TRUE : GF_FALSE; /*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/ /*so we add by default the SPS/PPS of the lower layers to this esd*/ if (esd->has_ref_base && add_ps_lower) { u32 count, refIndex, ref_track, num_sps, num_pps, t; GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); GF_AVCConfig *avccfg, *svccfg; count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL); for (refIndex = count; refIndex != 0; refIndex--) { gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track); avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1); svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1); if (avccfg) { num_sps = gf_list_count(avccfg->sequenceParameterSets); for (t = 0; t < num_sps; t++) { GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t); GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); sl->id = slc->id; sl->size = slc->size; sl->data = (char*)gf_malloc(sizeof(char)*sl->size); memcpy(sl->data, slc->data, sizeof(char)*sl->size); gf_list_insert(cfg->sequenceParameterSets, sl, 0); } num_pps = gf_list_count(avccfg->pictureParameterSets); for (t = 0; t < num_sps; t++) { GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t); GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); sl->id = slc->id; sl->size = slc->size; sl->data = (char*)gf_malloc(sizeof(char)*sl->size); memcpy(sl->data, slc->data, sizeof(char)*sl->size); gf_list_insert(cfg->pictureParameterSets, sl, 0); } gf_odf_avc_cfg_del(avccfg); } if (svccfg) { num_sps = gf_list_count(svccfg->sequenceParameterSets); for (t = 0; t < num_sps; t++) { GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t); GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); sl->id = slc->id; 
sl->size = slc->size; sl->data = (char*)gf_malloc(sizeof(char)*sl->size); memcpy(sl->data, slc->data, sizeof(char)*sl->size); gf_list_insert(cfg->sequenceParameterSets, sl, 0); } num_pps = gf_list_count(svccfg->pictureParameterSets); for (t = 0; t < num_pps; t++) { GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t); GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); sl->id = slc->id; sl->size = slc->size; sl->data = (char*)gf_malloc(sizeof(char)*sl->size); memcpy(sl->data, slc->data, sizeof(char)*sl->size); gf_list_insert(cfg->pictureParameterSets, sl, 0); } gf_odf_avc_cfg_del(svccfg); } } if (esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data); gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength); gf_odf_avc_cfg_del(cfg); } od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG); od->service_ifce = read->input; od->objectDescriptorID = 0; if (!ocr_es_id) ocr_es_id = esd->ESID; esd->OCRESID = ocr_es_id; gf_list_add(od->ESDescriptors, esd); if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) { send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL); } else { gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE); } } } /*if cover art, extract it in cache*/ if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) { const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_term_get_service_interface(read->service), "General", "CacheDirectory"); if (cdir) { char szName[GF_MAX_PATH]; const char *sep; FILE *t; sep = strrchr(gf_isom_get_filename(read->mov), '\\'); if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/'); if (!sep) sep = gf_isom_get_filename(read->mov); if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) { sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? 
"png" : "jpg"); } else { sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg"); } t = gf_f64_open(szName, "wb"); if (t) { Bool isom_contains_video = GF_FALSE; /*write cover data*/ assert(!(tlen & 0x80000000)); gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t); fclose(t); /*don't display cover art when video is present*/ for (i=0; i<gf_isom_get_track_count(read->mov); i++) { if (!gf_isom_is_track_enabled(read->mov, i+1)) continue; if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) { isom_contains_video = GF_TRUE; break; } } if (!isom_contains_video) { od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG); od->service_ifce = read->input; od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID; od->URLString = gf_strdup(szName); if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) { send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL); } else { gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE); } } } } } if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) { send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL); } else { gf_term_add_media(read->service, NULL, GF_FALSE); } }
GF_EXPORT
/*! Appends the media-level SDP description ("m=" line plus attributes) for the
given RTP streamer to *out_sdp_buffer.

\param rtp the RTP streamer whose packetizer/channel describe the session
\param ESID MPEG-4 ES identifier; when non-zero (and not DIMS) an "a=mpeg4-esid" line is written
\param dsi decoder specific info (codec config blob), may be NULL
\param dsi_len size of dsi in bytes
\param isofile source ISO file, only used for 3GPP timed text (may be NULL otherwise)
\param isotrack track number in isofile for 3GPP timed text
\param KMS_URI ISMACryp key management URI, only used for encrypted MPEG-4 payloads
\param width visual width in pixels, 0 if unknown/not visual
\param height visual height in pixels, 0 if unknown/not visual
\param out_sdp_buffer in/out: if *out_sdp_buffer is NULL a new buffer is allocated,
	otherwise the SDP text is appended to the existing (gf_malloc'ed) buffer.
	Caller owns and must gf_free the result.
\return GF_BAD_PARAM if out_sdp_buffer is NULL, GF_OUT_OF_MEM on allocation failure, GF_OK otherwise
*/
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer)
{
	u32 size;
	u16 port;
	char mediaName[30], payloadName[30];
	/*NOTE(review): fixed-size scratch buffers — assumes the generated SDP fits;
	unchanged from original behavior, no bounds enforcement added here*/
	char sdp[20000], sdpLine[10000];

	if (!out_sdp_buffer) return GF_BAD_PARAM;

	gf_rtp_builder_get_payload_name(rtp->packetizer, payloadName, mediaName);
	gf_rtp_get_ports(rtp->channel, &port, NULL);

	/*media line: use SAVP profile when ISMACryp IV signaling is configured*/
	sprintf(sdp, "m=%s %d RTP/%s %d\n", mediaName, port, rtp->packetizer->slMap.IV_length ? "SAVP" : "AVP", rtp->packetizer->PayloadType);
	sprintf(sdpLine, "a=rtpmap:%d %s/%d\n", rtp->packetizer->PayloadType, payloadName, rtp->packetizer->sl_config.timestampResolution);
	strcat(sdp, sdpLine);
	if (ESID && (rtp->packetizer->rtp_payt != GF_RTP_PAYT_3GPP_DIMS)) {
		sprintf(sdpLine, "a=mpeg4-esid:%d\n", ESID);
		strcat(sdp, sdpLine);
	}
	if (width && height) {
		if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H263) {
			sprintf(sdpLine, "a=cliprect:0,0,%d,%d\n", height, width);
			strcat(sdp, sdpLine);
		}
		/*extensions for some mobile phones*/
		sprintf(sdpLine, "a=framesize:%d %d-%d\n", rtp->packetizer->PayloadType, width, height);
		strcat(sdp, sdpLine);
	}
	strcpy(sdpLine, "");
	/*AMR*/
	if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_AMR_WB)) {
		sprintf(sdpLine, "a=fmtp:%d octet-align=1\n", rtp->packetizer->PayloadType);
	}
	/*Text*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_3GPP_TEXT) {
		gf_media_format_ttxt_sdp(rtp->packetizer, payloadName, sdpLine, isofile, isotrack);
		strcat(sdpLine, "\n");
	}
	/*EVRC/SMV in non header-free mode*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_EVRC_SMV) && (rtp->packetizer->auh_size>1)) {
		sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
	}
	/*H264/AVC: profile-level-id + base64 sprop-parameter-sets (RFC 6184)*/
	else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC)) {
		GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;
		if (avcc) {
			sprintf(sdpLine, "a=fmtp:%d profile-level-id=%02X%02X%02X; packetization-mode=1", rtp->packetizer->PayloadType, avcc->AVCProfileIndication, avcc->profile_compatibility, avcc->AVCLevelIndication);
			if (gf_list_count(avcc->pictureParameterSets) || gf_list_count(avcc->sequenceParameterSets)) {
				u32 i, count, b64s;
				char b64[200];
				strcat(sdpLine, "; sprop-parameter-sets=");
				count = gf_list_count(avcc->sequenceParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->sequenceParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s] = 0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
				/*FIX: only emit the SPS/PPS separator when PPS actually follow —
				the previous unconditional "if (i)" wrote a trailing comma when the
				PPS list was empty, yielding an invalid sprop-parameter-sets value*/
				if (i && gf_list_count(avcc->pictureParameterSets)) strcat(sdpLine, ",");
				count = gf_list_count(avcc->pictureParameterSets);
				for (i=0; i<count; i++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(avcc->pictureParameterSets, i);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s] = 0;
					strcat(sdpLine, b64);
					if (i+1<count) strcat(sdpLine, ",");
				}
			}
			gf_odf_avc_cfg_del(avcc);
			strcat(sdpLine, "\n");
		}
	}
	/*HEVC: sprop-vps/sps/pps from the parameter set arrays*/
	else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) {
#ifndef GPAC_DISABLE_HEVC
		GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL;
		if (hevcc) {
			u32 count, i, j, b64s;
			char b64[200];
			sprintf(sdpLine, "a=fmtp:%d", rtp->packetizer->PayloadType);
			count = gf_list_count(hevcc->param_array);
			for (i = 0; i < count; i++) {
				GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevcc->param_array, i);
				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					strcat(sdpLine, "; sprop-sps=");
				} else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					strcat(sdpLine, "; sprop-pps=");
				} else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					strcat(sdpLine, "; sprop-vps=");
				}
				for (j = 0; j < gf_list_count(ar->nalus); j++) {
					GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
					b64s = gf_base64_encode(sl->data, sl->size, b64, 200);
					b64[b64s] = 0;
					if (j) strcat(sdpLine, ", ");
					strcat(sdpLine, b64);
				}
			}
			gf_odf_hevc_cfg_del(hevcc);
			strcat(sdpLine, "\n");
		}
#endif
	}
	/*MPEG-4 decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_MPEG4) {
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, dsi, dsi_len);
		strcat(sdpLine, "\n");
		if (rtp->packetizer->slMap.IV_length && KMS_URI) {
			/*default to a (uri) key scheme unless the caller already prefixed one*/
			if (!strnicmp(KMS_URI, "(key)", 5) || !strnicmp(KMS_URI, "(ipmp)", 6) || !strnicmp(KMS_URI, "(uri)", 5)) {
				strcat(sdpLine, "; ISMACrypKey=");
			} else {
				strcat(sdpLine, "; ISMACrypKey=(uri)");
			}
			strcat(sdpLine, KMS_URI);
			strcat(sdpLine, "\n");
		}
	}
	/*DIMS decoder config*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_3GPP_DIMS) {
		sprintf(sdpLine, "a=fmtp:%d Version-profile=%d", rtp->packetizer->PayloadType, 10);
		if (rtp->packetizer->flags & GP_RTP_DIMS_COMPRESSED) {
			strcat(sdpLine, ";content-coding=deflate");
		}
		strcat(sdpLine, "\n");
	}
	/*MPEG-4 Audio LATM: build a StreamMuxConfig in-memory and format it*/
	else if (rtp->packetizer->rtp_payt==GF_RTP_PAYT_LATM) {
		GF_BitStream *bs;
		char *config_bytes;
		u32 config_size;
		/* form config string */
		bs = gf_bs_new(NULL, 32, GF_BITSTREAM_WRITE);
		gf_bs_write_int(bs, 0, 1); /* AudioMuxVersion */
		gf_bs_write_int(bs, 1, 1); /* all streams same time */
		gf_bs_write_int(bs, 0, 6); /* numSubFrames */
		gf_bs_write_int(bs, 0, 4); /* numPrograms */
		gf_bs_write_int(bs, 0, 3); /* numLayer */
		/* audio-specific config - PacketVideo patch: don't signal SBR and PS stuff,
		not allowed in LATM with audioMuxVersion=0*/
		if (dsi) gf_bs_write_data(bs, dsi, MIN(dsi_len, 2));
		/* other data */
		gf_bs_write_int(bs, 0, 3); /* frameLengthType */
		gf_bs_write_int(bs, 0xff, 8); /* latmBufferFullness */
		gf_bs_write_int(bs, 0, 1); /* otherDataPresent */
		gf_bs_write_int(bs, 0, 1); /* crcCheckPresent */
		gf_bs_get_content(bs, &config_bytes, &config_size);
		gf_bs_del(bs);
		gf_rtp_builder_format_sdp(rtp->packetizer, payloadName, sdpLine, config_bytes, config_size);
		gf_free(config_bytes);
		strcat(sdpLine, "\n");
	}
	strcat(sdp, sdpLine);

	/*allocate or grow the caller's buffer and copy/append the new SDP text*/
	size = (u32) strlen(sdp) + (*out_sdp_buffer ? (u32) strlen(*out_sdp_buffer) : 0) + 1;
	if (! *out_sdp_buffer) {
		*out_sdp_buffer = gf_malloc(sizeof(char)*size);
		if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
		strcpy(*out_sdp_buffer, sdp);
	} else {
		/*FIX: don't overwrite *out_sdp_buffer with the realloc result directly —
		on failure that leaked the existing buffer; keep it valid for the caller*/
		char *new_buf = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
		if (!new_buf) return GF_OUT_OF_MEM;
		*out_sdp_buffer = new_buf;
		strcat(*out_sdp_buffer, sdp);
	}
	return GF_OK;
}
/*Destructor for the AVC configuration box ('avcC').
Releases the embedded GF_AVCConfig (if any) then the box itself.
FIX: the box is allocated through GPAC's allocator (gf_isom_box_new/gf_malloc),
so it must be released with gf_free, not the system free() — mixing allocators
is undefined behavior when GPAC's memory tracker is enabled.*/
void avcc_del(GF_Box *s)
{
	GF_AVCConfigurationBox *ptr = (GF_AVCConfigurationBox *)s;
	if (ptr->config) gf_odf_avc_cfg_del(ptr->config);
	gf_free(ptr);
}