Code Example #1
GF_EXPORT
void gf_sl_depacketize (GF_SLConfig *slConfig, GF_SLHeader *Header, char *PDU, u32 PDULength, u32 *HeaderLen)
{
	GF_BitStream *bs;
	*HeaderLen = 0;
	if (!Header) return;
	//reset the input header
	memset(Header, 0, sizeof(GF_SLHeader));

	bs = gf_bs_new(PDU, PDULength, GF_BITSTREAM_READ);
	if (!bs) return;

	if (slConfig->useAccessUnitStartFlag) Header->accessUnitStartFlag = gf_bs_read_int(bs, 1);
	if (slConfig->useAccessUnitEndFlag) Header->accessUnitEndFlag = gf_bs_read_int(bs, 1);
	if ( !slConfig->useAccessUnitStartFlag && !slConfig->useAccessUnitEndFlag) {
		Header->accessUnitStartFlag = 1;
		Header->accessUnitEndFlag = 1;
	}
	if (slConfig->OCRLength > 0) Header->OCRflag = gf_bs_read_int(bs, 1);
	if (slConfig->useIdleFlag) Header->idleFlag = gf_bs_read_int(bs, 1);
	if (slConfig->usePaddingFlag) {
		Header->paddingFlag = gf_bs_read_int(bs, 1);
		if (Header->paddingFlag) Header->paddingBits = gf_bs_read_int(bs, 3);
	}
	if (!Header->idleFlag && (!Header->paddingFlag || Header->paddingBits != 0)) {

		if (slConfig->packetSeqNumLength > 0) Header->packetSequenceNumber = gf_bs_read_int(bs, slConfig->packetSeqNumLength);
		if (slConfig->degradationPriorityLength > 0) {
			Header->degradationPriorityFlag = gf_bs_read_int(bs, 1);
			if (Header->degradationPriorityFlag) Header->degradationPriority = gf_bs_read_int(bs, slConfig->degradationPriorityLength);
		}
		if (Header->OCRflag) Header->objectClockReference = gf_bs_read_long_int(bs, slConfig->OCRLength); /*OCR may exceed 32 bits; 64-bit read matches the write side*/
		if (Header->accessUnitStartFlag) {
			if (slConfig->useRandomAccessPointFlag) Header->randomAccessPointFlag = gf_bs_read_int(bs, 1);
			if (slConfig->AUSeqNumLength > 0) Header->AU_sequenceNumber = gf_bs_read_int(bs, slConfig->AUSeqNumLength);
			if (slConfig->useTimestampsFlag) {
				Header->decodingTimeStampFlag = gf_bs_read_int(bs, 1);
				Header->compositionTimeStampFlag = gf_bs_read_int(bs, 1);
			}
			if (slConfig->instantBitrateLength > 0) Header->instantBitrateFlag = gf_bs_read_int(bs, 1);
			if (Header->decodingTimeStampFlag) Header->decodingTimeStamp = gf_bs_read_long_int(bs, slConfig->timestampLength); 
			if (Header->compositionTimeStampFlag) Header->compositionTimeStamp = gf_bs_read_long_int(bs, slConfig->timestampLength); 
			if (slConfig->AULength > 0) Header->accessUnitLength = gf_bs_read_int(bs, slConfig->AULength);
			if (Header->instantBitrateFlag) Header->instantBitrate = gf_bs_read_int(bs, slConfig->instantBitrateLength);
		}
	}
	gf_bs_align(bs);
	*HeaderLen = (u32) gf_bs_get_position(bs);
	gf_bs_del(bs);
}
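
A minimal calling sketch, not taken from the original sources: the SLConfig would normally come from the stream's ESD, and the header path plus the field usage shown here are assumptions based on the code above.

#include <gpac/sync_layer.h>	/*assumed location of GF_SLConfig, GF_SLHeader and gf_sl_depacketize*/

static void parse_sl_pdu(GF_SLConfig *slc, char *pdu, u32 pdu_len)
{
	GF_SLHeader hdr;
	u32 hdr_len = 0;
	gf_sl_depacketize(slc, &hdr, pdu, pdu_len, &hdr_len);
	/*the SL payload starts hdr_len bytes into the PDU*/
	if (hdr.accessUnitStartFlag && hdr.decodingTimeStampFlag) {
		/*hdr.decodingTimeStamp carries the DTS in slConfig timestamp units*/
	}
}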
Code Example #2
File: tx3g.c Project: DmitrySigaev/gpac-sf
GF_TextSample *gf_isom_parse_texte_sample_from_data(char *data, u32 dataLength)
{
	GF_TextSample *s;
	GF_BitStream *bs;
	/*empty text sample*/
	if (!data || !dataLength) {
		return gf_isom_new_text_sample();
	}

	bs = gf_bs_new(data, dataLength, GF_BITSTREAM_READ);
	s = gf_isom_parse_texte_sample(bs);
	gf_bs_del(bs);
	return s;
}
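
A hedged usage sketch, not part of tx3g.c: the payload of a fetched ISO sample is parsed and the result released; gf_isom_delete_text_sample() is assumed to be the matching destructor in the GPAC text API.

static void handle_text_sample(GF_ISOSample *samp)
{
	GF_TextSample *txt = gf_isom_parse_texte_sample_from_data(samp->data, samp->dataLength);
	if (!txt) return;
	/*...inspect the decoded text and styles here...*/
	gf_isom_delete_text_sample(txt);
}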
Code Example #3
/*allocates and writes the SL-PDU (Header + PDU) given the SLConfig and the GF_SLHeader
for this PDU. AUs must be split in PDUs by another process if needed (packetizer).*/
GF_EXPORT
void gf_sl_packetize(GF_SLConfig* slConfig, 
				  GF_SLHeader *Header, 
				  char *PDU, 
				  u32 size,
				  char **outPacket,
				  u32 *OutSize)
{
	GF_BitStream *bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	*OutSize = 0;
	if (!bs) return;

	if (slConfig->useAccessUnitStartFlag) gf_bs_write_int(bs, Header->accessUnitStartFlag, 1);
	if (slConfig->useAccessUnitEndFlag) gf_bs_write_int(bs, Header->accessUnitEndFlag, 1);
	if (slConfig->OCRLength > 0) gf_bs_write_int(bs, Header->OCRflag, 1);
	if (slConfig->useIdleFlag) gf_bs_write_int(bs, Header->idleFlag, 1);
	if (slConfig->usePaddingFlag) {
		gf_bs_write_int(bs, Header->paddingFlag, 1);
		if (Header->paddingFlag) gf_bs_write_int(bs, Header->paddingBits, 3);
	}
	if (! Header->idleFlag && (! Header->paddingFlag || Header->paddingBits != 0)) {
		if (slConfig->packetSeqNumLength > 0) gf_bs_write_int(bs, Header->packetSequenceNumber, slConfig->packetSeqNumLength);
		if (slConfig->degradationPriorityLength > 0) {
			gf_bs_write_int(bs, Header->degradationPriorityFlag, 1);
			if (Header->degradationPriorityFlag) gf_bs_write_int(bs, Header->degradationPriority, slConfig->degradationPriorityLength);
		}
		if (Header->OCRflag) gf_bs_write_long_int(bs, Header->objectClockReference, slConfig->OCRLength);
		if (Header->accessUnitStartFlag) {
			if (slConfig->useRandomAccessPointFlag) gf_bs_write_int(bs, Header->randomAccessPointFlag, 1);
			if (slConfig->AUSeqNumLength > 0) gf_bs_write_int(bs, Header->AU_sequenceNumber, slConfig->AUSeqNumLength);
			if (slConfig->useTimestampsFlag) {
				gf_bs_write_int(bs, Header->decodingTimeStampFlag, 1);
				gf_bs_write_int(bs, Header->compositionTimeStampFlag, 1);
			}
			if (slConfig->instantBitrateLength > 0) gf_bs_write_int(bs, Header->instantBitrateFlag, 1);
			if (Header->decodingTimeStampFlag) gf_bs_write_long_int(bs, Header->decodingTimeStamp, slConfig->timestampLength);
			if (Header->compositionTimeStampFlag) gf_bs_write_long_int(bs, Header->compositionTimeStamp, slConfig->timestampLength);
			if (slConfig->AULength > 0) gf_bs_write_int(bs, Header->accessUnitLength, slConfig->AULength);
			if (Header->instantBitrateFlag) gf_bs_write_int(bs, Header->instantBitrate, slConfig->instantBitrateLength);
		}
	}
	//done with the header, byte-align before appending the payload
	gf_bs_align(bs);
	//write the PDU - already byte aligned with stuffing (paddingBits in SL Header)
	gf_bs_write_data(bs, PDU, size);

	gf_bs_align(bs);
	gf_bs_get_content(bs, outPacket, OutSize);
	gf_bs_del(bs);
}
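
A short usage sketch, an assumption rather than code from the original file: one access unit is wrapped into an SL packet, and the returned buffer is released with gf_free() since it comes from gf_bs_get_content().

static void send_au(GF_SLConfig *slc, GF_SLHeader *hdr, char *au, u32 au_size)
{
	char *pkt = NULL;
	u32 pkt_size = 0;
	gf_sl_packetize(slc, hdr, au, au_size, &pkt, &pkt_size);
	if (pkt) {
		/*...hand pkt / pkt_size to the transport layer here...*/
		gf_free(pkt);	/*buffer allocated by gf_bs_get_content*/
	}
}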
Code Example #4
File: data_map.c Project: noelove/GPAC-old
void gf_isom_fdm_del(GF_FileDataMap *ptr)
{
	if (!ptr || (ptr->type != GF_ISOM_DATA_FILE)) return;
	if (ptr->bs) gf_bs_del(ptr->bs);
	if (ptr->stream) fclose(ptr->stream);

#ifndef GPAC_DISABLE_ISOM_WRITE
	if (ptr->temp_file) {
		gf_delete_file(ptr->temp_file);
		gf_free(ptr->temp_file);
	}
#endif
	gf_free(ptr);
}
Code Example #5
File: webvtt.c Project: porcelijn/gpac
GF_EXPORT
GF_List *gf_webvtt_parse_cues_from_data(const char *data, u32 dataLength, u64 start)
{
	GF_List         *cues;
	GF_WebVTTCue    *cue;
	GF_VTTCueBox    *cuebox;
	GF_BitStream    *bs;
	char			*pre_text;
	cue = NULL;
	pre_text = NULL;
	cues = gf_list_new();
	bs = gf_bs_new(data, dataLength, GF_BITSTREAM_READ);
	while(gf_bs_available(bs))
	{
		GF_Err  e;
		GF_Box  *box;
		e = gf_isom_box_parse(&box, bs);
		if (e) return NULL;
		if (box->type == GF_ISOM_BOX_TYPE_VTCC_CUE) {
			cuebox = (GF_VTTCueBox *)box;
			cue   = gf_webvtt_cue_new();
			if (pre_text) {
				gf_webvtt_cue_add_property(cue, WEBVTT_PRECUE_TEXT, pre_text, (u32) strlen(pre_text));
				gf_free(pre_text);
				pre_text = NULL;
			}
			gf_list_add(cues, cue);
			gf_webvtt_timestamp_set(&cue->start, start);
			if (cuebox->id) {
				gf_webvtt_cue_add_property(cue, WEBVTT_ID, cuebox->id->string, (u32) strlen(cuebox->id->string));
			}
			if (cuebox->settings) {
				gf_webvtt_cue_add_property(cue, WEBVTT_SETTINGS, cuebox->settings->string, (u32) strlen(cuebox->settings->string));
			}
			if (cuebox->payload) {
				gf_webvtt_cue_add_property(cue, WEBVTT_PAYLOAD, cuebox->payload->string, (u32) strlen(cuebox->payload->string));
			}
		} else if (box->type == GF_ISOM_BOX_TYPE_VTTA) {
			GF_StringBox *sbox = (GF_StringBox *)box;
			if (cue) {
				gf_webvtt_cue_add_property(cue, WEBVTT_POSTCUE_TEXT, sbox->string, (u32) strlen(sbox->string));
			} else {
				pre_text = gf_strdup(sbox->string);
			}
		}
		gf_isom_box_del(box);
	}
	gf_bs_del(bs);
	return cues;
}
Code Example #6
File: descriptors.c Project: Keemotion/GPAC4iOS
GF_EXPORT
GF_Err gf_odf_hevc_cfg_write(GF_HEVCConfig *cfg, char **outData, u32 *outSize)
{
	GF_Err e;
	GF_BitStream *bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	*outSize = 0;
	*outData = NULL;
	e = gf_odf_hevc_cfg_write_bs(cfg, bs);
	if (e==GF_OK) 
		gf_bs_get_content(bs, outData, outSize);

	gf_bs_del(bs);
	return e;
}
Code Example #7
File: descriptors.c Project: ezdev128/gpac
/*special function for authoring - convert DSI to LASERConfig*/
GF_EXPORT
GF_Err gf_odf_get_laser_config(GF_DefaultDescriptor *dsi, GF_LASERConfig *cfg)
{
	u32 to_skip;
	GF_BitStream *bs;
	
	if (!cfg) return GF_BAD_PARAM;
	memset(cfg, 0, sizeof(GF_LASERConfig));
	if (!dsi || !dsi->data || !dsi->dataLength) return GF_BAD_PARAM;
	bs = gf_bs_new(dsi->data, dsi->dataLength, GF_BITSTREAM_READ);
	cfg->tag = GF_ODF_LASER_CFG_TAG;
	cfg->profile = gf_bs_read_int(bs, 8);
	cfg->level = gf_bs_read_int(bs, 8);
	/*cfg->reserved = */gf_bs_read_int(bs, 3);
	cfg->pointsCodec = gf_bs_read_int(bs, 2);
	cfg->pathComponents = gf_bs_read_int(bs, 4);
	cfg->fullRequestHost = gf_bs_read_int(bs, 1);
	if (gf_bs_read_int(bs, 1)) cfg->time_resolution = gf_bs_read_int(bs, 16);
	else cfg->time_resolution = 1000;
	cfg->colorComponentBits = 1 + gf_bs_read_int(bs, 4);
	cfg->resolution = gf_bs_read_int(bs, 4);
	if (cfg->resolution>7) cfg->resolution -= 16;
	cfg->coord_bits = gf_bs_read_int(bs, 5);
	cfg->scale_bits_minus_coord_bits = gf_bs_read_int(bs, 4);
	cfg->newSceneIndicator = gf_bs_read_int(bs, 1);
	/*reserved2*/ gf_bs_read_int(bs, 3);
	cfg->extensionIDBits = gf_bs_read_int(bs, 4);
	/*hasExtConfig - we just ignore it*/
	if (gf_bs_read_int(bs, 1)) {
		to_skip = gf_bs_read_vluimsbf5(bs);
		while (to_skip) {
			gf_bs_read_int(bs, 8);
			to_skip--;
		}
	}
	/*hasExtension - we just ignore it*/
	if (gf_bs_read_int(bs, 1)) {
		to_skip = gf_bs_read_vluimsbf5(bs);
		while (to_skip) {
			gf_bs_read_int(bs, 8);
			to_skip--;
		}
	}
	gf_bs_del(bs);
	return GF_OK;
}
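
A calling sketch, given as an assumption: for a LASeR stream the dsi argument is typically the decoderSpecificInfo of the stream's ESD, as in the other descriptor helpers above.

static GF_Err get_laser_time_resolution(GF_ESD *esd, u32 *time_res)
{
	GF_LASERConfig lsr_cfg;
	GF_Err e = gf_odf_get_laser_config(esd->decoderConfig->decoderSpecificInfo, &lsr_cfg);
	if (e) return e;
	*time_res = lsr_cfg.time_resolution;	/*defaults to 1000 when not present in the DSI*/
	return GF_OK;
}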
Code Example #8
File: vtt_dec.c Project: LongHanVisazure/gpac
static void VTT_ReadConfigFromDSI(VTTDec *vttdec, GF_DefaultDescriptor *dsi)
{
	GF_BitStream *bs;
	u32 entry_type;

	bs = gf_bs_new(dsi->data, dsi->dataLength, GF_BITSTREAM_READ);
	entry_type = gf_bs_read_u32(bs);
	if (entry_type == GF_ISOM_BOX_TYPE_WVTT) {
		GF_Box *b;
		gf_isom_parse_box(&b, bs);
		vttdec->config = ((GF_StringBox *)b)->string;
		((GF_StringBox *)b)->string = NULL;
		gf_isom_box_del(b);
	}
	gf_bs_del(bs);
}
Code Example #9
File: rtp_pck_3gpp.c Project: erelh/gpac
static void rtp_amr_flush(GP_RTPPacketizer *builder)
{
	char *hdr;
	u32 hdr_size;
	if (!builder->bytesInPacket) return;
	gf_bs_get_content(builder->pck_hdr, &hdr, &hdr_size);
	gf_bs_del(builder->pck_hdr);
	builder->pck_hdr = NULL;
	/*overwrite last frame F bit*/
	hdr[builder->last_au_sn] &= 0x7F;
	builder->OnData(builder->cbk_obj, hdr, hdr_size, 1);
	gf_free(hdr);
	builder->OnPacketDone(builder->cbk_obj, &builder->rtp_header);
	builder->bytesInPacket = 0;
	builder->last_au_sn = 0;
}
Code Example #10
File: drm_sample.c Project: HungMingWu/gpac
GF_Err gf_isom_track_cenc_add_sample_info(GF_ISOFile *the_file, u32 trackNumber, u32 container_type, u8 IV_size, char *buf, u32 len)
{
	u32 i;
	GF_SampleEncryptionBox *senc;
	GF_CENCSampleAuxInfo *sai;
	GF_SampleTableBox *stbl;
	GF_TrackBox *trak = gf_isom_get_track_from_file(the_file, trackNumber);
	if (!trak) return GF_BAD_PARAM;
	stbl = trak->Media->information->sampleTable;
	if (!stbl) return GF_BAD_PARAM;

	switch (container_type) {
	case GF_ISOM_BOX_UUID_PSEC:
		senc = (GF_SampleEncryptionBox *) stbl->piff_psec;
		break;
	case GF_ISOM_BOX_TYPE_SENC:
		senc = (GF_SampleEncryptionBox *)stbl->senc;
		break;
	default:
		return GF_NOT_SUPPORTED;
	}


	sai = (GF_CENCSampleAuxInfo *)gf_malloc(sizeof(GF_CENCSampleAuxInfo));
	if (!sai) return GF_OUT_OF_MEM;
	memset(sai, 0, sizeof(GF_CENCSampleAuxInfo));
	if (len) {
		GF_BitStream *bs = gf_bs_new(buf, len, GF_BITSTREAM_READ);
		sai->IV_size = IV_size;
		gf_bs_read_data(bs, (char *)sai->IV, IV_size);
		sai->subsample_count = gf_bs_read_u16(bs);
		if (sai->subsample_count) senc->flags = 0x00000002;
		sai->subsamples = (GF_CENCSubSampleEntry *)gf_malloc(sai->subsample_count*sizeof(GF_CENCSubSampleEntry));
		for (i = 0; i < sai->subsample_count; i++) {
			sai->subsamples[i].bytes_clear_data = gf_bs_read_u16(bs);
			sai->subsamples[i].bytes_encrypted_data = gf_bs_read_u32(bs);
		}
		gf_bs_del(bs);
	}

	gf_list_add(senc->samp_aux_info, sai);
#ifndef GPAC_DISABLE_ISOM_FRAGMENTS
	gf_isom_cenc_set_saiz_saio(senc, stbl, NULL, len);
#endif

	return GF_OK;
}
Code Example #11
File: main.c Project: ARSekkat/gpac
int main(int argc, char **argv)
{
	FILE *in;
	GF_BitStream *bs;

	/* generation of a random TS (transport stream) */
	/*
		if ((in=gf_fopen(argv[1], "wb")) == NULL) {
			printf( "Impossible d'ouvrir %s en lecture.\n", argv[1]);
		}
		{
			char buffer[188];
			u32 j, i, nb_packets = 300;
			for (i = 0; i < nb_packets; i++) {
				buffer[0] = 0x47;
				for (j = 1; j <188; j++) {
					buffer[j] = rand();//j;
				}
				gf_fwrite(buffer, 1, 188, in);
			}
		}
		gf_fclose(in);
		if ((in=gf_fopen(argv[1], "rb")) == NULL) {
			printf( "Impossible d'ouvrir %s en lecture.\n", argv[1]);
		}

		bs = gf_bs_from_file(in, GF_BITSTREAM_READ);
		if (bs == NULL) return;

		RS_Interleaver(bs, argv[2]);
		gf_fclose(in);
		gf_bs_del(bs);
	*/


	if ((in = gf_fopen(argv[1], "rb")) == NULL) {
		printf("Cannot open %s for reading.\n", argv[1]);
		return 1;
	}

	bs = gf_bs_from_file(in, GF_BITSTREAM_READ);
	if (bs == NULL) { gf_fclose(in); return 1; }

	RS_Deinterleaver(bs, argv[2]);
	gf_fclose(in);
	gf_bs_del(bs);

	return 0;
}
Code Example #12
File: memory_decoder.c Project: Bevara/GPAC
GF_EXPORT
GF_Err gf_bifs_decode_command_list(GF_BifsDecoder *codec, u16 ESID, char *data, u32 data_length, GF_List *com_list)
{
	GF_BitStream *bs;
	GF_Err e;

	if (!codec || !data || !codec->dec_memory_mode || !com_list) return GF_BAD_PARAM;

	codec->info = gf_bifs_dec_get_stream(codec, ESID);
	if (!codec->info) return GF_BAD_PARAM;
	if (codec->info->config.elementaryMasks ) return GF_NOT_SUPPORTED;

	/*root parse (not conditionals)*/
	assert(codec->scenegraph);
	/*setup current scene graph*/
	codec->current_graph = codec->scenegraph;

	codec->ActiveQP = (M_QuantizationParameter*) codec->scenegraph->global_qp;

	bs = gf_bs_new(data, data_length, GF_BITSTREAM_READ);
	gf_bs_set_eos_callback(bs, BM_EndOfStream, codec);

	e = BM_ParseCommand(codec, bs, com_list);
	gf_bs_del(bs);

	/*decode conditionals / input sensors*/
	if (!e) {
		gf_bifs_flush_command_list(codec);
	}
	/*if err or not reset conditionals*/
	while (gf_list_count(codec->command_buffers)) {
		CommandBufferItem *cbi = (CommandBufferItem *)gf_list_get(codec->command_buffers, 0);
		gf_free(cbi);
		gf_list_rem(codec->command_buffers, 0);
	}

	/*reset current config*/
	codec->info = NULL;
	codec->current_graph = NULL;



//	gf_mx_v(codec->mx);
	return e;
}
Code Example #13
File: dummy_in.c Project: drakeguan/gpac
/*Dummy input just send a file name, no multitrack to handle so we don't need to check sub_url nor expected type*/
static GF_Descriptor *DC_GetServiceDesc(GF_InputService *plug, u32 expect_type, const char *sub_url)
{
	u32 size = 0;
	char *uri;
	GF_ESD *esd;
	GF_BitStream *bs;
	DCReader *read = (DCReader *) plug->priv;
	GF_InitialObjectDescriptor *iod = (GF_InitialObjectDescriptor *) gf_odf_desc_new(GF_ODF_IOD_TAG);
	iod->scene_profileAndLevel = 1;
	iod->graphics_profileAndLevel = 1;
	iod->OD_profileAndLevel = 1;
	iod->audio_profileAndLevel = 0xFE;
	iod->visual_profileAndLevel = 0xFE;
	iod->objectDescriptorID = 1;

	if (read->is_views_url) {
		iod->URLString = gf_strdup(read->url);
		return (GF_Descriptor *)iod;
	}

	esd = gf_odf_desc_esd_new(0);
	esd->slConfig->timestampResolution = 1000;
	esd->slConfig->useTimestampsFlag = 1;
	esd->ESID = 0xFFFE;
	esd->decoderConfig->streamType = GF_STREAM_PRIVATE_SCENE;
	esd->decoderConfig->objectTypeIndication = read->oti;
	if (read->dnload) {
		uri = (char *) gf_dm_sess_get_cache_name(read->dnload);
		gf_dm_sess_get_stats(read->dnload, NULL, NULL, &size, NULL, NULL, NULL);
	} else {
		FILE *f = gf_fopen(read->url, "rt");
		gf_fseek(f, 0, SEEK_END);
		size = (u32) gf_ftell(f);
		gf_fclose(f);
		uri = read->url;
	}
	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	gf_bs_write_u32(bs, size);
	gf_bs_write_data(bs, uri, (u32) strlen(uri));
	gf_bs_get_content(bs, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
	gf_bs_del(bs);

	gf_list_add(iod->ESDescriptors, esd);
	return (GF_Descriptor *)iod;
}
Code Example #14
File: mediacodec_dec.c Project: ARSekkat/gpac
GF_Err MCDec_InitMpeg4Decoder(MCDec *ctx) 
{
    char *dsi_data = NULL;
    u32 dsi_data_size = 0;

    if (!ctx->esd->decoderConfig->decoderSpecificInfo) {
        ctx->esd->decoderConfig->decoderSpecificInfo = (GF_DefaultDescriptor *) gf_odf_desc_new(GF_ODF_DSI_TAG);
    }
    
    if (ctx->esd->decoderConfig->decoderSpecificInfo->data) {
        GF_M4VDecSpecInfo vcfg;
        GF_BitStream *bs;
        
        gf_m4v_get_config(ctx->esd->decoderConfig->decoderSpecificInfo->data, ctx->esd->decoderConfig->decoderSpecificInfo->dataLength, &vcfg);
        ctx->width = vcfg.width;
        ctx->height = vcfg.height;

        if (ctx->esd->slConfig) {
            ctx->esd->slConfig->predefined  = 2;
        }

        bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
        gf_bs_write_u32(bs, 0);
        gf_odf_desc_write_bs((GF_Descriptor *) ctx->esd, bs);
        gf_bs_get_content(bs, &dsi_data, &dsi_data_size);
        gf_bs_del(bs);

        ctx->mime = "video/mp4v-es";

        char *esds = (char *)malloc(dsi_data_size);
        memcpy(esds, dsi_data, dsi_data_size);

        esds[0] = 0x00;
        esds[1] = 0x00;
        esds[2] = 0x00;
        esds[3] = 0x01;
       
        AMediaFormat_setBuffer(ctx->format, "csd-0", esds, dsi_data_size);

        gf_free(dsi_data);
        return GF_OK;
    } 
    return GF_NOT_SUPPORTED;
}
Code Example #15
File: img_in.c Project: ARSekkat/gpac
GF_ESD *IMG_GetESD(IMGLoader *read)
{
	GF_ESD *esd = gf_odf_desc_esd_new(0);
	esd->slConfig->timestampResolution = 1000;
	esd->decoderConfig->streamType = GF_STREAM_VISUAL;
	esd->ESID = 1;

	if (read->img_type == IMG_BMP)
		esd->decoderConfig->objectTypeIndication = GPAC_BMP_OTI;
	else {
		u8 OTI=0;
		GF_BitStream *bs = gf_bs_from_file(read->stream, GF_BITSTREAM_READ);
#ifndef GPAC_DISABLE_AV_PARSERS
		u32 mtype, w, h;
		gf_img_parse(bs, &OTI, &mtype, &w, &h, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
#endif
		gf_bs_del(bs);

		if (!OTI) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CODEC, ("[IMG_IN] Unable to guess image format - assigning from extension\n"));
			if (read->img_type==IMG_JPEG) OTI = GPAC_OTI_IMAGE_JPEG;
			else if (read->img_type==IMG_PNG) OTI = GPAC_OTI_IMAGE_PNG;
		}
		esd->decoderConfig->objectTypeIndication = OTI;

		if (read->img_type == IMG_PNGD) {
			GF_Descriptor *d = gf_odf_desc_new(GF_ODF_AUX_VIDEO_DATA);
			((GF_AuxVideoDescriptor*)d)->aux_video_type = 1;
			gf_list_add(esd->extensionDescriptors, d);
		}
		else if (read->img_type == IMG_PNGDS) {
			GF_Descriptor *d = gf_odf_desc_new(GF_ODF_AUX_VIDEO_DATA);
			((GF_AuxVideoDescriptor*)d)->aux_video_type = 2;
			gf_list_add(esd->extensionDescriptors, d);
		}
		else if (read->img_type == IMG_PNGS) {
			GF_Descriptor *d = gf_odf_desc_new(GF_ODF_AUX_VIDEO_DATA);
			((GF_AuxVideoDescriptor*)d)->aux_video_type = 3;
			gf_list_add(esd->extensionDescriptors, d);
		}
	}
	return esd;
}
Code Example #16
File: webvtt.c Project: porcelijn/gpac
GF_Err gf_webvtt_dump_iso_sample(FILE *dump, u32 timescale, GF_ISOSample *iso_sample, Bool box_mode)
{
	GF_Err e;
	GF_BitStream *bs;

	if (box_mode) {
		fprintf(dump, "<WebVTTSample decodingTimeStamp=\""LLU"\" compositionTimeStamp=\""LLD"\" RAP=\"%d\" dataLength=\"%d\" >\n", iso_sample->DTS, (s64)iso_sample->DTS + iso_sample->CTS_Offset, iso_sample->IsRAP, iso_sample->dataLength);
	}
	bs = gf_bs_new(iso_sample->data, iso_sample->dataLength, GF_BITSTREAM_READ);
	while(gf_bs_available(bs))
	{
		GF_Box *box;
		GF_WebVTTTimestamp ts;
		e = gf_isom_box_parse(&box, bs);
		if (e) return e;

		if (box_mode) {
			gf_isom_box_dump(box, dump);
		} else if (box->type == GF_ISOM_BOX_TYPE_VTCC_CUE) {
			GF_VTTCueBox *cuebox = (GF_VTTCueBox *)box;
			if (cuebox->id) fprintf(dump, "%s", cuebox->id->string);
			gf_webvtt_timestamp_set(&ts, (iso_sample->DTS * 1000) / timescale);
			gf_webvtt_timestamp_dump(&ts, dump, GF_FALSE);
			fprintf(dump, " --> NEXT");
			if (cuebox->settings) fprintf(dump, " %s", cuebox->settings->string);
			fprintf(dump, "\n");
			if (cuebox->payload) fprintf(dump, "%s", cuebox->payload->string);
			fprintf(dump, "\n");
		} else if (box->type == GF_ISOM_BOX_TYPE_VTTE) {
			gf_webvtt_timestamp_set(&ts, (iso_sample->DTS * 1000) / timescale);
			gf_webvtt_timestamp_dump(&ts, dump, GF_FALSE);
			fprintf(dump, " --> NEXT\n\n");
		} else if (box->type == GF_ISOM_BOX_TYPE_VTTA) {
			fprintf(dump, "%s\n\n", ((GF_StringBox *)box)->string);
		}
		gf_isom_box_del(box);
	}
	gf_bs_del(bs);
	if (box_mode) {
		fprintf(dump, "</WebVTTSample>\n");
	}
	return GF_OK;
}
Code Example #17
File: odf_codec.c Project: Abhinav95/ccextractor
GF_EXPORT
GF_Err gf_odf_desc_write(GF_Descriptor *desc, char **outEncDesc, u32 *outSize)
{
	GF_Err e;
	GF_BitStream *bs;
	if (!desc || !outEncDesc || !outSize) return GF_BAD_PARAM;
	*outEncDesc = NULL;
	*outSize = 0;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	if (!bs) return GF_OUT_OF_MEM;

	e = gf_odf_desc_write_bs(desc, bs);

	//then get the content from our bitstream
	gf_bs_get_content(bs, outEncDesc, outSize);
	gf_bs_del(bs);
	return e;
}
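
A calling sketch, assumed rather than taken from odf_codec.c: any descriptor (for example a GF_ESD) can be serialized this way, and the buffer returned on success is released with gf_free().

static GF_Err write_and_free_example(GF_Descriptor *desc)
{
	char *buf = NULL;
	u32 size = 0;
	GF_Err e = gf_odf_desc_write(desc, &buf, &size);
	if (e == GF_OK) {
		/*...store or transmit the size bytes at buf...*/
		gf_free(buf);
	}
	return e;
}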
Code Example #18
File: webvtt.c Project: dragonlucian/gpac
GF_Err gf_webvtt_dump_header_boxed(FILE *dump, const char *data, u32 dataLength, u32 *dumpedLength)
{
	GF_Err e;
	GF_Box *box;
	GF_StringBox *config;
	GF_BitStream *bs;
	*dumpedLength = 0;
	bs = gf_bs_new(data, dataLength, GF_BITSTREAM_READ);
	e = gf_isom_parse_box(&box, bs);
	if (!box || (box->type != GF_ISOM_BOX_TYPE_VTTC)) return GF_BAD_PARAM;
	config = (GF_StringBox *)box;
	if (config->string) {
		fprintf(dump, "%s", config->string);
		*dumpedLength = (u32)strlen(config->string)+1;
	}
	gf_bs_del(bs);
	gf_isom_box_del(box);
	return e;
}
Code Example #19
File: odf_codec.c Project: Abhinav95/ccextractor
GF_EXPORT
GF_Err gf_odf_codec_encode(GF_ODCodec *codec, u32 cleanup_type)
{
	GF_ODCom *com;
	GF_Err e = GF_OK;
	u32 i;

	if (!codec) return GF_BAD_PARAM;

	//check our bitstream: if existing, this means the previous encoded AU was not retrieved
	//we DON'T allow that
	if (codec->bs) return GF_BAD_PARAM;
	codec->bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	if (!codec->bs) return GF_OUT_OF_MEM;

	/*encode each command*/
	i = 0;
	while ((com = (GF_ODCom *)gf_list_enum(codec->CommandList, &i))) {
		e = gf_odf_write_command(codec->bs, com);
		if (e) goto err_exit;
		//don't forget OD Commands are aligned...
		gf_bs_align(codec->bs);
	}

	//if an error occurs, delete the GF_BitStream and empty the codec
err_exit:
	if (e) {
		gf_bs_del(codec->bs);
		codec->bs = NULL;
	}
	if (cleanup_type == 1) {
		while (gf_list_count(codec->CommandList)) {
			com = (GF_ODCom *)gf_list_get(codec->CommandList, 0);
			gf_odf_delete_command(com);
			gf_list_rem(codec->CommandList, 0);
		}
	}
	if (cleanup_type == 0) {
		gf_list_reset(codec->CommandList);
	}
	return e;
}
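
A hedged sketch of the calling pattern; gf_odf_codec_get_au and the cleanup_type value are assumptions based on the GPAC OD codec API. The AU must be fetched before the next encode, since the function above rejects a non-NULL codec->bs.

static GF_Err encode_od_au(GF_ODCodec *codec, char **au, u32 *au_size)
{
	GF_Err e = gf_odf_codec_encode(codec, 1);	/*cleanup_type 1: delete the encoded commands*/
	if (e) return e;
	return gf_odf_codec_get_au(codec, au, au_size);	/*caller frees *au with gf_free*/
}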
Code Example #20
File: saf_in.c Project: Bevara/GPAC
static void SAF_CheckFile(SAFIn *read)
{
	u32 nb_streams, i, cts, au_size, au_type, stream_id, ts_res;
	GF_BitStream *bs;
	StreamInfo si[1024];
	gf_f64_seek(read->stream, 0, SEEK_SET);
	bs = gf_bs_from_file(read->stream, GF_BITSTREAM_READ);

	nb_streams=0;
	while (gf_bs_available(bs)) {
		gf_bs_read_u16(bs);
		gf_bs_read_int(bs, 2);
		cts = gf_bs_read_int(bs, 30);
		au_size = gf_bs_read_int(bs, 16);
		au_type = gf_bs_read_int(bs, 4);
		stream_id = gf_bs_read_int(bs, 12);
		au_size-=2;
		ts_res = 0;
		for (i=0; i<nb_streams; i++) {
			if (si[i].stream_id==stream_id) ts_res = si[i].ts_res;
		}
		if (!ts_res) {
			if ((au_type==1) || (au_type==2) || (au_type==7)) {
				gf_bs_read_u16(bs);
				ts_res = gf_bs_read_u24(bs);
				au_size -= 5;
				si[nb_streams].stream_id = stream_id;
				si[nb_streams].ts_res = ts_res;
				nb_streams++;
			}
		}
		if (ts_res && (au_type==4)) {
			Double ts = cts;
			ts /= ts_res;
			if (ts>read->duration) read->duration = ts;
		}
		gf_bs_skip_bytes(bs, au_size);
	}
	gf_bs_del(bs);
	gf_f64_seek(read->stream, 0, SEEK_SET);
}
Code Example #21
File: descriptors.c Project: 17eparker/ccextractor
GF_EXPORT
GF_Err gf_odf_get_ui_config(GF_DefaultDescriptor *dsi, GF_UIConfig *cfg)
{
	u32 len, i;
	GF_BitStream *bs;
	if (!dsi || !dsi->data || !dsi->dataLength || !cfg) return GF_BAD_PARAM;
	memset(cfg, 0, sizeof(GF_UIConfig));
	cfg->tag = GF_ODF_UI_CFG_TAG;	
	bs = gf_bs_new(dsi->data, dsi->dataLength, GF_BITSTREAM_READ);
	len = gf_bs_read_int(bs, 8);
	cfg->deviceName = (char*)gf_malloc(sizeof(char) * (len+1));
	for (i=0; i<len; i++) cfg->deviceName[i] = gf_bs_read_int(bs, 8);
	cfg->deviceName[i] = 0;

	if (!stricmp(cfg->deviceName, "StringSensor") && gf_bs_available(bs)) {
		cfg->termChar = gf_bs_read_int(bs, 8);
		cfg->delChar = gf_bs_read_int(bs, 8);
	}
	gf_bs_del(bs);
	return GF_OK;
}
Code Example #22
File: tx3g.c Project: ScandalCorp/ccextractor
GF_Err gf_isom_text_get_encoded_tx3g(GF_ISOFile *file, u32 track, u32 sidx, u32 sidx_offset, char **tx3g, u32 *tx3g_size)
{
	GF_BitStream *bs;
	GF_TrackBox *trak;
	GF_Tx3gSampleEntryBox *a;
	
	trak = gf_isom_get_track_from_file(file, track);
	if (!trak) return GF_BAD_PARAM;

	a = (GF_Tx3gSampleEntryBox *) gf_list_get(trak->Media->information->sampleTable->SampleDescription->boxList, sidx-1);
	if (!a) return GF_BAD_PARAM;
	if ((a->type != GF_ISOM_BOX_TYPE_TX3G) && (a->type != GF_ISOM_BOX_TYPE_TEXT)) return GF_BAD_PARAM;
	
	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	gf_isom_write_tx3g(a, bs, sidx, sidx_offset);
	*tx3g = NULL;
	*tx3g_size = 0;
	gf_bs_get_content(bs, tx3g, tx3g_size);
	gf_bs_del(bs);
	return GF_OK;
}
Code Example #23
File: rtp_pck_3gpp.c Project: erelh/gpac
static void rtp_evrc_smv_flush(GP_RTPPacketizer *builder)
{
	if (!builder->bytesInPacket) return;
	if (builder->auh_size>1) {
		char *hdr;
		u32 hdr_size;
		/*padding*/
		if (builder->last_au_sn % 2) gf_bs_write_int(builder->pck_hdr, 0, 4);
		gf_bs_get_content(builder->pck_hdr, &hdr, &hdr_size);
		gf_bs_del(builder->pck_hdr);
		builder->pck_hdr = NULL;
		/*overwrite count*/
		hdr[0] = 0;
		hdr[1] = builder->last_au_sn-1;/*MMM + frameCount-1*/
		builder->OnData(builder->cbk_obj, hdr, hdr_size, 1);
		gf_free(hdr);
	}
	builder->OnPacketDone(builder->cbk_obj, &builder->rtp_header);
	builder->bytesInPacket = 0;
	builder->last_au_sn = 0;
}
Code Example #24
File: fm_fake_pull.c Project: bigbensk/gpac
static GF_ESD* get_esd()
{
	GF_BitStream *dsi = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	GF_ESD *esd = gf_odf_desc_esd_new(0);

	esd->ESID = 1;
	esd->decoderConfig->streamType = GF_STREAM_AUDIO;
	esd->decoderConfig->objectTypeIndication = GPAC_OTI_RAW_MEDIA_STREAM;
	esd->decoderConfig->avgBitrate = esd->decoderConfig->maxBitrate = 0;
	esd->slConfig->timestampResolution = FM_FAKE_PULL_AUDIO_FREQ;

	/*Decoder Specific Info for raw media*/
	gf_bs_write_u32(dsi, FM_FAKE_PULL_AUDIO_FREQ);	/*u32 sample_rate*/
	gf_bs_write_u16(dsi, FM_FAKE_PULL_CHAN_NUM);	/*u16 nb_channels*/
	gf_bs_write_u16(dsi, FM_FAKE_PULL_BITS);		/*u16 nb_bits_per_sample*/
	gf_bs_write_u32(dsi, FM_FAKE_PULL_FRAME_LEN);	/*u32 frame_size*/
	gf_bs_write_u32(dsi, 0);						/*u32 channel_config*/
	gf_bs_get_content(dsi, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
	gf_bs_del(dsi);

	return esd;
}
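
A hedged counterpart, not present in fm_fake_pull.c: a consumer of this raw-media DSI could read the fields back with the mirrored gf_bs_read_* calls.

static void read_raw_audio_dsi(char *data, u32 size, u32 *sample_rate, u32 *nb_ch, u32 *nb_bits, u32 *frame_len)
{
	GF_BitStream *bs = gf_bs_new(data, size, GF_BITSTREAM_READ);
	if (!bs) return;
	*sample_rate = gf_bs_read_u32(bs);	/*u32 sample_rate*/
	*nb_ch = gf_bs_read_u16(bs);		/*u16 nb_channels*/
	*nb_bits = gf_bs_read_u16(bs);		/*u16 nb_bits_per_sample*/
	*frame_len = gf_bs_read_u32(bs);	/*u32 frame_size*/
	/*u32 channel_config follows but is ignored here*/
	gf_bs_del(bs);
}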
Code Example #25
static GF_ESD *AAC_GetESD(AACReader *read)
{
	GF_BitStream *dsi;
	GF_ESD *esd;
	u32 i, sbr_sr_idx;

	esd = gf_odf_desc_esd_new(0);
	esd->decoderConfig->streamType = GF_STREAM_AUDIO;
	esd->decoderConfig->objectTypeIndication = read->oti;
	esd->ESID = 1;
	esd->OCRESID = 1;
	esd->slConfig->timestampResolution = read->sample_rate;
	if (read->is_live) esd->slConfig->useAccessUnitEndFlag = esd->slConfig->useAccessUnitStartFlag = 1;
	dsi = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);

	/*write as regular AAC*/
	gf_bs_write_int(dsi, read->prof, 5);
	gf_bs_write_int(dsi, read->sr_idx, 4);
	gf_bs_write_int(dsi, read->nb_ch, 4);
	gf_bs_align(dsi);

	/*always signal implicit SBR in case it's used*/
	sbr_sr_idx = read->sr_idx;
	for (i=0; i<16; i++) {
		if (GF_M4ASampleRates[i] == (u32) 2*read->sample_rate) {
			sbr_sr_idx = i;
			break;
		}
	}
	gf_bs_write_int(dsi, 0x2b7, 11);
	gf_bs_write_int(dsi, 5, 5);
	gf_bs_write_int(dsi, 1, 1);
	gf_bs_write_int(dsi, sbr_sr_idx, 4);

	gf_bs_align(dsi);
	gf_bs_get_content(dsi, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
	gf_bs_del(dsi);
	return esd;
}
Code Example #26
File: openhevc_dec.c Project: TotoLulu94/gpac
static GF_Err HEVC_ConfigurationScalableStream(HEVCDec *ctx, GF_ESD *esd)
{
	GF_HEVCConfig *cfg = NULL;
	char *data;
	u32 data_len;
	GF_BitStream *bs;
	u32 i, j;

	if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data)
		return GF_OK;
	cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, GF_FALSE);
	if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
	if (ctx->nalu_size_length != cfg->nal_unit_size)
		return GF_NON_COMPLIANT_BITSTREAM;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	for (i=0; i< gf_list_count(cfg->param_array); i++) {
		GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i);
		for (j=0; j< gf_list_count(ar->nalus); j++) {
			GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
			gf_bs_write_int(bs, sl->size, 8*ctx->nalu_size_length);
			gf_bs_write_data(bs, sl->data, sl->size);
		}
	}

	gf_bs_get_content(bs, &data, &data_len);
	gf_bs_del(bs);
	libOpenHevcDecode(ctx->openHevcHandle, (u8 *)data, data_len, 0);

	if (ctx->raw_out) fwrite((u8 *)data, 1, data_len, ctx->raw_out);

	gf_free(data);

	libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 2);
	libOpenHevcSetViewLayers(ctx->openHevcHandle, 1);

	return GF_OK;
}
Code Example #27
GF_Err gf_isom_ismacryp_sample_to_sample(GF_ISMASample *s, GF_ISOSample *dest)
{
	GF_BitStream *bs;
	if (!s || !dest) return GF_BAD_PARAM;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);

	if (s->flags & GF_ISOM_ISMA_USE_SEL_ENC) {
		gf_bs_write_int(bs, (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) ? 1 : 0, 1);
		gf_bs_write_int(bs, 0, 7);
	} 
	if (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) {
		if (s->IV_length) gf_bs_write_long_int(bs, s->IV, 8*s->IV_length);
		if (s->KI_length) gf_bs_write_data(bs, (char*)s->key_indicator, s->KI_length);
	}
	gf_bs_write_data(bs, s->data, s->dataLength);
	if (dest->data) gf_free(dest->data);
	dest->data = NULL;
	dest->dataLength = 0;
	gf_bs_get_content(bs, &dest->data, &dest->dataLength);
	gf_bs_del(bs);
	return GF_OK;
}
Code Example #28
File: demo_is.c Project: bigbensk/gpac
static void DEV_Start(struct __input_device *ifce)
{
	GF_BitStream *bs;
	char *buf, *szWord;
	u32 len, val, i, buf_size;

	szWord = "Hello InputSensor!";

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	/*HTK sensor buffer format: SFString - SFInt32 - SFFloat*/
	gf_bs_write_int(bs, 1, 1); 
	len = strlen(szWord);
	val = gf_get_bit_size(len);
	gf_bs_write_int(bs, val, 5);
	gf_bs_write_int(bs, len, val);
	for (i=0; i<len; i++) gf_bs_write_int(bs, szWord[i], 8);

	gf_bs_align(bs);
	gf_bs_get_content(bs, &buf, &buf_size);
	gf_bs_del(bs);

	ifce->DispatchFrame(ifce, buf, buf_size);
	gf_free(buf);
}
Code Example #29
File: hinting.c Project: ARSekkat/gpac
static GF_Err gf_isom_load_next_hint_sample(GF_ISOFile *the_file, u32 trackNumber, GF_TrackBox *trak, GF_HintSampleEntryBox *entry)
{
	GF_BitStream *bs;
	u32 descIdx;
	GF_ISOSample *samp;

	if (!entry->cur_sample) return GF_BAD_PARAM;
	if (entry->cur_sample>trak->Media->information->sampleTable->SampleSize->sampleCount) return GF_EOS;

	samp = gf_isom_get_sample(the_file, trackNumber, entry->cur_sample, &descIdx);
	if (!samp) return GF_IO_ERR;
	entry->cur_sample++;

	if (entry->hint_sample) gf_isom_hint_sample_del(entry->hint_sample);

	bs = gf_bs_new(samp->data, samp->dataLength, GF_BITSTREAM_READ);
	entry->hint_sample = gf_isom_hint_sample_new(entry->type);
	gf_isom_hint_sample_read(entry->hint_sample, bs, samp->dataLength);
	gf_bs_del(bs);
	entry->hint_sample->TransmissionTime = samp->DTS;
	gf_isom_sample_del(&samp);
	entry->hint_sample->sample_cache = gf_list_new();
	return GF_OK;
}
Code Example #30
File: droid_mpegv.c Project: Bevara/GPAC
u32 MPEGVS_OnData(struct __input_device * dr, const char* data)
{
	GF_BitStream *bs;
	char *buf;
	u32 buf_size;
	float x, y, z, q, a, b;
	MPEGVSCTX;

	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);

	if ( rc->sensorAndroidType == 1
	        || rc->sensorAndroidType == 2
	        || rc->sensorAndroidType == 3
	        || rc->sensorAndroidType == 4 )
	{
		sscanf(data, "%f;%f;%f;", &x, &y, &z);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, x);
		gf_bs_write_float(bs, y);
		gf_bs_write_float(bs, z);
	}
	else if ( rc->sensorAndroidType == 5
	          || rc->sensorAndroidType == 6 )
	{
		sscanf(data, "%f;", &x);

		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, x);
	}
	else if ( rc->sensorAndroidType == 11 )
	{
		sscanf(data, "%f;%f;%f;", &x, &y, &z);

		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, x);
		gf_bs_write_float(bs, y);
		gf_bs_write_float(bs, z);
		/*gf_bs_write_float(bs, q);*/
	}
	else if ( rc->sensorAndroidType == 100 )
	{
		sscanf(data, "%f;%f;%f;%f;%f;", &x, &y, &z, &a, &b);

		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, x);
		gf_bs_write_float(bs, y);
		gf_bs_write_float(bs, z);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, a);
		gf_bs_write_int(bs, 1, 1);
		gf_bs_write_float(bs, b);
		/*gf_bs_write_float(bs, q);*/
	}

	gf_bs_align(bs);
	gf_bs_get_content(bs, &buf, &buf_size);
	gf_bs_del(bs);

	dr->DispatchFrame(dr, (u8*)buf, buf_size);
	gf_free(buf);

	return GF_OK;
}