Code Example #1
File: sdp_fetch.c Project: Brilon314/gpac
void RP_SDPFromFile(RTPClient *rtp, char *file_name, RTPStream *stream)
{
	FILE *_sdp;
	char *sdp_buf;
	u32 sdp_size;

	sdp_buf = NULL;

	if (file_name && strstr(file_name, "file://")) file_name += strlen("file://");
	if (!file_name || !(_sdp = gf_fopen(file_name, "rt")) ) {
		gf_service_connect_ack(rtp->service, NULL, GF_URL_ERROR);
		return;
	}

	gf_fseek(_sdp, 0, SEEK_END);
	sdp_size = (u32) gf_ftell(_sdp);
	gf_fseek(_sdp, 0, SEEK_SET);
	sdp_buf = (char*)gf_malloc(sdp_size);
	if (1 > fread(sdp_buf, 1, sdp_size, _sdp)) {
		gf_service_connect_ack(rtp->service, NULL, GF_URL_ERROR);
	} else {
		RP_LoadSDP(rtp, sdp_buf, sdp_size, stream);
	}
	gf_fclose(_sdp);
	gf_free(sdp_buf);
}
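Several of the examples in this listing repeat the same sequence: gf_fseek to SEEK_END, gf_ftell for the size, gf_fseek back to the start, then a single fread of the whole file. The helper below is a minimal consolidated sketch of that pattern with error checks added; it is not part of the gpac sources and the function name is illustrative.

/* Sketch, not a gpac API: load a whole file into memory with the same
 * gf_fseek/gf_ftell idiom used throughout this listing, with error checks.
 * Returns NULL on failure; on success *out_size receives the byte count and
 * the caller must release the buffer with gf_free(). */
static char *load_whole_file(const char *file_name, u32 *out_size)
{
	FILE *f;
	char *buf;
	u32 size;

	if (!file_name || !out_size) return NULL;
	f = gf_fopen(file_name, "rb");
	if (!f) return NULL;

	gf_fseek(f, 0, SEEK_END);
	size = (u32) gf_ftell(f);
	gf_fseek(f, 0, SEEK_SET);

	buf = (char *) gf_malloc(size + 1);
	if (!buf) {
		gf_fclose(f);
		return NULL;
	}
	if (size && (fread(buf, 1, size, f) != size)) {
		gf_free(buf);
		gf_fclose(f);
		return NULL;
	}
	buf[size] = 0;	/*null-terminate so text files can be used directly*/
	gf_fclose(f);
	*out_size = size;
	return buf;
}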
Code Example #2
File: img_in.c Project: NathanLewis/gpac
static GF_Err IMG_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url)
{
	char *sExt;
	IMGLoader *read = (IMGLoader *)plug->priv;

	read->service = serv;
	if (!url)
		return GF_BAD_PARAM;
	sExt = strrchr(url, '.');
	if (sExt) {
		if (!stricmp(sExt, ".jpeg") || !stricmp(sExt, ".jpg")) read->img_type = IMG_JPEG;
		else if (!stricmp(sExt, ".png")) read->img_type = IMG_PNG;
		else if (!stricmp(sExt, ".pngd")) read->img_type = IMG_PNGD;
		else if (!stricmp(sExt, ".pngds")) read->img_type = IMG_PNGDS;
		else if (!stricmp(sExt, ".pngs")) read->img_type = IMG_PNGS;
		else if (!stricmp(sExt, ".bmp")) read->img_type = IMG_BMP;
	}

	if (read->dnload) gf_service_download_del(read->dnload);
	read->dnload = NULL;

	/*remote fetch*/
	if (!jp_is_local(url)) {
		jp_download_file(plug, url);
		return GF_OK;
	}

	read->stream = gf_fopen(url, "rb");
	if (read->stream) {
		gf_fseek(read->stream, 0, SEEK_END);
		read->data_size = (u32) gf_ftell(read->stream);
		gf_fseek(read->stream, 0, SEEK_SET);
	}
	gf_service_connect_ack(serv, NULL, read->stream ? GF_OK : GF_URL_ERROR);
	if (read->stream && read->is_inline) IMG_SetupObject(read);
	return GF_OK;
}
Code Example #3
File: rtp_streamer.c Project: indiereign/gpac
GF_EXPORT
char *gf_rtp_streamer_format_sdp_header(char *app_name, char *ip_dest, char *session_name, char *iod64)
{
	u64 size;
	char *sdp, *tmp_fn = NULL;
	FILE *tmp = gf_temp_file_new(&tmp_fn);
	if (!tmp) return NULL;

	/* write SDP header*/
	fprintf(tmp, "v=0\n");
	fprintf(tmp, "o=%s 3326096807 1117107880000 IN IP%d %s\n", app_name, gf_net_is_ipv6(ip_dest) ? 6 : 4, ip_dest);
	fprintf(tmp, "s=%s\n", (session_name ? session_name : "GPAC Scene Streaming Session"));
	fprintf(tmp, "c=IN IP%d %s\n", gf_net_is_ipv6(ip_dest) ? 6 : 4, ip_dest);
	fprintf(tmp, "t=0 0\n");

	if (iod64) fprintf(tmp, "a=mpeg4-iod:\"data:application/mpeg4-iod;base64,%s\"\n", iod64);

	gf_fseek(tmp, 0, SEEK_END);
	size = gf_ftell(tmp);
	gf_fseek(tmp, 0, SEEK_SET);
	sdp = (char*)gf_malloc(sizeof(char) * (size_t)(size+1));
	size = fread(sdp, 1, (size_t)size, tmp);
	sdp[size] = 0;
	gf_fclose(tmp);
	gf_delete_file(tmp_fn);
	gf_free(tmp_fn);
	return sdp;
}
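A usage note for gf_rtp_streamer_format_sdp_header: the string it returns is allocated with gf_malloc, so the caller is responsible for releasing it with gf_free. The fragment below is a hypothetical caller sketch; the application name, destination address and session name are made-up values, not anything taken from the gpac sources.

/* Hypothetical caller sketch: the values passed in are illustrative only. */
static void dump_sdp_header_example(void)
{
	char *sdp = gf_rtp_streamer_format_sdp_header("MyApp", "127.0.0.1", "Test Session", NULL);
	if (!sdp) return;
	fprintf(stdout, "%s", sdp);	/*per-stream media lines would be appended here*/
	gf_free(sdp);	/*caller owns the returned buffer*/
}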
Code Example #4
File: img_in.c Project: ARSekkat/gpac
void IMG_NetIO(void *cbk, GF_NETIO_Parameter *param)
{
	GF_Err e;
	const char *szCache;
	IMGLoader *read = (IMGLoader *) cbk;
	if (!read->dnload) return;

	/*handle service message*/
	gf_service_download_update_stats(read->dnload);

	e = param->error;
	/*wait to get the whole file*/
	if (!e && (param->msg_type!=GF_NETIO_DATA_TRANSFERED)) return;
	if ((e==GF_EOS) && (param->msg_type==GF_NETIO_DATA_EXCHANGE)) return;

	if (param->msg_type==GF_NETIO_DATA_TRANSFERED) {
		szCache = gf_dm_sess_get_cache_name(read->dnload);
		if (!szCache) e = GF_IO_ERR;
		else {
			if (read->stream) gf_fclose(read->stream);
			read->stream = gf_fopen((char *) szCache, "rb");
			if (!read->stream) e = GF_SERVICE_ERROR;
			else {
				e = GF_OK;
				gf_fseek(read->stream, 0, SEEK_END);
				read->data_size = (u32) gf_ftell(read->stream);
				gf_fseek(read->stream, 0, SEEK_SET);
			}
		}
	}
	/*OK confirm*/
	gf_service_connect_ack(read->service, NULL, e);
	if (!e) IMG_SetupObject(read);
}
Code Example #5
File: vtt_dec.c Project: LongHanVisazure/gpac
/* Checks that the file is fully downloaded
   requires that the file_size is given in the DecoderSpecificInfo */
static Bool vtt_check_download(VTTDec *vttdec)
{
	u64 size;
	FILE *f = gf_fopen(vttdec->file_name, "rt");
	if (!f) return GF_FALSE;
	gf_fseek(f, 0, SEEK_END);
	size = gf_ftell(f);
	gf_fclose(f);
	if (size==vttdec->file_size) return GF_TRUE;
	return GF_FALSE;
}
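The check above, and the similar one in Code Example #8 below, both reduce to "how large is the partially downloaded file right now?". The helper below is a sketch of that probe on its own, using only the gf_fopen/gf_fseek/gf_ftell/gf_fclose calls already shown; it is not a gpac API and returns 0 when the file cannot be opened yet.

/* Sketch, not a gpac API: current on-disk size of a (possibly still
 * downloading) file, or 0 if it cannot be opened yet. */
static u64 current_file_size(const char *file_name)
{
	u64 size;
	FILE *f = gf_fopen(file_name, "rt");
	if (!f) return 0;
	gf_fseek(f, 0, SEEK_END);
	size = gf_ftell(f);
	gf_fclose(f);
	return size;
}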
Code Example #6
File: meta.c Project: Brilon314/gpac
GF_EXPORT
GF_Err gf_isom_set_meta_xml(GF_ISOFile *file, Bool root_meta, u32 track_num, char *XMLFileName, Bool IsBinaryXML)
{
	GF_Err e;
	FILE *xmlfile;
	GF_XMLBox *xml;
	GF_MetaBox *meta;

	e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE);
	if (e) return e;

	meta = gf_isom_get_meta(file, root_meta, track_num);
	if (!meta) return GF_BAD_PARAM;

	e = gf_isom_remove_meta_xml(file, root_meta, track_num);
	if (e) return e;

	xml = (GF_XMLBox *)xml_New();
	if (!xml) return GF_OUT_OF_MEM;
	gf_list_add(meta->other_boxes, xml);
	if (IsBinaryXML) xml->type = GF_ISOM_BOX_TYPE_BXML;


	/*assume 32bit max size = 4Go should be sufficient for a DID!!*/
	xmlfile = gf_fopen(XMLFileName, "rb");
	if (!xmlfile) return GF_URL_ERROR;
	gf_fseek(xmlfile, 0, SEEK_END);
	assert(gf_ftell(xmlfile) < 1<<31);
	xml->xml_length = (u32) gf_ftell(xmlfile);
	gf_fseek(xmlfile, 0, SEEK_SET);
	xml->xml = (char*)gf_malloc(sizeof(unsigned char)*xml->xml_length);
	xml->xml_length = (u32) fread(xml->xml, 1, sizeof(unsigned char)*xml->xml_length, xmlfile);
	if (ferror(xmlfile)) {
		gf_free(xml->xml);
		xml->xml = NULL;
		gf_fclose(xmlfile);
		return GF_BAD_PARAM;
	}
	gf_fclose(xmlfile);
	return GF_OK;
}
Code Example #7
File: dummy_in.c Project: drakeguan/gpac
/*Dummy input just send a file name, no multitrack to handle so we don't need to check sub_url nor expected type*/
static GF_Descriptor *DC_GetServiceDesc(GF_InputService *plug, u32 expect_type, const char *sub_url)
{
	u32 size = 0;
	char *uri;
	GF_ESD *esd;
	GF_BitStream *bs;
	DCReader *read = (DCReader *) plug->priv;
	GF_InitialObjectDescriptor *iod = (GF_InitialObjectDescriptor *) gf_odf_desc_new(GF_ODF_IOD_TAG);
	iod->scene_profileAndLevel = 1;
	iod->graphics_profileAndLevel = 1;
	iod->OD_profileAndLevel = 1;
	iod->audio_profileAndLevel = 0xFE;
	iod->visual_profileAndLevel = 0xFE;
	iod->objectDescriptorID = 1;

	if (read->is_views_url) {
		iod->URLString = gf_strdup(read->url);
		return (GF_Descriptor *)iod;
	}

	esd = gf_odf_desc_esd_new(0);
	esd->slConfig->timestampResolution = 1000;
	esd->slConfig->useTimestampsFlag = 1;
	esd->ESID = 0xFFFE;
	esd->decoderConfig->streamType = GF_STREAM_PRIVATE_SCENE;
	esd->decoderConfig->objectTypeIndication = read->oti;
	if (read->dnload) {
		uri = (char *) gf_dm_sess_get_cache_name(read->dnload);
		gf_dm_sess_get_stats(read->dnload, NULL, NULL, &size, NULL, NULL, NULL);
	} else {
		FILE *f = gf_fopen(read->url, "rt");
		if (f) {
			gf_fseek(f, 0, SEEK_END);
			size = (u32) gf_ftell(f);
			gf_fclose(f);
		}
		uri = read->url;
	}
	bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
	gf_bs_write_u32(bs, size);
	gf_bs_write_data(bs, uri, (u32) strlen(uri));
	gf_bs_get_content(bs, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
	gf_bs_del(bs);

	gf_list_add(iod->ESDescriptors, esd);
	return (GF_Descriptor *)iod;
}
Code Example #8
File: ctx_load.c Project: TotoLulu94/gpac
static Bool CTXLoad_CheckDownload(CTXLoadPriv *priv)
{
	u64 size;
	FILE *f;
	u32 now = gf_sys_clock();

	if (!priv->file_size && (now - priv->last_check_time < 1000) ) return GF_FALSE;

	f = gf_fopen(priv->file_name, "rt");
	if (!f) return GF_FALSE;
	gf_fseek(f, 0, SEEK_END);
	size = gf_ftell(f);
	gf_fclose(f);

	/*we MUST have a complete file for now ...*/
	if (!priv->file_size) {
		if (priv->last_check_size == size) return GF_TRUE;
		priv->last_check_size = size;
		priv->last_check_time = now;
	} else {
		if (size==priv->file_size) return GF_TRUE;
	}
	return GF_FALSE;
}
Code Example #9
File: webvtt.c Project: dragonlucian/gpac
GF_Err gf_webvtt_parser_init(GF_WebVTTParser *parser, const char *input_file,
                             void *user, GF_Err (*report_message)(void *, GF_Err, char *, const char *),
                             void (*on_sample_parsed)(void *, GF_WebVTTSample *),
                             void (*on_header_parsed)(void *, const char *))
{
	const char *ext;
	if (parser) {
		parser->state       = WEBVTT_PARSER_STATE_WAITING_SIGNATURE;

		ext = strrchr(input_file, '.');
		if (ext) {
			ext++;
			if (!strcmp(ext, "srt")) {
				parser->is_srt = GF_TRUE;
				parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
			}
		}
		parser->vtt_in = gf_fopen(input_file, "rt");
		if (!parser->vtt_in) return GF_URL_ERROR;
		gf_fseek(parser->vtt_in, 0, SEEK_END);
		parser->file_size = gf_ftell(parser->vtt_in);
		gf_fseek(parser->vtt_in, 0, SEEK_SET);

		parser->unicode_type = gf_text_get_utf_type(parser->vtt_in);
		if (parser->unicode_type<0) {
			gf_fclose(parser->vtt_in);
			return GF_NOT_SUPPORTED;
		}

		parser->user = user;
		parser->report_message = report_message;
		parser->on_sample_parsed = on_sample_parsed;
		parser->on_header_parsed = on_header_parsed;
		return GF_OK;
	}
	return GF_BAD_PARAM;
}
Code Example #10
File: field_encode.c Project: Brilon314/gpac
GF_Err gf_bifs_enc_sf_field(GF_BifsEncoder *codec, GF_BitStream *bs, GF_Node *node, GF_FieldInfo *field)
{
	GF_Err e;

	if (node) {
		e = gf_bifs_enc_quant_field(codec, bs, node, field);
		if (e != GF_EOS) return e;
	}
	switch (field->fieldType) {
	case GF_SG_VRML_SFBOOL:
		GF_BIFS_WRITE_INT(codec, bs, * ((SFBool *)field->far_ptr), 1, "SFBool", NULL);
		break;
	case GF_SG_VRML_SFCOLOR:
		BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->red, bs, "color.red");
		BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->green, bs, "color.green");
		BE_WriteSFFloat(codec, ((SFColor *)field->far_ptr)->blue, bs, "color.blue");
		break;
	case GF_SG_VRML_SFFLOAT:
		BE_WriteSFFloat(codec, * ((SFFloat *)field->far_ptr), bs, NULL);
		break;
	case GF_SG_VRML_SFINT32:
		GF_BIFS_WRITE_INT(codec, bs, * ((SFInt32 *)field->far_ptr), 32, "SFInt32", NULL);
		break;
	case GF_SG_VRML_SFROTATION:
		BE_WriteSFFloat(codec, ((SFRotation  *)field->far_ptr)->x, bs, "rot.x");
		BE_WriteSFFloat(codec, ((SFRotation  *)field->far_ptr)->y, bs, "rot.y");
		BE_WriteSFFloat(codec, ((SFRotation  *)field->far_ptr)->z, bs, "rot.z");
		BE_WriteSFFloat(codec, ((SFRotation  *)field->far_ptr)->q, bs, "rot.theta");
		break;

	case GF_SG_VRML_SFSTRING:
		if (node && (node->sgprivate->tag==TAG_MPEG4_CacheTexture) && (field->fieldIndex<=2)) {
			u32 size, val;
			char buf[4096];
			char *res_src = NULL;
			const char *src = ((SFString*)field->far_ptr)->buffer;
			FILE *f;
			if (codec->src_url) res_src = gf_url_concatenate(codec->src_url, src);

			f = gf_fopen(res_src ? res_src : src, "rb");
			if (!f) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[BIFS] Cannot open source file %s for encoding CacheTexture\n", res_src ? res_src : src));
				return GF_URL_ERROR;
			}
			if (res_src) gf_free(res_src);
			gf_fseek(f, 0, SEEK_END);
			size = (u32) gf_ftell(f);
			val = gf_get_bit_size(size);
			GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
			GF_BIFS_WRITE_INT(codec, bs, size, val, "length", NULL);
			gf_fseek(f, 0, SEEK_SET);
			while (size) {
				u32 read = (u32) fread(buf, 1, 4096, f);
				gf_bs_write_data(bs, buf, read);
				size -= read;
			}
		} else {
			u32 i, val, len;
			char *str = (char *) ((SFString*)field->far_ptr)->buffer;
			if (node && (node->sgprivate->tag==TAG_MPEG4_BitWrapper) ) {
				len = ((M_BitWrapper*)node)->buffer_len;
			} else {
				len = str ? (u32) strlen(str) : 0;
			}
			val = gf_get_bit_size(len);
			GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
			GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL);
			for (i=0; i<len; i++) gf_bs_write_int(bs, str[i], 8);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string\t\t%d\t\t%s\n", 8*len, str) );
		}
		break;

	case GF_SG_VRML_SFTIME:
		gf_bs_write_double(bs, *((SFTime *)field->far_ptr));
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] SFTime\t\t%d\t\t%g\n", 64, *((SFTime *)field->far_ptr)));
		break;

	case GF_SG_VRML_SFVEC2F:
		BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->x, bs, "vec2f.x");
		BE_WriteSFFloat(codec, ((SFVec2f *)field->far_ptr)->y, bs, "vec2f.y");
		break;

	case GF_SG_VRML_SFVEC3F:
		BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->x, bs, "vec3f.x");
		BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->y, bs, "vec3f.y");
		BE_WriteSFFloat(codec, ((SFVec3f *)field->far_ptr)->z, bs, "vec3f.z");
		break;

	case GF_SG_VRML_SFURL:
	{
		SFURL *url = (SFURL *) field->far_ptr;
		GF_BIFS_WRITE_INT(codec, bs, (url->OD_ID>0) ? 1 : 0, 1, "hasODID", "SFURL");
		if (url->OD_ID>0) {
			GF_BIFS_WRITE_INT(codec, bs, url->OD_ID, 10, "ODID", "SFURL");
		} else {
			u32 i, len = url->url ? (u32) strlen(url->url) : 0;
			u32 val = gf_get_bit_size(len);
			GF_BIFS_WRITE_INT(codec, bs, val, 5, "nbBits", NULL);
			GF_BIFS_WRITE_INT(codec, bs, len, val, "length", NULL);
			for (i=0; i<len; i++) gf_bs_write_int(bs, url->url[i], 8);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] string\t\t%d\t\t%s\t\t//SFURL\n", 8*len, url->url));
		}
	}
	break;
	case GF_SG_VRML_SFIMAGE:
	{
		u32 size, i;
		SFImage *img = (SFImage *)field->far_ptr;
		GF_BIFS_WRITE_INT(codec, bs, img->width, 12, "width", "SFImage");
		GF_BIFS_WRITE_INT(codec, bs, img->height, 12, "height", "SFImage");
		GF_BIFS_WRITE_INT(codec, bs, img->numComponents - 1, 2, "nbComp", "SFImage");
		size = img->width * img->height * img->numComponents;
		for (i=0; i<size; i++) gf_bs_write_int(bs, img->pixels[i], 8);
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] pixels\t\t%d\t\tnot dumped\t\t//SFImage\n", 8*size));
	}
	break;

	case GF_SG_VRML_SFCOMMANDBUFFER:
	{
		SFCommandBuffer *cb = (SFCommandBuffer *) field->far_ptr;
		if (cb->buffer) gf_free(cb->buffer);
		cb->buffer = NULL;
		cb->bufferSize = 0;
		if (gf_list_count(cb->commandList)) {
			u32 i, nbBits;
			GF_BitStream *bs_cond = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] /*SFCommandBuffer*/\n" ));
			e = gf_bifs_enc_commands(codec, cb->commandList, bs_cond);
			if (!e) gf_bs_get_content(bs_cond, (char**)&cb->buffer, &cb->bufferSize);
			gf_bs_del(bs_cond);
			if (e) return e;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODING, ("[BIFS] /*End SFCommandBuffer*/\n"));
			nbBits = gf_get_bit_size(cb->bufferSize);
			GF_BIFS_WRITE_INT(codec, bs, nbBits, 5, "NbBits", NULL);
			GF_BIFS_WRITE_INT(codec, bs, cb->bufferSize, nbBits, "BufferSize", NULL);
			for (i=0; i<cb->bufferSize; i++) GF_BIFS_WRITE_INT(codec, bs, cb->buffer[i], 8, "buffer byte", NULL);
		}
		/*empty command buffer*/
		else {
			GF_BIFS_WRITE_INT(codec, bs, 0, 5, "NbBits", NULL);
		}
	}
	break;

	case GF_SG_VRML_SFNODE:
		return gf_bifs_enc_node(codec, *((GF_Node **)field->far_ptr), field->NDTtype, bs, node);

	case GF_SG_VRML_SFSCRIPT:
#ifdef GPAC_HAS_SPIDERMONKEY
		codec->LastError = SFScript_Encode(codec, (SFScript *)field->far_ptr, bs, node);
#else
		return GF_NOT_SUPPORTED;
#endif
		break;
	case GF_SG_VRML_SFATTRREF:
	{
		u32 idx=0;
		SFAttrRef *ar = (SFAttrRef *)field->far_ptr;
		u32 nbBitsDEF = gf_get_bit_size(gf_node_get_num_fields_in_mode(ar->node, GF_SG_FIELD_CODING_DEF) - 1);
		GF_BIFS_WRITE_INT(codec, bs, gf_node_get_id(ar->node) - 1, codec->info->config.NodeIDBits, "NodeID", NULL);

		gf_bifs_field_index_by_mode(ar->node, ar->fieldIndex, GF_SG_FIELD_CODING_DEF, &idx);
		GF_BIFS_WRITE_INT(codec, bs, idx, nbBitsDEF, "field", NULL);
	}
	break;
	default:
		return GF_NOT_SUPPORTED;
	}
	return codec->LastError;
}
Code Example #11
File: meta.c Project: Brilon314/gpac
GF_Err gf_isom_add_meta_item_extended(GF_ISOFile *file, Bool root_meta, u32 track_num, Bool self_reference, char *resource_path,
                                      const char *item_name, u32 item_id, const char *mime_type, const char *content_encoding,
                                      GF_ImageItemProperties *image_props,
                                      const char *URL, const char *URN,
                                      char *data, u32 data_len)
{
	GF_Err e;
	GF_ItemLocationEntry *location_entry;
	GF_ItemInfoEntryBox *infe;
	GF_MetaBox *meta;
	u32 lastItemID = 0;

	if (!self_reference && !item_name && !resource_path) return GF_BAD_PARAM;
	e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE);
	if (e) return e;
	meta = gf_isom_get_meta(file, root_meta, track_num);
	if (!meta) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("Trying to add item, but missing meta box"));
		return GF_BAD_PARAM;
	}

	e = FlushCaptureMode(file);
	if (e) return e;

	/*check file exists */
	if (!URN && !URL && !self_reference && !data) {
		FILE *src = gf_fopen(resource_path, "rb");
		if (!src) return GF_URL_ERROR;
		gf_fclose(src);
	}

	if (meta->item_infos) {
		u32 i;
		u32 item_count = gf_list_count(meta->item_infos->item_infos);
		for (i = 0; i < item_count; i++) {
			GF_ItemInfoEntryBox *e= (GF_ItemInfoEntryBox *)gf_list_get(meta->item_infos->item_infos, i);
			if (e->item_ID > lastItemID) lastItemID = e->item_ID;
			if (item_id == e->item_ID) {
				GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[IsoMedia] Item with id %d already exists, ignoring\n", item_id));
				item_id = 0;
			}
		}
	}

	infe = (GF_ItemInfoEntryBox *)infe_New();
	if (item_id) {
		infe->item_ID = item_id;
	} else {
		infe->item_ID = ++lastItemID;
	}

	/*get relative name*/
	if (item_name) {
		infe->item_name = gf_strdup(item_name);
	} else if (resource_path) {
		if (strrchr(resource_path, GF_PATH_SEPARATOR)) {
			infe->item_name = gf_strdup(strrchr(resource_path, GF_PATH_SEPARATOR) + 1);
		} else {
			infe->item_name = gf_strdup(resource_path);
		}
	}

	if (mime_type) {
		infe->content_type = gf_strdup(mime_type);
	} else {
		infe->content_type = gf_strdup("application/octet-stream");
	}
	if (content_encoding) infe->content_encoding = gf_strdup(content_encoding);

	/*Creation of the ItemLocation */
	location_entry = (GF_ItemLocationEntry*)gf_malloc(sizeof(GF_ItemLocationEntry));
	if (!location_entry) {
		gf_isom_box_del((GF_Box *)infe);
		return GF_OUT_OF_MEM;
	}
	memset(location_entry, 0, sizeof(GF_ItemLocationEntry));
	location_entry->extent_entries = gf_list_new();

	/*Creates an mdat if it does not exist*/
	if (!file->mdat) {
		file->mdat = (GF_MediaDataBox *)mdat_New();
		gf_list_add(file->TopBoxes, file->mdat);
	}

	/*Creation an ItemLocation Box if it does not exist*/
	if (!meta->item_locations) meta->item_locations = (GF_ItemLocationBox *)iloc_New();
	gf_list_add(meta->item_locations->location_entries, location_entry);
	location_entry->item_ID = infe->item_ID;

	if (!meta->item_infos) meta->item_infos = (GF_ItemInfoBox *) iinf_New();
	e = gf_list_add(meta->item_infos->item_infos, infe);
	if (e) return e;

	if (image_props) {
		meta_process_image_properties(meta, infe->item_ID, image_props);
	}

	/*0: the current file*/
	location_entry->data_reference_index = 0;
	if (self_reference) {
		GF_ItemExtentEntry *entry;
		GF_SAFEALLOC(entry, GF_ItemExtentEntry);
		gf_list_add(location_entry->extent_entries, entry);
		if (!infe->item_name) infe->item_name = gf_strdup("");
		return GF_OK;
	}

	/*file not copied, just referenced*/
	if (URL || URN) {
		u32 dataRefIndex;
		if (!meta->file_locations) meta->file_locations = (GF_DataInformationBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_DINF);
		if (!meta->file_locations->dref) meta->file_locations->dref = (GF_DataReferenceBox *) gf_isom_box_new(GF_ISOM_BOX_TYPE_DREF);
		e = Media_FindDataRef(meta->file_locations->dref, (char *) URL, (char *) URN, &dataRefIndex);
		if (e) return e;
		if (!dataRefIndex) {
			e = Media_CreateDataRef(meta->file_locations->dref, (char *) URL, (char *) URN, &dataRefIndex);
			if (e) return e;
		}
		location_entry->data_reference_index = dataRefIndex;
	}

	/*capture mode, write to disk*/
	if ((file->openMode == GF_ISOM_OPEN_WRITE) && !location_entry->data_reference_index) {
		FILE *src;
		GF_ItemExtentEntry *entry;
		GF_SAFEALLOC(entry, GF_ItemExtentEntry);

		location_entry->base_offset = gf_bs_get_position(file->editFileMap->bs);

		/*update base offset size*/
		if (location_entry->base_offset>0xFFFFFFFF) meta->item_locations->base_offset_size = 8;
		else if (location_entry->base_offset && !meta->item_locations->base_offset_size) meta->item_locations->base_offset_size = 4;

		entry->extent_length = 0;
		entry->extent_offset = 0;
		gf_list_add(location_entry->extent_entries, entry);

		if (data) {
			gf_bs_write_data(file->editFileMap->bs, data, data_len);
			/*update length size*/
			if (entry->extent_length>0xFFFFFFFF) meta->item_locations->length_size = 8;
			else if (entry->extent_length && !meta->item_locations->length_size) meta->item_locations->length_size = 4;
		} else if (resource_path) {
			src = gf_fopen(resource_path, "rb");
			if (src) {
				char cache_data[4096];
				u64 remain;
				gf_fseek(src, 0, SEEK_END);
				entry->extent_length = gf_ftell(src);
				gf_fseek(src, 0, SEEK_SET);

				remain = entry->extent_length;
				while (remain) {
					u32 size_cache = (remain>4096) ? 4096 : (u32) remain;
					size_t read = fread(cache_data, 1, size_cache, src);
					if (!read) break;	/*EOF or read error*/
					gf_bs_write_data(file->editFileMap->bs, cache_data, (u32) read);
					remain -= (u32) read;
				}
				gf_fclose(src);

				/*update length size*/
				if (entry->extent_length>0xFFFFFFFF) meta->item_locations->length_size = 8;
				else if (entry->extent_length && !meta->item_locations->length_size) meta->item_locations->length_size = 4;
			}
		}
	}
	/*store full path for info*/
	else if (!location_entry->data_reference_index) {
		if (data) {
			infe->full_path = (char *)gf_malloc(sizeof(char) * data_len);
			memcpy(infe->full_path, data, sizeof(char) * data_len);
			infe->data_len = data_len;
		} else {
			infe->full_path = gf_strdup(resource_path);
			infe->data_len = 0;
		}
	}
	return GF_OK;
}
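Code Examples #10 and #11 both copy a source file into a GF_BitStream in fixed-size chunks after measuring it with gf_fseek/gf_ftell. The loop below is a sketch of just that copy step, assuming an already-opened FILE and bitstream; it is not a gpac API and the name is illustrative.

/* Sketch, not a gpac API: stream an already-opened file into a bitstream in
 * 4096-byte chunks, as done in Code Examples #10 and #11. Returns the number
 * of bytes actually copied. */
static u64 copy_file_to_bs(FILE *src, GF_BitStream *bs)
{
	char cache_data[4096];
	u64 copied = 0;
	gf_fseek(src, 0, SEEK_SET);
	while (1) {
		u32 read = (u32) fread(cache_data, 1, sizeof(cache_data), src);
		if (!read) break;	/*EOF or read error*/
		gf_bs_write_data(bs, cache_data, read);
		copied += read;
	}
	return copied;
}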
Code Example #12
File: main.c Project: JamesLinus/gpac
int main(int argc, char **argv)
{
	/* The ISO progressive reader */
	ISOProgressiveReader reader;
	/* Error indicator */
	GF_Err e;
	/* input file to be read in the data buffer */
	FILE *input;
	/* number of bytes read from the file at each read operation */
	u32 read_bytes;
	/* number of bytes read from the file (total) */
	u64 total_read_bytes;
	/* size of the input file */
	u64 file_size;
	/* number of bytes required to finish the current ISO Box reading (not used here)*/
	u64 missing_bytes;
	/* Thread used to run the ISO parsing in */
	GF_Thread *reading_thread;
	/* Return value for the program */
	int ret = 0;

	/* Usage */
	if (argc != 2) {
		fprintf(stdout, "Usage: %s filename\n", argv[0]);
		return 1;
	}

	/* Initializing GPAC framework */
	/* Enables GPAC memory tracking in debug mode only */
#if defined(DEBUG) || defined(_DEBUG)
	gf_sys_init(GF_MemTrackerSimple);
	gf_log_set_tool_level(GF_LOG_ALL, GF_LOG_WARNING);
	gf_log_set_tool_level(GF_LOG_MEMORY, GF_LOG_INFO);
#else
	gf_sys_init(GF_MemTrackerNone);
	gf_log_set_tool_level(GF_LOG_ALL, GF_LOG_WARNING);
#endif

	/* This is an input file to read data from. Could be replaced by any other method to retrieve the data (e.g. JavaScript, socket, ...)*/
	input = gf_fopen(argv[1], "rb");
	if (!input) {
		fprintf(stdout, "Could not open file %s for reading.\n", argv[1]);
		gf_sys_close();
		return 1;
	}

	gf_fseek(input, 0, SEEK_END);
	file_size = gf_ftell(input);
	gf_fseek(input, 0, SEEK_SET);

	/* Initializing the progressive reader */
	memset(&reader, 0, sizeof(ISOProgressiveReader));
	reading_thread = gf_th_new("ISO reading thread");
	reader.mutex = gf_mx_new("ISO Segment");
	reader.do_run = GF_TRUE;
	/* we want to parse the first track */
	reader.track_id = 1;
	/* start the async parsing */
	gf_th_run(reading_thread, iso_progressive_read_thread, &reader);

	/* start the data reading */
	reader.data_size = BUFFER_BLOCK_SIZE;
	reader.data = (u8 *)gf_malloc(reader.data_size);
	reader.valid_data_size = 0;
	total_read_bytes = 0;
	while (1) {
		/* block the parser until we are done manipulating the data buffer */
		gf_mx_p(reader.mutex);

		if (reader.valid_data_size + BUFFER_BLOCK_SIZE > MAX_BUFFER_SIZE) {
			/* regulate the reader to limit the max buffer size and let some time to the parser to release buffer data */
			fprintf(stdout, "Buffer full (%d/%d)- waiting to read next data \r", reader.valid_data_size, reader.data_size);
			gf_mx_v(reader.mutex);
			//gf_sleep(10);
		} else {
			/* make sure we have enough space in the buffer to read the next bloc of data */
			if (reader.valid_data_size + BUFFER_BLOCK_SIZE > reader.data_size) {
				reader.data = (u8 *)gf_realloc(reader.data, reader.data_size + BUFFER_BLOCK_SIZE);
				reader.data_size += BUFFER_BLOCK_SIZE;
			}

			/* read the next bloc of data and update the data buffer url */
			read_bytes = fread(reader.data+reader.valid_data_size, 1, BUFFER_BLOCK_SIZE, input);
			total_read_bytes += read_bytes;
			fprintf(stdout, "Read "LLD" bytes of "LLD" bytes from input file %s (buffer status: %5d/%5d)\r", total_read_bytes, file_size, argv[1], reader.valid_data_size, reader.data_size);
			if (read_bytes) {
				reader.valid_data_size += read_bytes;
				sprintf(reader.data_url, "gmem://%d@%p", reader.valid_data_size, reader.data);
			} else {
				/* end of file we can quit */
				gf_mx_v(reader.mutex);
				break;
			}

			/* if the file is not yet opened (no movie), open it in progressive mode (to update its data later on) */
			if (!reader.movie) {
				/* let's initialize the parser */
				e = gf_isom_open_progressive(reader.data_url, 0, 0, &reader.movie, &missing_bytes);
				if (reader.movie) {
					gf_isom_set_single_moof_mode(reader.movie, GF_TRUE);
				}
				/* we can let parser try to work now */
				gf_mx_v(reader.mutex);

				if ((e == GF_OK || e == GF_ISOM_INCOMPLETE_FILE) && reader.movie) {
					/* nothing to do, this is normal */
				} else {
					fprintf(stdout, "Error opening fragmented mp4 in progressive mode: %s (missing "LLD" bytes)\n", gf_error_to_string(e), missing_bytes);
					ret = 1;
					goto exit;
				}
			} else {
				/* let inform the parser that the buffer has been updated with new data */
				e = gf_isom_refresh_fragmented(reader.movie, &missing_bytes, reader.data_url);

				/* we can let parser try to work now */
				gf_mx_v(reader.mutex);

				if (e != GF_OK && e != GF_ISOM_INCOMPLETE_FILE) {
					fprintf(stdout, "Error refreshing fragmented mp4: %s (missing "LLD" bytes)\n", gf_error_to_string(e), missing_bytes);
					ret = 1;
					goto exit;
				}
			}

			//gf_sleep(1);
		}
	}

exit:
	/* stop the parser */
	reader.do_run = GF_FALSE;
	gf_th_stop(reading_thread);

	/* clean structures */
	gf_th_del(reading_thread);
	gf_mx_del(reader.mutex);
	gf_free(reader.data);
	gf_isom_close(reader.movie);
	gf_fclose(input);
	gf_sys_close();

	return ret;
}
Code Example #13
File: webvtt.c Project: dragonlucian/gpac
GF_Err gf_webvtt_parser_parse(GF_WebVTTParser *parser, u32 duration)
{
	char            szLine[2048];
	char            *sOK;
	u32             len;
	GF_Err          e;
	Bool            do_parse = GF_TRUE;
	GF_WebVTTCue    *cue = NULL;
	u32             start = 0;
	u32             end = 0;
	char            *prevLine = NULL;
	char            *header = NULL;
	u32             header_len = 0;
	Bool            had_marks = GF_FALSE;

	if (!parser) return GF_BAD_PARAM;
	if (parser->is_srt) {
		parser->on_header_parsed(parser->user, gf_strdup("WEBVTT\n"));
	}
	while (do_parse) {
		sOK = gf_text_get_utf8_line(szLine, 2048, parser->vtt_in, parser->unicode_type);
		REM_TRAIL_MARKS(szLine, "\r\n")
		len = (u32) strlen(szLine);
		switch (parser->state) {
		case WEBVTT_PARSER_STATE_WAITING_SIGNATURE:
			if (!sOK || len < 6 || strnicmp(szLine, "WEBVTT", 6) || (len > 6 && szLine[6] != ' ' && szLine[6] != '\t')) {
				e = GF_CORRUPTED_DATA;
				parser->report_message(parser->user, e, "Bad WEBVTT file signature %s", szLine);
				goto exit;
			} else {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				header = gf_strdup(szLine);
				header_len = len;
				parser->state = WEBVTT_PARSER_STATE_WAITING_HEADER;
			}
			break; /* proceed to next line */
		case WEBVTT_PARSER_STATE_WAITING_HEADER:
			if (prevLine) {
				u32 prev_len = (u32) strlen(prevLine);
				header = (char *)gf_realloc(header, header_len + prev_len + 1);
				strcpy(header+header_len,prevLine);
				header_len += prev_len;
				gf_free(prevLine);
				prevLine = NULL;
			}
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->on_header_parsed(parser->user, header);
					/* continue to the next state without breaking */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* no break, continue to the next state*/
				} else {
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					break; /* proceed to next line */
				}
			} else {
				parser->on_header_parsed(parser->user, header);
				if (!sOK) {
					/* end of file, parsing is done */
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line means end of header */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					/* no break, continue to the next state*/
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE:
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* continue to the next state without breaking */
				} else {
					/* discard the previous line */
					/* should we do something with it ? callback ?*/
					if (prevLine) {
						gf_free(prevLine);
						prevLine = NULL;
					}
					/* save this new line */
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					/* stay in the same state */
					break;
				}
			} else {
				/* discard the previous line */
				/* should we do something with it ? callback ?*/
				if (prevLine) {
					gf_free(prevLine);
					prevLine = NULL;
				}
				if (!sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* remove empty lines and stay in the same state */
					break;
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP:
			if (sOK && len) {
				if (cue == NULL) {
					cue   = gf_webvtt_cue_new();
				}
				if (prevLine) {
					gf_webvtt_cue_add_property(cue, WEBVTT_ID, prevLine, (u32) strlen(prevLine));
					gf_free(prevLine);
					prevLine = NULL;
				}
				e = gf_webvtt_parser_parse_timings_settings(parser, cue, szLine, len);
				if (e) {
					if (cue) gf_webvtt_cue_del(cue);
					cue = NULL;
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
				} else {
					start = (u32)gf_webvtt_timestamp_get(&cue->start);
					end   = (u32)gf_webvtt_timestamp_get(&cue->end);
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD;
				}
			} else {
				/* not possible */
				assert(0);
			}
			break;
		case WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD:
			if (sOK && len) {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				gf_webvtt_cue_add_property(cue, WEBVTT_PAYLOAD, szLine, len);
				/* remain in the same state as a cue payload can have multiple lines */
				break;
			} else {
				/* end of the current cue */
				gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
				cue = NULL;

				gf_set_progress("Importing WebVTT", gf_ftell(parser->vtt_in), parser->file_size);
				if ((duration && (end >= duration)) || !sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line, move to next cue */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					break;
				}
			}
		}
		if (duration && (start >= duration)) {
			do_parse = GF_FALSE;
			break;
		}
	}


	/* no more cues to come, flush everything */
	if (cue) {
		gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
		cue = NULL;
	}
	while (gf_list_count(parser->samples) > 0) {
		GF_WebVTTSample *sample = (GF_WebVTTSample *)gf_list_get(parser->samples, 0);
		parser->last_duration = sample->end - sample->start;
		gf_list_rem(parser->samples, 0);
		parser->on_sample_parsed(parser->user, sample);
	}
	gf_set_progress("Importing WebVTT", parser->file_size, parser->file_size);
	e = GF_OK;
exit:
	if (cue) gf_webvtt_cue_del(cue);
	if (prevLine) gf_free(prevLine);
	if (header) gf_free(header);
	return e;
}
Code Example #14
File: filestreamer.c Project: Brilon314/gpac
static GF_Err gf_isom_streamer_setup_sdp(GF_ISOMRTPStreamer *streamer, char*sdpfilename, char **out_sdp_buffer)
{
	GF_RTPTrack *track;
	FILE *sdp_out;
	char filename[GF_MAX_PATH];
	char sdpLine[20000];
	u32 t, count;
	u8 *payload_type;

	strcpy(filename, sdpfilename ? sdpfilename : "videosession.sdp");
	sdp_out = gf_fopen(filename, "wt");
	if (!sdp_out) return GF_IO_ERR;

	if (!out_sdp_buffer) {
		sprintf(sdpLine, "v=0");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "o=MP4Streamer 3357474383 1148485440000 IN IP%d %s", gf_net_is_ipv6(streamer->dest_ip) ? 6 : 4, streamer->dest_ip);
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "s=livesession");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "i=This is an MP4 time-sliced Streaming demo");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "u=http://gpac.sourceforge.net");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "e=admin@");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "c=IN IP%d %s", gf_net_is_ipv6(streamer->dest_ip) ? 6 : 4, streamer->dest_ip);
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "t=0 0");
		fprintf(sdp_out, "%s\n", sdpLine);
		sprintf(sdpLine, "a=x-copyright: Streamed with GPAC (C)2000-200X - http://gpac.sourceforge.net");
		fprintf(sdp_out, "%s\n", sdpLine);
		if (streamer->base_track)
		{
			sprintf(sdpLine, "a=group:DDP L%d", streamer->base_track);
			fprintf(sdp_out, "%s", sdpLine);
			count = gf_isom_get_track_count(streamer->isom);
			for (t = 0; t < count; t++)
			{
				if (gf_isom_has_track_reference(streamer->isom, t+1, GF_ISOM_REF_BASE, gf_isom_get_track_id(streamer->isom, streamer->base_track)))
				{
					sprintf(sdpLine, " L%d", t+1);
					fprintf(sdp_out, "%s", sdpLine);
				}
			}
			fprintf(sdp_out, "\n");
		}
	}

	/*prepare array of payload type*/
	count = gf_isom_get_track_count(streamer->isom);
	payload_type = (u8 *)gf_malloc(count * sizeof(u8));
	track = streamer->stream;
	while (track) {
		payload_type[track->track_num-1] = gf_rtp_streamer_get_payload_type(track->rtp);
		track = track->next;
	}


	track = streamer->stream;
	while (track) {
		char *sdp_media=NULL;
		const char *KMS = NULL;
		char *dsi = NULL;
		u32 w, h;
		u32 dsi_len = 0;
		GF_DecoderConfig *dcd;
		//use inspect mode so that we don't aggregate xPS from the base in the enhancement ESD
		gf_isom_set_nalu_extract_mode(streamer->isom, track->track_num, GF_ISOM_NALU_EXTRACT_INSPECT);
		dcd = gf_isom_get_decoder_config(streamer->isom, track->track_num, 1);

		if (dcd && dcd->decoderSpecificInfo) {
			dsi = dcd->decoderSpecificInfo->data;
			dsi_len = dcd->decoderSpecificInfo->dataLength;
		}
		w = h = 0;
		if (gf_isom_get_media_type(streamer->isom, track->track_num) == GF_ISOM_MEDIA_VISUAL) {
			gf_isom_get_visual_info(streamer->isom, track->track_num, 1, &w, &h);
		}

		gf_isom_get_ismacryp_info(streamer->isom, track->track_num, 1, NULL, NULL, NULL, NULL, &KMS, NULL, NULL, NULL);

		/*TODO retrieve DIMS content encoding from track to set the flags */
		gf_rtp_streamer_append_sdp_extended(track->rtp, gf_isom_get_track_id(streamer->isom, track->track_num), dsi, dsi_len, streamer->isom, track->track_num, (char *)KMS, w, h, &sdp_media);
		if (streamer->base_track)
			gf_rtp_streamer_append_sdp_decoding_dependency(streamer->isom, track->track_num, payload_type, &sdp_media);
		if (sdp_media) {
			fprintf(sdp_out, "%s", sdp_media);
			gf_free(sdp_media);
		}

		if (dcd) gf_odf_desc_del((GF_Descriptor *)dcd);

		track = track->next;
	}
	fprintf(sdp_out, "\n");

	gf_fclose(sdp_out);
	if (out_sdp_buffer) {
		u64 size;
		sdp_out = gf_fopen(filename, "r");
		if (!sdp_out) {
			gf_free(payload_type);
			return GF_IO_ERR;
		}
		gf_fseek(sdp_out, 0, SEEK_END);
		size = gf_ftell(sdp_out);
		gf_fseek(sdp_out, 0, SEEK_SET);
		if (*out_sdp_buffer) gf_free(*out_sdp_buffer);
		*out_sdp_buffer = gf_malloc(sizeof(char)*(size_t)(size+1));
		size = fread(*out_sdp_buffer, 1, (size_t)size, sdp_out);
		gf_fclose(sdp_out);
		(*out_sdp_buffer)[size]=0;
	}

	gf_free(payload_type);
	return GF_OK;
}
Code Example #15
File: main.c Project: ARSekkat/gpac
int main(int argc, char **argv)
{
	/********************/
	/*   declarations   */
	/********************/
	char *input, *output, tmpstr[GF_MAX_PATH];
	GF_ISOFile *isom_file_in;
	GF_MediaImporter import;
	AdobeHDSCtx ctx;
	GF_Err e;
	u32 i;

	/*****************/
	/*   gpac init   */
	/*****************/
	gf_sys_init(GF_MemTrackerNone);
	gf_log_set_tool_level(GF_LOG_ALL, GF_LOG_WARNING);

	/***********************/
	/*   initialisations   */
	/***********************/
	input = NULL;
	output = NULL;
	isom_file_in = NULL;
	memset(&import, 0, sizeof(GF_MediaImporter));
	e = GF_OK;
	memset(&ctx, 0, sizeof(ctx));

	ctx.curr_time = 0;
	ctx.segnum = 1;

	/*********************************************/
	/*   parse arguments and build HDS context   */
	/*********************************************/
	if (GF_OK != parse_args(argc, argv, &input, &output, &ctx.curr_time, &ctx.segnum)) {
		usage(argv[0]);
		goto exit;
	}

	ctx.multirate_manifest = adobe_alloc_multirate_manifest(output);

#if 0 /*'moov' conversion tests*/
	{
		char metamoov64[GF_MAX_PATH];
		u32 metamoov64_len;
		unsigned char metamoov[GF_MAX_PATH];
		u32 metamoov_len=GF_MAX_PATH;
		FILE *f = gf_fopen("metamoov64"/*input*/, "rt");
		gf_fseek(f, 0, SEEK_END);
		metamoov64_len = (u32)gf_ftell(f);
		gf_fseek(f, 0, SEEK_SET);
		fread(metamoov64, metamoov64_len, 1, f);
		metamoov_len = gf_base64_decode(metamoov64, metamoov64_len, metamoov, metamoov_len);
		gf_fclose(f);
		f = gf_fopen("metamoov", "wb");
		fwrite(metamoov, metamoov_len, 1, f);
		gf_fclose(f);
		return 0;
	}
#endif

#if 0 /*'abst'conversion tests*/
	{
		char bootstrap64[GF_MAX_PATH];
		u32 bootstrap64_len;
		unsigned char bootstrap[GF_MAX_PATH];
		u32 bootstrap_len=GF_MAX_PATH;
		GF_AdobeBootstrapInfoBox *abst = (GF_AdobeBootstrapInfoBox *)abst_New();
		GF_BitStream *bs;
#if 1 //64
		FILE *f = gf_fopen("bootstrap64"/*input*/, "rt");
		gf_fseek(f, 0, SEEK_END);
		bootstrap64_len = (u32)gf_ftell(f);
		gf_fseek(f, 0, SEEK_SET);
		fread(bootstrap64, bootstrap64_len, 1, f);
		bootstrap_len = gf_base64_decode(bootstrap64, bootstrap64_len, bootstrap, bootstrap_len);
#else //binary bootstrap
		FILE *f = gf_fopen("bootstrap.bin"/*input*/, "rb");
		gf_fseek(f, 0, SEEK_END);
		bootstrap_len = (u32)gf_ftell(f);
		gf_fseek(f, 0, SEEK_SET);
		fread(bootstrap, bootstrap_len, 1, f);
#endif
		bs = gf_bs_new(bootstrap+8, bootstrap_len-8, GF_BITSTREAM_READ);
		abst->size = bootstrap[2]*256+bootstrap[3];
		assert(abst->size<GF_MAX_PATH);
		abst_Read((GF_Box*)abst, bs);
		gf_bs_del(bs);
		//then rewrite with just one 'afrt'
		memset(bootstrap, 0, bootstrap_len);
		bs = gf_bs_new(bootstrap, bootstrap_len, GF_BITSTREAM_WRITE);
		abst_Write((GF_Box*)abst, bs);
		bootstrap_len = (u32)gf_bs_get_position(bs);
		gf_bs_del(bs);
		gf_fclose(f);
		f = gf_fopen("bootstrap", "wt");
		bootstrap64_len = gf_base64_encode(bootstrap, bootstrap_len, bootstrap64, GF_MAX_PATH);
		fwrite(bootstrap64, bootstrap64_len, 1, f);
		fprintf(f, "\n\n");
		abst_dump((GF_Box*)abst, f);
		gf_fclose(f);
		abst_del((GF_Box*)abst);
		return 0;
	}
#endif

	/*****************/
	/*   main loop   */
	/*****************/
	import.trackID = 0;
	import.in_name = input;
	import.flags = GF_IMPORT_PROBE_ONLY;

	//create output or open when recovering from a saved state
	sprintf(tmpstr, "%s_import.mp4", input);
	isom_file_in = gf_isom_open(tmpstr, GF_ISOM_WRITE_EDIT, NULL);
	if (!isom_file_in) {
		fprintf(stderr, "Error opening output file %s: %s\n", tmpstr, gf_error_to_string(e));
		assert(0);
		goto exit;
	}
	import.dest = isom_file_in;

	//probe input
	e = gf_media_import(&import);
	if (e) {
		fprintf(stderr, "Error while importing input file %s: %s\n", input, gf_error_to_string(e));
		assert(0);
		goto exit;
	}

	//import input data
	import.flags = 0;
	for (i=0; i<import.nb_tracks; i++) {
		import.trackID = import.tk_info[i].track_num;
		e = gf_media_import(&import);
		if (e) {
			fprintf(stderr, "Error while importing track number %u, input file %s: %s\n", import.trackID, input, gf_error_to_string(e));
			assert(0);
			goto exit;
		}
	}

	//Adobe specific stuff
	e = adobize_segment(isom_file_in, &ctx);
	if (e) {
		fprintf(stderr, "Couldn't turn the ISOM fragmented file into an Adobe f4v segment: %s\n", gf_error_to_string(e));
		assert(0);
		goto exit;
	}

	//interleave data and remove imported file
	//FIXME: set multiple fragments:
	sprintf(tmpstr, "%s_HD_100_Seg%u-Frag1", output, ctx.segnum); //FIXME: "HD", "100" and fragnum: pass as arg
	//e = gf_media_fragment_file(isom_file_in, tmpstr, 1.0);
	e = gf_media_fragment_file(isom_file_in, tmpstr, 1.0+gf_isom_get_duration(isom_file_in)/gf_isom_get_timescale(isom_file_in));
	if (e) {
		fprintf(stderr, "Error while fragmenting file to output %s: %s\n", output, gf_error_to_string(e));
		assert(0);
		goto exit;
	}
	gf_isom_delete(isom_file_in);
	isom_file_in = NULL;

	e = adobe_gen_multirate_manifest(ctx.multirate_manifest, ctx.bootstrap, ctx.bootstrap_size);
	if (e) {
		fprintf(stderr, "Couldn't generate Adobe f4m manifest: %s\n", gf_error_to_string(e));
		assert(0);
		goto exit;
	}

exit:
	//delete intermediate mp4 file
	if (isom_file_in)
		gf_isom_delete(isom_file_in);

	if (ctx.multirate_manifest)
		adobe_free_multirate_manifest(ctx.multirate_manifest);

	if (ctx.bootstrap) {
		gf_free(ctx.bootstrap);
		//ctx.bootstrap = NULL;
		//ctx.bootstrap_size = 0;
	}

	gf_sys_close();

	return !e ? 0 : 1;
}
Code Example #16
File: intrinsics.c Project: ChaosJohn/gcc
size_t
PREFIX(ftell) (int * unit)
{
  return gf_ftell (*unit);
}
Code Example #17
File: intrinsics.c Project: ChaosJohn/gcc
GFC_IO_INT
PREFIX(ftell2) (int * unit)
{
  return gf_ftell (*unit);
}
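Code Examples #16 and #17 come from GCC's libgfortran, not from gpac: the gf_ftell they wrap operates on a Fortran I/O unit number, while the gf_ftell used everywhere else in this listing is GPAC's wrapper around the stdio handle returned by gf_fopen. The comment block below summarizes the difference; the signatures are paraphrased from the snippets in this listing and are assumptions, not verbatim declarations.

/* The two gf_ftell functions in this listing are unrelated; the shapes below
 * are paraphrased for illustration only (assumptions, not verbatim):
 *
 *   libgfortran (Code Examples #16-#17):  gf_ftell(int unit)    current offset of a Fortran I/O unit
 *   GPAC (all other examples):            gf_ftell(FILE *fp)    byte offset of a file opened with gf_fopen
 */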
Code Example #18
File: ac3_in.c Project: Acidburn0zzz/gpac
static GF_Err AC3_ChannelGetSLP(GF_InputService *plug, LPNETCHANNEL channel, char **out_data_ptr, u32 *out_data_size, GF_SLHeader *out_sl_hdr, Bool *sl_compressed, GF_Err *out_reception_status, Bool *is_new_data)
{
	u64 pos, start_from;
	Bool sync;
	GF_BitStream *bs;
	GF_AC3Header hdr;
	AC3Reader *read = (AC3Reader*)plug->priv;

	*out_reception_status = GF_OK;
	*sl_compressed = GF_FALSE;
	*is_new_data = GF_FALSE;
	memset(&hdr, 0, sizeof(GF_AC3Header));

	memset(&read->sl_hdr, 0, sizeof(GF_SLHeader));
	read->sl_hdr.randomAccessPointFlag = 1;
	read->sl_hdr.compositionTimeStampFlag = 1;

	if (read->ch != channel) return GF_STREAM_NOT_FOUND;

	/*fetching es data*/
	if (read->done) {
		*out_reception_status = GF_EOS;
		return GF_OK;
	}

	if (!read->data) {
		if (!read->stream) {
			*out_data_ptr = NULL;
			*out_data_size = 0;
			return GF_OK;
		}
		bs = gf_bs_from_file(read->stream, GF_BITSTREAM_READ);
		*is_new_data = GF_TRUE;

fetch_next:
		pos = gf_ftell(read->stream);
		sync = gf_ac3_parser_bs(bs, &hdr, GF_FALSE);
		if (!sync) {
			gf_bs_del(bs);
			if (!read->dnload) {
				*out_reception_status = GF_EOS;
				read->done = GF_TRUE;
			} else {
				gf_fseek(read->stream, pos, SEEK_SET);
				*out_reception_status = GF_OK;
			}
			return GF_OK;
		}

		if (!hdr.framesize) {
			gf_bs_del(bs);
			*out_reception_status = GF_EOS;
			read->done = GF_TRUE;
			return GF_OK;
		}
		read->data_size = hdr.framesize;
		read->nb_samp = 1536;
		/*we're seeking*/
		if (read->start_range && read->duration) {
			start_from = (u32) (read->start_range * read->sample_rate);
			if (read->current_time + read->nb_samp < start_from) {
				read->current_time += read->nb_samp;
				goto fetch_next;
			} else {
				read->start_range = 0;
			}
		}

		read->sl_hdr.compositionTimeStamp = read->current_time;

		read->data = (unsigned char*)gf_malloc(sizeof(char) * (read->data_size+read->pad_bytes));
		gf_bs_read_data(bs, (char *) read->data, read->data_size);
		if (read->pad_bytes) memset(read->data + read->data_size, 0, sizeof(char) * read->pad_bytes);
		gf_bs_del(bs);
	}
	*out_sl_hdr = read->sl_hdr;
	*out_data_ptr =(char *) read->data;
	*out_data_size = read->data_size;
	return GF_OK;
}
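The reader above records the position with gf_ftell before probing for the next AC-3 frame so it can gf_fseek back when the data is not yet complete. The function below is a stripped-down sketch of that save/attempt/rewind idiom; it is not a gpac API and only mirrors the pos handling shown above.

/* Sketch, not a gpac API: remember the offset before a speculative read and
 * restore it when the read comes up short, as AC3_ChannelGetSLP does above. */
static Bool read_block_or_rewind(FILE *stream, char *dst, u32 block_size)
{
	u64 pos = gf_ftell(stream);	/*save the offset before the attempt*/
	if (fread(dst, 1, block_size, stream) == block_size) return GF_TRUE;
	gf_fseek(stream, pos, SEEK_SET);	/*partial read: restore the offset*/
	return GF_FALSE;
}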
Code Example #19
File: mpeg4_textures.c Project: Brilon314/gpac
static void imagetexture_update(GF_TextureHandler *txh)
{
	if (gf_node_get_tag(txh->owner)!=TAG_MPEG4_CacheTexture) {
		MFURL url = ((M_ImageTexture *) txh->owner)->url;

		/*setup texture if needed*/
		if (!txh->is_open && url.count) {
			gf_sc_texture_play(txh, &url);
		}
		gf_sc_texture_update_frame(txh, 0);

		if (
		    /*URL is present but not opened - redraw till fetch*/
		    /* (txh->stream && !txh->tx_io) && */
		    /*image has been updated*/
		    txh->needs_refresh) {
			/*mark all subtrees using this image as dirty*/
			gf_node_dirty_parents(txh->owner);
			gf_sc_invalidate(txh->compositor, NULL);
		}
		return;
	}
	/*cache texture case*/
	else {
		M_CacheTexture *ct = (M_CacheTexture *) txh->owner;

		/*decode cacheTexture data */
		if ((ct->data || ct->image.buffer) && !txh->data) {
#ifndef GPAC_DISABLE_AV_PARSERS
			u32 out_size;
			GF_Err e;

			/*BT/XMT playback: load to memory*/
			if (ct->image.buffer) {
				char *par = (char *) gf_scene_get_service_url( gf_node_get_graph(txh->owner ) );
				char *src_url = gf_url_concatenate(par, ct->image.buffer);
				FILE *test = gf_fopen( src_url ? src_url : ct->image.buffer, "rb");
				if (test) {
					fseek(test, 0, SEEK_END);
					ct->data_len = (u32) gf_ftell(test);
					ct->data = gf_malloc(sizeof(char)*ct->data_len);
					fseek(test, 0, SEEK_SET);
					if (ct->data_len != fread(ct->data, 1, ct->data_len, test)) {
						GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: IO err\n", src_url ? src_url : ct->image.buffer ) );
						gf_free(ct->data);
						ct->data = NULL;
						ct->data_len = 0;
					}
					gf_fclose(test);
				} else {
					GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: not found\n", src_url ? src_url : ct->image.buffer ) );
				}
				ct->image.buffer = NULL;
				if (src_url) gf_free(src_url);
			}

			/*BIFS decoded playback*/
			switch (ct->objectTypeIndication) {
			case GPAC_OTI_IMAGE_JPEG:
				out_size = 0;
				e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size, 3);
				if (e==GF_BUFFER_TOO_SMALL) {
					u32 BPP;
					txh->data = gf_malloc(sizeof(char) * out_size);
					if (txh->pixelformat==GF_PIXEL_GREYSCALE) BPP = 1;
					else BPP = 3;

					e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size, BPP);
					if (e==GF_OK) {
						gf_sc_texture_allocate(txh);
						gf_sc_texture_set_data(txh);
						txh->needs_refresh = 1;
						txh->stride = out_size / txh->height;
					}
				}
				break;
			case GPAC_OTI_IMAGE_PNG:
				out_size = 0;
				e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size);
				if (e==GF_BUFFER_TOO_SMALL) {
					txh->data = gf_malloc(sizeof(char) * out_size);
					e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size);
					if (e==GF_OK) {
						gf_sc_texture_allocate(txh);
						gf_sc_texture_set_data(txh);
						txh->needs_refresh = 1;
						txh->stride = out_size / txh->height;
					}
				}
				break;
			}

#endif // GPAC_DISABLE_AV_PARSERS

			/*cacheURL is specified, store the image*/
			if (ct->cacheURL.buffer) {
				u32 i;
				u8 hash[20];
				FILE *cached_texture;
				char szExtractName[GF_MAX_PATH], section[64], *opt, *src_url;
				opt = (char *) gf_cfg_get_key(txh->compositor->user->config, "General", "CacheDirectory");
				if (opt) {
					strcpy(szExtractName, opt);
				} else {
					opt = gf_get_default_cache_directory();
					strcpy(szExtractName, opt);
					gf_free(opt);
				}
				strcat(szExtractName, "/");
				src_url = (char *) gf_scene_get_service_url( gf_node_get_graph(txh->owner ) );

				gf_sha1_csum((u8 *)src_url, (u32) strlen(src_url), hash);
				for (i=0; i<20; i++) {
					char t[3];
					t[2] = 0;
					sprintf(t, "%02X", hash[i]);
					strcat(szExtractName, t);
				}
				strcat(szExtractName, "_");

				strcat(szExtractName, ct->cacheURL.buffer);
				cached_texture = gf_fopen(szExtractName, "wb");
				if (cached_texture) {
					gf_fwrite(ct->data, 1, ct->data_len, cached_texture);
					gf_fclose(cached_texture);
				}

				/*and write cache info*/
				if (ct->expirationDate!=0) {
					sprintf(section, "@cache=%p", ct);
					gf_cfg_set_key(txh->compositor->user->config, section, "serviceURL", src_url);
					gf_cfg_set_key(txh->compositor->user->config, section, "cacheFile", szExtractName);
					gf_cfg_set_key(txh->compositor->user->config, section, "cacheName", ct->cacheURL.buffer);

					if (ct->expirationDate>0) {
						char exp[50];
						u32 sec, frac;
						gf_net_get_ntp(&sec, &frac);
						sec += ct->expirationDate;
						sprintf(exp, "%u", sec);
						gf_cfg_set_key(txh->compositor->user->config, section, "expireAfterNTP", exp);
					} else {
						gf_cfg_set_key(txh->compositor->user->config, section, "expireAfterNTP", "0");
					}
				}
			}

			/*done with image, destroy buffer*/
			if (ct->data) gf_free(ct->data);
			ct->data = NULL;
			ct->data_len = 0;
		}
	}
}
Code Example #20
File: live.c Project: drakeguan/gpac
u32 grab_live_m2ts(const char *grab_m2ts, const char *grab_ifce, const char *outName)
{
	char data[0x80000];
	u32 check = 50;
	u64 nb_pck;
	Bool first_run, is_rtp;
	FILE *output;
#ifndef GPAC_DISABLE_STREAMING
	u16 seq_num;
	GF_RTPReorder *ch = NULL;
#endif
	GF_Socket *sock;
	GF_Err e = gf_m2ts_get_socket(grab_m2ts, grab_ifce, 0x80000, &sock);

	if (e) {
		fprintf(stderr, "Cannot open %s: %s\n", grab_m2ts, gf_error_to_string(e));
		return 1;
	}
	output = gf_fopen(outName, "wb");
	if (!output) {
		fprintf(stderr, "Cannot open %s: check path and rights\n", outName);
		gf_sk_del(sock);
		return 1;
	}

	fprintf(stderr, "Dumping %s stream to %s - press q to abort\n", grab_m2ts, outName);

	first_run = 1;
	is_rtp = 0;
	while (1) {
		u32 size = 0;

		check--;
		if (!check) {
			if (gf_prompt_has_input()) {
				char c = (char) gf_prompt_get_char();
				if (c=='q') break;
			}
			check = 50;
		}

		/*m2ts chunks by chunks*/
		e = gf_sk_receive(sock, data, 0x40000, 0, &size);
		if (!size || e) {
			gf_sleep(1);
			continue;
		}
		if (first_run) {
			first_run = 0;
			/*FIXME: we assume only simple RTP packaging (no CSRC nor extensions)*/
			if ((data[0] != 0x47) && ((data[1] & 0x7F) == 33) ) {
				is_rtp = 1;
#ifndef GPAC_DISABLE_STREAMING
				ch = gf_rtp_reorderer_new(100, 500);
#endif
			}
		}
		/*process chunk*/
		if (is_rtp) {
#ifndef GPAC_DISABLE_STREAMING
			char *pck;
			seq_num = ((data[2] << 8) & 0xFF00) | (data[3] & 0xFF);
			gf_rtp_reorderer_add(ch, (void *) data, size, seq_num);

			pck = (char *) gf_rtp_reorderer_get(ch, &size);
			if (pck) {
				fwrite(pck+12, size-12, 1, output);
				gf_free(pck);
			}
#else
			fwrite(data+12, size-12, 1, output);
#endif
		} else {
			fwrite(data, size, 1, output);
		}
	}
	nb_pck = gf_ftell(output);
	nb_pck /= 188;
	fprintf(stderr, "Captured "LLU" TS packets\n", nb_pck );
	gf_fclose(output);
	gf_sk_del(sock);

#ifndef GPAC_DISABLE_STREAMING
	if (ch)
		gf_rtp_reorderer_del(ch);
#endif
	return 0;
}
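Because the capture loop above only appends to the output file, gf_ftell on the write stream equals the number of bytes written so far, and dividing by 188 (the fixed MPEG-2 TS packet size) gives the packet count printed at the end. A one-line helper for that calculation, as a sketch:

/* Sketch, not a gpac API: bytes written so far divided by the 188-byte
 * MPEG-2 TS packet size. Only valid while writes are strictly sequential. */
static u64 written_ts_packets(FILE *output)
{
	return gf_ftell(output) / 188;
}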