Example #1
static int ff_url_read(void *h, unsigned char *buf, int size)
{
	u32 retry = 10;
	u32 read;
	int full_size;
	FFDemux *ffd = (FFDemux *)h;

	full_size = 0;
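	/*serve bytes already held in the demuxer's internal buffer before pulling from the download session*/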
	if (ffd->buffer_used) {
		if (ffd->buffer_used >= (u32) size) {
			memcpy(buf, ffd->buffer, sizeof(char)*size);
			ffd->buffer_used-=size;
			memmove(ffd->buffer, ffd->buffer+size, sizeof(char)*ffd->buffer_used);
#ifdef FFMPEG_DUMP_REMOTE
			if (ffd->outdbg) gf_fwrite(buf, size, 1, ffd->outdbg);
#endif
			return size;
		}
		memcpy(buf, ffd->buffer, sizeof(char)*ffd->buffer_used);
		full_size += ffd->buffer_used;
		buf += ffd->buffer_used;
		size -= ffd->buffer_used;
		ffd->buffer_used = 0;
	}

	while (size) {
		GF_Err e = gf_dm_sess_fetch_data(ffd->dnload, buf, size, &read);
		if (e==GF_EOS) break;
		/*we're sync!!*/
		if (e==GF_IP_NETWORK_EMPTY) {
			if (!retry) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] timeout fetching bytes from network\n") );
				return -1;
			}
			retry --;
			gf_sleep(100);
			continue;
		}
		if (e) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] error fetching bytes from network: %s\n", gf_error_to_string(e) ) );
			return -1;
		}
		full_size += read;
		if (read==size) break;
		size -= read;
		buf += read;
	}
#ifdef FFMPEG_DUMP_REMOTE
	if (ffd->outdbg) gf_fwrite(ffd->buffer, full_size, 1, ffd->outdbg);
#endif
	return full_size ? (int) full_size : -1;
}
Example #2
File: meta.c Project: Brilon314/gpac
GF_EXPORT
GF_Err gf_isom_extract_meta_xml(GF_ISOFile *file, Bool root_meta, u32 track_num, char *outName, Bool *is_binary)
{
	u32 i, count;
	FILE *didfile;
	GF_XMLBox *xml = NULL;
	GF_MetaBox *meta = gf_isom_get_meta(file, root_meta, track_num);
	if (!meta) return GF_BAD_PARAM;

	/*Find XMLBox*/
	count = gf_list_count(meta->other_boxes);
	for (i = 0; i <count; i++) {
		GF_Box *a = (GF_Box *)gf_list_get(meta->other_boxes, i);
		if ((a->type == GF_ISOM_BOX_TYPE_XML) || (a->type == GF_ISOM_BOX_TYPE_BXML) ) {
			xml = (GF_XMLBox *)a;
			break;
		}
	}
	if (!xml || !xml->xml || !xml->xml_length) return GF_BAD_PARAM;

	didfile = gf_fopen(outName, "wb");
	if (!didfile) return GF_IO_ERR;
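	/*dump the raw (possibly binary) XML payload of the box to the output file*/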
	gf_fwrite(xml->xml, xml->xml_length, 1, didfile);
	gf_fclose(didfile);

	if (is_binary) *is_binary = (xml->type==GF_ISOM_BOX_TYPE_BXML) ? 1 : 0;
	return GF_OK;
}
Example #3
File: main.c Project: ARSekkat/gpac
void on_m2ts_event(GF_M2TS_Demuxer *ts, u32 evt_type, void *par)
{
	GF_M2TS_PES_PCK *pck;
	switch (evt_type) {
	case GF_M2TS_EVT_PAT_FOUND:
		fprintf(stdout, "Service connected (PAT found)\n");
		break;
	case GF_M2TS_EVT_PAT_REPEAT:
		has_seen_pat = 1;
		break;
	case GF_M2TS_EVT_PAT_UPDATE:
		fprintf(stdout, "Service connected (PAT found)\n");
		break;
	case GF_M2TS_EVT_PMT_FOUND:
		fprintf(stdout, "Program list found - %d streams\n", gf_list_count( ((GF_M2TS_Program*)par)->streams) );
		break;
	case GF_M2TS_EVT_PMT_UPDATE:
		fprintf(stdout, "Program list updated - %d streams\n", gf_list_count( ((GF_M2TS_Program*)par)->streams) );
		break;
	case GF_M2TS_EVT_SDT_FOUND:
		fprintf(stdout, "Program Description found - %d desc\n", gf_list_count(ts->SDTs) );
		break;
	case GF_M2TS_EVT_SDT_UPDATE:
		fprintf(stdout, "Program Description updated - %d desc\n", gf_list_count(ts->SDTs) );
		break;
	case GF_M2TS_EVT_PES_PCK:
		pck = par;
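		/*write the raw PES payload of the selected PID to the dump file*/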
		if (dest && (dump_pid == pck->stream->pid)) {
			gf_fwrite(pck->data, pck->data_len, 1, dest);
		}

		//fprintf(stdout, "PES(%d): DTS "LLD" PTS" LLD" RAP %d size %d\n", pck->stream->pid, pck->DTS, pck->PTS, pck->rap, pck->data_len);
		break;
	}
}
Example #4
static char *validator_create_snapshot(GF_Validator *validator)
{
	GF_Err e;
	GF_VideoSurface fb;
	GF_Terminal *term = validator->term;
	char *dumpname;

	dumpname = validator_get_snapshot_name(validator, validator->is_recording, validator->snapshot_number);

	e = gf_term_get_screen_buffer(term, &fb);
	if (e) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error dumping screen buffer %s\n", gf_error_to_string(e)));
	} else {
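		/*screen grab succeeded: PNG-encode the framebuffer and write it to the snapshot file*/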
		u32 dst_size = fb.width*fb.height*3;
		char *dst=gf_malloc(sizeof(char)*dst_size);

		e = gf_img_png_enc(fb.video_buffer, fb.width, fb.height, fb.pitch_y, fb.pixel_format, dst, &dst_size);
		if (e) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error encoding PNG %s\n", gf_error_to_string(e)));
		} else {
			FILE *png = gf_f64_open(dumpname, "wb");
			if (!png) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[Validator] Error writing file %s\n", dumpname));
			} else {
				gf_fwrite(dst, dst_size, 1, png);
				fclose(png);
				GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[Validator] Writing file %s\n", dumpname));
			}
		}
		if (dst) gf_free(dst);
		gf_term_release_screen_buffer(term, &fb);
	}
	validator->snapshot_number++;
	return dumpname;
}
Example #5
File: f4m.c Project: gorinje/gpac-svn
GF_Err adobe_gen_multirate_manifest(AdobeMultirate* am, char *bootstrap, size_t bootstrap_size)
{
	GF_Err e;
	u32 i;
#ifdef ADOBE_INLINED_BOOTSTRAP
	char bootstrap64[GF_MAX_PATH];
	u32 bootstrap64_len;
#endif

	fprintf(am->f, "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n");
	fprintf(am->f, "<manifest xmlns=\"http://ns.adobe.com/f4m/2.0\">\n");
	fprintf(am->f, "<id>%s</id>\n", am->id);
	fprintf(am->f, "<baseURL>%s</baseURL>\n", am->base_url);
	fprintf(am->f, "<streamType>live</streamType>\n");

	assert(am->streams);
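	/*one bootstrap (inlined or written to a .bootstrap file) and one <media> entry per stream*/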
	for (i=0; i<gf_list_count(am->streams); i++) {
		AdobeStream *as = gf_list_get(am->streams, i);
		assert(as);
#ifdef ADOBE_INLINED_BOOTSTRAP
		fprintf(am->f, "<bootstrapInfo profile=\"named\" id=\"boot_%s_%d\">\n", as->id, as->bitrate);
		bootstrap64_len = gf_base64_encode(bootstrap, bootstrap_size, bootstrap64, GF_MAX_PATH);
		fwrite(bootstrap64, bootstrap64_len, 1, am->f);
		if (bootstrap64_len >= GF_MAX_PATH) {
			fprintf(stderr, "Bootstrap may have been truncated for stream %s_%d.\n", as->id, as->bitrate);
			assert(0);
		}
		fprintf(am->f, "\n</bootstrapInfo>\n");
#else
		{
			char filename[GF_MAX_PATH];
			FILE *bstfile;
			sprintf(filename, "%s_%d.bootstrap", as->id, as->bitrate);
			bstfile = fopen(filename, "wb");
			gf_fwrite(bootstrap, bootstrap_size, 1, bstfile);
			fclose(bstfile);
		}
#endif
		e = adobe_gen_stream_manifest(as); 
		if (!e) { 
			if (!am->base_url && !as->base_url) 
				fprintf(stderr, "Warning: no base_url specified\n"); 

			fprintf(am->f, "<media href=\"%s_%s_%d.f4m\" bitrate=\"%d\"/>\n", am->id, as->id, as->bitrate, as->bitrate); 
		}
	}
	fprintf(am->f, "</manifest>\n");

	return GF_OK;
}
Example #6
File: main.c Project: Bevara/GPAC
void write_bmp(GF_VideoSurface *fb, char *rad_name, u32 img_num)
{
	char str[GF_MAX_PATH];
	BITMAPFILEHEADER fh;
	BITMAPINFOHEADER fi;
	FILE *fout;
	u32 j, i;
	char *ptr;

	if (img_num<10) {
		sprintf(str, "%s_00%d.bmp", rad_name, img_num);
	} else if (img_num<100) {
		sprintf(str, "%s_0%d.bmp", rad_name, img_num);
	} else {
		sprintf(str, "%s_%d.bmp", rad_name, img_num);
	}

	fout = fopen(str, "wb");
	if (!fout) return;

	memset(&fh, 0, sizeof(fh));
	fh.bfType = 19778;
	fh.bfOffBits = 14 + 40;

	memset(&fi, 0, sizeof(char)*40);
	fi.biSize = sizeof(char)*40;
	fi.biWidth = fb->width;
	fi.biHeight = fb->height;
	fi.biPlanes = 1;
	fi.biBitCount = 24;
	fi.biCompression = BI_RGB;
	fi.biSizeImage = fb->pitch * fb->height;

	/*NOT ALIGNED!!*/
	gf_fwrite(&fh.bfType, 2, 1, fout);
	gf_fwrite(&fh.bfSize, 4, 1, fout);
	gf_fwrite(&fh.bfReserved1, 2, 1, fout);
	gf_fwrite(&fh.bfReserved2, 2, 1, fout);
	gf_fwrite(&fh.bfOffBits, 4, 1, fout);

	gf_fwrite(&fi, 1, 40, fout);

	for (j=fb->height; j>0; j--) {
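		/*rows are written bottom-up, each pixel in BGR order as required by BMP*/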
		ptr = fb->video_buffer + (j-1)*fb->pitch;
		//gf_fwrite(ptr, 1, fb->width  * 3, fout);
		for (i=0; i<fb->width; i++) {
			fputc(ptr[2], fout);
			fputc(ptr[1], fout);
			fputc(ptr[0], fout);
			ptr+=3;
		}
	}

	fclose(fout);
}
Example #7
File: cache.c Project: olegloa/mp4box
GF_Err gf_cache_write_to_cache( const DownloadedCacheEntry entry, const GF_DownloadSession * sess, const char * data, const u32 size) {
	u32 read;
	CHECK_ENTRY;

	if (!data || (!entry->writeFilePtr && !entry->mem_storage) || sess != entry->write_session) {
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("Incorrect parameter : data=%p, writeFilePtr=%p mem_storage=%p at "__FILE__"\n", data, entry->writeFilePtr, entry->mem_storage));
		return GF_BAD_PARAM;
	}

	if (entry->memory_stored) {
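		/*memory cache: grow the buffer if needed, append the data and keep it null-terminated*/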
		if (entry->written_in_cache + size > entry->mem_allocated) {
			u32 new_size = MAX(entry->mem_allocated*2, entry->written_in_cache + size);
			entry->mem_storage = gf_realloc(entry->mem_storage, (new_size+2));
			entry->mem_allocated = new_size;
			sprintf(entry->cache_filename, "gmem://%d@%p", entry->contentLength, entry->mem_storage);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[CACHE] Reallocating memory cache to %d bytes\n", new_size));
		}
		memcpy(entry->mem_storage + entry->written_in_cache, data, size);
		entry->written_in_cache += size;
		memset(entry->mem_storage + entry->written_in_cache, 0, 2);
		sprintf(entry->cache_filename, "gmem://%d@%p", entry->written_in_cache, entry->mem_storage);

		GF_LOG(GF_LOG_INFO, GF_LOG_NETWORK, ("[CACHE] Writing %d bytes to cache\n", size));
		return GF_OK;
	}

	read = (u32) gf_fwrite(data, sizeof(char), size, entry->writeFilePtr);
	if (read > 0)
		entry->written_in_cache+= read;
	if (read != size) {
		/* Something bad happened */
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK,
		       ("[CACHE] Error while writing %d bytes of data to cache: has written only %d bytes.", size, read));
		gf_cache_close_write_cache(entry, sess, 0);
		gf_delete_file(entry->cache_filename);
		return GF_IO_ERR;
	}
	if (fflush(entry->writeFilePtr)) {
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK,
		       ("[CACHE] Error while flushing data bytes to cache file : %s.", entry->cache_filename));
		gf_cache_close_write_cache(entry, sess, 0);
		gf_delete_file(entry->cache_filename);
		return GF_IO_ERR;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[CACHE] Writing %d bytes to cache\n", size));
	return GF_OK;
}
Example #8
File: main.c Project: Bevara/GPAC
void write_raw(GF_VideoSurface *fb, char *rad_name, u32 img_num)
{
	char str[GF_MAX_PATH];
	FILE *fout;
	if (img_num<10) {
		sprintf(str, "%s_00%d.raw", rad_name, img_num);
	} else if (img_num<100) {
		sprintf(str, "%s_0%d.raw", rad_name, img_num);
	} else {
		sprintf(str, "%s_%d.raw", rad_name, img_num);
	}

	fout = fopen(str, "wb");
	if (!fout) return;
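	/*dump the whole framebuffer (height * pitch bytes) in a single write*/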
	gf_fwrite(fb->video_buffer , fb->height*fb->pitch, 1, fout);
	fclose(fout);
}
Example #9
static void validator_xvl_close(GF_Validator *validator)
{
	if (validator->xvl_parser) {
		/* writing the validation results */
		if (!validator->is_recording) {
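			/*the result file is named after the XVL file, with a -result.xml suffix*/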
			FILE *xvl_fp;
			char *xvl_content;
			char result_filename[GF_MAX_PATH];
			char *dot;
			xvl_content = gf_xml_dom_serialize(validator->xvl_node, 0);
			dot = strrchr(validator->xvl_filename, '.');
			dot[0] = 0;
			sprintf(result_filename, "%s-result.xml", validator->xvl_filename);
			dot[0] = '.';
			xvl_fp = gf_f64_open(result_filename, "wt");
			gf_fwrite(xvl_content, strlen(xvl_content), 1, xvl_fp);
			fclose(xvl_fp);
			gf_free(xvl_content);
		}
		gf_xml_dom_del(validator->xvl_parser);
		validator->xvl_parser = NULL;
		validator->xvl_filename = NULL;
	}
}
Example #10
File: load.c Project: fcsteagu/gpac-1
void isor_declare_objects(ISOMReader *read)
{
	GF_ObjectDescriptor *od;
	GF_ESD *esd;
	const char *tag;
	u32 i, count, ocr_es_id, tlen, base_track, j, track_id;
	Bool highest_stream;
	char *opt;
	Bool add_ps_lower = GF_TRUE;

	ocr_es_id = 0;
	opt = (char*) gf_modules_get_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)read->input, "ISOReader", "DeclareScalableXPS", "yes");
	} else if (!strcmp(opt, "no")) {
		add_ps_lower = GF_FALSE;
	}

	/*TODO check for alternate tracks*/
	count = gf_isom_get_track_count(read->mov);
	for (i=0; i<count; i++) {
		if (!gf_isom_is_track_enabled(read->mov, i+1)) continue;

		switch (gf_isom_get_media_type(read->mov, i+1)) {
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_TEXT:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_SUBPIC:
			break;
		default:
			continue;
		}

		/*we declare only the highest video track (i.e the track we play)*/
		highest_stream = GF_TRUE;
		track_id = gf_isom_get_track_id(read->mov, i+1);
		for (j = 0; j < count; j++) {
			if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) {
				highest_stream = GF_FALSE;
				break;
			}
		}
		if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream)
			continue;
		esd = gf_media_map_esd(read->mov, i+1);
		if (esd) {
			gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track);
			esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE;
			/*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/
			/*so we add by default the SPS/PPS of the lower layers to this esd*/
			if (esd->has_ref_base && add_ps_lower) {
				u32 count, refIndex, ref_track, num_sps, num_pps, t;
				GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				GF_AVCConfig *avccfg, *svccfg;

				count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL);
				for (refIndex = count; refIndex != 0; refIndex--) {
					gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track);
					avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1);
					svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1);
					if (avccfg) {
						num_sps = gf_list_count(avccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(avccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(avccfg);
					}
					if (svccfg) {
						num_sps = gf_list_count(svccfg->sequenceParameterSets);
						for (t = 0; t < num_sps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->sequenceParameterSets, sl, 0);
						}
						num_pps = gf_list_count(svccfg->pictureParameterSets);
						for (t = 0; t < num_pps; t++) {
							GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t);
							GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot));
							sl->id = slc->id;
							sl->size = slc->size;
							sl->data = (char*)gf_malloc(sizeof(char)*sl->size);
							memcpy(sl->data, slc->data, sizeof(char)*sl->size);
							gf_list_insert(cfg->pictureParameterSets, sl, 0);
						}
						gf_odf_avc_cfg_del(svccfg);
					}
				}

				if (esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data);
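				/*serialize the merged parameter sets back into the decoder specific info*/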
				gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
				gf_odf_avc_cfg_del(cfg);
			}

			od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
			od->service_ifce = read->input;
			od->objectDescriptorID = 0;
			if (!ocr_es_id) ocr_es_id = esd->ESID;
			esd->OCRESID = ocr_es_id;
			gf_list_add(od->ESDescriptors, esd);
			if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
				send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
			} else {
				gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
			}
		}
	}
	/*if cover art, extract it in cache*/
	if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) {
		const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_term_get_service_interface(read->service), "General", "CacheDirectory");
		if (cdir) {
			char szName[GF_MAX_PATH];
			const char *sep;
			FILE *t;
			sep = strrchr(gf_isom_get_filename(read->mov), '\\');
			if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/');
			if (!sep) sep = gf_isom_get_filename(read->mov);

			if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) {
				sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			} else {
				sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			}

			t = gf_f64_open(szName, "wb");

			if (t) {
				Bool isom_contains_video = GF_FALSE;

				/*write cover data*/
				assert(!(tlen & 0x80000000));
				gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t);
				fclose(t);

				/*don't display cover art when video is present*/
				for (i=0; i<gf_isom_get_track_count(read->mov); i++) {
					if (!gf_isom_is_track_enabled(read->mov, i+1))
						continue;
					if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) {
						isom_contains_video = GF_TRUE;
						break;
					}
				}

				if (!isom_contains_video) {
					od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
					od->service_ifce = read->input;
					od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID;
					od->URLString = gf_strdup(szName);
					if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
						send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
					} else {
						gf_term_add_media(read->service, (GF_Descriptor*)od, GF_TRUE);
					}
				}
			}
		}
	}
	if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
		send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL);
	} else {
		gf_term_add_media(read->service, NULL, GF_FALSE);
	}
}
Example #11
static void validator_xvs_close(GF_Validator *validator)
{
	if (validator->xvs_parser) {
		if (validator->is_recording) {
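			/*recording mode: store the scenario file name on the XVS node and save the updated XML*/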
			FILE *xvs_fp;
			char *xvs_content;
			char filename[100];
			GF_XMLAttribute *att;
			GF_XMLAttribute *att_file = NULL;
			u32 att_index = 0;
			while (1) {
				att = gf_list_get(validator->xvs_node->attributes, att_index);
				if (!att) {
					break;
				} else if (!strcmp(att->name, "file")) {
					att_file = att;
				}
				att_index++;
			}

			if (!att_file) {
				GF_SAFEALLOC(att, GF_XMLAttribute);
				att->name = gf_strdup("file");
				gf_list_add(validator->xvs_node->attributes, att);
			} else {
				att = att_file;
				if (att->value) gf_free(att->value);
			}
			sprintf(filename, "%s%c%s", validator->test_base, GF_PATH_SEPARATOR, validator->test_filename);
			att->value = gf_strdup(filename);
			xvs_content = gf_xml_dom_serialize(validator->xvs_node, 0);
			xvs_fp = gf_f64_open(validator->xvs_filename, "wt");
			gf_fwrite(xvs_content, strlen(xvs_content), 1, xvs_fp);
			fclose(xvs_fp);
			gf_free(xvs_content);
		} else {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_MODULE, ("[Validator] XVS Result : %s\n", (validator->xvs_result?"Success":"Failure")));
			if (validator->xvs_node_in_xvl) {
				GF_XMLAttribute *att;
				GF_XMLAttribute *att_result = NULL;
				u32 att_index = 0;
				while (1) {
					att = gf_list_get(validator->xvs_node_in_xvl->attributes, att_index);
					if (!att) {
						break;
					} else if (!strcmp(att->name, "result")) {
						att_result = att;
					}
					att_index++;
				}
				if (!att_result) {
					GF_SAFEALLOC(att_result, GF_XMLAttribute);
					att_result->name = gf_strdup("result");
					gf_list_add(validator->xvs_node_in_xvl->attributes, att_result);
				}
				if (att_result->value) gf_free(att_result->value);
				att_result->value = gf_strdup(validator->xvs_result ? "pass" : "fail");
			}
		}
		gf_xml_dom_del(validator->xvs_parser);
		validator->xvs_parser = NULL;
	}
	validator->xvs_node = NULL;
	validator->xvs_node_in_xvl = NULL;
	validator->xvs_filename = NULL;
	validator->test_filename = NULL;
	validator->ck = NULL;
	validator->xvs_event_index = 0;
	validator->snapshot_number = 0;
}
Example #12
File: loader_qt.c Project: indiereign/gpac
/*import cubic QTVR to mp4*/
GF_Err gf_sm_load_init_qt(GF_SceneLoader *load)
{
	u32 i, di, w, h, tk, nb_samp;
	Bool has_qtvr;
	GF_ISOSample *samp;
	GF_ISOFile *src;
	GF_StreamContext *st;
	GF_AUContext *au;
	GF_Command *com;
	M_Background *back;
	M_NavigationInfo *ni;
	M_Group *gr;
	GF_ODUpdate *odU;
	GF_SceneGraph *sg;
	GF_ObjectDescriptor *od;
	GF_ESD *esd;

	if (!load->ctx) return GF_NOT_SUPPORTED;

	src = gf_isom_open(load->fileName, GF_ISOM_OPEN_READ, NULL);
	if (!src) return gf_qt_report(load, GF_URL_ERROR, "Opening file %s failed", load->fileName);

	w = h = tk = 0;
	nb_samp = 0;

	has_qtvr = 0;
	for (i=0; i<gf_isom_get_track_count(src); i++) {
		switch (gf_isom_get_media_type(src, i+1)) {
		case GF_ISOM_MEDIA_VISUAL:
			if (gf_isom_get_media_subtype(src, i+1, 1) == GF_ISOM_BOX_TYPE_JPEG) {
				GF_GenericSampleDescription *udesc = gf_isom_get_generic_sample_description(src, i+1, 1);
				if ((udesc->width>w) || (udesc->height>h)) {
					w = udesc->width;
					h = udesc->height;
					tk = i+1;
					nb_samp = gf_isom_get_sample_count(src, i+1);
				}
				if (udesc->extension_buf) gf_free(udesc->extension_buf);
				gf_free(udesc);
			}
			break;
		case GF_ISOM_MEDIA_QTVR:
			has_qtvr = 1;
			break;
		}
	}
	if (!has_qtvr) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NOT_SUPPORTED, "QTVR not found - no conversion available for this QuickTime movie");
	}
	if (!tk) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NON_COMPLIANT_BITSTREAM, "No associated visual track with QTVR movie");
	}
	if (nb_samp!=6) {
		gf_isom_delete(src);
		return gf_qt_report(load, GF_NOT_SUPPORTED, "Movie %s doesn't look like a Cubic QTVR - sorry...", load->fileName);
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_PARSER, ("QT: Importing Cubic QTVR Movie"));

	/*create scene*/
	sg = load->ctx->scene_graph;
	gr = (M_Group *) gf_node_new(sg, TAG_MPEG4_Group);
	gf_node_register((GF_Node *)gr, NULL);
	st = gf_sm_stream_new(load->ctx, 1, GF_STREAM_SCENE, 1);
	au = gf_sm_stream_au_new(st, 0, 0, 1);
	com = gf_sg_command_new(load->ctx->scene_graph, GF_SG_SCENE_REPLACE);
	gf_list_add(au->commands, com);
	com->node = (GF_Node *)gr;

	back = (M_Background *) gf_node_new(sg, TAG_MPEG4_Background);
	gf_node_list_add_child( &gr->children, (GF_Node*)back);
	gf_node_register((GF_Node *)back, (GF_Node *)gr);

	gf_sg_vrml_mf_alloc(&back->leftUrl, GF_SG_VRML_MFURL, 1);
	back->leftUrl.vals[0].OD_ID = 2;
	gf_sg_vrml_mf_alloc(&back->frontUrl, GF_SG_VRML_MFURL, 1);
	back->frontUrl.vals[0].OD_ID = 3;
	gf_sg_vrml_mf_alloc(&back->rightUrl, GF_SG_VRML_MFURL, 1);
	back->rightUrl.vals[0].OD_ID = 4;
	gf_sg_vrml_mf_alloc(&back->backUrl, GF_SG_VRML_MFURL, 1);
	back->backUrl.vals[0].OD_ID = 5;
	gf_sg_vrml_mf_alloc(&back->topUrl, GF_SG_VRML_MFURL, 1);
	back->topUrl.vals[0].OD_ID = 6;
	gf_sg_vrml_mf_alloc(&back->bottomUrl, GF_SG_VRML_MFURL, 1);
	back->bottomUrl.vals[0].OD_ID = 7;

	ni = (M_NavigationInfo *) gf_node_new(sg, TAG_MPEG4_NavigationInfo);
	gf_node_list_add_child(&gr->children, (GF_Node*)ni);
	gf_node_register((GF_Node *)ni, (GF_Node *)gr);
	gf_sg_vrml_mf_reset(&ni->type, GF_SG_VRML_MFSTRING);
	gf_sg_vrml_mf_alloc(&ni->type, GF_SG_VRML_MFSTRING, 1);
	ni->type.vals[0] = gf_strdup("VR");

	/*create ODs*/
	st = gf_sm_stream_new(load->ctx, 2, GF_STREAM_OD, 1);
	au = gf_sm_stream_au_new(st, 0, 0, 1);
	odU = (GF_ODUpdate*) gf_odf_com_new(GF_ODF_OD_UPDATE_TAG);
	gf_list_add(au->commands, odU);
	for (i=0; i<6; i++) {
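		/*extract each of the 6 cube faces to a JPEG file and declare it through its own OD/ESD*/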
		GF_MuxInfo *mi;
		FILE *img;
		char szName[1024];
		od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
		od->objectDescriptorID = 2+i;
		esd = gf_odf_desc_esd_new(2);
		esd->decoderConfig->streamType = GF_STREAM_VISUAL;
		esd->decoderConfig->objectTypeIndication = GPAC_OTI_IMAGE_JPEG;
		esd->ESID = 3+i;
		/*extract image and remember it*/
		mi = (GF_MuxInfo *) gf_odf_desc_new(GF_ODF_MUXINFO_TAG);
		gf_list_add(esd->extensionDescriptors, mi);
		mi->delete_file = 1;
		sprintf(szName, "%s_img%d.jpg", load->fileName, esd->ESID);
		mi->file_name = gf_strdup(szName);

		gf_list_add(od->ESDescriptors, esd);
		gf_list_add(odU->objectDescriptors, od);

		samp = gf_isom_get_sample(src, tk, i+1, &di);
		img = gf_fopen(mi->file_name, "wb");
		gf_fwrite(samp->data, samp->dataLength, 1, img);
		gf_fclose(img);
		gf_isom_sample_del(&samp);
	}
	gf_isom_delete(src);
	return GF_OK;
}
Example #13
File: main.c Project: ARSekkat/gpac
void save_rs_0(char *filename, unsigned char *data)
{
	FILE *rs_out = gf_fopen(filename,"a+b");
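	/*append one 204-byte TS packet (188 bytes plus RS parity) to the dump file*/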
	gf_fwrite(data, 1, 204, rs_out);
	gf_fclose(rs_out);
}
Example #14
File: xml_ns.c Project: Keemotion/GPAC4iOS
GF_EXPORT
GF_Err gf_node_store_embedded_data(XMLRI *iri, const char *cache_dir, const char *base_filename)
{
	char szFile[GF_MAX_PATH], buf[20], *sep, *data, *ext;
	u32 data_size, idx;
	Bool existing;
	FILE *f;

	if (!cache_dir || !base_filename || !iri || !iri->string || strncmp(iri->string, "data:", 5)) return GF_OK;

	/*handle "data:" scheme when cache is specified*/
	strcpy(szFile, cache_dir);
	data_size = (u32) strlen(szFile);
	if (szFile[data_size-1] != GF_PATH_SEPARATOR) {
		szFile[data_size] = GF_PATH_SEPARATOR;
		szFile[data_size+1] = 0;
	}
	if (base_filename) {
		sep = strrchr(base_filename, GF_PATH_SEPARATOR);
#ifdef WIN32
		if (!sep) sep = strrchr(base_filename, '/');
#endif
		if (!sep) sep = (char *) base_filename;
		else sep += 1;
		strcat(szFile, sep);
	}
	sep = strrchr(szFile, '.');
	if (sep) sep[0] = 0;
	strcat(szFile, "_img_");

	/*get mime type*/
	sep = (char *)iri->string + 5;
	if (!strncmp(sep, "image/jpg", 9) || !strncmp(sep, "image/jpeg", 10)) ext = ".jpg";
	else if (!strncmp(sep, "image/png", 9)) ext = ".png";
	else if (!strncmp(sep, "image/svg+xml", 13)) ext = ".svg";
	else return GF_BAD_PARAM;


	data = NULL;
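	/*decode the base64 or base16 payload into a binary buffer*/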
	sep = strchr(iri->string, ';');
	if (!strncmp(sep, ";base64,", 8)) {
		sep += 8;
		data_size = 2 * (u32) strlen(sep);
		data = (char*)gf_malloc(sizeof(char)*data_size);
		if (!data) return GF_OUT_OF_MEM;
		data_size = gf_base64_decode(sep, (u32) strlen(sep), data, data_size);
	}
	else if (!strncmp(sep, ";base16,", 8)) {
		data_size = 2 * (u32) strlen(sep);
		data = (char*)gf_malloc(sizeof(char)*data_size);
		if (!data) return GF_OUT_OF_MEM;
		sep += 8;
		data_size = gf_base16_decode(sep, (u32) strlen(sep), data, data_size);
	}
	if (!data_size) return GF_OK;
	
	iri->type = XMLRI_STRING;
	
	existing = 0;
	idx = 0;
	while (1) {
		u32 res = check_existing_file(szFile, ext, data, data_size, idx);
		if (!res) break;
		if (res==2) {
			existing = 1;
			break;
		}
		idx++;
	}
	sprintf(buf, "%04X", idx);
	strcat(szFile, buf);
	strcat(szFile, ext);

	if (!existing) {
		f = gf_f64_open(szFile, "wb");
		if (!f) {
			gf_free(data);
			gf_free(iri->string);
			iri->string = NULL;
			return GF_IO_ERR;
		}
		gf_fwrite(data, data_size, 1, f);
		fclose(f);
	}
	gf_free(data);
	gf_free(iri->string);
	iri->string = gf_strdup(szFile);
	return GF_OK;
}
Example #15
static void imagetexture_update(GF_TextureHandler *txh)
{
	if (gf_node_get_tag(txh->owner)!=TAG_MPEG4_CacheTexture) {
		MFURL url = ((M_ImageTexture *) txh->owner)->url;

		/*setup texture if needed*/
		if (!txh->is_open && url.count) {
			gf_sc_texture_play(txh, &url);
		}
		gf_sc_texture_update_frame(txh, 0);

		if (
		    /*URL is present but not opened - redraw till fetch*/
		    /* (txh->stream && !txh->tx_io) && */
		    /*image has been updated*/
		    txh->needs_refresh) {
			/*mark all subtrees using this image as dirty*/
			gf_node_dirty_parents(txh->owner);
			gf_sc_invalidate(txh->compositor, NULL);
		}
		return;
	}
	/*cache texture case*/
	else {
		M_CacheTexture *ct = (M_CacheTexture *) txh->owner;

		/*decode cacheTexture data */
		if ((ct->data || ct->image.buffer) && !txh->data) {
#ifndef GPAC_DISABLE_AV_PARSERS
			u32 out_size;
			GF_Err e;

			/*BT/XMT playback: load to memory*/
			if (ct->image.buffer) {
				char *par = (char *) gf_scene_get_service_url( gf_node_get_graph(txh->owner ) );
				char *src_url = gf_url_concatenate(par, ct->image.buffer);
				FILE *test = gf_fopen( src_url ? src_url : ct->image.buffer, "rb");
				if (test) {
					fseek(test, 0, SEEK_END);
					ct->data_len = (u32) gf_ftell(test);
					ct->data = gf_malloc(sizeof(char)*ct->data_len);
					fseek(test, 0, SEEK_SET);
					if (ct->data_len != fread(ct->data, 1, ct->data_len, test)) {
						GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: IO err\n", src_url ? src_url : ct->image.buffer ) );
						gf_free(ct->data);
						ct->data = NULL;
						ct->data_len = 0;
					}
					gf_fclose(test);
				} else {
					GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to load CacheTexture data from file %s: not found\n", src_url ? src_url : ct->image.buffer ) );
				}
				ct->image.buffer = NULL;
				if (src_url) gf_free(src_url);
			}

			/*BIFS decoded playback*/
			switch (ct->objectTypeIndication) {
			case GPAC_OTI_IMAGE_JPEG:
				out_size = 0;
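				/*first pass queries the decoded size, second pass performs the actual decode*/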
				e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size, 3);
				if (e==GF_BUFFER_TOO_SMALL) {
					u32 BPP;
					txh->data = gf_malloc(sizeof(char) * out_size);
					if (txh->pixelformat==GF_PIXEL_GREYSCALE) BPP = 1;
					else BPP = 3;

					e = gf_img_jpeg_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size, BPP);
					if (e==GF_OK) {
						gf_sc_texture_allocate(txh);
						gf_sc_texture_set_data(txh);
						txh->needs_refresh = 1;
						txh->stride = out_size / txh->height;
					}
				}
				break;
			case GPAC_OTI_IMAGE_PNG:
				out_size = 0;
				e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, NULL, &out_size);
				if (e==GF_BUFFER_TOO_SMALL) {
					txh->data = gf_malloc(sizeof(char) * out_size);
					e = gf_img_png_dec((char *) ct->data, ct->data_len, &txh->width, &txh->height, &txh->pixelformat, txh->data, &out_size);
					if (e==GF_OK) {
						gf_sc_texture_allocate(txh);
						gf_sc_texture_set_data(txh);
						txh->needs_refresh = 1;
						txh->stride = out_size / txh->height;
					}
				}
				break;
			}

#endif // GPAC_DISABLE_AV_PARSERS

			/*cacheURL is specified, store the image*/
			if (ct->cacheURL.buffer) {
				u32 i;
				u8 hash[20];
				FILE *cached_texture;
				char szExtractName[GF_MAX_PATH], section[64], *opt, *src_url;
				opt = (char *) gf_cfg_get_key(txh->compositor->user->config, "General", "CacheDirectory");
				if (opt) {
					strcpy(szExtractName, opt);
				} else {
					opt = gf_get_default_cache_directory();
					strcpy(szExtractName, opt);
					gf_free(opt);
				}
				strcat(szExtractName, "/");
				src_url = (char *) gf_scene_get_service_url( gf_node_get_graph(txh->owner ) );

				gf_sha1_csum((u8 *)src_url, (u32) strlen(src_url), hash);
				for (i=0; i<20; i++) {
					char t[3];
					t[2] = 0;
					sprintf(t, "%02X", hash[i]);
					strcat(szExtractName, t);
				}
				strcat(szExtractName, "_");

				strcat(szExtractName, ct->cacheURL.buffer);
				cached_texture = gf_fopen(szExtractName, "wb");
				if (cached_texture) {
					gf_fwrite(ct->data, 1, ct->data_len, cached_texture);
					gf_fclose(cached_texture);
				}

				/*and write cache info*/
				if (ct->expirationDate!=0) {
					sprintf(section, "@cache=%p", ct);
					gf_cfg_set_key(txh->compositor->user->config, section, "serviceURL", src_url);
					gf_cfg_set_key(txh->compositor->user->config, section, "cacheFile", szExtractName);
					gf_cfg_set_key(txh->compositor->user->config, section, "cacheName", ct->cacheURL.buffer);

					if (ct->expirationDate>0) {
						char exp[50];
						u32 sec, frac;
						gf_net_get_ntp(&sec, &frac);
						sec += ct->expirationDate;
						sprintf(exp, "%u", sec);
						gf_cfg_set_key(txh->compositor->user->config, section, "expireAfterNTP", exp);
					} else {
						gf_cfg_set_key(txh->compositor->user->config, section, "expireAfterNTP", "0");
					}
				}
			}

			/*done with image, destroy buffer*/
			if (ct->data) gf_free(ct->data);
			ct->data = NULL;
			ct->data_len = 0;
		}
	}
}
Example #16
File: main.c Project: ARSekkat/gpac
void save_ts(char *filename, unsigned char *data)
{
	FILE *ts_out = gf_fopen(filename,"a+b");
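	/*append one 188-byte MPEG-2 TS packet to the dump file*/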
	gf_fwrite(data, 1, 188, ts_out);
	gf_fclose(ts_out);
}
Example #17
int sdp_generator(PNC_CallbackData *data, char *ip_dest, char *sdp_fmt)
{
	GF_SceneEngine *codec;
	GF_ESD *esd = NULL;
	u32 size,size64;
	char *buffer;
	char buf64[5000];
	FILE *fp;
	int ret;
	char temp[5000];
	u16 port;
	u32 socket_type;

	gf_sk_get_local_info(data->chan->rtp, &port, &socket_type);

	fp = gf_fopen("broadcaster.sdp", "w+");
	if(fp == NULL) {
		fprintf(stderr, "Cannot open SDP file broadcaster.sdp\n");
		exit(1);
	}

	ret = gf_fwrite("v=0\n", 1, 4, fp);
	sprintf(temp, "o=GpacBroadcaster 3326096807 1117107880000 IN IP%d %s\n", gf_net_is_ipv6(ip_dest) ? 6 : 4, ip_dest);
	ret = gf_fwrite(temp, 1, strlen(temp), fp);

	ret = gf_fwrite("s=MPEG4Broadcaster\n", 1, 19, fp);

	sprintf(temp, "c=IN IP%d %s\n", gf_net_is_ipv6(ip_dest) ? 6 : 4, ip_dest);
	ret = gf_fwrite(temp, 1, strlen(temp), fp);

	ret = gf_fwrite("t=0 0\n", 1, 6, fp);

	codec = (GF_SceneEngine *) data->codec;
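	/*if a scene engine is attached, embed its IOD as a base64 a=mpeg4-iod attribute*/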
	if (codec) {
		buffer = NULL;
		size = 0;
		gf_odf_desc_write((GF_Descriptor *) codec->ctx->root_od, &buffer, &size);
		esd = gf_list_get(codec->ctx->root_od->ESDescriptors, 0);

		size64 = gf_base64_encode((unsigned char *) buffer, size, (unsigned char *) buf64, 2000);
		buf64[size64] = 0;
		free(buffer);

		sprintf(temp, "a=mpeg4-iod:\"data:application/mpeg4-iod;base64,%s\"\n", buf64);
		ret = gf_fwrite(temp, 1, strlen(temp), fp);
	}

	sprintf(temp, "m=application %d RTP/AVP 96\n", port);
	ret = gf_fwrite(temp, 1, strlen(temp), fp);

	ret = gf_fwrite("a=rtpmap:96 mpeg4-generic/1000\n", 1, 31, fp);

	if (esd) {
		sprintf(temp, "a=mpeg4-esid:%d\n", esd->ESID);
		ret = gf_fwrite(temp, 1, strlen(temp), fp);
	}

	sprintf(temp, "%s\n", sdp_fmt);
	ret = gf_fwrite(temp, 1, strlen(temp), fp);
	fflush(fp);
	gf_fclose(fp);
	dprintf(DEBUG_sdp_generator, "SDP file generated in broadcaster.sdp\n");
	return GF_OK;
}
Example #18
File: load.c Project: Brilon314/gpac
void isor_declare_objects(ISOMReader *read)
{
	GF_ObjectDescriptor *od;
	GF_ESD *esd;
	const char *tag;
	u32 i, count, ocr_es_id, tlen, base_track, j, track_id;
	Bool highest_stream;

	ocr_es_id = 0;

	/*TODO check for alternate tracks*/
	count = gf_isom_get_track_count(read->mov);
	for (i=0; i<count; i++) {
		if (!gf_isom_is_track_enabled(read->mov, i+1))
			continue;

		switch (gf_isom_get_media_type(read->mov, i+1)) {
		case GF_ISOM_MEDIA_AUDIO:
		case GF_ISOM_MEDIA_VISUAL:
		case GF_ISOM_MEDIA_TEXT:
		case GF_ISOM_MEDIA_SUBT:
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_SUBPIC:
			break;
		default:
			continue;
		}
		//some subtypes are not declared as readable objects
		switch (gf_isom_get_media_subtype(read->mov, i+1, 1)) {
		case GF_ISOM_SUBTYPE_HVT1:
			continue;
		default:
			break;
		}
		/*we declare only the highest video track (i.e the track we play)*/
		highest_stream = GF_TRUE;
		track_id = gf_isom_get_track_id(read->mov, i+1);
		if (read->play_only_track_id && (read->play_only_track_id != track_id)) continue;

		for (j = 0; j < count; j++) {
			if (gf_isom_has_track_reference(read->mov, j+1, GF_ISOM_REF_SCAL, track_id) > 0) {
				highest_stream = GF_FALSE;
				break;
			}
		}
		if ((gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) && !highest_stream)
			continue;
		esd = gf_media_map_esd(read->mov, i+1);
		if (esd) {
			gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track);
			esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE;

			if (!esd->langDesc) {
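				/*build a language descriptor from the track's media language*/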
				esd->langDesc = (GF_Language *) gf_odf_desc_new(GF_ODF_LANG_TAG);
				gf_isom_get_media_language(read->mov, i+1, &esd->langDesc->full_lang_code);
			}

			od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
			od->service_ifce = read->input;
			od->objectDescriptorID = 0;
			if (!ocr_es_id) ocr_es_id = esd->ESID;
			esd->OCRESID = ocr_es_id;
			gf_list_add(od->ESDescriptors, esd);
			if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
				send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
			} else {
				gf_service_declare_media(read->service, (GF_Descriptor*)od, GF_TRUE);
			}
		}
	}
	/*if cover art, extract it in cache*/
	if (gf_isom_apple_get_tag(read->mov, GF_ISOM_ITUNE_COVER_ART, &tag, &tlen)==GF_OK) {
		const char *cdir = gf_modules_get_option((GF_BaseInterface *)gf_service_get_interface(read->service), "General", "CacheDirectory");
		if (cdir) {
			char szName[GF_MAX_PATH];
			const char *sep;
			FILE *t;
			sep = strrchr(gf_isom_get_filename(read->mov), '\\');
			if (!sep) sep = strrchr(gf_isom_get_filename(read->mov), '/');
			if (!sep) sep = gf_isom_get_filename(read->mov);

			if ((cdir[strlen(cdir)-1] != '\\') && (cdir[strlen(cdir)-1] != '/')) {
				sprintf(szName, "%s/%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			} else {
				sprintf(szName, "%s%s_cover.%s", cdir, sep, (tlen & 0x80000000) ? "png" : "jpg");
			}

			t = gf_fopen(szName, "wb");

			if (t) {
				Bool isom_contains_video = GF_FALSE;

				/*write cover data*/
				assert(!(tlen & 0x80000000));
				gf_fwrite(tag, tlen & 0x7FFFFFFF, 1, t);
				gf_fclose(t);

				/*don't display cover art when video is present*/
				for (i=0; i<gf_isom_get_track_count(read->mov); i++) {
					if (!gf_isom_is_track_enabled(read->mov, i+1))
						continue;
					if (gf_isom_get_media_type(read->mov, i+1) == GF_ISOM_MEDIA_VISUAL) {
						isom_contains_video = GF_TRUE;
						break;
					}
				}

				if (!isom_contains_video) {
					od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG);
					od->service_ifce = read->input;
					od->objectDescriptorID = GF_MEDIA_EXTERNAL_ID;
					od->URLString = gf_strdup(szName);
					if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
						send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, (GF_Descriptor*)od, NULL);
					} else {
						gf_service_declare_media(read->service, (GF_Descriptor*)od, GF_TRUE);
					}
				}
			}
		}
	}
	if (read->input->query_proxy && read->input->proxy_udta && read->input->proxy_type) {
		send_proxy_command(read, GF_FALSE, GF_TRUE, GF_OK, NULL, NULL);
	} else {
		gf_service_declare_media(read->service, NULL, GF_FALSE);
	}
}
Example #19
File: sdp_load.c Project: erelh/gpac
void RP_SaveSessionState(RTPClient *rtp)
{
	GF_Err e;
	char *sdp_buf;
	const char *opt;
	GF_X_Attribute*att;
	u32 i, j;
	GF_SDPInfo *sdp;
	RTSPSession *sess = NULL;

	if (!rtp->session_state_data) return;

	sdp_buf = rtp->session_state_data + strlen("data:application/sdp,");
	sdp = gf_sdp_info_new();
	e = gf_sdp_info_parse(sdp, sdp_buf, (u32) strlen(sdp_buf) );

	for (i=0; i<gf_list_count(rtp->channels); i++) {
		GF_SDPMedia *media = NULL;
		RTPStream *ch = gf_list_get(rtp->channels, i);
		if (!ch->control) continue;

		for (j=0; j<gf_list_count(sdp->media_desc); j++) {
			u32 k;
			GF_SDPMedia *med = (GF_SDPMedia*)gf_list_get(sdp->media_desc, j);

			for (k=0; k<gf_list_count(med->Attributes); k++) {
				att = (GF_X_Attribute*)gf_list_get(med->Attributes, k);
				if (!stricmp(att->Name, "control") && (strstr(att->Value, ch->control)!=NULL) ) {
					media = med;
					break;
				}
			}
			if (media)
				break;
		}
		if (!media) continue;

		if (ch->rtp_ch->net_info.IsUnicast) {
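			/*unicast stream: record current ports, SSRC, NPT and RTP sequence state so the session can be resumed*/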
			char szPorts[4096];
			u16 porta, portb;
			media->PortNumber = ch->rtp_ch->net_info.client_port_first;

			/*remove x-server-port extension*/
			for (j=0; j<gf_list_count(media->Attributes); j++) {
				att = (GF_X_Attribute*)gf_list_get(media->Attributes, j);
				if (!stricmp(att->Name, "x-stream-state") ) {
					gf_free(att->Name);
					gf_free(att->Value);
					gf_free(att);
					gf_list_rem(media->Attributes, j);
				}
			}
			ch->current_start += gf_rtp_get_current_time(ch->rtp_ch);

			GF_SAFEALLOC(att, GF_X_Attribute);
			att->Name = gf_strdup("x-stream-state");
			porta = ch->rtp_ch->net_info.port_first ? ch->rtp_ch->net_info.port_first : ch->rtp_ch->net_info.client_port_first;
			portb = ch->rtp_ch->net_info.port_last ? ch->rtp_ch->net_info.port_last : ch->rtp_ch->net_info.client_port_last;

			sprintf(szPorts, "server-port=%d-%d;ssrc=%X;npt=%g;seq=%d;rtptime=%d",
				porta,
				portb,
				ch->rtp_ch->SenderSSRC,
				ch->current_start,
				ch->rtp_ch->rtp_first_SN,
				ch->rtp_ch->rtp_time
			);
			att->Value = gf_strdup(szPorts);
			gf_list_add(media->Attributes, att);

			if (ch->rtsp)
				sess = ch->rtsp;
		} else {
			media->PortNumber = ch->rtp_ch->net_info.port_first;
		}

	}
	/*remove x-server-port/x-session-id extension*/
	for (j=0; j<gf_list_count(sdp->Attributes); j++) {
		att = (GF_X_Attribute*)gf_list_get(sdp->Attributes, j);
		if (!stricmp(att->Name, "x-session-id") || !stricmp(att->Name, "x-session-name")
		) {
			gf_free(att->Name);
			gf_free(att->Value);
			gf_free(att);
			gf_list_rem(sdp->Attributes, j);
		}
	}
	if (sess) {
		char szURL[4096];

		if (sess->session_id) {
			GF_SAFEALLOC(att, GF_X_Attribute);
			att->Name = gf_strdup("x-session-id");
			att->Value = gf_strdup(sess->session_id);
			gf_list_add(sdp->Attributes, att);
		}

		GF_SAFEALLOC(att, GF_X_Attribute);
		att->Name = gf_strdup("x-session-name");
		sprintf(szURL, "rtsp://%s:%d/%s", sess->session->Server, sess->session->Port, sess->session->Service);
		att->Value = gf_strdup(szURL);
		gf_list_add(sdp->Attributes, att);
	}

	gf_free(rtp->session_state_data);
	sdp_buf = NULL;
	gf_sdp_info_write(sdp, &sdp_buf);
	if (sdp_buf) {
		rtp->session_state_data = gf_malloc(sizeof(char) * (strlen("data:application/sdp,") + strlen(sdp_buf) + 1) );
		strcpy(rtp->session_state_data, "data:application/sdp,");
		strcat(rtp->session_state_data, sdp_buf);
		gf_free(sdp_buf);
	}


	gf_sdp_info_del(sdp);


	opt = (char *) gf_modules_get_option((GF_BaseInterface *) gf_term_get_service_interface(rtp->service), "Streaming", "SessionMigrationServer");
	if (opt) {
		if (rtp->dnload) gf_term_download_del(rtp->dnload);
		rtp->dnload = NULL;

		if (strnicmp(opt, "http://", 7)) {
			rtp->dnload = gf_term_download_new(rtp->service, opt, GF_NETIO_SESSION_NOT_THREADED, MigrateSDP_NetIO, rtp);
			while (1) {
				char buffer[100];
				u32 read;
				e = gf_dm_sess_fetch_data(rtp->dnload, buffer, 100, &read);
				if (e && (e!=GF_IP_NETWORK_EMPTY)) break;
			}
			gf_term_download_del(rtp->dnload);
			rtp->dnload = NULL;
		} else {
			FILE *f = gf_f64_open(opt, "wt");
			if (f) {
				sdp_buf = rtp->session_state_data + strlen("data:application/sdp,");
				gf_fwrite(sdp_buf, 1, strlen(sdp_buf), f);
				fclose(f);
			} else {
				e = GF_IO_ERR;
			}
		}
		if (e<0) {
			gf_term_on_message(sess->owner->service, e, "Error saving session state");
		}
	}
}
Example #20
File: broadcaster.c Project: Bevara/GPAC
u32 tcp_server(void *par)
{
	TCP_Input *input = par;
	u32 *timer = input->RAPtimer;
	char buffer[MAX_BUF];
	unsigned char temp[MAX_BUF];
	FILE *fp;
	u32 byte_read;
	int ret;
	GF_Config *gf_config_file;
	GF_Socket *TCP_socket;
	GF_Socket *conn_socket;
	GF_Err e;

	int debug = input->debug;
	input->status = 1;

	TCP_socket = gf_sk_new(GF_SOCK_TYPE_TCP);
	e = gf_sk_bind(TCP_socket, NULL, input->port, NULL, 0, 0);
	e = gf_sk_listen(TCP_socket, 1);
	e = gf_sk_set_block_mode(TCP_socket, 1);
	e = gf_sk_server_mode(TCP_socket, 0);

	while(input->status == 1)
	{
		memset(buffer, 0, sizeof(buffer));
		e = gf_sk_accept(TCP_socket, &conn_socket);
		if (e == GF_OK) {
			memset(buffer, 0, sizeof(buffer));
			e = gf_sk_receive(conn_socket, buffer, MAX_BUF, 0, &byte_read);
		}

		switch (e) {
		case GF_IP_NETWORK_EMPTY:
			gf_sleep(33);
			continue;
		case GF_OK:
			break;
		default:
			fprintf(stderr, "Error with TCP socket : %s\n", gf_error_to_string(e));
			exit(1);
			break;
		}

		if((*(input->config_flag)) == 0)
		{
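			/*no configuration yet: dump the received data to temp.cfg, parse it, then fetch the initial scene*/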
			u32 num_retry;
			fp = fopen("temp.cfg", "w+");
			if (!fp) {
				fprintf(stderr, "Error opening temp file for the configuration\n");
				exit(1);
			}
			ret = gf_fwrite(buffer, 1, byte_read, fp);
			fclose(fp);

			/* parsing config info */
			gf_config_file = gf_cfg_new(".", "temp.cfg");
			if (!gf_config_file) {
				fprintf(stderr, "Error opening the config file %s\n", gf_error_to_string(e));
				exit(-1);
			}
			parse_config(gf_config_file, input->config, debug);

			/* Acknowledging the configuration */
			gf_sk_send(conn_socket, "OK\n", 3);

			memset(temp, 0, sizeof(temp));
			fp = fopen(input->config->scene_init_file, "w+");
			if (!fp) {
				fprintf(stderr, "Error opening temp file for reception of the initial scene\n");
				exit(1);
			}
			num_retry=10;

			while (1)
			{
				GF_Err e = gf_sk_receive(conn_socket, temp, sizeof(temp), 0, &byte_read);

				if (e == GF_OK) {
					gf_fwrite(temp, 1, byte_read, fp);
				} else if (e==GF_IP_NETWORK_EMPTY) {
					num_retry--;
					if (!num_retry)
						break;
					gf_sleep(1);
				} else {
					fprintf(stderr, "Error receiving initial scene: %s\n", gf_error_to_string(e));
					break;
				}
			}
			fclose(fp);
			*(input->config_flag) = 1;
		}
		/* we only wait now for the config updates */
		if ( (*(input->config_flag)) == 1) {
			ret = sscanf(buffer, "DelaiMax=%d\n", timer);
			fprintf(stdout, "RAP timer changed, now : %d\n", *timer);
		}
		gf_sk_del(conn_socket);
	}

	input->status = 2;
	return GF_OK;
}