Example #1
/* Constructor */
HbbtvDemuxer::HbbtvDemuxer(char *input_data, char *url, Bool dsmcc, Bool no_url, sPlayerInterface *player_interf)
{

	Demuxts = NULL;

	Channels = gf_list_new();
	Input_data = gf_strdup(input_data);
	user = player_interf;
	player_interf->Demuxer = this;
	ait_to_process = 0;
	nb_prog_pmt_received = 0;
	all_prog_pmt_received = 0;
	No_URL = 0;
	Ignore_TS_URL = 0;
	Force_URL = NULL;	/* only set when a forced URL is given below */
	if (dsmcc) {
		process_dsmcc = 1;
	} else {
		process_dsmcc = 0;
	}

	if (no_url) {
		GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[HBBTVTerminal] No URL \n"));
		No_URL = 1;
	} else if (url) {
		GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[HBBTVTerminal] Forced URL %s \n", url));
		Force_URL = gf_strdup(url);
		Ignore_TS_URL = 1;
	}
	nb_ait = 0;
	ts_demux_mutex = gf_mx_new("HBBTV_TS_Demux_Mutex");

	ts_demux_thread = gf_th_new("HBBTV_TS_Demux_Thread");
	
}
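The matching teardown is not shown on this page; as a hedged sketch, a destructor for the class above would release what the constructor allocated, roughly as follows (assuming the demux thread has already been stopped elsewhere, e.g. via gf_th_stop):

/* Sketch only: symmetric cleanup of the members created in the constructor above. */
HbbtvDemuxer::~HbbtvDemuxer()
{
	if (ts_demux_thread) gf_th_del(ts_demux_thread);
	if (ts_demux_mutex) gf_mx_del(ts_demux_mutex);
	if (Channels) gf_list_del(Channels);
	if (Input_data) gf_free(Input_data);
	if (Force_URL) gf_free(Force_URL);
}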
Example #2
File: html5_mse.c  Project: HungMingWu/gpac
/* creates a track based on the ESD and adds it to the source buffer */
static GF_HTML_Track *gf_mse_source_buffer_add_track(GF_HTML_SourceBuffer *sb, GF_ESD *esd)
{
	GF_HTML_Track *track;
	GF_HTML_TrackType type;

	track = NULL;
	type = HTML_MEDIA_TRACK_TYPE_UNKNOWN;
	if (esd->decoderConfig->streamType==GF_STREAM_VISUAL) {
		type = HTML_MEDIA_TRACK_TYPE_VIDEO;
	} else if (esd->decoderConfig->streamType==GF_STREAM_AUDIO) {
		type = HTML_MEDIA_TRACK_TYPE_AUDIO;
	} else {
		/* TODO: Text tracks */
	}

	track = gf_html_media_track_new(type, "", GF_TRUE, "", "", "", "");
	if (track) {
		char mx_name[256];
		track->bin_id = esd->ESID;
		track->buffer = gf_list_new();
		sprintf(mx_name, "track_mutex_%d", track->bin_id);
		track->buffer_mutex = gf_mx_new(mx_name);
		track->timescale = esd->slConfig->timestampResolution;
		gf_list_add(sb->tracks, track);
	}
	return track;
}
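The per-track mutex created above is presumably there to serialize access to track->buffer between the parsing and playback sides; a hypothetical helper illustrating the locking pattern (the function name and the "frame" argument are illustrative, not part of GPAC's MSE code):

/* Illustrative only: append one unit to the track buffer under its mutex. */
static void mse_track_buffer_append(GF_HTML_Track *track, void *frame)
{
	gf_mx_p(track->buffer_mutex);	/* lock */
	gf_list_add(track->buffer, frame);
	gf_mx_v(track->buffer_mutex);	/* unlock */
}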
Example #3
File: read.c  Project: supperlitt/gpac
GF_InputService *isor_client_load()
{
    ISOMReader *reader;
    GF_InputService *plug;
    GF_SAFEALLOC(plug, GF_InputService);
    GF_REGISTER_MODULE_INTERFACE(plug, GF_NET_CLIENT_INTERFACE, "GPAC IsoMedia Reader", "gpac distribution")
    plug->RegisterMimeTypes = ISOR_RegisterMimeTypes;
    plug->CanHandleURL = ISOR_CanHandleURL;
    plug->ConnectService = ISOR_ConnectService;
    plug->CloseService = ISOR_CloseService;
    plug->GetServiceDescriptor = ISOR_GetServiceDesc;
    plug->ConnectChannel = ISOR_ConnectChannel;
    plug->DisconnectChannel = ISOR_DisconnectChannel;
    plug->ServiceCommand = ISOR_ServiceCommand;
    plug->CanHandleURLInService = ISOR_CanHandleURLInService;
    /*we do support pull mode*/
    plug->ChannelGetSLP = ISOR_ChannelGetSLP;
    plug->ChannelReleaseSLP = ISOR_ChannelReleaseSLP;

    GF_SAFEALLOC(reader, ISOMReader);
    reader->channels = gf_list_new();
    reader->segment_mutex = gf_mx_new("ISO Segment");

    plug->priv = reader;
    return plug;
}
Example #4
File: channel.c  Project: bigbensk/gpac
GF_Channel *gf_es_new(GF_ESD *esd)
{
	u32 nbBits;
	GF_Channel *tmp;
	GF_SAFEALLOC(tmp, GF_Channel);
	if (!tmp) return NULL;

	tmp->mx = gf_mx_new("Channel");
	tmp->esd = esd;
	tmp->es_state = GF_ESM_ES_SETUP;

	nbBits = sizeof(u32) * 8 - esd->slConfig->AUSeqNumLength;
	tmp->max_au_sn = 0xFFFFFFFF >> nbBits;
	nbBits = sizeof(u32) * 8 - esd->slConfig->packetSeqNumLength;
	tmp->max_pck_sn = 0xFFFFFFFF >> nbBits;

	tmp->skip_sl = (esd->slConfig->predefined == SLPredef_SkipSL) ? 1 : 0;

	/*take care of dummy streams*/
	if (!esd->slConfig->timestampResolution) esd->slConfig->timestampResolution = esd->slConfig->timeScale ? esd->slConfig->timeScale : 1000;
	if (!esd->slConfig->OCRResolution) esd->slConfig->OCRResolution = esd->slConfig->timestampResolution;

	tmp->ts_res = esd->slConfig->timestampResolution;

	tmp->ocr_scale = 0;
	if (esd->slConfig->OCRResolution) {
		tmp->ocr_scale = 1000;
		tmp->ocr_scale /= esd->slConfig->OCRResolution;
	}

	Channel_Reset(tmp, 0);
	return tmp;
}
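To make the sequence-number masks above concrete: with a 5-bit AUSeqNumLength, nbBits = 32 - 5 = 27 and max_au_sn = 0xFFFFFFFF >> 27 = 0x1F, i.e. 31, the largest value a 5-bit SL sequence number can carry. Note that a zero-length field would give a shift of 32, which is undefined for a 32-bit value in C, so the SL config is presumably expected to declare a non-zero length whenever sequence numbers are actually used.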
Example #5
GF_InputService *NewM2TSReader()
{
    M2TSIn *reader;
    GF_InputService *plug = gf_malloc(sizeof(GF_InputService));
    memset(plug, 0, sizeof(GF_InputService));
    GF_REGISTER_MODULE_INTERFACE(plug, GF_NET_CLIENT_INTERFACE, "GPAC MPEG-2 TS Reader", "gpac distribution")

    plug->CanHandleURL = M2TS_CanHandleURL;
    plug->CanHandleURLInService = M2TS_CanHandleURLInService;
    plug->ConnectService = M2TS_ConnectService;
    plug->CloseService = M2TS_CloseService;
    plug->GetServiceDescriptor = M2TS_GetServiceDesc;
    plug->ConnectChannel = M2TS_ConnectChannel;
    plug->DisconnectChannel = M2TS_DisconnectChannel;
    plug->ServiceCommand = M2TS_ServiceCommand;
    plug->RegisterMimeTypes = M2TS_RegisterMimeTypes;

    reader = gf_malloc(sizeof(M2TSIn));
    memset(reader, 0, sizeof(M2TSIn));
    plug->priv = reader;
    reader->ts = gf_m2ts_demux_new();
    reader->ts->on_event = M2TS_OnEvent;
    reader->ts->user = reader;
    reader->ts->demux_and_play = 1;
    reader->ts->th = gf_th_new("MPEG-2 TS Demux");

    reader->mx = gf_mx_new("MPEG2 Demux");

    return plug;
}
Example #6
File: rtp_in.c  Project: jnorthrup/gpac
GF_InputService *RTP_Load()
{
	RTPClient *priv;
	GF_InputService *plug;
	GF_SAFEALLOC(plug, GF_InputService);
	memset(plug, 0, sizeof(GF_InputService));
	GF_REGISTER_MODULE_INTERFACE(plug, GF_NET_CLIENT_INTERFACE, "GPAC RTP/RTSP Client", "gpac distribution")

	plug->CanHandleURL = RP_CanHandleURL;
	plug->CanHandleURLInService = RP_CanHandleURLInService;
	plug->ConnectService = RP_ConnectService;
	plug->CloseService = RP_CloseService;
	plug->GetServiceDescriptor = RP_GetServiceDesc;
	plug->ConnectChannel = RP_ConnectChannel;
	plug->DisconnectChannel = RP_DisconnectChannel;
	plug->ServiceCommand = RP_ServiceCommand;
	plug->RegisterMimeTypes = RP_RegisterMimeTypes;

	/*PULL mode for embedded streams*/
	plug->ChannelGetSLP = RP_ChannelGetSLP;
	plug->ChannelReleaseSLP = RP_ChannelReleaseSLP;

	GF_SAFEALLOC(priv, RTPClient);
	priv->sessions = gf_list_new();
	priv->channels = gf_list_new();

	plug->priv = priv;

	priv->time_out = 30000;
	priv->mx = gf_mx_new("RTPDemux");
	priv->th = gf_th_new("RTPDemux");

	return plug;
}
Example #7
File: gapi.cpp  Project: bigbensk/gpac
static void *NewGAPIVideoOutput()
{
	GAPIPriv *priv;
	GF_VideoOutput *driv = (GF_VideoOutput *) gf_malloc(sizeof(GF_VideoOutput));
	memset(driv, 0, sizeof(GF_VideoOutput));
	GF_REGISTER_MODULE_INTERFACE(driv, GF_VIDEO_OUTPUT_INTERFACE, "GAPI Video Output", "gpac distribution")

	priv = (GAPIPriv *) gf_malloc(sizeof(GAPIPriv));
	memset(priv, 0, sizeof(GAPIPriv));
	priv->mx = gf_mx_new("GAPI");
	driv->opaque = priv;

#ifdef GPAC_USE_OGL_ES
	driv->hw_caps = GF_VIDEO_HW_OPENGL | GF_VIDEO_HW_OPENGL_OFFSCREEN | GF_VIDEO_HW_OPENGL_OFFSCREEN_ALPHA;
#endif
	/*rgb, yuv to do*/

	driv->Setup = GAPI_Setup;
	driv->Shutdown = GAPI_Shutdown;
	driv->Flush = GAPI_Flush;
	driv->ProcessEvent = GAPI_ProcessEvent;
	driv->Blit = NULL;
	driv->LockBackBuffer = GAPI_LockBackBuffer;
	driv->SetFullScreen = GAPI_SetFullScreen;
	return (void *)driv;
}
Example #8
void *New_FFMPEG_Demux()
{
	FFDemux *priv;
	GF_InputService *ffd = gf_malloc(sizeof(GF_InputService));
	memset(ffd, 0, sizeof(GF_InputService));

	GF_SAFEALLOC(priv, FFDemux);

	GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] Registering all ffmpeg plugins...\n") );
	/* register all codecs, demux and protocols */
	av_register_all();
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] Registering all ffmpeg plugins DONE.\n") );

	ffd->RegisterMimeTypes = FFD_RegisterMimeTypes;
	ffd->CanHandleURL = FFD_CanHandleURL;
	ffd->CloseService = FFD_CloseService;
	ffd->ConnectChannel = FFD_ConnectChannel;
	ffd->ConnectService = FFD_ConnectService;
	ffd->DisconnectChannel = FFD_DisconnectChannel;
	ffd->GetServiceDescriptor = FFD_GetServiceDesc;
	ffd->ServiceCommand = FFD_ServiceCommand;

	ffd->CanHandleURLInService = FFD_CanHandleURLInService;

	priv->thread = gf_th_new("FFMPEG Demux");
	priv->mx = gf_mx_new("FFMPEG Demux");

	GF_REGISTER_MODULE_INTERFACE(ffd, GF_NET_CLIENT_INTERFACE, "FFMPEG Demuxer", "gpac distribution");
	ffd->priv = priv;
	return ffd;
}
Example #9
File: media_manager.c  Project: Bevara/GPAC
GF_Err gf_term_init_scheduler(GF_Terminal *term, u32 threading_mode)
{
	term->mm_mx = gf_mx_new("MediaManager");
	term->codecs = gf_list_new();

	term->frame_duration = 33;
	switch (threading_mode) {
	case GF_TERM_THREAD_SINGLE:
		term->flags |= GF_TERM_SINGLE_THREAD;
		break;
	case GF_TERM_THREAD_MULTI:
		term->flags |= GF_TERM_MULTI_THREAD;
		break;
	default:
		break;
	}

	if (term->user->init_flags & GF_TERM_NO_DECODER_THREAD)
		return GF_OK;

	term->mm_thread = gf_th_new("MediaManager");
	term->flags |= GF_TERM_RUNNING;
	term->priority = GF_THREAD_PRIORITY_NORMAL;
	gf_th_run(term->mm_thread, MM_Loop, term);
	return GF_OK;
}
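gf_th_run takes a callback of type u32 (*)(void*), so MM_Loop must follow that signature; the sketch below is an assumption about its general shape (the real loop drives the registered codecs), shown only to illustrate how the mutex, flag and frame_duration set above would typically be used:

/* Skeleton only, not GPAC's actual MM_Loop: poll the codec list under mm_mx
   while the terminal is flagged as running, then pace with frame_duration. */
static u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *)par;
	while (term->flags & GF_TERM_RUNNING) {
		gf_mx_p(term->mm_mx);
		/* ... decode one pass over term->codecs ... */
		gf_mx_v(term->mm_mx);
		gf_sleep(term->frame_duration);
	}
	return 0;
}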
Example #10
GPAC_GenericController::GPAC_GenericController(PLT_CtrlPointReference& ctrlPoint, GF_UPnP *upnp)
{
	m_pUPnP = upnp;
	m_CtrlPoint = ctrlPoint;
	m_CtrlPoint->AddListener(this);
	m_ControlPointLock = gf_mx_new("GenericController");
	m_Devices = gf_list_new();
}
Example #11
File: saf.c  Project: erelh/gpac
GF_SAFMuxer *gf_saf_mux_new()
{
	GF_SAFMuxer *mux;
	GF_SAFEALLOC(mux, GF_SAFMuxer);
	mux->mx = gf_mx_new("SAF");
	mux->streams = gf_list_new();
	return mux;
}
Example #12
GPAC_MediaController::GPAC_MediaController(PLT_CtrlPointReference& ctrlPoint, GF_UPnP *upnp)
{
	m_MediaController = new PLT_MediaController(ctrlPoint, this);
	m_MediaBrowser = new PLT_MediaBrowser(ctrlPoint, this);

	m_MediaServers = gf_list_new();
	m_MediaRenderers = gf_list_new();
	m_ControlPointLock = gf_mx_new("AVControlPoint");
	m_pUPnP = upnp;
}
Example #13
void *SDL_NewVideo()
{
	SDLVidCtx *ctx;
	GF_VideoOutput *driv;
	
	driv = gf_malloc(sizeof(GF_VideoOutput));
	memset(driv, 0, sizeof(GF_VideoOutput));
	GF_REGISTER_MODULE_INTERFACE(driv, GF_VIDEO_OUTPUT_INTERFACE, "SDL Video Output", "gpac distribution");

	ctx = gf_malloc(sizeof(SDLVidCtx));
	memset(ctx, 0, sizeof(SDLVidCtx));
#ifdef	SDL_WINDOW_THREAD
	ctx->sdl_th = gf_th_new("SDLVideo");
#endif
	ctx->evt_mx = gf_mx_new("SDLEvents");
	
	driv->opaque = ctx;
	driv->Setup = SDLVid_Setup;
	driv->Shutdown = SDLVid_Shutdown;
	driv->SetFullScreen = SDLVid_SetFullScreen;
	driv->Flush = SDLVid_Flush;
	driv->ProcessEvent = SDLVid_ProcessEvent;
	/*no offscreen opengl with SDL*/
	driv->hw_caps |= GF_VIDEO_HW_OPENGL;

	/*no YUV hardware blitting in SDL (only overlays)*/
	driv->hw_caps |= GF_VIDEO_HW_HAS_YUV_OVERLAY | GF_VIDEO_HW_HAS_RGB | GF_VIDEO_HW_HAS_RGBA;
	driv->Blit = SDL_Blit;
	driv->LockBackBuffer = SDLVid_LockBackBuffer;
	driv->LockOSContext = NULL;

	/*color keying with overlays are not supported in SDL ...*/
#if 0
	/*get YUV overlay key*/
	opt = gf_modules_get_option((GF_BaseInterface *)driv, "Video", "OverlayColorKey");
	/*no set is the default*/
	if (!opt) {
		opt = "0101FE";
		gf_modules_set_option((GF_BaseInterface *)driv, "Video", "OverlayColorKey", "0101FE");
	}
	sscanf(opt, "%06x", &driv->overlay_color_key);
	if (driv->overlay_color_key) driv->overlay_color_key |= 0xFF000000;
	GF_LOG(GF_LOG_INFO, GF_LOG_MMIO, ("[SDL Out] YUV Overlays enabled - ColorKey enabled: %s (key %x)\n", 
									driv->overlay_color_key ? "Yes" : "No", driv->overlay_color_key
							));
#endif
#ifndef SDL_TEXTINPUTEVENT_TEXT_SIZE
	SDL_EnableUNICODE(1);
#else
	SDL_StartTextInput();
#endif /* SDL_TEXTINPUTEVENT_TEXT_SIZE */
	return driv;
}
Example #14
GF_TermExt *avr_new()
{
    GF_TermExt *dr;
    GF_AVRedirect *uir;
    dr = gf_malloc ( sizeof ( GF_TermExt ) );
    memset ( dr, 0, sizeof ( GF_TermExt ) );
    GF_REGISTER_MODULE_INTERFACE ( dr, GF_TERM_EXT_INTERFACE, "GPAC Output Recorder", "gpac distribution" );

    GF_SAFEALLOC ( uir, GF_AVRedirect );
    dr->process = avr_process;
    dr->udta = uir;
    uir->encodingMutex = gf_mx_new("RedirectAV_encodingMutex");
    assert( uir->encodingMutex);
    uir->frameMutex = gf_mx_new("RedirectAV_frameMutex");
    uir->encodingThread = gf_th_new("RedirectAV_EncodingThread");
    uir->audioEncodingThread = gf_th_new("RedirectAV_AudioEncodingThread");
    uir->encode = 1;
    uir->is_open = 0;
    uir->is_running = 0;
    return dr;
}
Example #15
File: clock.c  Project: erelh/gpac
GF_Clock *NewClock(GF_Terminal *term)
{
	GF_Clock *tmp;
	GF_SAFEALLOC(tmp, GF_Clock);
	if (!tmp) return NULL;
	tmp->mx = gf_mx_new("Clock");
	tmp->term = term;
	tmp->speed = FIX_ONE;
	if (term->play_state) tmp->Paused = 1;
	tmp->data_timeout = term->net_data_timeout;
	return tmp;
}
Example #16
GPAC_GenericDevice::GPAC_GenericDevice(const char* FriendlyName, const char *device_id)
	: PLT_DeviceHost("/", "", device_id ? device_id : "urn:schemas-upnp-org:device:GenericDevice:1", FriendlyName)
{
	m_pServices = gf_list_new();

#ifdef GPAC_HAS_SPIDERMONKEY
	run_proc = JSVAL_NULL;
	act_proc = JSVAL_NULL;
	obj = NULL;
	js_source = "";
	act_ref = NULL;
	m_pSema = NULL;
	m_pMutex = gf_mx_new("UPnP Generic Device");
#endif
}
Example #17
GF_AudioMixer *gf_mixer_new(struct _audio_render *ar)
{
	GF_AudioMixer *am;
	am = (GF_AudioMixer *) malloc(sizeof(GF_AudioMixer));
	if (!am) return NULL;
	memset(am, 0, sizeof(GF_AudioMixer));
	am->mx = gf_mx_new("AudioMix");	/* gf_mx_new takes a debug name, as in the other examples */
	am->sources = gf_list_new();
	am->isEmpty = 1;
	am->ar = ar;
	am->sample_rate = 44100;
	am->bits_per_sample = 16;
	am->nb_channels = 2;
	am->output = NULL;
	am->output_size = 0;
	return am;
}
Example #18
GF_EXPORT
GF_Ringbuffer * gf_ringbuffer_new(u32 sz)
{
  GF_Ringbuffer *rb;

  rb = gf_malloc (sizeof (GF_Ringbuffer));
  if (sz % 2 != 0)
    sz++;
  rb->size = sz;
  rb->size_mask = rb->size;
  rb->size_mask -= 1;
  rb->write_ptr = 0;
  rb->read_ptr = 0;
  rb->buf = gf_malloc (rb->size);
  rb->mx = gf_mx_new("RingBufferMutex");
  return rb;
}
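Wrapping read_ptr and write_ptr with size_mask instead of a modulo only works when the buffer size is a power of two; the rounding above merely makes sz even, so callers are presumably expected to pass a power-of-two size. A tiny illustrative helper (not part of the snippet) showing how the mask would be applied:

/* Illustrative only: map a running position to a buffer index.
   Valid only if rb->size is a power of two (e.g. 4096 -> mask 0x0FFF). */
static u32 ringbuffer_index(GF_Ringbuffer *rb, u32 pos)
{
	return pos & rb->size_mask;
}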
Example #19
File: module.c  Project: kasrinat/gpac
GF_EXPORT
GF_ModuleManager *gf_modules_new(const char *directory, GF_Config *config)
{
	GF_ModuleManager *tmp;
	u32 loadedModules;
	const char *opt;
	u32 num_dirs = 0;

	if (!config) return NULL;

	/* Try to resolve directory from config file */
	GF_SAFEALLOC(tmp, GF_ModuleManager);
	if (!tmp) return NULL;
	tmp->cfg = config;
	tmp->mutex = gf_mx_new("Module Manager");
	gf_modules_get_module_directories(tmp, &num_dirs);

	/* Initialize module list */
	tmp->plug_list = gf_list_new();
	if (!tmp->plug_list) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("OUT OF MEMORY, cannot create list of modules !!!\n"));
		gf_free(tmp);
		return NULL;
	}
	tmp->plugin_registry = gf_list_new();
	if (!tmp->plugin_registry) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("OUT OF MEMORY, cannot create list of static module registers !!!\n"));
		gf_list_del(tmp->plug_list);
		gf_free(tmp);
		return NULL;
	}

	opt = gf_cfg_get_key(config, "Systems", "ModuleUnload");
	if (opt && !strcmp(opt, "no")) {
		tmp->no_unload = GF_TRUE;
	}
#ifndef GPAC_MODULE_CUSTOM_LOAD
	load_all_modules(tmp);
#endif
	loadedModules = gf_modules_refresh(tmp);
	GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("Loaded %d modules from directory %s.\n", loadedModules, directory));

	return tmp;
}
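A hedged usage sketch of the function above, combining it with the config and module calls that appear in the later examples on this page (gf_cfg_init(NULL, ...) resolves the default config file, as in Example #26):

/* Sketch: open the default config, load the modules it points to, report the count, clean up. */
static void list_available_modules()
{
	Bool first_launch = GF_FALSE;
	GF_Config *cfg = gf_cfg_init(NULL, &first_launch);
	GF_ModuleManager *mods;
	if (!cfg) return;
	mods = gf_modules_new(gf_cfg_get_key(cfg, "General", "ModulesDirectory"), cfg);
	if (mods) {
		GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("%d modules available\n", gf_modules_get_count(mods)));
		gf_modules_del(mods);
	}
	gf_cfg_del(cfg);
}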
Example #20
void dc_circular_buffer_create(CircularBuffer *circular_buf, u32 size, LockMode mode, int max_num_consumers)
{
	u32 i;
	circular_buf->size = size;
	circular_buf->list = (Node*)gf_malloc(size * sizeof(Node));
	circular_buf->mode = mode;
	circular_buf->max_num_consumers = max_num_consumers;

	for (i=0; i<size; i++) {
		circular_buf->list[i].num_producers = 0;
		circular_buf->list[i].num_consumers = 0;
		circular_buf->list[i].num_consumers_accessed = 0;
		circular_buf->list[i].marked = 0;
		circular_buf->list[i].num_consumers_waiting = 0;
		circular_buf->list[i].consumers_semaphore = gf_sema_new(1000, 0);
		circular_buf->list[i].producers_semaphore = gf_sema_new(1000, 0);
		circular_buf->list[i].mutex = gf_mx_new("Circular Buffer Mutex");
	}
}
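No matching destroy is shown here; a hedged sketch of the symmetric teardown, assuming the same Node layout and using gf_sema_del / gf_mx_del as the counterparts of the calls above:

/* Sketch: release the per-node semaphores and mutexes, then the node array itself. */
void dc_circular_buffer_destroy(CircularBuffer *circular_buf)
{
	u32 i;
	for (i = 0; i < circular_buf->size; i++) {
		gf_sema_del(circular_buf->list[i].consumers_semaphore);
		gf_sema_del(circular_buf->list[i].producers_semaphore);
		gf_mx_del(circular_buf->list[i].mutex);
	}
	gf_free(circular_buf->list);
	circular_buf->list = NULL;
}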
Example #21
File: register.c  Project: erelh/gpac
int lock_call_back(void ** mutex, enum AVLockOp op) {
	switch (op) {
	case AV_LOCK_CREATE:
		*mutex = gf_mx_new("AVLIB callback mutex");
		break;
	case AV_LOCK_OBTAIN:
		gf_mx_p(*mutex);
		break;
	case AV_LOCK_RELEASE:
		gf_mx_v(*mutex);
		break;
	case AV_LOCK_DESTROY:
		gf_mx_del(*mutex);
		break;
	}

	return 0;

}
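This callback follows the lock-manager contract of older libavcodec releases; it only takes effect once it is registered, typically right after av_register_all(). A minimal sketch (av_lockmgr_register was later deprecated and removed from FFmpeg, so this applies to the versions this module targets):

/* Install the GPAC-mutex-backed lock manager into libavcodec (older FFmpeg API). */
static void setup_ffmpeg_locking()
{
	if (av_lockmgr_register(&lock_call_back) < 0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] failed to register the lock manager\n"));
	}
}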
Example #22
GF_DownloadSession *gf_dm_sess_new(GF_DownloadManager *dm, char *url, u32 dl_flags,
									  gf_dm_user_io user_io,
									  void *usr_cbk,
									  GF_Err *e)
{
	GF_DownloadSession *sess;

	*e = GF_OK;
	if (gf_dm_is_local(dm, url)) return NULL;

	if (!gf_dm_can_handle_url(dm, url)) {
		*e = GF_NOT_SUPPORTED;
		return NULL;
	}
	if (!user_io) {
		*e = GF_BAD_PARAM;
		return NULL;
	}


	sess = (GF_DownloadSession *)malloc(sizeof(GF_DownloadSession));
	memset((void *)sess, 0, sizeof(GF_DownloadSession));
	sess->flags = dl_flags;
	sess->user_proc = user_io;
	sess->usr_cbk = usr_cbk;
	sess->dm = dm;
	gf_list_add(dm->sessions, sess);

	*e = gf_dm_setup_from_url(sess, url);
	if (*e) {
		gf_dm_sess_del(sess);
		return NULL;
	}
	if (!(sess->flags & GF_NETIO_SESSION_NOT_THREADED) ) {
		sess->th = gf_th_new("DownloadSession");	/* current gf_th_new/gf_mx_new take a debug name */
		sess->mx = gf_mx_new("DownloadSession");
		gf_th_run(sess->th, gf_dm_session_thread, sess);
	}
	sess->num_retry = SESSION_RETRY_COUNT;
	return sess;
}
Example #23
void *New_FFMPEG_Demux()
{
	GF_InputService *ffd;
	FFDemux *priv;
	GF_SAFEALLOC(ffd, GF_InputService);
	if (!ffd) return NULL;
	GF_SAFEALLOC(priv, FFDemux);
	if (!priv) {
		gf_free(ffd);
		return NULL;
	}
	GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] Registering all ffmpeg plugins...\n") );
	/* register all codecs, demux and protocols */
	av_register_all();
	avformat_network_init();
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] Registering all ffmpeg plugins DONE.\n") );

	ffd->RegisterMimeTypes = FFD_RegisterMimeTypes;
	ffd->CanHandleURL = FFD_CanHandleURL;
	ffd->CloseService = FFD_CloseService;
	ffd->ConnectChannel = FFD_ConnectChannel;
	ffd->ConnectService = FFD_ConnectService;
	ffd->DisconnectChannel = FFD_DisconnectChannel;
	ffd->GetServiceDescriptor = FFD_GetServiceDesc;
	ffd->ServiceCommand = FFD_ServiceCommand;

	ffd->CanHandleURLInService = FFD_CanHandleURLInService;

	priv->thread = gf_th_new("FFMPEG Demux");
	priv->mx = gf_mx_new("FFMPEG Demux");
	if (!priv->thread || !priv->mx) {
		if (priv->thread) gf_th_del(priv->thread);
		if (priv->mx) gf_mx_del(priv->mx);
		gf_free(priv);
		return NULL;
	}

	GF_REGISTER_MODULE_INTERFACE(ffd, GF_NET_CLIENT_INTERFACE, "FFMPEG Demuxer", "gpac distribution");
	ffd->priv = priv;
	return ffd;
}
Example #24
File: ios_mpegv.c  Project: erelh/gpac
GF_InputSensorDevice* NewMPEGVSInputSesor()
{
	MPEGVSensorContext* ctx = NULL;
	GF_InputSensorDevice* driv = NULL;

	driv = (GF_InputSensorDevice *) gf_malloc(sizeof(GF_InputSensorDevice));
	memset(driv, 0, sizeof(GF_InputSensorDevice));
	GF_REGISTER_MODULE_INTERFACE(driv, GF_INPUT_DEVICE_INTERFACE, "MPEG-V Sensors Input Module", "gpac distribution");

	driv->RegisterDevice = MPEGVS_RegisterDevice;
	driv->Start = MPEGVS_Start;
	driv->Stop = MPEGVS_Stop;

	ctx = (MPEGVSensorContext*) gf_malloc (sizeof(MPEGVSensorContext));
	memset(ctx, 0, sizeof(MPEGVSensorContext));
	ctx->mx = gf_mx_new(NULL);

	driv->udta = (void*)ctx;

	return driv;
}
Example #25
GF_AbstractTSMuxer * ts_amux_new(GF_AVRedirect * avr, u32 videoBitrateInBitsPerSec, u32 width, u32 height, u32 audioBitRateInBitsPerSec) {
	GF_AbstractTSMuxer * ts = gf_malloc( sizeof(GF_AbstractTSMuxer));
	memset( ts, 0, sizeof( GF_AbstractTSMuxer));
	ts->oc = avformat_alloc_context();
	ts->destination = avr->destination;
	av_register_all();
	ts->oc->oformat = GUESS_FORMAT(NULL, avr->destination, NULL);
	if (!ts->oc->oformat)
		ts->oc->oformat = GUESS_FORMAT("mpegts", NULL, NULL);
	assert( ts->oc->oformat);
#if REDIRECT_AV_AUDIO_ENABLED
	ts->audio_st = av_new_stream(ts->oc, avr->audioCodec->id);
	{
		AVCodecContext * c = ts->audio_st->codec;
		c->codec_id = avr->audioCodec->id;
		c->codec_type = AVMEDIA_TYPE_AUDIO;
		/* put sample parameters */
		c->sample_fmt = SAMPLE_FMT_S16;
		c->bit_rate = audioBitRateInBitsPerSec;
		c->sample_rate = avr->audioSampleRate;
		c->channels = 2;
		c->time_base.num = 1;
		c->time_base.den = 1000;
		// some formats want stream headers to be separate
		if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
			c->flags |= CODEC_FLAG_GLOBAL_HEADER;
	}
#endif

	ts->video_st = av_new_stream(ts->oc, avr->videoCodec->id);
	{
		AVCodecContext * c = ts->video_st->codec;
		c->codec_id = avr->videoCodec->id;
		c->codec_type = AVMEDIA_TYPE_VIDEO;

		/* put sample parameters */
		c->bit_rate = videoBitrateInBitsPerSec;
		/* resolution must be a multiple of two */
		c->width = width;
		c->height = height;
		/* time base: this is the fundamental unit of time (in seconds) in terms
		   of which frame timestamps are represented. for fixed-fps content,
		   timebase should be 1/framerate and timestamp increments should be
		   identically 1. */
		c->time_base.den = STREAM_FRAME_RATE;
		c->time_base.num = 1;
		c->gop_size = 12; /* emit one intra frame every twelve frames at most */
		c->pix_fmt = STREAM_PIX_FMT;
		if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
			/* just for testing, we also add B frames */
			c->max_b_frames = 2;
		}
		if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
			/* Needed to avoid using macroblocks in which some coeffs overflow.
			   This does not happen with normal video, it just happens here as
			   the motion of the chroma plane does not match the luma plane. */
			c->mb_decision=2;
		}
		// some formats want stream headers to be separate
		if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
			c->flags |= CODEC_FLAG_GLOBAL_HEADER;

	}
	//av_set_pts_info(ts->audio_st, 33, 1, audioBitRateInBitsPerSec);

#ifndef AVIO_FLAG_WRITE
	/* set the output parameters (must be done even if no
	   parameters). */
	if (av_set_parameters(ts->oc, NULL) < 0) {
		fprintf(stderr, "Invalid output format parameters\n");
		return NULL;
	}
#endif

	dump_format(ts->oc, 0, avr->destination, 1);
	GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] DUMPING to %s...\n", ts->destination));

#if (LIBAVCODEC_VERSION_MAJOR<55)
	if (avcodec_open(ts->video_st->codec, avr->videoCodec) < 0) {
#else
	if (avcodec_open2(ts->video_st->codec, avr->videoCodec, NULL) < 0) {
#endif
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open video codec\n"));
		return NULL;
	}
#if REDIRECT_AV_AUDIO_ENABLED
#if (LIBAVCODEC_VERSION_MAJOR<55)
	if (avcodec_open(ts->audio_st->codec, avr->audioCodec) < 0) {
#else
	if (avcodec_open2(ts->audio_st->codec, avr->audioCodec, NULL) < 0) {
#endif
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open audio codec\n"));
		return NULL;
	}
	ts->audioMx = gf_mx_new("TS_AudioMx");
#endif
	ts->videoMx = gf_mx_new("TS_VideoMx");
	ts->tsEncodingThread = gf_th_new("ts_interleave_thread_run");
	ts->encode = 1;
	ts->audioPackets = NULL;
	ts->videoPackets = NULL;
	gf_th_run(ts->tsEncodingThread, ts_interleave_thread_run, ts);
	return ts;
}

void ts_amux_del(GF_AbstractTSMuxer * muxerToDelete) {
	if (!muxerToDelete)
		return;
	muxerToDelete->encode = 0;
	gf_sleep(100);
	gf_th_stop(muxerToDelete->tsEncodingThread);
	muxerToDelete->tsEncodingThread = NULL;
#if REDIRECT_AV_AUDIO_ENABLED
	gf_mx_del(muxerToDelete->audioMx);
	muxerToDelete->audioMx = NULL;
#endif
	gf_mx_del(muxerToDelete->videoMx);
	muxerToDelete->videoMx = NULL;
	if (muxerToDelete->video_st) {
		avcodec_close(muxerToDelete->video_st->codec);
		muxerToDelete->video_st = NULL;
	}
#if REDIRECT_AV_AUDIO_ENABLED
	if (muxerToDelete->audio_st) {
		avcodec_close(muxerToDelete->audio_st->codec);
		muxerToDelete->audio_st = NULL;
	}
#endif
	/* write the trailer, if any.  the trailer must be written
	 * before you close the CodecContexts open when you wrote the
	 * header; otherwise write_trailer may try to use memory that
	 * was freed on av_codec_close() */
	if (muxerToDelete->oc) {
		u32 i;
		/* free the streams */
		for (i = 0; i < muxerToDelete->oc->nb_streams; i++) {
			av_freep(&muxerToDelete->oc->streams[i]->codec);
			av_freep(&muxerToDelete->oc->streams[i]);
		}

		/* free the stream */
		av_free(muxerToDelete->oc);
		muxerToDelete->oc = NULL;
	}
}

Bool ts_encode_audio_frame(GF_AbstractTSMuxer * ts, uint8_t * data, int encoded, u64 pts) {
	AVPacketList *pl;
	AVPacket * pkt;
	if (!ts->encode)
		return 1;
	pl = gf_malloc(sizeof(AVPacketList));
	pl->next = NULL;
	pkt = &(pl->pkt);
	av_init_packet(pkt);
	assert( ts->audio_st);
	assert( ts->audio_st->codec);
	pkt->flags = 0;
	if (ts->audio_st->codec->coded_frame) {
		if (ts->audio_st->codec->coded_frame->key_frame)
			pkt->flags = AV_PKT_FLAG_KEY;
		if (ts->audio_st->codec->coded_frame->pts != AV_NOPTS_VALUE) {
			pkt->pts = av_rescale_q(ts->audio_st->codec->coded_frame->pts, ts->audio_st->codec->time_base, ts->audio_st->time_base);
		} else {
			if (pts == AV_NOPTS_VALUE)
				pkt->pts = AV_NOPTS_VALUE;
			else {
				pkt->pts = av_rescale_q(pts, ts->audio_st->codec->time_base, ts->audio_st->time_base);
			}
		}
	} else {
		if (pts == AV_NOPTS_VALUE)
			pkt->pts = AV_NOPTS_VALUE;
		else
			pkt->pts = av_rescale_q(pts, ts->audio_st->codec->time_base, ts->audio_st->time_base);
	}
	pkt->stream_index= ts->audio_st->index;
	pkt->data = data;
	pkt->size = encoded;
	//fprintf(stderr, "AUDIO PTS="LLU" was: "LLU" (%p)\n", pkt->pts, pts, pl);
	gf_mx_p(ts->audioMx);
	if (!ts->audioPackets)
		ts->audioPackets = pl;
	else {
		AVPacketList * px = ts->audioPackets;
		while (px->next)
			px = px->next;
		px->next = pl;
	}
	gf_mx_v(ts->audioMx);
	return 0;
}

Bool ts_encode_video_frame(GF_AbstractTSMuxer* ts, uint8_t* data, int encoded) {
	AVPacketList *pl;
	AVPacket * pkt;
	if (!ts->encode)
		return 1;
	pl = gf_malloc(sizeof(AVPacketList));
	pl->next = NULL;
	pkt = &(pl->pkt);

	av_init_packet(pkt);

	if (ts->video_st->codec->coded_frame->pts != AV_NOPTS_VALUE) {
		//pkt->pts= av_rescale_q(ts->video_st->codec->coded_frame->pts, ts->video_st->codec->time_base, ts->video_st->time_base);
		pkt->pts = ts->video_st->codec->coded_frame->pts * ts->video_st->time_base.den / ts->video_st->time_base.num / 1000;
		//pkt->pts = ts->video_st->codec->coded_frame->pts;
	}
	if (ts->video_st->codec->coded_frame->key_frame)
		pkt->flags |= AV_PKT_FLAG_KEY;
	pkt->stream_index= ts->video_st->index;
	pkt->data= data;
	pkt->size= encoded;
	//fprintf(stderr, "VIDEO PTS="LLU" was: "LLU" (%p)\n", pkt->pts, ts->video_st->codec->coded_frame->pts, pl);
	gf_mx_p(ts->videoMx);
	if (!ts->videoPackets)
		ts->videoPackets = pl;
	else {
		AVPacketList * px = ts->videoPackets;
		while (px->next)
			px = px->next;
		px->next = pl;
	}
	gf_mx_v(ts->videoMx);
	return 0;
}
Example #26
File: osmo4_view.cpp  Project: erelh/gpac
// -----------------------------------------------------------------------------
// COsmo4AppView::ConstructL()
// Symbian 2nd phase constructor can leave.
// -----------------------------------------------------------------------------
//
void COsmo4AppView::ConstructL( const TRect& aRect )
{
	const char *opt;
	Bool first_launch = 0;

#if defined(__SERIES60_3X__)
	selector = CRemConInterfaceSelector::NewL();
	target = CRemConCoreApiTarget::NewL(*selector, *this);
	selector->OpenTargetL();
#endif

	// Create a window for this application view
	CreateWindowL();
	// Set the window size
	SetRect(aRect);
	// Draw
	ActivateL();

#ifndef GPAC_GUI_ONLY
	m_window = Window();
	m_session = CEikonEnv::Static()->WsSession();

	m_mx = gf_mx_new("Osmo4");

	//load config file
	m_user.config = gf_cfg_init(NULL, &first_launch);
	if (!m_user.config) {
		MessageBox("Cannot create GPAC Config file", "Fatal Error");
		User::Leave(KErrGeneral);
	}
	if (first_launch) {
		MessageBox("Osmo4", "Thank you for Installing");
	}

	/*load modules*/
	opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory");
	m_user.modules = gf_modules_new(opt, m_user.config);
	if (!m_user.modules || !gf_modules_get_count(m_user.modules)) {
		MessageBox(m_user.modules ? "No modules available" : "Cannot create module manager", "Fatal Error");
		if (m_user.modules) gf_modules_del(m_user.modules);
		gf_cfg_del(m_user.config);
		User::Leave(KErrGeneral);
	}

	if (first_launch) {
		/*first launch, register all files ext*/
		for (u32 i=0; i<gf_modules_get_count(m_user.modules); i++) {
			GF_InputService *ifce = (GF_InputService *) gf_modules_load_interface(m_user.modules, i, GF_NET_CLIENT_INTERFACE);
			if (!ifce) continue;
			ifce->CanHandleURL(ifce, "test.test");
			gf_modules_close_interface((GF_BaseInterface *)ifce);
		}
	}

	/*we don't thread the terminal, i.e. apart from the audio renderer, media decoding and visual rendering are
	handled by the app process*/
	m_user.init_flags = GF_TERM_NO_VISUAL_THREAD | GF_TERM_NO_REGULATION;
	m_user.EventProc = GPAC_EventProc;
	m_user.opaque = this;
	m_user.os_window_handler = (void *) &m_window;
	m_user.os_display = (void *) &m_session;

	m_term = gf_term_new(&m_user);
	if (!m_term) {
		MessageBox("Cannot load GPAC terminal", "Fatal Error");
		gf_modules_del(m_user.modules);
		gf_cfg_del(m_user.config);
		User::Leave(KErrGeneral);
	}
	//MessageBox("GPAC terminal loaded", "Success !");

	/*ok set output size*/
	TSize s = m_window.Size();
	gf_term_set_size(m_term, s.iWidth, s.iHeight);


	/*start our periodic tick callback (every 33 ms)*/
	const TInt KTickInterval = 33000;
	m_pTimer = CPeriodic::NewL(CActive::EPriorityStandard);
	m_pTimer->Start(KTickInterval, KTickInterval, TCallBack(myTick, this));

	opt = gf_cfg_get_key(m_user.config, "General", "StartupFile");
	if (opt) gf_term_connect(m_term, opt);

#endif

}
Example #27
File: media_manager.c  Project: Bevara/GPAC
void gf_term_set_threading(GF_Terminal *term, u32 mode)
{
	u32 i;
	Bool thread_it, restart_it;
	CodecEntry *ce;

	switch (mode) {
	case GF_TERM_THREAD_SINGLE:
		if (term->flags & GF_TERM_SINGLE_THREAD) return;
		term->flags &= ~GF_TERM_MULTI_THREAD;
		term->flags |= GF_TERM_SINGLE_THREAD;
		break;
	case GF_TERM_THREAD_MULTI:
		if (term->flags & GF_TERM_MULTI_THREAD) return;
		term->flags &= ~GF_TERM_SINGLE_THREAD;
		term->flags |= GF_TERM_MULTI_THREAD;
		break;
	default:
		if (!(term->flags & (GF_TERM_MULTI_THREAD | GF_TERM_SINGLE_THREAD) ) ) return;
		term->flags &= ~GF_TERM_SINGLE_THREAD;
		term->flags &= ~GF_TERM_MULTI_THREAD;
		break;
	}

	gf_mx_p(term->mm_mx);


	i=0;
	while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
		thread_it = 0;
		/*free mode and the decoder wants threading: thread it*/
		if ((mode == GF_TERM_THREAD_FREE) && (ce->flags & GF_MM_CE_REQ_THREAD)) thread_it = 1;
		else if (mode == GF_TERM_THREAD_MULTI) thread_it = 1;

		if (thread_it && (ce->flags & GF_MM_CE_THREADED)) continue;
		if (!thread_it && !(ce->flags & GF_MM_CE_THREADED)) continue;

		restart_it = 0;
		if (ce->flags & GF_MM_CE_RUNNING) {
			restart_it = 1;
			ce->flags &= ~GF_MM_CE_RUNNING;
		}

		if (ce->flags & GF_MM_CE_THREADED) {
			/*wait for thread to die*/
			while (!(ce->flags & GF_MM_CE_DEAD)) gf_sleep(1);
			ce->flags &= ~GF_MM_CE_DEAD;
			gf_th_del(ce->thread);
			ce->thread = NULL;
			gf_mx_del(ce->mx);
			ce->mx = NULL;
			ce->flags &= ~GF_MM_CE_THREADED;
		} else {
			term->cumulated_priority -= ce->dec->Priority+1;
		}

		if (thread_it) {
			ce->flags |= GF_MM_CE_THREADED;
			ce->thread = gf_th_new(ce->dec->decio->module_name);
			ce->mx = gf_mx_new(ce->dec->decio->module_name);
		}

		if (restart_it) {
			ce->flags |= GF_MM_CE_RUNNING;
			if (ce->thread) {
				gf_th_run(ce->thread, RunSingleDec, ce);
				gf_th_set_priority(ce->thread, term->priority);
			} else {
				term->cumulated_priority += ce->dec->Priority+1;
			}
		}
	}
	gf_mx_v(term->mm_mx);
}
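For reference, switching modes at runtime is just a matter of calling the function above with one of the constants from Example #9; a minimal illustration (error paths omitted):

/* Move every decoder to its own thread, then later return to free/scheduler-driven mode. */
gf_term_set_threading(term, GF_TERM_THREAD_MULTI);
/* ... */
gf_term_set_threading(term, GF_TERM_THREAD_FREE);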
Example #28
File: media_manager.c  Project: Bevara/GPAC
void gf_term_add_codec(GF_Terminal *term, GF_Codec *codec)
{
	u32 i, count;
	Bool locked;
	Bool threaded;
	CodecEntry *cd;
	CodecEntry *ptr, *next;
	GF_CodecCapability cap;
	assert(codec);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Registering codec %s\n", codec->decio ? codec->decio->module_name : "RAW"));

	/*caution: the mutex can be grabbed by a decoder waiting for a mutex owned by the calling thread;
	this happens when several scene codecs are running concurrently and triggering play/pause on media*/
	locked = gf_mx_try_lock(term->mm_mx);

	cd = mm_get_codec(term->codecs, codec);
	if (cd) goto exit;

	GF_SAFEALLOC(cd, CodecEntry);
	cd->dec = codec;
	if (!cd->dec->Priority)
		cd->dec->Priority = 1;

	/*we force audio codecs to be threaded in free mode, so that we avoid waiting in the audio renderer if another decoder is locking the main mutex
	this can happen when the audio decoder is running late*/
	if (codec->type==GF_STREAM_AUDIO) {
		threaded = 1;
	} else {
		cap.CapCode = GF_CODEC_WANTS_THREAD;
		cap.cap.valueInt = 0;
		gf_codec_get_capability(codec, &cap);
		threaded = cap.cap.valueInt;
	}

	if (threaded) cd->flags |= GF_MM_CE_REQ_THREAD;


	if (term->flags & GF_TERM_MULTI_THREAD) {
		if ((codec->type==GF_STREAM_AUDIO) || (codec->type==GF_STREAM_VISUAL)) threaded = 1;
	} else if (term->flags & GF_TERM_SINGLE_THREAD) {
		threaded = 0;
	}
	if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA)
		threaded = 0;

	if (threaded) {
		cd->thread = gf_th_new(cd->dec->decio->module_name);
		cd->mx = gf_mx_new(cd->dec->decio->module_name);
		cd->flags |= GF_MM_CE_THREADED;
		gf_list_add(term->codecs, cd);
		goto exit;
	}

	//add codec 1- per priority 2- per type, audio being first
	//priorities inherits from Systems (5bits) so range from 0 to 31
	//we sort from MAX to MIN
	count = gf_list_count(term->codecs);
	for (i=0; i<count; i++) {
		ptr = (CodecEntry*)gf_list_get(term->codecs, i);
		if (ptr->flags & GF_MM_CE_THREADED) continue;

		//higher priority, continue
		if (ptr->dec->Priority > codec->Priority) continue;

		//same priority, put audio first
		if (ptr->dec->Priority == codec->Priority) {
			//we insert audio (0x05) before video (0x04)
			if (ptr->dec->type < codec->type) {
				gf_list_insert(term->codecs, cd, i);
				goto exit;
			}
			//same prior, same type: insert after
			if (ptr->dec->type == codec->type) {
				if (i+1==count) {
					gf_list_add(term->codecs, cd);
				} else {
					gf_list_insert(term->codecs, cd, i+1);
				}
				goto exit;
			}
			//we insert video (0x04) after audio (0x05) if next is not audio
			//last one
			if (i+1 == count) {
				gf_list_add(term->codecs, cd);
				goto exit;
			}
			next = (CodecEntry*)gf_list_get(term->codecs, i+1);
			//different priority level, insert
			if ((next->flags & GF_MM_CE_THREADED) || (next->dec->Priority != codec->Priority)) {
				gf_list_insert(term->codecs, cd, i+1);
				goto exit;
			}
			//same priority level and at least one after : continue
			continue;
		}
		gf_list_insert(term->codecs, cd, i);
		goto exit;
	}
	//if we got here, first in list
	gf_list_add(term->codecs, cd);

exit:
	if (locked) gf_mx_v(term->mm_mx);
	return;
}
Example #29
File: cache.c  Project: olegloa/mp4box
DownloadedCacheEntry gf_cache_create_entry ( GF_DownloadManager * dm, const char * cache_directory, const char * url , u64 start_range, u64 end_range, Bool mem_storage)
{
	char tmp[_CACHE_TMP_SIZE];
	u8 hash[_CACHE_HASH_SIZE];
	int sz;
	char ext[_CACHE_MAX_EXTENSION_SIZE];
	DownloadedCacheEntry entry = NULL;
	if ( !dm || !url || !cache_directory) {
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK,
		       ("[CACHE] gf_cache_create_entry :%d, dm=%p, url=%s cache_directory=%s, aborting.\n", __LINE__, dm, url, cache_directory));
		return entry;
	}
	sz = (u32) strlen ( url );
	if ( sz > _CACHE_TMP_SIZE )
	{
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK,
		       ("[CACHE] gf_cache_create_entry:%d : ERROR, URL is too long (%d chars), more than %d chars.\n", __LINE__, sz, _CACHE_TMP_SIZE ));
		return entry;
	}
	tmp[0] = '\0';
	/*generate hash of the full url*/
	if (start_range && end_range) {
		sprintf(tmp, "%s_"LLD"-"LLD, url, start_range, end_range );
	} else {
		strcpy ( tmp, url );
	}
	gf_sha1_csum ((u8*) tmp, (u32) strlen ( tmp ), hash );
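	/* gf_sha1_csum produces a 20-byte SHA-1 digest; the loop below writes it out as
	   40 hexadecimal characters, which is what the assert on _CACHE_HASH_SIZE*2 verifies */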
	tmp[0] = 0;
	{
		int i;
		for ( i=0; i<20; i++ )
		{
			char t[3];
			t[2] = 0;
			sprintf ( t, "%02X", hash[i] );
			strcat ( tmp, t );
		}
	}
	assert ( strlen ( tmp ) == (_CACHE_HASH_SIZE * 2) );

	GF_SAFEALLOC(entry, struct __DownloadedCacheEntryStruct);

	if ( !entry ) {
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("gf_cache_create_entry:%d : OUT of memory !\n", __LINE__));
		return NULL;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, entry=%p\n", __LINE__, entry));

	entry->url = gf_strdup ( url );
	entry->hash = gf_strdup ( tmp );

	entry->memory_stored = mem_storage;

	entry->cacheSize = 0;
	entry->contentLength = 0;
	entry->serverETag = NULL;
	entry->diskETag = NULL;
	entry->flags = NO_VALIDATION;
	entry->validity = 0;
	entry->diskLastModified = NULL;
	entry->serverLastModified = NULL;
	entry->dm = dm;
	entry->range_start = start_range;
	entry->range_end = end_range;

#ifdef ENABLE_WRITE_MX
	{
		char name[1024];
		snprintf(name, sizeof(name), "CachedEntryWriteMx=%p, url=%s", (void*) entry, url);
		entry->write_mutex = gf_mx_new(name);
		assert(entry->write_mutex);
	}
#endif

	entry->deletableFilesOnDelete = 0;
	entry->write_session = NULL;
	entry->sessions = gf_list_new();

	if (entry->memory_stored) {
		entry->cache_filename = gf_malloc ( strlen ("gmem://") + 8 + strlen("@") + 16 + 1);
	} else {
		/* Sizeof cache directory + hash + possible extension */
		entry->cache_filename = gf_malloc ( strlen ( cache_directory ) + strlen(cache_file_prefix) + strlen(tmp) + _CACHE_MAX_EXTENSION_SIZE + 1);
	}

	if ( !entry->hash || !entry->url || !entry->cache_filename || !entry->sessions)
	{
		GF_Err err;
		/* Probably out of memory */
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, aborting due to OUT of MEMORY !\n", __LINE__));
		err = gf_cache_delete_entry ( entry );
		assert ( err == GF_OK );
		return NULL;
	}

	if (entry->memory_stored) {
		sprintf(entry->cache_filename, "gmem://%d@%p", entry->contentLength, entry->mem_storage);
		return entry;
	}


	tmp[0] = '\0';
	strcpy ( entry->cache_filename, cache_directory );
	strcat( entry->cache_filename, cache_file_prefix );
	strcat ( entry->cache_filename, entry->hash );
	strcpy ( tmp, url );

	{
		char * parser;
		parser = strrchr ( tmp, '?' );
		if ( parser )
			parser[0] = '\0';
		parser = strrchr ( tmp, '#' );
		if ( parser )
			parser[0] = '\0';
		parser = strrchr ( tmp, '.' );
		if ( parser && ( strlen ( parser ) < _CACHE_MAX_EXTENSION_SIZE ) )
			strncpy(ext, parser, _CACHE_MAX_EXTENSION_SIZE);
		else
			strncpy(ext, default_cache_file_suffix, _CACHE_MAX_EXTENSION_SIZE);
		assert (strlen(ext));
		strcat( entry->cache_filename, ext);
	}
	tmp[0] = '\0';
	strcpy( tmp, cache_file_prefix);
	strcat( tmp, entry->hash );
	strcat( tmp , ext);
	strcat ( tmp, cache_file_info_suffix );
	entry->properties = gf_cfg_force_new ( cache_directory, tmp );
	if ( !entry->properties )
	{
		GF_Err err;
		/* OUT of memory ? */
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, aborting due to OUT of MEMORY !\n", __LINE__));
		err = gf_cache_delete_entry ( entry );
		assert ( err == GF_OK );
		return NULL;
	}
	gf_cache_set_etag_on_disk(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_ETAG));
	gf_cache_set_etag_on_server(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_ETAG));
	gf_cache_set_mime_type(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_MIME_TYPE));
	gf_cache_set_last_modified_on_disk(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_LAST_MODIFIED));
	gf_cache_set_last_modified_on_server(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_LAST_MODIFIED));
	{
		const char * keyValue = gf_cfg_get_key ( entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_URL );
		if ( keyValue == NULL || stricmp ( url, keyValue ) )
			entry->flags |= CORRUPTED;

		keyValue = gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_RANGE);
		if (keyValue) {
			u64 s, e;
			sscanf(keyValue, LLD"-"LLD, &s, &e);
			/*mark as corrupted if not the same range (we don't support this for the time being...)*/
			if ((s!=entry->range_start) || (e!=entry->range_end))
				entry->flags |= CORRUPTED;
		}
	}
	gf_cache_check_if_cache_file_is_corrupted(entry);

	return entry;
}
Example #30
//-------------------------------
// dir should end with /
int CNativeWrapper::init(JNIEnv * env, void * bitmap, jobject * callback, int width, int height, const char * cfg_dir, const char * modules_dir, const char * cache_dir, const char * font_dir, const char * urlToLoad) {
	LOGI("Initializing GPAC with URL=%s...", urlToLoad);
	strcpy(m_cfg_dir, cfg_dir);
	strcpy(m_modules_dir, modules_dir);
	strcpy(m_cache_dir, cache_dir);
	strcpy(m_font_dir, font_dir);

	char m_cfg_filename[GF_MAX_PATH];
	strcpy(m_cfg_filename, m_cfg_dir);
	strcat(m_cfg_filename, "GPAC.cfg");

	int m_Width = width;
	int m_Height = height;

	int first_launch = 0;
	const char *opt;

	m_window = env;
	m_session = bitmap;
	if (!mainJavaEnv)
		mainJavaEnv = (JavaEnvTh *) gf_malloc(sizeof(JavaEnvTh));
	memset(mainJavaEnv, 0, sizeof(JavaEnvTh));
	setJavaEnv(mainJavaEnv, env, env->NewGlobalRef(*callback));
	if (pthread_setspecific( jni_thread_env_key, mainJavaEnv)) {
		LOGE("Failed to set specific thread data to jni_thread_env_key=%p for main thread !", jni_thread_env_key);
	}

	m_mx = gf_mx_new("Osmo4");

	//load config file
	LOGI("Loading User Config %s...", "GPAC.cfg");
	m_user.config = gf_cfg_force_new(cfg_dir, "GPAC.cfg");
	gf_set_progress_callback(this, Osmo4_progress_cbk);

	opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory");
	if (!opt) {
		FILE * fstart;
		char msg[256];
		LOGI("First launch, initializing new Config %s...", "GPAC.cfg");
		/*hardcode module directory*/
		gf_cfg_set_key(m_user.config, "Downloader", "CleanCache", "yes");
		/*startup file*/
		snprintf(msg, 256, "%sgui/gui.bt", cfg_dir);
		fstart = fopen(msg, "r");
		if (fstart) {
			fclose(fstart);
			gf_cfg_set_key(m_user.config, "General", "StartupFile", msg);
		} else {
			gf_cfg_set_key(m_user.config, "General", "#StartupFile", msg);
		}
		gf_cfg_set_key(m_user.config, "GUI", "UnhideControlPlayer", "1");
		/*setup UDP traffic autodetect*/
		gf_cfg_set_key(m_user.config, "Network", "AutoReconfigUDP", "yes");
		gf_cfg_set_key(m_user.config, "Network", "UDPTimeout", "10000");
		gf_cfg_set_key(m_user.config, "Network", "BufferLength", "3000");
		gf_cfg_set_key(m_user.config, "Compositor", "TextureTextMode", "Default");
		//gf_cfg_set_key(m_user.config, "Compositor", "FrameRate", "30");
		gf_cfg_set_key(m_user.config, "Audio", "ForceConfig", "no");
		gf_cfg_set_key(m_user.config, "Audio", "NumBuffers", "1");
		gf_cfg_set_key(m_user.config, "FontEngine", "FontReader", "ft_font");
	}
	/* All of this has to be done for every instance */
	gf_cfg_set_key(m_user.config, "General", "ModulesDirectory", modules_dir ? modules_dir : GPAC_MODULES_DIR);
	gf_cfg_set_key(m_user.config, "General", "CacheDirectory", cache_dir ? cache_dir : GPAC_CACHE_DIR);
	gf_cfg_set_key(m_user.config, "General", "LastWorkingDir", cfg_dir);
	gf_cfg_set_key(m_user.config, "FontEngine", "FontDirectory", GPAC_FONT_DIR);
	gf_cfg_set_key(m_user.config, "Video", "DriverName", "Android Video Output");
	gf_cfg_set_key(m_user.config, "Audio", "DriverName", "Android Audio Output");

	opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory");
	LOGI("loading modules in directory %s...", opt);
	m_user.modules = gf_modules_new(opt, m_user.config);
	if (!m_user.modules || !gf_modules_get_count(m_user.modules)) {
		LOGE("No modules found in directory %s !", opt);
		if (m_user.modules)
			gf_modules_del(m_user.modules);
		gf_cfg_del(m_user.config);
		m_user.config = NULL;
		return Quit(KErrGeneral);
	}

	/*we don't thread the visual compositor to be able to minimize the app and still have audio running*/
	m_user.init_flags = GF_TERM_NO_COMPOSITOR_THREAD;
	m_user.opaque = this;

	m_user.os_window_handler = m_window;
	m_user.os_display = m_session;
	m_user.EventProc = GPAC_EventProc;
	if (!javaVM) {
		LOGE("NO JAVA VM FOUND, m_user=%p !!!!\n", &m_user);
		return Quit(KErrGeneral);
	}

	LOGD("Loading GPAC terminal, m_user=%p...", &m_user);
	gf_sys_init(GF_FALSE);
	gf_fm_request_set_callback(this, on_fm_request);
	SetupLogs();
	m_term = gf_term_new(&m_user);
	if (!m_term) {
		LOGE("Cannot load GPAC Terminal with m_user=%p", m_user);
		MessageBox("Cannot load GPAC terminal", "Fatal Error", GF_SERVICE_ERROR);
		gf_modules_del(m_user.modules);
		m_user.modules = NULL;
		gf_cfg_del(m_user.config);
		m_user.config = NULL;
		return Quit(KErrGeneral);
	}

	//setAudioEnvironment(javaVM);

	LOGD("Setting term size m_user=%p...", &m_user);
	gf_term_set_size(m_term, m_Width, m_Height);

	opt = gf_cfg_get_key(m_user.config, "General", "StartupFile");
	LOGD("File loaded at startup=%s.", opt);

	if (!urlToLoad)
		urlToLoad = opt;
	if (urlToLoad) {
		LOGI("Connecting to %s...", urlToLoad);
		gf_term_connect(m_term, urlToLoad);
	}
	debug_log("init end");
	LOGD("Saving config file %s...\n", m_cfg_filename);
	gf_cfg_save(m_user.config);
	LOGI("Initialization complete, config file saved as %s.\n", m_cfg_filename);

	return 0;
}