Example #1
u32 RunSingleDec(void *ptr)
{
	GF_Err e;
	u32 time_left;
	CodecEntry *ce = (CodecEntry *) ptr;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaDecoder %d] Entering thread ID %d\n", ce->dec->odm->OD->objectDescriptorID, gf_th_id() ));

	while (ce->flags & GF_MM_CE_RUNNING) {
		time_left = gf_sys_clock();
		gf_mx_p(ce->mx);
		e = gf_codec_process(ce->dec, ce->dec->odm->term->frame_duration);
		if (e) gf_term_message(ce->dec->odm->term, ce->dec->odm->net_service->url, "Decoding Error", e);
		gf_mx_v(ce->mx);
		time_left = gf_sys_clock() - time_left;


		/*no priority boost this way for systems codecs, priority is dynamically set by not releasing the 
		graph when late and moving on*/
		if (!ce->dec->CB || (ce->dec->CB->UnitCount == ce->dec->CB->Capacity)) 
			ce->dec->PriorityBoost = 0;

		/*while the priority boost is on, don't sleep*/
		if (ce->dec->PriorityBoost) continue;

		if (time_left) {
			while (time_left > ce->dec->odm->term->frame_duration) time_left -= ce->dec->odm->term->frame_duration;
			gf_sleep(time_left);
		} else {
			gf_sleep(ce->dec->odm->term->frame_duration);
		}
	}
	ce->flags |= GF_MM_CE_DEAD;
	return 0;
}
Example #2
void m2ts_net_io(void *cbk, GF_NETIO_Parameter *param)
{
    GF_Err e;
    M2TSIn *m2ts = (M2TSIn *) cbk;
    assert( m2ts );
    /*handle service message*/
    gf_term_download_update_stats(m2ts->ts->dnload);

    if (param->msg_type==GF_NETIO_DATA_TRANSFERED) {
        e = GF_EOS;
    } else if (param->msg_type==GF_NETIO_DATA_EXCHANGE) {
        e = GF_OK;
        assert( m2ts->ts);
        if (param->size > 0) {
            /*process chunk*/
            assert(param->data);
            if (m2ts->network_buffer_size < param->size) {
                m2ts->network_buffer = gf_realloc(m2ts->network_buffer, sizeof(char) * param->size);
                m2ts->network_buffer_size = param->size;
            }
            assert( m2ts->network_buffer );
            memcpy(m2ts->network_buffer, param->data, param->size);
            gf_m2ts_process_data(m2ts->ts, m2ts->network_buffer, param->size);
        }

        /*if asked to regulate, wait until we get a play request*/
        if (m2ts->ts->run_state && !m2ts->ts->nb_playing && m2ts->ts->file_regulate) {
            while (m2ts->ts->run_state && !m2ts->ts->nb_playing && m2ts->ts->file_regulate) {
                gf_sleep(50);
                continue;
            }
        } else {
            gf_sleep(1);
        }
        if (!m2ts->ts->run_state) {
            if (m2ts->ts->dnload)
                gf_term_download_del( m2ts->ts->dnload );
            m2ts->ts->dnload = NULL;
        }

    } else {
        e = param->error;
    }

    switch (e) {
    case GF_EOS:
        if (!m2ts->is_connected) {
            gf_term_on_connect(m2ts->service, NULL, GF_OK);
        }
        return;
    case GF_OK:
        return;
    default:
        if (!m2ts->ts_setup) {
            m2ts->ts_setup = 1;
        }
        GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER,("[MPEGTSIn] : Error while getting data : %s\n", gf_error_to_string(e)));
        gf_term_on_connect(m2ts->service, NULL, e);
    }
}
Example #3
static Bool play_pause_seek_gettime(GF_Terminal *term, const char *fn)
{
	u32 time;
	const u32 target_time_in_ms = 10000;

	//play
	connected = GF_FALSE;
	gf_term_connect_from_time(term, fn, 0, GF_FALSE);
	while (!connected) gf_sleep(1);

	//seek to target_time_in_ms
	gf_term_play_from_time(term, target_time_in_ms, GF_FALSE);
	gf_term_set_option(term, GF_OPT_PLAY_STATE, GF_STATE_STEP_PAUSE);
	time = gf_term_get_time_in_ms(term);
	assert(time == target_time_in_ms);
	
	//seek to 0
	connected = GF_FALSE;
	gf_term_play_from_time(term, 0, GF_FALSE);
	while (!connected) gf_sleep(1);
	time = gf_term_get_time_in_ms(term);
	assert(time == 0);

	return GF_TRUE;
}
Example #4
static u32 gf_dm_session_thread(void *par)
{
	GF_DownloadSession *sess = (GF_DownloadSession *)par;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Downloader] Entering thread ID %d\n", gf_th_id() ));

	sess->flags &= ~GF_DOWNLOAD_SESSION_THREAD_DEAD;
	while (!sess->destroy) {
		gf_mx_p(sess->mx);
		if (sess->status >= GF_NETIO_DISCONNECTED) {
			gf_mx_v(sess->mx);
			break;
		}

		if (sess->status < GF_NETIO_CONNECTED) {
			gf_dm_connect(sess);
		} else {
			if (sess->status == GF_NETIO_WAIT_FOR_REPLY) gf_sleep(GF_WAIT_REPLY_SLEEP);
			sess->do_requests(sess);
		}
		gf_mx_v(sess->mx);
		gf_sleep(2);
	}
	/*destroy all sessions*/
	gf_dm_disconnect(sess);
	sess->status = GF_NETIO_STATE_ERROR;
	sess->last_error = 0;
	sess->flags |= GF_DOWNLOAD_SESSION_THREAD_DEAD;
	return 1;
}
Example #5
GF_Err EPOCAudio::Open(u32 sample_rate, Bool stereo)
{
	TInt res = 0;
	u32 count;
	TMdaAudioDataSettings::TAudioCaps epoc_sr;

	
	switch (sample_rate) {
	case 8000: epoc_sr = TMdaAudioDataSettings::ESampleRate8000Hz; break;
	case 11025: epoc_sr = TMdaAudioDataSettings::ESampleRate11025Hz; break;
	case 12000: epoc_sr = TMdaAudioDataSettings::ESampleRate12000Hz; break;
	case 16000: epoc_sr = TMdaAudioDataSettings::ESampleRate16000Hz; break;
	case 22050: epoc_sr = TMdaAudioDataSettings::ESampleRate22050Hz; break;
	case 24000: epoc_sr = TMdaAudioDataSettings::ESampleRate24000Hz; break;
	case 32000: epoc_sr = TMdaAudioDataSettings::ESampleRate32000Hz; break;
	case 44100: epoc_sr = TMdaAudioDataSettings::ESampleRate44100Hz; break;
	case 48000: epoc_sr = TMdaAudioDataSettings::ESampleRate48000Hz; break;
	default:
		return GF_NOT_SUPPORTED;
	}
	
	state = EPOC_AUDIO_INIT;

	gf_sleep(10);
	TRAP(res, m_stream = CMdaAudioOutputStream::NewL(*this) );
	if ((res!=KErrNone) || !m_stream) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOCAudio] Cannot create output audio stream\n"));
		return GF_IO_ERR;
	}
	m_stream->Open(&m_settings);

	/*wait for ack - if not getting it in 50*40 = 2sec, abort*/
	count = 50;
	while (count) {
		if (state == EPOC_AUDIO_OPEN) break;
		else if (state == EPOC_AUDIO_ERROR) {
			return GF_IO_ERR;
		}
		gf_sleep(40);

		TInt error;
		CActiveScheduler::RunIfReady(error, CActive::EPriorityIdle);
		count--;
	}
	if (state != EPOC_AUDIO_OPEN) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOCAudio] Failed to open sound device - is it present?\n"));
		return GF_NOT_SUPPORTED;
	}

	TRAP(res, m_stream->SetAudioPropertiesL(epoc_sr, stereo ? TMdaAudioDataSettings::EChannelsStereo : TMdaAudioDataSettings::EChannelsMono) );
	m_stream->SetPriority(EPriorityAbsoluteHigh, EMdaPriorityPreferenceTime );
	m_stream->SetVolume(init_vol * m_stream->MaxVolume() / 100);

	current_buffer = nb_buffers_queued = 0;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOCAudio] output audio stream ready - sample rate %d - %d channels\n", sample_rate, stereo ? 2 : 1));
	return GF_OK;
}
Example #6
u32 SDLVid_EventProc(void *par)
{
#if 0
	u32 last_mouse_move;
#endif
	Bool ret;
	GF_VideoOutput *dr = (GF_VideoOutput *)par;
	SDLVID();

	if (!SDLVid_InitializeWindow(ctx, dr)) {
		ctx->sdl_th_state = SDL_STATE_STOP_REQ;
	} else {
		ctx->sdl_th_state = SDL_STATE_RUNNING;
	}
	while (ctx->sdl_th_state==SDL_STATE_RUNNING) {
		/*after much testing: we must ensure nothing is using the event queue when resizing window.
		-- under X, it throws Xlib "unexpected async reply" under linux, therefore we don't wait events,
		we check for events and execute them if any
		-- under Win32, the SDL_SetVideoMode deadlocks, so we don't force exclusive access to events
		*/
#ifndef WIN32
		gf_mx_p(ctx->evt_mx);
#endif

		ret = SDLVid_ProcessMessageQueue(ctx, dr);

#ifndef WIN32
		gf_mx_v(ctx->evt_mx);
#endif

		/*looks like this hides the cursor for ever when switching back from FS*/
#if 0
		if (ctx->fullscreen && (last_mouse_move + 2000 < SDL_GetTicks()) ) {
			if (cursor_on) SDL_ShowCursor(0);
			cursor_on = 0;
		} else if (!cursor_on) {
			SDL_ShowCursor(1);
			cursor_on = 1;
		}
#endif
	
		/*QUIT message has been processed*/
		if (!ret) {
			ctx->sdl_th_state = SDL_STATE_STOP_REQ;
			break;
		}

		gf_sleep(2);
	}

	while (ctx->sdl_th_state == SDL_STATE_STOP_REQ)
		gf_sleep(10);

	SDLVid_ShutdownWindow(ctx);
	ctx->sdl_th_state = SDL_STATE_STOPPED;

	return 0;
}
Example #7
Bool player(char *fn)
{
	Bool ret = GF_FALSE;
	GF_Err e;
	GF_User user;
	GF_Terminal *term = NULL;
	GF_Config *cfg_file;
	memset(&user, 0, sizeof(user));
	cfg_file = gf_cfg_init(NULL, NULL);
	user.modules = gf_modules_new(NULL, cfg_file);
	e = manually_register_opengl_shared_module(user.modules);
	if (e != GF_OK) goto exit;
	set_cfg_option(cfg_file, "Video:DriverName=" OPENGL_SHARED_MODULE_NAME_STR);
	user.config = cfg_file;
	user.opaque = &user;
	user.EventProc = event_proc;
	term = gf_term_new(&user);
	if (!term) goto exit;

	gf_term_connect(term, fn);
	while (!connected) gf_sleep(1);
	while ( connected) {
		gf_term_process_step(term);
		check_keyboard(term);
	}
	ret = GF_TRUE;

exit:
	if (term) {
		gf_term_disconnect(term);
		gf_term_del(term);
	}
	gf_modules_del(user.modules);
	gf_cfg_del(cfg_file);

	return ret;
}
Example #8
void gf_term_remove_codec(GF_Terminal *term, GF_Codec *codec)
{
	u32 i;
	Bool locked;
	CodecEntry *ce;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Unregistering codec %s\n", codec->decio ? codec->decio->module_name : "RAW"));

	/*cf note above*/
	locked = gf_mx_try_lock(term->mm_mx);

	i=0;
	while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
		if (ce->dec != codec) continue;

		if (ce->thread) {
			if (ce->flags & GF_MM_CE_RUNNING) {
				ce->flags &= ~GF_MM_CE_RUNNING;
				while (! (ce->flags & GF_MM_CE_DEAD)) gf_sleep(10);
				ce->flags &= ~GF_MM_CE_DEAD;
			}
			gf_th_del(ce->thread);
			gf_mx_del(ce->mx);
		}
		if (locked) {
			gf_free(ce);
			gf_list_rem(term->codecs, i-1);
		} else {
			ce->flags |= GF_MM_CE_DISCARDED;
		}
		break;
	}
	if (locked) gf_mx_v(term->mm_mx);
	return;
}
Example #9
void gf_sc_ar_del(GF_AudioRenderer *ar)
{
	if (!ar) return;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Destroying audio renderer\n"));
	/*resume if paused (might cause deadlock otherwise)*/
	if (ar->Frozen) gf_sc_ar_control(ar, GF_SC_AR_RESUME);
	/*stop and shutdown*/
	if (ar->audio_out) {
		/*kill audio thread*/
		if (!ar->audio_out->SelfThreaded) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] stopping audio thread\n"));
			ar->audio_th_state = 2;
			while (ar->audio_th_state != 3) {
				gf_sleep(33);
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] audio thread stopped\n"));
			gf_th_del(ar->th);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] audio thread destroyed\n"));
		}
		/*lock access before shutdown and emulate a reconfig (avoids mixer lock from self-threaded modules)*/
		ar->need_reconfig = GF_TRUE;
		gf_mixer_lock(ar->mixer, GF_TRUE);
		if (ar->audio_out->SelfThreaded) ar->audio_out->Shutdown(ar->audio_out);
		gf_modules_close_interface((GF_BaseInterface *)ar->audio_out);
		ar->audio_out = NULL;
		gf_mixer_lock(ar->mixer, GF_FALSE);
	}
	gf_mixer_del(ar->mixer);

	if (ar->audio_listeners) gf_list_del(ar->audio_listeners);
	gf_afc_unload(&ar->filter_chain);
	gf_free(ar);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Renderer destroyed\n"));
}
Example #10
void gf_term_stop_scheduler(GF_Terminal *term)
{
	if (term->mm_thread) {
		u32 count, i;

		term->flags &= ~GF_TERM_RUNNING;
		while (!(term->flags & GF_TERM_DEAD) )
			gf_sleep(2);

		count = gf_list_count(term->codecs);
		for (i=0; i<count; i++) {
			CodecEntry *ce = gf_list_get(term->codecs, i);
			if (ce->flags & GF_MM_CE_DISCARDED) {
				gf_free(ce);
				gf_list_rem(term->codecs, i);
				count--;
				i--;
			}
		}

		assert(! gf_list_count(term->codecs));
		gf_th_del(term->mm_thread);
	}
	gf_list_del(term->codecs);
	gf_mx_del(term->mm_mx);
}
Example #11
static GF_Err FFD_CloseService(GF_InputService *plug)
{
	FFDemux *ffd = plug->priv;

	ffd->is_running = 0;

#ifndef FF_API_CLOSE_INPUT_FILE
	if (ffd->ctx) av_close_input_file(ffd->ctx);
#else
	if (ffd->ctx) avformat_close_input(&ffd->ctx);
#endif

	ffd->ctx = NULL;
	ffd->audio_ch = ffd->video_ch = NULL;
	ffd->audio_run = ffd->video_run = 0;

	if (ffd->dnload) {
		if (ffd->is_running) {
			while (!ffd->is_running) gf_sleep(1);
			ffd->is_running = 0;
		}
		gf_service_download_del(ffd->dnload);
		ffd->dnload = NULL;
	}
	if (ffd->buffer) gf_free(ffd->buffer);
	ffd->buffer = NULL;

	gf_service_disconnect_ack(ffd->service, NULL, GF_OK);
#ifdef FFMPEG_DUMP_REMOTE
	if (ffd->outdbg) fclose(ffd->outdbg);
#endif
	return GF_OK;
}
Example #12
u32 ThreadRun(void* param)
{
	struct __input_device * dr = (struct __input_device *)param;
	MPEGVSCTX;

	GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Start: %d\n", gf_th_id()));

	loadSensorControler(rc);

	if (!rc->env || !rc->sensCtrlObj)
		return 0;

	(*rc->env)->CallNonvirtualVoidMethod(rc->env, rc->sensCtrlObj, rc->sensCtrlClass, rc->startSensor, (s32)dr, rc->sensorAndroidType);

	while (!rc->stop)
		gf_sleep(10);

	GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Stop: %d\n", gf_th_id()));

	if (!rc->env)
		return 0;

	if ( rc->sensCtrlObj )
	{
		(*rc->env)->CallNonvirtualVoidMethod(rc->env, rc->sensCtrlObj, rc->sensCtrlClass, rc->stopSensor);

		(*rc->env)->DeleteLocalRef( rc->env, rc->sensCtrlObj );
	}

	unloadSensorController(rc);

	return 0;
}
Example #13
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
//	GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		u32 left;
		if (do_codec) left = MM_SimulationStep_Decoder(term);
		else left = term->frame_duration;
		
		if (do_scene) {
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor);
			time_taken = gf_sys_clock() - time_taken;
			if (left>time_taken) 
				left -= time_taken;
			else
				left = 0;
		}
		if (do_regulate)
			gf_sleep(left);
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
Example #14
u32 gf_ar_proc(void *p)
{
	GF_AudioRenderer *ar = (GF_AudioRenderer *) p;

	ar->audio_th_state = 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[AudioRender] Entering audio thread ID %d\n", gf_th_id() ));

	gf_mixer_lock(ar->mixer, 1);
	ar->need_reconfig = 1;
	gf_sc_ar_reconfig(ar);
	gf_mixer_lock(ar->mixer, 0);

	while (ar->audio_th_state == 1) {
		gf_mixer_lock(ar->mixer, 1);
		//do mix even if mixer is empty, otherwise we will push the same buffer over and over to the sound card
		if (ar->Frozen ) {
			gf_mixer_lock(ar->mixer, 0);
			gf_sleep(0);
		} else {
			if (ar->need_reconfig) gf_sc_ar_reconfig(ar);
			ar->audio_out->WriteAudio(ar->audio_out);
			gf_mixer_lock(ar->mixer, 0);
		}
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Exiting audio thread\n"));
	ar->audio_out->Shutdown(ar->audio_out);
	ar->audio_th_state = 3;
	return 0;
}
Example #15
GF_Err SDLVid_Setup(struct _video_out *dr, void *os_handle, void *os_display, u32 init_flags)
{
	SDLVID();
	/*we don't allow SDL hack, not stable enough*/
	//if (os_handle) SDLVid_SetHack(os_handle, 1);

	ctx->os_handle = os_handle;
	ctx->is_init = 0;
	ctx->output_3d_type = 0;
	ctx->force_alpha = (init_flags & GF_TERM_WINDOW_TRANSPARENT) ? 1 : 0;

	if (!SDLOUT_InitSDL()) return GF_IO_ERR;

#ifdef	SDL_WINDOW_THREAD
	ctx->sdl_th_state = SDL_STATE_STOPPED;
	gf_th_run(ctx->sdl_th, SDLVid_EventProc, dr);

	while (!ctx->sdl_th_state)
		gf_sleep(10);

	if (ctx->sdl_th_state==SDL_STATE_STOP_REQ) {
		SDLOUT_CloseSDL();
		ctx->sdl_th_state = SDL_STATE_STOPPED;
		return GF_IO_ERR;
	}
#else
	if (!SDLVid_InitializeWindow(ctx, dr)) {
		SDLOUT_CloseSDL();
		return GF_IO_ERR;
	}
#endif

	ctx->is_init = 1;
	return GF_OK;
}
Example #16
const char *gf_dm_sess_mime_type(GF_DownloadSession *sess)
{
	Bool go;
	u32 flags = sess->flags;
	sess->flags |= GF_NETIO_SESSION_NOT_CACHED;

	go = 1;
	while (go) {
		switch (sess->status) {
		/*setup download*/
		case GF_NETIO_SETUP:
			gf_dm_connect(sess);
			break;
		case GF_NETIO_WAIT_FOR_REPLY:
			gf_sleep(20);
			/*no break: fall through and run do_requests once the wait has elapsed*/
		case GF_NETIO_CONNECTED:
			sess->do_requests(sess);
			break;
		case GF_NETIO_DATA_EXCHANGE:
		case GF_NETIO_DISCONNECTED:
		case GF_NETIO_STATE_ERROR:
			go = 0;
			break;
		}
	}
	sess->flags = flags;
	if (sess->status==GF_NETIO_STATE_ERROR) return NULL;
	return sess->mime_type;
}
Example #17
void DS_WriteAudio(GF_AudioOutput *dr)
{
	u32 retry;
    DWORD in_play, cur_play;
	DSCONTEXT();

	/*wait for end of current play buffer*/
	if (ctx->pOutput->lpVtbl->GetCurrentPosition(ctx->pOutput, &in_play, NULL) != DS_OK ) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[DirectSound] error getting sound buffer positions\n"));
		return;
	}
	in_play = (in_play / ctx->buffer_size);
	retry = 6;
	while (retry) {
		if (ctx->pOutput->lpVtbl->GetCurrentPosition(ctx->pOutput, &cur_play, NULL) != DS_OK ) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[DirectSound] error getting sound buffer positions\n"));
			return;
		}
		cur_play = (cur_play / ctx->buffer_size);
		if (cur_play == in_play) {
			gf_sleep(20);
			retry--;
		} else {
			/*play cursor has moved on: the in_play buffer is done, mark it free and refill it*/
			ctx->frame_state[in_play] = 0;
			DS_FillBuffer(dr, in_play);
			return;
		}
	}
}
Example #18
void EPOCAudio::Close(Bool and_wait)
{
	u32 i;
	if (m_stream) {
		if (state==EPOC_AUDIO_PLAY) {
#if 0
			m_stream->Stop();

			while (0 && and_wait) {

				if (state != EPOC_AUDIO_PLAY) break;
				gf_sleep(1);

				TInt error;
				CActiveScheduler::RunIfReady(error, CActive::EPriorityIdle);
			}
#endif
		}
		delete m_stream;
		m_stream = NULL;
	}
	for (i=0; i<num_buffers; i++) {
		if (buffers[i]) gf_free(buffers[i]);
		buffers[i] = NULL;
	}
	num_buffers = 0;
	state = EPOC_AUDIO_INIT;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOCAudio] output audio stream closed\n"));
}
Example #19
GF_Renderer *gf_sr_new(GF_User *user, Bool self_threaded, GF_Terminal *term)
{
	GF_Renderer *tmp = SR_New(user);
	if (!tmp) return NULL;
	tmp->term = term;

	/*load the audio renderer*/
	tmp->audio_renderer = gf_sr_ar_load(user);
	if (!tmp->audio_renderer) GF_USER_MESSAGE(user, "", "NO AUDIO RENDERER", GF_OK);

	gf_mx_p(tmp->mx);

	/*run threaded*/
	if (self_threaded) {
		tmp->VisualThread = gf_th_new();
		gf_th_run(tmp->VisualThread, SR_RenderRun, tmp);
		while (tmp->video_th_state!=1) {
			gf_sleep(10);
			if (tmp->video_th_state==3) {
				gf_mx_v(tmp->mx);
				gf_sr_del(tmp);
				return NULL;
			}
		}
	}

	/*set default size if owning output*/
	if (!tmp->user->os_window_handler) {
		gf_sr_set_size(tmp, 320, 20);
	}

	gf_mx_v(tmp->mx);

	return tmp;
}
Example #20
void RTP_Delete(GF_BaseInterface *bi)
{
	RTPClient *rtp;
	u32 retry;
	GF_InputService *plug = (GF_InputService *) bi;
	rtp = (RTPClient *)plug->priv;

	/*shutdown thread*/
	if (rtp->th_state==1) rtp->th_state = 0;
	retry = 20;
	while ((rtp->th_state==1) && retry) {
		gf_sleep(10);
		retry--;
	}
	assert(retry);

	if (rtp->session_state_data) gf_free(rtp->session_state_data);

	RP_cleanup(rtp);
	gf_th_del(rtp->th);
	gf_mx_del(rtp->mx);
	gf_list_del(rtp->sessions);
	gf_list_del(rtp->channels);
	gf_free(rtp);
	gf_free(bi);
}
Example #21
GF_EXPORT
u32 gf_term_process_step(GF_Terminal *term)
{
	u32 nb_decs=0;
	u32 time_taken = gf_sys_clock();

	if (term->flags & GF_TERM_NO_DECODER_THREAD) {
		MM_SimulationStep_Decoder(term, &nb_decs);
	}

	if (term->flags & GF_TERM_NO_COMPOSITOR_THREAD) {
		u32 ms_until_next;
		gf_sc_draw_frame(term->compositor, &ms_until_next);
		if (ms_until_next<term->compositor->frame_duration/2) {
			time_taken=0;
		}

	}
	time_taken = gf_sys_clock() - time_taken;
	if (time_taken > term->compositor->frame_duration) {
		time_taken = 0;
	} else {
		time_taken = term->compositor->frame_duration - time_taken;
	}
	if (term->bench_mode || (term->user->init_flags & GF_TERM_NO_REGULATION)) return time_taken;

	if (2*time_taken >= term->compositor->frame_duration) {
		gf_sleep(nb_decs ? 1 : time_taken);
	}
	return time_taken;
}
Example #22
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
//	GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		u32 nb_decs = 0;
		u32 left = 0;
		if (do_codec) left = MM_SimulationStep_Decoder(term, &nb_decs);
		else left = term->frame_duration;

		if (do_scene) {
			u32 ms_until_next=0;
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor, &ms_until_next);
			time_taken = gf_sys_clock() - time_taken;
			if (ms_until_next<term->frame_duration/2) {
				left = 0;
			} else if (left>time_taken)
				left -= time_taken;
			else
				left = 0;
		}
		if (do_regulate) {
			if (term->bench_mode) {
				gf_sleep(0);
			} else {
				if (left==term->frame_duration) {
					//if nothing was done during this pass but we have active decoder, just yield. We don't want to sleep since
					//composition memory could be released at any time. We should have a signal here, rather than a wait
					gf_sleep(nb_decs ? 0 : term->frame_duration/2);
				}
			}
		}
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
Example #23
static u32 ext_media_load_th(void *par) {
	GF_HYBMEDIA *self = (GF_HYBMEDIA*) par;
	/*declare object to terminal*/
	GF_ObjectDescriptor *od = (GF_ObjectDescriptor*)gf_odf_desc_new(GF_ODF_OD_TAG);
	od->URLString = gf_strdup("http://gpac.sourceforge.net/screenshots/lion.jpg");
	od->objectDescriptorID = 0;
	gf_sleep(2000); //TODO: remove the sleep
	gf_term_add_media(self->owner, (GF_Descriptor*)od, 0);
	return 0;
}
Example #24
void avr_delete ( GF_BaseInterface *ifce )
{
    GF_TermExt *dr = ( GF_TermExt * ) ifce;
    GF_AVRedirect *avr = dr->udta;
    avr->is_running = 0;
    /* Ensure encoding is finished */
    gf_mx_p(avr->frameMutex);
    gf_mx_v(avr->frameMutex);
    gf_sleep(200);
    gf_th_stop(avr->encodingThread);
    gf_mx_del(avr->frameMutex);
    avr->frameMutex = NULL;
    gf_th_del(avr->encodingThread);
    avr->encodingThread = NULL;
    gf_mx_del(avr->encodingMutex);
    avr->encodingMutex = NULL;
    if ( avr->ts_implementation )
    {
        ts_amux_del(avr->ts_implementation);
        avr->ts_implementation = NULL;
    }
    avr->videoCodec = NULL;
    if ( avr->YUVpicture )
    {
        av_free ( avr->YUVpicture );
    }
    if ( avr->yuv_data )
        av_free ( avr->yuv_data );
    avr->yuv_data = NULL;
    avr->YUVpicture = NULL;
    if ( avr->RGBpicture )
    {
        av_free ( avr->RGBpicture );
    }
    avr->RGBpicture = NULL;
    if ( avr->swsContext )
        sws_freeContext ( avr->swsContext );
    avr->swsContext = NULL;
    if ( avr->videoOutbuf )
        gf_free ( avr->videoOutbuf );
    avr->videoOutbuf = NULL;
    if ( avr->pcmAudio )
        gf_ringbuffer_del(avr->pcmAudio);
    avr->pcmAudio = NULL;
    gf_global_resource_unlock(avr->globalLock);
    avr->globalLock = NULL;
    if (avr->audioEncodingThread){
	gf_th_stop(avr->audioEncodingThread);
    	gf_th_del(avr->audioEncodingThread);
    }
    avr->audioEncodingThread = NULL;
    gf_free ( avr );
    gf_free ( dr );
}
Example #25
u32 RP_Thread(void *param)
{
	u32 i;
	GF_NetworkCommand com;
	RTSPSession *sess;
	RTPStream *ch;
	RTPClient *rtp = (RTPClient *)param;

	rtp->th_state = 1;
	com.command_type = GF_NET_CHAN_BUFFER_QUERY;
	while (rtp->th_state) {
		gf_mx_p(rtp->mx);

		/*fetch data on UDP*/
		i=0;
		while ((ch = (RTPStream *)gf_list_enum(rtp->channels, &i))) {
			if ((ch->flags & RTP_EOS) || (ch->status!=RTP_Running) ) continue;
			/*for interleaved channels don't read too fast, query the buffer occupancy*/
			if (ch->flags & RTP_INTERLEAVED) {
				com.base.on_channel = ch->channel;
				gf_term_on_command(rtp->service, &com, GF_OK);
				/*if no buffering, use a default value (3 sec of data should do it)*/
				if (!com.buffer.max) com.buffer.max = 3000;
				if (com.buffer.occupancy <= com.buffer.max) ch->rtsp->flags |= RTSP_TCP_FLUSH;
			} else {
				RP_ReadStream(ch);
			}
		}

		/*and process commands / flush TCP*/
		i=0;
		while ((sess = (RTSPSession *)gf_list_enum(rtp->sessions, &i))) {
			RP_ProcessCommands(sess);

			if (sess->connect_error) {
				gf_term_on_connect(sess->owner->service, NULL, sess->connect_error);
				sess->connect_error = 0;
			}

		}

		gf_mx_v(rtp->mx);

		gf_sleep(1);
	}

	if (rtp->dnload) gf_term_download_del(rtp->dnload);
	rtp->dnload = NULL;

	rtp->th_state = 2;
	return 0;
}
Example #26
static void AC3_RegulateDataRate(AC3Reader *read)
{
	GF_NetworkCommand com;

	memset(&com, 0, sizeof(GF_NetworkCommand));
	com.command_type = GF_NET_CHAN_BUFFER_QUERY;
	com.base.on_channel = read->ch;
	while (read->ch) {
		gf_service_command(read->service, &com, GF_OK);
		if (com.buffer.occupancy < com.buffer.max) break;
		gf_sleep(2);
	}
}
Example #27
void gf_sr_del(GF_Renderer *sr)
{
	if (!sr) return;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Destroying Renderer\n"));
	gf_sr_lock(sr, 1);

	if (sr->VisualThread) {
		sr->video_th_state = 2;
		while (sr->video_th_state!=3) gf_sleep(10);
		gf_th_del(sr->VisualThread);
	}
	if (sr->video_out) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Closing video output\n"));
		sr->video_out->Shutdown(sr->video_out);
		gf_modules_close_interface((GF_BaseInterface *)sr->video_out);
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Closing visual renderer\n"));
	sr->visual_renderer->UnloadRenderer(sr->visual_renderer);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Unloading visual renderer module\n"));
	gf_modules_close_interface((GF_BaseInterface *)sr->visual_renderer);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] visual renderer module unloaded\n"));

	if (sr->audio_renderer) gf_sr_ar_del(sr->audio_renderer);

#ifdef GF_SR_EVENT_QUEUE
	gf_mx_p(sr->ev_mx);
	while (gf_list_count(sr->events)) {
		GF_Event *ev = (GF_Event *)gf_list_get(sr->events, 0);
		gf_list_rem(sr->events, 0);
		free(ev);
	}
	gf_mx_v(sr->ev_mx);
	gf_mx_del(sr->ev_mx);
	gf_list_del(sr->events);
#endif


	if (sr->font_engine) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Closing font engine\n"));
		sr->font_engine->shutdown_font_engine(sr->font_engine);
		gf_modules_close_interface((GF_BaseInterface *)sr->font_engine);
	}
	gf_list_del(sr->textures);
	gf_list_del(sr->time_nodes);
	gf_list_del(sr->extra_scenes);
	gf_sr_lock(sr, 0);
	gf_mx_del(sr->mx);
	free(sr);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_RENDER, ("[Render] Renderer destroyed\n"));
}
Example #28
static void close_waveform(GF_AudioOutput *dr)
{
	WAVCTX();

    if (!ctx->event) return;

	/*brute-force version, actually much safer on winCE*/
#ifdef _WIN32_WCE
	ctx->exit_request = 1;
	SetEvent(ctx->event);
	waveOutReset(ctx->hwo);
	waveOutClose(ctx->hwo);
	if (ctx->wav_buf) free(ctx->wav_buf);
	ctx->wav_buf = NULL;
    CloseHandle(ctx->event);
	ctx->event = NULL;
	ctx->exit_request = 0;
#else
	ctx->exit_request = 1;
	SetEvent(ctx->event);
	if (ctx->hwo) {
		u32 i;
		Bool not_done;
		MMRESULT res;
		/*wait for all buffers to complete, otherwise this locks waveOutReset*/
		while (1) {
			not_done = 0;
			for (i=0 ; i< ctx->num_buffers; i++) {
				if (! (ctx->wav_hdr[i].dwFlags & WHDR_DONE)) {
					not_done = 1;
					break;
				}
			}
			if (!not_done) break;
			gf_sleep(60);
		}
		/*waveOutReset gives unpredictable results on PocketPC, so just close right away*/
		while (1) {
			res = waveOutClose(ctx->hwo);
			if (res == MMSYSERR_NOERROR) break;
		}
		ctx->hwo = NULL;
	}
	if (ctx->wav_buf) free(ctx->wav_buf);
	ctx->wav_buf = NULL;
    CloseHandle(ctx->event);
	ctx->event = NULL;
	ctx->exit_request = 0;
#endif
}
Example #29
static int ff_url_read(void *h, unsigned char *buf, int size)
{
	u32 retry = 10;
	u32 read;
	int full_size;
	FFDemux *ffd = (FFDemux *)h;

	full_size = 0;
	if (ffd->buffer_used) {
		if (ffd->buffer_used >= (u32) size) {
			/*serve the request entirely from the internal buffer*/
			memcpy(buf, ffd->buffer, sizeof(char)*size);
			ffd->buffer_used -= size;
			/*shift the remaining buffered bytes - regions overlap, use memmove*/
			memmove(ffd->buffer, ffd->buffer+size, sizeof(char)*ffd->buffer_used);
#ifdef FFMPEG_DUMP_REMOTE
			if (ffd->outdbg) gf_fwrite(buf, size, 1, ffd->outdbg);
#endif
			return size;
		}
		/*consume what is buffered, then fetch the rest from the network*/
		memcpy(buf, ffd->buffer, sizeof(char)*ffd->buffer_used);
		full_size += ffd->buffer_used;
		buf += ffd->buffer_used;
		size -= ffd->buffer_used;
		ffd->buffer_used = 0;
	}

	while (size) {
		GF_Err e = gf_dm_sess_fetch_data(ffd->dnload, buf, size, &read);
		if (e==GF_EOS) break;
		/*we're sync!!*/
		if (e==GF_IP_NETWORK_EMPTY) {
			if (!retry) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] timeout fetching bytes from network\n") );
				return -1;
			}
			retry --;
			gf_sleep(100);
			continue;
		}
		if (e) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG Demuxer] error fetching bytes from network: %s\n", gf_error_to_string(e) ) );
			return -1;
		}
		full_size += read;
		if (read==size) break;
		size -= read;
		buf += read;
	}
#ifdef FFMPEG_DUMP_REMOTE
	if (ffd->outdbg) gf_fwrite(ffd->buffer, full_size, 1, ffd->outdbg);
#endif
	return full_size ? (int) full_size : -1;
}
Example #30
void MPEGVS_Stop(struct __input_device * dr)
{
	MPEGVSCTX;

	if ( rc->trd )
	{
		rc->stop = 1;
		while ( gf_th_status(rc->trd) == GF_THREAD_STATUS_RUN )
			gf_sleep(5);

		gf_th_del(rc->trd);
		rc->trd = NULL;
		rc->stop = 0;
	}
}