Example #1
File: droid_mpegv.c Project: Bevara/GPAC
u32 ThreadRun(void* param)
{
	struct __input_device * dr = (struct __input_device *)param;
	MPEGVSCTX;

	GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Start: %d\n", gf_th_id()));

	loadSensorControler(rc);
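	/*bind the Java-side sensor controller; abort below if the JNI env or controller object is unavailable*/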

	if (!rc->env || !rc->sensCtrlObj)
		return 0;

	(*rc->env)->CallNonvirtualVoidMethod(rc->env, rc->sensCtrlObj, rc->sensCtrlClass, rc->startSensor, (s32)dr, rc->sensorAndroidType);

	while (!rc->stop)
		gf_sleep(10);

	GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Stop: %d\n", gf_th_id()));

	if (!rc->env)
		return 0;

	if ( rc->sensCtrlObj )
	{
		(*rc->env)->CallNonvirtualVoidMethod(rc->env, rc->sensCtrlObj, rc->sensCtrlClass, rc->stopSensor);

		(*rc->env)->DeleteLocalRef( rc->env, rc->sensCtrlObj );
	}

	unloadSensorController(rc);
	return 0;
}
Example #2
static u32 gf_dm_session_thread(void *par)
{
	GF_DownloadSession *sess = (GF_DownloadSession *)par;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Downloader] Entering thread ID %d\n", gf_th_id() ));

	sess->flags &= ~GF_DOWNLOAD_SESSION_THREAD_DEAD;
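	/*service the session until it is destroyed or disconnected: connect first, then run the request handler*/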
	while (!sess->destroy) {
		gf_mx_p(sess->mx);
		if (sess->status >= GF_NETIO_DISCONNECTED) {
			gf_mx_v(sess->mx);
			break;
		}

		if (sess->status < GF_NETIO_CONNECTED) {
			gf_dm_connect(sess);
		} else {
			if (sess->status == GF_NETIO_WAIT_FOR_REPLY) gf_sleep(GF_WAIT_REPLY_SLEEP);
			sess->do_requests(sess);
		}
		gf_mx_v(sess->mx);
		gf_sleep(2);
	}
	/*destroy all sessions*/
	gf_dm_disconnect(sess);
	sess->status = GF_NETIO_STATE_ERROR;
	sess->last_error = 0;
	sess->flags |= GF_DOWNLOAD_SESSION_THREAD_DEAD;
	return 1;
}
Example #3
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
//	GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		u32 left;
		if (do_codec) left = MM_SimulationStep_Decoder(term);
		else left = term->frame_duration;
		
		if (do_scene) {
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor);
			time_taken = gf_sys_clock() - time_taken;
			if (left>time_taken) 
				left -= time_taken;
			else
				left = 0;
		}
		if (do_regulate)
			gf_sleep(left);
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
Example #4
u32 gf_ar_proc(void *p)
{
	GF_AudioRenderer *ar = (GF_AudioRenderer *) p;

	ar->audio_th_state = 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[AudioRender] Entering audio thread ID %d\n", gf_th_id() ));

	gf_mixer_lock(ar->mixer, GF_TRUE);
	ar->need_reconfig = GF_TRUE;
	gf_sc_ar_reconfig(ar);
	gf_mixer_lock(ar->mixer, GF_FALSE);

	while (ar->audio_th_state == 1) {
		//do mix even if mixer is empty, otherwise we will push the same buffer over and over to the sound card
/*
		if (ar->Frozen ) {
			gf_sleep(0);
		} else 
*/		{
			if (ar->need_reconfig) gf_sc_ar_reconfig(ar);
			ar->audio_out->WriteAudio(ar->audio_out);
		}
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Exiting audio thread\n"));
	ar->audio_out->Shutdown(ar->audio_out);
	ar->audio_th_state = 3;
	return 0;
}
Example #5
File: ios_mpegv.c Project: erelh/gpac
void MPEGVS_Stop(struct __input_device * dr)
{
	MPEGVSCTX;

    GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Stop: %d\n", gf_th_id()));

    SENS_Stop(rc->inst);
    SENS_DestroyInstance(&rc->inst);
}
Example #6
File: os_thread.c Project: fcsteagu/gpac-1
#ifdef WIN32
DWORD WINAPI RunThread(void *ptr)
{
	DWORD ret = 0;
#else
void * RunThread(void *ptr)
{
	long int ret = 0;
#endif
	GF_Thread *t = (GF_Thread *)ptr;

	/* Signal the caller */
	if (! t->_signal) goto exit;
#ifdef GPAC_ANDROID
	if (pthread_once(&currentThreadInfoKey_once, &currentThreadInfoKey_alloc) || pthread_setspecific(currentThreadInfoKey, t))
		GF_LOG(GF_LOG_ERROR, GF_LOG_MUTEX, ("[Mutex] Couldn't run thread %s, ID 0x%08x\n", t->log_name, t->id));
#endif /* GPAC_ANDROID */
	t->status = GF_THREAD_STATUS_RUN;
	gf_sema_notify(t->_signal, 1);

#ifndef GPAC_DISABLE_LOG
	t->id = gf_th_id();
	GF_LOG(GF_LOG_INFO, GF_LOG_MUTEX, ("[Thread %s] At %d Entering thread proc - thread ID 0x%08x\n", t->log_name, gf_sys_clock(), t->id));
#endif

	/* Each thread has its own seed */
	gf_rand_init(0);

	/* Run our thread */
	ret = t->Run(t->args);

exit:
#ifndef GPAC_DISABLE_LOG
	GF_LOG(GF_LOG_INFO, GF_LOG_MUTEX, ("[Thread %s] At %d Exiting thread proc\n", t->log_name, gf_sys_clock()));
#endif
	t->status = GF_THREAD_STATUS_DEAD;
	t->Run = NULL;
#ifdef WIN32
	if (!CloseHandle(t->threadH)) {
		DWORD err = GetLastError();
		GF_LOG(GF_LOG_ERROR, GF_LOG_MUTEX, ("[Thread %s] Couldn't close handle when exiting thread proc, error code: %d\n", t->log_name, err));
	}
	t->threadH = NULL;
	return ret;
#else

#ifdef GPAC_ANDROID
	#ifndef GPAC_DISABLE_LOG
		GF_LOG(GF_LOG_INFO, GF_LOG_MUTEX, ("[Thread %s] RunBeforeExit=%p\n", t->log_name, t->RunBeforeExit));
	#endif
	if (t->RunBeforeExit)
		t->RunBeforeExit(t->args);
#endif /* GPAC_ANDROID */
	pthread_exit((void *)0);
	return (void *)ret;
#endif
}
Example #7
JavaEnvTh * CNativeWrapper::getEnv() {
	JNIEnv *env;
	JavaEnvTh * javaEnv;
	if (!javaVM) {
		debug_log("************* No JVM Found ************");
		return NULL;
	}
	javaEnv = (JavaEnvTh*) pthread_getspecific( jni_thread_env_key );
	if (javaEnv)
		return javaEnv;
	javaEnv = (JavaEnvTh *) gf_malloc(sizeof(JavaEnvTh));
	if (!javaEnv)
		return NULL;
	memset(javaEnv, 0, sizeof(JavaEnvTh));
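	/*attach this native thread to the JVM to obtain a valid JNIEnv for it*/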
	javaVM->AttachCurrentThread(&env, NULL);
	if (!env) {
		LOGE("Attaching to thread failed for thread id=%d", gf_th_id());
		gf_free(javaEnv);
		return NULL;
	}
	LOGI("Rebuilding methods for thread %d", gf_th_id());
	setJavaEnv(javaEnv, env, mainJavaEnv->cbk_obj);
	if (pthread_setspecific(jni_thread_env_key, javaEnv)) {
		LOGE("Failed to set specific thread data to jni_thread_env_key for thread=%d. No ENV available !", gf_th_id());
		gf_free(javaEnv);
		return NULL;
	}
	GF_Thread * t;
	LOGI("Getting current Thread %d...", gf_th_id());
	t = gf_th_current();
	LOGI("Getting current Thread DONE = %p, now registering before exit...", t);

	if (GF_OK != gf_register_before_exit_function(gf_th_current(), &beforeThreadExits)) {
		LOGE("Failed to register exit function for thread %p, no javaEnv for current thread.", gf_th_current());
		//javaVM->DetachCurrentThread();
		gf_free(javaEnv);
		javaEnv = NULL;
	}
	LOGI("Registering DONE for %d", gf_th_id());
	return javaEnv;
}
Example #8
File: os_thread.c Project: fcsteagu/gpac-1
static const char *log_th_name(u32 id)
{
	u32 i, count;

	if (!id) id = gf_th_id();
	count = gf_list_count(thread_bank);
	for (i=0; i<count; i++) {
		GF_Thread *t = gf_list_get(thread_bank, i);
		if (t->id == id) return t->log_name;
	}
	return "Main Process";
}
Example #9
GF_EXPORT
Bool gf_mx_try_lock(GF_Mutex *mx)
{
	u32 caller;
	if (!mx) return 0;
	caller = gf_th_id();
	if (caller == mx->Holder) {
		mx->HolderCount += 1;
		return 1;
	}
	/*FIXME !! WE MUST HAVE tryLock*/
	gf_mx_p(mx);
	return 1;
}
Example #10
GF_EXPORT
u32 gf_mx_p(GF_Mutex *mx)
{
	u32 caller;
	if (!mx) return 0;
	caller = gf_th_id();
	if (caller == mx->Holder) {
		mx->HolderCount += 1;
		return 1;
	}
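	/*not the current holder: wait for the underlying OS mutex, then record ownership*/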
	mx->hMutex->Wait();
	mx->Holder = caller;
	mx->HolderCount = 1;
	return 1;
}
Example #11
File: ios_mpegv.c Project: erelh/gpac
void MPEGVS_Start(struct __input_device * dr)
{
	MPEGVSCTX;

    GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[MPEG-V_IN] Start: %d\n", gf_th_id()));

    if ( rc->inst ) {
        SENS_Stop(rc->inst);
        SENS_DestroyInstance(&rc->inst);
    }

    rc->inst = SENS_CreateInstance();
    SENS_SetSensorType(rc->inst, rc->sensorIOSType);
    SENS_SetCallback(rc->inst, MPEGVSensorCallback, dr);
    SENS_Start(rc->inst);
}
Example #12
void CNativeWrapper::setJavaEnv(JavaEnvTh * envToSet, JNIEnv *env, jobject callback) {
	assert( envToSet );
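	/*cache the Java callback object and its method IDs so native threads can call back into Java later*/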
	jclass localRef = env->GetObjectClass(callback);
	envToSet->env = env;
	envToSet->javaThreadId = gf_th_id();
	envToSet->cbk_obj = callback;
	envToSet->cbk_displayMessage =
	    env->GetMethodID(localRef, "displayMessage", "(Ljava/lang/String;Ljava/lang/String;I)V");
	envToSet->cbk_onProgress =
	    env->GetMethodID(localRef, "onProgress", "(Ljava/lang/String;II)V");
	envToSet->cbk_onLog =
	    env->GetMethodID(localRef, "onLog", "(IILjava/lang/String;)V");
	envToSet->cbk_setCaption =
	    env->GetMethodID(localRef, "setCaption", "(Ljava/lang/String;)V");
	envToSet->cbk_showKeyboard =
	    env->GetMethodID(localRef, "showKeyboard", "(Z)V");
	env->DeleteLocalRef(localRef);
}
Example #13
File: media_manager.c Project: Bevara/GPAC
u32 MM_Loop(void *par)
{
	GF_Terminal *term = (GF_Terminal *) par;
	Bool do_scene = (term->flags & GF_TERM_NO_VISUAL_THREAD) ? 1 : 0;
	Bool do_codec = (term->flags & GF_TERM_NO_DECODER_THREAD) ? 0 : 1;
	Bool do_regulate = (term->user->init_flags & GF_TERM_NO_REGULATION) ? 0 : 1;

	gf_th_set_priority(term->mm_thread, term->priority);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaManager] Entering thread ID %d\n", gf_th_id() ));
//	GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n"));

	while (term->flags & GF_TERM_RUNNING) {
		u32 nb_decs = 0;
		u32 left = 0;
		if (do_codec) left = MM_SimulationStep_Decoder(term, &nb_decs);
		else left = term->frame_duration;

		if (do_scene) {
			u32 ms_until_next=0;
			u32 time_taken = gf_sys_clock();
			gf_sc_draw_frame(term->compositor, &ms_until_next);
			time_taken = gf_sys_clock() - time_taken;
			if (ms_until_next<term->frame_duration/2) {
				left = 0;
			} else if (left>time_taken)
				left -= time_taken;
			else
				left = 0;
		}
		if (do_regulate) {
			if (term->bench_mode) {
				gf_sleep(0);
			} else {
				if (left==term->frame_duration) {
					//if nothing was done during this pass but we have active decoder, just yield. We don't want to sleep since
					//composition memory could be released at any time. We should have a signal here, rather than a wait
					gf_sleep(nb_decs ? 0 : term->frame_duration/2);
				}
			}
		}
	}
	term->flags |= GF_TERM_DEAD;
	return 0;
}
Example #14
u32 SR_RenderRun(void *par)
{	
	GF_Renderer *sr = (GF_Renderer *) par;
	sr->video_th_state = 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Renderer] Entering thread ID %d\n", gf_th_id() ));

	while (sr->video_th_state == 1) {
		/*sleep or render*/
		if (sr->is_hidden) 
			gf_sleep(sr->frame_duration);
		else
			gf_sr_simulation_tick(sr);
	}
	/*destroy video out here if we're using openGL, to avoid threading issues*/
	sr->video_out->Shutdown(sr->video_out);
	gf_modules_close_interface((GF_BaseInterface *)sr->video_out);
	sr->video_out = NULL;
	sr->video_th_state = 3;
	return 0;
}
Example #15
GF_EXPORT
void gf_mx_v(GF_Mutex *mx)
{
	u32 caller;
	if (!mx) return;
	caller = gf_th_id();

	/*only if we own*/
	if (caller != mx->Holder) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Invalid mutex release - owner PID %d - caller PID %d\n", mx->Holder, caller));
		return;
	}
	if (!mx->HolderCount) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Invalid mutex release - mutex not locked\n"));
		return;
	}
	mx->HolderCount -= 1;

	if (mx->HolderCount == 0) {
		mx->Holder = 0;
		mx->hMutex->Signal();
	}
}
Example #16
u32 gf_ar_proc(void *p)
{
	GF_AudioRenderer *ar = (GF_AudioRenderer *) p;

	ar->audio_th_state = 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[AudioRender] Entering audio thread ID %d\n", gf_th_id() ));

	gf_mixer_lock(ar->mixer, 1);
	ar->need_reconfig = 1;
	gf_sc_ar_reconfig(ar);
	gf_mixer_lock(ar->mixer, 0);

	while (ar->audio_th_state == 1) {
		//GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Audio simulation step\n"));
		
		/*THIS IS NEEDED FOR SYMBIAN - if no yield here, the audio module always grabs the 
		main mixer mutex and it takes forever before it can be grabbed by another thread,
		for instance when reconfiguring scene*/
//		gf_sleep(1);

		gf_mixer_lock(ar->mixer, 1);
		if (ar->Frozen || gf_mixer_empty(ar->mixer) ) {
			gf_mixer_lock(ar->mixer, 0);
			gf_sleep(33);
		} else {
			if (ar->need_reconfig) gf_sc_ar_reconfig(ar);
			ar->audio_out->WriteAudio(ar->audio_out);
			gf_mixer_lock(ar->mixer, 0);
		}
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Exiting audio thread\n"));
	ar->audio_out->Shutdown(ar->audio_out);
	ar->audio_th_state = 3;
	return 0;
}
Example #17
File: media_manager.c Project: Bevara/GPAC
u32 RunSingleDec(void *ptr)
{
	GF_Err e;
	u64 time_taken;
	CodecEntry *ce = (CodecEntry *) ptr;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaDecoder %d] Entering thread ID %d\n", ce->dec->odm->OD->objectDescriptorID, gf_th_id() ));

	while (ce->flags & GF_MM_CE_RUNNING) {
		time_taken = gf_sys_clock_high_res();
		if (!ce->dec->force_cb_resize) {
			gf_mx_p(ce->mx);
			e = gf_codec_process(ce->dec, ce->dec->odm->term->frame_duration);
			if (e) gf_term_message(ce->dec->odm->term, ce->dec->odm->net_service->url, "Decoding Error", e);
			gf_mx_v(ce->mx);
		}
		time_taken = gf_sys_clock_high_res() - time_taken;


		/*no priority boost this way for systems codecs, priority is dynamically set by not releasing the
		graph when late and moving on*/
		if (!ce->dec->CB || (ce->dec->CB->UnitCount == ce->dec->CB->Capacity))
			ce->dec->PriorityBoost = 0;

		/*while on don't sleep*/
		if (ce->dec->PriorityBoost) continue;
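		/*the decoding pass was quick, sleep briefly so the decoder thread does not spin*/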

		if (time_taken<20) {
			gf_sleep(1);
		}
	}
	ce->flags |= GF_MM_CE_DEAD;
	return 0;
}
Example #18
u32 DD_WindowThread(void *par)
{
	u32 flags;
	RECT rc;
	MSG msg;
	WNDCLASS wc;
	HINSTANCE hInst;
	GF_VideoOutput *vout = par;
	DDContext *ctx = (DDContext *)vout->opaque;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[DirectXOutput] Entering thread ID %d\n", gf_th_id() ));

	hInst = GetModuleHandle("gm_dx_hw.dll");
	memset(&wc, 0, sizeof(WNDCLASS));
	wc.style = CS_BYTEALIGNWINDOW;
	wc.hInstance = hInst;
	wc.lpfnWndProc = DD_WindowProc;
	wc.hIcon = LoadIcon (NULL, IDI_APPLICATION);
	wc.hCursor = LoadCursor (NULL, IDC_ARROW);
	wc.hbrBackground = (HBRUSH)GetStockObject (BLACK_BRUSH);
	wc.lpszClassName = "GPAC DirectDraw Output";
	RegisterClass (&wc);

	flags = ctx->switch_res;
	ctx->switch_res = 0;

	if (!ctx->os_hwnd) {
		if (flags & GF_TERM_WINDOWLESS) ctx->windowless = 1;

		ctx->os_hwnd = CreateWindow("GPAC DirectDraw Output", "GPAC DirectDraw Output", ctx->windowless ? WS_POPUP : WS_OVERLAPPEDWINDOW, 0, 0, 120, 100, NULL, NULL, hInst, NULL);

		if (ctx->os_hwnd == NULL) {
			ctx->th_state = 2;
			return 1;
		}
		if (flags & GF_TERM_INIT_HIDE) {
			ShowWindow(ctx->os_hwnd, SW_HIDE);
		} else {
			SetForegroundWindow(ctx->os_hwnd);
			ShowWindow(ctx->os_hwnd, SW_SHOWNORMAL);
		}

		/*get border & title bar sizes*/
		rc.left = rc.top = 0;
		rc.right = rc.bottom = 100;
		AdjustWindowRect(&rc, WS_OVERLAPPEDWINDOW, 0);
		ctx->off_w = rc.right - rc.left - 100;
		ctx->off_h = rc.bottom - rc.top - 100;
		ctx->owns_hwnd = 1;

		if (ctx->windowless) SetWindowless(vout, ctx->os_hwnd);
	}

	ctx->fs_hwnd = CreateWindow("GPAC DirectDraw Output", "GPAC DirectDraw FS Output", WS_POPUP, 0, 0, 120, 100, NULL, NULL, hInst, NULL);
	if (!ctx->fs_hwnd) {
		ctx->th_state = 2;
		return 1;
	}
	ShowWindow(ctx->fs_hwnd, SW_HIDE);
	ctx->th_state = 1;

	/*if visible set focus*/
	if (!ctx->switch_res) SetFocus(ctx->os_hwnd);

	ctx->switch_res = 0;
	SetWindowLong(ctx->os_hwnd, GWL_USERDATA, (LONG) vout);
	SetWindowLong(ctx->fs_hwnd, GWL_USERDATA, (LONG) vout);

	/*load cursors*/
	ctx->curs_normal = LoadCursor(NULL, IDC_ARROW);
	assert(ctx->curs_normal);
	ctx->curs_hand = LoadCursor(hInst, MAKEINTRESOURCE(IDC_HAND_PTR));
	ctx->curs_collide = LoadCursor(hInst, MAKEINTRESOURCE(IDC_COLLIDE));
	ctx->cursor_type = GF_CURSOR_NORMAL;

	while (GetMessage (&(msg), NULL, 0, 0)) {
		TranslateMessage (&(msg));
		DispatchMessage (&(msg));
	}
	ctx->th_state = 2;
	return 0;
}
Example #19
NPT_Result
GPAC_GenericDevice::OnAction(PLT_ActionReference&          action,
                             const PLT_HttpRequestContext& context)
{
	NPT_COMPILER_UNUSED(context);

#ifdef GPAC_HAS_SPIDERMONKEY
	gf_mx_p(m_pMutex);
#endif
	PLT_ActionDesc &act_desc = action->GetActionDesc();
	NPT_String name = act_desc.GetName();
#ifdef GPAC_HAS_SPIDERMONKEY
	assert(!m_pSema);
#endif
	GF_LOG(GF_LOG_INFO, GF_LOG_NETWORK, ("[UPnP] Action %s called (thread %d)\n", (char *) name, gf_th_id() ));

#ifdef GPAC_HAS_SPIDERMONKEY
	if (JSVAL_IS_NULL(act_proc)) {
		gf_mx_v(m_pMutex);
		return NPT_SUCCESS;
	}

	jsval argv[2];

	m_pUPnP->LockJavascript(GF_TRUE);

	JSObject *js_action = JS_NewObject(m_pUPnP->m_pJSCtx, &m_pUPnP->upnpDeviceClass._class, 0, 0);
	argv[0] = OBJECT_TO_JSVAL(js_action);
	SMJS_SET_PRIVATE(m_pUPnP->m_pJSCtx, js_action, this);

	act_ref = action;

	JS_DefineProperty(m_pUPnP->m_pJSCtx, js_action, "Name", STRING_TO_JSVAL( JS_NewStringCopyZ(m_pUPnP->m_pJSCtx, name) ), 0, 0, JSPROP_READONLY | JSPROP_PERMANENT);
	GPAC_Service *service = (GPAC_Service *) act_desc.GetService();
	JS_DefineProperty(m_pUPnP->m_pJSCtx, js_action, "Service", service->m_pObj ? OBJECT_TO_JSVAL( service->m_pObj) : JSVAL_NULL, 0, 0, JSPROP_READONLY | JSPROP_PERMANENT);
	JS_DefineFunction(m_pUPnP->m_pJSCtx, js_action, "GetArgument", upnp_action_get_argument, 1, 0);
	JS_DefineFunction(m_pUPnP->m_pJSCtx, js_action, "SendReply", upnp_action_send_reply, 1, 0);

	/*create a semaphore*/
	m_pSema = gf_sema_new(1, 0);

	jsval rval;
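	/*call the script-side handler registered for this UPnP action*/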
	JS_CallFunctionValue(m_pUPnP->m_pJSCtx, obj, act_proc, 1, argv, &rval);
	SMJS_SET_PRIVATE(m_pUPnP->m_pJSCtx, js_action, NULL);
	m_pUPnP->LockJavascript(GF_FALSE);

	if (JSVAL_IS_INT(rval) && (JSVAL_TO_INT(rval) != 0)) {
		action->SetError(JSVAL_TO_INT(rval), "Action Failed");
	}
	/*wait on the semaphore*/
	if (!gf_sema_wait_for(m_pSema, 10000)) {
		GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[UPnP] Reply processing to action %s timeout - sending incomplete reply)\n", (char *) name));
	}
	gf_sema_del(m_pSema);
	m_pSema = NULL;

	gf_mx_v(m_pMutex);
#endif
	return NPT_SUCCESS;
}