// Accept the device only if it is of the type this ptlib backend handles.
// On success the device is remembered for the subsequent open(); otherwise
// another manager gets a chance to claim it.
bool
GMAudioInputManager_ptlib::set_device (const Ekiga::AudioInputDevice & device)
{
  if (device.type != DEVICE_TYPE)
    return false;

  PTRACE(4, "GMAudioInputManager_ptlib\tSetting Device " << device);
  current_state.device = device;
  return true;
}
static gboolean ekiga_dbus_component_get_user_comment (G_GNUC_UNUSED EkigaDBusComponent *self, char **comment, G_GNUC_UNUSED GError **error) { PTRACE (1, "DBus\tGetComment"); *comment = gm_conf_get_string (PERSONAL_DATA_KEY "comment"); return TRUE; }
/*
 * JNI wrapper around get_mds_host(): return the name of the metadata
 * server for the named file system.  Throws a SamFSException on error.
 */
JNIEXPORT jstring JNICALL
Java_com_sun_netstorage_samqfs_mgmt_fs_Host_getMetadataServerName(
    JNIEnv *env, jclass cls /*ARGSUSED*/, jobject ctx, jstring fsName)
{
	jboolean isCopy;
	char *fsNameStr = GET_STR(fsName, isCopy);
	char *mdsName = NULL;
	int rc;

	PTRACE(1, "jni:Host_getMetadataServerName() entry");
	rc = get_mds_host(CTX, fsNameStr, &mdsName);
	/* the UTF chars are released on every path before returning */
	REL_STR(fsName, fsNameStr, isCopy);
	if (rc == -1) {
		ThrowEx(env);
		return (NULL);
	}
	PTRACE(1, "jni:Host_getMetadataServerName() done");
	return (JSTRING(mdsName));
}
static gboolean ekiga_dbus_component_get_user_location (G_GNUC_UNUSED EkigaDBusComponent *self, char **location, G_GNUC_UNUSED GError **error) { PTRACE (1, "DBus\tGetLocation"); *location = gm_conf_get_string (PERSONAL_DATA_KEY "location"); return TRUE; }
/*
 * JNI wrapper around set_advanced_network_cfg(): install the advanced
 * network configuration (one Java String per host) for the named file
 * system.  Throws a SamFSException on error.
 */
JNIEXPORT void JNICALL
Java_com_sun_netstorage_samqfs_mgmt_fs_Host_setAdvancedNetCfg(JNIEnv *env,
    jclass cls /*ARGSUSED*/, jobject ctx, jstring fsName,
    jobjectArray host_strs)
{
	jboolean isCopy;
	char *fsNameStr = GET_STR(fsName, isCopy);
	int rc;

	PTRACE(1, "jni:Host_setAdvancedNetCfg() entry");
	rc = set_advanced_network_cfg(CTX, fsNameStr,
	    jarray2lst(env, host_strs, "java/lang/String", String2charr));
	/* the UTF chars are released on every path before returning */
	REL_STR(fsName, fsNameStr, isCopy);
	if (rc == -1) {
		ThrowEx(env);
		return;
	}
	PTRACE(1, "jni:Host_setAdvancedNetCfg() done");
}
/*
 * Convert a native release_fs_t into a Java rel/ReleaserJob object,
 * using the class's private (String, short, short) constructor.
 */
jobject
relfs2ReleaserJob(JNIEnv *env, void *v_relfs)
{
	release_fs_t *relfs = (release_fs_t *)v_relfs;
	jclass jobCls;
	jmethodID ctor;
	jobject job;

	PTRACE(1, "jni:relfs2ReleaserJob() entry");
	jobCls = (*env)->FindClass(env, BASEPKG"/rel/ReleaserJob");
	/* the private constructor initializes all fields */
	ctor = (*env)->GetMethodID(env, jobCls, "<init>",
	    "(Ljava/lang/String;SS)V");
	job = (*env)->NewObject(env, jobCls, ctor,
	    JSTRING(relfs->fi_name),
	    (jshort)relfs->fi_low,
	    (jshort)relfs->used_pct);
	PTRACE(1, "jni:relfs2ReleaserJob() done");
	return (job);
}
void VideoGrabber::startCall() { PWaitAndSignal locker( mutex ); // if( state == SCall ) return; PTRACE( 6, "VideoGrabber::startCall" ); state = SCall; preparing = true; if( IsSuspended() ) Resume(); }
void VideoGrabber::startPreview() { PWaitAndSignal locker( mutex ); // if( state == SPreview ) return; PTRACE( 6, "VideoGrabber::startSPreview" ); state = SPreview; preparing = true; if( IsSuspended() ) Resume(); }
// Close the video reader channel, drop our handle to it, and publish the
// CLOSED state.  A no-op when no reader is open.
void VideoGrabber::closeReader()
{
  PTRACE( 6, "VideoGrabber::closeReader" );
  if( !videoReader )
    return;

  channel->CloseVideoReader();
  videoReader = 0;
  ancaInfo->setInt( VG_STATE, VG_CLOSED );
}
/*
 * JNI wrapper: reinitialize the native libfsmgmt library using the
 * connection handle stored in the SamFSConnection object.  Throws a
 * SamFSException if init_sam_mgmt() fails.
 */
JNIEXPORT void JNICALL
Java_com_sun_netstorage_samqfs_mgmt_SamFSConnection_reinit(JNIEnv *env,
    jobject samConn)
{
	ctx_t ctx;

	PTRACE(1, "jni:SamFSConnection_reinit() entry");
	/* build a minimal context: empty strings plus the saved handle */
	ctx.dump_path[0] = ctx.read_location[0] = ctx.user_id[0] = '\0';
	ctx.handle = (samrpc_client_t *)getConnHandle(env, samConn);

	/* reinitialize library */
	PTRACE(1, "jni:reinitializing libfsmgmt");
	if (init_sam_mgmt(&ctx) == -1) {
		ThrowEx(env);
		return;
	}
	PTRACE(1, "jni:SamFSConnection_reinit() done");
}
bool GMAudioInputManager_ptlib::open (unsigned channels, unsigned samplerate, unsigned bits_per_sample) { PTRACE(4, "GMAudioInputManager_ptlib\tOpening Device " << current_state.device); PTRACE(4, "GMAudioInputManager_ptlib\tOpening Device with " << channels << "-" << samplerate << "/" << bits_per_sample); current_state.channels = channels; current_state.samplerate = samplerate; current_state.bits_per_sample = bits_per_sample; input_device = PSoundChannel::CreateOpenedChannel (current_state.device.source, #ifdef WIN32 utf2codepage (current_state.device.name), // reencode back to codepage #else current_state.device.name, #endif PSoundChannel::Recorder, channels, samplerate, bits_per_sample); Ekiga::AudioInputErrorCodes error_code = Ekiga::AI_ERROR_NONE; if (!input_device) error_code = Ekiga::AI_ERROR_DEVICE; if (error_code != Ekiga::AI_ERROR_NONE) { PTRACE(1, "GMAudioInputManager_ptlib\tEncountered error " << error_code << " while opening device "); Ekiga::Runtime::run_in_main (boost::bind (&GMAudioInputManager_ptlib::device_error_in_main, this, current_state.device, error_code)); return false; } unsigned volume; input_device->GetVolume (volume); current_state.opened = true; Ekiga::AudioInputSettings settings; settings.volume = volume; settings.modifyable = true; Ekiga::Runtime::run_in_main (boost::bind (&GMAudioInputManager_ptlib::device_opened_in_main, this, current_state.device, settings)); return true; }
/*
 * is_faults_gen_status_on
 * Function to get the status of fault generation
 *
 * The status of SNMP events generation is dependent on setting
 * "alerts = on" in defaults.conf
 *
 * If there is an error, the return value is set to the return value from the
 * server API. The samerrno and samerrmsg are passed on to the client
 */
int
is_faults_gen_status_on(
ctx_t *ctx,			/* client connection */
boolean_t *faults_gen_status	/* status of fault generation */
)
{
	int ret_val;
	ctx_arg_t arg;		/* RPC argument wrapper; only .ctx is set */
	samrpc_result_t result;
	char *func_name = "rpc:is faults gen status on";
	/*
	 * NOTE(review): err_msg and stat appear unused in the visible code;
	 * presumably they are referenced inside the SAMRPC_CLNT_CALL /
	 * CHECK_* macros, which expand using these exact local names.
	 */
	char *err_msg;
	enum clnt_stat stat;

	PTRACE(2, "%s entry", func_name);

	/* macro: validates the client handle (defined elsewhere) */
	CHECK_CLIENT_HANDLE(ctx, func_name);
	if (ISNULL(faults_gen_status)) {
		PTRACE(2, "%s exit %s", func_name, samerrmsg);
		return (-1);
	}

	PTRACE(3, "%s calling RPC...", func_name);

	memset((char *)&result, 0, sizeof (result));
	arg.ctx = ctx;

	/* issue the RPC; macro uses the locals arg/result declared above */
	SAMRPC_CLNT_CALL(samrpc_is_faults_gen_status_on, ctx_arg_t);

	CHECK_FUNCTION_FAILURE(result, func_name);

	ret_val = result.status;
	/* result_data holds a single boolean_t: copy it out, free the buffer */
	*faults_gen_status =
	    *((boolean_t *) result.samrpc_result_u.result.result_data);
	free(result.samrpc_result_u.result.result_data);

	PTRACE(2, "%s returning with status [%d]...", func_name, ret_val);
	PTRACE(2, "%s exit", func_name);
	return (ret_val);
}
/*
 * Convert a C list of int to a jintArray.
 *
 * Returns NULL when lst is NULL, when memory cannot be allocated, or when
 * the Java array cannot be created.
 */
jintArray
lst2jintArray(JNIEnv *env, sqm_lst_t *lst)
{
	jintArray jarr;
	node_t *node = NULL;
	int idx = 0;
	jint *p = NULL;

	if (NULL == lst) {
		PTRACE(1, "jni:lst2jintArray(null). return.");
		return (NULL);
	}
	PTRACE(2, "jni:lst2jintArray(lst[%d])", lst->length);
	if (lst->length > 0) {
		p = (jint *) malloc(lst->length * sizeof (jint));
		if (NULL == p) {
			/* fix: malloc result was previously dereferenced
			 * below without a NULL check */
			PTRACE(1, "jni:lst2jintArray() out of memory");
			return (NULL);
		}
	}
	jarr = (*env)->NewIntArray(env, lst->length);
	if (NULL == jarr) {
		PTRACE(1, "jni:cannot create jintArray");
		free(p);	/* fix: 'p' leaked on this path; free(NULL) ok */
		return (NULL);
	}
	node = lst->head;
	while (node) {
		p[idx] = *(int *)node->data;
		PTRACE(2, "eq: %d\n", p[idx]);
		node = node->next;
		idx++;
	}
	/* for an empty list this copies 0 elements; p may be NULL then */
	(*env)->SetIntArrayRegion(env, jarr, 0, lst->length, p);
	free(p);	/* free(NULL) is a no-op, so no guard needed */
	PTRACE(2, "jni:lst2jintArray() done");
	return (jarr);
}
/*
 * JNI wrapper around get_csd_params(): return the dump (CSD) parameter
 * string for the named file system.  Throws a SamFSException on error.
 */
JNIEXPORT jstring JNICALL
Java_com_sun_netstorage_samqfs_mgmt_fs_Restore_getParams(JNIEnv *env,
    jclass cls /*ARGSUSED*/, jobject ctx, jstring fsname)
{
	char *params = NULL;
	jboolean isCopy;
	char *fsNameStr = GET_STR(fsname, isCopy);
	int rc;

	PTRACE(1, "jni:Restore_getParams(..., %s)", Str(fsNameStr));
	rc = get_csd_params(CTX, fsNameStr, &params);
	/* the UTF chars are released on every path before returning */
	REL_STR(fsname, fsNameStr, isCopy);
	if (rc == -1) {
		ThrowEx(env);
		return (NULL);
	}
	PTRACE(1, "jni:Restore_getParams() done %s", Str(params));
	return (JSTRING(params));
}
template <class CodecClass> static void * Create(const PluginCodec_Definition * defn) { // PTRACE(1, "Plugin", "static void * Create................11111......11111....111"); CodecClass * codec = new CodecClass(defn); // PTRACE(1, "Plugin", "static void * Create................22222......22222....22"); if (codec != NULL && codec->Construct()) return codec; PTRACE(1, "Plugin", "Could not open codec, no context being returned."); delete codec; return NULL; }
virtual bool SetOption(const char * optionName, const char * optionValue) { if (strcasecmp(optionName, UseInBandFEC.m_name) == 0) { if (!SetOptionBoolean(m_useInBandFEC, optionValue)) return false; PTRACE(4, MY_CODEC_LOG, "In band FEC set to " << std::boolalpha << m_useInBandFEC); return true; } // Base class sets bit rate and frame time return PluginCodec<MY_CODEC>::SetOption(optionName, optionValue); }
/*------------------------------------------------------------------------------
    Function name   : EWLFreeRefFrm

    Description     : Release a frame buffer previously allocated with
			EWLMallocRefFrm.

    Return type     : void

    Argument        : const void * instance    - EWL instance
    Argument        : EWLLinearMem_t *info     - frame buffer memory information
------------------------------------------------------------------------------*/
void EWLFreeRefFrm(const void *instance, EWLLinearMem_t * info)
{
    hx280ewl_t *enc = (hx280ewl_t *) instance;
    EWLLinearMem_t *mem = (EWLLinearMem_t *) info;

    assert(enc != NULL);
    assert(mem != NULL);

    /* reference frame buffers are plain linear buffers here */
    EWLFreeLinear(enc, mem);

    PTRACE("------>EWLFreeRefFrm\t%p\n", mem->virtualAddress);
}
// Open the reader and player devices, propagate the reader's frame size to
// the player, and (re)allocate the grab buffer so it can hold the reader's
// largest frame.  Returns false if either device fails to open.
bool VideoGrabber::openDevices()
{
  PTRACE( 6, "VideoGrabber::openDevices" );
  if( !openReader() || !openPlayer() )
    return false;

  unsigned int w, h;
  videoReader->GetFrameSize( w, h );
  videoPlayer->SetFrameSize( w, h );

  if ( bufferSize && videoReader->GetMaxFrameBytes() > bufferSize )
    delete [] buffer;   // fix: allocated with new[], so delete[] (was plain delete — UB)

  if ( !bufferSize || videoReader->GetMaxFrameBytes() > bufferSize ) {
    buffer = new char[ videoReader->GetMaxFrameBytes() ];
    bufferSize = videoReader->GetMaxFrameBytes();
    PTRACE( 6, "Grabber buffer set to size " << bufferSize );
  }
  return true;
}
// Accept the device only when type, source AND name all match this null
// backend's fixed identifiers; remember it for the given stream on success.
bool
GMAudioOutputManager_null::set_device (Ekiga::AudioOutputPS ps, const Ekiga::AudioOutputDevice & device)
{
  const bool matches = (device.type == DEVICE_TYPE)
                    && (device.source == DEVICE_SOURCE)
                    && (device.name == DEVICE_NAME);
  if (!matches)
    return false;

  PTRACE(4, "GMAudioOutputManager_null\tSetting Device[" << ps << "] " << device);
  current_state[ps].device = device;
  return true;
}
/*
 * Convert a native priority_t into a Java arc/ArPriority object,
 * using its (String, float, int) constructor.
 */
jobject
priority2ArPriority(JNIEnv *env, void *v_prio)
{
	priority_t *prio = (priority_t *)v_prio;
	jclass prioCls;
	jmethodID ctor;
	jobject prioObj;

	PTRACE(2, "jni:priority2ArPriority() entry");
	prioCls = (*env)->FindClass(env, BASEPKG"/arc/ArPriority");
	ctor = (*env)->GetMethodID(env, prioCls, "<init>",
	    "(Ljava/lang/String;FI)V");
	prioObj = (*env)->NewObject(env, prioCls, ctor,
	    JSTRING(prio->priority_name),
	    (jfloat)prio->value,
	    (jint)prio->change_flag);
	PTRACE(2, "jni:priority2ArPriority() done");
	return (prioObj);
}
// Log the details of a caught _com_error (numeric code, message, source
// and description) through the PTRACE facility.
void PrintComError(_com_error &e)
{
  const _bstr_t source(e.Source());
  const _bstr_t description(e.Description());

  // Print COM errors.
  PTRACE(1, "catch _com_error:\tError = " << e.Error()
         << "\tErrorMessage = " << (const char*)e.ErrorMessage()
         << "\tSource = " << (const char*)source
         << "\tDescription = " << (const char*)description );
}
/*
 * JNI wrapper: convert the Java ReleaserDirective and install it as the
 * global (all-filesystems) releaser directive.  Throws a SamFSException
 * when the native call fails.
 */
JNIEXPORT void JNICALL
Java_com_sun_netstorage_samqfs_mgmt_rel_Releaser_setGlobalDirective(
    JNIEnv *env, jclass cls /*ARGSUSED*/, jobject ctx, jobject relDirObj)
{
	rl_fs_directive_t *rl;
	int res;

	PTRACE(1, "jni:Releaser_setGlobalDirective() entry");
	rl = ReleaserDirective2reldir(env, relDirObj);
	if (NULL == rl)
		PTRACE(1, "jni:rl is NULL");
	else
		strcpy(rl->fs, GLOBAL);
	/* a NULL rl is passed through; set_rl_fs_directive reports the error */
	res = set_rl_fs_directive(CTX, rl);
	free(rl);	/* free(NULL) is a no-op */
	if (-1 == res) {
		ThrowEx(env);
		return;
	}
	PTRACE(1, "jni:Releaser_setGlobalDirective() done");
}
/*
 * JNI wrapper around set_csd_params(): store the dump (CSD) parameter
 * string for the named file system.  Throws a SamFSException on error.
 */
JNIEXPORT void JNICALL
Java_com_sun_netstorage_samqfs_mgmt_fs_Restore_setParams(JNIEnv *env,
    jclass cls /*ARGSUSED*/, jobject ctx, jstring fsname,
    jstring parameters)
{
	jboolean isCopy, isCopy2;
	char *fsNameStr = GET_STR(fsname, isCopy);
	char *paramStr = GET_STR(parameters, isCopy2);
	int rc;

	PTRACE(1, "jni:Restore_setParams()");
	rc = set_csd_params(CTX, fsNameStr, paramStr);
	/* both UTF-char buffers are released on every path before returning */
	REL_STR(fsname, fsNameStr, isCopy);
	REL_STR(parameters, paramStr, isCopy2);
	if (rc == -1) {
		ThrowEx(env);
		return;
	}
	PTRACE(1, "jni:Restore_setParams() done");
}
// Move and resize the X11 output window (under the display lock), then
// recompute the scaled picture geometry for the new window size.
void
XWindow::SetWindow (int x, int y, unsigned int windowWidth, unsigned int windowHeight)
{
  PTRACE(4, "X11\tSetWindow " << x << "," << y << " " << windowWidth << "x" << windowHeight);

  XLockDisplay (_display);
  XMoveResizeWindow (_display, _XWindow, x, y, windowWidth, windowHeight);
  XUnlockDisplay (_display);

  CalculateSize (windowWidth, windowHeight, true);
}
/*
 * cns_get_registration
 *
 * RPC client stub: fetch the CNS registration key/value string for the
 * given asset prefix.  On success the result buffer is handed to the
 * caller through reg_kv (caller frees).  On error returns -1 with
 * samerrno/samerrmsg set.
 */
int
cns_get_registration(
ctx_t *ctx,		/* client connection */
char *asset_prefix,	/* asset for which to get registration */
char **reg_kv		/* return key value string for the registration */
)
{
	int ret_val;
	string_arg_t arg;	/* RPC argument wrapper: .ctx and .str */
	samrpc_result_t result;
	char *func_name = "rpc:get registration";
	/*
	 * NOTE(review): err_msg and stat appear unused in the visible code;
	 * presumably referenced inside the SAMRPC_CLNT_CALL / CHECK_*
	 * macros, which expand using these exact local names.
	 */
	char *err_msg;
	enum clnt_stat stat;

	PTRACE(2, "%s entry", func_name);

	/* macro: validates the client handle (defined elsewhere) */
	CHECK_CLIENT_HANDLE(ctx, func_name);
	if (ISNULL(asset_prefix, reg_kv)) {
		PTRACE(2, "%s exit %s", func_name, samerrmsg);
		return (-1);
	}

	PTRACE(3, "%s calling RPC...", func_name);

	memset((char *)&result, 0, sizeof (result));
	arg.ctx = ctx;
	arg.str = asset_prefix;

	/* issue the RPC; macro uses the locals arg/result declared above */
	SAMRPC_CLNT_CALL(samrpc_cns_get_registration, string_arg_t);

	CHECK_FUNCTION_FAILURE(result, func_name);

	ret_val = result.status;
	/* ownership of the result buffer passes to the caller via reg_kv */
	*reg_kv = (char *)result.samrpc_result_u.result.result_data;

	PTRACE(2, "%s returning with status [%d]...", func_name, ret_val);
	PTRACE(2, "%s exit", func_name);
	return (ret_val);
}
/*
 * Get the total number of licensed slots for the given
 * robot_type and media_type pair
 *
 * RPC client stub.  On success the returned status value itself is the
 * slot count (see the "returning licensed slots" trace below); on error
 * returns -1 with samerrno/samerrmsg set.
 */
int
get_licensed_media_slots(
ctx_t *ctx,			/* client connection */
const char *robot_name,		/* robot type */
const char *media_name		/* media_type */
)
{
	int ret_val;
	string_string_arg_t arg;	/* RPC wrapper: .ctx, .str1, .str2 */
	samrpc_result_t result;
	char *func_name = "rpc:get licensed media slots";
	/*
	 * NOTE(review): err_msg and stat appear unused in the visible code;
	 * presumably referenced inside the SAMRPC_CLNT_CALL / CHECK_*
	 * macros, which expand using these exact local names.
	 */
	char *err_msg;
	enum clnt_stat stat;

	PTRACE(2, "%s entry", func_name);

	/* macro: validates the client handle (defined elsewhere) */
	CHECK_CLIENT_HANDLE(ctx, func_name);
	if (ISNULL(robot_name, media_name)) {
		PTRACE(2, "%s exit %s", func_name, samerrmsg);
		return (-1);
	}

	PTRACE(3, "%s calling RPC...", func_name);

	memset((char *)&result, 0, sizeof (result));
	arg.ctx = ctx;
	/* the generated arg struct is non-const: cast away const here */
	arg.str1 = (char *)robot_name;
	arg.str2 = (char *)media_name;

	/* issue the RPC; macro uses the locals arg/result declared above */
	SAMRPC_CLNT_CALL(samrpc_get_licensed_media_slots, string_string_arg_t);

	CHECK_FUNCTION_FAILURE(result, func_name);

	ret_val = result.status;

	PTRACE(2, "%s returning licensed slots [%d]...", func_name, ret_val);
	PTRACE(2, "%s exit", func_name);
	return (ret_val);
}
/*
 * clear_load_request
 *
 * cancel a load request
 *
 * RPC client stub.  Returns the server status, or -1 with
 * samerrno/samerrmsg set on a local argument/handle error.
 */
int
clear_load_request(
ctx_t *ctx,	/* client connection */
vsn_t vsn,	/* vsn */
int index	/* index in preview queue or -1 */
)
{
	int ret_val;
	clear_load_request_arg_t arg;	/* RPC wrapper: .ctx, .vsn, .index */
	samrpc_result_t result;
	char *func_name = "rpc:clear load request";
	/*
	 * NOTE(review): err_msg and stat appear unused in the visible code;
	 * presumably referenced inside the SAMRPC_CLNT_CALL / CHECK_*
	 * macros, which expand using these exact local names.
	 */
	char *err_msg;
	enum clnt_stat stat;

	PTRACE(2, "%s entry", func_name);

	/* macro: validates the client handle (defined elsewhere) */
	CHECK_CLIENT_HANDLE(ctx, func_name);
	if (ISNULL(vsn)) {
		PTRACE(2, "%s exit %s", func_name, samerrmsg);
		return (-1);
	}

	PTRACE(3, "%s calling RPC...", func_name);

	memset((char *)&result, 0, sizeof (result));
	arg.ctx = ctx;
	strcpy(arg.vsn, vsn);	/* vsn_t: fixed-size char array in the arg */
	arg.index = index;

	/* issue the RPC; macro uses the locals arg/result declared above */
	SAMRPC_CLNT_CALL(samrpc_clear_load_request, clear_load_request_arg_t);

	CHECK_FUNCTION_FAILURE(result, func_name);

	ret_val = result.status;

	PTRACE(2, "%s returning with status [%d]...", func_name, ret_val);
	PTRACE(2, "%s exit", func_name);
	return (ret_val);
}
/* pfair_tick - this function is called for every local timer
 * interrupt.
 *
 * Lockless coordination: the CPUs race on the global pfair_time via
 * cmpxchg(); exactly one CPU per quantum wins and prepares the next
 * quantum, the rest wait and then copy the published assignment.
 *
 * NOTE(review): the task_struct argument 't' appears unused here — the
 * body operates on 'current' and per-CPU state.  Confirm against callers.
 */
static void pfair_tick(struct task_struct* t)
{
	struct pfair_state* state = &__get_cpu_var(pfair_state);
	quanta_t time, cur;
	int retry = 10;	/* bounded retries so a confused clock cannot hang us */

	do {
		cur = current_quantum(state);
		PTRACE("q %lu at %llu\n", cur, litmus_clock());

		/* Attempt to advance time. First CPU to get here
		 * will prepare the next quantum.
		 */
		time = cmpxchg(&pfair_time,
			       cur - 1,	/* expected */
			       cur	/* next */
			);
		if (time == cur - 1) {
			/* exchange succeeded: we are the one CPU that owns
			 * this transition */
			wait_for_quantum(cur - 1, state);
			schedule_next_quantum(cur);
			retry = 0;
		} else if (time_before(time, cur - 1)) {
			/* the whole system missed a tick !? */
			catchup_quanta(time, cur, state);
			retry--;
		} else if (time_after(time, cur)) {
			/* our timer lagging behind!? */
			TRACE("BAD pfair_time:%lu > cur:%lu\n", time, cur);
			retry--;
		} else {
			/* Some other CPU already started scheduling
			 * this quantum. Let it do its job and then update.
			 */
			retry = 0;
		}
	} while (retry);

	/* Spin locally until time advances. */
	wait_for_quantum(cur, state);

	/* copy assignment */
	/* FIXME: what if we race with a future update? Corrupted state? */
	state->local = state->linked;
	/* signal that we are done: the barrier orders the copy above before
	 * the tick publication below */
	mb();
	state->local_tick = state->cur_tick;

	/* reschedule if the locally assigned task is not what is running */
	if (state->local != current
	    && (is_realtime(current) || is_present(state->local)))
		set_tsk_need_resched(current);
}
// Accept any device of this manager's type; remember the device together
// with the requested channel and format for the subsequent open().
bool
GMVideoInputManager_ptlib::set_device (const Ekiga::VideoInputDevice & device, int channel, Ekiga::VideoInputFormat format)
{
  if (device.type != DEVICE_TYPE)
    return false;

  PTRACE(4, "GMVideoInputManager_ptlib\tSetting Device " << device);
  current_state.device  = device;
  current_state.channel = channel;
  current_state.format  = format;
  return true;
}
bool GMAudioInputManager_null::open (unsigned channels, unsigned samplerate, unsigned bits_per_sample) { PTRACE(4, "GMAudioInputManager_null\tOpening Device " << current_state.device); PTRACE(4, "GMAudioInputManager_null\tOpening Device with " << channels << "-" << samplerate << "/" << bits_per_sample); current_state.channels = channels; current_state.samplerate = samplerate; current_state.bits_per_sample = bits_per_sample; current_state.opened = true; adaptive_delay.Restart(); Ekiga::AudioInputSettings settings; settings.volume = 0; settings.modifyable = false; Ekiga::Runtime::run_in_main (boost::bind (&GMAudioInputManager_null::device_opened_in_main, this, current_state.device, settings)); return true; }