// Central Ruby method dispatch: checks arity and visibility, handles a few
// special method kinds (empty bodies, C functions registered with a negative
// arity), and otherwise forwards to __rb_vm_rcall().
//   top  - the caller's 'self' (0 when there is no caller context)
//   self - the receiver
//   sel  - the Objective-C selector being dispatched
//   node - resolved method node (arity, flags, IMP)
//   opt  - dispatch options; 0 means "no explicit receiver was written"
// Raises ArgumentError on arity mismatch; may divert to #method_missing for
// visibility violations.
static force_inline VALUE
ruby_dispatch(VALUE top, VALUE self, SEL sel, rb_vm_method_node_t *node,
	      unsigned char opt, int argc, const VALUE *argv)
{
    const rb_vm_arity_t &arity = node->arity;
    // arity.max == -1 means "no upper bound" (splat arguments).
    if ((argc < arity.min)
	    || ((arity.max != -1) && (argc > arity.max))) {
	// Report the nearest violated bound in the error message.
	short limit = (argc < arity.min) ? arity.min : arity.max;
	rb_raise(rb_eArgError, "wrong number of arguments (%d for %d)",
		argc, limit);
    }

    if ((node->flags & VM_METHOD_PRIVATE) && opt == 0) {
	// Calling a private method with no explicit receiver OR an attribute
	// assignment to non-self, triggering #method_missing.
	rb_vm_block_t *b = GET_VM()->current_block();
	return method_missing(self, sel, b, argc, argv,
		METHOD_MISSING_PRIVATE);
    }

    if ((node->flags & VM_METHOD_PROTECTED) && top != 0
	    && node->klass != NULL
	    && !rb_obj_is_kind_of(top, (VALUE)node->klass)) {
	// Calling a protected method inside a method where 'self' is not
	// an instance of the class where the method was originally defined,
	// triggering #method_missing.
	rb_vm_block_t *b = GET_VM()->current_block();
	return method_missing(self, sel, b, argc, argv,
		METHOD_MISSING_PROTECTED);
    }

    if ((node->flags & VM_METHOD_EMPTY) && arity.max == arity.min) {
	// Calling an empty method, let's just return nil!
	return Qnil;
    }

    if ((node->flags & VM_METHOD_FBODY) && arity.max != arity.min) {
	// Calling a function defined with rb_objc_define_method with
	// a negative arity, which means a different calling convention.
	if (arity.real == 2) {
	    // rb_define_method(..., -1): f(self, sel, argc, argv)
	    return ((VALUE (*)(VALUE, SEL, int, const VALUE *))node->ruby_imp)
		(self, sel, argc, argv);
	}
	else if (arity.real == 1) {
	    // rb_define_method(..., -2): f(self, sel, args_ary)
	    return ((VALUE (*)(VALUE, SEL, ...))node->ruby_imp)
		(self, sel, rb_ary_new4(argc, argv));
	}
	else if (arity.real == 3) {
	    // f(self, sel, top, argc, argv)
	    return ((VALUE (*)(VALUE, SEL, VALUE, int,
			const VALUE *))node->ruby_imp)
		(self, sel, top, argc, argv);
	}
	else {
	    printf("invalid negative arity for C function %d\n", arity.real);
	    abort();
	}
    }

    return __rb_vm_rcall(self, sel, node->ruby_imp, arity, argc, argv);
}
/* Defer reclamation of a method entry: instead of freeing it now, push
 * it onto the VM-wide list of unlinked entries, which is drained later
 * at a point where no thread can still be executing the method. */
void
rb_unlink_method_entry(rb_method_entry_t *me)
{
    rb_vm_t *vm = GET_VM();
    struct unlinked_method_entry_list_entry *entry =
	ALLOC(struct unlinked_method_entry_list_entry);

    entry->me = me;
    entry->next = vm->unlinked_method_entry_list;
    vm->unlinked_method_entry_list = entry;
}
/* Install `func` as the C-level handler for `sig` and store the Ruby-level
 * `command` in the trap table; returns the previous command (as a String,
 * nil, or the original object) so Signal.trap can hand it back to Ruby. */
static VALUE
trap(int sig, sighandler_t func, VALUE command)
{
    sighandler_t oldfunc;
    VALUE oldcmd;
    rb_vm_t *vm = GET_VM();

    /*
     * Be careful. ruby_signal() and trap_list[sig].cmd must be changed
     * atomically. In the current implementation it suffices not to call
     * RUBY_VM_CHECK_INTS() between the two updates.
     */
    oldfunc = ruby_signal(sig, func);
    oldcmd = vm->trap_list[sig].cmd;
    switch (oldcmd) {
      case 0:
      case Qtrue:
	// No command string stored; reconstruct the previous setting
	// from the C handler that was installed.
	if (oldfunc == SIG_IGN)
	    oldcmd = rb_str_new2("IGNORE");
	else if (oldfunc == sighandler)
	    oldcmd = rb_str_new2("DEFAULT");
	else
	    oldcmd = Qnil;
	break;
      case Qnil:
	break;
      case Qundef:
	// Qundef is the table's sentinel for the "EXIT" pseudo-handler.
	oldcmd = rb_str_new2("EXIT");
	break;
    }

    vm->trap_list[sig].cmd = command;
    vm->trap_list[sig].safe = rb_safe_level();

    return oldcmd;
}
void ruby_init(void) { int state; if (ruby_initialized) return; ruby_initialized = 1; #ifdef __MACOS__ rb_origenviron = 0; #else rb_origenviron = environ; #endif #if WITH_OBJC char *s; s = getenv("MACRUBY_DEBUG"); ruby_dlog_enabled = !(s == NULL || *s == '0'); s = getenv("MACRUBY_DEBUG_FILE"); if (s == NULL) { ruby_dlog_file = stderr; } else { ruby_dlog_file = fopen(s, "w"); if (ruby_dlog_file == NULL) { fprintf(stderr, "cannot open macruby debug file `%s'", strerror(errno)); ruby_dlog_file = stderr; } } #endif Init_stack((void *)&state); Init_PreGC(); Init_BareVM(); Init_heap(); PUSH_TAG(); if ((state = EXEC_TAG()) == 0) { rb_call_inits(); #ifdef __MACOS__ _macruby_init(); #elif defined(__VMS) _vmsruby_init(); #endif ruby_prog_init(); ALLOW_INTS; } POP_TAG(); if (state) { error_print(); exit(EXIT_FAILURE); } GET_VM()->running = 1; }
static void Init_postponed_job(void) { rb_vm_t *vm = GET_VM(); vm->postponed_job_buffer = ALLOC_N(rb_postponed_job_t, MAX_POSTPONED_JOB); vm->postponed_job_index = 0; }
/* Core of Signal.trap: installs arg->func for arg->sig, swaps the stored
 * Ruby command, and returns the previous command so it can be yielded
 * back to Ruby code. Also re-enables the signal in the caller's mask. */
static VALUE
trap(struct trap_arg *arg)
{
    sighandler_t oldfunc, func = arg->func;
    VALUE oldcmd, command = arg->cmd;
    int sig = arg->sig;
    rb_vm_t *vm = GET_VM();

    // Install the C handler first, then read the previously stored
    // command; interrupts must not run between these two steps.
    oldfunc = ruby_signal(sig, func);
    oldcmd = vm->trap_list[sig].cmd;
    switch (oldcmd) {
      case 0:
	// No command stored; derive the old value from the C handler.
	if (oldfunc == SIG_IGN)
	    oldcmd = rb_str_new2("IGNORE");
	else if (oldfunc == sighandler)
	    oldcmd = rb_str_new2("DEFAULT");
	else
	    oldcmd = Qnil;
	break;
      case Qundef:
	// Qundef is the table's sentinel for the "EXIT" pseudo-handler.
	oldcmd = rb_str_new2("EXIT");
	break;
    }

    vm->trap_list[sig].cmd = command;
    vm->trap_list[sig].safe = rb_safe_level();
    /* enable at least specified signal. */
#if USE_TRAP_MASK
#ifdef HAVE_SIGPROCMASK
    sigdelset(&arg->mask, sig);
#else
    arg->mask &= ~sigmask(sig);
#endif
#endif
    return oldcmd;
}
/* Initializes the Ruby VM and builtin libraries.
 * Idempotent: subsequent calls return 0 immediately.
 * @retval 0 if succeeded.
 * @retval non-zero an error occurred (the tag state raised during boot).
 */
int
ruby_setup(void)
{
    static int initialized = 0;
    int state;

    if (initialized)
	return 0;
    initialized = 1;

    // Record the C stack base before creating the VM so GC can scan it.
    ruby_init_stack((void *)&state);
    Init_BareVM();
    Init_heap();
    Init_vm_objects();

    // Run all Init_* functions under a tag; an exception leaves its
    // non-zero tag state in `state`, which is returned to the caller.
    PUSH_TAG();
    if ((state = EXEC_TAG()) == 0) {
	rb_call_inits();
	ruby_prog_init();
	GET_VM()->running = 1;
    }
    POP_TAG();

    return state;
}
/* Registers the load/require machinery: $LOAD_PATH ($:, $-I),
 * $LOADED_FEATURES ($"), their caches, and the Kernel/Module methods
 * (load, require, require_relative, autoload). */
void
Init_load(void)
{
#undef rb_intern
#define rb_intern(str) rb_intern2((str), strlen(str))
    rb_vm_t *vm = GET_VM();
    static const char var_load_path[] = "$:";
    ID id_load_path = rb_intern2(var_load_path, sizeof(var_load_path)-1);

    // $: is read-only at the variable level; mutation goes through the
    // array object itself.
    rb_define_hooked_variable(var_load_path, (VALUE*)vm, load_path_getter,
	    rb_gvar_readonly_setter);
    rb_alias_variable(rb_intern("$-I"), id_load_path);
    rb_alias_variable(rb_intern("$LOAD_PATH"), id_load_path);
    vm->load_path = rb_ary_new();
    // Caches used to avoid re-expanding $LOAD_PATH on every require.
    vm->expanded_load_path = rb_ary_tmp_new(0);
    vm->load_path_snapshot = rb_ary_tmp_new(0);
    vm->load_path_check_cache = 0;

    rb_define_virtual_variable("$\"", get_loaded_features, 0);
    rb_define_virtual_variable("$LOADED_FEATURES", get_loaded_features, 0);
    vm->loaded_features = rb_ary_new();
    vm->loaded_features_snapshot = rb_ary_tmp_new(0);
    // String index over loaded features for O(1) "already required?".
    vm->loaded_features_index = st_init_strtable();

    rb_define_global_function("load", rb_f_load, -1);
    rb_define_global_function("require", rb_f_require, 1);
    rb_define_global_function("require_relative", rb_f_require_relative, 1);
    rb_define_method(rb_cModule, "autoload", rb_mod_autoload, 2);
    rb_define_method(rb_cModule, "autoload?", rb_mod_autoload_p, 1);
    rb_define_global_function("autoload", rb_f_autoload, 2);
    rb_define_global_function("autoload?", rb_f_autoload_p, 1);

    // Keep dynamically-loaded library references alive for the GC.
    ruby_dln_librefs = rb_ary_tmp_new(0);
    rb_gc_register_mark_object(ruby_dln_librefs);
}
/* Resolves `fname` to a compiled-bytecode (.iseq) path.
 * If fname already lives under the Rho bundle root, just appends ".iseq";
 * otherwise probes each $LOAD_PATH directory for "<dir>/<fname>.iseq".
 * Returns the prepared path VALUE, or 0 when the file cannot be found. */
static VALUE
find_file(VALUE fname)
{
    VALUE res;

    if (strncmp(RSTRING_PTR(fname), rho_native_rhopath(),
		strlen(rho_native_rhopath())) == 0) {
	// Absolute path inside the bundle: only append the extension.
	res = rb_str_dup(fname);
	rb_str_cat(res, ".iseq", 5);
    }
    else {
	int i = 0;
	int nOK = 0;
	VALUE load_path = GET_VM()->load_path;
	VALUE fname1 = checkRhoBundleInPath(fname);

	// BUGFIX: the original left `res` uninitialized (and then passed
	// it to RhoPreparePath) when $LOAD_PATH had at most one entry;
	// treat that case as "not found" instead of invoking UB.
	if (RARRAY_LEN(load_path) <= 1) {
	    return 0;
	}
	//TODO: support document relative require in case of multiple apps
	for (; i < RARRAY_LEN(load_path); i++) {
	    VALUE dir = RARRAY_PTR(load_path)[i];
	    res = rb_str_dup(dir);
	    rb_str_cat(res, "/", 1);
	    rb_str_cat(res, RSTRING_PTR(fname1), RSTRING_LEN(fname1));
	    rb_str_cat(res, ".iseq", 5);
	    if (eaccess(RSTRING_PTR(res), R_OK) == 0) {
		nOK = 1;
		break;
	    }
	}
	if (!nOK) {
	    return 0;
	}
    }

    // Every surviving path is known to exist (or was built from the
    // bundle root); normalize it for the platform and return it.
    return RhoPreparePath(res);
}
/* Probes the current disposition of `sig` (usually SIGCHLD): if the
 * process had a custom handler installed it is restored untouched,
 * otherwise the trap-table slot is cleared. All signals are blocked
 * while probing so the handler cannot fire mid-swap. */
static void
init_sigchld(int sig)
{
    sighandler_t oldfunc;
#if USE_TRAP_MASK
# ifdef HAVE_SIGPROCMASK
    sigset_t mask;
    sigset_t fullmask;
# else
    int mask;
    int fullmask;
# endif
#endif

#if USE_TRAP_MASK
    /* disable interrupt */
    sigfillset(&fullmask);
    pthread_sigmask(SIG_BLOCK, &fullmask, &mask);
#endif

    // Temporarily set SIG_DFL just to read the previous handler back.
    oldfunc = ruby_signal(sig, SIG_DFL);
    if (oldfunc != SIG_DFL && oldfunc != SIG_IGN) {
	ruby_signal(sig, oldfunc);
    }
    else {
	GET_VM()->trap_list[sig].cmd = 0;
    }

#if USE_TRAP_MASK
    // Restore the original mask, but keep `sig` itself unblocked.
    sigdelset(&mask, sig);
    pthread_sigmask(SIG_SETMASK, &mask, NULL);
    trap_last_mask = mask;
#endif
}
// Evaluates block `b` with `self` as the receiver on the current VM.
// Thin wrapper over vm_block_eval().
VALUE
rb_vm_block_eval2(rb_vm_block_t *b, VALUE self, SEL sel, int argc,
	const VALUE *argv)
{
    // TODO check given arity and raise exception
    return vm_block_eval(GET_VM(), b, sel, self, argc, argv);
}
// Records the receiver/selector pair of the MRI-style method currently
// executing, so that rb_call_super() can later re-dispatch to the
// superclass implementation.
void
rb_vm_set_current_mri_method_context(VALUE self, SEL sel)
{
    GET_VM()->set_current_mri_method_self(self);
    GET_VM()->set_current_mri_method_sel(sel);
}
/* One-time interpreter initialization (Rhodes build): sets up the bare
 * VM and heap, then runs all Init_* functions under a tag; an exception
 * during boot is printed and aborts the process. */
void
ruby_init(void)
{
    static int initialized = 0;
    int state;

    if (initialized)
	return;
    initialized = 1;

    //RHO
    //rb_origenviron = environ;
    //RHO

    // Record the stack base for GC before creating the VM and heap.
    Init_stack((void *)&state);
    Init_BareVM();
    Init_heap();

    PUSH_TAG();
    if ((state = EXEC_TAG()) == 0) {
	rb_call_inits();
	ruby_prog_init();
    }
    POP_TAG();

    if (state) {
	// Boot raised: print the pending exception and bail out.
	error_print();
	exit(EXIT_FAILURE);
    }
    GET_VM()->running = 1;
}
/* Rhodes variant of require for precompiled bytecode: resolves `fname`
 * to a .iseq file, evaluates it, and records it in $LOADED_FEATURES.
 * Returns Qtrue when loaded (or already loaded / built-in), Qnil when
 * the file cannot be found; on success *result holds the eval result. */
VALUE
require_compiled(VALUE fname, VALUE* result)
{
    VALUE path;
    char* szName = 0;

    // FilePathValue(fname);
    szName = RSTRING_PTR(fname);
    RAWTRACE1("require_compiled: %s", szName);
    // Strip the ".rb" suffix in place.
    // NOTE(review): String#sub! replaces the *first* ".rb" anywhere in
    // the name, not just a trailing extension -- confirm intended.
    rb_funcall(fname, rb_intern("sub!"), 2, rb_str_new2(".rb"),
	    rb_str_new2("") );

    // These extensions are compiled into the runtime; report success.
    if ( strcmp("strscan",szName)==0 || strcmp("enumerator",szName)==0 )
	return Qtrue;

    path = find_file(fname);

    if ( path != 0 )
    {
	VALUE seq;

	if ( isAlreadyLoaded(path) == Qtrue )
	    return Qtrue;

	// Mark as loaded *before* evaluating, so circular requires stop.
	rb_ary_push(GET_VM()->loaded_features, path);

	seq = loadISeqFromFile(path);

	//*result = rb_funcall(seq, rb_intern("eval"), 0 );
	*result = rb_iseq_eval(seq);

	return Qtrue;
    }

    return Qnil;
}
/* Returns the string-keyed index over $LOADED_FEATURES, rebuilding it
 * first if the snapshot proves that someone mutated the array behind
 * our back (sharing with the snapshot is broken by any outside write). */
static st_table *
get_loaded_features_index(void)
{
    VALUE features;
    int i;
    rb_vm_t *vm = GET_VM();

    if (!rb_ary_shared_with_p(vm->loaded_features_snapshot,
	    vm->loaded_features)) {
	/* The sharing was broken; something (other than us in
	   rb_provide_feature()) modified loaded_features. Rebuild the
	   index. */
	st_foreach(vm->loaded_features_index,
		loaded_features_index_clear_i, 0);
	features = vm->loaded_features;
	for (i = 0; i < RARRAY_LEN(features); i++) {
	    VALUE entry, as_str;
	    as_str = entry = rb_ary_entry(features, i);
	    StringValue(as_str);
	    // Deduplicate and freeze so the index keys stay stable.
	    as_str = rb_fstring(rb_str_freeze(as_str));
	    if (as_str != entry)
		rb_ary_store(features, i, as_str);
	    features_index_add(as_str, INT2FIX(i));
	}
	reset_loaded_features_snapshot();
    }
    return vm->loaded_features_index;
}
/* Reports whether the interpreter is up: non-zero once a VM instance
 * exists, zero otherwise. */
int
ruby_running()
{
    return GET_VM() != NULL ? 1 : 0;
}
// MRI-compatible super call: re-dispatches to the superclass
// implementation using the receiver/selector pair recorded earlier by
// rb_vm_set_current_mri_method_context().
VALUE
rb_call_super(int argc, const VALUE *argv)
{
    RoxorVM *current = GET_VM();
    const VALUE recv = current->get_current_mri_method_self();
    const SEL mri_sel = current->get_current_mri_method_sel();

    assert(recv != 0 && mri_sel != 0);
    return rb_vm_call_super(recv, mri_sel, argc, argv);
}
/* Releases a memory block back to a VMS virtual-memory zone and logs
 * the outcome; returns the RTL status code.
 * NOTE(review): this routine is named Free_Mem and logs "FREE_MEM", yet
 * it calls LIB$GET_VM -- the OpenVMS *allocation* service; the matching
 * deallocation routine is LIB$FREE_VM. Looks like a copy/paste slip;
 * confirm against the OpenVMS RTL documentation before changing. */
Free_Mem (Size, Block_A, Zone)
{
    signed long Status;

    Status = LIB$GET_VM(Size, &Block_A, Zone) ;
    XLOG$FAO(LOG$MEM,
	"!%T FREE_MEM: Status: !SL, Size: !SL, Addr: !SL, Zone: !SL!/",
	0, Status, Size, Block_A, Zone) ;
    Mem_Stat() ;
    return (Status);
}
/* Runs the handler registered via trap(0) / trap("EXIT"), if any.
 * The slot is cleared before execution so a recursive exit cannot
 * re-enter the handler. */
void
rb_trap_exit(void)
{
    rb_vm_t *vm = GET_VM();
    VALUE handler = vm->trap_list[0].cmd;

    if (!handler) {
	return;
    }
    vm->trap_list[0].cmd = 0;
    signal_exec(handler, vm->trap_list[0].safe, 0);
}
/* C-level signal handler: runs in async-signal context, so it only
 * buffers the signal number; the Ruby-level trap is executed later on
 * a Ruby thread that drains the buffer. */
static RETSIGTYPE
sighandler(int sig)
{
    rb_vm_t *vm = GET_VM(); /* fix me for Multi-VM */

    ATOMIC_INC(vm->signal_buff[sig]);
    ATOMIC_INC(vm->buffered_signal_size);
#if !defined(BSD_SIGNAL) && !defined(POSIX_SIGNAL)
    // Old SysV semantics reset the disposition on delivery; re-arm it.
    ruby_signal(sig, sighandler);
#endif
}
RUBY_FUNC_EXPORTED void ruby_init_ext(const char *name, void (*init)(void)) { st_table *loading_tbl = get_loading_table(); if (rb_provided(name)) return; if (!loading_tbl) { GET_VM()->loading_table = loading_tbl = st_init_strtable(); } st_update(loading_tbl, (st_data_t)name, register_init_ext, (st_data_t)init); }
/* Probes the current disposition of `sig`: a pre-existing custom
 * handler is restored untouched; otherwise the trap-table slot is
 * cleared. Interrupts are disabled around the probe/swap. */
static void
init_sigchld(int sig)
{
    sighandler_t prev;

    rb_disable_interrupt();
    prev = ruby_signal(sig, SIG_DFL);
    if (prev == SIG_DFL || prev == SIG_IGN) {
	GET_VM()->trap_list[sig].cmd = 0;
    }
    else {
	ruby_signal(sig, prev);
    }
    rb_enable_interrupt();
}
/* Probes the current disposition of `sig`: restores a pre-existing
 * custom handler untouched, otherwise clears the trap-table slot.
 * Returns 0 on success, -1 if the signal cannot be inspected. */
static int
init_sigchld(int sig)
{
    sighandler_t prev = ruby_signal(sig, SIG_DFL);

    if (prev == SIG_ERR) {
	return -1;
    }
    if (prev == SIG_DFL || prev == SIG_IGN) {
	GET_VM()->trap_list[sig].cmd = 0;
    }
    else {
	ruby_signal(sig, prev);
    }
    return 0;
}
/* Construct expanded load path and store it to cache.
   We rebuild load path partially if the cache is invalid.
   We don't cache non string object and expand it every time. We ensure that
   string objects in $LOAD_PATH are frozen.
   `type` selects which entries must be re-expanded (EXPAND_RELATIVE /
   EXPAND_HOME / EXPAND_NON_CACHE / all); *has_relative and
   *has_non_cache are set when such entries are encountered.
 */
static void
rb_construct_expanded_load_path(int type, int *has_relative,
	int *has_non_cache)
{
    rb_vm_t *vm = GET_VM();
    VALUE load_path = vm->load_path;
    VALUE expanded_load_path = vm->expanded_load_path;
    VALUE ary;
    long i;
    int level = rb_safe_level();

    ary = rb_ary_tmp_new(RARRAY_LEN(load_path));
    for (i = 0; i < RARRAY_LEN(load_path); ++i) {
	VALUE path, as_str, expanded_path;
	int is_string, non_cache;
	char *as_cstr;
	as_str = path = RARRAY_AREF(load_path, i);
	is_string = RB_TYPE_P(path, T_STRING) ? 1 : 0;
	// Non-string entries are never cached: re-expand every time.
	non_cache = !is_string ? 1 : 0;
	as_str = rb_get_path_check_to_string(path, level);
	as_cstr = RSTRING_PTR(as_str);

	if (!non_cache) {
	    // Reuse the previously expanded form when this entry is not
	    // affected by the requested rebuild type.
	    if ((type == EXPAND_RELATIVE &&
		    rb_is_absolute_path(as_cstr)) ||
		(type == EXPAND_HOME &&
		    (!as_cstr[0] || as_cstr[0] != '~')) ||
		(type == EXPAND_NON_CACHE)) {
		/* Use cached expanded path. */
		rb_ary_push(ary, RARRAY_AREF(expanded_load_path, i));
		continue;
	    }
	}
	if (!*has_relative && !rb_is_absolute_path(as_cstr))
	    *has_relative = 1;
	if (!*has_non_cache && non_cache)
	    *has_non_cache = 1;
	/* Freeze only string object. We expand other objects every time. */
	if (is_string)
	    rb_str_freeze(path);
	as_str = rb_get_path_check_convert(path, as_str, level);
	expanded_path = rb_file_expand_path_fast(as_str, Qnil);
	rb_str_freeze(expanded_path);
	rb_ary_push(ary, rb_fstring(expanded_path));
    }
    rb_obj_freeze(ary);
    vm->expanded_load_path = ary;
    // Re-share the snapshot so later mutations of $LOAD_PATH are
    // detectable by rb_get_expanded_load_path().
    rb_ary_replace(vm->load_path_snapshot, vm->load_path);
}
// Builds and dispatches a #method_missing call for `sel` on `obj`:
// records the reason, prepends the method name (as a Symbol, with a
// lone trailing ':' stripped for plain Ruby names) to the argument
// list, and either calls the object's method_missing: or raises the
// NoMethodError directly when no handler exists.
static VALUE
method_missing(VALUE obj, SEL sel, rb_vm_block_t *block, int argc,
	const VALUE *argv, rb_vm_method_missing_reason_t call_status)
{
    // #alloc has no sensible method_missing fallback.
    if (sel == selAlloc) {
	rb_raise(rb_eTypeError, "allocator undefined for %s",
		 RSTRING_PTR(rb_inspect(obj)));
    }

    GET_VM()->set_method_missing_reason(call_status);

    VALUE *new_argv = (VALUE *)xmalloc_ptrs(sizeof(VALUE) * (argc + 1));

    char buf[100];
    int n = snprintf(buf, sizeof buf, "%s", sel_getName(sel));
    // BUGFIX: snprintf() returns the length the string *would* have had,
    // so for a selector name of 100+ characters (or a snprintf error)
    // `buf[n - 1]` indexed outside the buffer. Clamp n to what was
    // actually stored.
    if (n < 0) {
	n = 0;
    }
    else if (n >= (int)sizeof buf) {
	n = (int)sizeof buf - 1;
    }
    if (n > 0 && buf[n - 1] == ':') {
	// Let's see if there are more colons making this a real selector.
	bool multiple_colons = false;
	for (int i = 0; i < (n - 1); i++) {
	    if (buf[i] == ':') {
		multiple_colons = true;
		break;
	    }
	}
	if (!multiple_colons) {
	    // Not a typical multiple argument selector. So as this is
	    // probably a typical ruby method name, chop off the colon.
	    buf[n - 1] = '\0';
	}
    }
    new_argv[0] = ID2SYM(rb_intern(buf));
    MEMCPY(&new_argv[1], argv, VALUE, argc);

    // In case the missing selector _is_ method_missing: OR the object does
    // not respond to method_missing: (this can happen for NSProxy-based
    // objects), directly trigger the exception.
    Class k = (Class)CLASS_OF(obj);
    if (sel == selMethodMissing
	    || class_getInstanceMethod(k, selMethodMissing) == NULL) {
	rb_vm_method_missing(obj, argc + 1, new_argv);
	return Qnil; // never reached
    }
    else {
	return rb_vm_call2(block, obj, (VALUE)k, selMethodMissing,
		argc + 1, new_argv);
    }
}
/* Rhodes variant of require for precompiled bytecode, with a built-in
 * whitelist of extensions compiled into the runtime. Returns Qtrue when
 * loaded (or already loaded / built-in), Qnil when the file cannot be
 * found; on success *result holds the eval result. */
VALUE
require_compiled(VALUE fname, VALUE* result)
{
    VALUE path;
    char* szName1 = 0;

    // Strip the ".rb" suffix in place.
    // NOTE(review): String#sub! replaces the *first* ".rb" anywhere in
    // the name, not just a trailing extension -- confirm intended.
    rb_funcall(fname, rb_intern("sub!"), 2, rb_str_new2(".rb"),
	    rb_str_new2("") );
    szName1 = RSTRING_PTR(fname);

    // Extensions linked into the runtime: report success immediately.
    if ( strcmp("strscan",szName1)==0 || strcmp("enumerator",szName1)==0 ||
	 strcmp("stringio",szName1)==0 || strcmp("socket",szName1)==0 ||
	 strcmp("digest.so",szName1)==0 || strcmp("openssl.so",szName1)==0 ||
	 strcmp("fcntl",szName1)==0 || strcmp("digest/md5",szName1)==0 ||
	 strcmp("digest/sha1",szName1)==0 )
	return Qtrue;

    if ( isAlreadyLoaded(fname) == Qtrue )
	return Qtrue;

    //RAWLOG_INFO1("find_file: %s", RSTRING_PTR(fname));
    path = find_file(fname);

    if ( path != 0 )
    {
	VALUE seq;

//        if ( isAlreadyLoaded(path) == Qtrue )
//            return Qtrue;

	RAWLOG_INFO1("require_compiled: %s", szName1);

	//optimize require
	//rb_ary_push(GET_VM()->loaded_features, path);
	// Record the *feature name* (not the resolved path) before
	// evaluating, so circular requires stop.
	rb_ary_push(GET_VM()->loaded_features, fname);

	// NOTE(review): if loadISeqFromFile() raises, GC stays disabled
	// -- confirm loadISeqFromFile cannot raise, or protect this.
	rb_gc_disable();
	seq = loadISeqFromFile(path);
	rb_gc_enable();

	//*result = rb_funcall(seq, rb_intern("eval"), 0 );
	*result = rb_iseq_eval(seq);

	return Qtrue;
    }

    RAWLOG_ERROR1("require_compiled: error: can not find %s",
	    RSTRING_PTR(fname));
    return Qnil;
}
/* Splits `path` on the platform path separator and appends each
 * non-empty component -- mangled, then passed through `filter` -- to
 * $LOAD_PATH. */
static void
push_include(const char *path, VALUE (*filter)(VALUE))
{
    const char delim = PATH_SEP_CHAR;
    VALUE load_path = GET_VM()->load_path;
    const char *cur = path;

    while (*cur) {
	const char *end;

	// Skip any run of separators.
	while (*cur == delim) {
	    cur++;
	}
	if (!*cur) {
	    break;
	}
	// Advance `end` to the close of the current component.
	for (end = cur; *end && *end != delim; end = CharNext(end)) {
	    ;
	}
	rb_ary_push(load_path, (*filter)(rubylib_mangled_path(cur, end - cur)));
	cur = end;
    }
}
/* Returns the cached expanded form of $LOAD_PATH, rebuilding exactly the
 * entries invalidated since the last call. The cache state machine:
 *   - snapshot sharing broken  -> full rebuild (EXPAND_ALL);
 *   - check_cache == Qtrue     -> only non-string entries re-expanded;
 *   - check_cache == a cwd str -> relative entries re-expanded if the
 *     working directory changed, otherwise only '~' and non-cacheable
 *     entries. */
VALUE
rb_get_expanded_load_path(void)
{
    rb_vm_t *vm = GET_VM();
    const VALUE non_cache = Qtrue;

    if (!rb_ary_shared_with_p(vm->load_path_snapshot, vm->load_path)) {
	/* The load path was modified. Rebuild the expanded load path. */
	int has_relative = 0, has_non_cache = 0;
	rb_construct_expanded_load_path(EXPAND_ALL, &has_relative,
		&has_non_cache);
	if (has_relative) {
	    // Remember the cwd so a later chdir invalidates the cache.
	    vm->load_path_check_cache = load_path_getcwd();
	}
	else if (has_non_cache) {
	    /* Non string object. */
	    vm->load_path_check_cache = non_cache;
	}
	else {
	    vm->load_path_check_cache = 0;
	}
    }
    else if (vm->load_path_check_cache == non_cache) {
	int has_relative = 1, has_non_cache = 1;
	/* Expand only non-cacheable objects. */
	rb_construct_expanded_load_path(EXPAND_NON_CACHE,
		&has_relative, &has_non_cache);
    }
    else if (vm->load_path_check_cache) {
	int has_relative = 1, has_non_cache = 1;
	VALUE cwd = load_path_getcwd();
	if (!rb_str_equal(vm->load_path_check_cache, cwd)) {
	    /* Current working directory or filesystem encoding was changed.
	       Expand relative load path and non-cacheable objects again. */
	    vm->load_path_check_cache = cwd;
	    rb_construct_expanded_load_path(EXPAND_RELATIVE,
		    &has_relative, &has_non_cache);
	}
	else {
	    /* Expand only tilde (User HOME) and non-cacheable objects. */
	    rb_construct_expanded_load_path(EXPAND_HOME,
		    &has_relative, &has_non_cache);
	}
    }
    return vm->expanded_load_path;
}
/* Linear scan of $LOADED_FEATURES for an exact string match against
 * `path`; returns Qtrue when found, Qfalse otherwise. */
VALUE
isAlreadyLoaded(VALUE path)
{
    VALUE features = GET_VM()->loaded_features;
    long idx;

    for (idx = 0; idx < RARRAY_LEN(features); ++idx) {
	VALUE item = RARRAY_PTR(features)[idx];
	const char *str = StringValuePtr(item);

	// Compare lengths first; only strcmp on a length match.
	if (RSTRING_LEN(item) == RSTRING_LEN(path)
		&& strcmp(str, RSTRING_PTR(path)) == 0) {
	    return Qtrue;
	}
    }
    return Qfalse;
}
// Yields the current block with `self` as receiver and `klass` pushed
// as the lexical outer scope (the machinery behind instance_eval-style
// yields). The block's self/klass and the VM's outer/block stacks are
// restored by the RAII `Finally` object on both normal and exceptional
// exit.
VALUE
rb_vm_yield_under(VALUE klass, VALUE self, int argc, const VALUE *argv)
{
    RoxorVM *vm = GET_VM();
    rb_vm_block_t *b = vm->current_block();
    if (b == NULL) {
	rb_raise(rb_eLocalJumpError, "no block given");
    }
    // Pop the block so nested dispatch does not see it as "current";
    // it is re-added by the finalizer below.
    vm->pop_current_block();

    VALUE old_self = b->self;
    b->self = self;
    VALUE old_class = b->klass;
    b->klass = klass;

    rb_vm_outer_t *o = vm->push_outer((Class)klass);
    o->pushed_by_eval = true;

    // RAII guard: undoes every mutation above even if the block raises.
    struct Finally {
	RoxorVM *vm;
	rb_vm_block_t *b;
	VALUE old_class;
	VALUE old_self;
	Finally(RoxorVM *_vm, rb_vm_block_t *_b, VALUE _old_class,
		VALUE _old_self) {
	    vm = _vm;
	    b = _b;
	    old_class = _old_class;
	    old_self = _old_self;
	}
	~Finally() {
	    vm->pop_outer(true);
	    b->self = old_self;
	    b->klass = old_class;
	    vm->add_current_block(b);
	}
    } finalizer(vm, b, old_class, old_self);

    return vm_block_eval(vm, b, NULL, b->self, argc, argv);
}