Example #1
  void* operator new(size_t /*size*/) {
#ifndef PRODUCT
    GUARANTEE(sizeof(singleton) >= sizeof(DefaultStream), "sanity");
    jvm_memset(&singleton, 0, sizeof(singleton));
#endif
    return (void*)(&singleton);
  }
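This overload hands back preallocated static storage instead of heap memory, so `new` on the class never touches the allocator. A minimal standalone sketch of the same pattern (the class name, storage size, and the plain assert/memset calls are illustrative, not from the VM sources):

#include <cassert>
#include <cstddef>
#include <cstring>

class StaticOnly {
 public:
  void* operator new(std::size_t size) {
    assert(size <= sizeof(_storage));            // mirror the GUARANTEE size check
    std::memset(_storage, 0, sizeof(_storage));  // debug-style scrub, like the #ifndef PRODUCT memset above
    return static_cast<void*>(_storage);
  }
  void operator delete(void*) {}                 // static storage: nothing to free
 private:
  static double _storage[8];                     // naturally aligned backing store
};

double StaticOnly::_storage[8];

// Usage: StaticOnly* s = new StaticOnly();      // always returns &_storage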
 void  BranchTable::init()
  {
    _b_items = 
      (BranchItem*) jvm_malloc(BRANCH_TABLE_LENGTH *  sizeof(BranchItem));
    GUARANTEE(_b_items, "sanity");
    jvm_memset(_b_items, 0, BRANCH_TABLE_LENGTH * sizeof(BranchItem));
    _item_index = 0;
  }
void oop_write_barrier_range(OopDesc** start, int len) {
  juint start_offset;
  juint head, main, tail;
  OopDesc **p;
  const juint BLOCK = 32;

  if (len < BLOCK) {
    GUARANTEE(len > 0, "ObjectHeap::set_bit_range() cannot handle len <= 0");
    // For short ranges, set_bit_range() sets one bit at a time.
    ObjectHeap::set_bit_range(start, len);
  } else {
    // split the range into <head>, <main> and <tail> portions, so that the
    // <main> portion can be set using memset.
    start_offset = start - _heap_start;

    head = BLOCK - (start_offset % BLOCK);
    main = (len - head) & (~(BLOCK-1));
    tail = len - (head + main);

    GUARANTEE(head <= BLOCK, "sanity");
    GUARANTEE(tail <  BLOCK, "sanity");

#ifdef AZZERT
    int i;
    p = start;
    for (i = 0; i < len; i++) {
      ObjectHeap::clear_bit_for(p);
      p++;
    }
#endif

    p = start;

    if (head > 0) {
      ObjectHeap::set_bit_range(p, head);
      p += head;
    }

    if (main > 0) {
      juint *bvw = ObjectHeap::get_bitvectorword_for_aligned(p);
      jvm_memset(bvw, 0xff, main / BitsPerByte);
      p += main;
    }

    if (tail > 0) {
      ObjectHeap::set_bit_range(p, tail);
    }

#ifdef AZZERT
    p = start;
    for (i = 0; i < len; i++) {
      GUARANTEE(ObjectHeap::test_bit_for(p), "bit must be set");
      p++;
    }
#endif
  }
}
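The head/main/tail split above is worth seeing in isolation: head pads up to the next 32-entry boundary, main is the aligned middle that the memset can fill a whole bitvector word at a time, and tail is the remainder. A standalone check of the arithmetic (plain C++, no VM types; only valid for len >= 32, matching the else branch):

#include <cassert>

void check_split(unsigned start_offset, unsigned len) {
  const unsigned BLOCK = 32;
  assert(len >= BLOCK);                          // precondition of the else branch
  unsigned head = BLOCK - (start_offset % BLOCK);
  unsigned main_part = (len - head) & ~(BLOCK - 1);
  unsigned tail = len - (head + main_part);
  assert(head >= 1 && head <= BLOCK);
  assert(tail < BLOCK);
  assert(head + main_part + tail == len);        // nothing is lost
  assert((start_offset + head) % BLOCK == 0);    // the main portion starts aligned
}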
Example #4
void save_java_stack_snapshot() {
#if !ENABLE_ISOLATES
  // This code does not work in MVM -- it really should switch to the task
  // first before examining that task's call stack.
  EnforceRuntimeJavaStackDirection enforce_java_stack_direction;

  jvm_memset(saved_java_stack, 0, sizeof(saved_java_stack));
  int index = 0;
  int max = ARRAY_SIZE(saved_java_stack);

  Thread *thread = Thread::current();
  if (thread->is_null()) {
    return;
  }
  if (thread->last_java_fp() == NULL || thread->last_java_sp() == NULL) {
    return;
  }
  {
#if ENABLE_ISOLATES
    // We must switch to the context of the task
    TaskGCContext tmp(thread->task_id());
#endif
    Frame fr(Thread::current());
    while (index < max) {
      if (fr.is_entry_frame()) {
        if (fr.as_EntryFrame().is_first_frame()) {
          break;
        }
        fr.as_EntryFrame().caller_is(fr);
      } else {
        SavedJavaStackFrame *saved = &saved_java_stack[index++];
        JavaFrame java_frame = fr.as_JavaFrame();
        
        Method method = java_frame.method();
        method.print_name_to(saved->name, sizeof(saved->name));
        
        saved->used = 1;
        saved->bci = java_frame.bci();
        saved->method = method.obj();
        
#if ENABLE_COMPILER
        if (java_frame.is_compiled_frame()) {
          saved->compiled_method = java_frame.compiled_method();
        }
#endif

        java_frame.caller_is(fr);
      }
    }
  }
#endif
}
void BranchTable::remove_only(address cm)
{
  for (int i = 0; i < BRANCH_TABLE_LENGTH; i++)
  {
    if (_b_items[i].inst_addr() != 0 &&
        _b_items[i].callee_addr() == cm)
    {
      *(int*)(_b_items[i].inst_addr()) = _b_items[i].old_inst();
      OsMisc_flush_icache(_b_items[i].inst_addr(), 4);
      jvm_memset(&_b_items[i], 0, sizeof(BranchItem));
    }
  }
}
Example #6
KNIEXPORT int _KNI_push_handles(int n, _KNI_HandleInfo* info, jobject* handles)
{
  info->prev = (_KNI_HandleInfo*)last_kni_handle_info;
  info->total_count = n;
  info->declared_count = 0;
  info->handles = handles;
  last_kni_handle_info = info;

  // Must clear it for GC to work properly
  jvm_memset(handles, 0, n * sizeof(jobject));

  return (0);
}
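The push above threads a new handle block onto a global singly linked list and zeroes its slots so the GC never scans stale values. A minimal standalone sketch of the same push/pop discipline (HandleBlock, g_top_block and pop_handles are illustrative names, not part of the KNI API):

#include <cstring>

struct HandleBlock {
  HandleBlock* prev;
  int          count;
  void**       slots;
};

static HandleBlock* g_top_block = 0;

void push_handles(HandleBlock* block, void** slots, int n) {
  block->prev  = g_top_block;
  block->count = n;
  block->slots = slots;
  g_top_block = block;
  memset(slots, 0, n * sizeof(void*));   // clear before the GC can see them
}

void pop_handles(HandleBlock* block) {
  g_top_block = block->prev;             // unlink in LIFO order
}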
ReturnOop BufferedFile::allocate(JvmAllocProc alloc_proc JVM_TRAPS) {
  if (alloc_proc != NULL) {
    OopDesc* oop_desc =
      (OopDesc*)alloc_proc(sizeof(BufferedFileDesc));
    if (oop_desc == NULL) {
      Throw::out_of_memory_error(JVM_SINGLE_ARG_THROW_0);
      return NULL;
    }

    // Must zero-initialize the allocated block to match
    // ObjectHeap::allocate behavior.
    jvm_memset((char*)oop_desc, 0, sizeof(BufferedFileDesc));
      
    return oop_desc;
  } else {
    return BufferedFile::allocate(JVM_SINGLE_ARG_CHECK_0);
  }
}
ReturnOop Buffer::allocate(unsigned int length, 
                           JvmAllocProc alloc_proc JVM_TRAPS) {
  if (alloc_proc != NULL) {
    OopDesc* oop_desc = (OopDesc*)alloc_proc(sizeof(ArrayDesc) +
                                             length * sizeof(jubyte));
    if (oop_desc == NULL) {
      Throw::out_of_memory_error(JVM_SINGLE_ARG_THROW_0);
      return NULL;
    }
    // Must zero-initialize the allocated block to match
    // ObjectHeap::allocate behavior.
    jvm_memset((char*)oop_desc + sizeof(ArrayDesc), 0, 
               length * sizeof(jubyte));
    *oop_desc->int_field_addr(Array::length_offset()) = length;
    return oop_desc;
  } else {
    return Universe::new_byte_array(length JVM_CHECK_0);
  }
}
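Both allocate() variants take the same optional hook: when alloc_proc is non-NULL the descriptor is carved out of caller-supplied memory instead of the Java heap, and the wrapper zeroes the payload itself to match ObjectHeap::allocate. A hedged sketch of a malloc-backed hook, assuming JvmAllocProc is a plain function pointer taking a byte count (check the real typedef in the VM headers before relying on this):

#include <cstddef>
#include <cstdlib>

// Illustrative allocator hook; the allocate() wrappers above do the zeroing.
static void* malloc_alloc_proc(size_t size) {
  return malloc(size);
}

// Hypothetical call site:
//   ReturnOop raw = Buffer::allocate(256, malloc_alloc_proc JVM_CHECK_0);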
Example #9
// Initialize the kvmcompat module. This function must be called by
// Universe.cpp after all the primitive array classes have been loaded.
void kvmcompat_initialize() {
#ifdef AZZERT
  jvm_memset(PrimitiveArrayClasses, 0xff, sizeof(PrimitiveArrayClasses));
  _in_kvm_native_method = 0;
#endif

  ARRAY_CLASS *p = PrimitiveArrayClasses;

  p[T_BOOLEAN] = (ARRAY_CLASS)Universe::  bool_array_class()->java_mirror();
  p[T_CHAR]    = (ARRAY_CLASS)Universe::  char_array_class()->java_mirror();
  p[T_BYTE]    = (ARRAY_CLASS)Universe::  byte_array_class()->java_mirror();
  p[T_SHORT]   = (ARRAY_CLASS)Universe:: short_array_class()->java_mirror();
  p[T_INT]     = (ARRAY_CLASS)Universe::   int_array_class()->java_mirror();
  p[T_LONG]    = (ARRAY_CLASS)Universe::  long_array_class()->java_mirror();
#if ENABLE_FLOAT
  p[T_FLOAT]   = (ARRAY_CLASS)Universe:: float_array_class()->java_mirror();
  p[T_DOUBLE]  = (ARRAY_CLASS)Universe::double_array_class()->java_mirror();
#endif
  TemporaryRootsLength = 0;
  GlobalRootsLength = 0;
}
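Once the table is filled, KVM-compatibility code can index it directly with the BasicType tag of the element type, for example (illustrative usage, not from the sources):

  ARRAY_CLASS int_array_cls = PrimitiveArrayClasses[T_INT];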
Example #10
bool CompiledMethod::expand_compiled_code_space(int delta, int relocation_size) {
  if (ObjectHeap::expand_current_compiled_method(delta)) {
    if (Verbose) {
      TTY_TRACE_CR(("Expanding compiled method from %d to %d bytes", 
                    size(), size() + delta));
    }
    void* src = field_base(end_offset() - relocation_size);
    void* dst = DERIVED(void*, src, delta);
    GUARANTEE(src < dst, "should be copying up");
    jvm_memmove(dst, src, relocation_size); // possibly overlapping regions
    // It's probably OK only to clear dst[-1], but let's just make sure.
    jvm_memset(src, 0, delta);
    ((CompiledMethodDesc*) obj())->set_size(size() + delta);

    if (VerifyGC > 2) {
      ObjectHeap::verify();
    }
    return true;
  } else {
    return false;
  }
}
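The expansion moves the relocation data up by delta bytes and then scrubs the gap it leaves behind. The same move-then-clear step with the standard library (a standalone sketch with no VM types, assuming a buffer laid out as [code ... | relocation]):

#include <cstring>

void expand_in_place(char* buf, int reloc_offset, int reloc_size, int delta) {
  char* src = buf + reloc_offset;
  char* dst = src + delta;
  memmove(dst, src, reloc_size);  // source and destination may overlap
  memset(src, 0, delta);          // clear the bytes the relocation vacated
}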
 void  BranchTable::revoke(address caller)
 {
   TTY_TRACE_IF_VERBOSE(("entering revoke\n"));
   TTY_TRACE_IF_VERBOSE(("caller addr = 0x%x\n", (int)caller));
   int i = _item_index - 1;
   while (true)
   {
     if (_b_items[i].inst_addr() != 0 && _b_items[i].caller_addr() == caller)
     {
       TTY_TRACE_IF_VERBOSE(("revoke index = %d\n", i));
       jvm_memset(&_b_items[i], 0, sizeof(BranchItem));
       i--;
       if (i < 0)
       {
         i = BRANCH_TABLE_LENGTH - 1;
       }
     }
     else
     {
       break;
     }
   }
   TTY_TRACE_IF_VERBOSE(("leaving revoke\n"));
 }
  void BranchTable::remove()
  {
    TTY_TRACE_IF_VERBOSE(("entering remove\n"));
    for (int i = 0; i < BRANCH_TABLE_LENGTH; i++)
    {
      if (_b_items[i].inst_addr() != 0)
      {
        const CompiledMethodDesc* callee = (CompiledMethodDesc*)_b_items[i].callee_addr();
        const CompiledMethodDesc* caller = (CompiledMethodDesc*)_b_items[i].caller_addr();
        const int callee_index = callee->get_cache_index();
        const int caller_index = caller->get_cache_index();
        GUARANTEE(callee_index >= 0, "sanity");
        GUARANTEE(caller_index >= 0, "sanity");

        if (callee_index > CompiledMethodCache::get_upb() ||
            caller_index > CompiledMethodCache::get_upb() ||
            CompiledMethodCache::get_item(callee_index) != callee ||
            CompiledMethodCache::get_item(caller_index) != caller)
        {
          // The callee or the caller has been removed from the compiled
          // method cache: restore the original branch instruction and
          // drop this entry.
          GUARANTEE(_b_items[i].inst_addr() >= (address) _compiler_area_start
                    && _b_items[i].inst_addr() <= (address) _compiler_area_top,
                    "sanity");
          {
            TTY_TRACE_IF_VERBOSE(("static removed\n"));
            TTY_TRACE_IF_VERBOSE(("remove: inst_addr = 0x%x, index = %d\n",
                                  (int) _b_items[i].inst_addr(), i));
            GUARANTEE(*(juint*)(_b_items[i].inst_addr()) >> 24 == 0xEA,
                      "sanity");
            *(int*)(_b_items[i].inst_addr()) = _b_items[i].old_inst();
            OsMisc_flush_icache(_b_items[i].inst_addr(), 4);
          }
          jvm_memset(&_b_items[i], 0, sizeof(BranchItem));
          TTY_TRACE_IF_VERBOSE(("removed index %d\n", i));
        }
        else
        {
          GUARANTEE(_b_items[i].inst_addr() >= (address) _compiler_area_start
                    && _b_items[i].inst_addr() <= (address) _compiler_area_top,
                    "sanity");
          address old_caller_addr = _b_items[i].caller_addr();
          address old_callee_addr = _b_items[i].callee_addr();
          jint caller_shift = (int) ((MethodDesc*) old_caller_addr)->_klass;
          if (caller_shift != 0)
          {
            _b_items[i].set_caller_addr(old_caller_addr + caller_shift);
            TTY_TRACE_IF_VERBOSE(("adjusted caller\n"));
          }
          jint callee_shift = (int) ((MethodDesc*) old_callee_addr)->_klass;
          if (callee_shift != 0)
          {
            _b_items[i].set_callee_addr(old_callee_addr + callee_shift);
            TTY_TRACE_IF_VERBOSE(("adjusted callee\n"));
          }

          {
            int offset = callee_shift - caller_shift;
            TTY_TRACE_IF_VERBOSE(("adjust: inst_addr = 0x%x, index = %d\n",
                                  (int) _b_items[i].inst_addr(), i));
            GUARANTEE(*(juint*)(_b_items[i].inst_addr()) >> 24 == 0xEA,
                      "sanity");

            if (offset != 0)
            {
              TTY_TRACE_IF_VERBOSE(("adjusted index %d\n", i));
              int* inst_addr = (int*) (_b_items[i].inst_addr());

              // Re-target the 24-bit signed word offset of the ARM B
              // instruction by <offset> bytes.
              int current_target = *inst_addr & ((1<<24) - 1);
              current_target <<= 8; /*lsl*/
              current_target >>= 6; /*asr*/
              current_target += offset;
              current_target >>= 2;
              current_target &= ((1<<24) - 1);
              *inst_addr &= ~((1<<24) - 1);
              *inst_addr |= current_target;
              TTY_TRACE_IF_VERBOSE(("static adjusted\n"));
              OsMisc_flush_icache((address) inst_addr, 4);
            }
            _b_items[i].set_inst_addr(_b_items[i].inst_addr() + caller_shift);
            TTY_TRACE_IF_VERBOSE(("adjusted index %d\n", i));
          }
        }
      }
    }
  }
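The shift sequence above is the standard way to decode and re-encode the signed 24-bit word offset of an ARM B instruction (the 0xEA top byte checked by the GUARANTEE is the unconditional branch encoding). A standalone sketch of the same patch step, assuming 32-bit ints with arithmetic right shift, as the original code does:

static int patch_arm_branch(int inst, int byte_delta) {
  int target = inst & ((1 << 24) - 1);   // extract the 24-bit word offset field
  target <<= 8;                          // move it to the top of the word...
  target >>= 6;                          // ...sign-extend and convert to a byte offset
  target += byte_delta;                  // move the destination
  target >>= 2;                          // back to a word offset
  target &= (1 << 24) - 1;               // keep only the offset field
  return (inst & ~((1 << 24) - 1)) | target;
}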
// Initialize global data structures used by JarFileParser.
void JarFileParser::initialize() {
  // Set all entries to -1
  jvm_memset(_cached_parsers, 0xff, sizeof _cached_parsers);
  _timestamp = 0;
}
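The 0xff fill works because setting every byte of an integer to 0xff yields -1 on a two's-complement target, which is what the "Set all entries to -1" comment relies on. A tiny standalone illustration:

#include <cassert>
#include <cstring>

void all_minus_one() {
  int cache[4];
  memset(cache, 0xff, sizeof cache);
  for (int i = 0; i < 4; i++) {
    assert(cache[i] == -1);   // all bits set == -1 in two's complement
  }
}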
Example #14
void init_jvm_chunk_manager() {
  jvm_memset(&chunk_info[0], 0, sizeof(chunk_info));
  SysPageSize = jvm_sysconf(_SC_PAGE_SIZE);
}
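jvm_sysconf here presumably wraps the POSIX sysconf call (an assumption; the porting layer may differ), so the underlying page-size query in plain POSIX C++ looks like this:

#include <unistd.h>

long query_page_size() {
  return sysconf(_SC_PAGE_SIZE);   // page size in bytes, or -1 on error
}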
void ObjArray::fill_zero(OopDesc *obj_array) {
  ObjArray::Raw array = obj_array;
  jvm_memset(array().base_address(), 0, array().length() * sizeof(OopDesc*));
}