Example #1
// Clone obj to a new length: grow with filler, shrink, or copy, as len requires.
oop objVectorMap::cloneSize(oop obj, fint len, bool mustAllocate, oop filler) {
  assert_objVector(obj, "not an obj vector");
  fint l = objVectorOop(obj)->length();
  objVectorOop v;
  if (l < len) {
    // growing array
    v= objVectorOop(obj)->grow(len - l, mustAllocate);
    if (oop(v) != failedAllocationOop) {
      set_oops(v->objs(l), len - l, filler);
      if (v->is_old()) {
        if (filler->is_new()) {
          // do all of object
          Memory->remembered_set->record_multistores(v->oops(), v->objs(len));
        } else {
          // just do beginning of object; filler isn't new
          Memory->remembered_set->record_multistores(v->oops(), v->objs(l));
        }
      }
    }
  } else if (l > len) {
    // shrinking array
    v= objVectorOop(obj)->shrink(l - len, mustAllocate);
  } else {
    // copying array
    v= objVectorOop(obj)->copy(mustAllocate);
  }
  if (oop(v) != failedAllocationOop) v->init_mark();
  return v;
}
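
The two record_multistores calls above are a generational write barrier: when pointers to new-space objects are stored into an old object, the affected slots must be logged in the remembered set so the scavenger can later find and update them. The sketch below is a minimal self-contained illustration of that invariant; Obj, RememberedSet, and fill_slots are hypothetical stand-ins, not the VM's actual types.

#include <cstddef>
#include <vector>

struct Obj { bool is_new; };  // stand-in for a heap object

struct RememberedSet {
  std::vector<Obj**> logged;  // slots in old objects that may point into new space
  // Log every slot in [from, to); analogous to record_multistores() above.
  void record_multistores(Obj** from, Obj** to) {
    for (Obj** p = from; p < to; p++) logged.push_back(p);
  }
};

// Fill a run of slots with filler, then apply the barrier under the same test
// cloneSize() makes: the holder is old and the stored value is new.
void fill_slots(RememberedSet& rs, bool holder_is_old,
                Obj** slots, size_t n, Obj* filler) {
  for (size_t i = 0; i < n; i++) slots[i] = filler;
  if (holder_is_old && filler->is_new)
    rs.record_multistores(slots, slots + n);
}

This also explains the two branches in the grow case: when the filler is new, the whole object is logged; when it is not, only the copied prefix (which may contain new pointers from the original) needs recording.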
Example #2
// Initialize a fresh obj vector as empty; the filler argument is deliberately unused.
void objVectorMap::dummy_initialize(oop obj, oop filler) {
  Unused(filler);
  assert_objVector(obj, "not an obj vector");
  objVectorOop(obj)->set_length(0);
}
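
The Unused(filler) call is the usual idiom for suppressing an unused-parameter warning while keeping the initializer's signature uniform across maps. A one-line sketch of such a macro (the VM's own definition may differ):

#define Unused(x) ((void)(x))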
Example #3
// Clone obj via copy(); like the other cloning paths, this yields failedAllocationOop on failure.
oop objVectorMap::clone(oop obj, bool mustAllocate, oop genObj) {
  assert_objVector(obj, "not an obj vector");
  objVectorOop v= objVectorOop(obj)->copy(mustAllocate, genObj);
  if (oop(v) != failedAllocationOop) v->init_mark();
  return v;
}
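
All three cloning entry points share one failure protocol: the allocator yields the distinguished failedAllocationOop sentinel rather than a null pointer, and the caller must test for it before initializing the result. The sketch below illustrates that convention; Vec, kFailedAllocation, and try_allocate are hypothetical names, and the mustAllocate semantics (fail hard vs. report failure) are an assumption.

#include <cstdio>
#include <cstdlib>

struct Vec {
  int length;
  void init_mark() { /* stamp the header mark word */ }
};

// A distinguished non-null sentinel, analogous to failedAllocationOop, so that
// allocation failure is unambiguous even where a null value might be legitimate.
static Vec failed_allocation_object;
static Vec* const kFailedAllocation = &failed_allocation_object;

Vec* try_allocate(int len, bool mustAllocate) {
  Vec* v = (len <= 1024) ? new Vec{len} : nullptr;  // stand-in allocator
  if (v == nullptr) {
    if (mustAllocate) { fprintf(stderr, "out of memory\n"); exit(1); }
    return kFailedAllocation;                        // caller must check
  }
  return v;
}

Vec* clone_vec(int len, bool mustAllocate) {
  Vec* v = try_allocate(len, mustAllocate);
  if (v != kFailedAllocation) v->init_mark();        // never touch the sentinel
  return v;
}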
Example #4
// Interpreter main loop: execute this activation's bytecodes until a return,
// an activation switch, or a preemption point (backward branch) transfers control.
oop_t ActivationObj::loop(oop_t this_activation) {

  The::set_active_context( this_activation, this);
  
  DECLARE_STACK;
  smi         bci = get_pc_quickly(io);
  
  ActivationMapObj* m_addr = map_addr();
  
  oop_t           codes_oop    = m_addr->codes();
  ByteVectorObj*  codes_addr   = ByteVectorObj::from(codes_oop);
  char*           codes        = codes_addr->bytes();
  fint            codes_length = codes_addr->indexableSize();

  oop_t         literals       = m_addr->literals();
  ObjVectorObj* literals_addr  = ObjVectorObj::from(literals);
  fint          literals_io    = literals_addr->indexableOrigin();

  fint index = 0, temp_index;
  // "Use and clear": yield the index extended so far plus this bytecode's operand, then reset.
  # define UC_index ((temp_index = index << INDEXWIDTH), (index = 0), temp_index | bc_index)
  bool undirected_resend = false;
  # define UC_undirected_resend (undirected_resend ? (undirected_resend = false, true) : false)
  
  fint lexical_level = 0;
  
  // Fetch the literal selected by the (possibly extended) index.
  # define use_lit (literals_addr->read_oop(literals_io + UC_index))
  
  oop_t delegatee = 0, temp_del;
  // Yield the pending delegatee (set by DELEGATEE_CODE), then clear it.
  # define UC_del  ((temp_del = delegatee), (delegatee = 0), temp_del)
  
  fint arg_count = 0, temp_arg_count;
  // Yield the pending argument count (set by ARGUMENT_COUNT_CODE), then clear it.
  # define UC_arg_count ((temp_arg_count = arg_count), (arg_count = 0), temp_arg_count)
  
  fint temp_bci;
  // for process pre-emption, stop on backward branches
  // todo optimize should probably just stop every 10 or 100 backward branches, or even just every N bytecodes
  # define set_bci(bci_oop) (temp_bci = value_of_smiOop(assert_smi(bci_oop)), stop = temp_bci < bci, bci = temp_bci)
  
  oop_t self = get_self_quickly(io);
  oop_t rcvr = get_rcvr_quickly(io);
  for ( bool stop = false; !stop; ) {
    if (bci >= codes_length) {
      oop_t r = pop();
      oop_t s = get_sender_quickly(io);
      if (s != NULL) // it'll be NULL if we're returning from the start method
        ActivationObj::from(s)->remote_push(r);
      // todo optimize time slow; quits this routine just for a return -- dmu 1/06
      return s;
    }
    unsigned char bc = codes[bci++];
    ByteCodeKind kind  = getOp(bc);
    fint         bc_index = getIndex(bc);
    // printf("interpreting a bytecode in activationMap %i, bc is %i, kind is %i, bc_index is %i\n", map_oop(), bc, kind, bc_index);
    switch (kind) {
     default:   fatal("unknown kind of bytecode"); break;
     
     case                   INDEX_CODE:          index = UC_index;     break;
     case           LEXICAL_LEVEL_CODE:  lexical_level = UC_index;     break;
     case          ARGUMENT_COUNT_CODE:      arg_count = UC_index;     break;
  
     case           READ_LOCAL_CODE:   push(local_obj_addr(lexical_level)-> read_arg_or_local(UC_index)      );  lexical_level = 0;               break;
     case          WRITE_LOCAL_CODE:        local_obj_addr(lexical_level)->write_arg_or_local(UC_index, pop());  lexical_level = 0;  push(self);  break;
     
     case          BRANCH_CODE:                                                          set_bci(use_lit);                   break;
     case          BRANCH_TRUE_CODE:     if ( pop() == The::oop_of(The:: true_object))   set_bci(use_lit);  else index = 0;  break;
     case          BRANCH_FALSE_CODE:    if ( pop() == The::oop_of(The::false_object))   set_bci(use_lit);  else index = 0;  break;
     case          BRANCH_INDEXED_CODE:
                                        {
                                         ObjVectorObj* branch_vector_addr = ObjVectorObj::from(assert_objVector(use_lit));
                                         oop_t branch_index_oop = pop();
                                         if ( is_smi(branch_index_oop) ) {
                                            smi branch_index = value_of_smiOop(branch_index_oop);
                                            if (  0 <= branch_index  &&  branch_index < branch_vector_addr->indexableSize()  )   {
                                              oop_t dest_oop = branch_vector_addr->indexable_at(branch_index);
                                              set_bci(dest_oop);
                                            }
                                         }
                                        }
                                        break;
       
     
     case      DELEGATEE_CODE:               delegatee = use_lit;                                     break;


     case LITERAL_CODE:
      {
       oop_t lit = use_lit;
       if (::is_block(lit)) {
         put_sp_quickly(io, sp); // make sure that the sp is stored correctly, because an allocation could trigger a GC
         oop_t cloned_block = BlockObj::clone_block(lit, this_activation);
         ActivationObj* possibly_moved_act_addr = ActivationObj::from(this_activation); // mightHaveScavengedTheActivation
         if (possibly_moved_act_addr != this) {
           possibly_moved_act_addr->remote_push(cloned_block);
           possibly_moved_act_addr->put_pc_quickly( io, bci );
           return this_activation;
         } else {
           push(cloned_block);
         }
       } else {
         push(lit);
       }
      }
      break;
     
     case IMPLICIT_SEND_CODE:
      // fall through
     case SEND_CODE:
     {
      oop_t selector = use_lit;
      if (selector == The::oop_of(The::restart_selector)) {
        put_sp_quickly( io,  sp  = first_stack_offset               );
        put_pc_quickly( io,  bci = get_pc_after_endInit_quickly(io) );
        break;
      }
      put_sp_quickly( io, sp );
      // todo optimize dmu 3/6. This is here for the _Breakpoint primitive to help debugging by storing the PC.
      // But it slows every primitive, sigh.
      put_pc_quickly( io, bci);

      oop_t a = send(kind == IMPLICIT_SEND_CODE, selector, UC_undirected_resend, UC_del, UC_arg_count, this_activation); 
      if (a != this_activation || ActivationObj::from(a) != this) { // mightHaveScavengedTheActivation
        // put_pc_quickly( io, bci); // commented out after I added the put_pc_quickly above, dmu 3/6
        return a;
      }
      sp = get_sp_quickly(io);
     }
     break;
      
     case NO_OPERAND_CODE:
      switch(bc_index) {
       default: fatal("???"); break;
        case               POP_CODE:      pop();                                  break;
        case              SELF_CODE:      push(self);                             break;
        case          END_INIT_CODE:      put_pc_after_endInit_quickly(io, bci);  break;

        case   NONLOCAL_RETURN_CODE:      return nonlocal_return(pop(), rcvr);    break;
        case UNDIRECTED_RESEND_CODE:      undirected_resend = true;               break;
       }
       break;

    }
  }
  put_sp_quickly( io, sp  );
  put_pc_quickly( io, bci );
  return this_activation;
}
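
Each bytecode packs an opcode and a small index operand into a single byte; operands too wide for the field are assembled from INDEX_CODE prefix bytecodes, which is exactly what the interpreter's running index accumulator and the UC_index macro implement. The decoding sketch below assumes a 5-bit index field and hypothetical opcode values; the real widths and numbering live in the Self sources.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

const int INDEXWIDTH = 5;            // assumed operand field width
const uint8_t INDEX_CODE   = 0;      // hypothetical opcode values
const uint8_t LITERAL_CODE = 1;

uint8_t getOp(uint8_t bc)    { return bc >> INDEXWIDTH; }
uint8_t getIndex(uint8_t bc) { return bc & ((1 << INDEXWIDTH) - 1); }

// Decode one literal reference, widening the index operand with INDEX_CODE
// prefixes the way the interpreter's `index` accumulator and UC_index do.
int decode_literal_index(const std::vector<uint8_t>& codes, size_t& bci) {
  int index = 0;
  for (;;) {
    uint8_t bc = codes[bci++];
    int bc_index = getIndex(bc);
    if (getOp(bc) == INDEX_CODE)
      index = (index << INDEXWIDTH) | bc_index;  // accumulate and keep reading
    else {
      assert(getOp(bc) == LITERAL_CODE);
      return (index << INDEXWIDTH) | bc_index;   // UC_index: use, then reset
    }
  }
}

Clearing the accumulator after each use keeps the common case cheap: most operands fit in one byte, and only the rare large index pays for extra prefix bytecodes.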