void ActivationObj::initialize(ActivationMapObj* m_addr, oop_t activationMap, oop_t method_holder, oop_t self, oop_t rcvr, oop_t* args, fint arg_count, fint assignable_local_count, fint max_stack_size, oop_t sender_act) { set_mark_bit_for_activation(); set_map( activationMap ); fint io = indexableOrigin(); put_sp_quickly( io, first_stack_offset ); put_pc_quickly( io, m_addr->starting_pc() ); put_self_quickly( io, self ); put_rcvr_quickly( io, rcvr ); put_sender_quickly( io, sender_act ); put_methodHolder_quickly( io, method_holder ); // set args for (fint i = 0; i < arg_count; ++i) write_arg_or_local( first_arg_offset + i, args[i] ); // set locals oop_t nilOop = The::oop_of(The::nil_object); for (fint i = 0; i < assignable_local_count; ++i) write_arg_or_local( first_arg_offset + arg_count + i, nilOop ); }
// Build a fresh object vector containing the argc outgoing arguments that sit
// on this activation's expression stack (sp has already been cut back past
// them). For a non-implicit-self send the receiver sits below the args, so we
// skip one extra slot.
//
// Returns the oop of the new vector.
oop_t ActivationObj::vector_of_outgoing_arguments(fint argc, bool isImplicitSelf) {
  ObjVectorObj* vaddr;
  oop_t v = ((ObjVectorObj*) The::addr_of(The::vector_proto))->clone_and_resize(argc, The::oop_of(The::nil_object), &vaddr);

  // First stack slot holding an outgoing argument; sp has already been cut back.
  fint beginning_of_args_offset = indexableOrigin() + get_sp() + (isImplicitSelf ? 0 : 1);

  // BUG FIX: the destination index into the new vector must run 0..argc-1.
  // The original wrote at the activation-stack offset itself
  // (write_indexable_at(i, ...) with i >= beginning_of_args_offset), which is
  // far past the end of an argc-element vector.
  for (fint i = 0; i < argc; ++i) {
    vaddr->write_indexable_at( i, read_oop(beginning_of_args_offset + i) );
  }
  return v;
}
// Clone this byte vector with a (possibly different) indexable size.
// Allocates a copy of the oop part plus a fresh byte area, records the new
// size, and copies the existing bytes over (padding with `fill` when
// shouldFill is set). If addrp is non-null, the new object's address is
// stored through it. Returns the oop of the clone.
oop_t ByteVectorObj::clone_and_resize(smi new_indexable_size, char fill, ByteVectorObj** addrp, bool shouldFill) {
  ByteVectorObj* clone_addr;
  char* clone_bytes;
  oop_t result = clone_oops_and_allocate_bytes(indexableOrigin(), new_indexable_size, (MemObj**) &clone_addr, &clone_bytes);

  clone_addr->set_indexableSize(new_indexable_size);
  // Copy the old contents into the new byte area; pad the tail with `fill`
  // only when the caller asked for it.
  copy_bytes_to(clone_bytes, new_indexable_size, fill, shouldFill);

  assert(clone_addr->is_byteVector());
  if (addrp)
    *addrp = clone_addr;
  return result;
}