/* Allocates memory */
/* Append a code template's instruction bytes (slot 0 of the template
   array) to the code being compiled, after recording the template's
   relocations. While computing_offset_p is set, the jit is translating
   a byte offset into an instruction position: each emitted template
   consumes part of the remaining offset until the offset lands on or
   inside a template, at which point `position` is adjusted and the
   search ends. NOTE(review): the `position--` vs `position++`
   asymmetry presumably distinguishes landing exactly on a template
   boundary from landing inside one — confirm against the caller that
   sets computing_offset_p. */
void jit::emit(cell code_template_) {
  gc_root<array> code_template(code_template_, parent_vm);
  emit_relocation(code_template.value());
  gc_root<byte_array> insns(array_nth(code_template.untagged(), 0), parent_vm);
  if (computing_offset_p) {
    cell size = array_capacity(insns.untagged());
    if (offset == 0) {
      /* Offset falls exactly at the start of this template. */
      position--;
      computing_offset_p = false;
    } else if (offset < size) {
      /* Offset falls strictly inside this template. */
      position++;
      computing_offset_p = false;
    } else
      offset -= size; /* Not reached yet; skip past this template. */
  }
  code.append_byte_array(insns.value());
}
/* Allocate a callback stub code block for `owner`: copy the
   CALLBACK_STUB machine-code template into fresh callback-heap space
   and patch its relocations with the VM pointer (and, where the
   calling convention requires it, the RET rewind amount). */
code_block* callback_heap::add(cell owner, cell return_rewind) {
  /* code_template is a 2-tuple where the first element contains the
     relocations and the second a byte array of compiled assembly
     code. The code assumes that there are four relocations on x86 and
     three on ppc. */
  tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
  tagged<byte_array> insns(array_nth(code_template.untagged(), 1));
  cell size = array_capacity(insns.untagged());
  /* Room for the copied code plus its code_block header, aligned. */
  cell bump = align(size + sizeof(code_block), data_alignment);
  code_block* stub = allocator->allot(bump);
  if (!stub) {
    /* NOTE(review): assumes general_error does not return (unwinds to
       an error handler); otherwise the null `stub` would be
       dereferenced below — confirm. */
    parent->general_error(ERROR_CALLBACK_SPACE_OVERFLOW, false_object, false_object);
  }
  /* Block size with the low bits cleared — presumably reserved for
     header flag bits; confirm against code_block layout. */
  stub->header = bump & ~7;
  stub->owner = owner;
  stub->parameters = false_object;
  stub->relocation = false_object;
  /* The machine code lives right after the header. */
  memcpy((void*)stub->entry_point(), insns->data<void>(), size);
  /* Store VM pointer in two relocations. */
  store_callback_operand(stub, 0, (cell)parent);
  store_callback_operand(stub, 2, (cell)parent);
  /* On x86, the RET instruction takes an argument which depends on
     the callback's calling convention */
  if (return_takes_param_p())
    store_callback_operand(stub, 3, return_rewind);
  update(stub);
  return stub;
}
/* Fetch the `index`-th relocation record of the callback stub
   template and wrap it as an instruction operand anchored at `stub`. */
instruction_operand callback_heap::callback_operand(code_block *stub, cell index) {
  /* Slot 0 of the CALLBACK_STUB template is a byte array holding
     packed relocation_entry records. */
  tagged<array> stub_template(parent->special_objects[CALLBACK_STUB]);
  tagged<byte_array> relocations(array_nth(stub_template.untagged(), 0));
  relocation_entry *entries = relocations->data<relocation_entry>();
  relocation_entry picked(entries[index]);
  return instruction_operand(picked, stub, 0);
}
/* Patch a freshly-copied callback stub so it refers to its compiled
   code block, then flush the instruction cache over the stub. */
void callback_heap::update(callback *stub) {
  tagged<array> stub_template(parent->userenv[CALLBACK_STUB]);
  /* Template slot 1 holds the relocation class, slot 3 the byte
     offset of the location to patch within the stub's code. */
  cell klass = untag_fixnum(array_nth(stub_template.untagged(), 1));
  cell patch_offset = untag_fixnum(array_nth(stub_template.untagged(), 3));
  /* The stub's code follows the callback header (stub + 1); the
     target is the compiled block's code, also past its header. */
  cell patch_location = (cell)(stub + 1) + patch_offset;
  cell target = (cell)(stub->compiled + 1);
  parent->store_address_in_code_block(klass, patch_location, target);
  flush_icache((cell)stub, stub->size);
}
/* Copy the (class, type, offset) relocation triples of a code
   template into this compiler's relocation stream, rebasing each
   offset against the amount of code emitted so far. */
void jit::emit_relocation(cell code_template_) {
  data_root<array> code_template(code_template_, parent);
  cell length = array_capacity(code_template.untagged());
  /* Slot 0 holds the instruction bytes; triples start at slot 1. */
  cell i = 1;
  while (i < length) {
    array *elements = code_template.untagged();
    relocation_class rel_class =
        (relocation_class)untag_fixnum(array_nth(elements, i));
    relocation_type rel_type =
        (relocation_type)untag_fixnum(array_nth(elements, i + 1));
    fixnum rel_offset = untag_fixnum(array_nth(elements, i + 2));
    relocation_entry entry(rel_type, rel_class, code.count + rel_offset);
    relocation.append_bytes(&entry, sizeof(relocation_entry));
    i += 3;
  }
}
/* Append a code template's relocation triples, packing each into the
   32-bit entry layout: type in bits 28-31, class in bits 24-27, and
   the rebased offset in the remaining low bits. */
void jit::emit_relocation(cell code_template_) {
  gc_root<array> code_template(code_template_, parent_vm);
  cell length = array_capacity(code_template.untagged());
  /* Triples of (class, type, offset) start at slot 1; slot 0 holds
     the instruction byte array. */
  for (cell i = 1; i < length; i += 3) {
    array *elements = code_template.untagged();
    fixnum rel_class = untag_fixnum(array_nth(elements, i));
    fixnum rel_type = untag_fixnum(array_nth(elements, i + 1));
    fixnum rel_offset = untag_fixnum(array_nth(elements, i + 2));
    relocation_entry packed =
        (rel_type << 28) | (rel_class << 24) | (code.count + rel_offset);
    relocation.append_bytes(&packed, sizeof(relocation_entry));
  }
}
/* Reconstruct the `index`-th relocation of the callback stub template
   — stored as flat (class, type, offset) triples following slot 0 —
   and return it as an instruction operand anchored at `stub`. */
instruction_operand callback_heap::callback_operand(code_block *stub, cell index) {
  tagged<array> stub_template(parent->special_objects[CALLBACK_STUB]);
  array *elements = stub_template.untagged();
  cell base = 3 * index; /* triples begin at slot 1 */
  relocation_class klass =
      (relocation_class)untag_fixnum(array_nth(elements, base + 1));
  relocation_type type =
      (relocation_type)untag_fixnum(array_nth(elements, base + 2));
  cell offset = untag_fixnum(array_nth(elements, base + 3));
  relocation_entry rebuilt(type, klass, offset);
  return instruction_operand(rebuilt, stub, 0);
}
/* Bump-allocate a callback stub code block for `owner` from the
   callback segment, copy in the CALLBACK_STUB machine-code template,
   and patch its relocations (VM pointer, optional SEH setup operand,
   and RET rewind when the calling convention needs one). */
code_block *callback_heap::add(cell owner, cell return_rewind) {
  /* Template slot 1 is the byte array of compiled assembly code. */
  tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
  tagged<byte_array> insns(array_nth(code_template.untagged(),1));
  cell size = array_capacity(insns.untagged());
  /* Room for the code plus its code_block header, aligned. */
  cell bump = align(size + sizeof(code_block),data_alignment);
  if(here + bump > seg->end) fatal_error("Out of callback space",0);
  /* Carve the block off the bump pointer, initializing its header
     via the free-block representation first. */
  free_heap_block *free_block = (free_heap_block *)here;
  free_block->make_free(bump);
  here += bump;
  code_block *stub = (code_block *)free_block;
  stub->owner = owner;
  stub->parameters = false_object;
  stub->relocation = false_object;
  memcpy(stub->entry_point(),insns->data<void>(),size);
  /* Store VM pointer */
  store_callback_operand(stub,0,(cell)parent);
  /* NOTE(review): setup_seh_p presumably means the platform needs a
     Windows structured-exception-handling prologue relocation, which
     shifts every later relocation index by one — confirm. */
  cell index;
  if(setup_seh_p())
    {
      store_callback_operand(stub,1);
      index = 1;
    }
  else
    index = 0;
  /* Store VM pointer */
  store_callback_operand(stub,index + 2,(cell)parent);
  /* On x86, the RET instruction takes an argument which depends on
     the callback's calling convention */
  if(return_takes_param_p())
    store_callback_operand(stub,index + 3,return_rewind);
  update(stub);
  return stub;
}
/* Allocates memory */
/* Emit the machine-code template attached to a subprimitive word.
   Template slot 0 holds parameters, slot 1 literals, and slot 2 the
   common code, which is always emitted. A 5-element template also
   carries non-tail-call code (slot 3) and tail-call code (slot 4).
   Returns true exactly when tail-call code was emitted. */
bool jit::emit_subprimitive(cell word_, bool tail_call_p, bool stack_frame_p) {
  data_root<word> word(word_, parent);
  data_root<array> code_template(word->subprimitive, parent);
  parameters.append(untag<array>(array_nth(code_template.untagged(), 0)));
  literals.append(untag<array>(array_nth(code_template.untagged(), 1)));
  emit(array_nth(code_template.untagged(), 2));
  if (array_capacity(code_template.untagged()) != 5)
    return false;
  if (!tail_call_p) {
    emit(array_nth(code_template.untagged(), 3));
    return false;
  }
  /* Tear down the stack frame before the tail-call code if one was
     set up. */
  if (stack_frame_p)
    emit(parent->special_objects[JIT_EPILOG]);
  emit(array_nth(code_template.untagged(), 4));
  return true;
}
/* Allocate a callback stub code block for `owner` from the callback
   allocator, copy in the CALLBACK_STUB machine-code template, and
   patch its relocations (VM pointer, optional SEH operand, and RET
   rewind when the calling convention needs one). */
code_block* callback_heap::add(cell owner, cell return_rewind) {
  /* Template slot 1 is the byte array of compiled assembly code. */
  tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
  tagged<byte_array> insns(array_nth(code_template.untagged(), 1));
  cell size = array_capacity(insns.untagged());
  /* Room for the code plus its code_block header, aligned. */
  cell bump = align(size + sizeof(code_block), data_alignment);
  code_block* stub = allocator->allot(bump);
  if (!stub) {
    /* NOTE(review): assumes general_error does not return (unwinds to
       an error handler); otherwise the null `stub` would be
       dereferenced below — confirm. */
    parent->general_error(ERROR_CALLBACK_SPACE_OVERFLOW, false_object, false_object);
  }
  /* Block size with the low bits cleared — presumably reserved for
     header flag bits; confirm against code_block layout. */
  stub->header = bump & ~7;
  stub->owner = owner;
  stub->parameters = false_object;
  stub->relocation = false_object;
  /* The machine code lives right after the header. */
  memcpy((void*)stub->entry_point(), insns->data<void>(), size);
  /* Store VM pointer */
  store_callback_operand(stub, 0, (cell)parent);
  /* NOTE(review): setup_seh_p presumably means the platform needs a
     Windows structured-exception-handling relocation, which shifts
     every later relocation index by one — confirm. */
  cell index;
  if (setup_seh_p()) {
    store_callback_operand(stub, 1);
    index = 1;
  } else
    index = 0;
  /* Store VM pointer */
  store_callback_operand(stub, index + 2, (cell) parent);
  /* On x86, the RET instruction takes an argument which depends on
     the callback's calling convention */
  if (return_takes_param_p())
    store_callback_operand(stub, index + 3, return_rewind);
  update(stub);
  return stub;
}
/* Carve a new callback stub off the bump-allocated callback segment,
   copy the CALLBACK_STUB machine-code template into it, and patch it
   to refer to `compiled`. Dies with a fatal error when the segment is
   exhausted. */
callback *callback_heap::add(code_block *compiled) {
  tagged<array> stub_template(parent->userenv[CALLBACK_STUB]);
  tagged<byte_array> insn_bytes(array_nth(stub_template.untagged(), 0));
  cell insn_size = array_capacity(insn_bytes.untagged());
  /* Header plus code, with the code length rounded up to 8 bytes. */
  cell bump = align8(insn_size) + sizeof(callback);
  if (here + bump > seg->end)
    fatal_error("Out of callback space", 0);
  callback *stub = (callback *)here;
  here += bump;
  stub->compiled = compiled;
  stub->size = align8(insn_size);
  /* The machine code lives immediately after the callback header. */
  memcpy(stub + 1, insn_bytes->data<void>(), insn_size);
  update(stub);
  return stub;
}
/* Record `argument_` as a literal, then emit the code template. Both
   values are rooted across the emit, which can allocate. */
void jit::emit_with(cell code_template_, cell argument_) {
  gc_root<object> arg(argument_, parent_vm);
  gc_root<array> insns(code_template_, parent_vm);
  literal(arg.value());
  emit(insns.value());
}
/* Record `argument_` as a parameter, then emit the code template.
   Both values are rooted across the emit, which can allocate. */
void jit::emit_with_parameter(cell code_template_, cell argument_) {
  data_root<object> arg(argument_, parent);
  data_root<array> insns(code_template_, parent);
  parameter(arg.value());
  emit(insns.value());
}