/* Compact just the code heap, after growing the data heap.
   Slides all live code blocks toward the start of the code heap and
   rewrites every reference to a moved code block (in the data heap,
   in contexts, and inside other code blocks).
   trace_contexts_p: if true, also forward code-block references held
   by execution contexts (false e.g. when contexts are dead/irrelevant). */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
	/* Figure out where blocks are going to go: the mark bitmap of the
	   code allocator doubles as the forwarding map once
	   compute_forwarding() has run. */
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
	code_forwarding_map->compute_forwarding();

	/* code_finger tracks the compaction frontier; blocks below it have
	   already been moved, so the fixup can resolve addresses on either
	   side of it correctly while compaction is in progress. */
	const code_block *code_finger = code->allocator->first_block();
	code_compaction_fixup fixup(code_forwarding_map,&code_finger);
	slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

	/* Forward references to code blocks that are still being
	   constructed (not yet reachable through the usual roots). */
	code_forwarder.visit_uninitialized_code_blocks();

	if(trace_contexts_p)
		code_forwarder.visit_context_code_blocks();

	/* Update code heap references in data heap. The data heap itself
	   does not move here, so a plain walk over every object suffices. */
	object_grow_heap_updater object_updater(code_forwarder);
	each_object(object_updater);

	/* Slide everything in the code heap up, and update code heap
	   pointers inside code blocks. */
	code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	/* Code roots and callback stubs hold raw code addresses that the
	   visitors above do not cover; fix them up explicitly. */
	update_code_roots_for_compaction();
	callbacks->update();
}
/* Compact data and code heaps simultaneously. Tenured objects and code
   blocks both slide toward the start of their heaps, and every pointer
   between the two heaps is rewritten through a shared compaction_fixup.
   trace_contexts_p: if true, also forward roots held by execution
   contexts. Emits started/ended events for GC profiling when one is
   attached to the current GC. */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
	gc_event *event = current_gc->event;
#if defined(FACTOR_DEBUG)
	/* Sanity check: the all-blocks set must agree with the heap before
	   we start moving blocks around. */
	code->verify_all_blocks_set();
#endif

	if(event) event->started_compaction();

	tenured_space *tenured = data->tenured;
	/* The mark bitmaps double as forwarding maps once
	   compute_forwarding() has run. */
	mark_bits<object> *data_forwarding_map = &tenured->state;
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

	/* Figure out where blocks are going to go */
	data_forwarding_map->compute_forwarding();
	code_forwarding_map->compute_forwarding();

	/* Fingers track the compaction frontier of each heap so the fixup
	   can resolve addresses on either side of the frontier while the
	   slide is in progress. */
	const object *data_finger = tenured->first_block();
	const code_block *code_finger = code->allocator->first_block();

	compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
	slot_visitor<compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

	code_forwarder.visit_code_roots();

	/* Object start offsets get recomputed by the object_compaction_updater */
	data->tenured->starts.clear_object_start_offsets();

	/* Slide everything in tenured space up, and update data and code heap
	pointers inside objects. */
	object_compaction_updater object_updater(this,fixup);
	tenured->compact(object_updater,fixup,&data_finger);

	/* Slide everything in the code heap up, and update data and code heap
	pointers inside code blocks. */
	code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	/* Roots are visited after both heaps have moved, so the fixup now
	   maps every old address to its final location. */
	data_forwarder.visit_roots();
	if(trace_contexts_p)
	{
		data_forwarder.visit_contexts();
		code_forwarder.visit_context_code_blocks();
	}

	/* Code roots and callback stubs hold raw code addresses not covered
	   by the visitors above. */
	update_code_roots_for_compaction();
	callbacks->update();

	/* Block addresses changed, so the all-blocks set must be rebuilt. */
	code->initialize_all_blocks_set();

	if(event) event->ended_compaction();
}
// Compact data and code heaps simultaneously. Tenured objects and code
// blocks both slide toward the start of their heaps, and every pointer
// between the two heaps is rewritten through a shared compaction_fixup.
// Unlike the older visitor-object variant, per-object fixup work is
// expressed with local lambdas. Always traces contexts; records timing
// on the current GC event when one is attached.
void factor_vm::collect_compact_impl() {
  gc_event* event = current_gc->event;
#ifdef FACTOR_DEBUG
  // Sanity check: the all-blocks set must agree with the heap before we
  // start moving blocks around.
  code->verify_all_blocks_set();
#endif
  if (event) event->reset_timer();
  tenured_space* tenured = data->tenured;
  // The mark bitmaps double as forwarding maps once compute_forwarding()
  // has run.
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  // Figure out where blocks are going to go
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  // Fingers track the compaction frontier of each heap so the fixup can
  // resolve addresses on either side of the frontier mid-slide.
  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  // Scope the fixup and forwarder so the lambdas capturing them by
  // reference cannot outlive them.
  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map,
                           &data_finger, &code_finger);

    slot_visitor<compaction_fixup> forwarder(this, fixup);

    // Forward references to code blocks that are still being constructed
    // (not yet reachable through the usual roots).
    forwarder.visit_uninitialized_code_blocks();

    // Object start offsets get recomputed by the object_compaction_updater
    data->tenured->starts.clear_object_start_offsets();

    // Slide everything in tenured space up, and update data and code heap
    // pointers inside objects. The start-offset table is rebuilt from the
    // new addresses as we go.
    auto compact_object_func = [&](object* old_addr, object* new_addr,
                                   cell size) {
      forwarder.visit_slots(new_addr);
      forwarder.visit_object_code_block(new_addr);
      tenured->starts.record_object_start_offset(new_addr);
    };
    tenured->compact(compact_object_func, fixup, &data_finger);

    // Slide everything in the code heap up, and update data and code heap
    // pointers inside code blocks. Instruction operands are relocated
    // relative to the block's pre-move entry point.
    auto compact_code_func = [&](code_block* old_addr, code_block* new_addr,
                                 cell size) {
      forwarder.visit_code_block_objects(new_addr);
      cell old_entry_point = old_addr->entry_point();
      forwarder.visit_instruction_operands(new_addr, old_entry_point);
    };
    code->allocator->compact(compact_code_func, fixup, &code_finger);

    // Roots are visited after both heaps have moved, so the fixup now
    // maps every old address to its final location.
    forwarder.visit_all_roots();
    forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();

  // Each callback has a relocation with a pointer to a code block in
  // the code heap. Since the code heap has now been compacted, those
  // pointers are invalid and we need to update them.
  auto callback_updater = [&](code_block* stub, cell size) {
    callbacks->update(stub);
  };
  callbacks->allocator->iterate(callback_updater);

  // Block addresses changed, so the all-blocks set must be rebuilt.
  code->initialize_all_blocks_set();
  if (event) event->ended_compaction();
}