/* Slide-compaction phase 1 for the large object space (LOS): walk every
 * marked object and compute its post-compaction address, sliding objects
 * down toward lspace->heap_start.  Each object's forwarding pointer is
 * installed in its obj-info slot; obj-info words that were non-zero (and
 * would be clobbered by the forwarding pointer) are saved as
 * (new-address, old-info) pairs in the collector's remembered set so they
 * can be restored after the move.  On return, lspace->scompact_fa_start/
 * scompact_fa_end delimit the area that becomes free after compaction. */
void lspace_compute_object_target(Collector* collector, Lspace* lspace)
{
  /* Compaction destination starts at the bottom of the LOS. */
  void* dest_addr = lspace->heap_start;
  unsigned int iterate_index = 0;
  Partial_Reveal_Object* p_obj = lspace_get_first_marked_object(lspace, &iterate_index);

  /* Take a fresh remset block from the shared pool; the collector must
   * not already be holding one. */
  assert(!collector->rem_set);
  collector->rem_set = free_set_pool_get_entry(collector->gc->metadata);
#ifdef USE_32BITS_HASHCODE
  collector->hashcode_set = free_set_pool_get_entry(collector->gc->metadata);
#endif
#ifdef GC_GEN_STATS
  GC_Gen_Collector_Stats* stats = (GC_Gen_Collector_Stats*)collector->stats;
#endif

  while( p_obj ){
    assert( obj_is_marked_in_vt(p_obj));
    unsigned int obj_size = vm_object_size(p_obj);
#ifdef GC_GEN_STATS
    gc_gen_collector_update_moved_los_obj_stats_major(stats, vm_object_size(p_obj));
#endif
    /* Sliding can only shrink the space used, so the target must fit. */
    assert(((POINTER_SIZE_INT)dest_addr + obj_size) <= (POINTER_SIZE_INT)lspace->heap_end);
#ifdef USE_32BITS_HASHCODE
    /* An attached hashcode adds one alignment slot to the moved size. */
    obj_size += hashcode_is_attached(p_obj)? GC_OBJECT_ALIGNMENT : 0 ;
    /* LOS has no per-block hashcode buffers, hence the null buf args. */
    Obj_Info_Type obj_info = slide_compact_process_hashcode(p_obj, dest_addr, &obj_size, collector, null, null);
#else
    Obj_Info_Type obj_info = get_obj_info_raw(p_obj);
#endif

    /* Remember non-zero obj info (keyed by the object's NEW address) so
     * it can be restored once the forwarding pointer is no longer needed. */
    if( obj_info != 0 ) {
      collector_remset_add_entry(collector, (Partial_Reveal_Object **)dest_addr);
      collector_remset_add_entry(collector, (Partial_Reveal_Object **)(POINTER_SIZE_INT)obj_info);
    }

    /* Store the forwarding pointer in the object's obj-info slot. */
    obj_set_fw_in_oi(p_obj, dest_addr);

    /* LOS allocation is kilobyte-aligned; keep targets aligned likewise. */
    dest_addr = (void *)ALIGN_UP_TO_KILO(((POINTER_SIZE_INT) dest_addr + obj_size));
    p_obj = lspace_get_next_marked_object(lspace, &iterate_index);
  }

  /* Publish the filled sets back to their shared pools. */
  pool_put_entry(collector->gc->metadata->collector_remset_pool, collector->rem_set);
  collector->rem_set = NULL;
#ifdef USE_32BITS_HASHCODE
  pool_put_entry(collector->gc->metadata->collector_hashcode_pool, collector->hashcode_set);
  collector->hashcode_set = NULL;
#endif

  /* Everything from the last target address up to heap_end is free
   * space once compaction completes. */
  lspace->scompact_fa_start = dest_addr;
  lspace->scompact_fa_end= lspace->heap_end;
  return;
}
/* Slide-compaction phase 1 for the mature space (MOS): one parallel
 * collector walks its assigned compact blocks, computing a forwarding
 * address in the current target block for every marked object.  Source
 * blocks that feed a target block are chained through dest_block->src /
 * last_src->next_src, and each source block counts its destinations in
 * dest_counter (used later to know when a block may be overwritten).
 * Clobbered obj-info words are saved in the collector's remembered set,
 * as (new-address, old-info) pairs.  The globally highest-indexed target
 * block is published to the shared `last_block_for_dest` via CAS.
 * On target-space exhaustion, sets collector->result = FALSE and bails. */
static void mspace_compute_object_target(Collector* collector, Mspace* mspace)
{
  Block_Header *curr_block = collector->cur_compact_block;
  Block_Header *dest_block = collector->cur_target_block;
  /* Highest-indexed target block this collector has used so far. */
  Block_Header *local_last_dest = dest_block;
  void *dest_addr = dest_block->base;
  /* Last source block chained into the current target's src list.
   * NOTE(review): set before first use on the dest_block->src == NULL
   * path; relies on that invariant rather than an initializer. */
  Block_Header *last_src;

#ifdef USE_32BITS_HASHCODE
  Hashcode_Buf* old_hashcode_buf = NULL;
  Hashcode_Buf* new_hashcode_buf = hashcode_buf_create();
  hashcode_buf_init(new_hashcode_buf);
#endif

  /* Take a fresh remset block; the collector must not already hold one. */
  assert(!collector->rem_set);
  collector->rem_set = free_set_pool_get_entry(collector->gc->metadata);
#ifdef USE_32BITS_HASHCODE
  collector->hashcode_set = free_set_pool_get_entry(collector->gc->metadata);
#endif
#ifdef GC_GEN_STATS
  GC_Gen_Collector_Stats* stats = (GC_Gen_Collector_Stats*)collector->stats;
#endif

  while( curr_block ) {
    void* start_pos;
    Partial_Reveal_Object *first_obj = block_get_first_marked_obj_prefetch_next(curr_block, &start_pos);
    if(first_obj) {
      /* This source block will move at least one object somewhere. */
      ++curr_block->dest_counter;
      /* Chain this block's first object into the target's src list. */
      if(!dest_block->src)
        dest_block->src = first_obj;
      else
        last_src->next_src = first_obj;
      last_src = curr_block;
    }

    Partial_Reveal_Object* p_obj = first_obj;
    while( p_obj ) {
      assert( obj_is_marked_in_vt(p_obj));

      /* start_pos was advanced past this object by the iterator, so the
       * difference is the object's size. */
      unsigned int obj_size = (unsigned int)((POINTER_SIZE_INT)start_pos - (POINTER_SIZE_INT)p_obj);
#ifdef GC_GEN_STATS
      gc_gen_collector_update_moved_nos_mos_obj_stats_major(stats, obj_size);
#endif

      Obj_Info_Type obj_info = get_obj_info(p_obj);

      /* Size used only for the does-it-fit check; may grow if a hashcode
       * would need to be attached at the new location. */
      unsigned int obj_size_precompute = obj_size;
#ifdef USE_32BITS_HASHCODE
      precompute_hashcode_extend_size(p_obj, dest_addr, &obj_size_precompute);
#endif

      /* Current target block full: seal it and advance to the next one. */
      if( ((POINTER_SIZE_INT)dest_addr + obj_size_precompute) > (POINTER_SIZE_INT)GC_BLOCK_END(dest_block)) {
#ifdef USE_32BITS_HASHCODE
        block_swap_hashcode_buf(dest_block, &new_hashcode_buf, &old_hashcode_buf);
#endif
        dest_block->new_free = dest_addr;
        dest_block = mspace_get_next_target_block(collector, mspace);
        if(dest_block == NULL) {
          /* Out of target blocks: report failure and abandon this pass. */
          collector->result = FALSE;
          return;
        }
        if((!local_last_dest) || (dest_block->block_idx > local_last_dest->block_idx))
          local_last_dest = dest_block;
        dest_addr = dest_block->base;
        /* The new target's src list starts at the current object. */
        dest_block->src = p_obj;
        last_src = curr_block;
        /* If objects from this source already went to the previous
         * target, this source now feeds one more destination. */
        if(p_obj != first_obj)
          ++curr_block->dest_counter;
      }
      assert(((POINTER_SIZE_INT)dest_addr + obj_size) <= (POINTER_SIZE_INT)GC_BLOCK_END(dest_block));

#ifdef USE_32BITS_HASHCODE
      obj_info = slide_compact_process_hashcode(p_obj, dest_addr, &obj_size, collector,curr_block->hashcode_buf, new_hashcode_buf);
#endif

      /* Save non-zero obj info (keyed by the NEW address) for later
       * restoration after the forwarding pointer is consumed. */
      if( obj_info != 0 ) {
        collector_remset_add_entry(collector, (Partial_Reveal_Object **)dest_addr);
        collector_remset_add_entry(collector, (Partial_Reveal_Object **)(POINTER_SIZE_INT)obj_info);
      }

      /* Install the forwarding pointer in the object's obj-info slot. */
      obj_set_fw_in_oi(p_obj, dest_addr);

      /* FIXME: should use alloc to handle alignment requirement */
      dest_addr = (void *)((POINTER_SIZE_INT) dest_addr + obj_size);
      p_obj = block_get_next_marked_obj_prefetch_next(curr_block, &start_pos);
    }

#ifdef USE_32BITS_HASHCODE
    /* Source block fully processed; its hashcode buffer is now stale. */
    hashcode_buf_clear(curr_block->hashcode_buf);
#endif
    curr_block = mspace_get_next_compact_block(collector, mspace);
  }

  /* Publish the filled sets back to their shared pools. */
#ifdef USE_32BITS_HASHCODE
  pool_put_entry(collector->gc->metadata->collector_hashcode_pool, collector->hashcode_set);
  collector->hashcode_set = NULL;
#endif
  pool_put_entry(collector->gc->metadata->collector_remset_pool, collector->rem_set);
  collector->rem_set = NULL;

  dest_block->new_free = dest_addr;

  /* CAS-publish this collector's highest target block into the shared
   * last_block_for_dest, retrying until ours is no longer the max or
   * the store succeeds (other collectors race on the same global). */
  Block_Header *cur_last_dest = (Block_Header *)last_block_for_dest;
  collector->cur_target_block = local_last_dest;
  while((local_last_dest)&&((!cur_last_dest) || (local_last_dest->block_idx > cur_last_dest->block_idx))) {
    atomic_casptr((volatile void **)&last_block_for_dest, local_last_dest, cur_last_dest);
    cur_last_dest = (Block_Header *)last_block_for_dest;
  }

#ifdef USE_32BITS_HASHCODE
  /* Attach the working hashcode buffer to the final target block and
   * dispose of the buffer it displaces. */
  old_hashcode_buf = block_set_hashcode_buf(dest_block, new_hashcode_buf);
  hashcode_buf_destory(old_hashcode_buf);
#endif
  return;
}
/* Copy live objects out of chunk `src` into same-slot-size chunk `*dest_ptr`
 * until either the source is drained or the destination fills up.  Each
 * moved object gets a forwarding pointer installed in its obj-info slot.
 * When the destination fills, it is registered as a used chunk and
 * *dest_ptr is set to NULL so the caller can supply a fresh destination;
 * src->alloc_num is updated with however many objects remain unmoved. */
static inline void move_obj_between_chunks(Wspace *wspace, Chunk_Header **dest_ptr, Chunk_Header *src)
{
  Chunk_Header *dest = *dest_ptr;
  /* Objects can only slide between chunks of the same slot size. */
  assert(dest->slot_size == src->slot_size);

  unsigned int slot_size = dest->slot_size;
  unsigned int alloc_num = src->alloc_num;
  assert(alloc_num);

#ifdef USE_32BITS_HASHCODE
  Hashcode_Buf* old_hashcode_buf = src->hashcode_buf;
  Hashcode_Buf* new_hashcode_buf = dest->hashcode_buf;
#endif

  while(alloc_num && dest){
    Partial_Reveal_Object *p_obj = next_alloc_slot_in_chunk(src);
    Partial_Reveal_Object *target = (Partial_Reveal_Object *)alloc_in_chunk(dest);

    /* Destination just handed out its last slot: mark it used/full and
     * drop it; the loop exits after finishing this object. */
    if(dest->slot_index == MAX_SLOT_INDEX){
      dest->status = CHUNK_USED | CHUNK_NORMAL;
      wspace_reg_used_chunk(wspace,dest);
      dest = NULL;
    }

    assert(p_obj && target);
    memcpy(target, p_obj, slot_size);

#ifdef USE_32BITS_HASHCODE
    /* Migrate the object's buffered hashcode (or buffer it now) so the
     * moved copy keeps the same identity hashcode. */
    if(hashcode_is_set(p_obj)){
      int hashcode;
      if(new_hashcode_buf == NULL) {
        /* Lazily create the destination's hashcode buffer on first need. */
        new_hashcode_buf = hashcode_buf_create();
        hashcode_buf_init(new_hashcode_buf);
        dest->hashcode_buf = new_hashcode_buf;
      }
      if(hashcode_is_buffered(p_obj)){
        /*already buffered objects;*/
        hashcode = hashcode_buf_lookup(p_obj, old_hashcode_buf);
        hashcode_buf_update(target, hashcode, new_hashcode_buf);
      }else{
        /*objects need buffering.*/
        hashcode = hashcode_gen(p_obj);
        hashcode_buf_update(target, hashcode, new_hashcode_buf);
        Obj_Info_Type oi = get_obj_info_raw(target);
        set_obj_info(target, oi | HASHCODE_BUFFERED_BIT);
      }
    }
#endif

#ifdef SSPACE_VERIFY
    wspace_modify_mark_in_compact(target, p_obj, slot_size);
#endif
    /* Install forwarding pointer in the old copy's obj-info slot. */
    obj_set_fw_in_oi(p_obj, target);
    --alloc_num;
  }

#ifdef USE_32BITS_HASHCODE
  /* Source fully drained: its hashcode buffer has been migrated, so it
   * can be destroyed.  If objects remain (alloc_num != 0), they still
   * reference the old buffer, which must stay alive. */
  if(alloc_num == 0) {
    if(old_hashcode_buf)
      hashcode_buf_destory(old_hashcode_buf);
    src->hashcode_buf = NULL;
  }
#endif

  /* dest might be set to NULL, so we use *dest_ptr here */
  assert((*dest_ptr)->alloc_num <= (*dest_ptr)->slot_num);

  /* Record how many objects are left unmoved in the source. */
  src->alloc_num = alloc_num;
  if(!dest){
    /* Destination filled exactly; tell the caller to provide a new one
     * and clear the free-slot bookkeeping for the source's cursor. */
    assert((*dest_ptr)->alloc_num == (*dest_ptr)->slot_num);
    *dest_ptr = NULL;
    clear_free_slot_in_table(src->table, src->slot_index);
  }
}