// Allocate word_size HeapWords out of the old-generation GC allocation
// region for the given allocation context. Returns NULL if the region
// cannot satisfy the request even after retrying under the lock.
// Humongous requests must never reach this path (asserted below).
inline HeapWord* G1CollectedHeap::old_attempt_allocation(size_t word_size,
                                                         AllocationContext_t context) {
  assert(!is_humongous(word_size),
         "we should not be seeing humongous-size allocations in this path");

  // First try without taking FreeList_lock; only fall back to the
  // locked variant when the unlocked attempt fails.
  HeapWord* obj = _allocator->old_gc_alloc_region(context)->attempt_allocation(word_size,
                                                                               true /* bot_updates */);
  if (obj != NULL) {
    return obj;
  }

  // Slow path: retry the allocation while holding FreeList_lock
  // (no safepoint check, as is conventional for this lock here).
  MutexLockerEx x(FreeList_lock, Mutex::_no_safepoint_check_flag);
  return _allocator->old_gc_alloc_region(context)->attempt_allocation_locked(word_size,
                                                                             true /* bot_updates */);
}
// Mutator-side allocation of word_size HeapWords from the current
// allocation context's mutator alloc region, falling back to the
// out-of-line slow path when the fast inline attempt fails.
// Must be called outside a safepoint and without the Heap_lock held.
// Returns NULL on failure; on success the newly allocated young block
// is dirtied before being handed back to the caller.
inline HeapWord* G1CollectedHeap::attempt_allocation(size_t word_size,
                                                     uint* gc_count_before_ret,
                                                     uint* gclocker_retry_count_ret) {
  assert_heap_not_locked_and_not_at_safepoint();
  assert(!is_humongous(word_size), "attempt_allocation() should not "
         "be called for humongous allocation requests");

  AllocationContext_t context = AllocationContext::current();

  // Fast path: inline attempt on the mutator alloc region
  // (no BOT updates for young-region allocations).
  HeapWord* obj = _allocator->mutator_alloc_region(context)->attempt_allocation(word_size,
                                                                                false /* bot_updates */);
  if (obj == NULL) {
    // Slow path may trigger GC / GCLocker retries; the out-parameters
    // report the GC count and retry count back to the caller.
    obj = attempt_allocation_slow(word_size, context,
                                  gc_count_before_ret,
                                  gclocker_retry_count_ret);
  }

  // The slow path must have released any locks it took.
  assert_heap_not_locked();

  if (obj != NULL) {
    dirty_young_block(obj, word_size);
  }
  return obj;
}
// True when this entry is either in the collection set or marks a
// humongous object; checks the collection set first, matching the
// short-circuit order of the two predicates.
bool is_in_cset_or_humongous() const {
  if (is_in_cset()) {
    return true;
  }
  return is_humongous();
}