// Construct a Bloom filter over an externally supplied bit map.
//   N    - expected number of inserted elements
//   K    - number of hash functions used per element
//   bits - backing bit map; its size determines M_ (total bits)
// pFalse_ caches the predicted false-positive rate via computePFalse.
// NOTE(review): members initialize in declaration order, not init-list order;
// pFalse_'s use of M_ assumes M_ is declared before pFalse_ — confirm in the header.
BloomFilter::BloomFilter(size_t N, size_t K, const BitMap &bits)
    : N_(N),
      M_(bits.size()),
      K_(K),
      bits_(bits),
      pFalse_(computePFalse(N, M_, K)) { }
// Pop all FPU stack slots whose simulated registers are not in live_fpu_regs,
// emitting the necessary fxch/fpop instructions into instrs and keeping
// cur_sim in sync. Used when merging block states so the stack holds only
// live registers afterwards.
//   instrs        - instruction list that receives the cleanup LIR ops
//   cur_sim       - simulated FPU stack state (mutated in place)
//   live_fpu_regs - bit per FPU register; set bits must remain on the stack
void FpuStackAllocator::merge_cleanup_fpu_stack(LIR_List* instrs, FpuStackSim* cur_sim, BitMap& live_fpu_regs) {
#ifndef PRODUCT
  if (TraceFPUStack) {
    tty->cr();
    tty->print("before cleanup: state: "); cur_sim->print(); tty->cr();
    tty->print(" live: "); live_fpu_regs.print_on(tty); tty->cr();
  }
#endif

  int slot = 0;
  while (slot < cur_sim->stack_size()) {
    int reg = cur_sim->get_slot(slot);
    if (!live_fpu_regs.at(reg)) {
      // Dead register: only the top of stack can be popped on x87, so first
      // exchange it to the top if it is not already there, then pop it.
      if (slot != 0) {
        merge_insert_xchg(instrs, cur_sim, slot);
      }
      merge_insert_pop(instrs, cur_sim);
      // Do not advance slot: the pop (and possible xchg) moved a different
      // register into this slot, which must be re-examined.
    } else {
      slot++;
    }
  }

#ifndef PRODUCT
  if (TraceFPUStack) {
    tty->print("after cleanup: state: "); cur_sim->print(); tty->cr();
    tty->print(" live: "); live_fpu_regs.print_on(tty); tty->cr();
    tty->cr();
  }

  // check if fpu stack only contains live registers
  for (unsigned int i = 0; i < live_fpu_regs.size(); i++) {
    if (live_fpu_regs.at(i) != cur_sim->contains(i)) {
      tty->print_cr("mismatch between required and actual stack content");
      break;
    }
  }
#endif
}
// Number of bits in the mark bitmap, as reported by the underlying
// begin-bits map.
inline size_t ParMarkBitMap::size() const {
  const size_t bit_count = _beg_bits.size();
  return bit_count;
}
// Is the given page index the last page? bool is_last_page(size_t index) const { return index == (_committed.size() - 1); }