void LowerBitSets::lowerBitSetCalls( ArrayRef<Metadata *> BitSets, Constant *CombinedGlobalAddr, const DenseMap<GlobalObject *, uint64_t> &GlobalLayout) { Constant *CombinedGlobalIntAddr = ConstantExpr::getPtrToInt(CombinedGlobalAddr, IntPtrTy); // For each bitset in this disjoint set... for (Metadata *BS : BitSets) { // Build the bitset. BitSetInfo BSI = buildBitSet(BS, GlobalLayout); DEBUG({ if (auto BSS = dyn_cast<MDString>(BS)) dbgs() << BSS->getString() << ": "; else dbgs() << "<unnamed>: "; BSI.print(dbgs()); }); ByteArrayInfo *BAI = 0; // Lower each call to llvm.bitset.test for this bitset. for (CallInst *CI : BitSetTestCallSites[BS]) { ++NumBitSetCallsLowered; Value *Lowered = lowerBitSetCall(CI, BSI, BAI, CombinedGlobalIntAddr, GlobalLayout); CI->replaceAllUsesWith(Lowered); CI->eraseFromParent(); } }
void LowerTypeTestsModule::lowerTypeTestCalls( ArrayRef<Metadata *> TypeIds, Constant *CombinedGlobalAddr, const DenseMap<GlobalObject *, uint64_t> &GlobalLayout) { Constant *CombinedGlobalIntAddr = ConstantExpr::getPtrToInt(CombinedGlobalAddr, IntPtrTy); // For each type identifier in this disjoint set... for (Metadata *TypeId : TypeIds) { // Build the bitset. BitSetInfo BSI = buildBitSet(TypeId, GlobalLayout); DEBUG({ if (auto MDS = dyn_cast<MDString>(TypeId)) dbgs() << MDS->getString() << ": "; else dbgs() << "<unnamed>: "; BSI.print(dbgs()); }); ByteArrayInfo *BAI = nullptr; // Lower each call to llvm.type.test for this type identifier. for (CallInst *CI : TypeTestCallSites[TypeId]) { ++NumTypeTestCallsLowered; Value *Lowered = lowerBitSetCall(CI, BSI, BAI, CombinedGlobalIntAddr, GlobalLayout); CI->replaceAllUsesWith(Lowered); CI->eraseFromParent(); } }
TEST(LowerTypeTests, BitSetBuilder) {
  // One table entry per scenario: the offsets fed to the builder, followed by
  // the expected state of the resulting BitSetInfo.
  struct Case {
    std::vector<uint64_t> Offsets;
    std::set<uint64_t> Bits;
    uint64_t ByteOffset;
    uint64_t BitSize;
    unsigned AlignLog2;
    bool IsSingleOffset;
    bool IsAllOnes;
  };
  const Case Cases[] = {
      {{}, std::set<uint64_t>{}, 0, 1, 0, false, false},
      {{0}, {0}, 0, 1, 0, true, true},
      {{4}, {0}, 4, 1, 0, true, true},
      {{37}, {0}, 37, 1, 0, true, true},
      {{0, 1}, {0, 1}, 0, 2, 0, false, true},
      {{0, 4}, {0, 1}, 0, 2, 2, false, true},
      {{0, uint64_t(1) << 33}, {0, 1}, 0, 2, 33, false, true},
      {{3, 7}, {0, 1}, 3, 2, 2, false, true},
      {{0, 1, 7}, {0, 1, 7}, 0, 8, 0, false, false},
      {{0, 2, 14}, {0, 1, 7}, 0, 8, 1, false, false},
      {{0, 1, 8}, {0, 1, 8}, 0, 9, 0, false, false},
      {{0, 2, 16}, {0, 1, 8}, 0, 9, 1, false, false},
      {{0, 1, 2, 3, 4, 5, 6, 7},
       {0, 1, 2, 3, 4, 5, 6, 7},
       0,
       8,
       0,
       false,
       true},
      {{0, 1, 2, 3, 4, 5, 6, 7, 8},
       {0, 1, 2, 3, 4, 5, 6, 7, 8},
       0,
       9,
       0,
       false,
       true},
  };

  for (const Case &C : Cases) {
    // Feed every offset into a fresh builder and materialize the bitset.
    BitSetBuilder Builder;
    for (uint64_t Offset : C.Offsets)
      Builder.addOffset(Offset);
    BitSetInfo Info = Builder.build();

    // The built bitset must match the expected state field-for-field.
    EXPECT_EQ(C.Bits, Info.Bits);
    EXPECT_EQ(C.ByteOffset, Info.ByteOffset);
    EXPECT_EQ(C.BitSize, Info.BitSize);
    EXPECT_EQ(C.AlignLog2, Info.AlignLog2);
    EXPECT_EQ(C.IsSingleOffset, Info.isSingleOffset());
    EXPECT_EQ(C.IsAllOnes, Info.isAllOnes());

    // Every requested offset is contained...
    for (uint64_t Offset : C.Offsets)
      EXPECT_TRUE(Info.containsGlobalOffset(Offset));

    // ...and nothing else in [0, 256) is. Offsets is sorted, so walk it in
    // lockstep with the candidate values to skip the members.
    auto Next = C.Offsets.begin();
    for (uint64_t Candidate = 0; Candidate != 256; ++Candidate) {
      if (Next != C.Offsets.end() && *Next == Candidate) {
        ++Next;
        continue;
      }
      EXPECT_FALSE(Info.containsGlobalOffset(Candidate));
    }
  }
}