void
ASTRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD,
                                           const CXXRecordDecl *&FirstPrimary) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    assert(!i->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    if (!i->isVirtual()) {
      SelectPrimaryVBase(Base, FirstPrimary);
      if (PrimaryBase.getBase())
        return;
      continue;
    }
    if (IsNearlyEmpty(Base)) {
      if (FirstPrimary==0)
        FirstPrimary = Base;
      if (!IndirectPrimaryBases.count(Base)) {
        setPrimaryBase(Base, /*IsVirtual=*/true);
        return;
      }
    }
    assert(i->isVirtual());
    SelectPrimaryVBase(Base, FirstPrimary);
    if (PrimaryBase.getBase())
      return;
  }
}
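As a rough illustration (the hierarchy below is invented, not taken from the source), this routine searches for a nearly empty virtual base, i.e. one whose layout is essentially just a vtable pointer, which the Itanium ABI allows to be reused as the primary base of the class containing it:

// Hypothetical example: V is nearly empty (only a vptr), so a layout builder
// may pick it as D's primary virtual base and share its vtable pointer
// instead of giving D a second one.
struct V { virtual void f(); };              // nearly empty: vptr only
struct D : virtual V { virtual void g(); };  // candidate primary vbase: V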
void InheritanceHierarchyWriter::WriteNode(QualType Type, bool FromVirtual) {
  QualType CanonType = Context.getCanonicalType(Type);

  if (FromVirtual) {
    if (KnownVirtualBases.find(CanonType) != KnownVirtualBases.end())
      return;

    // We haven't seen this virtual base before, so display it and
    // its bases.
    KnownVirtualBases.insert(CanonType);
  }

  // Declare the node itself.
  Out << "  ";
  WriteNodeReference(Type, FromVirtual);

  // Give the node a label based on the name of the class.
  std::string TypeName = Type.getAsString();
  Out << " [ shape=\"box\", label=\"" << DOT::EscapeString(TypeName);

  // If the name of the class was a typedef or something different
  // from the "real" class name, show the real class name in
  // parentheses so we don't confuse ourselves.
  if (TypeName != CanonType.getAsString()) {
    Out << "\\n(" << CanonType.getAsString() << ")";
  }

  // Finished describing the node.
  Out << " \"];\n";

  // Display the base classes.
  const CXXRecordDecl *Decl
    = static_cast<const CXXRecordDecl *>(Type->getAs<RecordType>()->getDecl());
  for (CXXRecordDecl::base_class_const_iterator Base = Decl->bases_begin();
       Base != Decl->bases_end(); ++Base) {
    QualType CanonBaseType = Context.getCanonicalType(Base->getType());

    // If this is not virtual inheritance, bump the direct base
    // count for the type.
    if (!Base->isVirtual())
      ++DirectBaseCount[CanonBaseType];

    // Write out the node (if we need to).
    WriteNode(Base->getType(), Base->isVirtual());

    // Write out the edge.
    Out << "  ";
    WriteNodeReference(Type, FromVirtual);
    Out << " -> ";
    WriteNodeReference(Base->getType(), Base->isVirtual());

    // Write out edge attributes to show the kind of inheritance.
    if (Base->isVirtual()) {
      Out << " [ style=\"dashed\" ]";
    }
    Out << ";";
  }
}
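For a sense of the output (hypothetical hierarchy; the exact node references come from WriteNodeReference, which is not shown here), a virtual inheritance edge is rendered dashed while a non-virtual edge carries no extra attributes:

// Hypothetical input: B derives virtually from A, C derives normally from B.
// The writer would emit, roughly, one box-shaped node per class plus edges
// along the lines of  B -> A [ style="dashed" ];  and  C -> B;
struct A { virtual ~A(); };
struct B : virtual A { };
struct C : B { };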
/// LayoutVirtualBases - Lay out the virtual bases of a record decl.
bool
CGRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
                                          const ASTRecordLayout &Layout) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl = 
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // We only want to lay out virtual bases that aren't indirect primary bases
    // of some other base.
    if (I->isVirtual() && !IndirectPrimaryBases.count(BaseDecl)) {
      // Only lay out the base once.
      if (!LaidOutVirtualBases.insert(BaseDecl))
        continue;

      CharUnits vbaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      if (!LayoutVirtualBase(BaseDecl, vbaseOffset))
        return false;
    }

    if (!BaseDecl->getNumVBases()) {
      // This base isn't interesting since it doesn't have any virtual bases.
      continue;
    }
    
    if (!LayoutVirtualBases(BaseDecl, Layout))
      return false;
  }
  return true;
}
bool
CGRecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD,
                                             const ASTRecordLayout &Layout) {
  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();

  // If we have a primary base, lay it out first.
  if (PrimaryBase) {
    if (!Layout.isPrimaryBaseVirtual()) {
      if (!LayoutNonVirtualBase(PrimaryBase, CharUnits::Zero()))
        return false;
    } else {
      if (!LayoutVirtualBase(PrimaryBase, CharUnits::Zero()))
        return false;
    }

  // Otherwise, add a vtable / vf-table if the layout says to do so.
  } else if (Layout.hasOwnVFPtr()) {
    llvm::Type *FunctionType =
      llvm::FunctionType::get(llvm::Type::getInt32Ty(Types.getLLVMContext()),
                              /*isVarArg=*/true);
    llvm::Type *VTableTy = FunctionType->getPointerTo();

    if (getTypeAlignment(VTableTy) > Alignment) {
      // FIXME: Should we allow this to happen in Sema?
      assert(!Packed && "Alignment is wrong even with packed struct!");
      return false;
    }

    assert(NextFieldOffset.isZero() &&
           "VTable pointer must come first!");
    AppendField(CharUnits::Zero(), VTableTy->getPointerTo());
  }

  // Layout the non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl = 
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // We've already laid out the primary base.
    if (BaseDecl == PrimaryBase && !Layout.isPrimaryBaseVirtual())
      continue;

    if (!LayoutNonVirtualBase(BaseDecl, Layout.getBaseClassOffset(BaseDecl)))
      return false;
  }

  // Add a vb-table pointer if the layout insists.
  if (Layout.hasOwnVBPtr()) {
    CharUnits VBPtrOffset = Layout.getVBPtrOffset();
    llvm::Type *Vbptr = llvm::Type::getInt32PtrTy(Types.getLLVMContext());
    AppendPadding(VBPtrOffset, getTypeAlignment(Vbptr));
    AppendField(VBPtrOffset, Vbptr);
  }

  return true;
}
void ASTRecordLayoutBuilder::UpdateEmptyClassOffsets(const CXXRecordDecl *RD,
                                                     uint64_t Offset) {
  if (RD->isEmpty())
    EmptyClassOffsets.insert(std::make_pair(Offset, RD));
  
  const ASTRecordLayout &Info = Ctx.getASTRecordLayout(RD);

  // Update bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    assert(!I->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");
    if (I->isVirtual())
      continue;
    
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
    
    uint64_t BaseClassOffset = Info.getBaseClassOffset(Base);
    UpdateEmptyClassOffsets(Base, Offset + BaseClassOffset);
  }
  
  // Update fields.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end(); 
       I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    
    uint64_t FieldOffset = Info.getFieldOffset(FieldNo);
    UpdateEmptyClassOffsets(FD, Offset + FieldOffset);
  }
  
  // FIXME: Update virtual bases.
}
void VTTBuilder::LayoutVirtualVTTs(const CXXRecordDecl *RD,
                                   VisitedVirtualBasesSetTy &VBases) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl = 
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
    
    // Check if this is a virtual base.
    if (I->isVirtual()) {
      // Check if we've seen this base before.
      if (!VBases.insert(BaseDecl))
        continue;
    
      CharUnits BaseOffset = 
        MostDerivedClassLayout.getVBaseClassOffset(BaseDecl);
      
      LayoutVTT(BaseSubobject(BaseDecl, BaseOffset), /*BaseIsVirtual=*/true);
    }
    
    // We only need to layout virtual VTTs for this base if it actually has
    // virtual bases.
    if (BaseDecl->getNumVBases())
      LayoutVirtualVTTs(BaseDecl, VBases);
  }
}
Example #7
// CanUseSingleInheritance - Return whether the given record decl has a "single, 
// public, non-virtual base at offset zero (i.e. the derived class is dynamic 
// iff the base is)", according to Itanium C++ ABI, 2.95p6b.
static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  // Check the number of bases.
  if (RD->getNumBases() != 1)
    return false;
  
  // Get the base.
  CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();
  
  // Check that the base is not virtual.
  if (Base->isVirtual())
    return false;
  
  // Check that the base is public.
  if (Base->getAccessSpecifier() != AS_public)
    return false;
  
  // Check that the class is dynamic iff the base is.
  const CXXRecordDecl *BaseDecl = 
    cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
  if (!BaseDecl->isEmpty() && 
      BaseDecl->isDynamicClass() != RD->isDynamicClass())
    return false;
  
  return true;
}
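A few invented cases showing what this predicate accepts and rejects (all names below are hypothetical):

// Hypothetical examples for CanUseSingleInheritance:
struct Base  { virtual void f(); };
struct Ok    : Base { };                    // accepted: single, public, non-virtual base
struct Virt  : virtual Base { };            // rejected: the base is virtual
class  Priv  : Base { };                    // rejected: base access defaults to private
struct Plain { int x; };                    // non-empty and non-dynamic
struct Mixed : Plain { virtual void g(); }; // rejected: derived is dynamic, base is not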
void ASTRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *PB,
                                                int64_t Offset,
                                 llvm::SmallSet<const CXXRecordDecl*, 32> &mark,
                    llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    const CXXRecordDecl *Base = 
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
#if 0
    const ASTRecordLayout &L = Ctx.getASTRecordLayout(Base);
    const CXXRecordDecl *PB = L.getPrimaryBase();
    if (PB && L.getPrimaryBaseWasVirtual()
        && IndirectPrimary.count(PB)) {
      int64_t BaseOffset;
      // FIXME: calculate this.
      BaseOffset = (1<<63) | (1<<31);
      VBases.push_back(PB);
      VBaseOffsets.push_back(BaseOffset);
    }
#endif
    int64_t BaseOffset = Offset;
    // FIXME: Calculate BaseOffset.
    if (i->isVirtual()) {
      if (Base == PB) {
        // Only lay things out once.
        if (mark.count(Base))
          continue;
        // Mark it so we don't lay it out twice.
        mark.insert(Base);
        assert (IndirectPrimary.count(Base) && "IndirectPrimary was wrong");
        VBases.push_back(Base);
        VBaseOffsets.push_back(Offset);
      } else if (IndirectPrimary.count(Base)) {
        // Someone else will eventually lay this out.
        ;
      } else {
        // Only lay things out once.
        if (mark.count(Base))
          continue;
        // Mark it so we don't lay it out twice.
        mark.insert(Base);
        LayoutVirtualBase(Base);
        BaseOffset = *(VBaseOffsets.end()-1);
      }
    }
    if (Base->getNumVBases()) {
      const ASTRecordLayout &L = Ctx.getASTRecordLayout(Base);
      const CXXRecordDecl *PB = L.getPrimaryBase();
      LayoutVirtualBases(Base, PB, BaseOffset, mark, IndirectPrimary);
    }
  }
}
void 
ASTRecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    if (!i->isVirtual()) {
      const CXXRecordDecl *Base = 
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      // Skip the PrimaryBase here, as it is laid down first.
      if (Base != PrimaryBase || PrimaryBaseWasVirtual)
        LayoutBaseNonVirtually(Base, false);
    }
  }
}
void ASTRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *Class,
                                                const CXXRecordDecl *RD,
                                                const CXXRecordDecl *PB,
                                                uint64_t Offset,
                                 llvm::SmallSet<const CXXRecordDecl*, 32> &mark,
                    llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    assert(!i->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    uint64_t BaseOffset = Offset;
    if (i->isVirtual()) {
      if (Base == PB) {
        // Only lay things out once.
        if (mark.count(Base))
          continue;
        // Mark it so we don't lay it out twice.
        mark.insert(Base);
        assert (IndirectPrimary.count(Base) && "IndirectPrimary was wrong");
        VBases.push_back(std::make_pair(Base, Offset));
      } else if (IndirectPrimary.count(Base)) {
        // Someone else will eventually lay this out.
        ;
      } else {
        // Only lay things out once.
        if (mark.count(Base))
          continue;
        // Mark it so we don't lay it out twice.
        mark.insert(Base);
        LayoutVirtualBase(Base);
        BaseOffset = VBases.back().second;
      }
    } else {
      if (RD == Class)
        BaseOffset = getBaseOffset(Base);
      else {
        const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(RD);
        BaseOffset = Offset + Layout.getBaseClassOffset(Base);
      }
    }
    
    if (Base->getNumVBases()) {
      const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(Base);
      const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBaseInfo().getBase();
      LayoutVirtualBases(Class, Base, PrimaryBase, BaseOffset, mark, 
                         IndirectPrimary);
    }
  }
}
void
ASTRecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    if (!i->isVirtual()) {
      assert(!i->getType()->isDependentType() &&
             "Cannot layout class with dependent bases.");
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      // Skip the PrimaryBase here, as it is laid down first.
      if (Base != PrimaryBase.getBase() || PrimaryBase.isVirtual())
        LayoutBaseNonVirtually(Base, false);
    }
  }
}
/// SelectPrimaryBase - Selects the primary base for the given class and
/// records it with setPrimaryBase.  We also calculate the IndirectPrimaries.
void ASTRecordLayoutBuilder::SelectPrimaryBase(const CXXRecordDecl *RD) {
  // Compute all the primary virtual bases for all of our direct and
  // indirect bases, and record all their primary virtual base classes.
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    assert(!i->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    IdentifyPrimaryBases(Base);
  }

  // If the record has a dynamic base class, attempt to choose a primary base 
  // class. It is the first (in direct base class order) non-virtual dynamic 
  // base class, if one exists.
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    // Ignore virtual bases.
    if (i->isVirtual())
      continue;
    
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());

    if (Base->isDynamicClass()) {
      // We found it.
      PrimaryBase = ASTRecordLayout::PrimaryBaseInfo(Base, /*IsVirtual=*/false);
      return;
    }
  }

  // Otherwise, it is the first nearly empty virtual base that is not an
  // indirect primary virtual base class, if one exists.

  // If we have no virtual bases at this point, bail out as the searching below
  // is expensive.
  if (RD->getNumVBases() == 0)
    return;
  
  // Then we can search for the first nearly empty virtual base itself.
  const CXXRecordDecl *FirstPrimary = 0;
  SelectPrimaryVBase(RD, FirstPrimary);

  // Otherwise, it is the first nearly empty virtual base, if one exists;
  // otherwise there is no primary base class.
  if (!PrimaryBase.getBase())
    setPrimaryBase(FirstPrimary, /*IsVirtual=*/true);
}
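An invented hierarchy illustrating the selection order described above: a non-virtual dynamic base wins if one exists, otherwise the first suitable nearly empty virtual base is chosen:

// Hypothetical illustration of primary base selection:
struct V  { virtual void f(); };  // nearly empty, dynamic
struct NV { virtual void g(); };  // dynamic
struct A : virtual V, NV { };     // primary base: NV (first non-virtual dynamic base)
struct B : virtual V { };         // primary base: V (first nearly empty virtual base)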
bool ASTRecordLayoutBuilder::canPlaceRecordAtOffset(const CXXRecordDecl *RD, 
                                                    uint64_t Offset) const {
  // Look for an empty class with the same type at the same offset.
  for (EmptyClassOffsetsTy::const_iterator I = 
        EmptyClassOffsets.lower_bound(Offset), 
       E = EmptyClassOffsets.upper_bound(Offset); I != E; ++I) {
    
    if (I->second == RD)
      return false;
  }
  
  const ASTRecordLayout &Info = Ctx.getASTRecordLayout(RD);

  // Check bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    assert(!I->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");
    if (I->isVirtual())
      continue;
    
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    uint64_t BaseClassOffset = Info.getBaseClassOffset(Base);
    
    if (!canPlaceRecordAtOffset(Base, Offset + BaseClassOffset))
      return false;
  }
  
  // Check fields.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end(); 
       I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    
    uint64_t FieldOffset = Info.getFieldOffset(FieldNo);
    
    if (!canPlaceFieldAtOffset(FD, Offset + FieldOffset))
      return false;
  }

  // FIXME: virtual bases.
  return true;
}
/// SelectPrimaryBase - Selects the primary base for the given class and
/// records it with setPrimaryBase.  We also calculate the IndirectPrimaries.
void ASTRecordLayoutBuilder::SelectPrimaryBase(const CXXRecordDecl *RD,
                    llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
  // We compute all the primary virtual bases for all of our direct and
  // indirect bases, and record all their primary virtual base classes.
  const CXXRecordDecl *FirstPrimary = 0;
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    const CXXRecordDecl *Base = 
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    SelectPrimaryForBase(Base, IndirectPrimary);
  }

  // The primary base is the first non-virtual indirect or direct base class,
  // if one exists.
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
       e = RD->bases_end(); i != e; ++i) {
    if (!i->isVirtual()) {
      const CXXRecordDecl *Base = 
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base->isDynamicClass()) {
        setPrimaryBase(Base, false);
        return;
      }
    }
  }

  setPrimaryBase(0, false);

  // Otherwise, it is the first nearly empty virtual base that is not an
  // indirect primary virtual base class, if one exists.

  // If we have no virtual bases at this point, bail out as the searching below
  // is expensive.
  if (RD->getNumVBases() == 0)
    return;

  // Then we can search for the first nearly empty virtual base itself.
  SelectPrimaryVBase(RD, FirstPrimary, IndirectPrimary);

  // Otherwise, it is the first nearly empty virtual base, if one exists;
  // otherwise there is no primary base class.
  setPrimaryBase(FirstPrimary, true);
  return;
}
void VTTBuilder::LayoutSecondaryVTTs(BaseSubobject Base) {
  const CXXRecordDecl *RD = Base.getBase();

  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    
    // Don't layout virtual bases.
    if (I->isVirtual())
        continue;

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(RD);
    CharUnits BaseOffset = Base.getBaseOffset() + 
      Layout.getBaseClassOffset(BaseDecl);
   
    // Layout the VTT for this base.
    LayoutVTT(BaseSubobject(BaseDecl, BaseOffset), /*BaseIsVirtual=*/false);
  }
}
void ASTRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD,
                                             const CXXRecordDecl *&FirstPrimary,
                    llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    const CXXRecordDecl *Base = 
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    if (!i->isVirtual()) {
      SelectPrimaryVBase(Base, FirstPrimary, IndirectPrimary);
      if (PrimaryBase)
        return;
      continue;
    }
    if (IsNearlyEmpty(Base)) {
      if (FirstPrimary==0)
        FirstPrimary = Base;
      if (!IndirectPrimary.count(Base)) {
        setPrimaryBase(Base, true);
        return;
      }
    }
  }
}
void
VTTBuilder::LayoutSecondaryVirtualPointers(BaseSubobject Base, 
                                           bool BaseIsMorallyVirtual,
                                           uint64_t VTableIndex,
                                           const CXXRecordDecl *VTableClass,
                                           VisitedVirtualBasesSetTy &VBases) {
  const CXXRecordDecl *RD = Base.getBase();
  
  // We're only interested in bases that have virtual bases or are morally
  // virtual.
  if (!RD->getNumVBases() && !BaseIsMorallyVirtual)
    return;

  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Itanium C++ ABI 2.6.2:
    //   Secondary virtual pointers are present for all bases with either
    //   virtual bases or virtual function declarations overridden along a 
    //   virtual path.
    //
    // If the base class is not dynamic, we don't want to add it, nor any
    // of its base classes.
    if (!BaseDecl->isDynamicClass())
      continue;
    
    bool BaseDeclIsMorallyVirtual = BaseIsMorallyVirtual;
    bool BaseDeclIsNonVirtualPrimaryBase = false;
    CharUnits BaseOffset;
    if (I->isVirtual()) {
      // Ignore virtual bases that we've already visited.
      if (!VBases.insert(BaseDecl))
        continue;
      
      BaseOffset = MostDerivedClassLayout.getVBaseClassOffset(BaseDecl);
      BaseDeclIsMorallyVirtual = true;
    } else {
      const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(RD);
      
      BaseOffset = Base.getBaseOffset() + 
        Layout.getBaseClassOffset(BaseDecl);
      
      if (!Layout.isPrimaryBaseVirtual() &&
          Layout.getPrimaryBase() == BaseDecl)
        BaseDeclIsNonVirtualPrimaryBase = true;
    }

    // Itanium C++ ABI 2.6.2:
    //   Secondary virtual pointers: for each base class X which (a) has virtual
    //   bases or is reachable along a virtual path from D, and (b) is not a
    //   non-virtual primary base, the address of the virtual table for X-in-D
    //   or an appropriate construction virtual table.
    if (!BaseDeclIsNonVirtualPrimaryBase &&
        (BaseDecl->getNumVBases() || BaseDeclIsMorallyVirtual)) {
      // Add the vtable pointer.
      AddVTablePointer(BaseSubobject(BaseDecl, BaseOffset), VTableIndex, 
                       VTableClass);
    }

    // And lay out the secondary virtual pointers for the base class.
    LayoutSecondaryVirtualPointers(BaseSubobject(BaseDecl, BaseOffset),
                                   BaseDeclIsMorallyVirtual, VTableIndex, 
                                   VTableClass, VBases);
  }
}
Example #18
/// Try to emit a base destructor as an alias to its primary
/// base-class destructor.
bool CodeGenModule::TryEmitBaseDestructorAsAlias(const CXXDestructorDecl *D) {
  if (!getCodeGenOpts().CXXCtorDtorAliases)
    return true;

  // Producing an alias to a base class ctor/dtor can degrade debug quality
  // as the debugger cannot tell them apart.
  if (getCodeGenOpts().OptimizationLevel == 0)
    return true;

  // If the destructor doesn't have a trivial body, we have to emit it
  // separately.
  if (!D->hasTrivialBody())
    return true;

  const CXXRecordDecl *Class = D->getParent();

  // If we need to manipulate a VTT parameter, give up.
  if (Class->getNumVBases()) {
    // Extra Credit:  passing extra parameters is perfectly safe
    // in many calling conventions, so only bail out if the ctor's
    // calling convention is nonstandard.
    return true;
  }

  // If any field has a non-trivial destructor, we have to emit the
  // destructor separately.
  for (CXXRecordDecl::field_iterator I = Class->field_begin(),
         E = Class->field_end(); I != E; ++I)
    if (I->getType().isDestructedType())
      return true;

  // Try to find a unique base class with a non-trivial destructor.
  const CXXRecordDecl *UniqueBase = 0;
  for (CXXRecordDecl::base_class_const_iterator I = Class->bases_begin(),
         E = Class->bases_end(); I != E; ++I) {

    // We're in the base destructor, so skip virtual bases.
    if (I->isVirtual()) continue;

    // Skip base classes with trivial destructors.
    const CXXRecordDecl *Base
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
    if (Base->hasTrivialDestructor()) continue;

    // If we've already found a base class with a non-trivial
    // destructor, give up.
    if (UniqueBase) return true;
    UniqueBase = Base;
  }

  // If we didn't find any bases with a non-trivial destructor, then
  // the base destructor is actually effectively trivial, which can
  // happen if it was needlessly user-defined or if there are virtual
  // bases with non-trivial destructors.
  if (!UniqueBase)
    return true;

  // If the base is at a non-zero offset, give up.
  const ASTRecordLayout &ClassLayout = Context.getASTRecordLayout(Class);
  if (!ClassLayout.getBaseClassOffset(UniqueBase).isZero())
    return true;

  const CXXDestructorDecl *BaseD = UniqueBase->getDestructor();
  return TryEmitDefinitionAsAlias(GlobalDecl(D, Dtor_Base),
                                  GlobalDecl(BaseD, Dtor_Base),
                                  false);
}
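A sketch of the situation this optimization targets (classes invented here): the base variant of the derived destructor does nothing beyond running the destructor of a single base at offset zero, so it can be emitted as an alias:

// Hypothetical candidate for the alias: ~Derived() has a trivial body, there
// are no virtual bases or members with non-trivial destructors, and Base
// (the only base with a non-trivial destructor) sits at offset 0.
struct Base    { ~Base(); int x; };
struct Derived : Base { ~Derived() {} };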
/// Try to emit a base destructor as an alias to its primary
/// base-class destructor.
bool CodeGenModule::TryEmitBaseDestructorAsAlias(const CXXDestructorDecl *D) {
  if (!getCodeGenOpts().CXXCtorDtorAliases)
    return true;

  // If the destructor doesn't have a trivial body, we have to emit it
  // separately.
  if (!HasTrivialBody(D))
    return true;

  const CXXRecordDecl *Class = D->getParent();

  // If we need to manipulate a VTT parameter, give up.
  if (Class->getNumVBases()) {
    // Extra Credit:  passing extra parameters is perfectly safe
    // in many calling conventions, so only bail out if the ctor's
    // calling convention is nonstandard.
    return true;
  }

  // If any fields have a non-trivial destructor, we have to emit it
  // separately.
  for (CXXRecordDecl::field_iterator I = Class->field_begin(),
         E = Class->field_end(); I != E; ++I)
    if (const RecordType *RT = (*I)->getType()->getAs<RecordType>())
      if (!cast<CXXRecordDecl>(RT->getDecl())->hasTrivialDestructor())
        return true;

  // Try to find a unique base class with a non-trivial destructor.
  const CXXRecordDecl *UniqueBase = 0;
  for (CXXRecordDecl::base_class_const_iterator I = Class->bases_begin(),
         E = Class->bases_end(); I != E; ++I) {

    // We're in the base destructor, so skip virtual bases.
    if (I->isVirtual()) continue;

    // Skip base classes with trivial destructors.
    const CXXRecordDecl *Base
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
    if (Base->hasTrivialDestructor()) continue;

    // If we've already found a base class with a non-trivial
    // destructor, give up.
    if (UniqueBase) return true;
    UniqueBase = Base;
  }

  // If we didn't find any bases with a non-trivial destructor, then
  // the base destructor is actually effectively trivial, which can
  // happen if it was needlessly user-defined or if there are virtual
  // bases with non-trivial destructors.
  if (!UniqueBase)
    return true;

  /// If we don't have a definition for the destructor yet, don't
  /// emit.  We can't emit aliases to declarations; that's just not
  /// how aliases work.
  const CXXDestructorDecl *BaseD = UniqueBase->getDestructor(getContext());
  if (!BaseD->isImplicit() && !BaseD->getBody())
    return true;

  // If the base is at a non-zero offset, give up.
  const ASTRecordLayout &ClassLayout = Context.getASTRecordLayout(Class);
  if (ClassLayout.getBaseClassOffset(UniqueBase) != 0)
    return true;

  return TryEmitDefinitionAsAlias(GlobalDecl(D, Dtor_Base),
                                  GlobalDecl(BaseD, Dtor_Base));
}
Example #20
bool CXXBasePaths::lookupInBases(ASTContext &Context,
                                 const CXXRecordDecl *Record,
                               CXXRecordDecl::BaseMatchesCallback *BaseMatches, 
                                 void *UserData) {
  bool FoundPath = false;

  // The access of the path down to this record.
  AccessSpecifier AccessToHere = ScratchPath.Access;
  bool IsFirstStep = ScratchPath.empty();

  for (CXXRecordDecl::base_class_const_iterator BaseSpec = Record->bases_begin(),
         BaseSpecEnd = Record->bases_end(); 
       BaseSpec != BaseSpecEnd; 
       ++BaseSpec) {
    // Find the record of the base class subobjects for this type.
    QualType BaseType = Context.getCanonicalType(BaseSpec->getType())
                                                          .getUnqualifiedType();
    
    // C++ [temp.dep]p3:
    //   In the definition of a class template or a member of a class template,
    //   if a base class of the class template depends on a template-parameter,
    //   the base class scope is not examined during unqualified name lookup 
    //   either at the point of definition of the class template or member or 
    //   during an instantiation of the class template or member.
    if (BaseType->isDependentType())
      continue;
    
    // Determine whether we need to visit this base class at all,
    // updating the count of subobjects appropriately.
    std::pair<bool, unsigned>& Subobjects = ClassSubobjects[BaseType];
    bool VisitBase = true;
    bool SetVirtual = false;
    if (BaseSpec->isVirtual()) {
      VisitBase = !Subobjects.first;
      Subobjects.first = true;
      if (isDetectingVirtual() && DetectedVirtual == 0) {
        // If this is the first virtual we find, remember it. If it turns out
        // there is no base path here, we'll reset it later.
        DetectedVirtual = BaseType->getAs<RecordType>();
        SetVirtual = true;
      }
    } else
      ++Subobjects.second;
    
    if (isRecordingPaths()) {
      // Add this base specifier to the current path.
      CXXBasePathElement Element;
      Element.Base = &*BaseSpec;
      Element.Class = Record;
      if (BaseSpec->isVirtual())
        Element.SubobjectNumber = 0;
      else
        Element.SubobjectNumber = Subobjects.second;
      ScratchPath.push_back(Element);

      // Calculate the "top-down" access to this base class.
      // The spec actually describes this bottom-up, but top-down is
      // equivalent because the definition works out as follows:
      // 1. Write down the access along each step in the inheritance
      //    chain, followed by the access of the decl itself.
      //    For example, in
      //      class A { public: int foo; };
      //      class B : protected A {};
      //      class C : public B {};
      //      class D : private C {};
      //    we would write:
      //      private public protected public
      // 2. If 'private' appears anywhere except far-left, access is denied.
      // 3. Otherwise, overall access is determined by the most restrictive
      //    access in the sequence.
      if (IsFirstStep)
        ScratchPath.Access = BaseSpec->getAccessSpecifier();
      else
        ScratchPath.Access = CXXRecordDecl::MergeAccess(AccessToHere, 
                                                 BaseSpec->getAccessSpecifier());
    }
    
    // Track whether there's a path involving this specific base.
    bool FoundPathThroughBase = false;
    
    if (BaseMatches(BaseSpec, ScratchPath, UserData)) {
      // We've found a path that terminates at this base.
      FoundPath = FoundPathThroughBase = true;
      if (isRecordingPaths()) {
        // We have a path. Make a copy of it before moving on.
        Paths.push_back(ScratchPath);
      } else if (!isFindingAmbiguities()) {
        // We found a path and we don't care about ambiguities;
        // return immediately.
        return FoundPath;
      }
    } else if (VisitBase) {
      CXXRecordDecl *BaseRecord
        = cast<CXXRecordDecl>(BaseSpec->getType()->castAs<RecordType>()
                                ->getDecl());
      if (lookupInBases(Context, BaseRecord, BaseMatches, UserData)) {
        // C++ [class.member.lookup]p2:
        //   A member name f in one sub-object B hides a member name f in
        //   a sub-object A if A is a base class sub-object of B. Any
        //   declarations that are so hidden are eliminated from
        //   consideration.
        
        // There is a path to a base class that meets the criteria. If we're 
        // not collecting paths or finding ambiguities, we're done.
        FoundPath = FoundPathThroughBase = true;
        if (!isFindingAmbiguities())
          return FoundPath;
      }
    }
    
    // Pop this base specifier off the current path (if we're
    // collecting paths).
    if (isRecordingPaths()) {
      ScratchPath.pop_back();
    }

    // If we set a virtual earlier, and this isn't a path, forget it again.
    if (SetVirtual && !FoundPathThroughBase) {
      DetectedVirtual = 0;
    }
  }

  // Reset the scratch path access.
  ScratchPath.Access = AccessToHere;
  
  return FoundPath;
}
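The subobject bookkeeping above can be pictured with an invented non-virtual diamond: the same member is reachable through two distinct subobjects, so two base paths are recorded and the lookup is ambiguous:

// Hypothetical ambiguity case: D contains two A subobjects (one via B, one
// via C), so looking up x in D finds two paths.
struct A { int x; };
struct B : A { };
struct C : A { };
struct D : B, C { };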
Example #21
void FinalOverriderCollector::Collect(const CXXRecordDecl *RD, 
                                      bool VirtualBase,
                                      const CXXRecordDecl *InVirtualSubobject,
                                      CXXFinalOverriderMap &Overriders) {
  unsigned SubobjectNumber = 0;
  if (!VirtualBase)
    SubobjectNumber
      = ++SubobjectCount[cast<CXXRecordDecl>(RD->getCanonicalDecl())];

  for (CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin(),
         BaseEnd = RD->bases_end(); Base != BaseEnd; ++Base) {
    if (const RecordType *RT = Base->getType()->getAs<RecordType>()) {
      const CXXRecordDecl *BaseDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (!BaseDecl->isPolymorphic())
        continue;

      if (Overriders.empty() && !Base->isVirtual()) {
        // There are no other overriders of virtual member functions,
        // so let the base class fill in our overriders for us.
        Collect(BaseDecl, false, InVirtualSubobject, Overriders);
        continue;
      }

      // Collect all of the overriders from the base class subobject
      // and merge them into the set of overriders for this class.
      // For virtual base classes, populate or use the cached virtual
      // overrides so that we do not walk the virtual base class (and
      // its base classes) more than once.
      CXXFinalOverriderMap ComputedBaseOverriders;
      CXXFinalOverriderMap *BaseOverriders = &ComputedBaseOverriders;
      if (Base->isVirtual()) {
        CXXFinalOverriderMap *&MyVirtualOverriders = VirtualOverriders[BaseDecl];
        BaseOverriders = MyVirtualOverriders;
        if (!MyVirtualOverriders) {
          MyVirtualOverriders = new CXXFinalOverriderMap;

          // Collect may cause VirtualOverriders to reallocate, invalidating the
          // MyVirtualOverriders reference. Set BaseOverriders to the right
          // value now.
          BaseOverriders = MyVirtualOverriders;

          Collect(BaseDecl, true, BaseDecl, *MyVirtualOverriders);
        }
      } else
        Collect(BaseDecl, false, InVirtualSubobject, ComputedBaseOverriders);

      // Merge the overriders from this base class into our own set of
      // overriders.
      for (CXXFinalOverriderMap::iterator OM = BaseOverriders->begin(), 
                               OMEnd = BaseOverriders->end();
           OM != OMEnd;
           ++OM) {
        const CXXMethodDecl *CanonOM
          = cast<CXXMethodDecl>(OM->first->getCanonicalDecl());
        Overriders[CanonOM].add(OM->second);
      }
    }
  }

  for (CXXRecordDecl::method_iterator M = RD->method_begin(), 
                                   MEnd = RD->method_end();
       M != MEnd;
       ++M) {
    // We only care about virtual methods.
    if (!M->isVirtual())
      continue;

    CXXMethodDecl *CanonM = cast<CXXMethodDecl>(M->getCanonicalDecl());

    if (CanonM->begin_overridden_methods()
                                       == CanonM->end_overridden_methods()) {
      // This is a new virtual function that does not override any
      // other virtual function. Add it to the map of virtual
      // functions for which we are tracking overriders.

      // C++ [class.virtual]p2:
      //   For convenience we say that any virtual function overrides itself.
      Overriders[CanonM].add(SubobjectNumber,
                             UniqueVirtualMethod(CanonM, SubobjectNumber,
                                                 InVirtualSubobject));
      continue;
    }

    // This virtual method overrides other virtual methods, so it does
    // not add any new slots into the set of overriders. Instead, we
    // replace entries in the set of overriders with the new
    // overrider. To do so, we dig down to the original virtual
    // functions using data recursion and update all of the methods it
    // overrides.
    typedef std::pair<CXXMethodDecl::method_iterator, 
                      CXXMethodDecl::method_iterator> OverriddenMethods;
    SmallVector<OverriddenMethods, 4> Stack;
    Stack.push_back(std::make_pair(CanonM->begin_overridden_methods(),
                                   CanonM->end_overridden_methods()));
    while (!Stack.empty()) {
      OverriddenMethods OverMethods = Stack.back();
      Stack.pop_back();

      for (; OverMethods.first != OverMethods.second; ++OverMethods.first) {
        const CXXMethodDecl *CanonOM
          = cast<CXXMethodDecl>((*OverMethods.first)->getCanonicalDecl());

        // C++ [class.virtual]p2:
        //   A virtual member function C::vf of a class object S is
        //   a final overrider unless the most derived class (1.8)
        //   of which S is a base class subobject (if any) declares
        //   or inherits another member function that overrides vf.
        //
        // Treating this object like the most derived class, we
        // replace any overrides from base classes with this
        // overriding virtual function.
        Overriders[CanonOM].replaceAll(
                               UniqueVirtualMethod(CanonM, SubobjectNumber,
                                                   InVirtualSubobject));

        if (CanonOM->begin_overridden_methods()
                                       == CanonOM->end_overridden_methods())
          continue;

        // Continue recursion to the methods that this virtual method
        // overrides.
        Stack.push_back(std::make_pair(CanonOM->begin_overridden_methods(),
                                       CanonOM->end_overridden_methods()));
      }
    }

    // C++ [class.virtual]p2:
    //   For convenience we say that any virtual function overrides itself.
    Overriders[CanonM].add(SubobjectNumber,
                           UniqueVirtualMethod(CanonM, SubobjectNumber,
                                               InVirtualSubobject));
  }
}
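For orientation, an invented virtual-inheritance diamond and the final overrider Collect would record for it:

// Hypothetical diamond: D has a single virtual A subobject; C::f overrides
// A::f while B does not, so the final overrider of A::f in D is C::f.
struct A { virtual void f(); };
struct B : virtual A { };
struct C : virtual A { void f(); };
struct D : B, C { };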
/// ConvertTagDeclType - Lay out a tagged decl type like struct or union or
/// enum.
const llvm::Type *CodeGenTypes::ConvertTagDeclType(const TagDecl *TD) {
  // TagDecls are not necessarily unique; instead, use the (clang)
  // type connected to the decl.
  const Type *Key =
    Context.getTagDeclType(TD).getTypePtr();
  llvm::DenseMap<const Type*, llvm::PATypeHolder>::iterator TDTI =
    TagDeclTypes.find(Key);

  // If we've already compiled this tag type, use the previous definition.
  if (TDTI != TagDeclTypes.end())
    return TDTI->second;

  const EnumDecl *ED = dyn_cast<EnumDecl>(TD);

  // If this is still a forward declaration, just define an opaque
  // type to use for this tagged decl.
  // C++0x: If this is a enumeration type with fixed underlying type,
  // consider it complete.
  if (!TD->isDefinition() && !(ED && ED->isFixed())) {
    llvm::Type *ResultType = llvm::OpaqueType::get(getLLVMContext());
    TagDeclTypes.insert(std::make_pair(Key, ResultType));
    return ResultType;
  }

  // Okay, this is a definition of a type.  Compile the implementation now.

  if (ED)  // Don't bother storing enums in TagDeclTypes.
    return ConvertTypeRecursive(ED->getIntegerType());

  // This decl could well be recursive.  In this case, insert an opaque
  // definition of this type, which the recursive uses will get.  We will then
  // refine this opaque version later.

  // Create new OpaqueType now for later use in case this is a recursive
  // type.  This will later be refined to the actual type.
  llvm::PATypeHolder ResultHolder = llvm::OpaqueType::get(getLLVMContext());
  TagDeclTypes.insert(std::make_pair(Key, ResultHolder));

  const RecordDecl *RD = cast<const RecordDecl>(TD);

  // Force conversion of non-virtual base classes recursively.
  if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(TD)) {    
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
      if (!i->isVirtual()) {
        const CXXRecordDecl *Base =
          cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
        ConvertTagDeclType(Base);
      }
    }
  }

  // Layout fields.
  CGRecordLayout *Layout = ComputeRecordLayout(RD);

  CGRecordLayouts[Key] = Layout;
  const llvm::Type *ResultType = Layout->getLLVMType();

  // Refine our Opaque type to ResultType.  This can invalidate ResultType, so
  // make sure to read the result out of the holder.
  cast<llvm::OpaqueType>(ResultHolder.get())
    ->refineAbstractTypeTo(ResultType);

  return ResultHolder.get();
}
Example #23
/// ConvertRecordDeclType - Lay out a tagged decl type like struct or union.
llvm::StructType *CodeGenTypes::ConvertRecordDeclType(const RecordDecl *RD) {
  // TagDecls are not necessarily unique; instead, use the (clang)
  // type connected to the decl.
  const Type *Key = Context.getTagDeclType(RD).getTypePtr();

  llvm::StructType *&Entry = RecordDeclTypes[Key];

  // If we don't have a StructType at all yet, create the forward declaration.
  if (Entry == 0) {
    Entry = llvm::StructType::create(getLLVMContext());
    addRecordTypeName(RD, Entry, "");
  }
  llvm::StructType *Ty = Entry;

  // If this is still a forward declaration, or the LLVM type is already
  // complete, there's nothing more to do.
  RD = RD->getDefinition();
  if (RD == 0 || !RD->isCompleteDefinition() || !Ty->isOpaque())
    return Ty;
  
  // If converting this type would cause us to infinitely loop, don't do it!
  if (!isSafeToConvert(RD, *this)) {
    DeferredRecords.push_back(RD);
    return Ty;
  }

  // Okay, this is a definition of a type.  Compile the implementation now.
  bool InsertResult = RecordsBeingLaidOut.insert(Key); (void)InsertResult;
  assert(InsertResult && "Recursively compiling a struct?");
  
  // Force conversion of non-virtual base classes recursively.
  if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) {
    for (CXXRecordDecl::base_class_const_iterator i = CRD->bases_begin(),
         e = CRD->bases_end(); i != e; ++i) {
      if (i->isVirtual()) continue;
      
      ConvertRecordDeclType(i->getType()->getAs<RecordType>()->getDecl());
    }
  }

  // Layout fields.
  CGRecordLayout *Layout = ComputeRecordLayout(RD, Ty);
  CGRecordLayouts[Key] = Layout;

  // We're done laying out this struct.
  bool EraseResult = RecordsBeingLaidOut.erase(Key); (void)EraseResult;
  assert(EraseResult && "struct not in RecordsBeingLaidOut set?");
   
  // If this struct blocked a FunctionType conversion, then recompute whatever
  // was derived from that.
  // FIXME: This is hugely overconservative.
  if (SkippedLayout)
    TypeCache.clear();
    
  // If we're done converting the outer-most record, then convert any deferred
  // structs as well.
  if (RecordsBeingLaidOut.empty())
    while (!DeferredRecords.empty())
      ConvertRecordDeclType(DeferredRecords.pop_back_val());

  return Ty;
}
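The forward-declared opaque StructType created near the top of this function is what makes self-referential records convertible; a minimal invented example:

// Hypothetical self-referential record: the opaque llvm::StructType created
// before the body is filled in lets the "next" pointer member refer to the
// record's own LLVM type while it is still being converted.
struct Node { Node *next; int value; };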
void VBTableBuilder::findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
                                          BaseSubobject CurSubobject,
                                          VBTablePathVector &Paths) {
  size_t PathsStart = Paths.size();
  bool ReuseVBPtrFromBase = true;
  const CXXRecordDecl *CurBase = CurSubobject.getBase();

  // If this base has a vbptr, then we've found a path.  These are not full
  // paths, so we don't use CXXBasePath.
  if (hasVBPtr(CurBase)) {
    ReuseVBPtrFromBase = false;
    VBTablePath *Info = new VBTablePath(
      VBTableInfo(ReusingBase, CurSubobject, /*GV=*/0));
    Paths.push_back(Info);
  }

  // Recurse onto any bases which themselves have virtual bases.
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(CurBase);
  for (CXXRecordDecl::base_class_const_iterator I = CurBase->bases_begin(),
       E = CurBase->bases_end(); I != E; ++I) {
    const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
    if (!Base->getNumVBases())
      continue;  // Bases without virtual bases have no vbptrs.
    CharUnits NextOffset;
    const CXXRecordDecl *NextReusingBase = Base;
    if (I->isVirtual()) {
      if (!VBasesSeen.insert(Base))
        continue;  // Don't visit virtual bases twice.
      NextOffset = DerivedLayout.getVBaseClassOffset(Base);
    } else {
      NextOffset = (CurSubobject.getBaseOffset() +
                    Layout.getBaseClassOffset(Base));

      // If CurBase didn't have a vbptr, then ReusingBase will reuse the vbptr
      // from the first non-virtual base with vbases for its vbptr.
      if (ReuseVBPtrFromBase) {
        NextReusingBase = ReusingBase;
        ReuseVBPtrFromBase = false;
      }
    }

    size_t NumPaths = Paths.size();
    findUnambiguousPaths(NextReusingBase, BaseSubobject(Base, NextOffset),
                         Paths);

    // Tag paths through this base with the base itself.  We might use it to
    // disambiguate.
    for (size_t I = NumPaths, E = Paths.size(); I != E; ++I)
      Paths[I]->NextBase = Base;
  }

  bool AmbiguousPaths = rebucketPaths(Paths, PathsStart);
  if (AmbiguousPaths)
    rebucketPaths(Paths, PathsStart, /*SecondPass=*/true);

#ifndef NDEBUG
  // Check that the paths are in fact unique.
  for (size_t I = PathsStart + 1, E = Paths.size(); I != E; ++I) {
    assert(Paths[I]->Path != Paths[I - 1]->Path && "vbtable paths are not unique");
  }
#endif
}
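A rough, invented example of the layouts this walk has to disambiguate: in the Microsoft ABI both B and C introduce a vbptr (each has a virtual base), so D ends up with two vbptr paths whose vbtables need distinct, unambiguous names:

// Hypothetical MS ABI case: B and C each carry a vbptr for their virtual
// base A; D reuses one and keeps the other inside its C subobject, giving
// two vbtable paths that must be disambiguated.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual A { int c; };
struct D : B, C { int d; };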