unsigned X86WinCOFFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsCrossSection,
                                              const MCAsmBackend &MAB) const {
  unsigned FixupKind = Fixup.getKind();
  if (IsCrossSection) {
    if (FixupKind != FK_Data_4 && FixupKind != llvm::X86::reloc_signed_4byte) {
      Ctx.reportError(Fixup.getLoc(), "Cannot represent this expression");
      return COFF::IMAGE_REL_AMD64_ADDR32;
    }
    FixupKind = FK_PCRel_4;
  }

  MCSymbolRefExpr::VariantKind Modifier = Target.isAbsolute() ?
    MCSymbolRefExpr::VK_None : Target.getSymA()->getKind();

  if (getMachine() == COFF::IMAGE_FILE_MACHINE_AMD64) {
    switch (FixupKind) {
    case FK_PCRel_4:
    case X86::reloc_riprel_4byte:
    case X86::reloc_riprel_4byte_movq_load:
    case X86::reloc_riprel_4byte_relax:
    case X86::reloc_riprel_4byte_relax_rex:
    case X86::reloc_branch_4byte_pcrel:
      return COFF::IMAGE_REL_AMD64_REL32;
    case FK_Data_4:
    case X86::reloc_signed_4byte:
    case X86::reloc_signed_4byte_relax:
      if (Modifier == MCSymbolRefExpr::VK_COFF_IMGREL32)
        return COFF::IMAGE_REL_AMD64_ADDR32NB;
      if (Modifier == MCSymbolRefExpr::VK_SECREL)
        return COFF::IMAGE_REL_AMD64_SECREL;
      return COFF::IMAGE_REL_AMD64_ADDR32;
    case FK_Data_8:
      return COFF::IMAGE_REL_AMD64_ADDR64;
    case FK_SecRel_2:
      return COFF::IMAGE_REL_AMD64_SECTION;
    case FK_SecRel_4:
      return COFF::IMAGE_REL_AMD64_SECREL;
    default:
      Ctx.reportError(Fixup.getLoc(), "unsupported relocation type");
      return COFF::IMAGE_REL_AMD64_ADDR32;
    }
  } else if (getMachine() == COFF::IMAGE_FILE_MACHINE_I386) {
    switch (FixupKind) {
    case FK_PCRel_4:
    case X86::reloc_riprel_4byte:
    case X86::reloc_riprel_4byte_movq_load:
      return COFF::IMAGE_REL_I386_REL32;
    case FK_Data_4:
    case X86::reloc_signed_4byte:
    case X86::reloc_signed_4byte_relax:
      if (Modifier == MCSymbolRefExpr::VK_COFF_IMGREL32)
        return COFF::IMAGE_REL_I386_DIR32NB;
      if (Modifier == MCSymbolRefExpr::VK_SECREL)
        return COFF::IMAGE_REL_I386_SECREL;
      return COFF::IMAGE_REL_I386_DIR32;
    case FK_SecRel_2:
      return COFF::IMAGE_REL_I386_SECTION;
    case FK_SecRel_4:
      return COFF::IMAGE_REL_I386_SECREL;
    default:
      Ctx.reportError(Fixup.getLoc(), "unsupported relocation type");
      return COFF::IMAGE_REL_I386_DIR32;
    }
  } else
    llvm_unreachable("Unsupported COFF machine type.");
}
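
// Illustration (editor's sketch, not part of LLVM): typical x86-64 COFF
// constructs and the relocation the switch above selects for them. The
// fixup-kind column is an assumption based on LLVM's X86 assembler backend.
//
//   call foo           // 4-byte pc-rel fixup -> IMAGE_REL_AMD64_REL32
//   .long foo          // FK_Data_4           -> IMAGE_REL_AMD64_ADDR32
//   .quad foo          // FK_Data_8           -> IMAGE_REL_AMD64_ADDR64
//   .secrel32 foo      // FK_SecRel_4         -> IMAGE_REL_AMD64_SECREL
//   .secidx foo        // FK_SecRel_2         -> IMAGE_REL_AMD64_SECTION
//   .long foo@IMGREL   // VK_COFF_IMGREL32    -> IMAGE_REL_AMD64_ADDR32NB
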
static void checkIs32(MCContext &Ctx, SMLoc Loc, X86_64RelType Type) {
  if (Type != RT64_32)
    Ctx.reportError(Loc,
                    "32 bit reloc applied to a field with a different size");
}
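
// checkIs32 guards x86-64 ELF relocations that exist only in a 32-bit form.
// A hedged sketch of the call pattern, assuming a caller shaped like LLVM's
// X86ELFObjectWriter::getRelocType64 (Type is derived from the fixup width):
//
//   case MCSymbolRefExpr::VK_PLT:
//     checkIs32(Ctx, Loc, Type);   // @PLT only fits a 32-bit field
//     return ELF::R_X86_64_PLT32;
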
Example #3
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
    AArch64MCExpr::VariantKind RefKind =
        static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
    AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
    bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

    assert((!Target.getSymA() ||
            Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
           "Should only be expression-level modifiers here");

    assert((!Target.getSymB() ||
            Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
           "Should only be expression-level modifiers here");

    if (IsPCRel) {
        switch ((unsigned)Fixup.getKind()) {
        case FK_Data_1:
            Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
            return ELF::R_AARCH64_NONE;
        case FK_Data_2:
            return ELF::R_AARCH64_PREL16;
        case FK_Data_4:
            return ELF::R_AARCH64_PREL32;
        case FK_Data_8:
            return ELF::R_AARCH64_PREL64;
        case AArch64::fixup_aarch64_pcrel_adr_imm21:
            assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation");
            return ELF::R_AARCH64_ADR_PREL_LO21;
        case AArch64::fixup_aarch64_pcrel_adrp_imm21:
            if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
                return ELF::R_AARCH64_ADR_PREL_PG_HI21;
            if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
                return ELF::R_AARCH64_ADR_GOT_PAGE;
            if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
                return ELF::R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21;
            if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
                return ELF::R_AARCH64_TLSDESC_ADR_PAGE21;
            Ctx.reportError(Fixup.getLoc(),
                            "invalid symbol kind for ADRP relocation");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_pcrel_branch26:
            return ELF::R_AARCH64_JUMP26;
        case AArch64::fixup_aarch64_pcrel_call26:
            return ELF::R_AARCH64_CALL26;
        case AArch64::fixup_aarch64_ldr_pcrel_imm19:
            if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
                return ELF::R_AARCH64_TLSIE_LD_GOTTPREL_PREL19;
            return ELF::R_AARCH64_LD_PREL_LO19;
        case AArch64::fixup_aarch64_pcrel_branch14:
            return ELF::R_AARCH64_TSTBR14;
        case AArch64::fixup_aarch64_pcrel_branch19:
            return ELF::R_AARCH64_CONDBR19;
        default:
            Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
            return ELF::R_AARCH64_NONE;
        }
    } else {
        switch ((unsigned)Fixup.getKind()) {
        case FK_Data_1:
            Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
            return ELF::R_AARCH64_NONE;
        case FK_Data_2:
            return ELF::R_AARCH64_ABS16;
        case FK_Data_4:
            return ELF::R_AARCH64_ABS32;
        case FK_Data_8:
            return ELF::R_AARCH64_ABS64;
        case AArch64::fixup_aarch64_add_imm12:
            if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
                return ELF::R_AARCH64_TLSLD_ADD_DTPREL_HI12;
            if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
                return ELF::R_AARCH64_TLSLE_ADD_TPREL_HI12;
            if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
                return ELF::R_AARCH64_TLSLD_ADD_DTPREL_LO12_NC;
            if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
                return ELF::R_AARCH64_TLSLD_ADD_DTPREL_LO12;
            if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
                return ELF::R_AARCH64_TLSLE_ADD_TPREL_LO12_NC;
            if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
                return ELF::R_AARCH64_TLSLE_ADD_TPREL_LO12;
            if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
                return ELF::R_AARCH64_TLSDESC_ADD_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_ADD_ABS_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for add (uimm12) instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_ldst_imm12_scale1:
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_LDST8_ABS_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
                return ELF::R_AARCH64_TLSLD_LDST8_DTPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
                return ELF::R_AARCH64_TLSLD_LDST8_DTPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
                return ELF::R_AARCH64_TLSLE_LDST8_TPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
                return ELF::R_AARCH64_TLSLE_LDST8_TPREL_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for 8-bit load/store instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_ldst_imm12_scale2:
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_LDST16_ABS_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
                return ELF::R_AARCH64_TLSLD_LDST16_DTPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
                return ELF::R_AARCH64_TLSLD_LDST16_DTPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
                return ELF::R_AARCH64_TLSLE_LDST16_TPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
                return ELF::R_AARCH64_TLSLE_LDST16_TPREL_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for 16-bit load/store instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_ldst_imm12_scale4:
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_LDST32_ABS_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
                return ELF::R_AARCH64_TLSLD_LDST32_DTPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
                return ELF::R_AARCH64_TLSLD_LDST32_DTPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
                return ELF::R_AARCH64_TLSLE_LDST32_TPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
                return ELF::R_AARCH64_TLSLE_LDST32_TPREL_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for 32-bit load/store instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_ldst_imm12_scale8:
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_LDST64_ABS_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_GOT && IsNC)
                return ELF::R_AARCH64_LD64_GOT_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
                return ELF::R_AARCH64_TLSLD_LDST64_DTPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
                return ELF::R_AARCH64_TLSLD_LDST64_DTPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
                return ELF::R_AARCH64_TLSLE_LDST64_TPREL_LO12;
            if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
                return ELF::R_AARCH64_TLSLE_LDST64_TPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC)
                return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
            if (SymLoc == AArch64MCExpr::VK_TLSDESC && IsNC)
                return ELF::R_AARCH64_TLSDESC_LD64_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for 64-bit load/store instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_ldst_imm12_scale16:
            if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
                return ELF::R_AARCH64_LDST128_ABS_LO12_NC;

            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for 128-bit load/store instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_movw:
            if (RefKind == AArch64MCExpr::VK_ABS_G3)
                return ELF::R_AARCH64_MOVW_UABS_G3;
            if (RefKind == AArch64MCExpr::VK_ABS_G2)
                return ELF::R_AARCH64_MOVW_UABS_G2;
            if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
                return ELF::R_AARCH64_MOVW_SABS_G2;
            if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
                return ELF::R_AARCH64_MOVW_UABS_G2_NC;
            if (RefKind == AArch64MCExpr::VK_ABS_G1)
                return ELF::R_AARCH64_MOVW_UABS_G1;
            if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
                return ELF::R_AARCH64_MOVW_SABS_G1;
            if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
                return ELF::R_AARCH64_MOVW_UABS_G1_NC;
            if (RefKind == AArch64MCExpr::VK_ABS_G0)
                return ELF::R_AARCH64_MOVW_UABS_G0;
            if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
                return ELF::R_AARCH64_MOVW_SABS_G0;
            if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
                return ELF::R_AARCH64_MOVW_UABS_G0_NC;
            if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
                return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
            if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
                return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1;
            if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
                return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
            if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
                return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G0;
            if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
                return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G0_NC;
            if (RefKind == AArch64MCExpr::VK_TPREL_G2)
                return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
            if (RefKind == AArch64MCExpr::VK_TPREL_G1)
                return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1;
            if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
                return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
            if (RefKind == AArch64MCExpr::VK_TPREL_G0)
                return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G0;
            if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
                return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G0_NC;
            if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
                return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
            if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
                return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
            Ctx.reportError(Fixup.getLoc(),
                            "invalid fixup for movz/movk instruction");
            return ELF::R_AARCH64_NONE;
        case AArch64::fixup_aarch64_tlsdesc_call:
            return ELF::R_AARCH64_TLSDESC_CALL;
        default:
            Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
            return ELF::R_AARCH64_NONE;
        }
    }

    llvm_unreachable("Unimplemented fixup -> relocation");
}
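
// The ILP32-aware getRelocType below uses two helpers that are not part of
// this excerpt. Reconstructed approximately from LLVM's
// AArch64ELFObjectWriter.cpp (the exact diagnostic wording is an assumption);
// both expand inside the writer's member functions, where IsILP32 is a
// member flag.
#define R_CLS(rtype)                                                           \
  (IsILP32 ? ELF::R_AARCH64_P32_##rtype : ELF::R_AARCH64_##rtype)
#define BAD_ILP32_MOV(lp64rtype)                                               \
  "ILP32 absolute MOV relocation not supported (LP64 eqv: " #lp64rtype ")"
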
// assumes IsILP32 is true
static bool isNonILP32reloc(const MCFixup &Fixup,
                            AArch64MCExpr::VariantKind RefKind,
                            MCContext &Ctx) {
  if ((unsigned)Fixup.getKind() != AArch64::fixup_aarch64_movw)
    return false;
  switch (RefKind) {
    case AArch64MCExpr::VK_ABS_G3:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G3));
      return true;
    case AArch64MCExpr::VK_ABS_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2));
      return true;
    case AArch64MCExpr::VK_ABS_G2_S:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G2));
      return true;
    case AArch64MCExpr::VK_ABS_G2_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G2_NC));
      return true;
    case AArch64MCExpr::VK_ABS_G1_S:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_SABS_G1));
      return true;
    case AArch64MCExpr::VK_ABS_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(MOVW_UABS_G1_NC));
      return true;
    case AArch64MCExpr::VK_DTPREL_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G2));
      return true;
    case AArch64MCExpr::VK_DTPREL_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLD_MOVW_DTPREL_G1_NC));
      return true;
    case AArch64MCExpr::VK_TPREL_G2:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G2));
      return true;
    case AArch64MCExpr::VK_TPREL_G1_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSLE_MOVW_TPREL_G1_NC));
      return true;
    case AArch64MCExpr::VK_GOTTPREL_G1:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G1));
      return true;
    case AArch64MCExpr::VK_GOTTPREL_G0_NC:
      Ctx.reportError(Fixup.getLoc(), BAD_ILP32_MOV(TLSIE_MOVW_GOTTPREL_G0_NC));
      return true;
    default: return false;
  }
  return false;
}
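
// Example of an input isNonILP32reloc rejects (hedged): under ILP32 a 64-bit
// absolute movz chunk has no P32 relocation, so
//   movz x0, #:abs_g3:sym
// (VK_ABS_G3 on fixup_aarch64_movw) reports BAD_ILP32_MOV(MOVW_UABS_G3) and
// the caller returns R_AARCH64_NONE.
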
unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
                                              const MCValue &Target,
                                              const MCFixup &Fixup,
                                              bool IsPCRel) const {
  AArch64MCExpr::VariantKind RefKind =
      static_cast<AArch64MCExpr::VariantKind>(Target.getRefKind());
  AArch64MCExpr::VariantKind SymLoc = AArch64MCExpr::getSymbolLoc(RefKind);
  bool IsNC = AArch64MCExpr::isNotChecked(RefKind);

  assert((!Target.getSymA() ||
          Target.getSymA()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  assert((!Target.getSymB() ||
          Target.getSymB()->getKind() == MCSymbolRefExpr::VK_None) &&
         "Should only be expression-level modifiers here");

  if (IsPCRel) {
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(PREL16);
    case FK_Data_4:
      return R_CLS(PREL32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(), "ILP32 8 byte PC relative data "
                        "relocation not supported (LP64 eqv: PREL64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_PREL64;
    case AArch64::fixup_aarch64_pcrel_adr_imm21:
      assert(SymLoc == AArch64MCExpr::VK_NONE && "unexpected ADR relocation");
      return R_CLS(ADR_PREL_LO21);
    case AArch64::fixup_aarch64_pcrel_adrp_imm21:
      if (SymLoc == AArch64MCExpr::VK_ABS && !IsNC)
        return R_CLS(ADR_PREL_PG_HI21);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "invalid fixup for 32-bit pcrel ADRP instruction "
                          "VK_ABS VK_NC");
          return ELF::R_AARCH64_NONE;
        } else {
          return ELF::R_AARCH64_ADR_PREL_PG_HI21_NC;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC)
        return R_CLS(ADR_GOT_PAGE);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && !IsNC)
        return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
        return R_CLS(TLSDESC_ADR_PAGE21);
      Ctx.reportError(Fixup.getLoc(),
                      "invalid symbol kind for ADRP relocation");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_pcrel_branch26:
      return R_CLS(JUMP26);
    case AArch64::fixup_aarch64_pcrel_call26:
      return R_CLS(CALL26);
    case AArch64::fixup_aarch64_ldr_pcrel_imm19:
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL)
        return R_CLS(TLSIE_LD_GOTTPREL_PREL19);
      return R_CLS(LD_PREL_LO19);
    case AArch64::fixup_aarch64_pcrel_branch14:
      return R_CLS(TSTBR14);
    case AArch64::fixup_aarch64_pcrel_branch19:
      return R_CLS(CONDBR19);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unsupported pc-relative fixup kind");
      return ELF::R_AARCH64_NONE;
    }
  } else {
    if (IsILP32 && isNonILP32reloc(Fixup, RefKind, Ctx))
      return ELF::R_AARCH64_NONE;
    switch ((unsigned)Fixup.getKind()) {
    case FK_Data_1:
      Ctx.reportError(Fixup.getLoc(), "1-byte data relocations not supported");
      return ELF::R_AARCH64_NONE;
    case FK_Data_2:
      return R_CLS(ABS16);
    case FK_Data_4:
      return R_CLS(ABS32);
    case FK_Data_8:
      if (IsILP32) {
        Ctx.reportError(Fixup.getLoc(), "ILP32 8 byte absolute data "
			"relocation not supported (LP64 eqv: ABS64)");
        return ELF::R_AARCH64_NONE;
      } else
        return ELF::R_AARCH64_ABS64;
    case AArch64::fixup_aarch64_add_imm12:
      if (RefKind == AArch64MCExpr::VK_DTPREL_HI12)
        return R_CLS(TLSLD_ADD_DTPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_TPREL_HI12)
        return R_CLS(TLSLE_ADD_TPREL_HI12);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12_NC)
        return R_CLS(TLSLD_ADD_DTPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_LO12)
        return R_CLS(TLSLD_ADD_DTPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12_NC)
        return R_CLS(TLSLE_ADD_TPREL_LO12_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_LO12)
        return R_CLS(TLSLE_ADD_TPREL_LO12);
      if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
        return R_CLS(TLSDESC_ADD_LO12);
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(ADD_ABS_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for add (uimm12) instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale1:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST8_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST8_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST8_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 8-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale2:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST16_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST16_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST16_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 16-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale4:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST32_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST32_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST32_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_LD32_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte unchecked GOT load/store relocation "
                          "not supported (ILP32 eqv: LD32_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_GOT && !IsNC) {
        if (IsILP32) {
          Ctx.reportError(Fixup.getLoc(),
                          "ILP32 4 byte checked GOT load/store relocation "
                          "not supported (unchecked eqv: LD32_GOT_LO12_NC)");
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte checked GOT load/store relocation "
                          "not supported (unchecked/ILP32 eqv: "
                          "LD32_GOT_LO12_NC)");
        }
        return ELF::R_AARCH64_NONE;
      }
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "LP64 32-bit load/store "
                          "relocation not supported (ILP32 eqv: "
                          "TLSIE_LD32_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC) {
        if (IsILP32) {
          return ELF::R_AARCH64_P32_TLSDESC_LD32_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(),
                          "LP64 4 byte TLSDESC load/store relocation "
                          "not supported (ILP32 eqv: TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 32-bit load/store instruction "
                      "fixup_aarch64_ldst_imm12_scale4");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale8:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST64_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOT && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_LD64_GOT_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "LD64_GOT_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST64_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST64_TPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_GOTTPREL && IsNC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "TLSIE_LD64_GOTTPREL_LO12_NC)");
          return ELF::R_AARCH64_NONE;
        }
      }
      if (SymLoc == AArch64MCExpr::VK_TLSDESC) {
        if (!IsILP32) {
          return ELF::R_AARCH64_TLSDESC_LD64_LO12;
        } else {
          Ctx.reportError(Fixup.getLoc(), "ILP32 64-bit load/store "
                          "relocation not supported (LP64 eqv: "
                          "TLSDESC_LD64_LO12)");
          return ELF::R_AARCH64_NONE;
        }
      }
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 64-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_ldst_imm12_scale16:
      if (SymLoc == AArch64MCExpr::VK_ABS && IsNC)
        return R_CLS(LDST128_ABS_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && !IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_DTPREL && IsNC)
        return R_CLS(TLSLD_LDST128_DTPREL_LO12_NC);
      if (SymLoc == AArch64MCExpr::VK_TPREL && !IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12);
      if (SymLoc == AArch64MCExpr::VK_TPREL && IsNC)
        return R_CLS(TLSLE_LDST128_TPREL_LO12_NC);

      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for 128-bit load/store instruction");
      return ELF::R_AARCH64_NONE;
    // ILP32-incompatible movw variants never reach here; isNonILP32reloc
    // has already rejected them above.
    case AArch64::fixup_aarch64_movw:
      if (RefKind == AArch64MCExpr::VK_ABS_G3)
        return ELF::R_AARCH64_MOVW_UABS_G3;
      if (RefKind == AArch64MCExpr::VK_ABS_G2)
        return ELF::R_AARCH64_MOVW_UABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_S)
        return ELF::R_AARCH64_MOVW_SABS_G2;
      if (RefKind == AArch64MCExpr::VK_ABS_G2_NC)
        return ELF::R_AARCH64_MOVW_UABS_G2_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G1)
        return R_CLS(MOVW_UABS_G1);
      if (RefKind == AArch64MCExpr::VK_ABS_G1_S)
        return ELF::R_AARCH64_MOVW_SABS_G1;
      if (RefKind == AArch64MCExpr::VK_ABS_G1_NC)
        return ELF::R_AARCH64_MOVW_UABS_G1_NC;
      if (RefKind == AArch64MCExpr::VK_ABS_G0)
        return R_CLS(MOVW_UABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_S)
        return R_CLS(MOVW_SABS_G0);
      if (RefKind == AArch64MCExpr::VK_ABS_G0_NC)
        return R_CLS(MOVW_UABS_G0_NC);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G2)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G2;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1)
        return R_CLS(TLSLD_MOVW_DTPREL_G1);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G1_NC)
        return ELF::R_AARCH64_TLSLD_MOVW_DTPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0)
        return R_CLS(TLSLD_MOVW_DTPREL_G0);
      if (RefKind == AArch64MCExpr::VK_DTPREL_G0_NC)
        return R_CLS(TLSLD_MOVW_DTPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_TPREL_G2)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G2;
      if (RefKind == AArch64MCExpr::VK_TPREL_G1)
        return R_CLS(TLSLE_MOVW_TPREL_G1);
      if (RefKind == AArch64MCExpr::VK_TPREL_G1_NC)
        return ELF::R_AARCH64_TLSLE_MOVW_TPREL_G1_NC;
      if (RefKind == AArch64MCExpr::VK_TPREL_G0)
        return R_CLS(TLSLE_MOVW_TPREL_G0);
      if (RefKind == AArch64MCExpr::VK_TPREL_G0_NC)
        return R_CLS(TLSLE_MOVW_TPREL_G0_NC);
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G1)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G1;
      if (RefKind == AArch64MCExpr::VK_GOTTPREL_G0_NC)
        return ELF::R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC;
      Ctx.reportError(Fixup.getLoc(),
                      "invalid fixup for movz/movk instruction");
      return ELF::R_AARCH64_NONE;
    case AArch64::fixup_aarch64_tlsdesc_call:
      return R_CLS(TLSDESC_CALL);
    default:
      Ctx.reportError(Fixup.getLoc(), "Unknown ELF relocation type");
      return ELF::R_AARCH64_NONE;
    }
  }

  llvm_unreachable("Unimplemented fixup -> relocation");
}
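
// Worked example of the R_CLS selection above: an absolute data word
// (FK_Data_4) hits R_CLS(ABS32), which resolves to ELF::R_AARCH64_P32_ABS32
// under ILP32 and to ELF::R_AARCH64_ABS32 under LP64; cases that exist in
// only one data model bypass R_CLS and reportError instead.
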
Example #6
static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
                                 MCContext &Ctx) {
  unsigned Kind = Fixup.getKind();
  switch (Kind) {
  default:
    llvm_unreachable("Unknown fixup kind!");
  case RISCV::fixup_riscv_got_hi20:
    llvm_unreachable("Relocation should be unconditionally forced\n");
  case FK_Data_1:
  case FK_Data_2:
  case FK_Data_4:
  case FK_Data_8:
    return Value;
  case RISCV::fixup_riscv_lo12_i:
  case RISCV::fixup_riscv_pcrel_lo12_i:
  case RISCV::fixup_riscv_tprel_lo12_i:
    return Value & 0xfff;
  case RISCV::fixup_riscv_lo12_s:
  case RISCV::fixup_riscv_pcrel_lo12_s:
  case RISCV::fixup_riscv_tprel_lo12_s:
    return (((Value >> 5) & 0x7f) << 25) | ((Value & 0x1f) << 7);
  case RISCV::fixup_riscv_hi20:
  case RISCV::fixup_riscv_pcrel_hi20:
  case RISCV::fixup_riscv_tprel_hi20:
    // Add 1 if bit 11 is 1, to compensate for low 12 bits being negative.
    return ((Value + 0x800) >> 12) & 0xfffff;
  case RISCV::fixup_riscv_jal: {
    if (!isInt<21>(Value))
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x1)
      Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
    // Need to produce imm[19|10:1|11|19:12] from the 21-bit Value.
    unsigned Sbit = (Value >> 20) & 0x1;
    unsigned Hi8 = (Value >> 12) & 0xff;
    unsigned Mid1 = (Value >> 11) & 0x1;
    unsigned Lo10 = (Value >> 1) & 0x3ff;
    // Inst{31} = Sbit;
    // Inst{30-21} = Lo10;
    // Inst{20} = Mid1;
    // Inst{19-12} = Hi8;
    Value = (Sbit << 19) | (Lo10 << 9) | (Mid1 << 8) | Hi8;
    return Value;
  }
  case RISCV::fixup_riscv_branch: {
    if (!isInt<13>(Value))
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x1)
      Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
    // Need to extract imm[12], imm[10:5], imm[4:1], imm[11] from the 13-bit
    // Value.
    unsigned Sbit = (Value >> 12) & 0x1;
    unsigned Hi1 = (Value >> 11) & 0x1;
    unsigned Mid6 = (Value >> 5) & 0x3f;
    unsigned Lo4 = (Value >> 1) & 0xf;
    // Inst{31} = Sbit;
    // Inst{30-25} = Mid6;
    // Inst{11-8} = Lo4;
    // Inst{7} = Hi1;
    Value = (Sbit << 31) | (Mid6 << 25) | (Lo4 << 8) | (Hi1 << 7);
    return Value;
  }
  case RISCV::fixup_riscv_call:
  case RISCV::fixup_riscv_call_plt: {
    // Jalr will add UpperImm with the sign-extended 12-bit LowerImm,
    // we need to add 0x800ULL before extract upper bits to reflect the
    // effect of the sign extension.
    uint64_t UpperImm = (Value + 0x800ULL) & 0xfffff000ULL;
    uint64_t LowerImm = Value & 0xfffULL;
    return UpperImm | ((LowerImm << 20) << 32);
  }
  case RISCV::fixup_riscv_rvc_jump: {
    // Need to produce offset[11|4|9:8|10|6|7|3:1|5] from the 11-bit Value.
    unsigned Bit11  = (Value >> 11) & 0x1;
    unsigned Bit4   = (Value >> 4) & 0x1;
    unsigned Bit9_8 = (Value >> 8) & 0x3;
    unsigned Bit10  = (Value >> 10) & 0x1;
    unsigned Bit6   = (Value >> 6) & 0x1;
    unsigned Bit7   = (Value >> 7) & 0x1;
    unsigned Bit3_1 = (Value >> 1) & 0x7;
    unsigned Bit5   = (Value >> 5) & 0x1;
    Value = (Bit11 << 10) | (Bit4 << 9) | (Bit9_8 << 7) | (Bit10 << 6) |
            (Bit6 << 5) | (Bit7 << 4) | (Bit3_1 << 1) | Bit5;
    return Value;
  }
  case RISCV::fixup_riscv_rvc_branch: {
    // Need to produce offset[8|4:3], [reg 3 bit], offset[7:6|2:1|5] from the
    // 9-bit Value.
    unsigned Bit8   = (Value >> 8) & 0x1;
    unsigned Bit7_6 = (Value >> 6) & 0x3;
    unsigned Bit5   = (Value >> 5) & 0x1;
    unsigned Bit4_3 = (Value >> 3) & 0x3;
    unsigned Bit2_1 = (Value >> 1) & 0x3;
    Value = (Bit8 << 12) | (Bit4_3 << 10) | (Bit7_6 << 5) | (Bit2_1 << 3) |
            (Bit5 << 2);
    return Value;
  }
  }
}
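
// Standalone round-trip check (editor's sketch, not LLVM code) for the
// fixup_riscv_branch bit scrambling above: encode a 13-bit B-type offset
// into its instruction bit positions, decode it back, and confirm the
// round trip. Compiles with any C++11 compiler.
#include <cassert>
#include <cstdint>

static uint32_t encodeBranchImm(uint64_t Value) {
  // Mirrors the fixup_riscv_branch case above.
  unsigned Sbit = (Value >> 12) & 0x1;
  unsigned Hi1  = (Value >> 11) & 0x1;
  unsigned Mid6 = (Value >> 5) & 0x3f;
  unsigned Lo4  = (Value >> 1) & 0xf;
  return (Sbit << 31) | (Mid6 << 25) | (Lo4 << 8) | (Hi1 << 7);
}

static uint64_t decodeBranchImm(uint32_t Inst) {
  // Reassemble imm[12|11|10:5|4:1] from the B-type fields; imm[0] is 0.
  return (uint64_t((Inst >> 31) & 0x1) << 12) |
         (uint64_t((Inst >> 7) & 0x1) << 11) |
         (uint64_t((Inst >> 25) & 0x3f) << 5) |
         (uint64_t((Inst >> 8) & 0xf) << 1);
}

int main() {
  for (uint64_t Off = 0; Off < (1u << 13); Off += 2)
    assert(decodeBranchImm(encodeBranchImm(Off)) == Off);
  return 0;
}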
Example #7
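
// AdrImmBits is used below but not defined in this excerpt. Reconstructed
// approximately from LLVM's AArch64AsmBackend.cpp: ADR/ADRP split the 21-bit
// immediate into immlo (instruction bits 30:29) and immhi (bits 23:5).
static unsigned AdrImmBits(unsigned Value) {
  unsigned lo2 = Value & 0x3;
  unsigned hi19 = (Value & 0x1ffffc) >> 2;
  return (hi19 << 5) | (lo2 << 29);
}
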
static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
                                 MCContext &Ctx) {
  unsigned Kind = Fixup.getKind();
  int64_t SignedValue = static_cast<int64_t>(Value);
  switch (Kind) {
  default:
    llvm_unreachable("Unknown fixup kind!");
  case AArch64::fixup_aarch64_pcrel_adr_imm21:
    if (SignedValue > 2097151 || SignedValue < -2097152)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    return AdrImmBits(Value & 0x1fffffULL);
  case AArch64::fixup_aarch64_pcrel_adrp_imm21:
    return AdrImmBits((Value & 0x1fffff000ULL) >> 12);
  case AArch64::fixup_aarch64_ldr_pcrel_imm19:
  case AArch64::fixup_aarch64_pcrel_branch19:
    // Signed 21-bit immediate
    if (SignedValue > 2097151 || SignedValue < -2097152)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x3)
      Ctx.reportError(Fixup.getLoc(), "fixup not sufficiently aligned");
    // Low two bits are not encoded.
    return (Value >> 2) & 0x7ffff;
  case AArch64::fixup_aarch64_add_imm12:
  case AArch64::fixup_aarch64_ldst_imm12_scale1:
    // Unsigned 12-bit immediate
    if (Value >= 0x1000)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    return Value;
  case AArch64::fixup_aarch64_ldst_imm12_scale2:
    // Unsigned 12-bit immediate which gets multiplied by 2
    if (Value >= 0x2000)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x1)
      Ctx.reportError(Fixup.getLoc(), "fixup must be 2-byte aligned");
    return Value >> 1;
  case AArch64::fixup_aarch64_ldst_imm12_scale4:
    // Unsigned 12-bit immediate which gets multiplied by 4
    if (Value >= 0x4000)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x3)
      Ctx.reportError(Fixup.getLoc(), "fixup must be 4-byte aligned");
    return Value >> 2;
  case AArch64::fixup_aarch64_ldst_imm12_scale8:
    // Unsigned 12-bit immediate which gets multiplied by 8
    if (Value >= 0x8000)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x7)
      Ctx.reportError(Fixup.getLoc(), "fixup must be 8-byte aligned");
    return Value >> 3;
  case AArch64::fixup_aarch64_ldst_imm12_scale16:
    // Unsigned 12-bit immediate which gets multiplied by 16
    if (Value >= 0x10000)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0xf)
      Ctx.reportError(Fixup.getLoc(), "fixup must be 16-byte aligned");
    return Value >> 4;
  case AArch64::fixup_aarch64_movw:
    Ctx.reportError(Fixup.getLoc(),
                    "no resolvable MOVZ/MOVK fixups supported yet");
    return Value;
  case AArch64::fixup_aarch64_pcrel_branch14:
    // Signed 16-bit immediate
    if (SignedValue > 32767 || SignedValue < -32768)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    // Low two bits are not encoded (4-byte alignment assumed).
    if (Value & 0x3)
      Ctx.reportError(Fixup.getLoc(), "fixup not sufficiently aligned");
    return (Value >> 2) & 0x3fff;
  case AArch64::fixup_aarch64_pcrel_branch26:
  case AArch64::fixup_aarch64_pcrel_call26:
    // Signed 28-bit immediate
    if (SignedValue > 134217727 || SignedValue < -134217728)
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    // Low two bits are not encoded (4-byte alignment assumed).
    if (Value & 0x3)
      Ctx.reportError(Fixup.getLoc(), "fixup not sufficiently aligned");
    return (Value >> 2) & 0x3ffffff;
  case FK_Data_1:
  case FK_Data_2:
  case FK_Data_4:
  case FK_Data_8:
    return Value;
  }
}