[llvm-branch-commits] [llvm] [BOLT][NFC] Define AArch64 jump table types (PR #132109)

via llvm-branch-commits llvm-branch-commits at lists.llvm.org
Thu Mar 20 14:13:35 PDT 2025


llvmbot wrote:



@llvm/pr-subscribers-bolt

Author: Amir Ayupov (aaupov)

<details>
<summary>Changes</summary>

Define new JumpTable types for AArch64, based on the AArch64 jump table definitions used in LLVM:

https://github.com/llvm/llvm-project/blob/335a4614dee4123ff4f86e6400fc6a305766e227/llvm/lib/Target/AArch64/AArch64InstrInfo.td#L1217-L1225
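
For context, these AArch64 types describe jump tables whose entries are 1-, 2-, or 4-byte values interpreted relative to some base address, in contrast to the absolute (`JTT_X86_64_ABS`) and 4-byte PIC (`JTT_X86_64_PIC4`) entries BOLT already models for x86-64. Below is a minimal, standalone sketch (not BOLT code; every name in it is made up for illustration) of how a branch target could be recovered from such a relative entry. It assumes a little-endian host, treats the entry as a plain sign-extended byte offset added to a base, and ignores any scaling the compiler may apply to the stored value; the actual decoding rules are the subject of the follow-up parsing PR.

```cpp
// Illustrative sketch only: decode a relative jump-table entry of 1, 2, or
// 4 bytes from a little-endian buffer and reconstruct the branch target as
// Base + sign-extended entry. Base selection and offset scaling are
// intentionally omitted.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

enum class AArch64JTKind { Rel1, Rel2, Rel4 };

unsigned entrySize(AArch64JTKind Kind) {
  switch (Kind) {
  case AArch64JTKind::Rel1: return 1;
  case AArch64JTKind::Rel2: return 2;
  case AArch64JTKind::Rel4: return 4;
  }
  return 0; // unreachable
}

// Read the I-th entry of the table, sign-extend it, and add it to Base.
uint64_t readTarget(const std::vector<uint8_t> &Table, AArch64JTKind Kind,
                    size_t I, uint64_t Base) {
  const unsigned Size = entrySize(Kind);
  assert((I + 1) * Size <= Table.size() && "entry out of bounds");
  int64_t Offset = 0;
  switch (Kind) {
  case AArch64JTKind::Rel1: {
    int8_t V;
    std::memcpy(&V, &Table[I * Size], sizeof(V));
    Offset = V;
    break;
  }
  case AArch64JTKind::Rel2: {
    int16_t V;
    std::memcpy(&V, &Table[I * Size], sizeof(V));
    Offset = V;
    break;
  }
  case AArch64JTKind::Rel4: {
    int32_t V;
    std::memcpy(&V, &Table[I * Size], sizeof(V));
    Offset = V;
    break;
  }
  }
  return Base + Offset;
}

int main() {
  // Two 2-byte entries: offsets 0x10 and 0x24 from a hypothetical base.
  const std::vector<uint8_t> Table = {0x10, 0x00, 0x24, 0x00};
  const uint64_t Base = 0x400000;
  for (size_t I = 0; I < 2; ++I)
    std::printf("target[%zu] = 0x%llx\n", I,
                (unsigned long long)readTarget(Table, AArch64JTKind::Rel2, I,
                                               Base));
  return 0;
}
```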

Parsing and basic support are in follow-up PR #132114.

Depends on #132108.

---
Full diff: https://github.com/llvm/llvm-project/pull/132109.diff


8 Files Affected:

- (modified) bolt/include/bolt/Core/BinaryContext.h (+12-1) 
- (modified) bolt/include/bolt/Core/JumpTable.h (+26-2) 
- (modified) bolt/lib/Core/BinaryContext.cpp (+21-17) 
- (modified) bolt/lib/Core/BinaryEmitter.cpp (+1-1) 
- (modified) bolt/lib/Core/BinaryFunction.cpp (+10-7) 
- (modified) bolt/lib/Core/JumpTable.cpp (+3-3) 
- (modified) bolt/lib/Passes/IndirectCallPromotion.cpp (+2-2) 
- (modified) bolt/lib/Passes/JTFootprintReduction.cpp (+1-1) 


``````````diff
diff --git a/bolt/include/bolt/Core/BinaryContext.h b/bolt/include/bolt/Core/BinaryContext.h
index 485979f1a55a1..d713f1b3721f6 100644
--- a/bolt/include/bolt/Core/BinaryContext.h
+++ b/bolt/include/bolt/Core/BinaryContext.h
@@ -435,7 +435,18 @@ class BinaryContext {
 
   /// Return size of an entry for the given jump table \p Type.
   uint64_t getJumpTableEntrySize(JumpTable::JumpTableType Type) const {
-    return Type == JumpTable::JTT_PIC ? 4 : AsmInfo->getCodePointerSize();
+    switch (Type) {
+    case JumpTable::JTT_X86_64_PIC4:
+      return 4;
+    case JumpTable::JTT_X86_64_ABS:
+      return AsmInfo->getCodePointerSize();
+    case JumpTable::JTT_AARCH64_REL1:
+      return 1;
+    case JumpTable::JTT_AARCH64_REL2:
+      return 2;
+    case JumpTable::JTT_AARCH64_REL4:
+      return 4;
+    }
   }
 
   /// Return JumpTable containing a given \p Address.
diff --git a/bolt/include/bolt/Core/JumpTable.h b/bolt/include/bolt/Core/JumpTable.h
index 52b9ccee1f7e1..7dc0c6581b233 100644
--- a/bolt/include/bolt/Core/JumpTable.h
+++ b/bolt/include/bolt/Core/JumpTable.h
@@ -47,10 +47,34 @@ class JumpTable : public BinaryData {
 
 public:
   enum JumpTableType : char {
-    JTT_NORMAL,
-    JTT_PIC,
+    JTT_X86_64_FIRST = 0,
+    JTT_X86_64_ABS = JTT_X86_64_FIRST,
+    JTT_X86_64_PIC4,
+    JTT_X86_64_LAST = JTT_X86_64_PIC4,
+    JTT_AARCH64_FIRST,
+    JTT_AARCH64_REL1 = JTT_AARCH64_FIRST,
+    JTT_AARCH64_REL2,
+    JTT_AARCH64_REL4,
+    JTT_AARCH64_LAST = JTT_AARCH64_REL4
   };
 
+  static StringRef getTypeStr(JumpTableType Type) {
+    switch (Type) {
+    case JTT_X86_64_ABS:
+      return "X86_64_ABS";
+    case JTT_X86_64_PIC4:
+      return "X86_64_PIC4";
+    case JTT_AARCH64_REL1:
+      return "AARCH64_REL1";
+    case JTT_AARCH64_REL2:
+      return "AARCH64_REL2";
+    case JTT_AARCH64_REL4:
+      return "AARCH64_REL4";
+    }
+  }
+
+  const StringRef getTypeStr() { return getTypeStr(Type); }
+
   /// Branch statistics for jump table entries.
   struct JumpInfo {
     uint64_t Mispreds{0};
diff --git a/bolt/lib/Core/BinaryContext.cpp b/bolt/lib/Core/BinaryContext.cpp
index c4902653a9b01..f557f4c06ccd3 100644
--- a/bolt/lib/Core/BinaryContext.cpp
+++ b/bolt/lib/Core/BinaryContext.cpp
@@ -496,7 +496,7 @@ BinaryContext::handleAddressRef(uint64_t Address, BinaryFunction &BF,
     const MemoryContentsType MemType = analyzeMemoryAt(Address, BF);
     if (MemType == MemoryContentsType::POSSIBLE_PIC_JUMP_TABLE && IsPCRel) {
       const MCSymbol *Symbol =
-          getOrCreateJumpTable(BF, Address, JumpTable::JTT_PIC);
+          getOrCreateJumpTable(BF, Address, JumpTable::JTT_X86_64_PIC4);
 
       return std::make_pair(Symbol, 0);
     }
@@ -540,10 +540,10 @@ MemoryContentsType BinaryContext::analyzeMemoryAt(uint64_t Address,
 
   // Start with checking for PIC jump table. We expect non-PIC jump tables
   // to have high 32 bits set to 0.
-  if (analyzeJumpTable(Address, JumpTable::JTT_PIC, BF))
+  if (analyzeJumpTable(Address, JumpTable::JTT_X86_64_PIC4, BF))
     return MemoryContentsType::POSSIBLE_PIC_JUMP_TABLE;
 
-  if (analyzeJumpTable(Address, JumpTable::JTT_NORMAL, BF))
+  if (analyzeJumpTable(Address, JumpTable::JTT_X86_64_ABS, BF))
     return MemoryContentsType::POSSIBLE_JUMP_TABLE;
 
   return MemoryContentsType::UNKNOWN;
@@ -594,12 +594,9 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
   if (NextJTAddress)
     UpperBound = std::min(NextJTAddress, UpperBound);
 
-  LLVM_DEBUG({
-    using JTT = JumpTable::JumpTableType;
-    dbgs() << formatv("BOLT-DEBUG: analyzeJumpTable @{0:x} in {1}, JTT={2}\n",
-                      Address, BF.getPrintName(),
-                      Type == JTT::JTT_PIC ? "PIC" : "Normal");
-  });
+  LLVM_DEBUG(
+      dbgs() << formatv("BOLT-DEBUG: analyzeJumpTable @{0:x} in {1}, JTT={2}\n",
+                        Address, BF, JumpTable::getTypeStr(Type)));
   const uint64_t EntrySize = getJumpTableEntrySize(Type);
   for (uint64_t EntryAddress = Address; EntryAddress <= UpperBound - EntrySize;
        EntryAddress += EntrySize) {
@@ -607,13 +604,13 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
                       << " -> ");
     // Check if there's a proper relocation against the jump table entry.
     if (HasRelocations) {
-      if (Type == JumpTable::JTT_PIC &&
+      if (Type == JumpTable::JTT_X86_64_PIC4 &&
           !DataPCRelocations.count(EntryAddress)) {
         LLVM_DEBUG(
             dbgs() << "FAIL: JTT_PIC table, no relocation for this address\n");
         break;
       }
-      if (Type == JumpTable::JTT_NORMAL && !getRelocationAt(EntryAddress)) {
+      if (Type == JumpTable::JTT_X86_64_ABS && !getRelocationAt(EntryAddress)) {
         LLVM_DEBUG(
             dbgs()
             << "FAIL: JTT_NORMAL table, no relocation for this address\n");
@@ -621,10 +618,17 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
       }
     }
 
-    const uint64_t Value =
-        (Type == JumpTable::JTT_PIC)
-            ? Address + *getSignedValueAtAddress(EntryAddress, EntrySize)
-            : *getPointerAtAddress(EntryAddress);
+    uint64_t Value = 0;
+    switch (Type) {
+    case JumpTable::JTT_X86_64_PIC4:
+      Value = Address + *getSignedValueAtAddress(EntryAddress, EntrySize);
+      break;
+    case JumpTable::JTT_X86_64_ABS:
+      Value = *getPointerAtAddress(EntryAddress);
+      break;
+    default:
+      llvm_unreachable("Unhandled jump table type");
+    }
 
     // __builtin_unreachable() case.
     if (Value == UnreachableAddress) {
@@ -679,7 +683,7 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
 
   // Trim direct/normal jump table to exclude trailing unreachable entries that
   // can collide with a function address.
-  if (Type == JumpTable::JTT_NORMAL && EntriesAsAddress &&
+  if (Type == JumpTable::JTT_X86_64_ABS && EntriesAsAddress &&
       TrimmedSize != EntriesAsAddress->size() &&
       getBinaryFunctionAtAddress(UnreachableAddress))
     EntriesAsAddress->resize(TrimmedSize);
@@ -736,7 +740,7 @@ void BinaryContext::populateJumpTables() {
 
     // In strict mode, erase PC-relative relocation record. Later we check that
     // all such records are erased and thus have been accounted for.
-    if (opts::StrictMode && JT->Type == JumpTable::JTT_PIC) {
+    if (opts::StrictMode && JT->Type == JumpTable::JTT_X86_64_PIC4) {
       for (uint64_t Address = JT->getAddress();
            Address < JT->getAddress() + JT->getSize();
            Address += JT->EntrySize) {
diff --git a/bolt/lib/Core/BinaryEmitter.cpp b/bolt/lib/Core/BinaryEmitter.cpp
index 1aad25242712f..77272045d5bac 100644
--- a/bolt/lib/Core/BinaryEmitter.cpp
+++ b/bolt/lib/Core/BinaryEmitter.cpp
@@ -850,7 +850,7 @@ void BinaryEmitter::emitJumpTable(const JumpTable &JT, MCSection *HotSection,
       }
       LastLabel = LI->second;
     }
-    if (JT.Type == JumpTable::JTT_NORMAL) {
+    if (JT.Type == JumpTable::JTT_X86_64_ABS) {
       Streamer.emitSymbolValue(Entry, JT.OutputEntrySize);
     } else { // JTT_PIC
       const MCSymbolRefExpr *JTExpr =
diff --git a/bolt/lib/Core/BinaryFunction.cpp b/bolt/lib/Core/BinaryFunction.cpp
index b46ba1a0d1a85..effd3c4799bf6 100644
--- a/bolt/lib/Core/BinaryFunction.cpp
+++ b/bolt/lib/Core/BinaryFunction.cpp
@@ -912,7 +912,7 @@ BinaryFunction::processIndirectBranch(MCInst &Instruction, unsigned Size,
            "Invalid memory instruction");
     const MCExpr *FixedEntryDispExpr = FixedEntryDispOperand->getExpr();
     const uint64_t EntryAddress = getExprValue(FixedEntryDispExpr);
-    uint64_t EntrySize = BC.getJumpTableEntrySize(JumpTable::JTT_PIC);
+    uint64_t EntrySize = BC.getJumpTableEntrySize(JumpTable::JTT_X86_64_PIC4);
     ErrorOr<int64_t> Value =
         BC.getSignedValueAtAddress(EntryAddress, EntrySize);
     if (!Value)
@@ -982,12 +982,14 @@ BinaryFunction::processIndirectBranch(MCInst &Instruction, unsigned Size,
   MemoryContentsType MemType;
   if (JumpTable *JT = BC.getJumpTableContainingAddress(ArrayStart)) {
     switch (JT->Type) {
-    case JumpTable::JTT_NORMAL:
+    case JumpTable::JTT_X86_64_ABS:
       MemType = MemoryContentsType::POSSIBLE_JUMP_TABLE;
       break;
-    case JumpTable::JTT_PIC:
+    case JumpTable::JTT_X86_64_PIC4:
       MemType = MemoryContentsType::POSSIBLE_PIC_JUMP_TABLE;
       break;
+    default:
+      llvm_unreachable("Unhandled jump table type");
     }
   } else {
     MemType = BC.analyzeMemoryAt(ArrayStart, *this);
@@ -998,7 +1000,7 @@ BinaryFunction::processIndirectBranch(MCInst &Instruction, unsigned Size,
   if (BranchType == IndirectBranchType::POSSIBLE_PIC_JUMP_TABLE) {
     if (MemType != MemoryContentsType::POSSIBLE_PIC_JUMP_TABLE)
       return IndirectBranchType::UNKNOWN;
-    JTType = JumpTable::JTT_PIC;
+    JTType = JumpTable::JTT_X86_64_PIC4;
   } else {
     if (MemType == MemoryContentsType::POSSIBLE_PIC_JUMP_TABLE)
       return IndirectBranchType::UNKNOWN;
@@ -1007,7 +1009,7 @@ BinaryFunction::processIndirectBranch(MCInst &Instruction, unsigned Size,
       return IndirectBranchType::POSSIBLE_TAIL_CALL;
 
     BranchType = IndirectBranchType::POSSIBLE_JUMP_TABLE;
-    JTType = JumpTable::JTT_NORMAL;
+    JTType = JumpTable::JTT_X86_64_ABS;
   }
 
   // Convert the instruction into jump table branch.
@@ -1779,7 +1781,8 @@ void BinaryFunction::postProcessJumpTables() {
   // Create labels for all entries.
   for (auto &JTI : JumpTables) {
     JumpTable &JT = *JTI.second;
-    if (JT.Type == JumpTable::JTT_PIC && opts::JumpTables == JTS_BASIC) {
+    if (JT.Type == JumpTable::JTT_X86_64_PIC4 &&
+        opts::JumpTables == JTS_BASIC) {
       opts::JumpTables = JTS_MOVE;
       BC.outs() << "BOLT-INFO: forcing -jump-tables=move as PIC jump table was "
                    "detected in function "
@@ -1974,7 +1977,7 @@ bool BinaryFunction::postProcessIndirectBranches(
           BC.MIB->unsetJumpTable(Instr);
 
           JumpTable *JT = BC.getJumpTableContainingAddress(LastJT);
-          if (JT->Type == JumpTable::JTT_NORMAL) {
+          if (JT->Type == JumpTable::JTT_X86_64_ABS) {
             // Invalidating the jump table may also invalidate other jump table
             // boundaries. Until we have/need a support for this, mark the
             // function as non-simple.
diff --git a/bolt/lib/Core/JumpTable.cpp b/bolt/lib/Core/JumpTable.cpp
index 65e1032c579b5..e780c737c56b5 100644
--- a/bolt/lib/Core/JumpTable.cpp
+++ b/bolt/lib/Core/JumpTable.cpp
@@ -85,9 +85,9 @@ void bolt::JumpTable::updateOriginal() {
   uint64_t EntryOffset = BaseOffset;
   for (MCSymbol *Entry : Entries) {
     const uint64_t RelType =
-        Type == JTT_NORMAL ? ELF::R_X86_64_64 : ELF::R_X86_64_PC32;
+        Type == JTT_X86_64_ABS ? ELF::R_X86_64_64 : ELF::R_X86_64_PC32;
     const uint64_t RelAddend =
-        Type == JTT_NORMAL ? 0 : EntryOffset - BaseOffset;
+        Type == JTT_X86_64_ABS ? 0 : EntryOffset - BaseOffset;
     // Replace existing relocation with the new one to allow any modifications
     // to the original jump table.
     if (BC.HasRelocations)
@@ -99,7 +99,7 @@ void bolt::JumpTable::updateOriginal() {
 
 void bolt::JumpTable::print(raw_ostream &OS) const {
   uint64_t Offset = 0;
-  if (Type == JTT_PIC)
+  if (Type == JTT_X86_64_PIC4)
     OS << "PIC ";
   ListSeparator LS;
 
diff --git a/bolt/lib/Passes/IndirectCallPromotion.cpp b/bolt/lib/Passes/IndirectCallPromotion.cpp
index 2b5a591f4c7a2..d70fd0e72e560 100644
--- a/bolt/lib/Passes/IndirectCallPromotion.cpp
+++ b/bolt/lib/Passes/IndirectCallPromotion.cpp
@@ -246,7 +246,7 @@ IndirectCallPromotion::getCallTargets(BinaryBasicBlock &BB,
 
   if (const JumpTable *JT = BF.getJumpTable(Inst)) {
     // Don't support PIC jump tables for now
-    if (!opts::ICPJumpTablesByTarget && JT->Type == JumpTable::JTT_PIC)
+    if (!opts::ICPJumpTablesByTarget && JT->Type == JumpTable::JTT_X86_64_PIC4)
       return Targets;
     const Location From(BF.getSymbol());
     const std::pair<size_t, size_t> Range =
@@ -256,7 +256,7 @@ IndirectCallPromotion::getCallTargets(BinaryBasicBlock &BB,
     const JumpTable::JumpInfo *JI =
         JT->Counts.empty() ? &DefaultJI : &JT->Counts[Range.first];
     const size_t JIAdj = JT->Counts.empty() ? 0 : 1;
-    assert(JT->Type == JumpTable::JTT_PIC ||
+    assert(JT->Type == JumpTable::JTT_X86_64_PIC4 ||
            JT->EntrySize == BC.AsmInfo->getCodePointerSize());
     for (size_t I = Range.first; I < Range.second; ++I, JI += JIAdj) {
       MCSymbol *Entry = JT->Entries[I];
diff --git a/bolt/lib/Passes/JTFootprintReduction.cpp b/bolt/lib/Passes/JTFootprintReduction.cpp
index 71bdbba950df1..13b37dc3bc6ff 100644
--- a/bolt/lib/Passes/JTFootprintReduction.cpp
+++ b/bolt/lib/Passes/JTFootprintReduction.cpp
@@ -202,7 +202,7 @@ bool JTFootprintReduction::tryOptimizePIC(BinaryContext &BC,
 
   JumpTable->OutputEntrySize = 4;
   // DePICify
-  JumpTable->Type = JumpTable::JTT_NORMAL;
+  JumpTable->Type = JumpTable::JTT_X86_64_ABS;
 
   BB.replaceInstruction(Inst, NewFrag.begin(), NewFrag.end());
   return true;

``````````

</details>


https://github.com/llvm/llvm-project/pull/132109

