[llvm-branch-commits] [BOLT][NFC] Pass JumpTable to analyzeJumpTable (PR #132110)

via llvm-branch-commits llvm-branch-commits at lists.llvm.org
Thu Mar 20 14:07:21 PDT 2025


llvmbot wrote:



@llvm/pr-subscribers-bolt

Author: Amir Ayupov (aaupov)

Changes:

The optional arguments of `analyzeJumpTable` (`EntriesAsAddress` and
`HasEntryInFragment`) are attributes of the `JumpTable` class.

Instead of passing them separately, accept a pointer to the `JumpTable`.
#132114 is a follow-up change that uses the `JumpTable` fields for
parsing on AArch64.

Depends on #132109.
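
In effect, callers that previously threaded the two optional out-parameters through `analyzeJumpTable` now hand it the jump table itself. A condensed sketch of the interface change (abridged from the full diff below; not the complete declarations):

```cpp
// Before: callers pass the two JumpTable fields as separate optional outputs.
bool analyzeJumpTable(const uint64_t Address,
                      const JumpTable::JumpTableType Type,
                      const BinaryFunction &BF,
                      const uint64_t NextJTAddress = 0,
                      JumpTable::AddressesType *EntriesAsAddress = nullptr,
                      bool *HasEntryInFragment = nullptr) const;

// After: callers pass the JumpTable itself; when JT is non-null, the analysis
// populates JT->EntriesAsAddress and sets JT->IsSplit directly.
bool analyzeJumpTable(const uint64_t Address,
                      const JumpTable::JumpTableType Type,
                      const BinaryFunction &BF,
                      const uint64_t NextJTAddress = 0,
                      JumpTable *JT = nullptr) const;
```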

---
Full diff: https://github.com/llvm/llvm-project/pull/132110.diff


2 Files Affected:

- (modified) bolt/include/bolt/Core/BinaryContext.h (+3-4) 
- (modified) bolt/lib/Core/BinaryContext.cpp (+11-13) 


``````````diff
diff --git a/bolt/include/bolt/Core/BinaryContext.h b/bolt/include/bolt/Core/BinaryContext.h
index d713f1b3721f6..69b8f02a5aa0a 100644
--- a/bolt/include/bolt/Core/BinaryContext.h
+++ b/bolt/include/bolt/Core/BinaryContext.h
@@ -584,14 +584,13 @@ class BinaryContext {
   /// If \p NextJTAddress is different from zero, it is used as an upper
   /// bound for jump table memory layout.
   ///
-  /// Optionally, populate \p Address from jump table entries. The entries
-  /// could be partially populated if the jump table detection fails.
+  /// If \p JT is set, populate it with jump table entries. The entries could be
+  /// partially populated if the jump table detection fails.
   bool analyzeJumpTable(const uint64_t Address,
                         const JumpTable::JumpTableType Type,
                         const BinaryFunction &BF,
                         const uint64_t NextJTAddress = 0,
-                        JumpTable::AddressesType *EntriesAsAddress = nullptr,
-                        bool *HasEntryInFragment = nullptr) const;
+                        JumpTable *JT = nullptr) const;
 
   /// After jump table locations are established, this function will populate
   /// their EntriesAsAddress based on memory contents.
diff --git a/bolt/lib/Core/BinaryContext.cpp b/bolt/lib/Core/BinaryContext.cpp
index f557f4c06ccd3..941c02143dc1d 100644
--- a/bolt/lib/Core/BinaryContext.cpp
+++ b/bolt/lib/Core/BinaryContext.cpp
@@ -553,8 +553,7 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
                                      const JumpTable::JumpTableType Type,
                                      const BinaryFunction &BF,
                                      const uint64_t NextJTAddress,
-                                     JumpTable::AddressesType *EntriesAsAddress,
-                                     bool *HasEntryInFragment) const {
+                                     JumpTable *JT) const {
   // Target address of __builtin_unreachable.
   const uint64_t UnreachableAddress = BF.getAddress() + BF.getSize();
 
@@ -571,11 +570,11 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
   size_t TrimmedSize = 0;
 
   auto addEntryAddress = [&](uint64_t EntryAddress, bool Unreachable = false) {
-    if (!EntriesAsAddress)
+    if (!JT)
       return;
-    EntriesAsAddress->emplace_back(EntryAddress);
+    JT->EntriesAsAddress.emplace_back(EntryAddress);
     if (!Unreachable)
-      TrimmedSize = EntriesAsAddress->size();
+      TrimmedSize = JT->EntriesAsAddress.size();
   };
 
   ErrorOr<const BinarySection &> Section = getSectionForAddress(Address);
@@ -676,17 +675,17 @@ bool BinaryContext::analyzeJumpTable(const uint64_t Address,
     ++NumRealEntries;
     LLVM_DEBUG(dbgs() << formatv("OK: {0:x} real entry\n", Value));
 
-    if (TargetBF != &BF && HasEntryInFragment)
-      *HasEntryInFragment = true;
+    if (TargetBF != &BF && JT)
+      JT->IsSplit = true;
     addEntryAddress(Value);
   }
 
   // Trim direct/normal jump table to exclude trailing unreachable entries that
   // can collide with a function address.
-  if (Type == JumpTable::JTT_X86_64_ABS && EntriesAsAddress &&
-      TrimmedSize != EntriesAsAddress->size() &&
+  if (Type == JumpTable::JTT_X86_64_ABS && JT &&
+      TrimmedSize != JT->EntriesAsAddress.size() &&
       getBinaryFunctionAtAddress(UnreachableAddress))
-    EntriesAsAddress->resize(TrimmedSize);
+    JT->EntriesAsAddress.resize(TrimmedSize);
 
   // It's a jump table if the number of real entries is more than 1, or there's
   // one real entry and one or more special targets. If there are only multiple
@@ -710,9 +709,8 @@ void BinaryContext::populateJumpTables() {
     if (NextJTI != JTE)
       NextJTAddress = NextJTI->second->getAddress();
 
-    const bool Success =
-        analyzeJumpTable(JT->getAddress(), JT->Type, *(JT->Parents[0]),
-                         NextJTAddress, &JT->EntriesAsAddress, &JT->IsSplit);
+    const bool Success = analyzeJumpTable(
+        JT->getAddress(), JT->Type, *JT->Parents.front(), NextJTAddress, JT);
     if (!Success) {
       LLVM_DEBUG({
         dbgs() << "failed to analyze ";

``````````



https://github.com/llvm/llvm-project/pull/132110


More information about the llvm-branch-commits mailing list