[llvm] CallPromotionUtils: Correctly use IndexSize when determining the bit width of pointer offsets. (PR #119138)

Owen Anderson via llvm-commits llvm-commits at lists.llvm.org
Sun Dec 8 10:18:57 PST 2024


https://github.com/resistor created https://github.com/llvm/llvm-project/pull/119138

None

From d8b5850dcbb94e3ae29abbec2653b794a32ddbd9 Mon Sep 17 00:00:00 2001
From: Owen Anderson <resistor at mac.com>
Date: Mon, 9 Dec 2024 07:17:57 +1300
Subject: [PATCH] CallPromotionUtils: Correctly use IndexSize when determining
 the bit width of pointer offsets.

---
 .../Transforms/Utils/CallPromotionUtils.cpp   | 10 ++--
 .../Inline/promote-call-bitwidth.ll           | 47 +++++++++++++++++++
 2 files changed, 54 insertions(+), 3 deletions(-)
 create mode 100644 llvm/test/Transforms/Inline/promote-call-bitwidth.ll

diff --git a/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp b/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
index 17cba2e642a19a..5c8246c14b3882 100644
--- a/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
+++ b/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
@@ -692,14 +692,17 @@ bool llvm::tryPromoteCall(CallBase &CB) {
   if (!VTableEntryLoad)
     return false; // Not a vtable entry load.
   Value *VTableEntryPtr = VTableEntryLoad->getPointerOperand();
-  APInt VTableOffset(DL.getTypeSizeInBits(VTableEntryPtr->getType()), 0);
+  APInt VTableOffset(DL.getIndexSizeInBits(
+                         VTableEntryPtr->getType()->getPointerAddressSpace()),
+                     0);
   Value *VTableBasePtr = VTableEntryPtr->stripAndAccumulateConstantOffsets(
       DL, VTableOffset, /* AllowNonInbounds */ true);
   LoadInst *VTablePtrLoad = dyn_cast<LoadInst>(VTableBasePtr);
   if (!VTablePtrLoad)
     return false; // Not a vtable load.
   Value *Object = VTablePtrLoad->getPointerOperand();
-  APInt ObjectOffset(DL.getTypeSizeInBits(Object->getType()), 0);
+  APInt ObjectOffset(
+      DL.getIndexSizeInBits(Object->getType()->getPointerAddressSpace()), 0);
   Value *ObjectBase = Object->stripAndAccumulateConstantOffsets(
       DL, ObjectOffset, /* AllowNonInbounds */ true);
   if (!(isa<AllocaInst>(ObjectBase) && ObjectOffset == 0))
@@ -712,7 +715,8 @@ bool llvm::tryPromoteCall(CallBase &CB) {
       VTablePtrLoad, VTablePtrLoad->getParent(), BBI, 0, nullptr, nullptr);
   if (!VTablePtr)
     return false; // No vtable found.
-  APInt VTableOffsetGVBase(DL.getTypeSizeInBits(VTablePtr->getType()), 0);
+  APInt VTableOffsetGVBase(
+      DL.getIndexSizeInBits(VTablePtr->getType()->getPointerAddressSpace()), 0);
   Value *VTableGVBase = VTablePtr->stripAndAccumulateConstantOffsets(
       DL, VTableOffsetGVBase, /* AllowNonInbounds */ true);
   GlobalVariable *GV = dyn_cast<GlobalVariable>(VTableGVBase);
diff --git a/llvm/test/Transforms/Inline/promote-call-bitwidth.ll b/llvm/test/Transforms/Inline/promote-call-bitwidth.ll
new file mode 100644
index 00000000000000..f0e9f7aec00d6b
--- /dev/null
+++ b/llvm/test/Transforms/Inline/promote-call-bitwidth.ll
@@ -0,0 +1,47 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --function-signature --scrub-attributes
+; RUN: opt -S -passes=inline < %s | FileCheck %s
+target datalayout = "e-m:e-p200:128:128:128:64-p:64:64-i64:64-i128:128-n64-S128-A200-P200-G200"
+target triple = "riscv64-unknown-freebsd13.0"
+
+define void @test(ptr addrspace(200) %arg1, ptr addrspace(200) %arg2) local_unnamed_addr addrspace(200) {
+; CHECK-LABEL: define {{[^@]+}}@test
+; CHECK-SAME: (ptr addrspace(200) [[ARG1:%.*]], ptr addrspace(200) [[ARG2:%.*]]) local_unnamed_addr addrspace(200) {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[ARG2]], align 16
+; CHECK-NEXT:    call addrspace(200) void [[TMP0]](ptr addrspace(200) [[ARG1]])
+; CHECK-NEXT:    ret void
+;
+entry:
+  call void @call_fnptr(ptr addrspace(200) %arg1, ptr addrspace(200) %arg2)
+  ret void
+}
+
+define internal void @call_fnptr(ptr addrspace(200) %this, ptr addrspace(200) %arg) unnamed_addr addrspace(200) align 2 {
+entry:
+  %0 = load ptr addrspace(200), ptr addrspace(200) %arg, align 16
+  call void %0(ptr addrspace(200) %this)
+  ret void
+}
+
+define void @test2(ptr addrspace(200) %this) local_unnamed_addr addrspace(200) {
+; CHECK-LABEL: define {{[^@]+}}@test2
+; CHECK-SAME: (ptr addrspace(200) [[THIS:%.*]]) local_unnamed_addr addrspace(200) {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[VTABLE_I:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[THIS]], align 16
+; CHECK-NEXT:    [[FN_I:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[VTABLE_I]], align 16
+; CHECK-NEXT:    call addrspace(200) void [[FN_I]](ptr addrspace(200) [[THIS]])
+; CHECK-NEXT:    ret void
+;
+entry:
+  call void @call_via_vtable(ptr addrspace(200) %this)
+  ret void
+}
+
+define internal void @call_via_vtable(ptr addrspace(200) %this) unnamed_addr addrspace(200) {
+entry:
+  %0 = bitcast ptr addrspace(200) %this to ptr addrspace(200)
+  %vtable = load ptr addrspace(200), ptr addrspace(200) %0, align 16
+  %fn = load ptr addrspace(200), ptr addrspace(200) %vtable, align 16
+  call void %fn(ptr addrspace(200) %this)
+  ret void
+}



More information about the llvm-commits mailing list