[llvm] [SjLjEHPrepare] Fix callsite problem (PR #67264)
via llvm-commits
llvm-commits at lists.llvm.org
Sun Sep 24 05:19:30 PDT 2023
llvmbot wrote:
<!--LLVM PR SUMMARY COMMENT-->
@llvm/pr-subscribers-libunwind
<details>
<summary>Changes</summary>
Fix a call-site problem caused by b61fd7f. The original implementation inserted the no-action marker (call site == -1) only before call instructions that are not marked nounwind and before resume instructions. Commit b61fd7f changed this to insert the marker whenever I.mayThrow() returns true, which wrongly places the no-action marker immediately before invoke instructions such as "invoke void @__cxa_bad_cast()". As a result, the SjLj routines could not handle exceptions when the throw and the try/catch are in the same function, as in test_aux_runtime.pass.cpp. Also add a regression test for VE.
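For illustration, here is a minimal IR sketch of the marker placement described above. The `%call_site` slot name is illustrative, and the store ordering is inferred from the pass behavior and the CHECK lines in the added test, not taken verbatim from compiler output:

```llvm
; Before this patch (with the I.mayThrow() check), the no-action marker was
; also emitted in front of invoke instructions, overriding the numbered
; call-site store the invoke should keep:
;
;   store volatile i32 1, ptr %call_site   ; real call-site number
;   store volatile i32 -1, ptr %call_site  ; wrong: no-action marker clobbers it
;   invoke void @__cxa_bad_cast()
;           to label %invoke.cont unwind label %lpad
;
; With this patch, only plain calls that are not nounwind (and resume
; instructions) receive the -1 marker, so the invoke keeps its number:
;
;   store volatile i32 1, ptr %call_site
;   invoke void @__cxa_bad_cast()
;           to label %invoke.cont unwind label %lpad
```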
---
Full diff: https://github.com/llvm/llvm-project/pull/67264.diff
2 Files Affected:
- (modified) llvm/lib/CodeGen/SjLjEHPrepare.cpp (+13-3)
- (added) llvm/test/CodeGen/VE/Scalar/sjlj-bad-cast.ll (+102)
``````````diff
diff --git a/llvm/lib/CodeGen/SjLjEHPrepare.cpp b/llvm/lib/CodeGen/SjLjEHPrepare.cpp
index 7994821ae7c0a14..d244fc3e66d472b 100644
--- a/llvm/lib/CodeGen/SjLjEHPrepare.cpp
+++ b/llvm/lib/CodeGen/SjLjEHPrepare.cpp
@@ -442,9 +442,19 @@ bool SjLjEHPrepare::setupEntryBlockAndCallSites(Function &F) {
for (BasicBlock &BB : F) {
if (&BB == &F.front())
continue;
- for (Instruction &I : BB)
- if (I.mayThrow())
- insertCallSiteStore(&I, -1);
+ for (Instruction &I : BB) {
+ // Partially revert the b61fd7f change: stop using "I.mayThrow()" here,
+ // since it also inserts the no-action marker immediately before invoke
+ // instructions such as "invoke void @__cxa_bad_cast()". That breaks
+ // tests that throw and catch within the same function, for example
+ // test_aux_runtime.pass.cpp.
+ if (CallInst *CI = dyn_cast<CallInst>(&I)) {
+ if (!CI->doesNotThrow())
+ insertCallSiteStore(CI, -1);
+ } else if (ResumeInst *RI = dyn_cast<ResumeInst>(&I)) {
+ insertCallSiteStore(RI, -1);
+ }
+ }
}
// Register the function context and make sure it's known to not throw
diff --git a/llvm/test/CodeGen/VE/Scalar/sjlj-bad-cast.ll b/llvm/test/CodeGen/VE/Scalar/sjlj-bad-cast.ll
new file mode 100644
index 000000000000000..05001df63c0819c
--- /dev/null
+++ b/llvm/test/CodeGen/VE/Scalar/sjlj-bad-cast.ll
@@ -0,0 +1,102 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
+; RUN: llc < %s -mtriple=ve --exception-model=sjlj | FileCheck %s
+
+;;; Test for SjLjEHPrepare.cpp.
+;;; This checks whether only the correct resume location (1) is inserted
+;;; just before invoking __cxa_bad_cast.
+
+@_ZTISt8bad_cast = external dso_local constant ptr
+@_ZTVN10__cxxabiv117__class_type_infoE = external dso_local global [0 x ptr]
+@_ZTSZ15bad_typeid_testvE1A = internal constant [23 x i8] c"Z15bad_typeid_testvE1A\00", align 1
+@_ZTSZ13bad_cast_testvE1A = internal constant [21 x i8] c"Z13bad_cast_testvE1A\00", align 1
+@_ZTIZ13bad_cast_testvE1A = internal constant { ptr, ptr } { ptr getelementptr inbounds (ptr, ptr @_ZTVN10__cxxabiv117__class_type_infoE, i64 2), ptr @_ZTSZ13bad_cast_testvE1A }, align 8
+@_ZTSZ13bad_cast_testvE1B = internal constant [21 x i8] c"Z13bad_cast_testvE1B\00", align 1
+@_ZTIZ13bad_cast_testvE1B = internal constant { ptr, ptr } { ptr getelementptr inbounds (ptr, ptr @_ZTVN10__cxxabiv117__class_type_infoE, i64 2), ptr @_ZTSZ13bad_cast_testvE1B }, align 8
+@_ZTVN10__cxxabiv121__vmi_class_type_infoE = external dso_local global [0 x ptr]
+@_ZTSZ13bad_cast_testvE1D = internal constant [21 x i8] c"Z13bad_cast_testvE1D\00", align 1
+@_ZTIZ13bad_cast_testvE1D = internal constant { ptr, ptr, i32, i32, ptr, i64, ptr, i64 } { ptr getelementptr inbounds (ptr, ptr @_ZTVN10__cxxabiv121__vmi_class_type_infoE, i64 2), ptr @_ZTSZ13bad_cast_testvE1D, i32 0, i32 2, ptr @_ZTIZ13bad_cast_testvE1A, i64 -6141, ptr @_ZTIZ13bad_cast_testvE1B, i64 0 }, align 8
+
+; Function Attrs: mustprogress noinline optnone
+define dso_local noundef zeroext i1 @_Z13bad_cast_testv() #0 personality ptr @__gxx_personality_sj0 {
+; CHECK-LABEL: _Z13bad_cast_testv:
+; CHECK: # %dynamic_cast.bad_cast
+; CHECK-NEXT: or %s0, 1, (0)1
+; CHECK-NEXT: st %s0, -96(, %s9)
+; CHECK-NEXT: .Ltmp{{[0-9]*}}:
+; CHECK-NEXT: lea %s0, __cxa_bad_cast@lo
+; CHECK-NEXT: and %s0, %s0, (32)0
+; CHECK-NEXT: lea.sl %s12, __cxa_bad_cast@hi(, %s0)
+entry:
+ %retval = alloca i1, align 1
+; %d = alloca %class.D, align 8
+ %bp = alloca ptr, align 8
+ %dr = alloca ptr, align 8
+ %exn.slot = alloca ptr, align 8
+ %ehselector.slot = alloca i32, align 4
+ %0 = alloca ptr, align 8
+; call void @_ZZ13bad_cast_testvEN1DC1Ev(ptr noundef nonnull align 8 dereferenceable(8) %d) #7
+; store ptr %d, ptr %bp, align 8
+ %1 = load ptr, ptr %bp, align 8
+ %2 = call ptr @__dynamic_cast(ptr %1, ptr @_ZTIZ13bad_cast_testvE1B, ptr @_ZTIZ13bad_cast_testvE1D, i64 -2) #7
+ %3 = icmp eq ptr %2, null
+ br i1 %3, label %dynamic_cast.bad_cast, label %dynamic_cast.end
+
+dynamic_cast.bad_cast: ; preds = %entry
+ invoke void @__cxa_bad_cast() #6
+ to label %invoke.cont unwind label %lpad
+
+invoke.cont: ; preds = %dynamic_cast.bad_cast
+ unreachable
+
+dynamic_cast.end: ; preds = %entry
+ store ptr %2, ptr %dr, align 8
+ %4 = load ptr, ptr %dr, align 8
+ br label %try.cont
+
+lpad: ; preds = %dynamic_cast.bad_cast
+ %5 = landingpad { ptr, i32 }
+ catch ptr @_ZTISt8bad_cast
+ %6 = extractvalue { ptr, i32 } %5, 0
+ store ptr %6, ptr %exn.slot, align 8
+ %7 = extractvalue { ptr, i32 } %5, 1
+ store i32 %7, ptr %ehselector.slot, align 4
+ br label %catch.dispatch
+
+catch.dispatch: ; preds = %lpad
+ %sel = load i32, ptr %ehselector.slot, align 4
+ %8 = call i32 @llvm.eh.typeid.for(ptr @_ZTISt8bad_cast) #7
+ %matches = icmp eq i32 %sel, %8
+ br i1 %matches, label %catch, label %eh.resume
+
+catch: ; preds = %catch.dispatch
+ %exn = load ptr, ptr %exn.slot, align 8
+ %9 = call ptr @__cxa_begin_catch(ptr %exn) #7
+ store ptr %9, ptr %0, align 8
+ store i1 true, ptr %retval, align 1
+ call void @__cxa_end_catch()
+ br label %return
+
+try.cont: ; preds = %dynamic_cast.end
+ store i1 false, ptr %retval, align 1
+ br label %return
+
+return: ; preds = %try.cont, %catch
+ %10 = load i1, ptr %retval, align 1
+ ret i1 %10
+
+eh.resume: ; preds = %catch.dispatch
+ %exn1 = load ptr, ptr %exn.slot, align 8
+ %sel2 = load i32, ptr %ehselector.slot, align 4
+ %lpad.val = insertvalue { ptr, i32 } poison, ptr %exn1, 0
+ %lpad.val3 = insertvalue { ptr, i32 } %lpad.val, i32 %sel2, 1
+ resume { ptr, i32 } %lpad.val3
+}
+
+; Function Attrs: nounwind memory(read)
+declare dso_local ptr @__dynamic_cast(ptr, ptr, ptr, i64) #4
+declare dso_local void @__cxa_bad_cast()
+; Function Attrs: nounwind memory(none)
+declare i32 @llvm.eh.typeid.for(ptr) #2
+declare dso_local ptr @__cxa_begin_catch(ptr)
+declare dso_local void @__cxa_end_catch()
+declare dso_local i32 @__gxx_personality_sj0(...)
``````````
</details>
https://github.com/llvm/llvm-project/pull/67264