[llvm] [hwasan] Add test for hwasan pass with fixed shadow (PR #89813)

Thurston Dang via llvm-commits llvm-commits at lists.llvm.org
Tue Apr 23 12:40:15 PDT 2024


https://github.com/thurstond created https://github.com/llvm/llvm-project/pull/89813

This test records the current behavior of HWASan, which does not yet make use of the fixed-shadow intrinsics introduced in https://github.com/llvm/llvm-project/commit/365bddf634993d5ea357e9715d8aacd7ee40c4b5

It is intended to be updated by the follow-up work "Optimize outlined memaccess for fixed shadow on Aarch64" (https://github.com/llvm/llvm-project/pull/88544).
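For reference, with -hwasan-mapping-offset=4398046511104 the pass currently pins the shadow base in a register via an empty inline-asm copy of the constant and passes it to the ordinary check intrinsic, as the CHECK lines in the patch show:

    %.hwasan.shadow = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
    call void @llvm.hwasan.check.memaccess.shortgranules(ptr %.hwasan.shadow, ptr %a, i32 0)

A rough sketch of what the follow-up could emit instead, assuming the fixed-shadow variant from the commit above takes the constant offset as an immediate operand (the intrinsic name and operand order below are an assumption for illustration, not something this patch produces):

    ; hypothetical fixed-shadow form: shadow base folded into the intrinsic as an immediate
    call void @llvm.hwasan.check.memaccess.shortgranules.fixedshadow(ptr %a, i32 0, i64 4398046511104)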

From d7c8037d68957f0e884c0357f2ef244ed0b32812 Mon Sep 17 00:00:00 2001
From: Thurston Dang <thurston at google.com>
Date: Tue, 23 Apr 2024 19:36:12 +0000
Subject: [PATCH] [hwasan] Add test for hwasan pass with fixed shadow

This test records the current behavior of HWASan, which doesn't
utilize the fixed shadow intrinsics of https://github.com/llvm/llvm-project/commit/365bddf634993d5ea357e9715d8aacd7ee40c4b5

It is intended to be updated in future work ("Optimize outlined memaccess for fixed shadow on Aarch64"; https://github.com/llvm/llvm-project/pull/88544)
---
 .../HWAddressSanitizer/fixed-shadow.ll        | 229 ++++++++++++++++++
 1 file changed, 229 insertions(+)
 create mode 100644 llvm/test/Instrumentation/HWAddressSanitizer/fixed-shadow.ll

diff --git a/llvm/test/Instrumentation/HWAddressSanitizer/fixed-shadow.ll b/llvm/test/Instrumentation/HWAddressSanitizer/fixed-shadow.ll
new file mode 100644
index 00000000000000..05a927f0385ba0
--- /dev/null
+++ b/llvm/test/Instrumentation/HWAddressSanitizer/fixed-shadow.ll
@@ -0,0 +1,229 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
+; Test basic hwasan instrumentation with a fixed shadow mapping offset.
+;
+; RUN: opt < %s -passes=hwasan -hwasan-mapping-offset=4398046511104 -S | FileCheck %s
+
+
+target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
+target triple = "aarch64--linux-android9001"
+
+define i8 @test_load8(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i8 @test_load8
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0:[0-9]+]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 0)
+; CHECK-NEXT:    [[B:%.*]] = load i8, ptr [[A]], align 4
+; CHECK-NEXT:    ret i8 [[B]]
+;
+entry:
+  %b = load i8, ptr %a, align 4
+  ret i8 %b
+}
+
+define i16 @test_load16(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i16 @test_load16
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 1)
+; CHECK-NEXT:    [[B:%.*]] = load i16, ptr [[A]], align 4
+; CHECK-NEXT:    ret i16 [[B]]
+;
+entry:
+  %b = load i16, ptr %a, align 4
+  ret i16 %b
+}
+
+define i32 @test_load32(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i32 @test_load32
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 2)
+; CHECK-NEXT:    [[B:%.*]] = load i32, ptr [[A]], align 4
+; CHECK-NEXT:    ret i32 [[B]]
+;
+entry:
+  %b = load i32, ptr %a, align 4
+  ret i32 %b
+}
+
+define i64 @test_load64(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i64 @test_load64
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 3)
+; CHECK-NEXT:    [[B:%.*]] = load i64, ptr [[A]], align 8
+; CHECK-NEXT:    ret i64 [[B]]
+;
+entry:
+  %b = load i64, ptr %a, align 8
+  ret i64 %b
+}
+
+define i128 @test_load128(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i128 @test_load128
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 4)
+; CHECK-NEXT:    [[B:%.*]] = load i128, ptr [[A]], align 16
+; CHECK-NEXT:    ret i128 [[B]]
+;
+entry:
+  %b = load i128, ptr %a, align 16
+  ret i128 %b
+}
+
+define i40 @test_load40(ptr %a) sanitize_hwaddress {
+; CHECK-LABEL: define i40 @test_load40
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    [[TMP0:%.*]] = ptrtoint ptr [[A]] to i64
+; CHECK-NEXT:    call void @__hwasan_loadN(i64 [[TMP0]], i64 5)
+; CHECK-NEXT:    [[B:%.*]] = load i40, ptr [[A]], align 4
+; CHECK-NEXT:    ret i40 [[B]]
+;
+entry:
+  %b = load i40, ptr %a, align 4
+  ret i40 %b
+}
+
+define void @test_store8(ptr %a, i8 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store8
+; CHECK-SAME: (ptr [[A:%.*]], i8 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 16)
+; CHECK-NEXT:    store i8 [[B]], ptr [[A]], align 4
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i8 %b, ptr %a, align 4
+  ret void
+}
+
+define void @test_store16(ptr %a, i16 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store16
+; CHECK-SAME: (ptr [[A:%.*]], i16 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 17)
+; CHECK-NEXT:    store i16 [[B]], ptr [[A]], align 4
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i16 %b, ptr %a, align 4
+  ret void
+}
+
+define void @test_store32(ptr %a, i32 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store32
+; CHECK-SAME: (ptr [[A:%.*]], i32 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 18)
+; CHECK-NEXT:    store i32 [[B]], ptr [[A]], align 4
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i32 %b, ptr %a, align 4
+  ret void
+}
+
+define void @test_store64(ptr %a, i64 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store64
+; CHECK-SAME: (ptr [[A:%.*]], i64 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 19)
+; CHECK-NEXT:    store i64 [[B]], ptr [[A]], align 8
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i64 %b, ptr %a, align 8
+  ret void
+}
+
+define void @test_store128(ptr %a, i128 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store128
+; CHECK-SAME: (ptr [[A:%.*]], i128 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    call void @llvm.hwasan.check.memaccess.shortgranules(ptr [[DOTHWASAN_SHADOW]], ptr [[A]], i32 20)
+; CHECK-NEXT:    store i128 [[B]], ptr [[A]], align 16
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i128 %b, ptr %a, align 16
+  ret void
+}
+
+define void @test_store40(ptr %a, i40 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store40
+; CHECK-SAME: (ptr [[A:%.*]], i40 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    [[TMP0:%.*]] = ptrtoint ptr [[A]] to i64
+; CHECK-NEXT:    call void @__hwasan_storeN(i64 [[TMP0]], i64 5)
+; CHECK-NEXT:    store i40 [[B]], ptr [[A]], align 4
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i40 %b, ptr %a, align 4
+  ret void
+}
+
+define void @test_store_unaligned(ptr %a, i64 %b) sanitize_hwaddress {
+; CHECK-LABEL: define void @test_store_unaligned
+; CHECK-SAME: (ptr [[A:%.*]], i64 [[B:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[DOTHWASAN_SHADOW:%.*]] = call ptr asm "", "=r,0"(ptr inttoptr (i64 4398046511104 to ptr))
+; CHECK-NEXT:    [[TMP0:%.*]] = ptrtoint ptr [[A]] to i64
+; CHECK-NEXT:    call void @__hwasan_storeN(i64 [[TMP0]], i64 8)
+; CHECK-NEXT:    store i64 [[B]], ptr [[A]], align 4
+; CHECK-NEXT:    ret void
+;
+entry:
+  store i64 %b, ptr %a, align 4
+  ret void
+}
+
+define i8 @test_load_noattr(ptr %a) {
+; CHECK-LABEL: define i8 @test_load_noattr
+; CHECK-SAME: (ptr [[A:%.*]]) {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[B:%.*]] = load i8, ptr [[A]], align 4
+; CHECK-NEXT:    ret i8 [[B]]
+;
+entry:
+  %b = load i8, ptr %a, align 4
+  ret i8 %b
+}
+
+define i8 @test_load_notmyattr(ptr %a) sanitize_address {
+; CHECK-LABEL: define i8 @test_load_notmyattr
+; CHECK-SAME: (ptr [[A:%.*]]) #[[ATTR1:[0-9]+]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[B:%.*]] = load i8, ptr [[A]], align 4
+; CHECK-NEXT:    ret i8 [[B]]
+;
+entry:
+  %b = load i8, ptr %a, align 4
+  ret i8 %b
+}
+
+define i8 @test_load_addrspace(ptr addrspace(256) %a) sanitize_hwaddress {
+; CHECK-LABEL: define i8 @test_load_addrspace
+; CHECK-SAME: (ptr addrspace(256) [[A:%.*]]) #[[ATTR0]] {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[B:%.*]] = load i8, ptr addrspace(256) [[A]], align 4
+; CHECK-NEXT:    ret i8 [[B]]
+;
+entry:
+  %b = load i8, ptr addrspace(256) %a, align 4
+  ret i8 %b
+}


