[llvm] 8a557d8 - [AArch64][GlobalISel] Widen extloads before clamping during legalization

Jessica Paquette via llvm-commits llvm-commits at lists.llvm.org
Thu Aug 5 16:14:30 PDT 2021


Author: Jessica Paquette
Date: 2021-08-05T16:14:06-07:00
New Revision: 8a557d8311593627efd08d03178889971d5ae02b

URL: https://github.com/llvm/llvm-project/commit/8a557d8311593627efd08d03178889971d5ae02b
DIFF: https://github.com/llvm/llvm-project/commit/8a557d8311593627efd08d03178889971d5ae02b.diff

LOG: [AArch64][GlobalISel] Widen extloads before clamping during legalization

Allows us to avoid awkward type breakdowns on non-power-of-2 types like s88,
as in the other commits in this series.

Differential Revision: https://reviews.llvm.org/D107587
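
For context, a minimal sketch of the rule ordering this patch establishes
(abbreviated from the extending-load rules in AArch64LegalizerInfo.cpp; the
legalForTypesWithMemDesc entries and remaining clauses are elided here):

    getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD})
        // ...
        // Widen a non-pow2 destination first (e.g. s88 -> s128) ...
        .widenScalarToNextPow2(0)
        // ... then clamp it into the legal range (s128 -> s64), so any
        // narrowing the clamp triggers happens along pow2 boundaries.
        .clampScalar(0, s32, s64)
        // ...

With the old order, clamping an s88 destination directly would ask the
legalizer to narrow the extload into awkward non-pow2 pieces. With the new
order, the s88 tests added below legalize to a single s64
G_SEXTLOAD/G_ZEXTLOAD plus a shift or constant for the (dead) high bits.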

Added: 
    

Modified: 
    llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
    llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp b/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
index 0a13e21c1c03..80d9fc16ceea 100644
--- a/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
+++ b/llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
@@ -266,8 +266,8 @@ AArch64LegalizerInfo::AArch64LegalizerInfo(const AArch64Subtarget &ST)
                                  {s64, p0, s64, 8},
                                  {p0, p0, s64, 8},
                                  {v2s32, p0, s64, 8}})
-      .clampScalar(0, s32, s64)
       .widenScalarToNextPow2(0)
+      .clampScalar(0, s32, s64)
       // TODO: We could support sum-of-pow2's but the lowering code doesn't know
       //       how to do that yet.
       .unsupportedIfMemSizeNotPow2()

diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
index a3fabb573d11..43ba3d969882 100644
--- a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-extload.mir
@@ -1,50 +1,5 @@
 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -march=aarch64 -run-pass=legalizer %s -o - -verify-machineinstrs | FileCheck %s
---- |
-  target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
-  target triple = "aarch64"
-
-  define void @test_extload() {
-  entry:
-    ret void
-  }
-
-  define i64 @sext_i32_i64(i32* %ptr) {
-    %ld = load i32, i32* %ptr, align 4
-    %v = sext i32 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @sext_i16_i64(i16* %ptr) {
-    %ld = load i16, i16* %ptr, align 2
-    %v = sext i16 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @sext_i8_i64(i8* %ptr) {
-    %ld = load i8, i8* %ptr, align 1
-    %v = sext i8 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i32_i64(i32* %ptr) {
-    %ld = load i32, i32* %ptr, align 4
-    %v = zext i32 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i16_i64(i16* %ptr) {
-    %ld = load i16, i16* %ptr, align 2
-    %v = zext i16 %ld to i64
-    ret i64 %v
-  }
-
-  define i64 @zext_i8_i64(i8* %ptr) {
-    %ld = load i8, i8* %ptr, align 1
-    %v = zext i8 %ld to i64
-    ret i64 %v
-  }
-
 ...
 ---
 name:            test_extload
@@ -67,11 +22,11 @@ body:             |
 
     ; CHECK-LABEL: name: sext_i32_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s32) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s32))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s32) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s32))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -84,11 +39,11 @@ body:             |
 
     ; CHECK-LABEL: name: sext_i16_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s16) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s16))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s16) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s16))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -101,11 +56,11 @@ body:             |
 
     ; CHECK-LABEL: name: sext_i8_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s8) from %ir.ptr)
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load (s8))
     ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s8) from %ir.ptr)
+    %2:_(s64) = G_SEXTLOAD %0(p0) :: (load (s8))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -118,11 +73,11 @@ body:             |
 
     ; CHECK-LABEL: name: zext_i32_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s32) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s32))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s32) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s32))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -135,11 +90,11 @@ body:             |
 
     ; CHECK-LABEL: name: zext_i16_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s16) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s16))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s16) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s16))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
@@ -152,12 +107,48 @@ body:             |
 
     ; CHECK-LABEL: name: zext_i8_i64
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
-    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8) from %ir.ptr)
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD [[COPY]](p0) :: (load (s8))
     ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
     %0:_(p0) = COPY $x0
-    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s8) from %ir.ptr)
+    %2:_(s64) = G_ZEXTLOAD %0(p0) :: (load (s8))
     $x0 = COPY %2(s64)
     RET_ReallyLR implicit $x0
 
 ...
+---
+name:            zext_i8_i88
+body:             |
+  bb.1:
+    liveins: $x0
+    ; CHECK-LABEL: name: zext_i8_i88
+    ; CHECK: %ptr:_(p0) = COPY $x0
+    ; CHECK: [[ZEXTLOAD:%[0-9]+]]:_(s64) = G_ZEXTLOAD %ptr(p0) :: (load (s8))
+    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
+    ; CHECK: $x0 = COPY [[ZEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %ptr:_(p0) = COPY $x0
+    %load:_(s88) = G_ZEXTLOAD %ptr(p0) :: (load (s8))
+    %trunc:_(s64) = G_TRUNC %load
+    $x0 = COPY %trunc(s64)
+    RET_ReallyLR implicit $x0
+...
+---
+name:            sext_i8_i88
+body:             |
+  bb.1:
+    liveins: $x0
+    ; CHECK-LABEL: name: sext_i8_i88
+    ; CHECK: %ptr:_(p0) = COPY $x0
+    ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD %ptr(p0) :: (load (s8))
+    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
+    ; CHECK: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[SEXTLOAD]], [[C]](s64)
+    ; CHECK: $x0 = COPY [[SEXTLOAD]](s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %ptr:_(p0) = COPY $x0
+    %load:_(s88) = G_SEXTLOAD %ptr(p0) :: (load (s8))
+    %trunc:_(s64) = G_TRUNC %load
+    $x0 = COPY %trunc(s64)
+    RET_ReallyLR implicit $x0
+...

More information about the llvm-commits mailing list