[llvm] [PowerPC] ppc64-P9-vabsd.ll - update v16i8 abdu test now that it vectorizes in the middle-end (PR #154712)
Simon Pilgrim via llvm-commits
llvm-commits at lists.llvm.org
Tue Aug 26 23:56:24 PDT 2025
https://github.com/RKSimon updated https://github.com/llvm/llvm-project/pull/154712
From 8bb4b6b9eb2b6a44419ae1709044d8f2047eb92c Mon Sep 17 00:00:00 2001
From: Simon Pilgrim <llvm-dev at redking.me.uk>
Date: Thu, 21 Aug 2025 10:37:39 +0100
Subject: [PATCH] [PowerPC] ppc64-P9-vabsd.ll - update v16i8 abdu test now that
it vectorizes in the middle-end
The scalarized IR was written before improvements to SLP and the cost models ensured that the abs intrinsic is easily vectorizable.
opt -O3: https://zig.godbolt.org/z/39T65vh8M
Now that it vectorizes, we need a more useful llc test.
We could add the old test to Transforms/PhaseOrdering if there's any concern about future coverage?
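For reference, a minimal sketch of the vectorized form the middle-end now emits for this pattern - it mirrors the updated IR in the test below (the declare line is only added here so the snippet stands alone):

  declare <16 x i32> @llvm.abs.v16i32(<16 x i32>, i1)

  define <16 x i8> @sub_absv_8_ext(<16 x i8> %a, <16 x i8> %b) {
  entry:
    ; zext both inputs, take the signed abs of the wide difference, then truncate back to i8
    %0 = zext <16 x i8> %a to <16 x i32>
    %1 = zext <16 x i8> %b to <16 x i32>
    %2 = sub nsw <16 x i32> %0, %1
    %3 = tail call <16 x i32> @llvm.abs.v16i32(<16 x i32> %2, i1 true)
    %4 = trunc <16 x i32> %3 to <16 x i8>
    ret <16 x i8> %4
  }

llc then matches this to a single vabsdub on P9, and to vminub/vmaxub/vsububm on P7/P8.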
---
llvm/test/CodeGen/PowerPC/ppc64-P9-vabsd.ll | 1007 +------------------
1 file changed, 16 insertions(+), 991 deletions(-)
diff --git a/llvm/test/CodeGen/PowerPC/ppc64-P9-vabsd.ll b/llvm/test/CodeGen/PowerPC/ppc64-P9-vabsd.ll
index b540948b20f75..eaab932c41df7 100644
--- a/llvm/test/CodeGen/PowerPC/ppc64-P9-vabsd.ll
+++ b/llvm/test/CodeGen/PowerPC/ppc64-P9-vabsd.ll
@@ -190,1000 +190,25 @@ entry:
ret <8 x i16> %6
}
-; FIXME: This does not produce ISD::ABS. This does not even vectorize correctly!
-; This function should look like sub_absv_32 and sub_absv_16 except that the type is v16i8.
-; Function Attrs: norecurse nounwind readnone
define <16 x i8> @sub_absv_8_ext(<16 x i8> %a, <16 x i8> %b) local_unnamed_addr {
-; CHECK-PWR9-LE-LABEL: sub_absv_8_ext:
-; CHECK-PWR9-LE: # %bb.0: # %entry
-; CHECK-PWR9-LE-NEXT: li r3, 0
-; CHECK-PWR9-LE-NEXT: li r5, 2
-; CHECK-PWR9-LE-NEXT: li r4, 1
-; CHECK-PWR9-LE-NEXT: std r30, -16(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: vextubrx r6, r3, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r3, r3, v3
-; CHECK-PWR9-LE-NEXT: vextubrx r8, r5, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r5, r5, v3
-; CHECK-PWR9-LE-NEXT: std r29, -24(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: std r28, -32(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: std r27, -40(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: std r26, -48(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: std r25, -56(r1) # 8-byte Folded Spill
-; CHECK-PWR9-LE-NEXT: clrlwi r6, r6, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r3, r3, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r5, r5, 24
-; CHECK-PWR9-LE-NEXT: vextubrx r7, r4, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r4, r4, v3
-; CHECK-PWR9-LE-NEXT: sub r3, r6, r3
-; CHECK-PWR9-LE-NEXT: sub r6, r8, r5
-; CHECK-PWR9-LE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r4, r4, 24
-; CHECK-PWR9-LE-NEXT: sub r4, r7, r4
-; CHECK-PWR9-LE-NEXT: srawi r5, r3, 31
-; CHECK-PWR9-LE-NEXT: srawi r7, r4, 31
-; CHECK-PWR9-LE-NEXT: xor r3, r3, r5
-; CHECK-PWR9-LE-NEXT: xor r4, r4, r7
-; CHECK-PWR9-LE-NEXT: sub r5, r3, r5
-; CHECK-PWR9-LE-NEXT: srawi r3, r6, 31
-; CHECK-PWR9-LE-NEXT: sub r4, r4, r7
-; CHECK-PWR9-LE-NEXT: xor r6, r6, r3
-; CHECK-PWR9-LE-NEXT: sub r3, r6, r3
-; CHECK-PWR9-LE-NEXT: li r6, 3
-; CHECK-PWR9-LE-NEXT: vextubrx r7, r6, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r6, r6, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r6, r6, 24
-; CHECK-PWR9-LE-NEXT: sub r6, r7, r6
-; CHECK-PWR9-LE-NEXT: srawi r7, r6, 31
-; CHECK-PWR9-LE-NEXT: xor r6, r6, r7
-; CHECK-PWR9-LE-NEXT: sub r6, r6, r7
-; CHECK-PWR9-LE-NEXT: li r7, 4
-; CHECK-PWR9-LE-NEXT: vextubrx r8, r7, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r7, r7, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r6
-; CHECK-PWR9-LE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-LE-NEXT: sub r7, r8, r7
-; CHECK-PWR9-LE-NEXT: srawi r8, r7, 31
-; CHECK-PWR9-LE-NEXT: xor r7, r7, r8
-; CHECK-PWR9-LE-NEXT: sub r7, r7, r8
-; CHECK-PWR9-LE-NEXT: li r8, 5
-; CHECK-PWR9-LE-NEXT: vextubrx r9, r8, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r8, r8, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r9, r9, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-LE-NEXT: sub r8, r9, r8
-; CHECK-PWR9-LE-NEXT: srawi r9, r8, 31
-; CHECK-PWR9-LE-NEXT: xor r8, r8, r9
-; CHECK-PWR9-LE-NEXT: sub r8, r8, r9
-; CHECK-PWR9-LE-NEXT: li r9, 6
-; CHECK-PWR9-LE-NEXT: vextubrx r10, r9, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r9, r9, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r10, r10, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r9, r9, 24
-; CHECK-PWR9-LE-NEXT: sub r9, r10, r9
-; CHECK-PWR9-LE-NEXT: srawi r10, r9, 31
-; CHECK-PWR9-LE-NEXT: xor r9, r9, r10
-; CHECK-PWR9-LE-NEXT: sub r9, r9, r10
-; CHECK-PWR9-LE-NEXT: li r10, 7
-; CHECK-PWR9-LE-NEXT: vextubrx r11, r10, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r10, r10, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r10, r10, 24
-; CHECK-PWR9-LE-NEXT: sub r10, r11, r10
-; CHECK-PWR9-LE-NEXT: srawi r11, r10, 31
-; CHECK-PWR9-LE-NEXT: xor r10, r10, r11
-; CHECK-PWR9-LE-NEXT: sub r10, r10, r11
-; CHECK-PWR9-LE-NEXT: li r11, 8
-; CHECK-PWR9-LE-NEXT: vextubrx r12, r11, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r11, r11, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v5, r10
-; CHECK-PWR9-LE-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR9-LE-NEXT: sub r11, r12, r11
-; CHECK-PWR9-LE-NEXT: srawi r12, r11, 31
-; CHECK-PWR9-LE-NEXT: xor r11, r11, r12
-; CHECK-PWR9-LE-NEXT: sub r11, r11, r12
-; CHECK-PWR9-LE-NEXT: li r12, 9
-; CHECK-PWR9-LE-NEXT: vextubrx r0, r12, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r12, r12, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR9-LE-NEXT: sub r12, r0, r12
-; CHECK-PWR9-LE-NEXT: srawi r0, r12, 31
-; CHECK-PWR9-LE-NEXT: xor r12, r12, r0
-; CHECK-PWR9-LE-NEXT: sub r12, r12, r0
-; CHECK-PWR9-LE-NEXT: li r0, 10
-; CHECK-PWR9-LE-NEXT: vextubrx r30, r0, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r0, r0, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR9-LE-NEXT: sub r0, r30, r0
-; CHECK-PWR9-LE-NEXT: srawi r30, r0, 31
-; CHECK-PWR9-LE-NEXT: xor r0, r0, r30
-; CHECK-PWR9-LE-NEXT: sub r0, r0, r30
-; CHECK-PWR9-LE-NEXT: li r30, 11
-; CHECK-PWR9-LE-NEXT: vextubrx r29, r30, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r30, r30, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR9-LE-NEXT: sub r30, r29, r30
-; CHECK-PWR9-LE-NEXT: srawi r29, r30, 31
-; CHECK-PWR9-LE-NEXT: xor r30, r30, r29
-; CHECK-PWR9-LE-NEXT: sub r30, r30, r29
-; CHECK-PWR9-LE-NEXT: li r29, 12
-; CHECK-PWR9-LE-NEXT: vextubrx r28, r29, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r29, r29, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR9-LE-NEXT: sub r29, r28, r29
-; CHECK-PWR9-LE-NEXT: srawi r28, r29, 31
-; CHECK-PWR9-LE-NEXT: xor r29, r29, r28
-; CHECK-PWR9-LE-NEXT: sub r29, r29, r28
-; CHECK-PWR9-LE-NEXT: li r28, 13
-; CHECK-PWR9-LE-NEXT: vextubrx r27, r28, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r28, r28, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR9-LE-NEXT: sub r28, r27, r28
-; CHECK-PWR9-LE-NEXT: srawi r27, r28, 31
-; CHECK-PWR9-LE-NEXT: xor r28, r28, r27
-; CHECK-PWR9-LE-NEXT: sub r28, r28, r27
-; CHECK-PWR9-LE-NEXT: li r27, 14
-; CHECK-PWR9-LE-NEXT: vextubrx r26, r27, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r27, r27, v3
-; CHECK-PWR9-LE-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR9-LE-NEXT: sub r27, r26, r27
-; CHECK-PWR9-LE-NEXT: srawi r26, r27, 31
-; CHECK-PWR9-LE-NEXT: xor r27, r27, r26
-; CHECK-PWR9-LE-NEXT: sub r27, r27, r26
-; CHECK-PWR9-LE-NEXT: li r26, 15
-; CHECK-PWR9-LE-NEXT: vextubrx r25, r26, v2
-; CHECK-PWR9-LE-NEXT: vextubrx r26, r26, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v2, r5
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r4
-; CHECK-PWR9-LE-NEXT: vmrghb v2, v3, v2
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r3
-; CHECK-PWR9-LE-NEXT: clrlwi r25, r25, 24
-; CHECK-PWR9-LE-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR9-LE-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r8
-; CHECK-PWR9-LE-NEXT: sub r26, r25, r26
-; CHECK-PWR9-LE-NEXT: vmrglh v2, v3, v2
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r7
-; CHECK-PWR9-LE-NEXT: srawi r25, r26, 31
-; CHECK-PWR9-LE-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r9
-; CHECK-PWR9-LE-NEXT: xor r26, r26, r25
-; CHECK-PWR9-LE-NEXT: vmrghb v4, v5, v4
-; CHECK-PWR9-LE-NEXT: sub r26, r26, r25
-; CHECK-PWR9-LE-NEXT: ld r25, -56(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: mtvsrd v5, r26
-; CHECK-PWR9-LE-NEXT: ld r26, -48(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: vmrglh v3, v4, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r30
-; CHECK-PWR9-LE-NEXT: ld r30, -16(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: xxmrglw vs0, v3, v2
-; CHECK-PWR9-LE-NEXT: mtvsrd v2, r11
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r12
-; CHECK-PWR9-LE-NEXT: vmrghb v2, v3, v2
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r0
-; CHECK-PWR9-LE-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r28
-; CHECK-PWR9-LE-NEXT: ld r28, -32(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: vmrglh v2, v3, v2
-; CHECK-PWR9-LE-NEXT: mtvsrd v3, r29
-; CHECK-PWR9-LE-NEXT: ld r29, -24(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR9-LE-NEXT: mtvsrd v4, r27
-; CHECK-PWR9-LE-NEXT: ld r27, -40(r1) # 8-byte Folded Reload
-; CHECK-PWR9-LE-NEXT: vmrghb v4, v5, v4
-; CHECK-PWR9-LE-NEXT: vmrglh v3, v4, v3
-; CHECK-PWR9-LE-NEXT: xxmrglw vs1, v3, v2
-; CHECK-PWR9-LE-NEXT: xxmrgld v2, vs1, vs0
-; CHECK-PWR9-LE-NEXT: blr
-;
-; CHECK-PWR9-BE-LABEL: sub_absv_8_ext:
-; CHECK-PWR9-BE: # %bb.0: # %entry
-; CHECK-PWR9-BE-NEXT: li r3, 0
-; CHECK-PWR9-BE-NEXT: li r4, 1
-; CHECK-PWR9-BE-NEXT: li r5, 2
-; CHECK-PWR9-BE-NEXT: std r30, -16(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: vextublx r6, r3, v2
-; CHECK-PWR9-BE-NEXT: vextublx r3, r3, v3
-; CHECK-PWR9-BE-NEXT: vextublx r7, r4, v2
-; CHECK-PWR9-BE-NEXT: vextublx r4, r4, v3
-; CHECK-PWR9-BE-NEXT: std r29, -24(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: std r28, -32(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: std r27, -40(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: std r26, -48(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: std r25, -56(r1) # 8-byte Folded Spill
-; CHECK-PWR9-BE-NEXT: clrlwi r6, r6, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r3, r3, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r4, r4, 24
-; CHECK-PWR9-BE-NEXT: vextublx r8, r5, v2
-; CHECK-PWR9-BE-NEXT: vextublx r5, r5, v3
-; CHECK-PWR9-BE-NEXT: sub r3, r6, r3
-; CHECK-PWR9-BE-NEXT: sub r4, r7, r4
-; CHECK-PWR9-BE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r5, r5, 24
-; CHECK-PWR9-BE-NEXT: sub r5, r8, r5
-; CHECK-PWR9-BE-NEXT: srawi r6, r3, 31
-; CHECK-PWR9-BE-NEXT: srawi r7, r4, 31
-; CHECK-PWR9-BE-NEXT: srawi r8, r5, 31
-; CHECK-PWR9-BE-NEXT: xor r3, r3, r6
-; CHECK-PWR9-BE-NEXT: xor r4, r4, r7
-; CHECK-PWR9-BE-NEXT: xor r5, r5, r8
-; CHECK-PWR9-BE-NEXT: sub r3, r3, r6
-; CHECK-PWR9-BE-NEXT: li r6, 3
-; CHECK-PWR9-BE-NEXT: sub r4, r4, r7
-; CHECK-PWR9-BE-NEXT: sub r5, r5, r8
-; CHECK-PWR9-BE-NEXT: vextublx r7, r6, v2
-; CHECK-PWR9-BE-NEXT: vextublx r6, r6, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r6, r6, 24
-; CHECK-PWR9-BE-NEXT: sub r6, r7, r6
-; CHECK-PWR9-BE-NEXT: srawi r7, r6, 31
-; CHECK-PWR9-BE-NEXT: xor r6, r6, r7
-; CHECK-PWR9-BE-NEXT: sub r6, r6, r7
-; CHECK-PWR9-BE-NEXT: li r7, 4
-; CHECK-PWR9-BE-NEXT: vextublx r8, r7, v2
-; CHECK-PWR9-BE-NEXT: vextublx r7, r7, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR9-BE-NEXT: sub r7, r8, r7
-; CHECK-PWR9-BE-NEXT: srawi r8, r7, 31
-; CHECK-PWR9-BE-NEXT: xor r7, r7, r8
-; CHECK-PWR9-BE-NEXT: sub r7, r7, r8
-; CHECK-PWR9-BE-NEXT: li r8, 5
-; CHECK-PWR9-BE-NEXT: vextublx r9, r8, v2
-; CHECK-PWR9-BE-NEXT: vextublx r8, r8, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r9, r9, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR9-BE-NEXT: sub r8, r9, r8
-; CHECK-PWR9-BE-NEXT: srawi r9, r8, 31
-; CHECK-PWR9-BE-NEXT: xor r8, r8, r9
-; CHECK-PWR9-BE-NEXT: sub r8, r8, r9
-; CHECK-PWR9-BE-NEXT: li r9, 6
-; CHECK-PWR9-BE-NEXT: vextublx r10, r9, v2
-; CHECK-PWR9-BE-NEXT: vextublx r9, r9, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r10, r10, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r9, r9, 24
-; CHECK-PWR9-BE-NEXT: sub r9, r10, r9
-; CHECK-PWR9-BE-NEXT: srawi r10, r9, 31
-; CHECK-PWR9-BE-NEXT: xor r9, r9, r10
-; CHECK-PWR9-BE-NEXT: sub r9, r9, r10
-; CHECK-PWR9-BE-NEXT: li r10, 7
-; CHECK-PWR9-BE-NEXT: vextublx r11, r10, v2
-; CHECK-PWR9-BE-NEXT: vextublx r10, r10, v3
-; CHECK-PWR9-BE-NEXT: mtfprwz f2, r9
-; CHECK-PWR9-BE-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r10, r10, 24
-; CHECK-PWR9-BE-NEXT: sub r10, r11, r10
-; CHECK-PWR9-BE-NEXT: srawi r11, r10, 31
-; CHECK-PWR9-BE-NEXT: xor r10, r10, r11
-; CHECK-PWR9-BE-NEXT: sub r10, r10, r11
-; CHECK-PWR9-BE-NEXT: li r11, 8
-; CHECK-PWR9-BE-NEXT: vextublx r12, r11, v2
-; CHECK-PWR9-BE-NEXT: vextublx r11, r11, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR9-BE-NEXT: sub r11, r12, r11
-; CHECK-PWR9-BE-NEXT: srawi r12, r11, 31
-; CHECK-PWR9-BE-NEXT: xor r11, r11, r12
-; CHECK-PWR9-BE-NEXT: sub r11, r11, r12
-; CHECK-PWR9-BE-NEXT: li r12, 9
-; CHECK-PWR9-BE-NEXT: vextublx r0, r12, v2
-; CHECK-PWR9-BE-NEXT: vextublx r12, r12, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR9-BE-NEXT: sub r12, r0, r12
-; CHECK-PWR9-BE-NEXT: srawi r0, r12, 31
-; CHECK-PWR9-BE-NEXT: xor r12, r12, r0
-; CHECK-PWR9-BE-NEXT: sub r12, r12, r0
-; CHECK-PWR9-BE-NEXT: li r0, 10
-; CHECK-PWR9-BE-NEXT: vextublx r30, r0, v2
-; CHECK-PWR9-BE-NEXT: vextublx r0, r0, v3
-; CHECK-PWR9-BE-NEXT: mtvsrwz v4, r12
-; CHECK-PWR9-BE-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR9-BE-NEXT: sub r0, r30, r0
-; CHECK-PWR9-BE-NEXT: srawi r30, r0, 31
-; CHECK-PWR9-BE-NEXT: xor r0, r0, r30
-; CHECK-PWR9-BE-NEXT: sub r0, r0, r30
-; CHECK-PWR9-BE-NEXT: li r30, 11
-; CHECK-PWR9-BE-NEXT: vextublx r29, r30, v2
-; CHECK-PWR9-BE-NEXT: vextublx r30, r30, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR9-BE-NEXT: sub r30, r29, r30
-; CHECK-PWR9-BE-NEXT: srawi r29, r30, 31
-; CHECK-PWR9-BE-NEXT: xor r30, r30, r29
-; CHECK-PWR9-BE-NEXT: sub r30, r30, r29
-; CHECK-PWR9-BE-NEXT: li r29, 12
-; CHECK-PWR9-BE-NEXT: vextublx r28, r29, v2
-; CHECK-PWR9-BE-NEXT: vextublx r29, r29, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR9-BE-NEXT: sub r29, r28, r29
-; CHECK-PWR9-BE-NEXT: srawi r28, r29, 31
-; CHECK-PWR9-BE-NEXT: xor r29, r29, r28
-; CHECK-PWR9-BE-NEXT: sub r29, r29, r28
-; CHECK-PWR9-BE-NEXT: li r28, 13
-; CHECK-PWR9-BE-NEXT: vextublx r27, r28, v2
-; CHECK-PWR9-BE-NEXT: vextublx r28, r28, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR9-BE-NEXT: sub r28, r27, r28
-; CHECK-PWR9-BE-NEXT: srawi r27, r28, 31
-; CHECK-PWR9-BE-NEXT: xor r28, r28, r27
-; CHECK-PWR9-BE-NEXT: sub r28, r28, r27
-; CHECK-PWR9-BE-NEXT: li r27, 14
-; CHECK-PWR9-BE-NEXT: vextublx r26, r27, v2
-; CHECK-PWR9-BE-NEXT: vextublx r27, r27, v3
-; CHECK-PWR9-BE-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR9-BE-NEXT: sub r27, r26, r27
-; CHECK-PWR9-BE-NEXT: srawi r26, r27, 31
-; CHECK-PWR9-BE-NEXT: xor r27, r27, r26
-; CHECK-PWR9-BE-NEXT: sub r27, r27, r26
-; CHECK-PWR9-BE-NEXT: li r26, 15
-; CHECK-PWR9-BE-NEXT: vextublx r25, r26, v2
-; CHECK-PWR9-BE-NEXT: vextublx r26, r26, v3
-; CHECK-PWR9-BE-NEXT: mtfprwz f0, r27
-; CHECK-PWR9-BE-NEXT: addis r27, r2, .LCPI9_0 at toc@ha
-; CHECK-PWR9-BE-NEXT: mtvsrwz v3, r28
-; CHECK-PWR9-BE-NEXT: ld r28, -32(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: addi r27, r27, .LCPI9_0 at toc@l
-; CHECK-PWR9-BE-NEXT: clrlwi r25, r25, 24
-; CHECK-PWR9-BE-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR9-BE-NEXT: lxv vs1, 0(r27)
-; CHECK-PWR9-BE-NEXT: ld r27, -40(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: sub r26, r25, r26
-; CHECK-PWR9-BE-NEXT: srawi r25, r26, 31
-; CHECK-PWR9-BE-NEXT: xor r26, r26, r25
-; CHECK-PWR9-BE-NEXT: sub r26, r26, r25
-; CHECK-PWR9-BE-NEXT: ld r25, -56(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: mtvsrwz v2, r26
-; CHECK-PWR9-BE-NEXT: ld r26, -48(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: xxperm v2, vs0, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f0, r29
-; CHECK-PWR9-BE-NEXT: ld r29, -24(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: xxperm v3, vs0, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f0, r0
-; CHECK-PWR9-BE-NEXT: vmrghh v2, v3, v2
-; CHECK-PWR9-BE-NEXT: mtvsrwz v3, r30
-; CHECK-PWR9-BE-NEXT: ld r30, -16(r1) # 8-byte Folded Reload
-; CHECK-PWR9-BE-NEXT: xxperm v3, vs0, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f0, r11
-; CHECK-PWR9-BE-NEXT: xxperm v4, vs0, vs1
-; CHECK-PWR9-BE-NEXT: vmrghh v3, v4, v3
-; CHECK-PWR9-BE-NEXT: mtvsrwz v4, r4
-; CHECK-PWR9-BE-NEXT: xxmrghw vs0, v3, v2
-; CHECK-PWR9-BE-NEXT: mtvsrwz v2, r10
-; CHECK-PWR9-BE-NEXT: mtvsrwz v3, r8
-; CHECK-PWR9-BE-NEXT: xxperm v2, vs2, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f2, r7
-; CHECK-PWR9-BE-NEXT: xxperm v3, vs2, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f2, r5
-; CHECK-PWR9-BE-NEXT: vmrghh v2, v3, v2
-; CHECK-PWR9-BE-NEXT: mtvsrwz v3, r6
-; CHECK-PWR9-BE-NEXT: xxperm v3, vs2, vs1
-; CHECK-PWR9-BE-NEXT: mtfprwz f2, r3
-; CHECK-PWR9-BE-NEXT: xxperm v4, vs2, vs1
-; CHECK-PWR9-BE-NEXT: vmrghh v3, v4, v3
-; CHECK-PWR9-BE-NEXT: xxmrghw vs1, v3, v2
-; CHECK-PWR9-BE-NEXT: xxmrghd v2, vs1, vs0
-; CHECK-PWR9-BE-NEXT: blr
-;
-; CHECK-PWR8-LABEL: sub_absv_8_ext:
-; CHECK-PWR8: # %bb.0: # %entry
-; CHECK-PWR8-NEXT: xxswapd vs0, v2
-; CHECK-PWR8-NEXT: xxswapd vs1, v3
-; CHECK-PWR8-NEXT: std r30, -16(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: std r28, -32(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: std r29, -24(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: std r26, -48(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: mffprd r11, f0
-; CHECK-PWR8-NEXT: mffprd r8, f1
-; CHECK-PWR8-NEXT: std r27, -40(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: std r25, -56(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: clrldi r3, r11, 56
-; CHECK-PWR8-NEXT: clrldi r4, r8, 56
-; CHECK-PWR8-NEXT: rldicl r5, r11, 56, 56
-; CHECK-PWR8-NEXT: rldicl r6, r8, 56, 56
-; CHECK-PWR8-NEXT: rldicl r7, r11, 48, 56
-; CHECK-PWR8-NEXT: rldicl r9, r8, 48, 56
-; CHECK-PWR8-NEXT: rldicl r0, r11, 32, 56
-; CHECK-PWR8-NEXT: rldicl r30, r8, 32, 56
-; CHECK-PWR8-NEXT: rldicl r29, r11, 24, 56
-; CHECK-PWR8-NEXT: rldicl r28, r8, 24, 56
-; CHECK-PWR8-NEXT: rldicl r10, r11, 40, 56
-; CHECK-PWR8-NEXT: rldicl r12, r8, 40, 56
-; CHECK-PWR8-NEXT: rldicl r27, r11, 16, 56
-; CHECK-PWR8-NEXT: rldicl r11, r11, 8, 56
-; CHECK-PWR8-NEXT: std r24, -64(r1) # 8-byte Folded Spill
-; CHECK-PWR8-NEXT: clrlwi r3, r3, 24
-; CHECK-PWR8-NEXT: clrlwi r4, r4, 24
-; CHECK-PWR8-NEXT: clrlwi r5, r5, 24
-; CHECK-PWR8-NEXT: clrlwi r6, r6, 24
-; CHECK-PWR8-NEXT: clrlwi r7, r7, 24
-; CHECK-PWR8-NEXT: clrlwi r9, r9, 24
-; CHECK-PWR8-NEXT: sub r3, r3, r4
-; CHECK-PWR8-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR8-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR8-NEXT: sub r4, r5, r6
-; CHECK-PWR8-NEXT: sub r5, r7, r9
-; CHECK-PWR8-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR8-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR8-NEXT: sub r7, r0, r30
-; CHECK-PWR8-NEXT: sub r9, r29, r28
-; CHECK-PWR8-NEXT: clrlwi r10, r10, 24
-; CHECK-PWR8-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR8-NEXT: sub r6, r10, r12
-; CHECK-PWR8-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR8-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR8-NEXT: srawi r0, r5, 31
-; CHECK-PWR8-NEXT: srawi r29, r7, 31
-; CHECK-PWR8-NEXT: srawi r12, r4, 31
-; CHECK-PWR8-NEXT: srawi r28, r9, 31
-; CHECK-PWR8-NEXT: srawi r30, r6, 31
-; CHECK-PWR8-NEXT: srawi r10, r3, 31
-; CHECK-PWR8-NEXT: xor r5, r5, r0
-; CHECK-PWR8-NEXT: xor r26, r7, r29
-; CHECK-PWR8-NEXT: sub r7, r5, r0
-; CHECK-PWR8-NEXT: rldicl r5, r8, 16, 56
-; CHECK-PWR8-NEXT: rldicl r8, r8, 8, 56
-; CHECK-PWR8-NEXT: xor r4, r4, r12
-; CHECK-PWR8-NEXT: xor r25, r9, r28
-; CHECK-PWR8-NEXT: sub r9, r4, r12
-; CHECK-PWR8-NEXT: sub r4, r26, r29
-; CHECK-PWR8-NEXT: mtvsrd v1, r9
-; CHECK-PWR8-NEXT: clrlwi r5, r5, 24
-; CHECK-PWR8-NEXT: sub r5, r27, r5
-; CHECK-PWR8-NEXT: clrlwi r8, r8, 24
-; CHECK-PWR8-NEXT: sub r8, r11, r8
-; CHECK-PWR8-NEXT: xor r6, r6, r30
-; CHECK-PWR8-NEXT: sub r6, r6, r30
-; CHECK-PWR8-NEXT: xor r3, r3, r10
-; CHECK-PWR8-NEXT: sub r10, r3, r10
-; CHECK-PWR8-NEXT: sub r3, r25, r28
-; CHECK-PWR8-NEXT: mtvsrd v6, r6
-; CHECK-PWR8-NEXT: mtvsrd v7, r3
-; CHECK-PWR8-NEXT: srawi r12, r5, 31
-; CHECK-PWR8-NEXT: srawi r11, r8, 31
-; CHECK-PWR8-NEXT: xor r5, r5, r12
-; CHECK-PWR8-NEXT: xor r8, r8, r11
-; CHECK-PWR8-NEXT: sub r5, r5, r12
-; CHECK-PWR8-NEXT: sub r8, r8, r11
-; CHECK-PWR8-NEXT: mfvsrd r11, v2
-; CHECK-PWR8-NEXT: mfvsrd r12, v3
-; CHECK-PWR8-NEXT: mtvsrd v8, r8
-; CHECK-PWR8-NEXT: clrldi r0, r11, 56
-; CHECK-PWR8-NEXT: clrldi r30, r12, 56
-; CHECK-PWR8-NEXT: rldicl r29, r12, 56, 56
-; CHECK-PWR8-NEXT: rldicl r28, r12, 48, 56
-; CHECK-PWR8-NEXT: rldicl r27, r12, 40, 56
-; CHECK-PWR8-NEXT: rldicl r26, r12, 32, 56
-; CHECK-PWR8-NEXT: rldicl r25, r12, 24, 56
-; CHECK-PWR8-NEXT: rldicl r24, r12, 16, 56
-; CHECK-PWR8-NEXT: rldicl r12, r12, 8, 56
-; CHECK-PWR8-NEXT: clrlwi r0, r0, 24
-; CHECK-PWR8-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR8-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR8-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR8-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR8-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR8-NEXT: clrlwi r25, r25, 24
-; CHECK-PWR8-NEXT: clrlwi r24, r24, 24
-; CHECK-PWR8-NEXT: clrlwi r12, r12, 24
-; CHECK-PWR8-NEXT: sub r0, r0, r30
-; CHECK-PWR8-NEXT: srawi r30, r0, 31
-; CHECK-PWR8-NEXT: xor r0, r0, r30
-; CHECK-PWR8-NEXT: sub r0, r0, r30
-; CHECK-PWR8-NEXT: rldicl r30, r11, 56, 56
-; CHECK-PWR8-NEXT: clrlwi r30, r30, 24
-; CHECK-PWR8-NEXT: mtvsrd v2, r0
-; CHECK-PWR8-NEXT: sub r30, r30, r29
-; CHECK-PWR8-NEXT: srawi r29, r30, 31
-; CHECK-PWR8-NEXT: xor r30, r30, r29
-; CHECK-PWR8-NEXT: sub r30, r30, r29
-; CHECK-PWR8-NEXT: rldicl r29, r11, 48, 56
-; CHECK-PWR8-NEXT: clrlwi r29, r29, 24
-; CHECK-PWR8-NEXT: mtvsrd v3, r30
-; CHECK-PWR8-NEXT: ld r30, -16(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: sub r29, r29, r28
-; CHECK-PWR8-NEXT: srawi r28, r29, 31
-; CHECK-PWR8-NEXT: xor r29, r29, r28
-; CHECK-PWR8-NEXT: sub r29, r29, r28
-; CHECK-PWR8-NEXT: rldicl r28, r11, 40, 56
-; CHECK-PWR8-NEXT: clrlwi r28, r28, 24
-; CHECK-PWR8-NEXT: sub r28, r28, r27
-; CHECK-PWR8-NEXT: srawi r27, r28, 31
-; CHECK-PWR8-NEXT: xor r28, r28, r27
-; CHECK-PWR8-NEXT: sub r28, r28, r27
-; CHECK-PWR8-NEXT: rldicl r27, r11, 32, 56
-; CHECK-PWR8-NEXT: clrlwi r27, r27, 24
-; CHECK-PWR8-NEXT: mtvsrd v4, r28
-; CHECK-PWR8-NEXT: ld r28, -32(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: sub r27, r27, r26
-; CHECK-PWR8-NEXT: srawi r26, r27, 31
-; CHECK-PWR8-NEXT: xor r27, r27, r26
-; CHECK-PWR8-NEXT: sub r27, r27, r26
-; CHECK-PWR8-NEXT: rldicl r26, r11, 24, 56
-; CHECK-PWR8-NEXT: clrlwi r26, r26, 24
-; CHECK-PWR8-NEXT: sub r26, r26, r25
-; CHECK-PWR8-NEXT: srawi r25, r26, 31
-; CHECK-PWR8-NEXT: xor r26, r26, r25
-; CHECK-PWR8-NEXT: sub r26, r26, r25
-; CHECK-PWR8-NEXT: rldicl r25, r11, 16, 56
-; CHECK-PWR8-NEXT: rldicl r11, r11, 8, 56
-; CHECK-PWR8-NEXT: clrlwi r25, r25, 24
-; CHECK-PWR8-NEXT: clrlwi r11, r11, 24
-; CHECK-PWR8-NEXT: mtvsrd v5, r26
-; CHECK-PWR8-NEXT: ld r26, -48(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: sub r25, r25, r24
-; CHECK-PWR8-NEXT: sub r11, r11, r12
-; CHECK-PWR8-NEXT: srawi r24, r25, 31
-; CHECK-PWR8-NEXT: srawi r12, r11, 31
-; CHECK-PWR8-NEXT: xor r25, r25, r24
-; CHECK-PWR8-NEXT: xor r11, r11, r12
-; CHECK-PWR8-NEXT: sub r25, r25, r24
-; CHECK-PWR8-NEXT: sub r11, r11, r12
-; CHECK-PWR8-NEXT: ld r24, -64(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: mtvsrd v0, r11
-; CHECK-PWR8-NEXT: vmrghb v2, v3, v2
-; CHECK-PWR8-NEXT: mtvsrd v3, r29
-; CHECK-PWR8-NEXT: ld r29, -24(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR8-NEXT: mtvsrd v4, r27
-; CHECK-PWR8-NEXT: ld r27, -40(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: vmrglh v2, v3, v2
-; CHECK-PWR8-NEXT: vmrghb v4, v5, v4
-; CHECK-PWR8-NEXT: mtvsrd v5, r25
-; CHECK-PWR8-NEXT: ld r25, -56(r1) # 8-byte Folded Reload
-; CHECK-PWR8-NEXT: vmrghb v5, v0, v5
-; CHECK-PWR8-NEXT: mtvsrd v0, r10
-; CHECK-PWR8-NEXT: vmrglh v3, v5, v4
-; CHECK-PWR8-NEXT: xxmrglw vs0, v3, v2
-; CHECK-PWR8-NEXT: vmrghb v0, v1, v0
-; CHECK-PWR8-NEXT: mtvsrd v1, r7
-; CHECK-PWR8-NEXT: vmrghb v1, v6, v1
-; CHECK-PWR8-NEXT: mtvsrd v6, r4
-; CHECK-PWR8-NEXT: vmrglh v4, v1, v0
-; CHECK-PWR8-NEXT: vmrghb v6, v7, v6
-; CHECK-PWR8-NEXT: mtvsrd v7, r5
-; CHECK-PWR8-NEXT: vmrghb v7, v8, v7
-; CHECK-PWR8-NEXT: vmrglh v5, v7, v6
-; CHECK-PWR8-NEXT: xxmrglw vs1, v5, v4
-; CHECK-PWR8-NEXT: xxmrgld v2, vs0, vs1
-; CHECK-PWR8-NEXT: blr
+; CHECK-PWR9-LABEL: sub_absv_8_ext:
+; CHECK-PWR9: # %bb.0: # %entry
+; CHECK-PWR9-NEXT: vabsdub v2, v2, v3
+; CHECK-PWR9-NEXT: blr
;
-; CHECK-PWR7-LABEL: sub_absv_8_ext:
-; CHECK-PWR7: # %bb.0: # %entry
-; CHECK-PWR7-NEXT: stdu r1, -512(r1)
-; CHECK-PWR7-NEXT: .cfi_def_cfa_offset 512
-; CHECK-PWR7-NEXT: .cfi_offset r14, -144
-; CHECK-PWR7-NEXT: .cfi_offset r15, -136
-; CHECK-PWR7-NEXT: .cfi_offset r16, -128
-; CHECK-PWR7-NEXT: .cfi_offset r17, -120
-; CHECK-PWR7-NEXT: .cfi_offset r18, -112
-; CHECK-PWR7-NEXT: .cfi_offset r19, -104
-; CHECK-PWR7-NEXT: .cfi_offset r20, -96
-; CHECK-PWR7-NEXT: .cfi_offset r21, -88
-; CHECK-PWR7-NEXT: .cfi_offset r22, -80
-; CHECK-PWR7-NEXT: .cfi_offset r23, -72
-; CHECK-PWR7-NEXT: .cfi_offset r24, -64
-; CHECK-PWR7-NEXT: .cfi_offset r25, -56
-; CHECK-PWR7-NEXT: .cfi_offset r26, -48
-; CHECK-PWR7-NEXT: .cfi_offset r27, -40
-; CHECK-PWR7-NEXT: .cfi_offset r28, -32
-; CHECK-PWR7-NEXT: .cfi_offset r29, -24
-; CHECK-PWR7-NEXT: .cfi_offset r30, -16
-; CHECK-PWR7-NEXT: .cfi_offset r31, -8
-; CHECK-PWR7-NEXT: .cfi_offset r2, -152
-; CHECK-PWR7-NEXT: addi r3, r1, 320
-; CHECK-PWR7-NEXT: std r14, 368(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r15, 376(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r16, 384(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r17, 392(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r18, 400(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r19, 408(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r20, 416(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r21, 424(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r22, 432(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r23, 440(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r24, 448(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r25, 456(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r26, 464(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r27, 472(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r28, 480(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r29, 488(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r30, 496(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r31, 504(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: std r2, 360(r1) # 8-byte Folded Spill
-; CHECK-PWR7-NEXT: stxvw4x v2, 0, r3
-; CHECK-PWR7-NEXT: lbz r3, 320(r1)
-; CHECK-PWR7-NEXT: addi r4, r1, 336
-; CHECK-PWR7-NEXT: stw r3, 60(r1) # 4-byte Folded Spill
-; CHECK-PWR7-NEXT: stxvw4x v3, 0, r4
-; CHECK-PWR7-NEXT: lbz r15, 334(r1)
-; CHECK-PWR7-NEXT: lbz r14, 350(r1)
-; CHECK-PWR7-NEXT: lbz r31, 335(r1)
-; CHECK-PWR7-NEXT: lbz r2, 351(r1)
-; CHECK-PWR7-NEXT: sub r15, r15, r14
-; CHECK-PWR7-NEXT: sub r14, r31, r2
-; CHECK-PWR7-NEXT: srawi r2, r14, 31
-; CHECK-PWR7-NEXT: xor r14, r14, r2
-; CHECK-PWR7-NEXT: lbz r3, 333(r1)
-; CHECK-PWR7-NEXT: lbz r19, 331(r1)
-; CHECK-PWR7-NEXT: lbz r18, 347(r1)
-; CHECK-PWR7-NEXT: sub r19, r19, r18
-; CHECK-PWR7-NEXT: lbz r17, 332(r1)
-; CHECK-PWR7-NEXT: lbz r16, 348(r1)
-; CHECK-PWR7-NEXT: sub r17, r17, r16
-; CHECK-PWR7-NEXT: lbz r23, 329(r1)
-; CHECK-PWR7-NEXT: sub r14, r14, r2
-; CHECK-PWR7-NEXT: lbz r2, 349(r1)
-; CHECK-PWR7-NEXT: lbz r22, 345(r1)
-; CHECK-PWR7-NEXT: lbz r4, 336(r1)
-; CHECK-PWR7-NEXT: lbz r5, 321(r1)
-; CHECK-PWR7-NEXT: lbz r6, 337(r1)
-; CHECK-PWR7-NEXT: lbz r7, 322(r1)
-; CHECK-PWR7-NEXT: lbz r8, 338(r1)
-; CHECK-PWR7-NEXT: lbz r9, 323(r1)
-; CHECK-PWR7-NEXT: lbz r10, 339(r1)
-; CHECK-PWR7-NEXT: lbz r11, 324(r1)
-; CHECK-PWR7-NEXT: lbz r12, 340(r1)
-; CHECK-PWR7-NEXT: lbz r0, 325(r1)
-; CHECK-PWR7-NEXT: lbz r30, 341(r1)
-; CHECK-PWR7-NEXT: lbz r29, 326(r1)
-; CHECK-PWR7-NEXT: lbz r28, 342(r1)
-; CHECK-PWR7-NEXT: lbz r27, 327(r1)
-; CHECK-PWR7-NEXT: lbz r26, 343(r1)
-; CHECK-PWR7-NEXT: sub r3, r3, r2
-; CHECK-PWR7-NEXT: lbz r25, 328(r1)
-; CHECK-PWR7-NEXT: lbz r24, 344(r1)
-; CHECK-PWR7-NEXT: lbz r21, 330(r1)
-; CHECK-PWR7-NEXT: lbz r20, 346(r1)
-; CHECK-PWR7-NEXT: sub r5, r5, r6
-; CHECK-PWR7-NEXT: srawi r18, r3, 31
-; CHECK-PWR7-NEXT: sub r7, r7, r8
-; CHECK-PWR7-NEXT: sub r9, r9, r10
-; CHECK-PWR7-NEXT: sub r11, r11, r12
-; CHECK-PWR7-NEXT: sub r0, r0, r30
-; CHECK-PWR7-NEXT: sub r29, r29, r28
-; CHECK-PWR7-NEXT: sub r27, r27, r26
-; CHECK-PWR7-NEXT: sub r25, r25, r24
-; CHECK-PWR7-NEXT: srawi r31, r15, 31
-; CHECK-PWR7-NEXT: ld r2, 360(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: xor r3, r3, r18
-; CHECK-PWR7-NEXT: srawi r6, r5, 31
-; CHECK-PWR7-NEXT: srawi r8, r7, 31
-; CHECK-PWR7-NEXT: srawi r10, r9, 31
-; CHECK-PWR7-NEXT: srawi r12, r11, 31
-; CHECK-PWR7-NEXT: srawi r30, r0, 31
-; CHECK-PWR7-NEXT: sub r3, r3, r18
-; CHECK-PWR7-NEXT: srawi r18, r19, 31
-; CHECK-PWR7-NEXT: srawi r28, r29, 31
-; CHECK-PWR7-NEXT: ld r16, 384(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: sldi r3, r3, 56
-; CHECK-PWR7-NEXT: srawi r26, r27, 31
-; CHECK-PWR7-NEXT: srawi r24, r25, 31
-; CHECK-PWR7-NEXT: xor r19, r19, r18
-; CHECK-PWR7-NEXT: xor r15, r15, r31
-; CHECK-PWR7-NEXT: xor r5, r5, r6
-; CHECK-PWR7-NEXT: std r3, 272(r1)
-; CHECK-PWR7-NEXT: std r3, 280(r1)
-; CHECK-PWR7-NEXT: srawi r3, r17, 31
-; CHECK-PWR7-NEXT: sub r19, r19, r18
-; CHECK-PWR7-NEXT: xor r7, r7, r8
-; CHECK-PWR7-NEXT: sub r15, r15, r31
-; CHECK-PWR7-NEXT: xor r17, r17, r3
-; CHECK-PWR7-NEXT: xor r9, r9, r10
-; CHECK-PWR7-NEXT: xor r11, r11, r12
-; CHECK-PWR7-NEXT: xor r0, r0, r30
-; CHECK-PWR7-NEXT: xor r29, r29, r28
-; CHECK-PWR7-NEXT: xor r27, r27, r26
-; CHECK-PWR7-NEXT: sub r3, r17, r3
-; CHECK-PWR7-NEXT: xor r25, r25, r24
-; CHECK-PWR7-NEXT: sub r25, r25, r24
-; CHECK-PWR7-NEXT: sub r27, r27, r26
-; CHECK-PWR7-NEXT: sub r29, r29, r28
-; CHECK-PWR7-NEXT: sldi r3, r3, 56
-; CHECK-PWR7-NEXT: sub r0, r0, r30
-; CHECK-PWR7-NEXT: sub r11, r11, r12
-; CHECK-PWR7-NEXT: sub r9, r9, r10
-; CHECK-PWR7-NEXT: sub r7, r7, r8
-; CHECK-PWR7-NEXT: sub r5, r5, r6
-; CHECK-PWR7-NEXT: sldi r14, r14, 56
-; CHECK-PWR7-NEXT: sldi r15, r15, 56
-; CHECK-PWR7-NEXT: ld r31, 504(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r3, 256(r1)
-; CHECK-PWR7-NEXT: std r3, 264(r1)
-; CHECK-PWR7-NEXT: sldi r3, r19, 56
-; CHECK-PWR7-NEXT: sldi r25, r25, 56
-; CHECK-PWR7-NEXT: sldi r27, r27, 56
-; CHECK-PWR7-NEXT: std r3, 240(r1)
-; CHECK-PWR7-NEXT: std r3, 248(r1)
-; CHECK-PWR7-NEXT: sub r3, r23, r22
-; CHECK-PWR7-NEXT: srawi r23, r3, 31
-; CHECK-PWR7-NEXT: sub r22, r21, r20
-; CHECK-PWR7-NEXT: srawi r21, r22, 31
-; CHECK-PWR7-NEXT: sldi r29, r29, 56
-; CHECK-PWR7-NEXT: sldi r0, r0, 56
-; CHECK-PWR7-NEXT: sldi r11, r11, 56
-; CHECK-PWR7-NEXT: xor r3, r3, r23
-; CHECK-PWR7-NEXT: xor r22, r22, r21
-; CHECK-PWR7-NEXT: sldi r9, r9, 56
-; CHECK-PWR7-NEXT: sldi r7, r7, 56
-; CHECK-PWR7-NEXT: sldi r5, r5, 56
-; CHECK-PWR7-NEXT: ld r30, 496(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: ld r28, 480(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: sub r3, r3, r23
-; CHECK-PWR7-NEXT: sub r22, r22, r21
-; CHECK-PWR7-NEXT: std r14, 304(r1)
-; CHECK-PWR7-NEXT: ld r26, 464(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: sldi r3, r3, 56
-; CHECK-PWR7-NEXT: sldi r22, r22, 56
-; CHECK-PWR7-NEXT: ld r24, 448(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: ld r23, 440(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r14, 312(r1)
-; CHECK-PWR7-NEXT: std r15, 288(r1)
-; CHECK-PWR7-NEXT: std r3, 208(r1)
-; CHECK-PWR7-NEXT: std r3, 216(r1)
-; CHECK-PWR7-NEXT: lwz r3, 60(r1) # 4-byte Folded Reload
-; CHECK-PWR7-NEXT: std r15, 296(r1)
-; CHECK-PWR7-NEXT: ld r21, 424(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: ld r20, 416(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r22, 224(r1)
-; CHECK-PWR7-NEXT: std r22, 232(r1)
-; CHECK-PWR7-NEXT: sub r4, r3, r4
-; CHECK-PWR7-NEXT: std r25, 192(r1)
-; CHECK-PWR7-NEXT: ld r22, 432(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: ld r19, 408(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: srawi r3, r4, 31
-; CHECK-PWR7-NEXT: std r25, 200(r1)
-; CHECK-PWR7-NEXT: ld r25, 456(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r27, 176(r1)
-; CHECK-PWR7-NEXT: std r27, 184(r1)
-; CHECK-PWR7-NEXT: xor r4, r4, r3
-; CHECK-PWR7-NEXT: std r29, 160(r1)
-; CHECK-PWR7-NEXT: ld r27, 472(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r29, 168(r1)
-; CHECK-PWR7-NEXT: std r0, 144(r1)
-; CHECK-PWR7-NEXT: sub r3, r4, r3
-; CHECK-PWR7-NEXT: std r0, 152(r1)
-; CHECK-PWR7-NEXT: ld r29, 488(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: ld r18, 400(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: sldi r3, r3, 56
-; CHECK-PWR7-NEXT: std r11, 128(r1)
-; CHECK-PWR7-NEXT: ld r17, 392(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r11, 136(r1)
-; CHECK-PWR7-NEXT: std r9, 112(r1)
-; CHECK-PWR7-NEXT: std r3, 64(r1)
-; CHECK-PWR7-NEXT: std r3, 72(r1)
-; CHECK-PWR7-NEXT: addi r3, r1, 304
-; CHECK-PWR7-NEXT: std r9, 120(r1)
-; CHECK-PWR7-NEXT: ld r15, 376(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: std r7, 96(r1)
-; CHECK-PWR7-NEXT: std r7, 104(r1)
-; CHECK-PWR7-NEXT: std r5, 80(r1)
-; CHECK-PWR7-NEXT: std r5, 88(r1)
-; CHECK-PWR7-NEXT: lxvw4x v2, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 288
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 272
-; CHECK-PWR7-NEXT: ld r14, 368(r1) # 8-byte Folded Reload
-; CHECK-PWR7-NEXT: vmrghb v2, v3, v2
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 256
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 240
-; CHECK-PWR7-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR7-NEXT: vmrghh v2, v3, v2
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 224
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 208
-; CHECK-PWR7-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 192
-; CHECK-PWR7-NEXT: lxvw4x v5, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 176
-; CHECK-PWR7-NEXT: vmrghb v4, v5, v4
-; CHECK-PWR7-NEXT: vmrghh v3, v4, v3
-; CHECK-PWR7-NEXT: xxmrghw vs0, v3, v2
-; CHECK-PWR7-NEXT: lxvw4x v2, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 160
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 144
-; CHECK-PWR7-NEXT: vmrghb v2, v3, v2
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 128
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR7-NEXT: addi r3, r1, 112
-; CHECK-PWR7-NEXT: vmrghh v2, v3, v2
-; CHECK-PWR7-NEXT: lxvw4x v3, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 96
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 80
-; CHECK-PWR7-NEXT: vmrghb v3, v4, v3
-; CHECK-PWR7-NEXT: lxvw4x v4, 0, r3
-; CHECK-PWR7-NEXT: addi r3, r1, 64
-; CHECK-PWR7-NEXT: lxvw4x v5, 0, r3
-; CHECK-PWR7-NEXT: vmrghb v4, v5, v4
-; CHECK-PWR7-NEXT: vmrghh v3, v4, v3
-; CHECK-PWR7-NEXT: xxmrghw vs1, v3, v2
-; CHECK-PWR7-NEXT: xxmrghd v2, vs1, vs0
-; CHECK-PWR7-NEXT: addi r1, r1, 512
-; CHECK-PWR7-NEXT: blr
+; CHECK-PWR78-LABEL: sub_absv_8_ext:
+; CHECK-PWR78: # %bb.0: # %entry
+; CHECK-PWR78-NEXT: vminub v4, v2, v3
+; CHECK-PWR78-NEXT: vmaxub v2, v2, v3
+; CHECK-PWR78-NEXT: vsububm v2, v2, v4
+; CHECK-PWR78-NEXT: blr
entry:
- %vecext = extractelement <16 x i8> %a, i32 0
- %conv = zext i8 %vecext to i32
- %vecext1 = extractelement <16 x i8> %b, i32 0
- %conv2 = zext i8 %vecext1 to i32
- %sub = sub nsw i32 %conv, %conv2
- %ispos = icmp sgt i32 %sub, -1
- %neg = sub nsw i32 0, %sub
- %0 = select i1 %ispos, i32 %sub, i32 %neg
- %conv3 = trunc i32 %0 to i8
- %vecins = insertelement <16 x i8> <i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>, i8 %conv3, i32 0
- %vecext4 = extractelement <16 x i8> %a, i32 1
- %conv5 = zext i8 %vecext4 to i32
- %vecext6 = extractelement <16 x i8> %b, i32 1
- %conv7 = zext i8 %vecext6 to i32
- %sub8 = sub nsw i32 %conv5, %conv7
- %ispos171 = icmp sgt i32 %sub8, -1
- %neg172 = sub nsw i32 0, %sub8
- %1 = select i1 %ispos171, i32 %sub8, i32 %neg172
- %conv10 = trunc i32 %1 to i8
- %vecins11 = insertelement <16 x i8> %vecins, i8 %conv10, i32 1
- %vecext12 = extractelement <16 x i8> %a, i32 2
- %conv13 = zext i8 %vecext12 to i32
- %vecext14 = extractelement <16 x i8> %b, i32 2
- %conv15 = zext i8 %vecext14 to i32
- %sub16 = sub nsw i32 %conv13, %conv15
- %ispos173 = icmp sgt i32 %sub16, -1
- %neg174 = sub nsw i32 0, %sub16
- %2 = select i1 %ispos173, i32 %sub16, i32 %neg174
- %conv18 = trunc i32 %2 to i8
- %vecins19 = insertelement <16 x i8> %vecins11, i8 %conv18, i32 2
- %vecext20 = extractelement <16 x i8> %a, i32 3
- %conv21 = zext i8 %vecext20 to i32
- %vecext22 = extractelement <16 x i8> %b, i32 3
- %conv23 = zext i8 %vecext22 to i32
- %sub24 = sub nsw i32 %conv21, %conv23
- %ispos175 = icmp sgt i32 %sub24, -1
- %neg176 = sub nsw i32 0, %sub24
- %3 = select i1 %ispos175, i32 %sub24, i32 %neg176
- %conv26 = trunc i32 %3 to i8
- %vecins27 = insertelement <16 x i8> %vecins19, i8 %conv26, i32 3
- %vecext28 = extractelement <16 x i8> %a, i32 4
- %conv29 = zext i8 %vecext28 to i32
- %vecext30 = extractelement <16 x i8> %b, i32 4
- %conv31 = zext i8 %vecext30 to i32
- %sub32 = sub nsw i32 %conv29, %conv31
- %ispos177 = icmp sgt i32 %sub32, -1
- %neg178 = sub nsw i32 0, %sub32
- %4 = select i1 %ispos177, i32 %sub32, i32 %neg178
- %conv34 = trunc i32 %4 to i8
- %vecins35 = insertelement <16 x i8> %vecins27, i8 %conv34, i32 4
- %vecext36 = extractelement <16 x i8> %a, i32 5
- %conv37 = zext i8 %vecext36 to i32
- %vecext38 = extractelement <16 x i8> %b, i32 5
- %conv39 = zext i8 %vecext38 to i32
- %sub40 = sub nsw i32 %conv37, %conv39
- %ispos179 = icmp sgt i32 %sub40, -1
- %neg180 = sub nsw i32 0, %sub40
- %5 = select i1 %ispos179, i32 %sub40, i32 %neg180
- %conv42 = trunc i32 %5 to i8
- %vecins43 = insertelement <16 x i8> %vecins35, i8 %conv42, i32 5
- %vecext44 = extractelement <16 x i8> %a, i32 6
- %conv45 = zext i8 %vecext44 to i32
- %vecext46 = extractelement <16 x i8> %b, i32 6
- %conv47 = zext i8 %vecext46 to i32
- %sub48 = sub nsw i32 %conv45, %conv47
- %ispos181 = icmp sgt i32 %sub48, -1
- %neg182 = sub nsw i32 0, %sub48
- %6 = select i1 %ispos181, i32 %sub48, i32 %neg182
- %conv50 = trunc i32 %6 to i8
- %vecins51 = insertelement <16 x i8> %vecins43, i8 %conv50, i32 6
- %vecext52 = extractelement <16 x i8> %a, i32 7
- %conv53 = zext i8 %vecext52 to i32
- %vecext54 = extractelement <16 x i8> %b, i32 7
- %conv55 = zext i8 %vecext54 to i32
- %sub56 = sub nsw i32 %conv53, %conv55
- %ispos183 = icmp sgt i32 %sub56, -1
- %neg184 = sub nsw i32 0, %sub56
- %7 = select i1 %ispos183, i32 %sub56, i32 %neg184
- %conv58 = trunc i32 %7 to i8
- %vecins59 = insertelement <16 x i8> %vecins51, i8 %conv58, i32 7
- %vecext60 = extractelement <16 x i8> %a, i32 8
- %conv61 = zext i8 %vecext60 to i32
- %vecext62 = extractelement <16 x i8> %b, i32 8
- %conv63 = zext i8 %vecext62 to i32
- %sub64 = sub nsw i32 %conv61, %conv63
- %ispos185 = icmp sgt i32 %sub64, -1
- %neg186 = sub nsw i32 0, %sub64
- %8 = select i1 %ispos185, i32 %sub64, i32 %neg186
- %conv66 = trunc i32 %8 to i8
- %vecins67 = insertelement <16 x i8> %vecins59, i8 %conv66, i32 8
- %vecext68 = extractelement <16 x i8> %a, i32 9
- %conv69 = zext i8 %vecext68 to i32
- %vecext70 = extractelement <16 x i8> %b, i32 9
- %conv71 = zext i8 %vecext70 to i32
- %sub72 = sub nsw i32 %conv69, %conv71
- %ispos187 = icmp sgt i32 %sub72, -1
- %neg188 = sub nsw i32 0, %sub72
- %9 = select i1 %ispos187, i32 %sub72, i32 %neg188
- %conv74 = trunc i32 %9 to i8
- %vecins75 = insertelement <16 x i8> %vecins67, i8 %conv74, i32 9
- %vecext76 = extractelement <16 x i8> %a, i32 10
- %conv77 = zext i8 %vecext76 to i32
- %vecext78 = extractelement <16 x i8> %b, i32 10
- %conv79 = zext i8 %vecext78 to i32
- %sub80 = sub nsw i32 %conv77, %conv79
- %ispos189 = icmp sgt i32 %sub80, -1
- %neg190 = sub nsw i32 0, %sub80
- %10 = select i1 %ispos189, i32 %sub80, i32 %neg190
- %conv82 = trunc i32 %10 to i8
- %vecins83 = insertelement <16 x i8> %vecins75, i8 %conv82, i32 10
- %vecext84 = extractelement <16 x i8> %a, i32 11
- %conv85 = zext i8 %vecext84 to i32
- %vecext86 = extractelement <16 x i8> %b, i32 11
- %conv87 = zext i8 %vecext86 to i32
- %sub88 = sub nsw i32 %conv85, %conv87
- %ispos191 = icmp sgt i32 %sub88, -1
- %neg192 = sub nsw i32 0, %sub88
- %11 = select i1 %ispos191, i32 %sub88, i32 %neg192
- %conv90 = trunc i32 %11 to i8
- %vecins91 = insertelement <16 x i8> %vecins83, i8 %conv90, i32 11
- %vecext92 = extractelement <16 x i8> %a, i32 12
- %conv93 = zext i8 %vecext92 to i32
- %vecext94 = extractelement <16 x i8> %b, i32 12
- %conv95 = zext i8 %vecext94 to i32
- %sub96 = sub nsw i32 %conv93, %conv95
- %ispos193 = icmp sgt i32 %sub96, -1
- %neg194 = sub nsw i32 0, %sub96
- %12 = select i1 %ispos193, i32 %sub96, i32 %neg194
- %conv98 = trunc i32 %12 to i8
- %vecins99 = insertelement <16 x i8> %vecins91, i8 %conv98, i32 12
- %vecext100 = extractelement <16 x i8> %a, i32 13
- %conv101 = zext i8 %vecext100 to i32
- %vecext102 = extractelement <16 x i8> %b, i32 13
- %conv103 = zext i8 %vecext102 to i32
- %sub104 = sub nsw i32 %conv101, %conv103
- %ispos195 = icmp sgt i32 %sub104, -1
- %neg196 = sub nsw i32 0, %sub104
- %13 = select i1 %ispos195, i32 %sub104, i32 %neg196
- %conv106 = trunc i32 %13 to i8
- %vecins107 = insertelement <16 x i8> %vecins99, i8 %conv106, i32 13
- %vecext108 = extractelement <16 x i8> %a, i32 14
- %conv109 = zext i8 %vecext108 to i32
- %vecext110 = extractelement <16 x i8> %b, i32 14
- %conv111 = zext i8 %vecext110 to i32
- %sub112 = sub nsw i32 %conv109, %conv111
- %ispos197 = icmp sgt i32 %sub112, -1
- %neg198 = sub nsw i32 0, %sub112
- %14 = select i1 %ispos197, i32 %sub112, i32 %neg198
- %conv114 = trunc i32 %14 to i8
- %vecins115 = insertelement <16 x i8> %vecins107, i8 %conv114, i32 14
- %vecext116 = extractelement <16 x i8> %a, i32 15
- %conv117 = zext i8 %vecext116 to i32
- %vecext118 = extractelement <16 x i8> %b, i32 15
- %conv119 = zext i8 %vecext118 to i32
- %sub120 = sub nsw i32 %conv117, %conv119
- %ispos199 = icmp sgt i32 %sub120, -1
- %neg200 = sub nsw i32 0, %sub120
- %15 = select i1 %ispos199, i32 %sub120, i32 %neg200
- %conv122 = trunc i32 %15 to i8
- %vecins123 = insertelement <16 x i8> %vecins115, i8 %conv122, i32 15
- ret <16 x i8> %vecins123
+ %0 = zext <16 x i8> %a to <16 x i32>
+ %1 = zext <16 x i8> %b to <16 x i32>
+ %2 = sub nsw <16 x i32> %0, %1
+ %3 = tail call <16 x i32> @llvm.abs.v16i32(<16 x i32> %2, i1 true)
+ %4 = trunc <16 x i32> %3 to <16 x i8>
+ ret <16 x i8> %4
}
define <4 x i32> @sub_absv_vec_32(<4 x i32> %a, <4 x i32> %b) local_unnamed_addr {