[llvm] [AMDGPU] Allow sinking of free vector ops (PR #109172)
Jeffrey Byrnes via llvm-commits
llvm-commits at lists.llvm.org
Wed Sep 18 11:09:34 PDT 2024
https://github.com/jrbyrnes created https://github.com/llvm/llvm-project/pull/109172
Certain types of `ShuffleVector` / `ExtractElement` / `InsertElement` instructions have zero cost and will be optimized out. Sinking them into user blocks may help enable SDAG combines by providing visibility of the values, rather than just emitting CopyTo/FromRegs. The IR Sink pass does not sink into loops, so this PR instead extends the `CodeGenPrepare` target hook `shouldSinkOperands` to treat these free vector operations as sinkable.
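For illustration only (not part of the patch): a minimal IR sketch of the pattern this targets, modeled on the loop-vector-sink.ll test added below. The function and value names are hypothetical; the point is that the free insertelement/shufflevector splat is defined outside the loop but only used inside it, so sinking it next to its user lets SDAG see the splat directly when combining the add.

define void @sink_example(ptr addrspace(1) %out, i32 %x, i32 %n) {
entry:
  ; Free splat of %x: zero-cost to materialize, so it is profitable to sink.
  %ins = insertelement <2 x i32> poison, i32 %x, i64 0
  %splat = shufflevector <2 x i32> %ins, <2 x i32> poison, <2 x i32> zeroinitializer
  br label %loop

loop:
  %i = phi i32 [ %n, %entry ], [ %i.next, %loop ]
  %acc = phi <2 x i32> [ zeroinitializer, %entry ], [ %sum, %loop ]
  ; Only user of %splat; with the extended hook, CodeGenPrepare sinks the
  ; insertelement/shufflevector into this block.
  %sum = add <2 x i32> %acc, %splat
  %i.next = sub i32 %i, 1
  %done = icmp eq i32 %i.next, 0
  br i1 %done, label %exit, label %loop

exit:
  store <2 x i32> %sum, ptr addrspace(1) %out
  ret void
}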
>From 20ee95d70223a8c48da6bb06b23d64166fe9e2b0 Mon Sep 17 00:00:00 2001
From: Jeffrey Byrnes <Jeffrey.Byrnes at amd.com>
Date: Tue, 23 Jul 2024 15:14:14 -0700
Subject: [PATCH 1/2] [AMDGPU] Correctly insert s_nops for implicit read of
SDWA
Change-Id: I4e22bb3764705f328827eb64704720a0d6aa1a9b
---
.../lib/Target/AMDGPU/GCNHazardRecognizer.cpp | 134 +++++-
llvm/lib/Target/AMDGPU/SIInstrInfo.td | 10 +
.../Target/AMDGPU/Utils/AMDGPUBaseInfo.cpp | 12 +
llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.h | 3 +
llvm/lib/Target/AMDGPU/VOP3Instructions.td | 2 +
llvm/lib/Target/AMDGPU/VOPInstructions.td | 2 +
llvm/test/CodeGen/AMDGPU/dst-sel-hazard.mir | 436 ++++++++++++++++++
.../CodeGen/AMDGPU/llvm.amdgcn.cvt.fp8.ll | 2 +
8 files changed, 579 insertions(+), 22 deletions(-)
create mode 100644 llvm/test/CodeGen/AMDGPU/dst-sel-hazard.mir
diff --git a/llvm/lib/Target/AMDGPU/GCNHazardRecognizer.cpp b/llvm/lib/Target/AMDGPU/GCNHazardRecognizer.cpp
index a8b171aa82840a..a6b7264405ade1 100644
--- a/llvm/lib/Target/AMDGPU/GCNHazardRecognizer.cpp
+++ b/llvm/lib/Target/AMDGPU/GCNHazardRecognizer.cpp
@@ -876,6 +876,7 @@ GCNHazardRecognizer::checkVALUHazardsHelper(const MachineOperand &Def,
return DataIdx >= 0 &&
TRI->regsOverlap(MI.getOperand(DataIdx).getReg(), Reg);
};
+
int WaitStatesNeededForDef =
VALUWaitStates - getWaitStatesSince(IsHazardFn, VALUWaitStates);
WaitStatesNeeded = std::max(WaitStatesNeeded, WaitStatesNeededForDef);
@@ -883,6 +884,70 @@ GCNHazardRecognizer::checkVALUHazardsHelper(const MachineOperand &Def,
return WaitStatesNeeded;
}
+/// A dest sel forwarding issue occurs if additional logic is needed to swizzle
+/// / pack the computed value into the correct bit position of the dest
+/// register. This occurs if we have SDWA with dst_sel != DWORD, or if we have
+/// op_sel with dst_sel that is not aligned to the register. This function
+/// analyzes the \p MI and \returns an operand with a dst forwarding issue, or
+/// nullptr if none exists.
+static const MachineOperand *
+getDstSelForwardingOperand(const MachineInstr &MI, const GCNSubtarget &ST) {
+ if (!SIInstrInfo::isVALU(MI))
+ return nullptr;
+
+ const SIInstrInfo *TII = ST.getInstrInfo();
+
+ unsigned Opcode = MI.getOpcode();
+
+ // There are three different types of instructions which produce a
+ // forwarded dest: 1. SDWA with dst_sel != DWORD, 2. VOP3 which writes the
+ // hi bits (e.g. op_sel[3] == 1), and 3. CVT_SR_FP8_F32 and CVT_SR_BF8_F32
+ // with op_sel[3:2] != 0
+ if (SIInstrInfo::isSDWA(MI)) {
+ // Type 1: SDWA with dst_sel != DWORD
+ if (auto *DstSel = TII->getNamedOperand(MI, AMDGPU::OpName::dst_sel))
+ if (DstSel->getImm() == AMDGPU::SDWA::DWORD)
+ return nullptr;
+ } else {
+ // Type 2 && Type 3: (VOP3 which write the hi bits) || (CVT_SR_FP8_F32 and
+ // CVT_SR_BF8_F32 with op_sel[3:2] != 0)
+ if (!AMDGPU::hasNamedOperand(Opcode, AMDGPU::OpName::op_sel) ||
+ !(TII->getNamedOperand(MI, AMDGPU::OpName::src0_modifiers)->getImm() &
+ SISrcMods::DST_OP_SEL ||
+ (AMDGPU::isFP8DstSelInst(Opcode) &&
+ (TII->getNamedOperand(MI, AMDGPU::OpName::src2_modifiers)->getImm() &
+ SISrcMods::OP_SEL_0))))
+ return nullptr;
+ }
+
+ return TII->getNamedOperand(MI, AMDGPU::OpName::vdst);
+}
+
+/// Checks whether the provided \p VALU "consumes" the operand with a dest sel
+/// forwarding issue \p Dst. We may "consume" the Dst via a standard explicit
+/// RAW, or through irregular ways (e.g. implicit RAW, certain types of WAW).
+static bool consumesDstSelForwardingOperand(const MachineInstr *VALU,
+ const MachineOperand *Dst,
+ const SIRegisterInfo *TRI) {
+ // We must consider implicit reads of the VALU. SDWA with dst_sel and
+ // UNUSED_PRESERVE will implicitly read the result from forwarded dest,
+ // and we must account for that hazard.
+ // We also must account for WAW hazards. In particular, WAW with dest
+ // preserve semantics (e.g. VOP3 with op_sel, VOP2 &&
+ // !zeroesHigh16BitsOfDest) will read the forwarded dest for parity
+ // check for ECC. Without accounting for this hazard, the ECC will be
+ // wrong.
+ // TODO: limit to RAW (including implicit reads) + problematic WAW (i.e.
+ // complete zeroesHigh16BitsOfDest)
+ for (auto &Operand : VALU->operands()) {
+ if (Operand.isReg() && TRI->regsOverlap(Dst->getReg(), Operand.getReg())) {
+ return true;
+ }
+ }
+ return false;
+}
+
int GCNHazardRecognizer::checkVALUHazards(MachineInstr *VALU) {
int WaitStatesNeeded = 0;
@@ -913,27 +978,18 @@ int GCNHazardRecognizer::checkVALUHazards(MachineInstr *VALU) {
if (ST.hasDstSelForwardingHazard()) {
const int Shift16DefWaitstates = 1;
- auto IsShift16BitDefFn = [this, VALU](const MachineInstr &MI) {
- if (!SIInstrInfo::isVALU(MI))
- return false;
- const SIInstrInfo *TII = ST.getInstrInfo();
- if (SIInstrInfo::isSDWA(MI)) {
- if (auto *DstSel = TII->getNamedOperand(MI, AMDGPU::OpName::dst_sel))
- if (DstSel->getImm() == AMDGPU::SDWA::DWORD)
- return false;
- } else {
- if (!AMDGPU::hasNamedOperand(MI.getOpcode(), AMDGPU::OpName::op_sel) ||
- !(TII->getNamedOperand(MI, AMDGPU::OpName::src0_modifiers)
- ->getImm() &
- SISrcMods::DST_OP_SEL))
- return false;
- }
+ auto IsShift16BitDefFn = [this, VALU](const MachineInstr &ProducerMI) {
const SIRegisterInfo *TRI = ST.getRegisterInfo();
- if (auto *Dst = TII->getNamedOperand(MI, AMDGPU::OpName::vdst)) {
- Register Def = Dst->getReg();
+ const MachineOperand *ForwardedDst =
+ getDstSelForwardingOperand(ProducerMI, ST);
+ if (ForwardedDst) {
+ return consumesDstSelForwardingOperand(VALU, ForwardedDst, TRI);
+ }
- for (const MachineOperand &Use : VALU->explicit_uses()) {
- if (Use.isReg() && TRI->regsOverlap(Def, Use.getReg()))
+ if (ProducerMI.isInlineAsm()) {
+ // Assume inline asm has dst forwarding hazard
+ for (auto &Def : ProducerMI.all_defs()) {
+ if (consumesDstSelForwardingOperand(VALU, &Def, TRI))
return true;
}
}
@@ -1030,7 +1086,7 @@ int GCNHazardRecognizer::checkInlineAsmHazards(MachineInstr *IA) {
// problematic thus far.
// see checkVALUHazards()
- if (!ST.has12DWordStoreHazard())
+ if (!ST.has12DWordStoreHazard() && !ST.hasDstSelForwardingHazard())
return 0;
const MachineRegisterInfo &MRI = MF.getRegInfo();
@@ -1039,11 +1095,45 @@ int GCNHazardRecognizer::checkInlineAsmHazards(MachineInstr *IA) {
for (const MachineOperand &Op :
llvm::drop_begin(IA->operands(), InlineAsm::MIOp_FirstOperand)) {
if (Op.isReg() && Op.isDef()) {
- WaitStatesNeeded =
- std::max(WaitStatesNeeded, checkVALUHazardsHelper(Op, MRI));
+ if (!TRI.isVectorRegister(MRI, Op.getReg()))
+ continue;
+
+ if (ST.has12DWordStoreHazard()) {
+ WaitStatesNeeded =
+ std::max(WaitStatesNeeded, checkVALUHazardsHelper(Op, MRI));
+ }
}
}
+ if (ST.hasDstSelForwardingHazard()) {
+ const int Shift16DefWaitstates = 1;
+
+ auto IsShift16BitDefFn = [this, &IA](const MachineInstr &ProducerMI) {
+ const MachineOperand *Dst = getDstSelForwardingOperand(ProducerMI, ST);
+ // Assume inline asm reads the dst
+ if (Dst)
+ return IA->modifiesRegister(Dst->getReg(), &TRI) ||
+ IA->readsRegister(Dst->getReg(), &TRI);
+
+ if (ProducerMI.isInlineAsm()) {
+ // If MI is inline asm, assume it has dst forwarding hazard
+ for (auto &Def : ProducerMI.all_defs()) {
+ if (IA->modifiesRegister(Def.getReg(), &TRI) ||
+ IA->readsRegister(Def.getReg(), &TRI)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ };
+
+ int WaitStatesNeededForDef =
+ Shift16DefWaitstates -
+ getWaitStatesSince(IsShift16BitDefFn, Shift16DefWaitstates);
+ WaitStatesNeeded = std::max(WaitStatesNeeded, WaitStatesNeededForDef);
+ }
+
return WaitStatesNeeded;
}
diff --git a/llvm/lib/Target/AMDGPU/SIInstrInfo.td b/llvm/lib/Target/AMDGPU/SIInstrInfo.td
index 85281713e22b1f..2b54429dc9a03f 100644
--- a/llvm/lib/Target/AMDGPU/SIInstrInfo.td
+++ b/llvm/lib/Target/AMDGPU/SIInstrInfo.td
@@ -2342,6 +2342,7 @@ class VOPProfile <list<ValueType> _ArgVT, bit _EnableClamp = 0> {
field bit IsFP8SrcByteSel = 0;
field bit IsFP8DstByteSel = 0;
+ field bit HasFP8DstByteSel = 0;
field bit IsFP8ByteSel = !or(IsFP8SrcByteSel, IsFP8DstByteSel);
field bit HasDst = !ne(DstVT.Value, untyped.Value);
@@ -2921,6 +2922,15 @@ def getVCMPXOpFromVCMP : InstrMapping {
let ValueCols = [["1"]];
}
+def FP8DstByteSelTable : GenericTable {
+ let FilterClass = "VOP3_Pseudo";
+ let CppTypeName = "FP8DstByteSelInfo";
+ let Fields = ["Opcode", "HasFP8DstByteSel"];
+
+ let PrimaryKey = ["Opcode"];
+ let PrimaryKeyName = "getFP8DstByteSelHelper";
+}
+
def VOPDComponentTable : GenericTable {
let FilterClass = "VOPD_Component";
let CppTypeName = "VOPDComponentInfo";
diff --git a/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.cpp b/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.cpp
index 5b41a2cd731607..cda664a151ef54 100644
--- a/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.cpp
+++ b/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.cpp
@@ -385,6 +385,13 @@ struct SingleUseExceptionInfo {
bool IsInvalidSingleUseProducer;
};
+struct FP8DstByteSelInfo {
+ uint16_t Opcode;
+ bool HasFP8DstByteSel;
+};
+
+#define GET_FP8DstByteSelTable_DECL
+#define GET_FP8DstByteSelTable_IMPL
#define GET_MTBUFInfoTable_DECL
#define GET_MTBUFInfoTable_IMPL
#define GET_MUBUFInfoTable_DECL
@@ -629,6 +636,11 @@ bool isInvalidSingleUseProducerInst(unsigned Opc) {
return Info && Info->IsInvalidSingleUseProducer;
}
+bool isFP8DstSelInst(unsigned Opc) {
+ const FP8DstByteSelInfo *Info = getFP8DstByteSelHelper(Opc);
+ return Info ? Info->HasFP8DstByteSel : false;
+}
+
unsigned mapWMMA2AddrTo3AddrOpcode(unsigned Opc) {
const WMMAOpcodeMappingInfo *Info = getWMMAMappingInfoFrom2AddrOpcode(Opc);
return Info ? Info->Opcode3Addr : ~0u;
diff --git a/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.h b/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.h
index a4e6a7ebe0558b..35c080d8e0bebc 100644
--- a/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.h
+++ b/llvm/lib/Target/AMDGPU/Utils/AMDGPUBaseInfo.h
@@ -861,6 +861,9 @@ getVOPDInstInfo(unsigned VOPDOpcode, const MCInstrInfo *InstrInfo);
LLVM_READONLY
bool isTrue16Inst(unsigned Opc);
+LLVM_READONLY
+bool isFP8DstSelInst(unsigned Opc);
+
LLVM_READONLY
bool isInvalidSingleUseConsumerInst(unsigned Opc);
diff --git a/llvm/lib/Target/AMDGPU/VOP3Instructions.td b/llvm/lib/Target/AMDGPU/VOP3Instructions.td
index 6748eff9376b0d..466114b95f9f90 100644
--- a/llvm/lib/Target/AMDGPU/VOP3Instructions.td
+++ b/llvm/lib/Target/AMDGPU/VOP3Instructions.td
@@ -568,6 +568,7 @@ def VOP3_CVT_SR_F8_F32_Profile : VOP3_Profile<VOPProfile<[i32, f32, i32, f32]>,
let HasSrc2Mods = 1;
let HasExtVOP3DPP = 1;
let HasOpSel = 1;
+ let HasFP8DstByteSel = 1;
let AsmVOP3OpSel = !subst(", $src2_modifiers", "",
getAsmVOP3OpSel<3, HasClamp, HasOMod,
HasSrc0FloatMods, HasSrc1FloatMods,
@@ -587,6 +588,7 @@ def VOP3_CVT_SR_F8_F32_Profile : VOP3_Profile<VOPProfile<[i32, f32, i32, f32]>,
class VOP3_CVT_SR_F8_ByteSel_Profile<ValueType SrcVT> :
VOP3_Profile<VOPProfile<[i32, SrcVT, i32, untyped]>> {
let IsFP8DstByteSel = 1;
+ let HasFP8DstByteSel = 1;
let HasClamp = 0;
defvar bytesel = (ins VGPR_32:$vdst_in, ByteSel:$byte_sel);
let Ins64 = !con(getIns64<Src0RC64, Src1RC64, Src2RC64, NumSrcArgs,
diff --git a/llvm/lib/Target/AMDGPU/VOPInstructions.td b/llvm/lib/Target/AMDGPU/VOPInstructions.td
index 3851415ab0caed..5a460ef0d42320 100644
--- a/llvm/lib/Target/AMDGPU/VOPInstructions.td
+++ b/llvm/lib/Target/AMDGPU/VOPInstructions.td
@@ -113,6 +113,8 @@ class VOP3_Pseudo <string opName, VOPProfile P, list<dag> pattern = [],
let IsWMMA = P.IsWMMA;
let IsSWMMAC = P.IsSWMMAC;
+ bit HasFP8DstByteSel = P.HasFP8DstByteSel;
+
let AsmOperands = !if(isVop3OpSel,
P.AsmVOP3OpSel,
!if(!and(isVOP3P, P.IsPacked), P.AsmVOP3P, P.Asm64));
diff --git a/llvm/test/CodeGen/AMDGPU/dst-sel-hazard.mir b/llvm/test/CodeGen/AMDGPU/dst-sel-hazard.mir
new file mode 100644
index 00000000000000..e24817078d8bc9
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/dst-sel-hazard.mir
@@ -0,0 +1,436 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
+# RUN: llc -mtriple=amdgcn -mcpu=gfx942 -run-pass post-RA-hazard-rec -o - %s | FileCheck -check-prefix=HAZARD %s
+# RUN: llc -mtriple=amdgcn -mcpu=gfx90a -run-pass post-RA-hazard-rec -o - %s | FileCheck -check-prefix=NOHAZARD %s
+
+---
+name: sdwa_opsel_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_opsel_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_opsel_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ S_ENDPGM 0
+...
+
+---
+name: sdwa_lo_opsel_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_lo_opsel_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_lo_opsel_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ S_ENDPGM 0
+...
+
+---
+name: opsel_sdwa_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: opsel_sdwa_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: opsel_sdwa_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+
+# TODO -- there is no reason for s_nop (V_ADD_U16 doesn't preserve the dest)
+
+---
+name: opsel_no_sdwa_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: opsel_no_sdwa_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: opsel_no_sdwa_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ S_ENDPGM 0
+...
+
+---
+name: no_opsel_sdwa_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: no_opsel_sdwa_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_e64 killed $vgpr3, killed $vgpr4, killed $vgpr2, 0, implicit $exec
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: no_opsel_sdwa_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_e64 killed $vgpr3, killed $vgpr4, killed $vgpr2, 0, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_MAD_U16_e64 killed $vgpr3, killed $vgpr4, killed $vgpr2, 0, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: opsel_opsel_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: opsel_opsel_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: opsel_opsel_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 4, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ S_ENDPGM 0
+...
+
+# TODO -- there is no reason for s_nop
+
+---
+name: opsel_opsel_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: opsel_opsel_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: opsel_opsel_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ renamable $vgpr0 = V_MAD_U16_gfx9_e64 12, killed $vgpr3, 4, killed $vgpr4, 4, killed $vgpr2, 0, 0, implicit $exec
+ S_ENDPGM 0
+...
+
+# DS_READ_U16_D16 has dest preserve semantics, but only VALU consumers have hazard
+
+---
+name: sdwa_loadsel_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_loadsel_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 3, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: renamable $vgpr0 = DS_READ_U16_D16 killed renamable $vgpr3, 0, 0, killed renamable $vgpr0, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_loadsel_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 3, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = DS_READ_U16_D16 killed renamable $vgpr3, 0, 0, killed renamable $vgpr0, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 3, 0, 3, 3, implicit $exec
+ renamable $vgpr0 = DS_READ_U16_D16 killed renamable $vgpr3, 0, 0, killed renamable $vgpr0, implicit $exec
+ S_ENDPGM 0
+...
+
+---
+name: sdwa_sdwa_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_sdwa_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_sdwa_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: cvt_sdwa_hazard_1
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: cvt_sdwa_hazard_1
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: cvt_sdwa_hazard_1
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: cvt_sdwa_hazard_2
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: cvt_sdwa_hazard_2
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: cvt_sdwa_hazard_2
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: cvt_sdwa_hazard_3
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: cvt_sdwa_hazard_3
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: cvt_sdwa_hazard_3
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 8, killed $vgpr3, 0, killed $vgpr1, 4, $vgpr0, 0, implicit $mode, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: cvt_sdwa_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: cvt_sdwa_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: cvt_sdwa_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_CVT_SR_FP8_F32_e64 0, killed $vgpr3, 0, killed $vgpr1, 0, $vgpr0, 0, implicit $mode, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+# TODO -- there is no reason for s_nop (V_ADD_U16 doesn't preserve the dest)
+
+---
+name: sdwa_nosdwa_no_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_nosdwa_no_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_nosdwa_no_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec
+ S_ENDPGM 0
+...
+
+---
+name: inline_sdwa_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: inline_sdwa_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: inline_sdwa_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ INLINEASM &"v_or_b32 %0, 0, %1", 32, 327690, def $vgpr0, 327689, $vgpr1
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ S_ENDPGM 0
+...
+
+---
+name: sdwa_inline_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: sdwa_inline_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: sdwa_inline_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ ; NOHAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ renamable $vgpr0 = V_ADD_U16_sdwa 0, $vgpr1, 0, $vgpr2, 0, 1, 0, 3, 3, implicit $exec, implicit killed $vgpr0(tied-def 0)
+ INLINEASM &"v_or_b32 %0, 0, %1", 32, 327690, def $vgpr0, 327689, $vgpr1
+ S_ENDPGM 0
+...
+
+
+---
+name: inline_inline_hazard
+body: |
+ bb.0:
+ liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+
+ ; HAZARD-LABEL: name: inline_inline_hazard
+ ; HAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; HAZARD-NEXT: {{ $}}
+ ; HAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; HAZARD-NEXT: S_NOP 0
+ ; HAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; HAZARD-NEXT: S_ENDPGM 0
+ ;
+ ; NOHAZARD-LABEL: name: inline_inline_hazard
+ ; NOHAZARD: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $exec, $mode
+ ; NOHAZARD-NEXT: {{ $}}
+ ; NOHAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; NOHAZARD-NEXT: INLINEASM &"v_or_b32 %0, 0, %1", 32 /* isconvergent attdialect */, 327690 /* regdef:SReg_1_with_sub0 */, def $vgpr0, 327689 /* reguse:SReg_1_with_sub0 */, $vgpr1
+ ; NOHAZARD-NEXT: S_ENDPGM 0
+ INLINEASM &"v_or_b32 %0, 0, %1", 32, 327690, def $vgpr0, 327689, $vgpr1
+ INLINEASM &"v_or_b32 %0, 0, %1", 32, 327690, def $vgpr0, 327689, $vgpr1
+ S_ENDPGM 0
+...
+
diff --git a/llvm/test/CodeGen/AMDGPU/llvm.amdgcn.cvt.fp8.ll b/llvm/test/CodeGen/AMDGPU/llvm.amdgcn.cvt.fp8.ll
index d3fc96d7ff8012..8313f5b655efba 100644
--- a/llvm/test/CodeGen/AMDGPU/llvm.amdgcn.cvt.fp8.ll
+++ b/llvm/test/CodeGen/AMDGPU/llvm.amdgcn.cvt.fp8.ll
@@ -375,6 +375,7 @@ define i32 @test_cvt_sr_bf8_f32_byte1(float %x, i32 %r, i32 %old) {
; GFX940: ; %bb.0:
; GFX940-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX940-NEXT: v_cvt_sr_bf8_f32 v2, v0, v1 op_sel:[0,0,1,0]
+; GFX940-NEXT: s_nop 0
; GFX940-NEXT: v_mov_b32_e32 v0, v2
; GFX940-NEXT: s_setpc_b64 s[30:31]
;
@@ -469,6 +470,7 @@ define i32 @test_cvt_sr_fp8_f32_byte1(float %x, i32 %r, i32 %old) {
; GFX940: ; %bb.0:
; GFX940-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX940-NEXT: v_cvt_sr_fp8_f32 v2, v0, v1 op_sel:[0,0,1,0]
+; GFX940-NEXT: s_nop 0
; GFX940-NEXT: v_mov_b32_e32 v0, v2
; GFX940-NEXT: s_setpc_b64 s[30:31]
;
>From 554f4895649733fe7dc9fe923b14594b27c61b9b Mon Sep 17 00:00:00 2001
From: Jeffrey Byrnes <Jeffrey.Byrnes at amd.com>
Date: Mon, 16 Sep 2024 10:14:32 -0700
Subject: [PATCH 2/2] [AMDGPU] Allow sinking of free vector ops
Change-Id: If915150deb4e478204ceec067b6a368d2e62de79
---
llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp | 88 +-
llvm/test/CodeGen/AMDGPU/loop-vector-sink.ll | 48 +
llvm/test/CodeGen/AMDGPU/srem.ll | 2214 +++++++++--------
3 files changed, 1249 insertions(+), 1101 deletions(-)
create mode 100644 llvm/test/CodeGen/AMDGPU/loop-vector-sink.ll
diff --git a/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp b/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
index e57c8f8b7b4835..1ebd0376ebd7b3 100644
--- a/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
+++ b/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
@@ -6000,7 +6000,7 @@ bool AMDGPUTargetLowering::isReassocProfitable(MachineRegisterInfo &MRI,
/// Whether it is profitable to sink the operands of an
/// Instruction I to the basic block of I.
-/// This helps using several modifiers (like abs and neg) more often.
+
bool AMDGPUTargetLowering::shouldSinkOperands(
Instruction *I, SmallVectorImpl<Use *> &Ops) const {
using namespace PatternMatch;
@@ -6010,8 +6010,94 @@ bool AMDGPUTargetLowering::shouldSinkOperands(
if (any_of(Ops, [&](Use *U) { return U->get() == Op.get(); }))
continue;
+ // This helps use several modifiers (abs and neg) more often.
if (match(&Op, m_FAbs(m_Value())) || match(&Op, m_FNeg(m_Value())))
Ops.push_back(&Op);
+
+ // Zero cost vector instructions (e.g. extractelement 0 of i32 vectors)
+ // will be optimized away, and sinking them can help SDAG combines.
+ DataLayout DL = I->getModule()->getDataLayout();
+
+ auto IsFreeExtractInsert = [&DL, this](VectorType *VecType,
+ unsigned VecIndex) {
+ unsigned EltSize = DL.getTypeSizeInBits(VecType->getElementType());
+ return EltSize >= 32 ||
+ (EltSize == 16 && VecIndex == 0 && Subtarget->has16BitInsts());
+ };
+
+ uint64_t VecIndex;
+ Value *Vec;
+ if (match(Op.get(), m_ExtractElt(m_Value(Vec), m_ConstantInt(VecIndex)))) {
+ Instruction *VecOpInst =
+ dyn_cast<Instruction>(cast<Instruction>(Op.get())->getOperand(0));
+ // If a zero-cost extractelement instruction is the only use of the vector,
+ // then it may be combined with the def.
+ if (VecOpInst && VecOpInst->hasOneUse())
+ continue;
+
+ if (IsFreeExtractInsert(cast<VectorType>(Vec->getType()), VecIndex))
+ Ops.push_back(&Op);
+
+ continue;
+ }
+
+ if (match(Op.get(),
+ m_InsertElt(m_Value(Vec), m_Value(), m_ConstantInt(VecIndex)))) {
+ if (IsFreeExtractInsert(cast<VectorType>(Vec->getType()), VecIndex))
+ Ops.push_back(&Op);
+
+ continue;
+ }
+
+ if (auto *Shuffle = dyn_cast<ShuffleVectorInst>(Op.get())) {
+ if (Shuffle->isIdentity()) {
+ Ops.push_back(&Op);
+ continue;
+ }
+
+ unsigned EltSize = DL.getTypeSizeInBits(
+ cast<VectorType>(Shuffle->getType())
+ ->getElementType());
+
+ // For i32 (or greater) shufflevectors, these will be lowered into a
+ // series of insert / extract elements, which will be coalesced away.
+ if (EltSize >= 32) {
+ Ops.push_back(&Op);
+ continue;
+ }
+
+ if (EltSize < 16 || !Subtarget->has16BitInsts())
+ continue;
+
+ int NumSubElts, SubIndex;
+ if (Shuffle->changesLength()) {
+ if (Shuffle->increasesLength() && Shuffle->isIdentityWithPadding()) {
+ Ops.push_back(&Op);
+ continue;
+ }
+
+ if (Shuffle->isExtractSubvectorMask(SubIndex) ||
+ Shuffle->isInsertSubvectorMask(NumSubElts, SubIndex)) {
+ if (!(SubIndex % 2)) {
+ Ops.push_back(&Op);
+ continue;
+ }
+ }
+ }
+
+ if (Shuffle->isReverse() || Shuffle->isZeroEltSplat() ||
+ Shuffle->isSingleSource()) {
+ Ops.push_back(&Op);
+ continue;
+ }
+
+ if (Shuffle->isInsertSubvectorMask(NumSubElts, SubIndex)) {
+ if (!(SubIndex % 2)) {
+ Ops.push_back(&Op);
+ continue;
+ }
+ }
+ }
}
return !Ops.empty();
diff --git a/llvm/test/CodeGen/AMDGPU/loop-vector-sink.ll b/llvm/test/CodeGen/AMDGPU/loop-vector-sink.ll
new file mode 100644
index 00000000000000..59fe8b8682439e
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/loop-vector-sink.ll
@@ -0,0 +1,48 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
+; RUN: llc -march=amdgcn -mcpu=gfx942 < %s | FileCheck -enable-var-scope --check-prefix=GCN %s
+
+define amdgpu_kernel void @runningSum(ptr addrspace(1) %out, i32 %inputElement0, i32 %inputElement1, i32 %inputIter) {
+; GCN-LABEL: runningSum:
+; GCN: ; %bb.0: ; %bb.0
+; GCN-NEXT: s_load_dwordx2 s[4:5], s[2:3], 0x30
+; GCN-NEXT: s_waitcnt lgkmcnt(0)
+; GCN-NEXT: s_mov_b32 s0, s4
+; GCN-NEXT: s_mov_b32 s1, s4
+; GCN-NEXT: .LBB0_1: ; %loopBody
+; GCN-NEXT: ; =>This Inner Loop Header: Depth=1
+; GCN-NEXT: s_add_i32 s1, s4, s1
+; GCN-NEXT: s_add_i32 s0, s4, s0
+; GCN-NEXT: s_add_i32 s5, s5, -1
+; GCN-NEXT: s_cmp_lg_u32 s5, 0
+; GCN-NEXT: s_cbranch_scc1 .LBB0_1
+; GCN-NEXT: ; %bb.2: ; %loopExit
+; GCN-NEXT: s_load_dwordx2 s[4:5], s[2:3], 0x24
+; GCN-NEXT: v_mov_b32_e32 v2, 0
+; GCN-NEXT: v_mov_b64_e32 v[0:1], s[0:1]
+; GCN-NEXT: s_waitcnt lgkmcnt(0)
+; GCN-NEXT: global_store_dwordx2 v2, v[0:1], s[4:5]
+; GCN-NEXT: s_endpgm
+bb.0:
+ br label %preheader
+
+preheader:
+ %vecElement0 = insertelement <2 x i32> poison, i32 %inputElement0, i64 0
+ %broadcast0 = shufflevector <2 x i32> %vecElement0, <2 x i32> poison, <2 x i32> zeroinitializer
+ %vecElement1 = insertelement <2 x i32> poison, i32 %inputElement1, i64 0
+ %broadcast1 = shufflevector <2 x i32> %vecElement1, <2 x i32> poison, <2 x i32> zeroinitializer
+ br label %loopBody
+
+loopBody:
+ %previousSum = phi <2 x i32> [ %broadcast1, %preheader ], [ %runningSum, %loopBody ]
+ %iterCount = phi i32 [ %inputIter, %preheader ], [ %itersLeft, %loopBody ]
+ %runningSum = add <2 x i32> %broadcast1, %previousSum
+ %itersLeft = sub i32 %iterCount, 1
+ %cond = icmp eq i32 %itersLeft, 0
+ br i1 %cond, label %loopExit, label %loopBody, !llvm.loop !0
+
+loopExit:
+ store <2 x i32> %runningSum, ptr addrspace(1) %out
+ ret void
+}
+
+!0 = !{!"llvm.loop.mustprogress"}
diff --git a/llvm/test/CodeGen/AMDGPU/srem.ll b/llvm/test/CodeGen/AMDGPU/srem.ll
index 7b0241984a3491..316c16325ea460 100644
--- a/llvm/test/CodeGen/AMDGPU/srem.ll
+++ b/llvm/test/CodeGen/AMDGPU/srem.ll
@@ -4861,625 +4861,610 @@ define amdgpu_kernel void @srem_v4i64(ptr addrspace(1) %out, ptr addrspace(1) %i
; GCN-LABEL: srem_v4i64:
; GCN: ; %bb.0:
; GCN-NEXT: s_load_dwordx4 s[4:7], s[2:3], 0x24
-; GCN-NEXT: v_mov_b32_e32 v8, 0
+; GCN-NEXT: v_mov_b32_e32 v4, 0
; GCN-NEXT: s_waitcnt lgkmcnt(0)
-; GCN-NEXT: global_load_dwordx4 v[10:13], v8, s[6:7] offset:32
-; GCN-NEXT: global_load_dwordx4 v[14:17], v8, s[6:7]
-; GCN-NEXT: global_load_dwordx4 v[0:3], v8, s[6:7] offset:48
-; GCN-NEXT: global_load_dwordx4 v[4:7], v8, s[6:7] offset:16
+; GCN-NEXT: global_load_dwordx4 v[10:13], v4, s[6:7] offset:48
+; GCN-NEXT: global_load_dwordx4 v[14:17], v4, s[6:7] offset:32
+; GCN-NEXT: global_load_dwordx4 v[6:9], v4, s[6:7]
+; GCN-NEXT: global_load_dwordx4 v[0:3], v4, s[6:7] offset:16
; GCN-NEXT: s_waitcnt vmcnt(3)
-; GCN-NEXT: v_readfirstlane_b32 s7, v11
-; GCN-NEXT: v_readfirstlane_b32 s6, v10
+; GCN-NEXT: v_readfirstlane_b32 s11, v13
; GCN-NEXT: s_waitcnt vmcnt(2)
-; GCN-NEXT: v_readfirstlane_b32 s9, v15
-; GCN-NEXT: v_readfirstlane_b32 s8, v14
-; GCN-NEXT: s_or_b64 s[0:1], s[8:9], s[6:7]
-; GCN-NEXT: s_mov_b32 s0, 0
-; GCN-NEXT: s_cmp_lg_u64 s[0:1], 0
-; GCN-NEXT: s_cbranch_scc0 .LBB12_13
+; GCN-NEXT: v_readfirstlane_b32 s0, v15
+; GCN-NEXT: s_waitcnt vmcnt(1)
+; GCN-NEXT: v_or_b32_e32 v5, s0, v7
+; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GCN-NEXT: v_readfirstlane_b32 s10, v12
+; GCN-NEXT: v_readfirstlane_b32 s13, v11
+; GCN-NEXT: v_readfirstlane_b32 s12, v10
+; GCN-NEXT: v_readfirstlane_b32 s15, v17
+; GCN-NEXT: v_readfirstlane_b32 s14, v16
+; GCN-NEXT: v_readfirstlane_b32 s16, v14
+; GCN-NEXT: s_cbranch_vccz .LBB12_13
; GCN-NEXT: ; %bb.1:
-; GCN-NEXT: s_ashr_i32 s0, s7, 31
-; GCN-NEXT: s_add_u32 s2, s6, s0
-; GCN-NEXT: s_mov_b32 s1, s0
-; GCN-NEXT: s_addc_u32 s3, s7, s0
-; GCN-NEXT: s_xor_b64 s[12:13], s[2:3], s[0:1]
-; GCN-NEXT: v_cvt_f32_u32_e32 v8, s12
-; GCN-NEXT: v_cvt_f32_u32_e32 v9, s13
-; GCN-NEXT: s_sub_u32 s0, 0, s12
-; GCN-NEXT: s_subb_u32 s1, 0, s13
-; GCN-NEXT: v_madmk_f32 v8, v9, 0x4f800000, v8
-; GCN-NEXT: v_rcp_f32_e32 v8, v8
-; GCN-NEXT: v_mul_f32_e32 v8, 0x5f7ffffc, v8
-; GCN-NEXT: v_mul_f32_e32 v9, 0x2f800000, v8
-; GCN-NEXT: v_trunc_f32_e32 v9, v9
-; GCN-NEXT: v_madmk_f32 v8, v9, 0xcf800000, v8
-; GCN-NEXT: v_cvt_u32_f32_e32 v9, v9
-; GCN-NEXT: v_cvt_u32_f32_e32 v8, v8
-; GCN-NEXT: v_readfirstlane_b32 s2, v9
-; GCN-NEXT: v_readfirstlane_b32 s3, v8
-; GCN-NEXT: s_mul_i32 s7, s0, s2
-; GCN-NEXT: s_mul_hi_u32 s15, s0, s3
-; GCN-NEXT: s_mul_i32 s14, s1, s3
-; GCN-NEXT: s_add_i32 s7, s15, s7
-; GCN-NEXT: s_add_i32 s7, s7, s14
-; GCN-NEXT: s_mul_i32 s16, s0, s3
-; GCN-NEXT: s_mul_hi_u32 s14, s3, s7
-; GCN-NEXT: s_mul_i32 s15, s3, s7
-; GCN-NEXT: s_mul_hi_u32 s3, s3, s16
-; GCN-NEXT: s_add_u32 s3, s3, s15
-; GCN-NEXT: s_addc_u32 s14, 0, s14
-; GCN-NEXT: s_mul_hi_u32 s17, s2, s16
-; GCN-NEXT: s_mul_i32 s16, s2, s16
-; GCN-NEXT: s_add_u32 s3, s3, s16
-; GCN-NEXT: s_mul_hi_u32 s15, s2, s7
-; GCN-NEXT: s_addc_u32 s3, s14, s17
-; GCN-NEXT: s_addc_u32 s14, s15, 0
-; GCN-NEXT: s_mul_i32 s7, s2, s7
-; GCN-NEXT: s_add_u32 s3, s3, s7
-; GCN-NEXT: s_addc_u32 s7, 0, s14
-; GCN-NEXT: v_add_co_u32_e32 v8, vcc, s3, v8
-; GCN-NEXT: s_cmp_lg_u64 vcc, 0
-; GCN-NEXT: s_addc_u32 s2, s2, s7
-; GCN-NEXT: v_readfirstlane_b32 s7, v8
-; GCN-NEXT: s_mul_i32 s3, s0, s2
-; GCN-NEXT: s_mul_hi_u32 s14, s0, s7
-; GCN-NEXT: s_add_i32 s3, s14, s3
-; GCN-NEXT: s_mul_i32 s1, s1, s7
-; GCN-NEXT: s_add_i32 s3, s3, s1
-; GCN-NEXT: s_mul_i32 s0, s0, s7
-; GCN-NEXT: s_mul_hi_u32 s14, s2, s0
-; GCN-NEXT: s_mul_i32 s15, s2, s0
-; GCN-NEXT: s_mul_i32 s17, s7, s3
-; GCN-NEXT: s_mul_hi_u32 s0, s7, s0
-; GCN-NEXT: s_mul_hi_u32 s16, s7, s3
-; GCN-NEXT: s_add_u32 s0, s0, s17
-; GCN-NEXT: s_addc_u32 s7, 0, s16
-; GCN-NEXT: s_add_u32 s0, s0, s15
-; GCN-NEXT: s_mul_hi_u32 s1, s2, s3
-; GCN-NEXT: s_addc_u32 s0, s7, s14
-; GCN-NEXT: s_addc_u32 s1, s1, 0
-; GCN-NEXT: s_mul_i32 s3, s2, s3
-; GCN-NEXT: s_add_u32 s0, s0, s3
-; GCN-NEXT: s_addc_u32 s1, 0, s1
-; GCN-NEXT: v_add_co_u32_e32 v8, vcc, s0, v8
-; GCN-NEXT: s_cmp_lg_u64 vcc, 0
-; GCN-NEXT: s_addc_u32 s2, s2, s1
-; GCN-NEXT: s_ashr_i32 s14, s9, 31
-; GCN-NEXT: s_add_u32 s0, s8, s14
-; GCN-NEXT: s_mov_b32 s15, s14
-; GCN-NEXT: s_addc_u32 s1, s9, s14
-; GCN-NEXT: s_xor_b64 s[16:17], s[0:1], s[14:15]
-; GCN-NEXT: v_readfirstlane_b32 s3, v8
-; GCN-NEXT: s_mul_i32 s1, s16, s2
-; GCN-NEXT: s_mul_hi_u32 s7, s16, s3
-; GCN-NEXT: s_mul_hi_u32 s0, s16, s2
-; GCN-NEXT: s_add_u32 s1, s7, s1
-; GCN-NEXT: s_addc_u32 s0, 0, s0
-; GCN-NEXT: s_mul_hi_u32 s9, s17, s3
-; GCN-NEXT: s_mul_i32 s3, s17, s3
-; GCN-NEXT: s_add_u32 s1, s1, s3
-; GCN-NEXT: s_mul_hi_u32 s7, s17, s2
-; GCN-NEXT: s_addc_u32 s0, s0, s9
-; GCN-NEXT: s_addc_u32 s1, s7, 0
-; GCN-NEXT: s_mul_i32 s2, s17, s2
-; GCN-NEXT: s_add_u32 s0, s0, s2
-; GCN-NEXT: s_addc_u32 s1, 0, s1
-; GCN-NEXT: s_mul_i32 s1, s12, s1
-; GCN-NEXT: s_mul_hi_u32 s2, s12, s0
-; GCN-NEXT: s_add_i32 s1, s2, s1
-; GCN-NEXT: s_mul_i32 s2, s13, s0
-; GCN-NEXT: s_mul_i32 s0, s12, s0
-; GCN-NEXT: s_add_i32 s7, s1, s2
-; GCN-NEXT: v_mov_b32_e32 v8, s0
-; GCN-NEXT: s_sub_i32 s1, s17, s7
-; GCN-NEXT: v_sub_co_u32_e32 v8, vcc, s16, v8
-; GCN-NEXT: s_cmp_lg_u64 vcc, 0
-; GCN-NEXT: s_subb_u32 s9, s1, s13
-; GCN-NEXT: v_subrev_co_u32_e64 v9, s[0:1], s12, v8
-; GCN-NEXT: s_cmp_lg_u64 s[0:1], 0
-; GCN-NEXT: s_subb_u32 s15, s9, 0
-; GCN-NEXT: s_cmp_ge_u32 s15, s13
-; GCN-NEXT: s_cselect_b32 s16, -1, 0
-; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s12, v9
-; GCN-NEXT: s_cmp_eq_u32 s15, s13
-; GCN-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[2:3]
-; GCN-NEXT: v_mov_b32_e32 v11, s16
-; GCN-NEXT: s_cselect_b64 s[2:3], -1, 0
-; GCN-NEXT: s_cmp_lg_u64 s[0:1], 0
-; GCN-NEXT: v_cndmask_b32_e64 v10, v11, v10, s[2:3]
-; GCN-NEXT: s_subb_u32 s2, s9, s13
-; GCN-NEXT: v_subrev_co_u32_e64 v11, s[0:1], s12, v9
-; GCN-NEXT: s_cmp_lg_u64 s[0:1], 0
-; GCN-NEXT: s_subb_u32 s2, s2, 0
-; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v10
-; GCN-NEXT: v_cndmask_b32_e64 v9, v9, v11, s[0:1]
-; GCN-NEXT: v_mov_b32_e32 v10, s15
-; GCN-NEXT: v_mov_b32_e32 v11, s2
-; GCN-NEXT: s_cmp_lg_u64 vcc, 0
-; GCN-NEXT: v_cndmask_b32_e64 v10, v10, v11, s[0:1]
-; GCN-NEXT: s_subb_u32 s0, s17, s7
-; GCN-NEXT: s_cmp_ge_u32 s0, s13
-; GCN-NEXT: s_cselect_b32 s1, -1, 0
-; GCN-NEXT: v_cmp_le_u32_e32 vcc, s12, v8
-; GCN-NEXT: s_cmp_eq_u32 s0, s13
-; GCN-NEXT: v_cndmask_b32_e64 v11, 0, -1, vcc
-; GCN-NEXT: v_mov_b32_e32 v14, s1
-; GCN-NEXT: s_cselect_b64 vcc, -1, 0
-; GCN-NEXT: v_cndmask_b32_e32 v11, v14, v11, vcc
-; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v11
-; GCN-NEXT: v_mov_b32_e32 v14, s0
-; GCN-NEXT: v_cndmask_b32_e32 v8, v8, v9, vcc
-; GCN-NEXT: v_cndmask_b32_e32 v10, v14, v10, vcc
-; GCN-NEXT: v_xor_b32_e32 v8, s14, v8
-; GCN-NEXT: v_xor_b32_e32 v9, s14, v10
-; GCN-NEXT: v_mov_b32_e32 v10, s14
-; GCN-NEXT: v_subrev_co_u32_e32 v8, vcc, s14, v8
-; GCN-NEXT: v_subb_co_u32_e32 v9, vcc, v9, v10, vcc
+; GCN-NEXT: s_ashr_i32 s2, s0, 31
+; GCN-NEXT: s_add_u32 s6, s16, s2
+; GCN-NEXT: s_mov_b32 s3, s2
+; GCN-NEXT: s_addc_u32 s7, s0, s2
+; GCN-NEXT: s_xor_b64 s[6:7], s[6:7], s[2:3]
+; GCN-NEXT: v_cvt_f32_u32_e32 v4, s6
+; GCN-NEXT: v_cvt_f32_u32_e32 v5, s7
+; GCN-NEXT: s_sub_u32 s0, 0, s6
+; GCN-NEXT: s_subb_u32 s1, 0, s7
+; GCN-NEXT: v_madmk_f32 v4, v5, 0x4f800000, v4
+; GCN-NEXT: v_rcp_f32_e32 v4, v4
+; GCN-NEXT: v_mul_f32_e32 v4, 0x5f7ffffc, v4
+; GCN-NEXT: v_mul_f32_e32 v5, 0x2f800000, v4
+; GCN-NEXT: v_trunc_f32_e32 v5, v5
+; GCN-NEXT: v_madmk_f32 v4, v5, 0xcf800000, v4
+; GCN-NEXT: v_cvt_u32_f32_e32 v5, v5
+; GCN-NEXT: v_cvt_u32_f32_e32 v4, v4
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v5
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v4
+; GCN-NEXT: v_mul_lo_u32 v13, s1, v4
+; GCN-NEXT: v_mul_lo_u32 v12, s0, v4
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v4, v12
+; GCN-NEXT: v_mul_lo_u32 v13, v4, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v4, v10
+; GCN-NEXT: v_mul_lo_u32 v14, v5, v12
+; GCN-NEXT: v_mul_hi_u32 v12, v5, v12
+; GCN-NEXT: v_mul_hi_u32 v16, v5, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v13
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v15, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v5, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v14
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, v13, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v16, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v11, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v12, vcc
+; GCN-NEXT: v_add_co_u32_e32 v4, vcc, v4, v10
+; GCN-NEXT: v_addc_co_u32_e32 v5, vcc, v5, v11, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v5
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v4
+; GCN-NEXT: v_mul_lo_u32 v12, s1, v4
+; GCN-NEXT: v_mul_lo_u32 v13, s0, v4
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v12
+; GCN-NEXT: v_mul_lo_u32 v14, v4, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v4, v13
+; GCN-NEXT: v_mul_hi_u32 v16, v4, v10
+; GCN-NEXT: v_mul_hi_u32 v12, v5, v13
+; GCN-NEXT: v_mul_lo_u32 v13, v5, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v5, v10
+; GCN-NEXT: v_add_co_u32_e32 v14, vcc, v15, v14
+; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v16, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v5, v10
+; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v14, v13
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, v15, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v12, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v4, vcc, v4, v10
+; GCN-NEXT: v_addc_co_u32_e32 v5, vcc, v5, v11, vcc
+; GCN-NEXT: v_ashrrev_i32_e32 v10, 31, v7
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v6, v10
+; GCN-NEXT: v_xor_b32_e32 v11, v11, v10
+; GCN-NEXT: v_mul_lo_u32 v12, v11, v5
+; GCN-NEXT: v_mul_hi_u32 v13, v11, v4
+; GCN-NEXT: v_mul_hi_u32 v14, v11, v5
+; GCN-NEXT: v_addc_co_u32_e32 v7, vcc, v7, v10, vcc
+; GCN-NEXT: v_xor_b32_e32 v7, v7, v10
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v13, v12
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v14, vcc
+; GCN-NEXT: v_mul_lo_u32 v14, v7, v4
+; GCN-NEXT: v_mul_hi_u32 v4, v7, v4
+; GCN-NEXT: v_mul_hi_u32 v15, v7, v5
+; GCN-NEXT: v_mul_lo_u32 v5, v7, v5
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v12, v14
+; GCN-NEXT: v_addc_co_u32_e32 v4, vcc, v13, v4, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v15, vcc
+; GCN-NEXT: v_add_co_u32_e32 v4, vcc, v4, v5
+; GCN-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v12, vcc
+; GCN-NEXT: v_mul_lo_u32 v5, s6, v5
+; GCN-NEXT: v_mul_hi_u32 v12, s6, v4
+; GCN-NEXT: v_mul_lo_u32 v13, s7, v4
+; GCN-NEXT: v_mul_lo_u32 v4, s6, v4
+; GCN-NEXT: v_add_u32_e32 v5, v12, v5
+; GCN-NEXT: v_add_u32_e32 v5, v5, v13
+; GCN-NEXT: v_sub_u32_e32 v12, v7, v5
+; GCN-NEXT: v_mov_b32_e32 v13, s7
+; GCN-NEXT: v_sub_co_u32_e32 v4, vcc, v11, v4
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v12, v13, vcc
+; GCN-NEXT: v_subrev_co_u32_e64 v12, s[0:1], s6, v4
+; GCN-NEXT: v_subbrev_co_u32_e64 v14, s[2:3], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_cndmask_b32_e64 v15, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s6, v12
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v11, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v16, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_subrev_co_u32_e64 v13, s[0:1], s6, v12
+; GCN-NEXT: v_subb_co_u32_e32 v5, vcc, v7, v5, vcc
+; GCN-NEXT: v_cndmask_b32_e64 v15, v15, v16, s[2:3]
+; GCN-NEXT: v_subbrev_co_u32_e64 v11, s[0:1], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s7, v5
+; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v15
+; GCN-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s6, v4
+; GCN-NEXT: v_cndmask_b32_e64 v12, v12, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v13, 0, -1, vcc
+; GCN-NEXT: v_cmp_eq_u32_e32 vcc, s7, v5
+; GCN-NEXT: v_cndmask_b32_e32 v7, v7, v13, vcc
+; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
+; GCN-NEXT: v_cndmask_b32_e64 v11, v14, v11, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e32 v4, v4, v12, vcc
+; GCN-NEXT: v_cndmask_b32_e32 v5, v5, v11, vcc
+; GCN-NEXT: v_xor_b32_e32 v4, v4, v10
+; GCN-NEXT: v_xor_b32_e32 v5, v5, v10
+; GCN-NEXT: v_sub_co_u32_e32 v4, vcc, v4, v10
+; GCN-NEXT: v_subb_co_u32_e32 v5, vcc, v5, v10, vcc
; GCN-NEXT: s_cbranch_execnz .LBB12_3
; GCN-NEXT: .LBB12_2:
-; GCN-NEXT: v_cvt_f32_u32_e32 v8, s6
-; GCN-NEXT: s_sub_i32 s0, 0, s6
-; GCN-NEXT: s_mov_b32 s1, 0
-; GCN-NEXT: v_rcp_iflag_f32_e32 v8, v8
-; GCN-NEXT: v_mul_f32_e32 v8, 0x4f7ffffe, v8
-; GCN-NEXT: v_cvt_u32_f32_e32 v8, v8
-; GCN-NEXT: v_readfirstlane_b32 s2, v8
-; GCN-NEXT: s_mul_i32 s0, s0, s2
-; GCN-NEXT: s_mul_hi_u32 s0, s2, s0
-; GCN-NEXT: s_add_i32 s2, s2, s0
-; GCN-NEXT: s_mul_hi_u32 s0, s8, s2
-; GCN-NEXT: s_mul_i32 s0, s0, s6
-; GCN-NEXT: s_sub_i32 s0, s8, s0
-; GCN-NEXT: s_sub_i32 s2, s0, s6
-; GCN-NEXT: s_cmp_ge_u32 s0, s6
-; GCN-NEXT: s_cselect_b32 s0, s2, s0
-; GCN-NEXT: s_sub_i32 s2, s0, s6
-; GCN-NEXT: s_cmp_ge_u32 s0, s6
-; GCN-NEXT: s_cselect_b32 s0, s2, s0
-; GCN-NEXT: v_mov_b32_e32 v9, s1
-; GCN-NEXT: v_mov_b32_e32 v8, s0
+; GCN-NEXT: v_cvt_f32_u32_e32 v4, s16
+; GCN-NEXT: s_sub_i32 s0, 0, s16
+; GCN-NEXT: v_rcp_iflag_f32_e32 v4, v4
+; GCN-NEXT: v_mul_f32_e32 v4, 0x4f7ffffe, v4
+; GCN-NEXT: v_cvt_u32_f32_e32 v4, v4
+; GCN-NEXT: v_mul_lo_u32 v5, s0, v4
+; GCN-NEXT: v_mul_hi_u32 v5, v4, v5
+; GCN-NEXT: v_add_u32_e32 v4, v4, v5
+; GCN-NEXT: v_mul_hi_u32 v4, v6, v4
+; GCN-NEXT: v_mul_lo_u32 v4, v4, s16
+; GCN-NEXT: v_sub_u32_e32 v4, v6, v4
+; GCN-NEXT: v_subrev_u32_e32 v5, s16, v4
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s16, v4
+; GCN-NEXT: v_cndmask_b32_e32 v4, v4, v5, vcc
+; GCN-NEXT: v_subrev_u32_e32 v5, s16, v4
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s16, v4
+; GCN-NEXT: v_cndmask_b32_e32 v4, v4, v5, vcc
+; GCN-NEXT: v_mov_b32_e32 v5, 0
; GCN-NEXT: .LBB12_3:
-; GCN-NEXT: v_or_b32_e32 v11, v17, v13
-; GCN-NEXT: v_mov_b32_e32 v10, 0
-; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[10:11]
+; GCN-NEXT: v_or_b32_e32 v7, s15, v9
+; GCN-NEXT: v_mov_b32_e32 v6, 0
+; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[6:7]
; GCN-NEXT: s_cbranch_vccz .LBB12_14
; GCN-NEXT: ; %bb.4:
-; GCN-NEXT: v_ashrrev_i32_e32 v10, 31, v13
-; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v12, v10
-; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, v13, v10, vcc
+; GCN-NEXT: s_ashr_i32 s0, s15, 31
+; GCN-NEXT: s_add_u32 s2, s14, s0
+; GCN-NEXT: s_mov_b32 s1, s0
+; GCN-NEXT: s_addc_u32 s3, s15, s0
+; GCN-NEXT: s_xor_b64 s[6:7], s[2:3], s[0:1]
+; GCN-NEXT: v_cvt_f32_u32_e32 v6, s6
+; GCN-NEXT: v_cvt_f32_u32_e32 v7, s7
+; GCN-NEXT: s_sub_u32 s0, 0, s6
+; GCN-NEXT: s_subb_u32 s1, 0, s7
+; GCN-NEXT: v_madmk_f32 v6, v7, 0x4f800000, v6
+; GCN-NEXT: v_rcp_f32_e32 v6, v6
+; GCN-NEXT: v_mul_f32_e32 v6, 0x5f7ffffc, v6
+; GCN-NEXT: v_mul_f32_e32 v7, 0x2f800000, v6
+; GCN-NEXT: v_trunc_f32_e32 v7, v7
+; GCN-NEXT: v_madmk_f32 v6, v7, 0xcf800000, v6
+; GCN-NEXT: v_cvt_u32_f32_e32 v7, v7
+; GCN-NEXT: v_cvt_u32_f32_e32 v6, v6
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v7
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v6
+; GCN-NEXT: v_mul_lo_u32 v13, s1, v6
+; GCN-NEXT: v_mul_lo_u32 v12, s0, v6
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v6, v12
+; GCN-NEXT: v_mul_lo_u32 v13, v6, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v6, v10
+; GCN-NEXT: v_mul_lo_u32 v14, v7, v12
+; GCN-NEXT: v_mul_hi_u32 v12, v7, v12
+; GCN-NEXT: v_mul_hi_u32 v16, v7, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v13
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v15, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v7, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v14
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, v13, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v16, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v11, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v12, vcc
+; GCN-NEXT: v_add_co_u32_e32 v6, vcc, v6, v10
+; GCN-NEXT: v_addc_co_u32_e32 v7, vcc, v7, v11, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v7
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v6
+; GCN-NEXT: v_mul_lo_u32 v12, s1, v6
+; GCN-NEXT: v_mul_lo_u32 v13, s0, v6
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v12
+; GCN-NEXT: v_mul_lo_u32 v14, v6, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v6, v13
+; GCN-NEXT: v_mul_hi_u32 v16, v6, v10
+; GCN-NEXT: v_mul_hi_u32 v12, v7, v13
+; GCN-NEXT: v_mul_lo_u32 v13, v7, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v7, v10
+; GCN-NEXT: v_add_co_u32_e32 v14, vcc, v15, v14
+; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v16, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v7, v10
+; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v14, v13
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, v15, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v12, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v6, vcc, v6, v10
+; GCN-NEXT: v_addc_co_u32_e32 v7, vcc, v7, v11, vcc
+; GCN-NEXT: v_ashrrev_i32_e32 v10, 31, v9
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v8, v10
; GCN-NEXT: v_xor_b32_e32 v11, v11, v10
-; GCN-NEXT: v_xor_b32_e32 v10, v13, v10
-; GCN-NEXT: v_cvt_f32_u32_e32 v13, v11
-; GCN-NEXT: v_cvt_f32_u32_e32 v14, v10
-; GCN-NEXT: v_sub_co_u32_e32 v15, vcc, 0, v11
-; GCN-NEXT: v_subb_co_u32_e32 v18, vcc, 0, v10, vcc
-; GCN-NEXT: v_madmk_f32 v13, v14, 0x4f800000, v13
-; GCN-NEXT: v_rcp_f32_e32 v13, v13
-; GCN-NEXT: v_mul_f32_e32 v13, 0x5f7ffffc, v13
-; GCN-NEXT: v_mul_f32_e32 v14, 0x2f800000, v13
-; GCN-NEXT: v_trunc_f32_e32 v14, v14
-; GCN-NEXT: v_madmk_f32 v13, v14, 0xcf800000, v13
-; GCN-NEXT: v_cvt_u32_f32_e32 v14, v14
-; GCN-NEXT: v_cvt_u32_f32_e32 v13, v13
-; GCN-NEXT: v_mul_lo_u32 v20, v15, v14
-; GCN-NEXT: v_mul_hi_u32 v19, v15, v13
-; GCN-NEXT: v_mul_lo_u32 v21, v18, v13
-; GCN-NEXT: v_mul_lo_u32 v22, v15, v13
-; GCN-NEXT: v_add_u32_e32 v19, v19, v20
-; GCN-NEXT: v_add_u32_e32 v19, v19, v21
-; GCN-NEXT: v_mul_lo_u32 v20, v13, v19
-; GCN-NEXT: v_mul_hi_u32 v21, v13, v22
-; GCN-NEXT: v_mul_hi_u32 v23, v13, v19
-; GCN-NEXT: v_mul_hi_u32 v24, v14, v19
-; GCN-NEXT: v_mul_lo_u32 v19, v14, v19
-; GCN-NEXT: v_add_co_u32_e32 v20, vcc, v21, v20
-; GCN-NEXT: v_addc_co_u32_e32 v21, vcc, 0, v23, vcc
-; GCN-NEXT: v_mul_lo_u32 v23, v14, v22
-; GCN-NEXT: v_mul_hi_u32 v22, v14, v22
-; GCN-NEXT: v_add_co_u32_e32 v20, vcc, v20, v23
-; GCN-NEXT: v_addc_co_u32_e32 v20, vcc, v21, v22, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v21, vcc, 0, v24, vcc
-; GCN-NEXT: v_add_co_u32_e32 v19, vcc, v20, v19
-; GCN-NEXT: v_addc_co_u32_e32 v20, vcc, 0, v21, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v19
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, v14, v20, vcc
-; GCN-NEXT: v_mul_lo_u32 v19, v15, v14
-; GCN-NEXT: v_mul_hi_u32 v20, v15, v13
-; GCN-NEXT: v_mul_lo_u32 v18, v18, v13
-; GCN-NEXT: v_mul_lo_u32 v15, v15, v13
-; GCN-NEXT: v_add_u32_e32 v19, v20, v19
-; GCN-NEXT: v_add_u32_e32 v18, v19, v18
-; GCN-NEXT: v_mul_lo_u32 v21, v13, v18
-; GCN-NEXT: v_mul_hi_u32 v22, v13, v15
-; GCN-NEXT: v_mul_hi_u32 v23, v13, v18
-; GCN-NEXT: v_mul_hi_u32 v20, v14, v15
-; GCN-NEXT: v_mul_lo_u32 v15, v14, v15
-; GCN-NEXT: v_mul_hi_u32 v19, v14, v18
-; GCN-NEXT: v_add_co_u32_e32 v21, vcc, v22, v21
-; GCN-NEXT: v_addc_co_u32_e32 v22, vcc, 0, v23, vcc
-; GCN-NEXT: v_mul_lo_u32 v18, v14, v18
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v21, v15
-; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, v22, v20, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v19, vcc, 0, v19, vcc
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v15, v18
-; GCN-NEXT: v_addc_co_u32_e32 v18, vcc, 0, v19, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v15
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, v14, v18, vcc
-; GCN-NEXT: v_ashrrev_i32_e32 v15, 31, v17
-; GCN-NEXT: v_add_co_u32_e32 v18, vcc, v16, v15
-; GCN-NEXT: v_xor_b32_e32 v18, v18, v15
-; GCN-NEXT: v_mul_lo_u32 v19, v18, v14
-; GCN-NEXT: v_mul_hi_u32 v20, v18, v13
-; GCN-NEXT: v_mul_hi_u32 v21, v18, v14
-; GCN-NEXT: v_addc_co_u32_e32 v17, vcc, v17, v15, vcc
-; GCN-NEXT: v_xor_b32_e32 v17, v17, v15
-; GCN-NEXT: v_add_co_u32_e32 v19, vcc, v20, v19
-; GCN-NEXT: v_addc_co_u32_e32 v20, vcc, 0, v21, vcc
-; GCN-NEXT: v_mul_lo_u32 v21, v17, v13
-; GCN-NEXT: v_mul_hi_u32 v13, v17, v13
-; GCN-NEXT: v_mul_hi_u32 v22, v17, v14
-; GCN-NEXT: v_mul_lo_u32 v14, v17, v14
-; GCN-NEXT: v_add_co_u32_e32 v19, vcc, v19, v21
-; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, v20, v13, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v19, vcc, 0, v22, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v14
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, 0, v19, vcc
-; GCN-NEXT: v_mul_lo_u32 v14, v11, v14
-; GCN-NEXT: v_mul_hi_u32 v19, v11, v13
-; GCN-NEXT: v_mul_lo_u32 v20, v10, v13
-; GCN-NEXT: v_mul_lo_u32 v13, v11, v13
-; GCN-NEXT: v_add_u32_e32 v14, v19, v14
-; GCN-NEXT: v_add_u32_e32 v14, v14, v20
-; GCN-NEXT: v_sub_u32_e32 v19, v17, v14
-; GCN-NEXT: v_sub_co_u32_e32 v13, vcc, v18, v13
-; GCN-NEXT: v_subb_co_u32_e64 v18, s[0:1], v19, v10, vcc
-; GCN-NEXT: v_sub_co_u32_e64 v19, s[0:1], v13, v11
-; GCN-NEXT: v_subbrev_co_u32_e64 v20, s[2:3], 0, v18, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v20, v10
-; GCN-NEXT: v_cndmask_b32_e64 v21, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v19, v11
-; GCN-NEXT: v_subb_co_u32_e32 v14, vcc, v17, v14, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v22, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], v20, v10
-; GCN-NEXT: v_subb_co_u32_e64 v18, s[0:1], v18, v10, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v14, v10
-; GCN-NEXT: v_cndmask_b32_e64 v21, v21, v22, s[2:3]
-; GCN-NEXT: v_sub_co_u32_e64 v22, s[0:1], v19, v11
-; GCN-NEXT: v_cndmask_b32_e64 v17, 0, -1, vcc
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v13, v11
-; GCN-NEXT: v_subbrev_co_u32_e64 v18, s[0:1], 0, v18, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e64 v11, 0, -1, vcc
-; GCN-NEXT: v_cmp_eq_u32_e32 vcc, v14, v10
-; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v21
-; GCN-NEXT: v_cndmask_b32_e32 v10, v17, v11, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v19, v19, v22, s[0:1]
-; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v10
-; GCN-NEXT: v_cndmask_b32_e64 v18, v20, v18, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e32 v11, v13, v19, vcc
-; GCN-NEXT: v_cndmask_b32_e32 v10, v14, v18, vcc
-; GCN-NEXT: v_xor_b32_e32 v11, v11, v15
-; GCN-NEXT: v_xor_b32_e32 v13, v10, v15
-; GCN-NEXT: v_sub_co_u32_e32 v10, vcc, v11, v15
-; GCN-NEXT: v_subb_co_u32_e32 v11, vcc, v13, v15, vcc
+; GCN-NEXT: v_mul_lo_u32 v12, v11, v7
+; GCN-NEXT: v_mul_hi_u32 v13, v11, v6
+; GCN-NEXT: v_mul_hi_u32 v14, v11, v7
+; GCN-NEXT: v_addc_co_u32_e32 v9, vcc, v9, v10, vcc
+; GCN-NEXT: v_xor_b32_e32 v9, v9, v10
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v13, v12
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v14, vcc
+; GCN-NEXT: v_mul_lo_u32 v14, v9, v6
+; GCN-NEXT: v_mul_hi_u32 v6, v9, v6
+; GCN-NEXT: v_mul_hi_u32 v15, v9, v7
+; GCN-NEXT: v_mul_lo_u32 v7, v9, v7
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v12, v14
+; GCN-NEXT: v_addc_co_u32_e32 v6, vcc, v13, v6, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v15, vcc
+; GCN-NEXT: v_add_co_u32_e32 v6, vcc, v6, v7
+; GCN-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v12, vcc
+; GCN-NEXT: v_mul_lo_u32 v7, s6, v7
+; GCN-NEXT: v_mul_hi_u32 v12, s6, v6
+; GCN-NEXT: v_mul_lo_u32 v13, s7, v6
+; GCN-NEXT: v_mul_lo_u32 v6, s6, v6
+; GCN-NEXT: v_add_u32_e32 v7, v12, v7
+; GCN-NEXT: v_add_u32_e32 v7, v7, v13
+; GCN-NEXT: v_sub_u32_e32 v12, v9, v7
+; GCN-NEXT: v_mov_b32_e32 v13, s7
+; GCN-NEXT: v_sub_co_u32_e32 v6, vcc, v11, v6
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v12, v13, vcc
+; GCN-NEXT: v_subrev_co_u32_e64 v12, s[0:1], s6, v6
+; GCN-NEXT: v_subbrev_co_u32_e64 v14, s[2:3], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_cndmask_b32_e64 v15, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s6, v12
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v11, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v16, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_subrev_co_u32_e64 v13, s[0:1], s6, v12
+; GCN-NEXT: v_subb_co_u32_e32 v7, vcc, v9, v7, vcc
+; GCN-NEXT: v_cndmask_b32_e64 v15, v15, v16, s[2:3]
+; GCN-NEXT: v_subbrev_co_u32_e64 v11, s[0:1], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s7, v7
+; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v15
+; GCN-NEXT: v_cndmask_b32_e64 v9, 0, -1, vcc
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s6, v6
+; GCN-NEXT: v_cndmask_b32_e64 v12, v12, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v13, 0, -1, vcc
+; GCN-NEXT: v_cmp_eq_u32_e32 vcc, s7, v7
+; GCN-NEXT: v_cndmask_b32_e32 v9, v9, v13, vcc
+; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v9
+; GCN-NEXT: v_cndmask_b32_e64 v11, v14, v11, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e32 v6, v6, v12, vcc
+; GCN-NEXT: v_cndmask_b32_e32 v7, v7, v11, vcc
+; GCN-NEXT: v_xor_b32_e32 v6, v6, v10
+; GCN-NEXT: v_xor_b32_e32 v7, v7, v10
+; GCN-NEXT: v_sub_co_u32_e32 v6, vcc, v6, v10
+; GCN-NEXT: v_subb_co_u32_e32 v7, vcc, v7, v10, vcc
; GCN-NEXT: s_cbranch_execnz .LBB12_6
; GCN-NEXT: .LBB12_5:
-; GCN-NEXT: v_cvt_f32_u32_e32 v10, v12
-; GCN-NEXT: v_sub_u32_e32 v11, 0, v12
-; GCN-NEXT: v_rcp_iflag_f32_e32 v10, v10
-; GCN-NEXT: v_mul_f32_e32 v10, 0x4f7ffffe, v10
-; GCN-NEXT: v_cvt_u32_f32_e32 v10, v10
-; GCN-NEXT: v_mul_lo_u32 v11, v11, v10
-; GCN-NEXT: v_mul_hi_u32 v11, v10, v11
-; GCN-NEXT: v_add_u32_e32 v10, v10, v11
-; GCN-NEXT: v_mul_hi_u32 v10, v16, v10
-; GCN-NEXT: v_mul_lo_u32 v10, v10, v12
-; GCN-NEXT: v_sub_u32_e32 v10, v16, v10
-; GCN-NEXT: v_sub_u32_e32 v11, v10, v12
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v10, v12
-; GCN-NEXT: v_cndmask_b32_e32 v10, v10, v11, vcc
-; GCN-NEXT: v_sub_u32_e32 v11, v10, v12
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v10, v12
-; GCN-NEXT: v_cndmask_b32_e32 v10, v10, v11, vcc
-; GCN-NEXT: v_mov_b32_e32 v11, 0
+; GCN-NEXT: v_cvt_f32_u32_e32 v6, s14
+; GCN-NEXT: s_sub_i32 s0, 0, s14
+; GCN-NEXT: v_rcp_iflag_f32_e32 v6, v6
+; GCN-NEXT: v_mul_f32_e32 v6, 0x4f7ffffe, v6
+; GCN-NEXT: v_cvt_u32_f32_e32 v6, v6
+; GCN-NEXT: v_mul_lo_u32 v7, s0, v6
+; GCN-NEXT: v_mul_hi_u32 v7, v6, v7
+; GCN-NEXT: v_add_u32_e32 v6, v6, v7
+; GCN-NEXT: v_mul_hi_u32 v6, v8, v6
+; GCN-NEXT: v_mul_lo_u32 v6, v6, s14
+; GCN-NEXT: v_sub_u32_e32 v6, v8, v6
+; GCN-NEXT: v_subrev_u32_e32 v7, s14, v6
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s14, v6
+; GCN-NEXT: v_cndmask_b32_e32 v6, v6, v7, vcc
+; GCN-NEXT: v_subrev_u32_e32 v7, s14, v6
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s14, v6
+; GCN-NEXT: v_cndmask_b32_e32 v6, v6, v7, vcc
+; GCN-NEXT: v_mov_b32_e32 v7, 0
; GCN-NEXT: .LBB12_6:
; GCN-NEXT: s_waitcnt vmcnt(0)
-; GCN-NEXT: v_or_b32_e32 v13, v5, v1
-; GCN-NEXT: v_mov_b32_e32 v12, 0
-; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[12:13]
+; GCN-NEXT: v_or_b32_e32 v9, s13, v1
+; GCN-NEXT: v_mov_b32_e32 v8, 0
+; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[8:9]
; GCN-NEXT: s_cbranch_vccz .LBB12_15
; GCN-NEXT: ; %bb.7:
-; GCN-NEXT: v_ashrrev_i32_e32 v13, 31, v1
-; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v0, v13
-; GCN-NEXT: v_addc_co_u32_e32 v1, vcc, v1, v13, vcc
-; GCN-NEXT: v_xor_b32_e32 v12, v12, v13
-; GCN-NEXT: v_xor_b32_e32 v1, v1, v13
-; GCN-NEXT: v_cvt_f32_u32_e32 v13, v12
-; GCN-NEXT: v_cvt_f32_u32_e32 v14, v1
-; GCN-NEXT: v_sub_co_u32_e32 v15, vcc, 0, v12
-; GCN-NEXT: v_subb_co_u32_e32 v16, vcc, 0, v1, vcc
-; GCN-NEXT: v_madmk_f32 v13, v14, 0x4f800000, v13
-; GCN-NEXT: v_rcp_f32_e32 v13, v13
-; GCN-NEXT: v_mul_f32_e32 v13, 0x5f7ffffc, v13
-; GCN-NEXT: v_mul_f32_e32 v14, 0x2f800000, v13
-; GCN-NEXT: v_trunc_f32_e32 v14, v14
-; GCN-NEXT: v_madmk_f32 v13, v14, 0xcf800000, v13
-; GCN-NEXT: v_cvt_u32_f32_e32 v14, v14
-; GCN-NEXT: v_cvt_u32_f32_e32 v13, v13
-; GCN-NEXT: v_mul_lo_u32 v18, v15, v14
-; GCN-NEXT: v_mul_hi_u32 v17, v15, v13
-; GCN-NEXT: v_mul_lo_u32 v19, v16, v13
-; GCN-NEXT: v_mul_lo_u32 v20, v15, v13
-; GCN-NEXT: v_add_u32_e32 v17, v17, v18
-; GCN-NEXT: v_add_u32_e32 v17, v17, v19
-; GCN-NEXT: v_mul_lo_u32 v18, v13, v17
-; GCN-NEXT: v_mul_hi_u32 v19, v13, v20
-; GCN-NEXT: v_mul_hi_u32 v21, v13, v17
-; GCN-NEXT: v_mul_hi_u32 v22, v14, v17
-; GCN-NEXT: v_mul_lo_u32 v17, v14, v17
-; GCN-NEXT: v_add_co_u32_e32 v18, vcc, v19, v18
-; GCN-NEXT: v_addc_co_u32_e32 v19, vcc, 0, v21, vcc
-; GCN-NEXT: v_mul_lo_u32 v21, v14, v20
-; GCN-NEXT: v_mul_hi_u32 v20, v14, v20
-; GCN-NEXT: v_add_co_u32_e32 v18, vcc, v18, v21
-; GCN-NEXT: v_addc_co_u32_e32 v18, vcc, v19, v20, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v19, vcc, 0, v22, vcc
-; GCN-NEXT: v_add_co_u32_e32 v17, vcc, v18, v17
-; GCN-NEXT: v_addc_co_u32_e32 v18, vcc, 0, v19, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v17
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, v14, v18, vcc
-; GCN-NEXT: v_mul_lo_u32 v17, v15, v14
-; GCN-NEXT: v_mul_hi_u32 v18, v15, v13
-; GCN-NEXT: v_mul_lo_u32 v16, v16, v13
-; GCN-NEXT: v_mul_lo_u32 v15, v15, v13
-; GCN-NEXT: v_add_u32_e32 v17, v18, v17
-; GCN-NEXT: v_add_u32_e32 v16, v17, v16
-; GCN-NEXT: v_mul_lo_u32 v19, v13, v16
-; GCN-NEXT: v_mul_hi_u32 v20, v13, v15
-; GCN-NEXT: v_mul_hi_u32 v21, v13, v16
-; GCN-NEXT: v_mul_hi_u32 v18, v14, v15
-; GCN-NEXT: v_mul_lo_u32 v15, v14, v15
-; GCN-NEXT: v_mul_hi_u32 v17, v14, v16
-; GCN-NEXT: v_add_co_u32_e32 v19, vcc, v20, v19
-; GCN-NEXT: v_addc_co_u32_e32 v20, vcc, 0, v21, vcc
-; GCN-NEXT: v_mul_lo_u32 v16, v14, v16
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v19, v15
-; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, v20, v18, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v17, vcc, 0, v17, vcc
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v15, v16
-; GCN-NEXT: v_addc_co_u32_e32 v16, vcc, 0, v17, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v15
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, v14, v16, vcc
-; GCN-NEXT: v_ashrrev_i32_e32 v15, 31, v5
-; GCN-NEXT: v_add_co_u32_e32 v16, vcc, v4, v15
-; GCN-NEXT: v_xor_b32_e32 v16, v16, v15
-; GCN-NEXT: v_mul_lo_u32 v17, v16, v14
-; GCN-NEXT: v_mul_hi_u32 v18, v16, v13
-; GCN-NEXT: v_mul_hi_u32 v19, v16, v14
-; GCN-NEXT: v_addc_co_u32_e32 v5, vcc, v5, v15, vcc
-; GCN-NEXT: v_xor_b32_e32 v5, v5, v15
-; GCN-NEXT: v_add_co_u32_e32 v17, vcc, v18, v17
-; GCN-NEXT: v_addc_co_u32_e32 v18, vcc, 0, v19, vcc
-; GCN-NEXT: v_mul_lo_u32 v19, v5, v13
-; GCN-NEXT: v_mul_hi_u32 v13, v5, v13
-; GCN-NEXT: v_mul_hi_u32 v20, v5, v14
-; GCN-NEXT: v_mul_lo_u32 v14, v5, v14
-; GCN-NEXT: v_add_co_u32_e32 v17, vcc, v17, v19
-; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, v18, v13, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v17, vcc, 0, v20, vcc
-; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v13, v14
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, 0, v17, vcc
-; GCN-NEXT: v_mul_lo_u32 v14, v12, v14
-; GCN-NEXT: v_mul_hi_u32 v17, v12, v13
-; GCN-NEXT: v_mul_lo_u32 v18, v1, v13
-; GCN-NEXT: v_mul_lo_u32 v13, v12, v13
-; GCN-NEXT: v_add_u32_e32 v14, v17, v14
-; GCN-NEXT: v_add_u32_e32 v14, v14, v18
-; GCN-NEXT: v_sub_u32_e32 v17, v5, v14
-; GCN-NEXT: v_sub_co_u32_e32 v13, vcc, v16, v13
-; GCN-NEXT: v_subb_co_u32_e64 v16, s[0:1], v17, v1, vcc
-; GCN-NEXT: v_sub_co_u32_e64 v17, s[0:1], v13, v12
-; GCN-NEXT: v_subbrev_co_u32_e64 v18, s[2:3], 0, v16, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v18, v1
-; GCN-NEXT: v_cndmask_b32_e64 v19, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v17, v12
-; GCN-NEXT: v_subb_co_u32_e32 v5, vcc, v5, v14, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v20, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], v18, v1
-; GCN-NEXT: v_subb_co_u32_e64 v16, s[0:1], v16, v1, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v5, v1
-; GCN-NEXT: v_cndmask_b32_e64 v19, v19, v20, s[2:3]
-; GCN-NEXT: v_sub_co_u32_e64 v20, s[0:1], v17, v12
-; GCN-NEXT: v_cndmask_b32_e64 v14, 0, -1, vcc
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v13, v12
-; GCN-NEXT: v_subbrev_co_u32_e64 v16, s[0:1], 0, v16, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e64 v12, 0, -1, vcc
-; GCN-NEXT: v_cmp_eq_u32_e32 vcc, v5, v1
-; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v19
-; GCN-NEXT: v_cndmask_b32_e32 v1, v14, v12, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v17, v17, v20, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e64 v16, v18, v16, s[0:1]
-; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v1
-; GCN-NEXT: v_cndmask_b32_e32 v1, v5, v16, vcc
-; GCN-NEXT: v_cndmask_b32_e32 v5, v13, v17, vcc
-; GCN-NEXT: v_xor_b32_e32 v5, v5, v15
-; GCN-NEXT: v_xor_b32_e32 v1, v1, v15
-; GCN-NEXT: v_sub_co_u32_e32 v12, vcc, v5, v15
-; GCN-NEXT: v_subb_co_u32_e32 v13, vcc, v1, v15, vcc
+; GCN-NEXT: s_ashr_i32 s0, s13, 31
+; GCN-NEXT: s_add_u32 s2, s12, s0
+; GCN-NEXT: s_mov_b32 s1, s0
+; GCN-NEXT: s_addc_u32 s3, s13, s0
+; GCN-NEXT: s_xor_b64 s[6:7], s[2:3], s[0:1]
+; GCN-NEXT: v_cvt_f32_u32_e32 v8, s6
+; GCN-NEXT: v_cvt_f32_u32_e32 v9, s7
+; GCN-NEXT: s_sub_u32 s0, 0, s6
+; GCN-NEXT: s_subb_u32 s1, 0, s7
+; GCN-NEXT: v_madmk_f32 v8, v9, 0x4f800000, v8
+; GCN-NEXT: v_rcp_f32_e32 v8, v8
+; GCN-NEXT: v_mul_f32_e32 v8, 0x5f7ffffc, v8
+; GCN-NEXT: v_mul_f32_e32 v9, 0x2f800000, v8
+; GCN-NEXT: v_trunc_f32_e32 v9, v9
+; GCN-NEXT: v_madmk_f32 v8, v9, 0xcf800000, v8
+; GCN-NEXT: v_cvt_u32_f32_e32 v9, v9
+; GCN-NEXT: v_cvt_u32_f32_e32 v8, v8
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v9
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v8
+; GCN-NEXT: v_mul_lo_u32 v13, s1, v8
+; GCN-NEXT: v_mul_lo_u32 v12, s0, v8
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v8, v12
+; GCN-NEXT: v_mul_lo_u32 v13, v8, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v8, v10
+; GCN-NEXT: v_mul_lo_u32 v14, v9, v12
+; GCN-NEXT: v_mul_hi_u32 v12, v9, v12
+; GCN-NEXT: v_mul_hi_u32 v16, v9, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v13
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v15, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v9, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v14
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, v13, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v16, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v11, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v12, vcc
+; GCN-NEXT: v_add_co_u32_e32 v8, vcc, v8, v10
+; GCN-NEXT: v_addc_co_u32_e32 v9, vcc, v9, v11, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v9
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v8
+; GCN-NEXT: v_mul_lo_u32 v12, s1, v8
+; GCN-NEXT: v_mul_lo_u32 v13, s0, v8
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v12
+; GCN-NEXT: v_mul_lo_u32 v14, v8, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v8, v13
+; GCN-NEXT: v_mul_hi_u32 v16, v8, v10
+; GCN-NEXT: v_mul_hi_u32 v12, v9, v13
+; GCN-NEXT: v_mul_lo_u32 v13, v9, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v9, v10
+; GCN-NEXT: v_add_co_u32_e32 v14, vcc, v15, v14
+; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v16, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v9, v10
+; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v14, v13
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, v15, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v12, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v8, vcc, v8, v10
+; GCN-NEXT: v_addc_co_u32_e32 v9, vcc, v9, v11, vcc
+; GCN-NEXT: v_ashrrev_i32_e32 v10, 31, v1
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v0, v10
+; GCN-NEXT: v_xor_b32_e32 v11, v11, v10
+; GCN-NEXT: v_mul_lo_u32 v12, v11, v9
+; GCN-NEXT: v_mul_hi_u32 v13, v11, v8
+; GCN-NEXT: v_mul_hi_u32 v14, v11, v9
+; GCN-NEXT: v_addc_co_u32_e32 v1, vcc, v1, v10, vcc
+; GCN-NEXT: v_xor_b32_e32 v1, v1, v10
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v13, v12
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v14, vcc
+; GCN-NEXT: v_mul_lo_u32 v14, v1, v8
+; GCN-NEXT: v_mul_hi_u32 v8, v1, v8
+; GCN-NEXT: v_mul_hi_u32 v15, v1, v9
+; GCN-NEXT: v_mul_lo_u32 v9, v1, v9
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v12, v14
+; GCN-NEXT: v_addc_co_u32_e32 v8, vcc, v13, v8, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v15, vcc
+; GCN-NEXT: v_add_co_u32_e32 v8, vcc, v8, v9
+; GCN-NEXT: v_addc_co_u32_e32 v9, vcc, 0, v12, vcc
+; GCN-NEXT: v_mul_lo_u32 v9, s6, v9
+; GCN-NEXT: v_mul_hi_u32 v12, s6, v8
+; GCN-NEXT: v_mul_lo_u32 v13, s7, v8
+; GCN-NEXT: v_mul_lo_u32 v8, s6, v8
+; GCN-NEXT: v_add_u32_e32 v9, v12, v9
+; GCN-NEXT: v_add_u32_e32 v9, v9, v13
+; GCN-NEXT: v_sub_u32_e32 v12, v1, v9
+; GCN-NEXT: v_mov_b32_e32 v13, s7
+; GCN-NEXT: v_sub_co_u32_e32 v8, vcc, v11, v8
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v12, v13, vcc
+; GCN-NEXT: v_subrev_co_u32_e64 v12, s[0:1], s6, v8
+; GCN-NEXT: v_subbrev_co_u32_e64 v14, s[2:3], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_cndmask_b32_e64 v15, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s6, v12
+; GCN-NEXT: v_subb_co_u32_e64 v11, s[0:1], v11, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v16, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_subrev_co_u32_e64 v13, s[0:1], s6, v12
+; GCN-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v9, vcc
+; GCN-NEXT: v_cndmask_b32_e64 v15, v15, v16, s[2:3]
+; GCN-NEXT: v_subbrev_co_u32_e64 v11, s[0:1], 0, v11, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s7, v1
+; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v15
+; GCN-NEXT: v_cndmask_b32_e64 v9, 0, -1, vcc
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s6, v8
+; GCN-NEXT: v_cndmask_b32_e64 v12, v12, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v13, 0, -1, vcc
+; GCN-NEXT: v_cmp_eq_u32_e32 vcc, s7, v1
+; GCN-NEXT: v_cndmask_b32_e32 v9, v9, v13, vcc
+; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v9
+; GCN-NEXT: v_cndmask_b32_e64 v11, v14, v11, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e32 v8, v8, v12, vcc
+; GCN-NEXT: v_cndmask_b32_e32 v1, v1, v11, vcc
+; GCN-NEXT: v_xor_b32_e32 v8, v8, v10
+; GCN-NEXT: v_xor_b32_e32 v1, v1, v10
+; GCN-NEXT: v_sub_co_u32_e32 v8, vcc, v8, v10
+; GCN-NEXT: v_subb_co_u32_e32 v9, vcc, v1, v10, vcc
; GCN-NEXT: s_cbranch_execnz .LBB12_9
; GCN-NEXT: .LBB12_8:
-; GCN-NEXT: v_cvt_f32_u32_e32 v1, v0
-; GCN-NEXT: v_sub_u32_e32 v5, 0, v0
-; GCN-NEXT: v_mov_b32_e32 v13, 0
+; GCN-NEXT: v_cvt_f32_u32_e32 v1, s12
+; GCN-NEXT: s_sub_i32 s0, 0, s12
+; GCN-NEXT: v_mov_b32_e32 v9, 0
; GCN-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GCN-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GCN-NEXT: v_cvt_u32_f32_e32 v1, v1
-; GCN-NEXT: v_mul_lo_u32 v5, v5, v1
-; GCN-NEXT: v_mul_hi_u32 v5, v1, v5
-; GCN-NEXT: v_add_u32_e32 v1, v1, v5
-; GCN-NEXT: v_mul_hi_u32 v1, v4, v1
-; GCN-NEXT: v_mul_lo_u32 v1, v1, v0
-; GCN-NEXT: v_sub_u32_e32 v1, v4, v1
-; GCN-NEXT: v_sub_u32_e32 v4, v1, v0
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v1, v0
-; GCN-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
-; GCN-NEXT: v_sub_u32_e32 v4, v1, v0
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v1, v0
-; GCN-NEXT: v_cndmask_b32_e32 v12, v1, v4, vcc
+; GCN-NEXT: v_mul_lo_u32 v8, s0, v1
+; GCN-NEXT: v_mul_hi_u32 v8, v1, v8
+; GCN-NEXT: v_add_u32_e32 v1, v1, v8
+; GCN-NEXT: v_mul_hi_u32 v1, v0, v1
+; GCN-NEXT: v_mul_lo_u32 v1, v1, s12
+; GCN-NEXT: v_sub_u32_e32 v0, v0, v1
+; GCN-NEXT: v_subrev_u32_e32 v1, s12, v0
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s12, v0
+; GCN-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
+; GCN-NEXT: v_subrev_u32_e32 v1, s12, v0
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s12, v0
+; GCN-NEXT: v_cndmask_b32_e32 v8, v0, v1, vcc
; GCN-NEXT: .LBB12_9:
-; GCN-NEXT: v_or_b32_e32 v1, v7, v3
+; GCN-NEXT: v_or_b32_e32 v1, s11, v3
; GCN-NEXT: v_mov_b32_e32 v0, 0
; GCN-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1]
; GCN-NEXT: s_cbranch_vccz .LBB12_16
; GCN-NEXT: ; %bb.10:
-; GCN-NEXT: v_ashrrev_i32_e32 v0, 31, v3
-; GCN-NEXT: v_add_co_u32_e32 v1, vcc, v2, v0
-; GCN-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v0, vcc
-; GCN-NEXT: v_xor_b32_e32 v1, v1, v0
-; GCN-NEXT: v_xor_b32_e32 v0, v3, v0
-; GCN-NEXT: v_cvt_f32_u32_e32 v3, v1
-; GCN-NEXT: v_cvt_f32_u32_e32 v4, v0
-; GCN-NEXT: v_sub_co_u32_e32 v5, vcc, 0, v1
-; GCN-NEXT: v_subb_co_u32_e32 v14, vcc, 0, v0, vcc
-; GCN-NEXT: v_madmk_f32 v3, v4, 0x4f800000, v3
-; GCN-NEXT: v_rcp_f32_e32 v3, v3
-; GCN-NEXT: v_mul_f32_e32 v3, 0x5f7ffffc, v3
-; GCN-NEXT: v_mul_f32_e32 v4, 0x2f800000, v3
-; GCN-NEXT: v_trunc_f32_e32 v4, v4
-; GCN-NEXT: v_madmk_f32 v3, v4, 0xcf800000, v3
-; GCN-NEXT: v_cvt_u32_f32_e32 v4, v4
-; GCN-NEXT: v_cvt_u32_f32_e32 v3, v3
-; GCN-NEXT: v_mul_lo_u32 v16, v5, v4
-; GCN-NEXT: v_mul_hi_u32 v15, v5, v3
-; GCN-NEXT: v_mul_lo_u32 v17, v14, v3
-; GCN-NEXT: v_mul_lo_u32 v18, v5, v3
-; GCN-NEXT: v_add_u32_e32 v15, v15, v16
-; GCN-NEXT: v_add_u32_e32 v15, v15, v17
-; GCN-NEXT: v_mul_lo_u32 v16, v3, v15
-; GCN-NEXT: v_mul_hi_u32 v17, v3, v18
-; GCN-NEXT: v_mul_hi_u32 v19, v3, v15
-; GCN-NEXT: v_mul_hi_u32 v20, v4, v15
-; GCN-NEXT: v_mul_lo_u32 v15, v4, v15
-; GCN-NEXT: v_add_co_u32_e32 v16, vcc, v17, v16
-; GCN-NEXT: v_addc_co_u32_e32 v17, vcc, 0, v19, vcc
-; GCN-NEXT: v_mul_lo_u32 v19, v4, v18
-; GCN-NEXT: v_mul_hi_u32 v18, v4, v18
-; GCN-NEXT: v_add_co_u32_e32 v16, vcc, v16, v19
-; GCN-NEXT: v_addc_co_u32_e32 v16, vcc, v17, v18, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v17, vcc, 0, v20, vcc
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v16, v15
-; GCN-NEXT: v_addc_co_u32_e32 v16, vcc, 0, v17, vcc
-; GCN-NEXT: v_add_co_u32_e32 v3, vcc, v3, v15
-; GCN-NEXT: v_addc_co_u32_e32 v4, vcc, v4, v16, vcc
-; GCN-NEXT: v_mul_lo_u32 v15, v5, v4
-; GCN-NEXT: v_mul_hi_u32 v16, v5, v3
-; GCN-NEXT: v_mul_lo_u32 v14, v14, v3
-; GCN-NEXT: v_mul_lo_u32 v5, v5, v3
-; GCN-NEXT: v_add_u32_e32 v15, v16, v15
-; GCN-NEXT: v_add_u32_e32 v14, v15, v14
-; GCN-NEXT: v_mul_lo_u32 v17, v3, v14
-; GCN-NEXT: v_mul_hi_u32 v18, v3, v5
-; GCN-NEXT: v_mul_hi_u32 v19, v3, v14
-; GCN-NEXT: v_mul_hi_u32 v16, v4, v5
-; GCN-NEXT: v_mul_lo_u32 v5, v4, v5
-; GCN-NEXT: v_mul_hi_u32 v15, v4, v14
-; GCN-NEXT: v_add_co_u32_e32 v17, vcc, v18, v17
-; GCN-NEXT: v_addc_co_u32_e32 v18, vcc, 0, v19, vcc
-; GCN-NEXT: v_mul_lo_u32 v14, v4, v14
-; GCN-NEXT: v_add_co_u32_e32 v5, vcc, v17, v5
-; GCN-NEXT: v_addc_co_u32_e32 v5, vcc, v18, v16, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v15, vcc
-; GCN-NEXT: v_add_co_u32_e32 v5, vcc, v5, v14
-; GCN-NEXT: v_addc_co_u32_e32 v14, vcc, 0, v15, vcc
-; GCN-NEXT: v_add_co_u32_e32 v3, vcc, v3, v5
-; GCN-NEXT: v_addc_co_u32_e32 v4, vcc, v4, v14, vcc
-; GCN-NEXT: v_ashrrev_i32_e32 v5, 31, v7
-; GCN-NEXT: v_add_co_u32_e32 v14, vcc, v6, v5
-; GCN-NEXT: v_xor_b32_e32 v14, v14, v5
-; GCN-NEXT: v_mul_lo_u32 v15, v14, v4
-; GCN-NEXT: v_mul_hi_u32 v16, v14, v3
-; GCN-NEXT: v_mul_hi_u32 v17, v14, v4
-; GCN-NEXT: v_addc_co_u32_e32 v7, vcc, v7, v5, vcc
-; GCN-NEXT: v_xor_b32_e32 v7, v7, v5
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v16, v15
-; GCN-NEXT: v_addc_co_u32_e32 v16, vcc, 0, v17, vcc
-; GCN-NEXT: v_mul_lo_u32 v17, v7, v3
-; GCN-NEXT: v_mul_hi_u32 v3, v7, v3
-; GCN-NEXT: v_mul_hi_u32 v18, v7, v4
-; GCN-NEXT: v_mul_lo_u32 v4, v7, v4
-; GCN-NEXT: v_add_co_u32_e32 v15, vcc, v15, v17
-; GCN-NEXT: v_addc_co_u32_e32 v3, vcc, v16, v3, vcc
-; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v18, vcc
-; GCN-NEXT: v_add_co_u32_e32 v3, vcc, v3, v4
-; GCN-NEXT: v_addc_co_u32_e32 v4, vcc, 0, v15, vcc
-; GCN-NEXT: v_mul_lo_u32 v4, v1, v4
-; GCN-NEXT: v_mul_hi_u32 v15, v1, v3
-; GCN-NEXT: v_mul_lo_u32 v16, v0, v3
-; GCN-NEXT: v_mul_lo_u32 v3, v1, v3
-; GCN-NEXT: v_add_u32_e32 v4, v15, v4
-; GCN-NEXT: v_add_u32_e32 v4, v4, v16
-; GCN-NEXT: v_sub_u32_e32 v15, v7, v4
-; GCN-NEXT: v_sub_co_u32_e32 v3, vcc, v14, v3
-; GCN-NEXT: v_subb_co_u32_e64 v14, s[0:1], v15, v0, vcc
-; GCN-NEXT: v_sub_co_u32_e64 v15, s[0:1], v3, v1
-; GCN-NEXT: v_subbrev_co_u32_e64 v16, s[2:3], 0, v14, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v16, v0
-; GCN-NEXT: v_cndmask_b32_e64 v17, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_ge_u32_e64 s[2:3], v15, v1
-; GCN-NEXT: v_subb_co_u32_e32 v4, vcc, v7, v4, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v18, 0, -1, s[2:3]
-; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], v16, v0
-; GCN-NEXT: v_subb_co_u32_e64 v14, s[0:1], v14, v0, s[0:1]
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v4, v0
-; GCN-NEXT: v_cndmask_b32_e64 v17, v17, v18, s[2:3]
-; GCN-NEXT: v_sub_co_u32_e64 v18, s[0:1], v15, v1
-; GCN-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v3, v1
-; GCN-NEXT: v_subbrev_co_u32_e64 v14, s[0:1], 0, v14, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e64 v1, 0, -1, vcc
-; GCN-NEXT: v_cmp_eq_u32_e32 vcc, v4, v0
-; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v17
-; GCN-NEXT: v_cndmask_b32_e32 v0, v7, v1, vcc
-; GCN-NEXT: v_cndmask_b32_e64 v15, v15, v18, s[0:1]
-; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
-; GCN-NEXT: v_cndmask_b32_e64 v14, v16, v14, s[0:1]
-; GCN-NEXT: v_cndmask_b32_e32 v1, v3, v15, vcc
-; GCN-NEXT: v_cndmask_b32_e32 v0, v4, v14, vcc
-; GCN-NEXT: v_xor_b32_e32 v1, v1, v5
-; GCN-NEXT: v_xor_b32_e32 v0, v0, v5
-; GCN-NEXT: v_sub_co_u32_e32 v14, vcc, v1, v5
-; GCN-NEXT: v_subb_co_u32_e32 v15, vcc, v0, v5, vcc
+; GCN-NEXT: s_ashr_i32 s0, s11, 31
+; GCN-NEXT: s_add_u32 s2, s10, s0
+; GCN-NEXT: s_mov_b32 s1, s0
+; GCN-NEXT: s_addc_u32 s3, s11, s0
+; GCN-NEXT: s_xor_b64 s[6:7], s[2:3], s[0:1]
+; GCN-NEXT: v_cvt_f32_u32_e32 v0, s6
+; GCN-NEXT: v_cvt_f32_u32_e32 v1, s7
+; GCN-NEXT: s_sub_u32 s0, 0, s6
+; GCN-NEXT: s_subb_u32 s1, 0, s7
+; GCN-NEXT: v_madmk_f32 v0, v1, 0x4f800000, v0
+; GCN-NEXT: v_rcp_f32_e32 v0, v0
+; GCN-NEXT: v_mul_f32_e32 v0, 0x5f7ffffc, v0
+; GCN-NEXT: v_mul_f32_e32 v1, 0x2f800000, v0
+; GCN-NEXT: v_trunc_f32_e32 v1, v1
+; GCN-NEXT: v_madmk_f32 v0, v1, 0xcf800000, v0
+; GCN-NEXT: v_cvt_u32_f32_e32 v1, v1
+; GCN-NEXT: v_cvt_u32_f32_e32 v0, v0
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v1
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v0
+; GCN-NEXT: v_mul_lo_u32 v13, s1, v0
+; GCN-NEXT: v_mul_lo_u32 v12, s0, v0
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v0, v12
+; GCN-NEXT: v_mul_lo_u32 v13, v0, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v0, v10
+; GCN-NEXT: v_mul_lo_u32 v14, v1, v12
+; GCN-NEXT: v_mul_hi_u32 v12, v1, v12
+; GCN-NEXT: v_mul_hi_u32 v16, v1, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v13
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v15, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v1, v10
+; GCN-NEXT: v_add_co_u32_e32 v11, vcc, v11, v14
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, v13, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v16, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v11, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v12, vcc
+; GCN-NEXT: v_add_co_u32_e32 v0, vcc, v0, v10
+; GCN-NEXT: v_addc_co_u32_e32 v1, vcc, v1, v11, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, s0, v1
+; GCN-NEXT: v_mul_hi_u32 v11, s0, v0
+; GCN-NEXT: v_mul_lo_u32 v12, s1, v0
+; GCN-NEXT: v_mul_lo_u32 v13, s0, v0
+; GCN-NEXT: v_add_u32_e32 v10, v11, v10
+; GCN-NEXT: v_add_u32_e32 v10, v10, v12
+; GCN-NEXT: v_mul_lo_u32 v14, v0, v10
+; GCN-NEXT: v_mul_hi_u32 v15, v0, v13
+; GCN-NEXT: v_mul_hi_u32 v16, v0, v10
+; GCN-NEXT: v_mul_hi_u32 v12, v1, v13
+; GCN-NEXT: v_mul_lo_u32 v13, v1, v13
+; GCN-NEXT: v_mul_hi_u32 v11, v1, v10
+; GCN-NEXT: v_add_co_u32_e32 v14, vcc, v15, v14
+; GCN-NEXT: v_addc_co_u32_e32 v15, vcc, 0, v16, vcc
+; GCN-NEXT: v_mul_lo_u32 v10, v1, v10
+; GCN-NEXT: v_add_co_u32_e32 v13, vcc, v14, v13
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, v15, v12, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v12, v10
+; GCN-NEXT: v_addc_co_u32_e32 v11, vcc, 0, v11, vcc
+; GCN-NEXT: v_add_co_u32_e32 v0, vcc, v0, v10
+; GCN-NEXT: v_addc_co_u32_e32 v1, vcc, v1, v11, vcc
+; GCN-NEXT: v_ashrrev_i32_e32 v11, 31, v3
+; GCN-NEXT: v_add_co_u32_e32 v10, vcc, v2, v11
+; GCN-NEXT: v_xor_b32_e32 v10, v10, v11
+; GCN-NEXT: v_mul_lo_u32 v12, v10, v1
+; GCN-NEXT: v_mul_hi_u32 v13, v10, v0
+; GCN-NEXT: v_mul_hi_u32 v14, v10, v1
+; GCN-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v11, vcc
+; GCN-NEXT: v_xor_b32_e32 v3, v3, v11
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v13, v12
+; GCN-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v14, vcc
+; GCN-NEXT: v_mul_lo_u32 v14, v3, v0
+; GCN-NEXT: v_mul_hi_u32 v0, v3, v0
+; GCN-NEXT: v_mul_hi_u32 v15, v3, v1
+; GCN-NEXT: v_mul_lo_u32 v1, v3, v1
+; GCN-NEXT: v_add_co_u32_e32 v12, vcc, v12, v14
+; GCN-NEXT: v_addc_co_u32_e32 v0, vcc, v13, v0, vcc
+; GCN-NEXT: v_addc_co_u32_e32 v12, vcc, 0, v15, vcc
+; GCN-NEXT: v_add_co_u32_e32 v0, vcc, v0, v1
+; GCN-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v12, vcc
+; GCN-NEXT: v_mul_lo_u32 v1, s6, v1
+; GCN-NEXT: v_mul_hi_u32 v12, s6, v0
+; GCN-NEXT: v_mul_lo_u32 v13, s7, v0
+; GCN-NEXT: v_mul_lo_u32 v0, s6, v0
+; GCN-NEXT: v_add_u32_e32 v1, v12, v1
+; GCN-NEXT: v_add_u32_e32 v1, v1, v13
+; GCN-NEXT: v_sub_u32_e32 v12, v3, v1
+; GCN-NEXT: v_mov_b32_e32 v13, s7
+; GCN-NEXT: v_sub_co_u32_e32 v0, vcc, v10, v0
+; GCN-NEXT: v_subb_co_u32_e64 v10, s[0:1], v12, v13, vcc
+; GCN-NEXT: v_subrev_co_u32_e64 v12, s[0:1], s6, v0
+; GCN-NEXT: v_subbrev_co_u32_e64 v14, s[2:3], 0, v10, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_cndmask_b32_e64 v15, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_le_u32_e64 s[2:3], s6, v12
+; GCN-NEXT: v_subb_co_u32_e64 v10, s[0:1], v10, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v16, 0, -1, s[2:3]
+; GCN-NEXT: v_cmp_eq_u32_e64 s[2:3], s7, v14
+; GCN-NEXT: v_subrev_co_u32_e64 v13, s[0:1], s6, v12
+; GCN-NEXT: v_subb_co_u32_e32 v1, vcc, v3, v1, vcc
+; GCN-NEXT: v_cndmask_b32_e64 v15, v15, v16, s[2:3]
+; GCN-NEXT: v_subbrev_co_u32_e64 v10, s[0:1], 0, v10, s[0:1]
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s7, v1
+; GCN-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v15
+; GCN-NEXT: v_cndmask_b32_e64 v3, 0, -1, vcc
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s6, v0
+; GCN-NEXT: v_cndmask_b32_e64 v12, v12, v13, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e64 v13, 0, -1, vcc
+; GCN-NEXT: v_cmp_eq_u32_e32 vcc, s7, v1
+; GCN-NEXT: v_cndmask_b32_e32 v3, v3, v13, vcc
+; GCN-NEXT: v_cmp_ne_u32_e32 vcc, 0, v3
+; GCN-NEXT: v_cndmask_b32_e64 v10, v14, v10, s[0:1]
+; GCN-NEXT: v_cndmask_b32_e32 v0, v0, v12, vcc
+; GCN-NEXT: v_cndmask_b32_e32 v1, v1, v10, vcc
+; GCN-NEXT: v_xor_b32_e32 v0, v0, v11
+; GCN-NEXT: v_xor_b32_e32 v1, v1, v11
+; GCN-NEXT: v_sub_co_u32_e32 v10, vcc, v0, v11
+; GCN-NEXT: v_subb_co_u32_e32 v11, vcc, v1, v11, vcc
; GCN-NEXT: s_cbranch_execnz .LBB12_12
; GCN-NEXT: .LBB12_11:
-; GCN-NEXT: v_cvt_f32_u32_e32 v0, v2
-; GCN-NEXT: v_sub_u32_e32 v1, 0, v2
-; GCN-NEXT: v_mov_b32_e32 v15, 0
+; GCN-NEXT: v_cvt_f32_u32_e32 v0, s10
+; GCN-NEXT: s_sub_i32 s0, 0, s10
+; GCN-NEXT: v_mov_b32_e32 v11, 0
; GCN-NEXT: v_rcp_iflag_f32_e32 v0, v0
; GCN-NEXT: v_mul_f32_e32 v0, 0x4f7ffffe, v0
; GCN-NEXT: v_cvt_u32_f32_e32 v0, v0
-; GCN-NEXT: v_mul_lo_u32 v1, v1, v0
+; GCN-NEXT: v_mul_lo_u32 v1, s0, v0
; GCN-NEXT: v_mul_hi_u32 v1, v0, v1
; GCN-NEXT: v_add_u32_e32 v0, v0, v1
-; GCN-NEXT: v_mul_hi_u32 v0, v6, v0
-; GCN-NEXT: v_mul_lo_u32 v0, v0, v2
-; GCN-NEXT: v_sub_u32_e32 v0, v6, v0
-; GCN-NEXT: v_sub_u32_e32 v1, v0, v2
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
+; GCN-NEXT: v_mul_hi_u32 v0, v2, v0
+; GCN-NEXT: v_mul_lo_u32 v0, v0, s10
+; GCN-NEXT: v_sub_u32_e32 v0, v2, v0
+; GCN-NEXT: v_subrev_u32_e32 v1, s10, v0
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s10, v0
; GCN-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
-; GCN-NEXT: v_sub_u32_e32 v1, v0, v2
-; GCN-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
-; GCN-NEXT: v_cndmask_b32_e32 v14, v0, v1, vcc
+; GCN-NEXT: v_subrev_u32_e32 v1, s10, v0
+; GCN-NEXT: v_cmp_le_u32_e32 vcc, s10, v0
+; GCN-NEXT: v_cndmask_b32_e32 v10, v0, v1, vcc
; GCN-NEXT: .LBB12_12:
; GCN-NEXT: v_mov_b32_e32 v0, 0
-; GCN-NEXT: global_store_dwordx4 v0, v[12:15], s[4:5] offset:16
-; GCN-NEXT: global_store_dwordx4 v0, v[8:11], s[4:5]
+; GCN-NEXT: global_store_dwordx4 v0, v[8:11], s[4:5] offset:16
+; GCN-NEXT: global_store_dwordx4 v0, v[4:7], s[4:5]
; GCN-NEXT: s_endpgm
; GCN-NEXT: .LBB12_13:
-; GCN-NEXT: ; implicit-def: $vgpr8_vgpr9
+; GCN-NEXT: ; implicit-def: $vgpr4_vgpr5
; GCN-NEXT: s_branch .LBB12_2
; GCN-NEXT: .LBB12_14:
; GCN-NEXT: s_branch .LBB12_5
; GCN-NEXT: .LBB12_15:
-; GCN-NEXT: ; implicit-def: $vgpr12_vgpr13
+; GCN-NEXT: ; implicit-def: $vgpr8_vgpr9
; GCN-NEXT: s_branch .LBB12_8
; GCN-NEXT: .LBB12_16:
; GCN-NEXT: s_branch .LBB12_11
@@ -6089,567 +6074,596 @@ define amdgpu_kernel void @srem_v4i64(ptr addrspace(1) %out, ptr addrspace(1) %i
; TONGA-LABEL: srem_v4i64:
; TONGA: ; %bb.0:
; TONGA-NEXT: s_load_dwordx4 s[4:7], s[2:3], 0x24
-; TONGA-NEXT: v_mov_b32_e32 v8, 0
; TONGA-NEXT: s_waitcnt lgkmcnt(0)
; TONGA-NEXT: s_add_u32 s0, s6, 48
; TONGA-NEXT: s_addc_u32 s1, s7, 0
-; TONGA-NEXT: s_add_u32 s2, s6, 32
+; TONGA-NEXT: v_mov_b32_e32 v5, s1
+; TONGA-NEXT: v_mov_b32_e32 v4, s0
+; TONGA-NEXT: s_add_u32 s0, s6, 32
+; TONGA-NEXT: s_addc_u32 s1, s7, 0
; TONGA-NEXT: v_mov_b32_e32 v0, s6
-; TONGA-NEXT: s_addc_u32 s3, s7, 0
-; TONGA-NEXT: v_mov_b32_e32 v2, s2
+; TONGA-NEXT: v_mov_b32_e32 v9, s1
; TONGA-NEXT: v_mov_b32_e32 v1, s7
-; TONGA-NEXT: v_mov_b32_e32 v3, s3
-; TONGA-NEXT: flat_load_dwordx4 v[10:13], v[2:3]
-; TONGA-NEXT: flat_load_dwordx4 v[14:17], v[0:1]
-; TONGA-NEXT: v_mov_b32_e32 v0, s0
-; TONGA-NEXT: v_mov_b32_e32 v1, s1
+; TONGA-NEXT: v_mov_b32_e32 v8, s0
; TONGA-NEXT: s_add_u32 s0, s6, 16
-; TONGA-NEXT: s_addc_u32 s1, s7, 0
-; TONGA-NEXT: v_mov_b32_e32 v5, s1
-; TONGA-NEXT: v_mov_b32_e32 v4, s0
; TONGA-NEXT: flat_load_dwordx4 v[0:3], v[0:1]
; TONGA-NEXT: flat_load_dwordx4 v[4:7], v[4:5]
-; TONGA-NEXT: s_waitcnt vmcnt(2)
-; TONGA-NEXT: v_or_b32_e32 v9, v15, v11
-; TONGA-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[8:9]
-; TONGA-NEXT: s_cbranch_vccz .LBB12_13
-; TONGA-NEXT: ; %bb.1:
-; TONGA-NEXT: v_ashrrev_i32_e32 v8, 31, v11
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v10, v8
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, v11, v8, vcc
-; TONGA-NEXT: v_xor_b32_e32 v22, v9, v8
-; TONGA-NEXT: v_xor_b32_e32 v11, v11, v8
-; TONGA-NEXT: v_cvt_f32_u32_e32 v8, v22
-; TONGA-NEXT: v_cvt_f32_u32_e32 v9, v11
-; TONGA-NEXT: v_sub_u32_e32 v23, vcc, 0, v22
-; TONGA-NEXT: v_subb_u32_e32 v24, vcc, 0, v11, vcc
-; TONGA-NEXT: v_madmk_f32 v8, v9, 0x4f800000, v8
-; TONGA-NEXT: v_rcp_f32_e32 v8, v8
-; TONGA-NEXT: v_mul_f32_e32 v8, 0x5f7ffffc, v8
-; TONGA-NEXT: v_mul_f32_e32 v9, 0x2f800000, v8
-; TONGA-NEXT: v_trunc_f32_e32 v9, v9
-; TONGA-NEXT: v_madmk_f32 v8, v9, 0xcf800000, v8
-; TONGA-NEXT: v_cvt_u32_f32_e32 v20, v9
-; TONGA-NEXT: v_cvt_u32_f32_e32 v21, v8
-; TONGA-NEXT: v_mul_lo_u32 v18, v23, v20
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v23, v21, 0
-; TONGA-NEXT: v_mul_lo_u32 v19, v24, v21
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v9, v18
-; TONGA-NEXT: v_add_u32_e32 v25, vcc, v9, v19
-; TONGA-NEXT: v_mad_u64_u32 v[18:19], s[0:1], v21, v25, 0
-; TONGA-NEXT: v_mul_hi_u32 v9, v21, v8
-; TONGA-NEXT: v_add_u32_e32 v26, vcc, v9, v18
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v20, v8, 0
-; TONGA-NEXT: v_addc_u32_e32 v27, vcc, 0, v19, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[18:19], s[0:1], v20, v25, 0
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v26, v8
-; TONGA-NEXT: v_addc_u32_e32 v8, vcc, v27, v9, vcc
-; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v19, vcc
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v8, v18
-; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v9, vcc
-; TONGA-NEXT: v_add_u32_e32 v25, vcc, v21, v8
-; TONGA-NEXT: v_addc_u32_e32 v26, vcc, v20, v9, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v23, v25, 0
-; TONGA-NEXT: v_mul_lo_u32 v20, v23, v26
-; TONGA-NEXT: v_mul_lo_u32 v21, v24, v25
-; TONGA-NEXT: v_mul_hi_u32 v23, v25, v8
-; TONGA-NEXT: v_mad_u64_u32 v[18:19], s[0:1], v26, v8, 0
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v20, v9
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v9, v21
-; TONGA-NEXT: v_mad_u64_u32 v[20:21], s[0:1], v25, v9, 0
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v26, v9, 0
-; TONGA-NEXT: v_add_u32_e32 v20, vcc, v23, v20
-; TONGA-NEXT: v_addc_u32_e32 v21, vcc, 0, v21, vcc
-; TONGA-NEXT: v_add_u32_e32 v18, vcc, v20, v18
-; TONGA-NEXT: v_addc_u32_e32 v18, vcc, v21, v19, vcc
-; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v9, vcc
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v18, v8
-; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v9, vcc
-; TONGA-NEXT: v_add_u32_e32 v18, vcc, v25, v8
-; TONGA-NEXT: v_addc_u32_e32 v19, vcc, v26, v9, vcc
-; TONGA-NEXT: v_ashrrev_i32_e32 v20, 31, v15
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v14, v20
-; TONGA-NEXT: v_xor_b32_e32 v21, v8, v20
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v21, v19, 0
-; TONGA-NEXT: v_mul_hi_u32 v23, v21, v18
-; TONGA-NEXT: v_addc_u32_e32 v15, vcc, v15, v20, vcc
-; TONGA-NEXT: v_xor_b32_e32 v15, v15, v20
-; TONGA-NEXT: v_add_u32_e32 v23, vcc, v23, v8
-; TONGA-NEXT: v_addc_u32_e32 v24, vcc, 0, v9, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v15, v18, 0
-; TONGA-NEXT: v_mad_u64_u32 v[18:19], s[0:1], v15, v19, 0
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v23, v8
-; TONGA-NEXT: v_addc_u32_e32 v8, vcc, v24, v9, vcc
-; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v19, vcc
-; TONGA-NEXT: v_add_u32_e32 v18, vcc, v8, v18
-; TONGA-NEXT: v_addc_u32_e32 v8, vcc, 0, v9, vcc
-; TONGA-NEXT: v_mul_lo_u32 v19, v22, v8
-; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v22, v18, 0
-; TONGA-NEXT: v_mul_lo_u32 v18, v11, v18
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v19, v9
-; TONGA-NEXT: v_add_u32_e32 v9, vcc, v18, v9
-; TONGA-NEXT: v_sub_u32_e32 v18, vcc, v15, v9
-; TONGA-NEXT: v_sub_u32_e32 v8, vcc, v21, v8
-; TONGA-NEXT: v_subb_u32_e64 v18, s[0:1], v18, v11, vcc
-; TONGA-NEXT: v_sub_u32_e64 v19, s[0:1], v8, v22
-; TONGA-NEXT: v_subbrev_u32_e64 v21, s[2:3], 0, v18, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v21, v11
-; TONGA-NEXT: v_cndmask_b32_e64 v23, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v19, v22
-; TONGA-NEXT: v_cndmask_b32_e64 v24, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], v21, v11
-; TONGA-NEXT: v_subb_u32_e64 v18, s[0:1], v18, v11, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e64 v23, v23, v24, s[2:3]
-; TONGA-NEXT: v_sub_u32_e64 v24, s[0:1], v19, v22
-; TONGA-NEXT: v_subb_u32_e32 v9, vcc, v15, v9, vcc
-; TONGA-NEXT: v_subbrev_u32_e64 v18, s[0:1], 0, v18, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v9, v11
-; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v23
-; TONGA-NEXT: v_cndmask_b32_e64 v15, 0, -1, vcc
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v8, v22
-; TONGA-NEXT: v_cndmask_b32_e64 v18, v21, v18, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e64 v21, 0, -1, vcc
-; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, v9, v11
-; TONGA-NEXT: v_cndmask_b32_e32 v11, v15, v21, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v19, v19, v24, s[0:1]
-; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v11
-; TONGA-NEXT: v_cndmask_b32_e32 v8, v8, v19, vcc
-; TONGA-NEXT: v_cndmask_b32_e32 v9, v9, v18, vcc
-; TONGA-NEXT: v_xor_b32_e32 v8, v8, v20
-; TONGA-NEXT: v_xor_b32_e32 v9, v9, v20
-; TONGA-NEXT: v_sub_u32_e32 v8, vcc, v8, v20
-; TONGA-NEXT: v_subb_u32_e32 v9, vcc, v9, v20, vcc
-; TONGA-NEXT: s_cbranch_execnz .LBB12_3
-; TONGA-NEXT: .LBB12_2:
-; TONGA-NEXT: v_cvt_f32_u32_e32 v8, v10
-; TONGA-NEXT: v_sub_u32_e32 v9, vcc, 0, v10
-; TONGA-NEXT: v_rcp_iflag_f32_e32 v8, v8
-; TONGA-NEXT: v_mul_f32_e32 v8, 0x4f7ffffe, v8
-; TONGA-NEXT: v_cvt_u32_f32_e32 v8, v8
-; TONGA-NEXT: v_mul_lo_u32 v9, v9, v8
-; TONGA-NEXT: v_mul_hi_u32 v9, v8, v9
-; TONGA-NEXT: v_add_u32_e32 v8, vcc, v8, v9
-; TONGA-NEXT: v_mul_hi_u32 v8, v14, v8
-; TONGA-NEXT: v_mul_lo_u32 v8, v8, v10
-; TONGA-NEXT: v_sub_u32_e32 v8, vcc, v14, v8
-; TONGA-NEXT: v_subrev_u32_e32 v9, vcc, v10, v8
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v8, v10
-; TONGA-NEXT: v_cndmask_b32_e32 v8, v8, v9, vcc
-; TONGA-NEXT: v_sub_u32_e32 v9, vcc, v8, v10
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v8, v10
-; TONGA-NEXT: v_cndmask_b32_e32 v8, v8, v9, vcc
-; TONGA-NEXT: v_mov_b32_e32 v9, 0
-; TONGA-NEXT: .LBB12_3:
-; TONGA-NEXT: v_or_b32_e32 v11, v17, v13
-; TONGA-NEXT: v_mov_b32_e32 v10, 0
-; TONGA-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[10:11]
-; TONGA-NEXT: s_cbranch_vccz .LBB12_14
-; TONGA-NEXT: ; %bb.4:
-; TONGA-NEXT: v_ashrrev_i32_e32 v10, 31, v13
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v12, v10
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, v13, v10, vcc
-; TONGA-NEXT: v_xor_b32_e32 v15, v11, v10
-; TONGA-NEXT: v_xor_b32_e32 v20, v13, v10
-; TONGA-NEXT: v_cvt_f32_u32_e32 v10, v15
-; TONGA-NEXT: v_cvt_f32_u32_e32 v11, v20
-; TONGA-NEXT: v_sub_u32_e32 v21, vcc, 0, v15
-; TONGA-NEXT: v_subb_u32_e32 v22, vcc, 0, v20, vcc
-; TONGA-NEXT: v_madmk_f32 v10, v11, 0x4f800000, v10
-; TONGA-NEXT: v_rcp_f32_e32 v10, v10
-; TONGA-NEXT: v_mul_f32_e32 v10, 0x5f7ffffc, v10
-; TONGA-NEXT: v_mul_f32_e32 v11, 0x2f800000, v10
-; TONGA-NEXT: v_trunc_f32_e32 v11, v11
-; TONGA-NEXT: v_madmk_f32 v10, v11, 0xcf800000, v10
-; TONGA-NEXT: v_cvt_u32_f32_e32 v18, v11
-; TONGA-NEXT: v_cvt_u32_f32_e32 v19, v10
-; TONGA-NEXT: v_mul_lo_u32 v13, v21, v18
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v21, v19, 0
-; TONGA-NEXT: v_mul_lo_u32 v14, v22, v19
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v11, v13
-; TONGA-NEXT: v_add_u32_e32 v23, vcc, v11, v14
-; TONGA-NEXT: v_mad_u64_u32 v[13:14], s[0:1], v19, v23, 0
-; TONGA-NEXT: v_mul_hi_u32 v11, v19, v10
-; TONGA-NEXT: v_add_u32_e32 v24, vcc, v11, v13
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v18, v10, 0
-; TONGA-NEXT: v_addc_u32_e32 v25, vcc, 0, v14, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[13:14], s[0:1], v18, v23, 0
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v24, v10
-; TONGA-NEXT: v_addc_u32_e32 v10, vcc, v25, v11, vcc
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v14, vcc
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v10, v13
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v11, vcc
-; TONGA-NEXT: v_add_u32_e32 v23, vcc, v19, v10
-; TONGA-NEXT: v_addc_u32_e32 v24, vcc, v18, v11, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v21, v23, 0
-; TONGA-NEXT: v_mul_lo_u32 v18, v21, v24
-; TONGA-NEXT: v_mul_lo_u32 v19, v22, v23
-; TONGA-NEXT: v_mul_hi_u32 v21, v23, v10
-; TONGA-NEXT: v_mad_u64_u32 v[13:14], s[0:1], v24, v10, 0
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v18, v11
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v11, v19
-; TONGA-NEXT: v_mad_u64_u32 v[18:19], s[0:1], v23, v11, 0
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v24, v11, 0
-; TONGA-NEXT: v_add_u32_e32 v18, vcc, v21, v18
-; TONGA-NEXT: v_addc_u32_e32 v19, vcc, 0, v19, vcc
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v18, v13
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, v19, v14, vcc
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v11, vcc
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v13, v10
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v11, vcc
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v23, v10
-; TONGA-NEXT: v_addc_u32_e32 v14, vcc, v24, v11, vcc
-; TONGA-NEXT: v_ashrrev_i32_e32 v18, 31, v17
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v16, v18
-; TONGA-NEXT: v_xor_b32_e32 v19, v10, v18
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v19, v14, 0
-; TONGA-NEXT: v_mul_hi_u32 v21, v19, v13
-; TONGA-NEXT: v_addc_u32_e32 v17, vcc, v17, v18, vcc
-; TONGA-NEXT: v_xor_b32_e32 v17, v17, v18
-; TONGA-NEXT: v_add_u32_e32 v21, vcc, v21, v10
-; TONGA-NEXT: v_addc_u32_e32 v22, vcc, 0, v11, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v17, v13, 0
-; TONGA-NEXT: v_mad_u64_u32 v[13:14], s[0:1], v17, v14, 0
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v21, v10
-; TONGA-NEXT: v_addc_u32_e32 v10, vcc, v22, v11, vcc
-; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v14, vcc
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v10, v13
-; TONGA-NEXT: v_addc_u32_e32 v10, vcc, 0, v11, vcc
-; TONGA-NEXT: v_mul_lo_u32 v14, v15, v10
-; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v15, v13, 0
-; TONGA-NEXT: v_mul_lo_u32 v13, v20, v13
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v14, v11
-; TONGA-NEXT: v_add_u32_e32 v11, vcc, v13, v11
-; TONGA-NEXT: v_sub_u32_e32 v13, vcc, v17, v11
-; TONGA-NEXT: v_sub_u32_e32 v10, vcc, v19, v10
-; TONGA-NEXT: v_subb_u32_e64 v13, s[0:1], v13, v20, vcc
-; TONGA-NEXT: v_sub_u32_e64 v14, s[0:1], v10, v15
-; TONGA-NEXT: v_subbrev_u32_e64 v19, s[2:3], 0, v13, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v19, v20
-; TONGA-NEXT: v_cndmask_b32_e64 v21, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v14, v15
-; TONGA-NEXT: v_subb_u32_e32 v11, vcc, v17, v11, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v22, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], v19, v20
-; TONGA-NEXT: v_subb_u32_e64 v13, s[0:1], v13, v20, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v11, v20
-; TONGA-NEXT: v_cndmask_b32_e64 v21, v21, v22, s[2:3]
-; TONGA-NEXT: v_sub_u32_e64 v22, s[0:1], v14, v15
-; TONGA-NEXT: v_cndmask_b32_e64 v17, 0, -1, vcc
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v10, v15
-; TONGA-NEXT: v_subbrev_u32_e64 v13, s[0:1], 0, v13, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e64 v15, 0, -1, vcc
-; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, v11, v20
-; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v21
-; TONGA-NEXT: v_cndmask_b32_e32 v15, v17, v15, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v14, v14, v22, s[0:1]
-; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v15
-; TONGA-NEXT: v_cndmask_b32_e64 v13, v19, v13, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e32 v10, v10, v14, vcc
-; TONGA-NEXT: v_cndmask_b32_e32 v11, v11, v13, vcc
-; TONGA-NEXT: v_xor_b32_e32 v10, v10, v18
-; TONGA-NEXT: v_xor_b32_e32 v11, v11, v18
-; TONGA-NEXT: v_sub_u32_e32 v10, vcc, v10, v18
-; TONGA-NEXT: v_subb_u32_e32 v11, vcc, v11, v18, vcc
-; TONGA-NEXT: s_cbranch_execnz .LBB12_6
-; TONGA-NEXT: .LBB12_5:
-; TONGA-NEXT: v_cvt_f32_u32_e32 v10, v12
-; TONGA-NEXT: v_sub_u32_e32 v11, vcc, 0, v12
-; TONGA-NEXT: v_rcp_iflag_f32_e32 v10, v10
-; TONGA-NEXT: v_mul_f32_e32 v10, 0x4f7ffffe, v10
-; TONGA-NEXT: v_cvt_u32_f32_e32 v10, v10
-; TONGA-NEXT: v_mul_lo_u32 v11, v11, v10
-; TONGA-NEXT: v_mul_hi_u32 v11, v10, v11
-; TONGA-NEXT: v_add_u32_e32 v10, vcc, v10, v11
-; TONGA-NEXT: v_mul_hi_u32 v10, v16, v10
-; TONGA-NEXT: v_mul_lo_u32 v10, v10, v12
-; TONGA-NEXT: v_sub_u32_e32 v10, vcc, v16, v10
-; TONGA-NEXT: v_subrev_u32_e32 v11, vcc, v12, v10
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v10, v12
-; TONGA-NEXT: v_cndmask_b32_e32 v10, v10, v11, vcc
-; TONGA-NEXT: v_subrev_u32_e32 v11, vcc, v12, v10
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v10, v12
-; TONGA-NEXT: v_cndmask_b32_e32 v10, v10, v11, vcc
-; TONGA-NEXT: v_mov_b32_e32 v11, 0
-; TONGA-NEXT: .LBB12_6:
+; TONGA-NEXT: flat_load_dwordx4 v[8:11], v[8:9]
+; TONGA-NEXT: s_addc_u32 s1, s7, 0
+; TONGA-NEXT: v_mov_b32_e32 v13, s1
+; TONGA-NEXT: v_mov_b32_e32 v12, s0
+; TONGA-NEXT: flat_load_dwordx4 v[12:15], v[12:13]
+; TONGA-NEXT: s_waitcnt vmcnt(3)
+; TONGA-NEXT: v_readfirstlane_b32 s19, v1
+; TONGA-NEXT: v_readfirstlane_b32 s18, v0
+; TONGA-NEXT: s_waitcnt vmcnt(1)
+; TONGA-NEXT: v_readfirstlane_b32 s21, v9
+; TONGA-NEXT: v_readfirstlane_b32 s20, v8
+; TONGA-NEXT: s_or_b64 s[0:1], s[18:19], s[20:21]
+; TONGA-NEXT: s_mov_b32 s0, 0
+; TONGA-NEXT: v_readfirstlane_b32 s15, v3
+; TONGA-NEXT: v_readfirstlane_b32 s14, v2
+; TONGA-NEXT: v_readfirstlane_b32 s7, v7
+; TONGA-NEXT: v_readfirstlane_b32 s6, v6
+; TONGA-NEXT: v_readfirstlane_b32 s11, v5
+; TONGA-NEXT: v_readfirstlane_b32 s10, v4
+; TONGA-NEXT: v_readfirstlane_b32 s17, v11
+; TONGA-NEXT: v_readfirstlane_b32 s16, v10
; TONGA-NEXT: s_waitcnt vmcnt(0)
-; TONGA-NEXT: v_or_b32_e32 v13, v5, v1
-; TONGA-NEXT: v_mov_b32_e32 v12, 0
-; TONGA-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[12:13]
-; TONGA-NEXT: s_cbranch_vccz .LBB12_15
-; TONGA-NEXT: ; %bb.7:
-; TONGA-NEXT: v_ashrrev_i32_e32 v12, 31, v1
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v0, v12
-; TONGA-NEXT: v_addc_u32_e32 v1, vcc, v1, v12, vcc
-; TONGA-NEXT: v_xor_b32_e32 v18, v13, v12
-; TONGA-NEXT: v_xor_b32_e32 v1, v1, v12
-; TONGA-NEXT: v_cvt_f32_u32_e32 v12, v18
-; TONGA-NEXT: v_cvt_f32_u32_e32 v13, v1
-; TONGA-NEXT: v_sub_u32_e32 v19, vcc, 0, v18
-; TONGA-NEXT: v_subb_u32_e32 v20, vcc, 0, v1, vcc
-; TONGA-NEXT: v_madmk_f32 v12, v13, 0x4f800000, v12
-; TONGA-NEXT: v_rcp_f32_e32 v12, v12
-; TONGA-NEXT: v_mul_f32_e32 v12, 0x5f7ffffc, v12
-; TONGA-NEXT: v_mul_f32_e32 v13, 0x2f800000, v12
-; TONGA-NEXT: v_trunc_f32_e32 v13, v13
-; TONGA-NEXT: v_madmk_f32 v12, v13, 0xcf800000, v12
-; TONGA-NEXT: v_cvt_u32_f32_e32 v16, v13
-; TONGA-NEXT: v_cvt_u32_f32_e32 v17, v12
-; TONGA-NEXT: v_mul_lo_u32 v14, v19, v16
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v19, v17, 0
-; TONGA-NEXT: v_mul_lo_u32 v15, v20, v17
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v13, v14
-; TONGA-NEXT: v_add_u32_e32 v15, vcc, v13, v15
-; TONGA-NEXT: v_mad_u64_u32 v[13:14], s[0:1], v17, v15, 0
-; TONGA-NEXT: v_mul_hi_u32 v21, v17, v12
-; TONGA-NEXT: v_add_u32_e32 v21, vcc, v21, v13
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v16, v12, 0
-; TONGA-NEXT: v_addc_u32_e32 v22, vcc, 0, v14, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[14:15], s[0:1], v16, v15, 0
-; TONGA-NEXT: v_add_u32_e32 v12, vcc, v21, v12
-; TONGA-NEXT: v_addc_u32_e32 v12, vcc, v22, v13, vcc
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v15, vcc
-; TONGA-NEXT: v_add_u32_e32 v12, vcc, v12, v14
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v13, vcc
-; TONGA-NEXT: v_add_u32_e32 v21, vcc, v17, v12
-; TONGA-NEXT: v_addc_u32_e32 v22, vcc, v16, v13, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v19, v21, 0
-; TONGA-NEXT: v_mul_lo_u32 v16, v19, v22
-; TONGA-NEXT: v_mul_lo_u32 v17, v20, v21
-; TONGA-NEXT: v_mul_hi_u32 v19, v21, v12
-; TONGA-NEXT: v_mad_u64_u32 v[14:15], s[0:1], v22, v12, 0
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v16, v13
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v13, v17
-; TONGA-NEXT: v_mad_u64_u32 v[16:17], s[0:1], v21, v13, 0
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v22, v13, 0
-; TONGA-NEXT: v_add_u32_e32 v16, vcc, v19, v16
-; TONGA-NEXT: v_addc_u32_e32 v17, vcc, 0, v17, vcc
-; TONGA-NEXT: v_add_u32_e32 v14, vcc, v16, v14
-; TONGA-NEXT: v_addc_u32_e32 v14, vcc, v17, v15, vcc
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v13, vcc
-; TONGA-NEXT: v_add_u32_e32 v12, vcc, v14, v12
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v13, vcc
-; TONGA-NEXT: v_add_u32_e32 v14, vcc, v21, v12
-; TONGA-NEXT: v_addc_u32_e32 v15, vcc, v22, v13, vcc
-; TONGA-NEXT: v_ashrrev_i32_e32 v16, 31, v5
-; TONGA-NEXT: v_add_u32_e32 v12, vcc, v4, v16
-; TONGA-NEXT: v_xor_b32_e32 v17, v12, v16
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v17, v15, 0
-; TONGA-NEXT: v_mul_hi_u32 v19, v17, v14
-; TONGA-NEXT: v_addc_u32_e32 v5, vcc, v5, v16, vcc
-; TONGA-NEXT: v_xor_b32_e32 v5, v5, v16
-; TONGA-NEXT: v_add_u32_e32 v19, vcc, v19, v12
-; TONGA-NEXT: v_addc_u32_e32 v20, vcc, 0, v13, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v5, v14, 0
-; TONGA-NEXT: v_mad_u64_u32 v[14:15], s[0:1], v5, v15, 0
-; TONGA-NEXT: v_add_u32_e32 v12, vcc, v19, v12
-; TONGA-NEXT: v_addc_u32_e32 v12, vcc, v20, v13, vcc
-; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v15, vcc
-; TONGA-NEXT: v_add_u32_e32 v14, vcc, v12, v14
-; TONGA-NEXT: v_addc_u32_e32 v12, vcc, 0, v13, vcc
-; TONGA-NEXT: v_mul_lo_u32 v15, v18, v12
-; TONGA-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v18, v14, 0
-; TONGA-NEXT: v_mul_lo_u32 v14, v1, v14
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v15, v13
-; TONGA-NEXT: v_add_u32_e32 v13, vcc, v14, v13
-; TONGA-NEXT: v_sub_u32_e32 v14, vcc, v5, v13
-; TONGA-NEXT: v_sub_u32_e32 v12, vcc, v17, v12
-; TONGA-NEXT: v_subb_u32_e64 v14, s[0:1], v14, v1, vcc
-; TONGA-NEXT: v_sub_u32_e64 v15, s[0:1], v12, v18
-; TONGA-NEXT: v_subbrev_u32_e64 v17, s[2:3], 0, v14, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v17, v1
-; TONGA-NEXT: v_cndmask_b32_e64 v19, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v15, v18
-; TONGA-NEXT: v_cndmask_b32_e64 v20, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], v17, v1
-; TONGA-NEXT: v_subb_u32_e64 v14, s[0:1], v14, v1, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e64 v19, v19, v20, s[2:3]
-; TONGA-NEXT: v_sub_u32_e64 v20, s[0:1], v15, v18
-; TONGA-NEXT: v_subb_u32_e32 v5, vcc, v5, v13, vcc
-; TONGA-NEXT: v_subbrev_u32_e64 v14, s[0:1], 0, v14, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v5, v1
-; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v19
-; TONGA-NEXT: v_cndmask_b32_e64 v13, 0, -1, vcc
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v12, v18
-; TONGA-NEXT: v_cndmask_b32_e64 v14, v17, v14, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e64 v17, 0, -1, vcc
-; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, v5, v1
-; TONGA-NEXT: v_cndmask_b32_e32 v1, v13, v17, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v15, v15, v20, s[0:1]
-; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v1
-; TONGA-NEXT: v_cndmask_b32_e32 v1, v5, v14, vcc
-; TONGA-NEXT: v_cndmask_b32_e32 v5, v12, v15, vcc
-; TONGA-NEXT: v_xor_b32_e32 v5, v5, v16
-; TONGA-NEXT: v_xor_b32_e32 v1, v1, v16
-; TONGA-NEXT: v_sub_u32_e32 v12, vcc, v5, v16
-; TONGA-NEXT: v_subb_u32_e32 v13, vcc, v1, v16, vcc
-; TONGA-NEXT: s_cbranch_execnz .LBB12_9
-; TONGA-NEXT: .LBB12_8:
-; TONGA-NEXT: v_cvt_f32_u32_e32 v1, v0
-; TONGA-NEXT: v_sub_u32_e32 v5, vcc, 0, v0
-; TONGA-NEXT: v_mov_b32_e32 v13, 0
-; TONGA-NEXT: v_rcp_iflag_f32_e32 v1, v1
-; TONGA-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
-; TONGA-NEXT: v_cvt_u32_f32_e32 v1, v1
-; TONGA-NEXT: v_mul_lo_u32 v5, v5, v1
-; TONGA-NEXT: v_mul_hi_u32 v5, v1, v5
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v1, v5
-; TONGA-NEXT: v_mul_hi_u32 v1, v4, v1
-; TONGA-NEXT: v_mul_lo_u32 v1, v1, v0
-; TONGA-NEXT: v_sub_u32_e32 v1, vcc, v4, v1
-; TONGA-NEXT: v_subrev_u32_e32 v4, vcc, v0, v1
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v1, v0
-; TONGA-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
-; TONGA-NEXT: v_subrev_u32_e32 v4, vcc, v0, v1
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v1, v0
-; TONGA-NEXT: v_cndmask_b32_e32 v12, v1, v4, vcc
-; TONGA-NEXT: .LBB12_9:
-; TONGA-NEXT: v_or_b32_e32 v1, v7, v3
-; TONGA-NEXT: v_mov_b32_e32 v0, 0
-; TONGA-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1]
-; TONGA-NEXT: s_cbranch_vccz .LBB12_16
-; TONGA-NEXT: ; %bb.10:
-; TONGA-NEXT: v_ashrrev_i32_e32 v0, 31, v3
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v2, v0
-; TONGA-NEXT: v_addc_u32_e32 v3, vcc, v3, v0, vcc
-; TONGA-NEXT: v_xor_b32_e32 v5, v1, v0
-; TONGA-NEXT: v_xor_b32_e32 v16, v3, v0
-; TONGA-NEXT: v_cvt_f32_u32_e32 v0, v5
-; TONGA-NEXT: v_cvt_f32_u32_e32 v1, v16
-; TONGA-NEXT: v_sub_u32_e32 v17, vcc, 0, v5
-; TONGA-NEXT: v_subb_u32_e32 v18, vcc, 0, v16, vcc
+; TONGA-NEXT: v_readfirstlane_b32 s9, v15
+; TONGA-NEXT: v_readfirstlane_b32 s8, v14
+; TONGA-NEXT: v_readfirstlane_b32 s13, v13
+; TONGA-NEXT: s_cmp_lg_u64 s[0:1], 0
+; TONGA-NEXT: v_readfirstlane_b32 s12, v12
+; TONGA-NEXT: s_cbranch_scc0 .LBB12_13
+; TONGA-NEXT: ; %bb.1:
+; TONGA-NEXT: s_ashr_i32 s0, s21, 31
+; TONGA-NEXT: s_add_u32 s2, s20, s0
+; TONGA-NEXT: s_mov_b32 s1, s0
+; TONGA-NEXT: s_addc_u32 s3, s21, s0
+; TONGA-NEXT: s_xor_b64 s[22:23], s[2:3], s[0:1]
+; TONGA-NEXT: v_cvt_f32_u32_e32 v0, s22
+; TONGA-NEXT: v_cvt_f32_u32_e32 v1, s23
+; TONGA-NEXT: s_sub_u32 s2, 0, s22
+; TONGA-NEXT: s_subb_u32 s3, 0, s23
+; TONGA-NEXT: s_ashr_i32 s26, s19, 31
; TONGA-NEXT: v_madmk_f32 v0, v1, 0x4f800000, v0
; TONGA-NEXT: v_rcp_f32_e32 v0, v0
+; TONGA-NEXT: s_mov_b32 s27, s26
; TONGA-NEXT: v_mul_f32_e32 v0, 0x5f7ffffc, v0
; TONGA-NEXT: v_mul_f32_e32 v1, 0x2f800000, v0
; TONGA-NEXT: v_trunc_f32_e32 v1, v1
; TONGA-NEXT: v_madmk_f32 v0, v1, 0xcf800000, v0
-; TONGA-NEXT: v_cvt_u32_f32_e32 v14, v1
-; TONGA-NEXT: v_cvt_u32_f32_e32 v15, v0
-; TONGA-NEXT: v_mul_lo_u32 v3, v17, v14
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v17, v15, 0
-; TONGA-NEXT: v_mul_lo_u32 v4, v18, v15
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v1, v3
-; TONGA-NEXT: v_add_u32_e32 v19, vcc, v1, v4
-; TONGA-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v15, v19, 0
-; TONGA-NEXT: v_mul_hi_u32 v1, v15, v0
-; TONGA-NEXT: v_add_u32_e32 v20, vcc, v1, v3
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v14, v0, 0
-; TONGA-NEXT: v_addc_u32_e32 v21, vcc, 0, v4, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v14, v19, 0
-; TONGA-NEXT: v_add_u32_e32 v0, vcc, v20, v0
-; TONGA-NEXT: v_addc_u32_e32 v0, vcc, v21, v1, vcc
-; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v4, vcc
-; TONGA-NEXT: v_add_u32_e32 v0, vcc, v0, v3
+; TONGA-NEXT: v_cvt_u32_f32_e32 v4, v1
+; TONGA-NEXT: v_cvt_u32_f32_e32 v5, v0
+; TONGA-NEXT: v_mul_lo_u32 v2, s2, v4
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], s2, v5, 0
+; TONGA-NEXT: v_mul_lo_u32 v3, s3, v5
+; TONGA-NEXT: v_add_u32_e32 v1, vcc, v1, v2
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v1, v3
+; TONGA-NEXT: v_mul_hi_u32 v6, v5, v0
+; TONGA-NEXT: v_mad_u64_u32 v[1:2], s[0:1], v5, v3, 0
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v6, v1
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v4, v0, 0
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v2, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v4, v3, 0
+; TONGA-NEXT: v_add_u32_e32 v0, vcc, v6, v0
+; TONGA-NEXT: v_addc_u32_e32 v0, vcc, v7, v1, vcc
+; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v3, vcc
+; TONGA-NEXT: v_add_u32_e32 v0, vcc, v0, v2
; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v1, vcc
-; TONGA-NEXT: v_add_u32_e32 v19, vcc, v15, v0
-; TONGA-NEXT: v_addc_u32_e32 v20, vcc, v14, v1, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v17, v19, 0
-; TONGA-NEXT: v_mul_lo_u32 v14, v17, v20
-; TONGA-NEXT: v_mul_lo_u32 v15, v18, v19
-; TONGA-NEXT: v_mul_hi_u32 v17, v19, v0
-; TONGA-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v20, v0, 0
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v14, v1
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v1, v15
-; TONGA-NEXT: v_mad_u64_u32 v[14:15], s[0:1], v19, v1, 0
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v20, v1, 0
-; TONGA-NEXT: v_add_u32_e32 v14, vcc, v17, v14
-; TONGA-NEXT: v_addc_u32_e32 v15, vcc, 0, v15, vcc
-; TONGA-NEXT: v_add_u32_e32 v3, vcc, v14, v3
-; TONGA-NEXT: v_addc_u32_e32 v3, vcc, v15, v4, vcc
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v5, v0
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, v4, v1, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], s2, v6, 0
+; TONGA-NEXT: v_mul_lo_u32 v4, s2, v7
+; TONGA-NEXT: v_mul_lo_u32 v5, s3, v6
+; TONGA-NEXT: v_mul_hi_u32 v8, v6, v0
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v7, v0, 0
+; TONGA-NEXT: v_add_u32_e32 v1, vcc, v4, v1
+; TONGA-NEXT: v_add_u32_e32 v1, vcc, v1, v5
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], v6, v1, 0
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v7, v1, 0
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v8, v4
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v5, vcc
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v4, v2
+; TONGA-NEXT: v_addc_u32_e32 v2, vcc, v5, v3, vcc
; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v1, vcc
-; TONGA-NEXT: v_add_u32_e32 v0, vcc, v3, v0
+; TONGA-NEXT: v_add_u32_e32 v0, vcc, v2, v0
; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v1, vcc
-; TONGA-NEXT: v_add_u32_e32 v3, vcc, v19, v0
-; TONGA-NEXT: v_addc_u32_e32 v4, vcc, v20, v1, vcc
-; TONGA-NEXT: v_ashrrev_i32_e32 v15, 31, v7
-; TONGA-NEXT: v_add_u32_e32 v0, vcc, v6, v15
-; TONGA-NEXT: v_xor_b32_e32 v14, v0, v15
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v14, v4, 0
-; TONGA-NEXT: v_mul_hi_u32 v17, v14, v3
-; TONGA-NEXT: v_addc_u32_e32 v7, vcc, v7, v15, vcc
-; TONGA-NEXT: v_xor_b32_e32 v7, v7, v15
-; TONGA-NEXT: v_add_u32_e32 v17, vcc, v17, v0
-; TONGA-NEXT: v_addc_u32_e32 v18, vcc, 0, v1, vcc
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v7, v3, 0
-; TONGA-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v7, v4, 0
-; TONGA-NEXT: v_add_u32_e32 v0, vcc, v17, v0
-; TONGA-NEXT: v_addc_u32_e32 v0, vcc, v18, v1, vcc
-; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v4, vcc
-; TONGA-NEXT: v_add_u32_e32 v3, vcc, v0, v3
+; TONGA-NEXT: s_add_u32 s0, s18, s26
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v6, v0
+; TONGA-NEXT: s_addc_u32 s1, s19, s26
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, v7, v1, vcc
+; TONGA-NEXT: s_xor_b64 s[28:29], s[0:1], s[26:27]
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], s28, v3, 0
+; TONGA-NEXT: v_mul_hi_u32 v4, s28, v2
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v4, v0
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v1, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], s29, v2, 0
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s29, v3, 0
+; TONGA-NEXT: v_add_u32_e32 v0, vcc, v4, v0
+; TONGA-NEXT: v_addc_u32_e32 v0, vcc, v5, v1, vcc
+; TONGA-NEXT: v_addc_u32_e32 v1, vcc, 0, v3, vcc
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v0, v2
; TONGA-NEXT: v_addc_u32_e32 v0, vcc, 0, v1, vcc
-; TONGA-NEXT: v_mul_lo_u32 v4, v5, v0
-; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v5, v3, 0
-; TONGA-NEXT: v_mul_lo_u32 v3, v16, v3
-; TONGA-NEXT: v_add_u32_e32 v1, vcc, v4, v1
+; TONGA-NEXT: v_mul_lo_u32 v3, s22, v0
+; TONGA-NEXT: v_mad_u64_u32 v[0:1], s[0:1], s22, v2, 0
+; TONGA-NEXT: v_mul_lo_u32 v2, s23, v2
; TONGA-NEXT: v_add_u32_e32 v1, vcc, v3, v1
-; TONGA-NEXT: v_sub_u32_e32 v3, vcc, v7, v1
-; TONGA-NEXT: v_sub_u32_e32 v0, vcc, v14, v0
-; TONGA-NEXT: v_subb_u32_e64 v3, s[0:1], v3, v16, vcc
-; TONGA-NEXT: v_sub_u32_e64 v4, s[0:1], v0, v5
-; TONGA-NEXT: v_subbrev_u32_e64 v14, s[2:3], 0, v3, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v14, v16
-; TONGA-NEXT: v_cndmask_b32_e64 v17, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_ge_u32_e64 s[2:3], v4, v5
-; TONGA-NEXT: v_subb_u32_e32 v1, vcc, v7, v1, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v18, 0, -1, s[2:3]
-; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], v14, v16
-; TONGA-NEXT: v_subb_u32_e64 v3, s[0:1], v3, v16, s[0:1]
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v1, v16
-; TONGA-NEXT: v_cndmask_b32_e64 v17, v17, v18, s[2:3]
-; TONGA-NEXT: v_sub_u32_e64 v18, s[0:1], v4, v5
-; TONGA-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v0, v5
-; TONGA-NEXT: v_subbrev_u32_e64 v3, s[0:1], 0, v3, s[0:1]
+; TONGA-NEXT: v_add_u32_e32 v1, vcc, v2, v1
+; TONGA-NEXT: v_sub_u32_e32 v2, vcc, s29, v1
+; TONGA-NEXT: v_mov_b32_e32 v3, s23
+; TONGA-NEXT: v_sub_u32_e32 v0, vcc, s28, v0
+; TONGA-NEXT: v_subb_u32_e64 v2, s[0:1], v2, v3, vcc
+; TONGA-NEXT: v_subrev_u32_e64 v4, s[0:1], s22, v0
+; TONGA-NEXT: v_subbrev_u32_e64 v5, s[2:3], 0, v2, s[0:1]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s23, v5
+; TONGA-NEXT: v_cndmask_b32_e64 v6, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s22, v4
+; TONGA-NEXT: v_subb_u32_e64 v2, s[0:1], v2, v3, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v7, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], s23, v5
+; TONGA-NEXT: v_subrev_u32_e64 v3, s[0:1], s22, v4
+; TONGA-NEXT: v_cndmask_b32_e64 v6, v6, v7, s[2:3]
+; TONGA-NEXT: v_subbrev_u32_e64 v2, s[0:1], 0, v2, s[0:1]
+; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v6
+; TONGA-NEXT: v_cndmask_b32_e64 v3, v4, v3, s[0:1]
+; TONGA-NEXT: v_mov_b32_e32 v4, s29
+; TONGA-NEXT: v_subb_u32_e32 v1, vcc, v4, v1, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s23, v1
+; TONGA-NEXT: v_cndmask_b32_e64 v4, 0, -1, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s22, v0
+; TONGA-NEXT: v_cndmask_b32_e64 v2, v5, v2, s[0:1]
; TONGA-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
-; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, v1, v16
-; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v17
-; TONGA-NEXT: v_cndmask_b32_e32 v5, v7, v5, vcc
-; TONGA-NEXT: v_cndmask_b32_e64 v4, v4, v18, s[0:1]
-; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v5
-; TONGA-NEXT: v_cndmask_b32_e64 v3, v14, v3, s[0:1]
-; TONGA-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
-; TONGA-NEXT: v_cndmask_b32_e32 v1, v1, v3, vcc
-; TONGA-NEXT: v_xor_b32_e32 v0, v0, v15
-; TONGA-NEXT: v_xor_b32_e32 v1, v1, v15
-; TONGA-NEXT: v_sub_u32_e32 v14, vcc, v0, v15
-; TONGA-NEXT: v_subb_u32_e32 v15, vcc, v1, v15, vcc
-; TONGA-NEXT: s_cbranch_execnz .LBB12_12
-; TONGA-NEXT: .LBB12_11:
-; TONGA-NEXT: v_cvt_f32_u32_e32 v0, v2
-; TONGA-NEXT: v_sub_u32_e32 v1, vcc, 0, v2
-; TONGA-NEXT: v_mov_b32_e32 v15, 0
+; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, s23, v1
+; TONGA-NEXT: v_cndmask_b32_e32 v4, v4, v5, vcc
+; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4
+; TONGA-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
+; TONGA-NEXT: v_cndmask_b32_e32 v1, v1, v2, vcc
+; TONGA-NEXT: v_xor_b32_e32 v0, s26, v0
+; TONGA-NEXT: v_xor_b32_e32 v1, s26, v1
+; TONGA-NEXT: v_mov_b32_e32 v2, s26
+; TONGA-NEXT: v_subrev_u32_e32 v0, vcc, s26, v0
+; TONGA-NEXT: v_subb_u32_e32 v1, vcc, v1, v2, vcc
+; TONGA-NEXT: s_cbranch_execnz .LBB12_3
+; TONGA-NEXT: .LBB12_2:
+; TONGA-NEXT: v_cvt_f32_u32_e32 v0, s20
+; TONGA-NEXT: s_sub_i32 s0, 0, s20
; TONGA-NEXT: v_rcp_iflag_f32_e32 v0, v0
; TONGA-NEXT: v_mul_f32_e32 v0, 0x4f7ffffe, v0
; TONGA-NEXT: v_cvt_u32_f32_e32 v0, v0
-; TONGA-NEXT: v_mul_lo_u32 v1, v1, v0
+; TONGA-NEXT: v_mul_lo_u32 v1, s0, v0
; TONGA-NEXT: v_mul_hi_u32 v1, v0, v1
; TONGA-NEXT: v_add_u32_e32 v0, vcc, v0, v1
-; TONGA-NEXT: v_mul_hi_u32 v0, v6, v0
-; TONGA-NEXT: v_mul_lo_u32 v0, v0, v2
-; TONGA-NEXT: v_sub_u32_e32 v0, vcc, v6, v0
-; TONGA-NEXT: v_subrev_u32_e32 v1, vcc, v2, v0
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
+; TONGA-NEXT: v_mul_hi_u32 v0, s18, v0
+; TONGA-NEXT: v_mul_lo_u32 v0, v0, s20
+; TONGA-NEXT: v_sub_u32_e32 v0, vcc, s18, v0
+; TONGA-NEXT: v_subrev_u32_e32 v1, vcc, s20, v0
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s20, v0
; TONGA-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
-; TONGA-NEXT: v_subrev_u32_e32 v1, vcc, v2, v0
-; TONGA-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
-; TONGA-NEXT: v_cndmask_b32_e32 v14, v0, v1, vcc
+; TONGA-NEXT: v_subrev_u32_e32 v1, vcc, s20, v0
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s20, v0
+; TONGA-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
+; TONGA-NEXT: v_mov_b32_e32 v1, 0
+; TONGA-NEXT: .LBB12_3:
+; TONGA-NEXT: s_or_b64 s[0:1], s[14:15], s[16:17]
+; TONGA-NEXT: s_mov_b32 s0, 0
+; TONGA-NEXT: s_cmp_lg_u64 s[0:1], 0
+; TONGA-NEXT: s_cbranch_scc0 .LBB12_14
+; TONGA-NEXT: ; %bb.4:
+; TONGA-NEXT: s_ashr_i32 s0, s17, 31
+; TONGA-NEXT: s_add_u32 s2, s16, s0
+; TONGA-NEXT: s_mov_b32 s1, s0
+; TONGA-NEXT: s_addc_u32 s3, s17, s0
+; TONGA-NEXT: s_xor_b64 s[18:19], s[2:3], s[0:1]
+; TONGA-NEXT: v_cvt_f32_u32_e32 v2, s18
+; TONGA-NEXT: v_cvt_f32_u32_e32 v3, s19
+; TONGA-NEXT: s_sub_u32 s2, 0, s18
+; TONGA-NEXT: s_subb_u32 s3, 0, s19
+; TONGA-NEXT: s_ashr_i32 s22, s15, 31
+; TONGA-NEXT: v_madmk_f32 v2, v3, 0x4f800000, v2
+; TONGA-NEXT: v_rcp_f32_e32 v2, v2
+; TONGA-NEXT: s_mov_b32 s23, s22
+; TONGA-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
+; TONGA-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
+; TONGA-NEXT: v_trunc_f32_e32 v3, v3
+; TONGA-NEXT: v_madmk_f32 v2, v3, 0xcf800000, v2
+; TONGA-NEXT: v_cvt_u32_f32_e32 v6, v3
+; TONGA-NEXT: v_cvt_u32_f32_e32 v7, v2
+; TONGA-NEXT: v_mul_lo_u32 v4, s2, v6
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s2, v7, 0
+; TONGA-NEXT: v_mul_lo_u32 v5, s3, v7
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v3, v4
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v3, v5
+; TONGA-NEXT: v_mul_hi_u32 v8, v7, v2
+; TONGA-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v7, v5, 0
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v8, v3
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v6, v2, 0
+; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v4, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], v6, v5, 0
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v8, v2
+; TONGA-NEXT: v_addc_u32_e32 v2, vcc, v9, v3, vcc
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, 0, v5, vcc
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v2, v4
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v7, v2
+; TONGA-NEXT: v_addc_u32_e32 v9, vcc, v6, v3, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s2, v8, 0
+; TONGA-NEXT: v_mul_lo_u32 v6, s2, v9
+; TONGA-NEXT: v_mul_lo_u32 v7, s3, v8
+; TONGA-NEXT: v_mul_hi_u32 v10, v8, v2
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], v9, v2, 0
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v6, v3
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v3, v7
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], v8, v3, 0
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v9, v3, 0
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v10, v6
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v6, v4
+; TONGA-NEXT: v_addc_u32_e32 v4, vcc, v7, v5, vcc
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v4, v2
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
+; TONGA-NEXT: s_add_u32 s0, s14, s22
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v8, v2
+; TONGA-NEXT: s_addc_u32 s1, s15, s22
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, v9, v3, vcc
+; TONGA-NEXT: s_xor_b64 s[24:25], s[0:1], s[22:23]
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s24, v5, 0
+; TONGA-NEXT: v_mul_hi_u32 v6, s24, v4
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v6, v2
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v3, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s25, v4, 0
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s25, v5, 0
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v6, v2
+; TONGA-NEXT: v_addc_u32_e32 v2, vcc, v7, v3, vcc
+; TONGA-NEXT: v_addc_u32_e32 v3, vcc, 0, v5, vcc
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v2, v4
+; TONGA-NEXT: v_addc_u32_e32 v2, vcc, 0, v3, vcc
+; TONGA-NEXT: v_mul_lo_u32 v5, s18, v2
+; TONGA-NEXT: v_mad_u64_u32 v[2:3], s[0:1], s18, v4, 0
+; TONGA-NEXT: v_mul_lo_u32 v4, s19, v4
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v5, v3
+; TONGA-NEXT: v_add_u32_e32 v3, vcc, v4, v3
+; TONGA-NEXT: v_sub_u32_e32 v4, vcc, s25, v3
+; TONGA-NEXT: v_mov_b32_e32 v5, s19
+; TONGA-NEXT: v_sub_u32_e32 v2, vcc, s24, v2
+; TONGA-NEXT: v_subb_u32_e64 v4, s[0:1], v4, v5, vcc
+; TONGA-NEXT: v_subrev_u32_e64 v6, s[0:1], s18, v2
+; TONGA-NEXT: v_subbrev_u32_e64 v7, s[2:3], 0, v4, s[0:1]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s19, v7
+; TONGA-NEXT: v_cndmask_b32_e64 v8, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s18, v6
+; TONGA-NEXT: v_subb_u32_e64 v4, s[0:1], v4, v5, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v9, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], s19, v7
+; TONGA-NEXT: v_subrev_u32_e64 v5, s[0:1], s18, v6
+; TONGA-NEXT: v_cndmask_b32_e64 v8, v8, v9, s[2:3]
+; TONGA-NEXT: v_subbrev_u32_e64 v4, s[0:1], 0, v4, s[0:1]
+; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v8
+; TONGA-NEXT: v_cndmask_b32_e64 v5, v6, v5, s[0:1]
+; TONGA-NEXT: v_mov_b32_e32 v6, s25
+; TONGA-NEXT: v_subb_u32_e32 v3, vcc, v6, v3, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s19, v3
+; TONGA-NEXT: v_cndmask_b32_e64 v6, 0, -1, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s18, v2
+; TONGA-NEXT: v_cndmask_b32_e64 v4, v7, v4, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
+; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, s19, v3
+; TONGA-NEXT: v_cndmask_b32_e32 v6, v6, v7, vcc
+; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v6
+; TONGA-NEXT: v_cndmask_b32_e32 v2, v2, v5, vcc
+; TONGA-NEXT: v_cndmask_b32_e32 v3, v3, v4, vcc
+; TONGA-NEXT: v_xor_b32_e32 v2, s22, v2
+; TONGA-NEXT: v_xor_b32_e32 v3, s22, v3
+; TONGA-NEXT: v_mov_b32_e32 v4, s22
+; TONGA-NEXT: v_subrev_u32_e32 v2, vcc, s22, v2
+; TONGA-NEXT: v_subb_u32_e32 v3, vcc, v3, v4, vcc
+; TONGA-NEXT: s_cbranch_execnz .LBB12_6
+; TONGA-NEXT: .LBB12_5:
+; TONGA-NEXT: v_cvt_f32_u32_e32 v2, s16
+; TONGA-NEXT: s_sub_i32 s0, 0, s16
+; TONGA-NEXT: v_rcp_iflag_f32_e32 v2, v2
+; TONGA-NEXT: v_mul_f32_e32 v2, 0x4f7ffffe, v2
+; TONGA-NEXT: v_cvt_u32_f32_e32 v2, v2
+; TONGA-NEXT: v_mul_lo_u32 v3, s0, v2
+; TONGA-NEXT: v_mul_hi_u32 v3, v2, v3
+; TONGA-NEXT: v_add_u32_e32 v2, vcc, v2, v3
+; TONGA-NEXT: v_mul_hi_u32 v2, s14, v2
+; TONGA-NEXT: v_mul_lo_u32 v2, v2, s16
+; TONGA-NEXT: v_sub_u32_e32 v2, vcc, s14, v2
+; TONGA-NEXT: v_subrev_u32_e32 v3, vcc, s16, v2
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s16, v2
+; TONGA-NEXT: v_cndmask_b32_e32 v2, v2, v3, vcc
+; TONGA-NEXT: v_subrev_u32_e32 v3, vcc, s16, v2
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s16, v2
+; TONGA-NEXT: v_cndmask_b32_e32 v2, v2, v3, vcc
+; TONGA-NEXT: v_mov_b32_e32 v3, 0
+; TONGA-NEXT: .LBB12_6:
+; TONGA-NEXT: s_or_b64 s[0:1], s[12:13], s[10:11]
+; TONGA-NEXT: s_mov_b32 s0, 0
+; TONGA-NEXT: s_cmp_lg_u64 s[0:1], 0
+; TONGA-NEXT: s_cbranch_scc0 .LBB12_15
+; TONGA-NEXT: ; %bb.7:
+; TONGA-NEXT: s_ashr_i32 s0, s11, 31
+; TONGA-NEXT: s_add_u32 s2, s10, s0
+; TONGA-NEXT: s_mov_b32 s1, s0
+; TONGA-NEXT: s_addc_u32 s3, s11, s0
+; TONGA-NEXT: s_xor_b64 s[14:15], s[2:3], s[0:1]
+; TONGA-NEXT: v_cvt_f32_u32_e32 v4, s14
+; TONGA-NEXT: v_cvt_f32_u32_e32 v5, s15
+; TONGA-NEXT: s_sub_u32 s2, 0, s14
+; TONGA-NEXT: s_subb_u32 s3, 0, s15
+; TONGA-NEXT: s_ashr_i32 s18, s13, 31
+; TONGA-NEXT: v_madmk_f32 v4, v5, 0x4f800000, v4
+; TONGA-NEXT: v_rcp_f32_e32 v4, v4
+; TONGA-NEXT: s_mov_b32 s19, s18
+; TONGA-NEXT: v_mul_f32_e32 v4, 0x5f7ffffc, v4
+; TONGA-NEXT: v_mul_f32_e32 v5, 0x2f800000, v4
+; TONGA-NEXT: v_trunc_f32_e32 v5, v5
+; TONGA-NEXT: v_madmk_f32 v4, v5, 0xcf800000, v4
+; TONGA-NEXT: v_cvt_u32_f32_e32 v8, v5
+; TONGA-NEXT: v_cvt_u32_f32_e32 v9, v4
+; TONGA-NEXT: v_mul_lo_u32 v6, s2, v8
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s2, v9, 0
+; TONGA-NEXT: v_mul_lo_u32 v7, s3, v9
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v5, v6
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v5, v7
+; TONGA-NEXT: v_mul_hi_u32 v10, v9, v4
+; TONGA-NEXT: v_mad_u64_u32 v[5:6], s[0:1], v9, v7, 0
+; TONGA-NEXT: v_add_u32_e32 v10, vcc, v10, v5
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], v8, v4, 0
+; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v6, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], v8, v7, 0
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v10, v4
+; TONGA-NEXT: v_addc_u32_e32 v4, vcc, v11, v5, vcc
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v7, vcc
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v4, v6
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v5, vcc
+; TONGA-NEXT: v_add_u32_e32 v10, vcc, v9, v4
+; TONGA-NEXT: v_addc_u32_e32 v11, vcc, v8, v5, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s2, v10, 0
+; TONGA-NEXT: v_mul_lo_u32 v8, s2, v11
+; TONGA-NEXT: v_mul_lo_u32 v9, s3, v10
+; TONGA-NEXT: v_mul_hi_u32 v12, v10, v4
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], v11, v4, 0
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v8, v5
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v5, v9
+; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v10, v5, 0
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], v11, v5, 0
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v12, v8
+; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v9, vcc
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v8, v6
+; TONGA-NEXT: v_addc_u32_e32 v6, vcc, v9, v7, vcc
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v5, vcc
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v6, v4
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v5, vcc
+; TONGA-NEXT: s_add_u32 s0, s12, s18
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v10, v4
+; TONGA-NEXT: s_addc_u32 s1, s13, s18
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, v11, v5, vcc
+; TONGA-NEXT: s_xor_b64 s[20:21], s[0:1], s[18:19]
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s20, v7, 0
+; TONGA-NEXT: v_mul_hi_u32 v8, s20, v6
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v8, v4
+; TONGA-NEXT: v_addc_u32_e32 v9, vcc, 0, v5, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s21, v6, 0
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s21, v7, 0
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v8, v4
+; TONGA-NEXT: v_addc_u32_e32 v4, vcc, v9, v5, vcc
+; TONGA-NEXT: v_addc_u32_e32 v5, vcc, 0, v7, vcc
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v4, v6
+; TONGA-NEXT: v_addc_u32_e32 v4, vcc, 0, v5, vcc
+; TONGA-NEXT: v_mul_lo_u32 v7, s14, v4
+; TONGA-NEXT: v_mad_u64_u32 v[4:5], s[0:1], s14, v6, 0
+; TONGA-NEXT: v_mul_lo_u32 v6, s15, v6
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v7, v5
+; TONGA-NEXT: v_add_u32_e32 v5, vcc, v6, v5
+; TONGA-NEXT: v_sub_u32_e32 v6, vcc, s21, v5
+; TONGA-NEXT: v_mov_b32_e32 v7, s15
+; TONGA-NEXT: v_sub_u32_e32 v4, vcc, s20, v4
+; TONGA-NEXT: v_subb_u32_e64 v6, s[0:1], v6, v7, vcc
+; TONGA-NEXT: v_subrev_u32_e64 v8, s[0:1], s14, v4
+; TONGA-NEXT: v_subbrev_u32_e64 v9, s[2:3], 0, v6, s[0:1]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s15, v9
+; TONGA-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s14, v8
+; TONGA-NEXT: v_subb_u32_e64 v6, s[0:1], v6, v7, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v11, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], s15, v9
+; TONGA-NEXT: v_subrev_u32_e64 v7, s[0:1], s14, v8
+; TONGA-NEXT: v_cndmask_b32_e64 v10, v10, v11, s[2:3]
+; TONGA-NEXT: v_subbrev_u32_e64 v6, s[0:1], 0, v6, s[0:1]
+; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v10
+; TONGA-NEXT: v_cndmask_b32_e64 v7, v8, v7, s[0:1]
+; TONGA-NEXT: v_mov_b32_e32 v8, s21
+; TONGA-NEXT: v_subb_u32_e32 v5, vcc, v8, v5, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s15, v5
+; TONGA-NEXT: v_cndmask_b32_e64 v8, 0, -1, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s14, v4
+; TONGA-NEXT: v_cndmask_b32_e64 v6, v9, v6, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v9, 0, -1, vcc
+; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, s15, v5
+; TONGA-NEXT: v_cndmask_b32_e32 v8, v8, v9, vcc
+; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v8
+; TONGA-NEXT: v_cndmask_b32_e32 v4, v4, v7, vcc
+; TONGA-NEXT: v_cndmask_b32_e32 v5, v5, v6, vcc
+; TONGA-NEXT: v_xor_b32_e32 v4, s18, v4
+; TONGA-NEXT: v_xor_b32_e32 v5, s18, v5
+; TONGA-NEXT: v_mov_b32_e32 v6, s18
+; TONGA-NEXT: v_subrev_u32_e32 v4, vcc, s18, v4
+; TONGA-NEXT: v_subb_u32_e32 v5, vcc, v5, v6, vcc
+; TONGA-NEXT: s_cbranch_execnz .LBB12_9
+; TONGA-NEXT: .LBB12_8:
+; TONGA-NEXT: v_cvt_f32_u32_e32 v4, s10
+; TONGA-NEXT: s_sub_i32 s0, 0, s10
+; TONGA-NEXT: v_rcp_iflag_f32_e32 v4, v4
+; TONGA-NEXT: v_mul_f32_e32 v4, 0x4f7ffffe, v4
+; TONGA-NEXT: v_cvt_u32_f32_e32 v4, v4
+; TONGA-NEXT: v_mul_lo_u32 v5, s0, v4
+; TONGA-NEXT: v_mul_hi_u32 v5, v4, v5
+; TONGA-NEXT: v_add_u32_e32 v4, vcc, v4, v5
+; TONGA-NEXT: v_mul_hi_u32 v4, s12, v4
+; TONGA-NEXT: v_mul_lo_u32 v4, v4, s10
+; TONGA-NEXT: v_sub_u32_e32 v4, vcc, s12, v4
+; TONGA-NEXT: v_subrev_u32_e32 v5, vcc, s10, v4
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s10, v4
+; TONGA-NEXT: v_cndmask_b32_e32 v4, v4, v5, vcc
+; TONGA-NEXT: v_subrev_u32_e32 v5, vcc, s10, v4
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s10, v4
+; TONGA-NEXT: v_cndmask_b32_e32 v4, v4, v5, vcc
+; TONGA-NEXT: v_mov_b32_e32 v5, 0
+; TONGA-NEXT: .LBB12_9:
+; TONGA-NEXT: s_or_b64 s[0:1], s[8:9], s[6:7]
+; TONGA-NEXT: s_mov_b32 s0, 0
+; TONGA-NEXT: s_cmp_lg_u64 s[0:1], 0
+; TONGA-NEXT: s_cbranch_scc0 .LBB12_16
+; TONGA-NEXT: ; %bb.10:
+; TONGA-NEXT: s_ashr_i32 s0, s7, 31
+; TONGA-NEXT: s_add_u32 s2, s6, s0
+; TONGA-NEXT: s_mov_b32 s1, s0
+; TONGA-NEXT: s_addc_u32 s3, s7, s0
+; TONGA-NEXT: s_xor_b64 s[10:11], s[2:3], s[0:1]
+; TONGA-NEXT: v_cvt_f32_u32_e32 v6, s10
+; TONGA-NEXT: v_cvt_f32_u32_e32 v7, s11
+; TONGA-NEXT: s_sub_u32 s2, 0, s10
+; TONGA-NEXT: s_subb_u32 s3, 0, s11
+; TONGA-NEXT: s_ashr_i32 s14, s9, 31
+; TONGA-NEXT: v_madmk_f32 v6, v7, 0x4f800000, v6
+; TONGA-NEXT: v_rcp_f32_e32 v6, v6
+; TONGA-NEXT: s_mov_b32 s15, s14
+; TONGA-NEXT: v_mul_f32_e32 v6, 0x5f7ffffc, v6
+; TONGA-NEXT: v_mul_f32_e32 v7, 0x2f800000, v6
+; TONGA-NEXT: v_trunc_f32_e32 v7, v7
+; TONGA-NEXT: v_madmk_f32 v6, v7, 0xcf800000, v6
+; TONGA-NEXT: v_cvt_u32_f32_e32 v10, v7
+; TONGA-NEXT: v_cvt_u32_f32_e32 v11, v6
+; TONGA-NEXT: v_mul_lo_u32 v8, s2, v10
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s2, v11, 0
+; TONGA-NEXT: v_mul_lo_u32 v9, s3, v11
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v7, v8
+; TONGA-NEXT: v_add_u32_e32 v9, vcc, v7, v9
+; TONGA-NEXT: v_mul_hi_u32 v12, v11, v6
+; TONGA-NEXT: v_mad_u64_u32 v[7:8], s[0:1], v11, v9, 0
+; TONGA-NEXT: v_add_u32_e32 v12, vcc, v12, v7
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], v10, v6, 0
+; TONGA-NEXT: v_addc_u32_e32 v13, vcc, 0, v8, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v10, v9, 0
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v12, v6
+; TONGA-NEXT: v_addc_u32_e32 v6, vcc, v13, v7, vcc
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v9, vcc
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v6, v8
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
+; TONGA-NEXT: v_add_u32_e32 v12, vcc, v11, v6
+; TONGA-NEXT: v_addc_u32_e32 v13, vcc, v10, v7, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s2, v12, 0
+; TONGA-NEXT: v_mul_lo_u32 v10, s2, v13
+; TONGA-NEXT: v_mul_lo_u32 v11, s3, v12
+; TONGA-NEXT: v_mul_hi_u32 v14, v12, v6
+; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v13, v6, 0
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v10, v7
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v7, v11
+; TONGA-NEXT: v_mad_u64_u32 v[10:11], s[0:1], v12, v7, 0
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], v13, v7, 0
+; TONGA-NEXT: v_add_u32_e32 v10, vcc, v14, v10
+; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v11, vcc
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v10, v8
+; TONGA-NEXT: v_addc_u32_e32 v8, vcc, v11, v9, vcc
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v8, v6
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
+; TONGA-NEXT: s_add_u32 s0, s8, s14
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v12, v6
+; TONGA-NEXT: s_addc_u32 s1, s9, s14
+; TONGA-NEXT: v_addc_u32_e32 v9, vcc, v13, v7, vcc
+; TONGA-NEXT: s_xor_b64 s[16:17], s[0:1], s[14:15]
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s16, v9, 0
+; TONGA-NEXT: v_mul_hi_u32 v10, s16, v8
+; TONGA-NEXT: v_add_u32_e32 v10, vcc, v10, v6
+; TONGA-NEXT: v_addc_u32_e32 v11, vcc, 0, v7, vcc
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s17, v8, 0
+; TONGA-NEXT: v_mad_u64_u32 v[8:9], s[0:1], s17, v9, 0
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v10, v6
+; TONGA-NEXT: v_addc_u32_e32 v6, vcc, v11, v7, vcc
+; TONGA-NEXT: v_addc_u32_e32 v7, vcc, 0, v9, vcc
+; TONGA-NEXT: v_add_u32_e32 v8, vcc, v6, v8
+; TONGA-NEXT: v_addc_u32_e32 v6, vcc, 0, v7, vcc
+; TONGA-NEXT: v_mul_lo_u32 v9, s10, v6
+; TONGA-NEXT: v_mad_u64_u32 v[6:7], s[0:1], s10, v8, 0
+; TONGA-NEXT: v_mul_lo_u32 v8, s11, v8
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v9, v7
+; TONGA-NEXT: v_add_u32_e32 v7, vcc, v8, v7
+; TONGA-NEXT: v_sub_u32_e32 v8, vcc, s17, v7
+; TONGA-NEXT: v_mov_b32_e32 v9, s11
+; TONGA-NEXT: v_sub_u32_e32 v6, vcc, s16, v6
+; TONGA-NEXT: v_subb_u32_e64 v8, s[0:1], v8, v9, vcc
+; TONGA-NEXT: v_subrev_u32_e64 v10, s[0:1], s10, v6
+; TONGA-NEXT: v_subbrev_u32_e64 v11, s[2:3], 0, v8, s[0:1]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s11, v11
+; TONGA-NEXT: v_cndmask_b32_e64 v12, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_le_u32_e64 s[2:3], s10, v10
+; TONGA-NEXT: v_subb_u32_e64 v8, s[0:1], v8, v9, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[2:3]
+; TONGA-NEXT: v_cmp_eq_u32_e64 s[2:3], s11, v11
+; TONGA-NEXT: v_subrev_u32_e64 v9, s[0:1], s10, v10
+; TONGA-NEXT: v_cndmask_b32_e64 v12, v12, v13, s[2:3]
+; TONGA-NEXT: v_subbrev_u32_e64 v8, s[0:1], 0, v8, s[0:1]
+; TONGA-NEXT: v_cmp_ne_u32_e64 s[0:1], 0, v12
+; TONGA-NEXT: v_cndmask_b32_e64 v9, v10, v9, s[0:1]
+; TONGA-NEXT: v_mov_b32_e32 v10, s17
+; TONGA-NEXT: v_subb_u32_e32 v7, vcc, v10, v7, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s11, v7
+; TONGA-NEXT: v_cndmask_b32_e64 v10, 0, -1, vcc
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s10, v6
+; TONGA-NEXT: v_cndmask_b32_e64 v8, v11, v8, s[0:1]
+; TONGA-NEXT: v_cndmask_b32_e64 v11, 0, -1, vcc
+; TONGA-NEXT: v_cmp_eq_u32_e32 vcc, s11, v7
+; TONGA-NEXT: v_cndmask_b32_e32 v10, v10, v11, vcc
+; TONGA-NEXT: v_cmp_ne_u32_e32 vcc, 0, v10
+; TONGA-NEXT: v_cndmask_b32_e32 v6, v6, v9, vcc
+; TONGA-NEXT: v_cndmask_b32_e32 v7, v7, v8, vcc
+; TONGA-NEXT: v_xor_b32_e32 v6, s14, v6
+; TONGA-NEXT: v_xor_b32_e32 v7, s14, v7
+; TONGA-NEXT: v_mov_b32_e32 v8, s14
+; TONGA-NEXT: v_subrev_u32_e32 v6, vcc, s14, v6
+; TONGA-NEXT: v_subb_u32_e32 v7, vcc, v7, v8, vcc
+; TONGA-NEXT: s_cbranch_execnz .LBB12_12
+; TONGA-NEXT: .LBB12_11:
+; TONGA-NEXT: v_cvt_f32_u32_e32 v6, s6
+; TONGA-NEXT: s_sub_i32 s0, 0, s6
+; TONGA-NEXT: v_rcp_iflag_f32_e32 v6, v6
+; TONGA-NEXT: v_mul_f32_e32 v6, 0x4f7ffffe, v6
+; TONGA-NEXT: v_cvt_u32_f32_e32 v6, v6
+; TONGA-NEXT: v_mul_lo_u32 v7, s0, v6
+; TONGA-NEXT: v_mul_hi_u32 v7, v6, v7
+; TONGA-NEXT: v_add_u32_e32 v6, vcc, v6, v7
+; TONGA-NEXT: v_mul_hi_u32 v6, s8, v6
+; TONGA-NEXT: v_mul_lo_u32 v6, v6, s6
+; TONGA-NEXT: v_sub_u32_e32 v6, vcc, s8, v6
+; TONGA-NEXT: v_subrev_u32_e32 v7, vcc, s6, v6
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s6, v6
+; TONGA-NEXT: v_cndmask_b32_e32 v6, v6, v7, vcc
+; TONGA-NEXT: v_subrev_u32_e32 v7, vcc, s6, v6
+; TONGA-NEXT: v_cmp_le_u32_e32 vcc, s6, v6
+; TONGA-NEXT: v_cndmask_b32_e32 v6, v6, v7, vcc
+; TONGA-NEXT: v_mov_b32_e32 v7, 0
; TONGA-NEXT: .LBB12_12:
-; TONGA-NEXT: v_mov_b32_e32 v0, s4
-; TONGA-NEXT: v_mov_b32_e32 v1, s5
+; TONGA-NEXT: v_mov_b32_e32 v9, s5
+; TONGA-NEXT: v_mov_b32_e32 v8, s4
; TONGA-NEXT: s_add_u32 s0, s4, 16
-; TONGA-NEXT: flat_store_dwordx4 v[0:1], v[8:11]
+; TONGA-NEXT: flat_store_dwordx4 v[8:9], v[0:3]
; TONGA-NEXT: s_addc_u32 s1, s5, 0
; TONGA-NEXT: v_mov_b32_e32 v0, s0
; TONGA-NEXT: v_mov_b32_e32 v1, s1
-; TONGA-NEXT: flat_store_dwordx4 v[0:1], v[12:15]
+; TONGA-NEXT: flat_store_dwordx4 v[0:1], v[4:7]
; TONGA-NEXT: s_endpgm
; TONGA-NEXT: .LBB12_13:
-; TONGA-NEXT: ; implicit-def: $vgpr8_vgpr9
+; TONGA-NEXT: ; implicit-def: $vgpr0_vgpr1
; TONGA-NEXT: s_branch .LBB12_2
; TONGA-NEXT: .LBB12_14:
; TONGA-NEXT: s_branch .LBB12_5
; TONGA-NEXT: .LBB12_15:
-; TONGA-NEXT: ; implicit-def: $vgpr12_vgpr13
+; TONGA-NEXT: ; implicit-def: $vgpr4_vgpr5
; TONGA-NEXT: s_branch .LBB12_8
; TONGA-NEXT: .LBB12_16:
; TONGA-NEXT: s_branch .LBB12_11