[llvm-branch-commits] [llvm] [WIP] AMDGPU: Handle v_add* in eliminateFrameIndex (PR #102346)
Matt Arsenault via llvm-branch-commits
llvm-branch-commits at lists.llvm.org
Tue Aug 20 04:54:38 PDT 2024
https://github.com/arsenm updated https://github.com/llvm/llvm-project/pull/102346
From af30ca7d438747ce58dd79ef293a9914ceef592f Mon Sep 17 00:00:00 2001
From: Matt Arsenault <Matthew.Arsenault at amd.com>
Date: Tue, 6 Aug 2024 15:25:31 +0400
Subject: [PATCH] [WIP] AMDGPU: Handle v_add* in eliminateFrameIndex
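
This handles V_ADD_U32_e32/e64 and V_ADD_CO_U32_e32/e64 in eliminateFrameIndex,
updating the result register in place and folding the frame offset into the
other operand where possible, instead of always materializing the frame address
in a scavenged register first. As a minimal sketch of the intended lowering,
lifted from the MUBUF wave64 checks in the MIR tests updated below (operand
order and the kill/renamable flags are exactly as those tests check them):

  ; before frame index elimination
  renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.0, implicit-def dead $vcc, implicit $exec

  ; after: the stack pointer is unswizzled with a wave-size shift and used
  ; directly as the other add operand, with the object offset folded into
  ; the immediate
  renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
  renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec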
---
llvm/lib/Target/AMDGPU/SIRegisterInfo.cpp | 246 ++++-
.../CodeGen/AMDGPU/GlobalISel/flat-scratch.ll | 76 +-
.../test/CodeGen/AMDGPU/amdgpu-cs-chain-cc.ll | 3 +-
.../AMDGPU/amdgpu-cs-chain-preserve-cc.ll | 3 +-
.../test/CodeGen/AMDGPU/array-ptr-calc-i32.ll | 7 +-
.../eliminate-frame-index-v-add-co-u32.mir | 917 +++++++-----------
.../eliminate-frame-index-v-add-u32.mir | 392 ++++----
llvm/test/CodeGen/AMDGPU/flat-scratch.ll | 8 +-
llvm/test/CodeGen/AMDGPU/frame-index.mir | 8 +-
.../materialize-frame-index-sgpr.gfx10.ll | 28 +-
.../CodeGen/AMDGPU/mubuf-offset-private.ll | 10 +-
llvm/test/CodeGen/AMDGPU/scratch-simple.ll | 2 +-
...tack-pointer-offset-relative-frameindex.ll | 2 +-
llvm/test/CodeGen/AMDGPU/stack-realign.ll | 2 +-
14 files changed, 802 insertions(+), 902 deletions(-)
diff --git a/llvm/lib/Target/AMDGPU/SIRegisterInfo.cpp b/llvm/lib/Target/AMDGPU/SIRegisterInfo.cpp
index 4c571a36e4896c..0427b021d47961 100644
--- a/llvm/lib/Target/AMDGPU/SIRegisterInfo.cpp
+++ b/llvm/lib/Target/AMDGPU/SIRegisterInfo.cpp
@@ -2250,7 +2250,7 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
assert(MF->getRegInfo().isReserved(MFI->getScratchRSrcReg()) &&
"unreserved scratch RSRC register");
- MachineOperand &FIOp = MI->getOperand(FIOperandNum);
+ MachineOperand *FIOp = &MI->getOperand(FIOperandNum);
int Index = MI->getOperand(FIOperandNum).getIndex();
Register FrameReg = FrameInfo.isFixedObjectIndex(Index) && hasBasePointer(*MF)
@@ -2432,6 +2432,208 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
MI->eraseFromParent();
return true;
}
+ case AMDGPU::V_ADD_U32_e32:
+ case AMDGPU::V_ADD_U32_e64:
+ case AMDGPU::V_ADD_CO_U32_e32:
+ case AMDGPU::V_ADD_CO_U32_e64: {
+ // TODO: Handle sub, and, or.
+ unsigned NumDefs = MI->getNumExplicitDefs();
+ unsigned Src0Idx = NumDefs;
+
+ bool HasClamp = false;
+ MachineOperand *VCCOp = nullptr;
+
+ switch (MI->getOpcode()) {
+ case AMDGPU::V_ADD_U32_e32:
+ break;
+ case AMDGPU::V_ADD_U32_e64:
+ HasClamp = MI->getOperand(3).getImm();
+ break;
+ case AMDGPU::V_ADD_CO_U32_e32:
+ VCCOp = &MI->getOperand(3);
+ break;
+ case AMDGPU::V_ADD_CO_U32_e64:
+ VCCOp = &MI->getOperand(1);
+ HasClamp = MI->getOperand(4).getImm();
+ break;
+ default:
+ break;
+ }
+ bool DeadVCC = !VCCOp || VCCOp->isDead();
+ MachineOperand &DstOp = MI->getOperand(0);
+ Register DstReg = DstOp.getReg();
+
+ unsigned OtherOpIdx =
+ FIOperandNum == Src0Idx ? FIOperandNum + 1 : Src0Idx;
+ MachineOperand *OtherOp = &MI->getOperand(OtherOpIdx);
+
+ unsigned Src1Idx = Src0Idx + 1;
+ Register MaterializedReg = FrameReg;
+ Register ScavengedVGPR;
+
+ if (FrameReg && !ST.enableFlatScratch()) {
+ // We should just do an in-place update of the result register. However,
+ // the value there may also be used by the add, in which case we need a
+ // temporary register.
+ //
+ // FIXME: The scavenger is not finding the result register in the
+ // common case where the add does not read the register.
+
+ ScavengedVGPR = RS->scavengeRegisterBackwards(
+ AMDGPU::VGPR_32RegClass, MI, /*RestoreAfter=*/false, /*SPAdj=*/0);
+
+ // TODO: If we have a free SGPR, it's sometimes better to use a scalar
+ // shift.
+ BuildMI(*MBB, *MI, DL, TII->get(AMDGPU::V_LSHRREV_B32_e64))
+ .addDef(ScavengedVGPR, RegState::Renamable)
+ .addImm(ST.getWavefrontSizeLog2())
+ .addReg(FrameReg);
+ MaterializedReg = ScavengedVGPR;
+ }
+
+ int64_t Offset = FrameInfo.getObjectOffset(Index);
+ // For the non-immediate case, we could fall through to the default
+ // handling, but we do an in-place update of the result register here to
+ // avoid scavenging another register.
+ if (OtherOp->isImm()) {
+ OtherOp->setImm(OtherOp->getImm() + Offset);
+ Offset = 0;
+ }
+
+ if ((!OtherOp->isImm() || OtherOp->getImm() != 0) && MaterializedReg) {
+ if (ST.enableFlatScratch() &&
+ !TII->isOperandLegal(*MI, Src1Idx, OtherOp)) {
+ // We didn't need the shift above, so we have an SGPR for the frame
+ // register, but may have a VGPR only operand.
+ //
+ // TODO: On gfx10+, we can easily change the opcode to the e64 version
+ // and use the higher constant bus restriction to avoid this copy.
+
+ if (!ScavengedVGPR) {
+ ScavengedVGPR = RS->scavengeRegisterBackwards(
+ AMDGPU::VGPR_32RegClass, MI, /*RestoreAfter=*/false,
+ /*SPAdj=*/0);
+ }
+
+ assert(ScavengedVGPR != DstReg);
+
+ BuildMI(*MBB, *MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), ScavengedVGPR)
+ .addReg(MaterializedReg,
+ MaterializedReg != FrameReg ? RegState::Kill : 0);
+ MaterializedReg = ScavengedVGPR;
+ }
+
+ auto AddI32 = BuildMI(*MBB, *MI, DL, TII->get(MI->getOpcode()))
+ .addDef(DstReg, RegState::Renamable);
+ if (NumDefs == 2)
+ AddI32.add(MI->getOperand(1));
+
+ unsigned MaterializedRegFlags =
+ MaterializedReg != FrameReg ? RegState::Kill : 0;
+
+ if (isVGPRClass(getPhysRegBaseClass(MaterializedReg))) {
+ // If we know we have a VGPR already, it's more likely the other
+ // operand is a legal vsrc0.
+ AddI32
+ .add(*OtherOp)
+ .addReg(MaterializedReg, MaterializedRegFlags);
+ } else {
+ // Commute operands to avoid violating VOP2 restrictions. This will
+ // typically happen when using scratch.
+ AddI32
+ .addReg(MaterializedReg, MaterializedRegFlags)
+ .add(*OtherOp);
+ }
+
+ if (MI->getOpcode() == AMDGPU::V_ADD_CO_U32_e64 ||
+ MI->getOpcode() == AMDGPU::V_ADD_U32_e64)
+ AddI32.addImm(0); // clamp
+
+ if (MI->getOpcode() == AMDGPU::V_ADD_CO_U32_e32)
+ AddI32.setOperandDead(3); // Dead vcc
+
+ MaterializedReg = DstReg;
+
+ OtherOp->ChangeToRegister(MaterializedReg, false);
+ OtherOp->setIsKill(true);
+ FIOp->ChangeToImmediate(Offset);
+ Offset = 0;
+ } else if (Offset != 0) {
+ assert(!MaterializedReg);
+ FIOp->ChangeToImmediate(Offset);
+ Offset = 0;
+ } else {
+ if (DeadVCC && !HasClamp) {
+ assert(Offset == 0);
+
+ // TODO: Losing kills and implicit operands. Just mutate to copy and
+ // let lowerCopy deal with it?
+ if (OtherOp->isReg() && OtherOp->getReg() == DstReg) {
+ // Folded to an identity copy.
+ MI->eraseFromParent();
+ return true;
+ }
+
+ // The immediate value should be in OtherOp
+ MI->setDesc(TII->get(AMDGPU::V_MOV_B32_e32));
+ MI->removeOperand(FIOperandNum);
+
+ unsigned NumOps = MI->getNumOperands();
+ for (unsigned I = NumOps - 2; I >= 2; --I)
+ MI->removeOperand(I);
+
+ if (NumDefs == 2)
+ MI->removeOperand(1);
+
+ // The code below can't deal with a mov.
+ return true;
+ }
+
+ // This folded to a constant, but we have to keep the add around for
+ // pointless implicit defs or clamp modifier.
+ FIOp->ChangeToImmediate(0);
+ }
+
+ // Try to improve legality by commuting.
+ if (!TII->isOperandLegal(*MI, Src1Idx) && TII->commuteInstruction(*MI)) {
+ std::swap(FIOp, OtherOp);
+ std::swap(FIOperandNum, OtherOpIdx);
+ }
+
+ for (unsigned SrcIdx : {Src1Idx, Src0Idx}) {
+ // Depending on operand constraints we may need to insert another copy.
+ if (!TII->isOperandLegal(*MI, SrcIdx)) {
+ // If commuting didn't make the operands legal, we need to materialize
+ // in a register.
+ // TODO: Can use SGPR on gfx10+ in some cases.
+ if (!ScavengedVGPR) {
+ ScavengedVGPR = RS->scavengeRegisterBackwards(
+ AMDGPU::VGPR_32RegClass, MI, /*RestoreAfter=*/false,
+ /*SPAdj=*/0);
+ }
+
+ assert(ScavengedVGPR != DstReg);
+
+ MachineOperand &Src = MI->getOperand(SrcIdx);
+ BuildMI(*MBB, *MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), ScavengedVGPR)
+ .add(Src);
+
+ Src.ChangeToRegister(ScavengedVGPR, false);
+ Src.setIsKill(true);
+ }
+ }
+
+ // Fold out add of 0 case that can appear in kernels.
+ if (FIOp->isImm() && FIOp->getImm() == 0 && DeadVCC && !HasClamp) {
+ if (OtherOp->isReg() && OtherOp->getReg() != DstReg) {
+ BuildMI(*MBB, *MI, DL, TII->get(AMDGPU::COPY), DstReg).add(*OtherOp);
+ }
+
+ MI->eraseFromParent();
+ }
+
+ return true;
+ }
case AMDGPU::S_ADD_I32: {
// TODO: Handle s_or_b32, s_and_b32.
unsigned OtherOpIdx = FIOperandNum == 1 ? 2 : 1;
@@ -2472,9 +2674,9 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
Offset = 0;
if (MaterializedReg)
- FIOp.ChangeToRegister(MaterializedReg, false);
+ FIOp->ChangeToRegister(MaterializedReg, false);
else
- FIOp.ChangeToImmediate(0);
+ FIOp->ChangeToImmediate(0);
} else if (MaterializedReg) {
// If we can't fold the other operand, do another increment.
Register DstReg = DstOp.getReg();
@@ -2497,19 +2699,19 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
OtherOp.ChangeToRegister(MaterializedReg, false);
OtherOp.setIsKill(true);
OtherOp.setIsRenamable(true);
- FIOp.ChangeToImmediate(Offset);
+ FIOp->ChangeToImmediate(Offset);
} else {
// If we don't have any other offset to apply, we can just directly
// interpret the frame index as the offset.
- FIOp.ChangeToImmediate(Offset);
+ FIOp->ChangeToImmediate(Offset);
}
if (DeadSCC && OtherOp.isImm() && OtherOp.getImm() == 0) {
assert(Offset == 0);
MI->removeOperand(3);
MI->removeOperand(OtherOpIdx);
- MI->setDesc(TII->get(FIOp.isReg() ? AMDGPU::COPY : AMDGPU::S_MOV_B32));
- } else if (DeadSCC && FIOp.isImm() && FIOp.getImm() == 0) {
+ MI->setDesc(TII->get(FIOp->isReg() ? AMDGPU::COPY : AMDGPU::S_MOV_B32));
+ } else if (DeadSCC && FIOp->isImm() && FIOp->getImm() == 0) {
assert(Offset == 0);
MI->removeOperand(3);
MI->removeOperand(FIOperandNum);
@@ -2517,7 +2719,7 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
TII->get(OtherOp.isReg() ? AMDGPU::COPY : AMDGPU::S_MOV_B32));
}
- assert(!FIOp.isFI());
+ assert(!FIOp->isFI());
return true;
}
default: {
@@ -2533,7 +2735,7 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
// The offset is always swizzled, just replace it
if (FrameReg)
- FIOp.ChangeToRegister(FrameReg, false);
+ FIOp->ChangeToRegister(FrameReg, false);
MachineOperand *OffsetOp =
TII->getNamedOperand(*MI, AMDGPU::OpName::offset);
@@ -2586,18 +2788,18 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
}
if (!FrameReg) {
- FIOp.ChangeToImmediate(Offset);
- if (TII->isImmOperandLegal(*MI, FIOperandNum, FIOp))
+ FIOp->ChangeToImmediate(Offset);
+ if (TII->isImmOperandLegal(*MI, FIOperandNum, *FIOp))
return false;
}
// We need to use register here. Check if we can use an SGPR or need
// a VGPR.
- FIOp.ChangeToRegister(AMDGPU::M0, false);
- bool UseSGPR = TII->isOperandLegal(*MI, FIOperandNum, &FIOp);
+ FIOp->ChangeToRegister(AMDGPU::M0, false);
+ bool UseSGPR = TII->isOperandLegal(*MI, FIOperandNum, FIOp);
if (!Offset && FrameReg && UseSGPR) {
- FIOp.setReg(FrameReg);
+ FIOp->setReg(FrameReg);
return false;
}
@@ -2606,8 +2808,8 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
Register TmpReg =
RS->scavengeRegisterBackwards(*RC, MI, false, 0, !UseSGPR);
- FIOp.setReg(TmpReg);
- FIOp.setIsKill();
+ FIOp->setReg(TmpReg);
+ FIOp->setIsKill();
if ((!FrameReg || !Offset) && TmpReg) {
unsigned Opc = UseSGPR ? AMDGPU::S_MOV_B32 : AMDGPU::V_MOV_B32_e32;
@@ -2636,8 +2838,8 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
if (!TmpSReg) {
// Use frame register and restore it after.
TmpSReg = FrameReg;
- FIOp.setReg(FrameReg);
- FIOp.setIsKill(false);
+ FIOp->setReg(FrameReg);
+ FIOp->setIsKill(false);
}
if (NeedSaveSCC) {
@@ -2885,7 +3087,7 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
MI->eraseFromParent();
return true;
}
- FIOp.ChangeToRegister(ResultReg, false, false, true);
+ FIOp->ChangeToRegister(ResultReg, false, false, true);
return false;
}
@@ -2916,13 +3118,13 @@ bool SIRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator MI,
// If the offset is simply too big, don't convert to a scratch wave offset
// relative index.
- FIOp.ChangeToImmediate(Offset);
- if (!TII->isImmOperandLegal(*MI, FIOperandNum, FIOp)) {
+ FIOp->ChangeToImmediate(Offset);
+ if (!TII->isImmOperandLegal(*MI, FIOperandNum, *FIOp)) {
Register TmpReg = RS->scavengeRegisterBackwards(AMDGPU::VGPR_32RegClass,
MI, false, 0);
BuildMI(*MBB, MI, DL, TII->get(AMDGPU::V_MOV_B32_e32), TmpReg)
.addImm(Offset);
- FIOp.ChangeToRegister(TmpReg, false, false, true);
+ FIOp->ChangeToRegister(TmpReg, false, false, true);
}
}
}
diff --git a/llvm/test/CodeGen/AMDGPU/GlobalISel/flat-scratch.ll b/llvm/test/CodeGen/AMDGPU/GlobalISel/flat-scratch.ll
index 04833eaaa3283b..a240906ac8532a 100644
--- a/llvm/test/CodeGen/AMDGPU/GlobalISel/flat-scratch.ll
+++ b/llvm/test/CodeGen/AMDGPU/GlobalISel/flat-scratch.ll
@@ -95,15 +95,13 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX9-LABEL: store_load_vindex_kernel:
; GFX9: ; %bb.0: ; %bb
; GFX9-NEXT: s_add_u32 flat_scratch_lo, s6, s11
-; GFX9-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX9-NEXT: v_sub_u32_e32 v0, 0, v0
; GFX9-NEXT: s_addc_u32 flat_scratch_hi, s7, 0
-; GFX9-NEXT: v_add_u32_e32 v1, 0, v1
+; GFX9-NEXT: v_lshlrev_b32_e32 v1, 2, v0
; GFX9-NEXT: v_mov_b32_e32 v2, 15
-; GFX9-NEXT: v_lshlrev_b32_e32 v0, 2, v0
+; GFX9-NEXT: v_sub_u32_e32 v0, 0, v0
; GFX9-NEXT: scratch_store_dword v1, v2, off
; GFX9-NEXT: s_waitcnt vmcnt(0)
-; GFX9-NEXT: v_add_u32_e32 v0, 0, v0
+; GFX9-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX9-NEXT: scratch_load_dword v0, v0, off offset:124 glc
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: s_endpgm
@@ -118,8 +116,6 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX10-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX10-NEXT: v_mov_b32_e32 v2, 15
; GFX10-NEXT: v_lshlrev_b32_e32 v1, 2, v1
-; GFX10-NEXT: v_add_nc_u32_e32 v0, 0, v0
-; GFX10-NEXT: v_add_nc_u32_e32 v1, 0, v1
; GFX10-NEXT: scratch_store_dword v0, v2, off
; GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GFX10-NEXT: scratch_load_dword v0, v1, off offset:124 glc dlc
@@ -130,12 +126,11 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX940: ; %bb.0: ; %bb
; GFX940-NEXT: v_and_b32_e32 v0, 0x3ff, v0
; GFX940-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX940-NEXT: v_sub_u32_e32 v0, 0, v0
; GFX940-NEXT: v_mov_b32_e32 v2, 15
-; GFX940-NEXT: v_lshlrev_b32_e32 v0, 2, v0
+; GFX940-NEXT: v_sub_u32_e32 v0, 0, v0
; GFX940-NEXT: scratch_store_dword v1, v2, off sc0 sc1
; GFX940-NEXT: s_waitcnt vmcnt(0)
-; GFX940-NEXT: v_add_u32_e32 v0, 0, v0
+; GFX940-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX940-NEXT: scratch_load_dword v0, v0, off offset:124 sc0 sc1
; GFX940-NEXT: s_waitcnt vmcnt(0)
; GFX940-NEXT: s_endpgm
@@ -150,7 +145,6 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX11-NEXT: v_lshlrev_b32_e32 v1, 2, v1
; GFX11-NEXT: scratch_store_b32 v0, v2, off dlc
; GFX11-NEXT: s_waitcnt_vscnt null, 0x0
-; GFX11-NEXT: v_add_nc_u32_e32 v1, 0, v1
; GFX11-NEXT: scratch_load_b32 v0, v1, off offset:124 glc dlc
; GFX11-NEXT: s_waitcnt vmcnt(0)
; GFX11-NEXT: s_endpgm
@@ -529,15 +523,15 @@ define void @store_load_vindex_small_offset_foo(i32 %idx) {
; GFX9-NEXT: scratch_load_dword v1, off, s32 glc
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX9-NEXT: s_add_i32 s0, s32, 0x100
; GFX9-NEXT: v_and_b32_e32 v0, 15, v0
-; GFX9-NEXT: v_add_u32_e32 v1, s0, v1
-; GFX9-NEXT: v_mov_b32_e32 v2, 15
+; GFX9-NEXT: v_add_u32_e32 v1, s32, v1
; GFX9-NEXT: v_lshlrev_b32_e32 v0, 2, v0
-; GFX9-NEXT: s_add_i32 s0, s32, 0x100
+; GFX9-NEXT: v_add_u32_e32 v1, 0x100, v1
+; GFX9-NEXT: v_mov_b32_e32 v2, 15
+; GFX9-NEXT: v_add_u32_e32 v0, s32, v0
; GFX9-NEXT: scratch_store_dword v1, v2, off
; GFX9-NEXT: s_waitcnt vmcnt(0)
-; GFX9-NEXT: v_add_u32_e32 v0, s0, v0
+; GFX9-NEXT: v_add_u32_e32 v0, 0x100, v0
; GFX9-NEXT: scratch_load_dword v0, v0, off glc
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: s_setpc_b64 s[30:31]
@@ -547,14 +541,14 @@ define void @store_load_vindex_small_offset_foo(i32 %idx) {
; GFX10-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX10-NEXT: v_and_b32_e32 v1, 15, v0
; GFX10-NEXT: v_lshlrev_b32_e32 v0, 2, v0
-; GFX10-NEXT: s_add_i32 s0, s32, 0x100
; GFX10-NEXT: v_mov_b32_e32 v2, 15
; GFX10-NEXT: scratch_load_dword v3, off, s32 glc dlc
; GFX10-NEXT: s_waitcnt vmcnt(0)
; GFX10-NEXT: v_lshlrev_b32_e32 v1, 2, v1
-; GFX10-NEXT: v_add_nc_u32_e32 v0, s0, v0
-; GFX10-NEXT: s_add_i32 s0, s32, 0x100
-; GFX10-NEXT: v_add_nc_u32_e32 v1, s0, v1
+; GFX10-NEXT: v_add_nc_u32_e32 v0, s32, v0
+; GFX10-NEXT: v_add_nc_u32_e32 v1, s32, v1
+; GFX10-NEXT: v_add_nc_u32_e32 v0, 0x100, v0
+; GFX10-NEXT: v_add_nc_u32_e32 v1, 0x100, v1
; GFX10-NEXT: scratch_store_dword v0, v2, off
; GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GFX10-NEXT: scratch_load_dword v0, v1, off glc dlc
@@ -567,8 +561,8 @@ define void @store_load_vindex_small_offset_foo(i32 %idx) {
; GFX940-NEXT: scratch_load_dword v1, off, s32 sc0 sc1
; GFX940-NEXT: s_waitcnt vmcnt(0)
; GFX940-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX940-NEXT: s_add_i32 s0, s32, 0x100
-; GFX940-NEXT: v_add_u32_e32 v1, s0, v1
+; GFX940-NEXT: v_add_u32_e32 v1, s32, v1
+; GFX940-NEXT: v_add_u32_e32 v1, 0x100, v1
; GFX940-NEXT: v_mov_b32_e32 v2, 15
; GFX940-NEXT: v_and_b32_e32 v0, 15, v0
; GFX940-NEXT: scratch_store_dword v1, v2, off sc0 sc1
@@ -583,11 +577,12 @@ define void @store_load_vindex_small_offset_foo(i32 %idx) {
; GFX11-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX11-NEXT: v_dual_mov_b32 v2, 15 :: v_dual_lshlrev_b32 v1, 2, v0
; GFX11-NEXT: v_and_b32_e32 v0, 15, v0
-; GFX11-NEXT: s_add_i32 s0, s32, 0x100
; GFX11-NEXT: scratch_load_b32 v3, off, s32 glc dlc
; GFX11-NEXT: s_waitcnt vmcnt(0)
-; GFX11-NEXT: v_add_nc_u32_e32 v1, s0, v1
+; GFX11-NEXT: v_add_nc_u32_e32 v1, s32, v1
; GFX11-NEXT: v_lshlrev_b32_e32 v0, 2, v0
+; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_2)
+; GFX11-NEXT: v_add_nc_u32_e32 v1, 0x100, v1
; GFX11-NEXT: scratch_store_b32 v1, v2, off dlc
; GFX11-NEXT: s_waitcnt_vscnt null, 0x0
; GFX11-NEXT: scratch_load_b32 v0, v0, s32 offset:256 glc dlc
@@ -845,15 +840,15 @@ define void @store_load_vindex_large_offset_foo(i32 %idx) {
; GFX9-NEXT: scratch_load_dword v1, off, s32 offset:4 glc
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX9-NEXT: s_add_i32 s0, s32, 0x4004
; GFX9-NEXT: v_and_b32_e32 v0, 15, v0
-; GFX9-NEXT: v_add_u32_e32 v1, s0, v1
-; GFX9-NEXT: v_mov_b32_e32 v2, 15
+; GFX9-NEXT: v_add_u32_e32 v1, s32, v1
; GFX9-NEXT: v_lshlrev_b32_e32 v0, 2, v0
-; GFX9-NEXT: s_add_i32 s0, s32, 0x4004
+; GFX9-NEXT: v_add_u32_e32 v1, 0x4004, v1
+; GFX9-NEXT: v_mov_b32_e32 v2, 15
+; GFX9-NEXT: v_add_u32_e32 v0, s32, v0
; GFX9-NEXT: scratch_store_dword v1, v2, off
; GFX9-NEXT: s_waitcnt vmcnt(0)
-; GFX9-NEXT: v_add_u32_e32 v0, s0, v0
+; GFX9-NEXT: v_add_u32_e32 v0, 0x4004, v0
; GFX9-NEXT: scratch_load_dword v0, v0, off glc
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: s_setpc_b64 s[30:31]
@@ -863,14 +858,14 @@ define void @store_load_vindex_large_offset_foo(i32 %idx) {
; GFX10-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX10-NEXT: v_and_b32_e32 v1, 15, v0
; GFX10-NEXT: v_lshlrev_b32_e32 v0, 2, v0
-; GFX10-NEXT: s_add_i32 s0, s32, 0x4004
; GFX10-NEXT: v_mov_b32_e32 v2, 15
; GFX10-NEXT: scratch_load_dword v3, off, s32 offset:4 glc dlc
; GFX10-NEXT: s_waitcnt vmcnt(0)
; GFX10-NEXT: v_lshlrev_b32_e32 v1, 2, v1
-; GFX10-NEXT: v_add_nc_u32_e32 v0, s0, v0
-; GFX10-NEXT: s_add_i32 s0, s32, 0x4004
-; GFX10-NEXT: v_add_nc_u32_e32 v1, s0, v1
+; GFX10-NEXT: v_add_nc_u32_e32 v0, s32, v0
+; GFX10-NEXT: v_add_nc_u32_e32 v1, s32, v1
+; GFX10-NEXT: v_add_nc_u32_e32 v0, 0x4004, v0
+; GFX10-NEXT: v_add_nc_u32_e32 v1, 0x4004, v1
; GFX10-NEXT: scratch_store_dword v0, v2, off
; GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GFX10-NEXT: scratch_load_dword v0, v1, off glc dlc
@@ -883,8 +878,8 @@ define void @store_load_vindex_large_offset_foo(i32 %idx) {
; GFX940-NEXT: scratch_load_dword v1, off, s32 offset:4 sc0 sc1
; GFX940-NEXT: s_waitcnt vmcnt(0)
; GFX940-NEXT: v_lshlrev_b32_e32 v1, 2, v0
-; GFX940-NEXT: s_add_i32 s0, s32, 0x4004
-; GFX940-NEXT: v_add_u32_e32 v1, s0, v1
+; GFX940-NEXT: v_add_u32_e32 v1, s32, v1
+; GFX940-NEXT: v_add_u32_e32 v1, 0x4004, v1
; GFX940-NEXT: v_mov_b32_e32 v2, 15
; GFX940-NEXT: v_and_b32_e32 v0, 15, v0
; GFX940-NEXT: scratch_store_dword v1, v2, off sc0 sc1
@@ -903,9 +898,10 @@ define void @store_load_vindex_large_offset_foo(i32 %idx) {
; GFX11-NEXT: s_add_i32 s0, s32, 0x4004
; GFX11-NEXT: scratch_load_b32 v3, off, s32 offset:4 glc dlc
; GFX11-NEXT: s_waitcnt vmcnt(0)
-; GFX11-NEXT: v_add_nc_u32_e32 v1, s0, v1
-; GFX11-NEXT: s_add_i32 s0, s32, 0x4004
+; GFX11-NEXT: v_add_nc_u32_e32 v1, s32, v1
; GFX11-NEXT: v_lshlrev_b32_e32 v0, 2, v0
+; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_2)
+; GFX11-NEXT: v_add_nc_u32_e32 v1, 0x4004, v1
; GFX11-NEXT: scratch_store_b32 v1, v2, off dlc
; GFX11-NEXT: s_waitcnt_vscnt null, 0x0
; GFX11-NEXT: scratch_load_b32 v0, v0, s0 glc dlc
@@ -1129,7 +1125,6 @@ define amdgpu_kernel void @store_load_vidx_sidx_offset(i32 %sidx) {
; GFX9-NEXT: v_mov_b32_e32 v1, 15
; GFX9-NEXT: s_waitcnt lgkmcnt(0)
; GFX9-NEXT: v_add_lshl_u32 v0, s0, v0, 2
-; GFX9-NEXT: v_add_u32_e32 v0, 0, v0
; GFX9-NEXT: scratch_store_dword v0, v1, off offset:1024
; GFX9-NEXT: s_waitcnt vmcnt(0)
; GFX9-NEXT: scratch_load_dword v0, v0, off offset:1024 glc
@@ -1146,7 +1141,6 @@ define amdgpu_kernel void @store_load_vidx_sidx_offset(i32 %sidx) {
; GFX10-NEXT: v_mov_b32_e32 v1, 15
; GFX10-NEXT: s_waitcnt lgkmcnt(0)
; GFX10-NEXT: v_add_lshl_u32 v0, s0, v0, 2
-; GFX10-NEXT: v_add_nc_u32_e32 v0, 0, v0
; GFX10-NEXT: scratch_store_dword v0, v1, off offset:1024
; GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GFX10-NEXT: scratch_load_dword v0, v0, off offset:1024 glc dlc
@@ -1160,7 +1154,6 @@ define amdgpu_kernel void @store_load_vidx_sidx_offset(i32 %sidx) {
; GFX940-NEXT: v_mov_b32_e32 v1, 15
; GFX940-NEXT: s_waitcnt lgkmcnt(0)
; GFX940-NEXT: v_add_lshl_u32 v0, s0, v0, 2
-; GFX940-NEXT: v_add_u32_e32 v0, 0, v0
; GFX940-NEXT: scratch_store_dword v0, v1, off offset:1024 sc0 sc1
; GFX940-NEXT: s_waitcnt vmcnt(0)
; GFX940-NEXT: scratch_load_dword v0, v0, off offset:1024 sc0 sc1
@@ -1172,9 +1165,8 @@ define amdgpu_kernel void @store_load_vidx_sidx_offset(i32 %sidx) {
; GFX11-NEXT: s_load_b32 s0, s[2:3], 0x24
; GFX11-NEXT: v_dual_mov_b32 v1, 15 :: v_dual_and_b32 v0, 0x3ff, v0
; GFX11-NEXT: s_waitcnt lgkmcnt(0)
-; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1) | instskip(NEXT) | instid1(VALU_DEP_1)
+; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1)
; GFX11-NEXT: v_add_lshl_u32 v0, s0, v0, 2
-; GFX11-NEXT: v_add_nc_u32_e32 v0, 0, v0
; GFX11-NEXT: scratch_store_b32 v0, v1, off offset:1024 dlc
; GFX11-NEXT: s_waitcnt_vscnt null, 0x0
; GFX11-NEXT: scratch_load_b32 v0, v0, off offset:1024 glc dlc
diff --git a/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-cc.ll b/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-cc.ll
index c92b78cd45573a..3daf9135233f9a 100644
--- a/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-cc.ll
+++ b/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-cc.ll
@@ -867,7 +867,7 @@ define amdgpu_cs_chain void @amdgpu_cs_chain_dont_realign_stack(i32 %idx) {
; GISEL-GFX11-NEXT: s_mov_b32 s0, 1
; GISEL-GFX11-NEXT: v_lshlrev_b32_e32 v0, 4, v8
; GISEL-GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1)
-; GISEL-GFX11-NEXT: v_add_nc_u32_e32 v4, 0, v0
+; GISEL-GFX11-NEXT: v_mov_b32_e32 v4, v0
; GISEL-GFX11-NEXT: v_dual_mov_b32 v0, s0 :: v_dual_mov_b32 v3, s3
; GISEL-GFX11-NEXT: v_dual_mov_b32 v1, s1 :: v_dual_mov_b32 v2, s2
; GISEL-GFX11-NEXT: scratch_store_b128 v4, v[0:3], off dlc
@@ -882,7 +882,6 @@ define amdgpu_cs_chain void @amdgpu_cs_chain_dont_realign_stack(i32 %idx) {
; GISEL-GFX10-NEXT: v_mov_b32_e32 v2, 2
; GISEL-GFX10-NEXT: v_mov_b32_e32 v3, 3
; GISEL-GFX10-NEXT: v_mov_b32_e32 v4, 4
-; GISEL-GFX10-NEXT: v_add_nc_u32_e32 v0, 0, v0
; GISEL-GFX10-NEXT: buffer_store_dword v1, v0, s[48:51], 0 offen
; GISEL-GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GISEL-GFX10-NEXT: buffer_store_dword v2, v0, s[48:51], 0 offen offset:4
diff --git a/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-preserve-cc.ll b/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-preserve-cc.ll
index 8d9ed9bb4343c6..41700f13439001 100644
--- a/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-preserve-cc.ll
+++ b/llvm/test/CodeGen/AMDGPU/amdgpu-cs-chain-preserve-cc.ll
@@ -592,7 +592,7 @@ define amdgpu_cs_chain_preserve void @amdgpu_cs_chain_preserve_dont_realign_stac
; GISEL-GFX11-NEXT: s_mov_b32 s0, 1
; GISEL-GFX11-NEXT: v_lshlrev_b32_e32 v0, 4, v8
; GISEL-GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1)
-; GISEL-GFX11-NEXT: v_add_nc_u32_e32 v4, 0, v0
+; GISEL-GFX11-NEXT: v_mov_b32_e32 v4, v0
; GISEL-GFX11-NEXT: v_dual_mov_b32 v0, s0 :: v_dual_mov_b32 v3, s3
; GISEL-GFX11-NEXT: v_dual_mov_b32 v1, s1 :: v_dual_mov_b32 v2, s2
; GISEL-GFX11-NEXT: scratch_store_b128 v4, v[0:3], off dlc
@@ -607,7 +607,6 @@ define amdgpu_cs_chain_preserve void @amdgpu_cs_chain_preserve_dont_realign_stac
; GISEL-GFX10-NEXT: v_mov_b32_e32 v2, 2
; GISEL-GFX10-NEXT: v_mov_b32_e32 v3, 3
; GISEL-GFX10-NEXT: v_mov_b32_e32 v4, 4
-; GISEL-GFX10-NEXT: v_add_nc_u32_e32 v0, 0, v0
; GISEL-GFX10-NEXT: buffer_store_dword v1, v0, s[48:51], 0 offen
; GISEL-GFX10-NEXT: s_waitcnt_vscnt null, 0x0
; GISEL-GFX10-NEXT: buffer_store_dword v2, v0, s[48:51], 0 offen offset:4
diff --git a/llvm/test/CodeGen/AMDGPU/array-ptr-calc-i32.ll b/llvm/test/CodeGen/AMDGPU/array-ptr-calc-i32.ll
index d33196ba33109f..ac8782e1e542f7 100644
--- a/llvm/test/CodeGen/AMDGPU/array-ptr-calc-i32.ll
+++ b/llvm/test/CodeGen/AMDGPU/array-ptr-calc-i32.ll
@@ -12,7 +12,12 @@ declare void @llvm.amdgcn.s.barrier() #2
; SI-LABEL: {{^}}test_private_array_ptr_calc:
-; SI-ALLOCA: v_add_i32_e32 [[PTRREG:v[0-9]+]], vcc, 0, v{{[0-9]+}}
+; SI-ALLOCA: buffer_load_dword [[LOAD_A:v[0-9]+]]
+; SI-ALLOCA: buffer_load_dword [[LOAD_B:v[0-9]+]]
+
+; SI-ALLOCA: v_lshlrev_b32_e32 [[SIZE_SCALE:v[0-9]+]], 2, [[LOAD_A]]
+
+; SI-ALLOCA: v_mov_b32_e32 [[PTRREG:v[0-9]+]], [[SIZE_SCALE]]
; SI-ALLOCA: buffer_store_dword {{v[0-9]+}}, [[PTRREG]], s[{{[0-9]+:[0-9]+}}], 0 offen offset:64
; SI-ALLOCA: s_barrier
; SI-ALLOCA: buffer_load_dword {{v[0-9]+}}, [[PTRREG]], s[{{[0-9]+:[0-9]+}}], 0 offen offset:64
diff --git a/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-co-u32.mir b/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-co-u32.mir
index 6ec296144bf193..30ad6ce412251a 100644
--- a/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-co-u32.mir
+++ b/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-co-u32.mir
@@ -22,7 +22,7 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e32__inline_imm__fi_offset0
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
@@ -47,13 +47,15 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e32__inline_imm__fi_offset0_live_vcc
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__inline_imm__fi_offset0_live_vcc
; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.0, implicit-def $vcc, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -72,40 +74,14 @@ machineFunctionInfo:
stackPtrOffsetReg: '$sgpr32'
body: |
bb.0:
- ; GFX7-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; GFX7: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 16, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; GFX8: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 16, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; GFX900: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; GFX90A: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; GFX10: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 28, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 16, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 28, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -124,40 +100,16 @@ machineFunctionInfo:
stackPtrOffsetReg: '$sgpr32'
body: |
bb.0:
- ; GFX7-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; GFX7: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 16, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; GFX8: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 16, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; GFX900: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; GFX90A: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; GFX10: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 28, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__inline_imm___fi_offset_inline_imm_live_vcc
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 16, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 28, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.1, implicit-def $vcc, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -176,7 +128,7 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
@@ -201,13 +153,15 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0_live_vcc
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0_live_vcc
; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0 = V_ADD_CO_U32_e32 68, %stack.0, implicit-def $vcc, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -226,40 +180,14 @@ machineFunctionInfo:
stackPtrOffsetReg: '$sgpr32'
body: |
bb.0:
- ; GFX7-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; GFX7: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 32, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; GFX8: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 32, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; GFX900: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; GFX90A: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; GFX10: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 100, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 32, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 100, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 68, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -278,40 +206,16 @@ machineFunctionInfo:
stackPtrOffsetReg: '$sgpr32'
body: |
bb.0:
- ; GFX7-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; GFX7: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 32, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; GFX8: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 32, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; GFX900: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; GFX90A: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; GFX10: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 100, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__literal__fi_offset0__offset_inlineimm_live_vcc
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 32, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 68, killed $vgpr1, implicit-def $vcc, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 100, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0 = V_ADD_CO_U32_e32 68, %stack.1, implicit-def $vcc, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -333,15 +237,14 @@ body: |
; MUBUFW64-LABEL: name: v_add_co_u32_e32__vgpr__fi_offset0
; MUBUFW64: liveins: $vgpr1
; MUBUFW64-NEXT: {{ $}}
- ; MUBUFW64-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__vgpr__fi_offset0
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 $sgpr32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr32, $vgpr1, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, %stack.0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -363,8 +266,8 @@ body: |
; MUBUFW64-LABEL: name: v_add_co_u32_e32__fi_offset0__vgpr
; MUBUFW64: liveins: $vgpr1
; MUBUFW64-NEXT: {{ $}}
- ; MUBUFW64-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_offset0__vgpr
@@ -390,54 +293,19 @@ machineFunctionInfo:
body: |
bb.0:
liveins: $vgpr1
- ; GFX7-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
- ; GFX7: liveins: $vgpr1
- ; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr2, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr2, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
- ; GFX8: liveins: $vgpr1
- ; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr2, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr2, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
- ; GFX900: liveins: $vgpr1
- ; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
- ; GFX90A: liveins: $vgpr1
- ; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
- ; GFX10: liveins: $vgpr1
- ; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
+ ; MUBUFW64: liveins: $vgpr1
+ ; MUBUFW64-NEXT: {{ $}}
+ ; MUBUFW64-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__vgpr__fi_literal_offset
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr32, $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -457,53 +325,19 @@ machineFunctionInfo:
body: |
bb.0:
liveins: $vgpr1
- ; GFX7-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
- ; GFX7: liveins: $vgpr1
- ; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr2, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr2, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
- ; GFX8: liveins: $vgpr1
- ; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr2, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr2, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
- ; GFX900: liveins: $vgpr1
- ; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
- ; GFX90A: liveins: $vgpr1
- ; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
- ; GFX10: liveins: $vgpr1
- ; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr2, $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
+ ; MUBUFW64: liveins: $vgpr1
+ ; MUBUFW64-NEXT: {{ $}}
+ ; MUBUFW64-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $sgpr4, $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr32, $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 %stack.1, $vgpr1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -523,54 +357,20 @@ machineFunctionInfo:
body: |
bb.0:
liveins: $sgpr8
- ; GFX7-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
- ; GFX7: liveins: $sgpr8
- ; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
- ; GFX8: liveins: $sgpr8
- ; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
- ; GFX900: liveins: $sgpr8
- ; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
- ; GFX90A: liveins: $sgpr8
- ; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
- ; GFX10: liveins: $sgpr8
- ; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
+ ; MUBUFW64: liveins: $sgpr8
+ ; MUBUFW64-NEXT: {{ $}}
+ ; MUBUFW64-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__sgpr__fi_literal_offset
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -589,12 +389,14 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUFW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 0, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0
- ; FLATSCRW64: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, $sgpr32, 0, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, 12, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -613,12 +415,14 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0__clamp
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 1, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0__clamp
- ; FLATSCRW64: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, $sgpr32, 1, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 $sgpr32, 12, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, %stack.0, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -637,12 +441,14 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUFW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0__live_vcc_clamp
- ; MUBUFW64: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; MUBUFW64: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 1, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset0__live_vcc_clamp
- ; FLATSCRW64: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, $sgpr32, 1, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, 12, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr0, 0, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 12, %stack.0, 1, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -665,65 +471,68 @@ body: |
; GFX7-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX7: liveins: $sgpr8
; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX8-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX8: liveins: $sgpr8
; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX900-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX900: liveins: $sgpr8
; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX90A-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX90A: liveins: $sgpr8
; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX10-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX10: liveins: $sgpr8
; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX940-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX940: liveins: $sgpr8
; GFX940-NEXT: {{ $}}
- ; GFX940-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX940-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX11: liveins: $sgpr8
; GFX11-NEXT: {{ $}}
- ; GFX11-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX11-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr
; GFX12: liveins: $sgpr8
; GFX12-NEXT: {{ $}}
- ; GFX12-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX12-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 %stack.1, $sgpr8, 0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -746,65 +555,68 @@ body: |
; GFX7-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX7: liveins: $sgpr8
; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX8-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX8: liveins: $sgpr8
; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX900-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX900: liveins: $sgpr8
; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX90A-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX90A: liveins: $sgpr8
; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX10-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX10: liveins: $sgpr8
; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX940-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX940: liveins: $sgpr8
; GFX940-NEXT: {{ $}}
- ; GFX940-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 1, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX940-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX11: liveins: $sgpr8
; GFX11-NEXT: {{ $}}
- ; GFX11-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 1, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
; GFX11-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr_clamp
; GFX12: liveins: $sgpr8
; GFX12-NEXT: {{ $}}
- ; GFX12-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 1, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
; GFX12-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 %stack.1, $sgpr8, 1, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -828,51 +640,68 @@ body: |
; GFX7-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
; GFX7: liveins: $vgpr8
; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0
;
; GFX8-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
; GFX8: liveins: $vgpr8
; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0
;
; GFX900-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
; GFX900: liveins: $vgpr8
; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0
;
; GFX90A-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
; GFX90A: liveins: $vgpr8
; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0
;
; GFX10-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
; GFX10: liveins: $vgpr8
; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0
;
- ; FLATSCRW64-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
- ; FLATSCRW64: liveins: $vgpr8
- ; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $vgpr8, 0, implicit $exec
- ; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
+ ; GFX940-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
+ ; GFX940: liveins: $vgpr8
+ ; GFX940-NEXT: {{ $}}
+ ; GFX940-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
+ ; GFX940-NEXT: SI_RETURN implicit $vgpr0
+ ;
+ ; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
+ ; GFX11: liveins: $vgpr8
+ ; GFX11-NEXT: {{ $}}
+ ; GFX11-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
+ ; GFX11-NEXT: SI_RETURN implicit $vgpr0
+ ;
+ ; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr
+ ; GFX12: liveins: $vgpr8
+ ; GFX12-NEXT: {{ $}}
+ ; GFX12-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
+ ; GFX12-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, renamable dead $vcc = V_ADD_CO_U32_e64 %stack.1, $vgpr8, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -895,51 +724,68 @@ body: |
; GFX7-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
; GFX7: liveins: $vgpr8
; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0
;
; GFX8-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
; GFX8: liveins: $vgpr8
; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0
;
; GFX900-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
; GFX900: liveins: $vgpr8
; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0
;
; GFX90A-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
; GFX90A: liveins: $vgpr8
; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0
;
; GFX10-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
; GFX10: liveins: $vgpr8
; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0
;
- ; FLATSCRW64-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
- ; FLATSCRW64: liveins: $vgpr8
- ; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $vgpr8, 1, implicit $exec
- ; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
+ ; GFX940-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
+ ; GFX940: liveins: $vgpr8
+ ; GFX940-NEXT: {{ $}}
+ ; GFX940-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
+ ; GFX940-NEXT: SI_RETURN implicit $vgpr0
+ ;
+ ; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
+ ; GFX11: liveins: $vgpr8
+ ; GFX11-NEXT: {{ $}}
+ ; GFX11-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
+ ; GFX11-NEXT: SI_RETURN implicit $vgpr0
+ ;
+ ; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__clamp
+ ; GFX12: liveins: $vgpr8
+ ; GFX12-NEXT: {{ $}}
+ ; GFX12-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 1, implicit $exec
+ ; GFX12-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, renamable dead $vcc = V_ADD_CO_U32_e64 %stack.1, $vgpr8, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -962,51 +808,68 @@ body: |
; GFX7-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
; GFX7: liveins: $vgpr8
; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX8-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
; GFX8: liveins: $vgpr8
; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $sgpr4 = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $sgpr4_sgpr5 = V_ADD_CO_U32_e64 killed $sgpr4, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX900-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
; GFX900: liveins: $vgpr8
; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX90A-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
; GFX90A: liveins: $vgpr8
; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
; GFX10-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
; GFX10: liveins: $vgpr8
; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $vgpr8, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
;
- ; FLATSCRW64-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
- ; FLATSCRW64: liveins: $vgpr8
- ; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $vgpr8, 0, implicit $exec
- ; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
+ ; GFX940-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
+ ; GFX940: liveins: $vgpr8
+ ; GFX940-NEXT: {{ $}}
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
+ ; GFX940-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
+ ;
+ ; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
+ ; GFX11: liveins: $vgpr8
+ ; GFX11-NEXT: {{ $}}
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
+ ; GFX11-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
+ ;
+ ; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__vgpr__live_vcc
+ ; GFX12: liveins: $vgpr8
+ ; GFX12-NEXT: {{ $}}
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
+ ; GFX12-NEXT: SI_RETURN implicit $vgpr0, implicit $vcc
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 %stack.1, $vgpr8, 0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vcc
@@ -1029,13 +892,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__inline_imm__fi_offset0__kernel
- ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1090,13 +951,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 84, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__inline_imm__fi_offset_literal__kernel
- ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 12, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 84, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 12, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1121,8 +980,8 @@ body: |
; GFX7-NEXT: {{ $}}
; GFX7-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; GFX7-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 84, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr1, 0, 1, implicit $exec
; GFX7-NEXT: SI_RETURN implicit $vgpr0
;
; GFX8-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
@@ -1130,8 +989,8 @@ body: |
; GFX8-NEXT: {{ $}}
; GFX8-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; GFX8-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 84, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr1, 0, 1, implicit $exec
; GFX8-NEXT: SI_RETURN implicit $vgpr0
;
; GFX900-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
@@ -1139,8 +998,8 @@ body: |
; GFX900-NEXT: {{ $}}
; GFX900-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; GFX900-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 84, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr1, 0, 1, implicit $exec
; GFX900-NEXT: SI_RETURN implicit $vgpr0
;
; GFX90A-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
@@ -1148,8 +1007,8 @@ body: |
; GFX90A-NEXT: {{ $}}
; GFX90A-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; GFX90A-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $vgpr1, 1, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 84, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr1, 0, 1, implicit $exec
; GFX90A-NEXT: SI_RETURN implicit $vgpr0
;
; GFX10-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
@@ -1157,20 +1016,20 @@ body: |
; GFX10-NEXT: {{ $}}
; GFX10-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; GFX10-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; GFX10-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, 72, 1, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 84, 0, 1, implicit $exec
; GFX10-NEXT: SI_RETURN implicit $vgpr0
;
; GFX940-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
- ; GFX940: $sgpr4 = S_MOV_B32 72
- ; GFX940-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, killed $sgpr4, 1, implicit $exec
+ ; GFX940: $vgpr1 = V_MOV_B32_e32 84, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 killed $vgpr1, 0, 1, implicit $exec
; GFX940-NEXT: SI_RETURN implicit $vgpr0
;
; GFX11-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
- ; GFX11: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, 72, 1, implicit $exec
+ ; GFX11: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 84, 0, 1, implicit $exec
; GFX11-NEXT: SI_RETURN implicit $vgpr0
;
; GFX12-LABEL: name: v_add_co_u32_e64__inline_imm__fi_offset_literal__kernel__clamp
- ; GFX12: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, 72, 1, implicit $exec
+ ; GFX12: renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 84, 0, 1, implicit $exec
; GFX12-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, dead $vcc = V_ADD_CO_U32_e64 12, %stack.1, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1195,10 +1054,10 @@ body: |
; GFX7: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX7-NEXT: {{ $}}
; GFX7-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX7-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX7-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX7-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
; GFX7-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX7-NEXT: SI_RETURN implicit $vgpr0
@@ -1207,10 +1066,10 @@ body: |
; GFX8: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX8-NEXT: {{ $}}
; GFX8-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX8-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX8-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX8-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
; GFX8-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX8-NEXT: SI_RETURN implicit $vgpr0
@@ -1219,9 +1078,10 @@ body: |
; GFX900: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX900-NEXT: {{ $}}
; GFX900-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX900-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX900-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX900-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
; GFX900-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX900-NEXT: SI_RETURN implicit $vgpr0
@@ -1230,9 +1090,10 @@ body: |
; GFX90A: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX90A-NEXT: {{ $}}
; GFX90A-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX90A-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX90A-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX90A-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
; GFX90A-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX90A-NEXT: SI_RETURN implicit $vgpr0
@@ -1241,9 +1102,9 @@ body: |
; GFX10: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX10-NEXT: {{ $}}
; GFX10-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX10-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX10-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
; GFX10-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX10-NEXT: SI_RETURN implicit $vgpr0
@@ -1252,9 +1113,10 @@ body: |
; GFX940: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX940-NEXT: {{ $}}
; GFX940-NEXT: SCRATCH_STORE_DWORD_SADDR killed $vgpr1, $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (store (s32) into %stack.2, addrspace 5)
- ; GFX940-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; GFX940-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; GFX940-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; GFX940-NEXT: $vgpr1 = SCRATCH_LOAD_DWORD_SADDR $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (load (s32) from %stack.2, addrspace 5)
; GFX940-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX940-NEXT: SI_RETURN implicit $vgpr0
@@ -1262,16 +1124,16 @@ body: |
; GFX11-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr__scavenge_spill_required
; GFX11: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX11-NEXT: {{ $}}
- ; GFX11-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX11-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX11-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX11-NEXT: SI_RETURN implicit $vgpr0
;
; GFX12-LABEL: name: v_add_co_u32_e64__fi_literal_offset__sgpr__scavenge_spill_required
; GFX12: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; GFX12-NEXT: {{ $}}
- ; GFX12-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 killed $sgpr4, $sgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; GFX12-NEXT: renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 128, killed $vgpr0, 0, implicit $exec
; GFX12-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; GFX12-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, renamable $vcc = V_ADD_CO_U32_e64 %stack.1, $sgpr8, 0, implicit $exec
@@ -1295,70 +1157,24 @@ body: |
bb.0:
liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr252_vgpr253, $vgpr254, $vgpr255, $sgpr8
- ; GFX7-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
- ; GFX7: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX7-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
- ; GFX8: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX8-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
- ; GFX900: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX900-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
- ; GFX90A: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX90A-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
- ; GFX10: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX10-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
+ ; MUBUFW64: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
+ ; MUBUFW64-NEXT: {{ $}}
+ ; MUBUFW64-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
+ ; MUBUFW64-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
+ ; MUBUFW64-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__sgpr__scavenge_spill_required
; FLATSCRW64: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; FLATSCRW64-NEXT: {{ $}}
; FLATSCRW64-NEXT: SCRATCH_STORE_DWORD_SADDR killed $vgpr1, $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (store (s32) into %stack.2, addrspace 5)
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: $vgpr1 = SCRATCH_LOAD_DWORD_SADDR $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (load (s32) from %stack.2, addrspace 5)
; FLATSCRW64-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
@@ -1383,71 +1199,22 @@ body: |
bb.0:
liveins: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr252_vgpr253, $vgpr254, $vgpr255, $sgpr8
- ; GFX7-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
- ; GFX7: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX7-NEXT: {{ $}}
- ; GFX7-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX7-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX7-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX7-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX7-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX7-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX7-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX7-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX8-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
- ; GFX8: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX8-NEXT: {{ $}}
- ; GFX8-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX8-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX8-NEXT: $vcc_lo = S_MOV_B32 128
- ; GFX8-NEXT: $vgpr1, dead $vcc = V_ADD_CO_U32_e64 killed $vcc_lo, killed $vgpr1, 0, implicit $exec
- ; GFX8-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX8-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX8-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX8-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX900-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
- ; GFX900: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX900-NEXT: {{ $}}
- ; GFX900-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX900-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX900-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX900-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX900-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX900-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX900-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX90A-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
- ; GFX90A: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX90A-NEXT: {{ $}}
- ; GFX90A-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX90A-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX90A-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX90A-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX90A-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX90A-NEXT: SI_RETURN implicit $vgpr0
- ;
- ; GFX10-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
- ; GFX10: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
- ; GFX10-NEXT: {{ $}}
- ; GFX10-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
- ; GFX10-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GFX10-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; GFX10-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; GFX10-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
- ; GFX10-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
- ; GFX10-NEXT: SI_RETURN implicit $vgpr0
+ ; MUBUFW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
+ ; MUBUFW64: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
+ ; MUBUFW64-NEXT: {{ $}}
+ ; MUBUFW64-NEXT: BUFFER_STORE_DWORD_OFFSET killed $vgpr1, $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (store (s32) into %stack.2, addrspace 5)
+ ; MUBUFW64-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: $vgpr1 = BUFFER_LOAD_DWORD_OFFSET $sgpr0_sgpr1_sgpr2_sgpr3, $sgpr32, 132, 0, 0, implicit $exec :: (load (s32) from %stack.2, addrspace 5)
+ ; MUBUFW64-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
+ ; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_literal_offset__vgpr__scavenge_spill_required
; FLATSCRW64: liveins: $sgpr8, $vgpr254, $vgpr255, $vgpr252_vgpr253, $vgpr248_vgpr249_vgpr250_vgpr251, $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: SCRATCH_STORE_DWORD_SADDR killed $vgpr1, $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (store (s32) into %stack.2, addrspace 5)
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, killed $vgpr1, implicit-def dead $vcc, implicit $exec
- ; FLATSCRW64-NEXT: $vgpr1 = SCRATCH_LOAD_DWORD_SADDR $sgpr32, 132, 0, implicit $exec, implicit $flat_scr :: (load (s32) from %stack.2, addrspace 5)
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $sgpr32, $vgpr8, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 128, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: S_NOP 0, implicit $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15, implicit $vgpr16_vgpr17_vgpr18_vgpr19_vgpr20_vgpr21_vgpr22_vgpr23_vgpr24_vgpr25_vgpr26_vgpr27_vgpr28_vgpr29_vgpr30_vgpr31, implicit $vgpr32_vgpr33_vgpr34_vgpr35_vgpr36_vgpr37_vgpr38_vgpr39_vgpr40_vgpr41_vgpr42_vgpr43_vgpr44_vgpr45_vgpr46_vgpr47, implicit $vgpr48_vgpr49_vgpr50_vgpr51_vgpr52_vgpr53_vgpr54_vgpr55_vgpr56_vgpr57_vgpr58_vgpr59_vgpr60_vgpr61_vgpr62_vgpr63, implicit $vgpr64_vgpr65_vgpr66_vgpr67_vgpr68_vgpr69_vgpr70_vgpr71_vgpr72_vgpr73_vgpr74_vgpr75_vgpr76_vgpr77_vgpr78_vgpr79, implicit $vgpr80_vgpr81_vgpr82_vgpr83_vgpr84_vgpr85_vgpr86_vgpr87_vgpr88_vgpr89_vgpr90_vgpr91_vgpr92_vgpr93_vgpr94_vgpr95, implicit $vgpr96_vgpr97_vgpr98_vgpr99_vgpr100_vgpr101_vgpr102_vgpr103_vgpr104_vgpr105_vgpr106_vgpr107_vgpr108_vgpr109_vgpr110_vgpr111, implicit $vgpr112_vgpr113_vgpr114_vgpr115_vgpr116_vgpr117_vgpr118_vgpr119_vgpr120_vgpr121_vgpr122_vgpr123_vgpr124_vgpr125_vgpr126_vgpr127, implicit $vgpr128_vgpr129_vgpr130_vgpr131_vgpr132_vgpr133_vgpr134_vgpr135_vgpr136_vgpr137_vgpr138_vgpr139_vgpr140_vgpr141_vgpr142_vgpr143, implicit $vgpr144_vgpr145_vgpr146_vgpr147_vgpr148_vgpr149_vgpr150_vgpr151_vgpr152_vgpr153_vgpr154_vgpr155_vgpr156_vgpr157_vgpr158_vgpr159, implicit $vgpr160_vgpr161_vgpr162_vgpr163_vgpr164_vgpr165_vgpr166_vgpr167_vgpr168_vgpr169_vgpr170_vgpr171_vgpr172_vgpr173_vgpr174_vgpr175, implicit $vgpr176_vgpr177_vgpr178_vgpr179_vgpr180_vgpr181_vgpr182_vgpr183_vgpr184_vgpr185_vgpr186_vgpr187_vgpr188_vgpr189_vgpr190_vgpr191, implicit $vgpr192_vgpr193_vgpr194_vgpr195_vgpr196_vgpr197_vgpr198_vgpr199_vgpr200_vgpr201_vgpr202_vgpr203_vgpr204_vgpr205_vgpr206_vgpr207, implicit $vgpr208_vgpr209_vgpr210_vgpr211_vgpr212_vgpr213_vgpr214_vgpr215_vgpr216_vgpr217_vgpr218_vgpr219_vgpr220_vgpr221_vgpr222_vgpr223, implicit $vgpr224_vgpr225_vgpr226_vgpr227_vgpr228_vgpr229_vgpr230_vgpr231_vgpr232_vgpr233_vgpr234_vgpr235_vgpr236_vgpr237_vgpr238_vgpr239, implicit $vgpr240_vgpr241_vgpr242_vgpr243_vgpr244_vgpr245_vgpr246_vgpr247, implicit $vgpr248_vgpr249_vgpr250_vgpr251, implicit $vgpr252_vgpr253, implicit $vgpr254, implicit $vgpr255
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr8, %stack.1, implicit-def dead $vcc, implicit $exec
@@ -1475,15 +1242,13 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__kernel_fi_offset0__other_vgpr_live_after
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr1, killed $vgpr2, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
renamable $vgpr0 = V_ADD_CO_U32_e32 renamable $vgpr1, %stack.0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vgpr1
@@ -1571,15 +1336,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__identity_vgpr__fi_offset0__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, %stack.0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1604,13 +1365,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, $vgpr0, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_offset0__identity_vgpr__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 %stack.0, $vgpr0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1636,13 +1395,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr0, 0, 0, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e64__identity_vgpr__fi_offset0__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 $vgpr0, 0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, renamable dead $vcc = V_ADD_CO_U32_e64 $vgpr0, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1668,13 +1425,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 0, $vgpr0, 0, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e64__fi_offset0__identity_vgpr__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0, dead renamable $vcc = V_ADD_CO_U32_e64 0, $vgpr0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0, renamable dead $vcc = V_ADD_CO_U32_e64 %stack.0, $vgpr0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1699,13 +1454,11 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__fi_offset0__identity_vgpr__kernel_kill
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 0, killed $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 %stack.0, killed $vgpr0, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1763,15 +1516,13 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 32, $vgpr0, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__identity_vgpr__fi_offset32__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 32, $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1799,15 +1550,13 @@ body: |
; MUBUFW64-NEXT: {{ $}}
; MUBUFW64-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW64-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; MUBUFW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 72, $vgpr0, implicit-def dead $vcc, implicit $exec
; MUBUFW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_co_u32_e32__identity_vgpr__fi_offset72__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 72, $vgpr0, implicit-def dead $vcc, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_CO_U32_e32 $vgpr0, %stack.1, implicit-def dead $vcc, implicit $exec
SI_RETURN implicit $vgpr0
diff --git a/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-u32.mir b/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-u32.mir
index af6823c1ab64a0..388bbc26bbea76 100644
--- a/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-u32.mir
+++ b/llvm/test/CodeGen/AMDGPU/eliminate-frame-index-v-add-u32.mir
@@ -18,12 +18,12 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUF-LABEL: name: v_add_u32_e32__inline_imm__fi_offset0
- ; MUBUF: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__inline_imm__fi_offset0
- ; MUBUFW32: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
@@ -54,27 +54,23 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUF-LABEL: name: v_add_u32_e32__inline_imm___fi_offset_inline_imm
- ; MUBUF: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUF: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 28, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__inline_imm___fi_offset_inline_imm
- ; MUBUFW32: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 16, killed $vgpr1, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUFW32: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 28, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__inline_imm___fi_offset_inline_imm
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 16, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 28, killed $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__inline_imm___fi_offset_inline_imm
- ; FLATSCRW32: $sgpr4 = S_ADD_I32 $sgpr32, 16, implicit-def $scc
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW32: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 28, killed $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 12, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -93,12 +89,12 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUF-LABEL: name: v_add_u32_e32__literal__fi_offset0
- ; MUBUF: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__literal__fi_offset0
- ; MUBUFW32: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
@@ -129,27 +125,23 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUF-LABEL: name: v_add_u32_e32__literal__fi_offset0__offset_inlineimm
- ; MUBUF: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
+ ; MUBUF: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 100, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__literal__fi_offset0__offset_inlineimm
- ; MUBUFW32: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 32, killed $vgpr1, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
+ ; MUBUFW32: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 100, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__literal__fi_offset0__offset_inlineimm
- ; FLATSCRW64: $sgpr4 = S_ADD_I32 $sgpr32, 32, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
+ ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 100, killed $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__literal__fi_offset0__offset_inlineimm
- ; FLATSCRW32: $sgpr4 = S_ADD_I32 $sgpr32, 32, implicit-def $scc
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 68, killed $vgpr1, implicit $exec
+ ; FLATSCRW32: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 100, killed $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 68, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -171,29 +163,27 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e32__vgpr__fi_offset0
; MUBUF: liveins: $vgpr1
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__vgpr__fi_offset0
; MUBUFW32: liveins: $vgpr1
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__vgpr__fi_offset0
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 $sgpr32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__vgpr__fi_offset0
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr2 = V_MOV_B32_e32 $sgpr32, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, %stack.0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -215,15 +205,15 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e32__fi_offset0__vgpr
; MUBUF: liveins: $vgpr1
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $vgpr2, $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__fi_offset0__vgpr
; MUBUFW32: liveins: $vgpr1
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $vgpr2, $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__fi_offset0__vgpr
@@ -258,33 +248,31 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e32__vgpr__fi_literal_offset
; MUBUF: liveins: $vgpr1
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__vgpr__fi_literal_offset
; MUBUFW32: liveins: $vgpr1
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__vgpr__fi_literal_offset
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__vgpr__fi_literal_offset
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: $vgpr2 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -307,31 +295,31 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e32__fi_literal_offset__vgpr
; MUBUF: liveins: $vgpr1
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $vgpr2, $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__fi_literal_offset__vgpr
; MUBUFW32: liveins: $vgpr1
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr2 = V_ADD_U32_e32 128, killed $vgpr2, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $vgpr2, $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr2 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__fi_literal_offset__vgpr
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $sgpr4, $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__fi_literal_offset__vgpr
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 killed $sgpr4, $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr32, $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 %stack.1, $vgpr1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -354,33 +342,33 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e32__sgpr__fi_literal_offset
; MUBUF: liveins: $sgpr8
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__sgpr__fi_literal_offset
; MUBUFW32: liveins: $sgpr8
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__sgpr__fi_literal_offset
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__sgpr__fi_literal_offset
; FLATSCRW32: liveins: $sgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
+ ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 128, killed $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -399,21 +387,21 @@ machineFunctionInfo:
body: |
bb.0:
; MUBUF-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0
- ; MUBUF: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, killed $vgpr1, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0
- ; MUBUFW32: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, killed $vgpr1, 0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0
- ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 12, $sgpr32, 0, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, 12, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0
- ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 12, $sgpr32, 0, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, 12, 0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 12, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -436,32 +424,34 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e64__fi_literal_offset__sgpr
; MUBUF: liveins: $sgpr8
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__fi_literal_offset__sgpr
; MUBUFW32: liveins: $sgpr8
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 128, killed $vgpr0, 0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__fi_literal_offset__sgpr
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 killed $sgpr4, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $sgpr8, 0, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr32, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__fi_literal_offset__sgpr
; FLATSCRW32: liveins: $sgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $sgpr4, $sgpr8, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $sgpr8, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 128, killed $vgpr0, 0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 %stack.1, $sgpr8, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -484,31 +474,33 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset
; MUBUF: liveins: $vgpr8
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset
; MUBUFW32: liveins: $vgpr8
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr0, 128, 0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset
; FLATSCRW64: liveins: $vgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $sgpr4, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset
; FLATSCRW32: liveins: $vgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $sgpr4, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr0, 128, 0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, %stack.1, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -531,31 +523,33 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset__clamp
; MUBUF: liveins: $vgpr8
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset__clamp
; MUBUFW32: liveins: $vgpr8
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr0, 128, 1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset__clamp
; FLATSCRW64: liveins: $vgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $sgpr4, 1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__vgpr__fi_literal_offset__clamp
; FLATSCRW32: liveins: $vgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $sgpr4, 1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr0, 128, 1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, %stack.1, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -578,31 +572,33 @@ body: |
; MUBUF-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp
; MUBUF: liveins: $vgpr8
; MUBUF-NEXT: {{ $}}
- ; MUBUF-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; MUBUF-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp
; MUBUFW32: liveins: $vgpr8
; MUBUFW32-NEXT: {{ $}}
- ; MUBUFW32-NEXT: $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
- ; MUBUFW32-NEXT: $vgpr1 = V_ADD_U32_e32 128, killed $vgpr1, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 5, $sgpr32, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 128, killed $vgpr0, 1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp
; FLATSCRW64: liveins: $vgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $sgpr4, $vgpr8, 1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr0, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp
; FLATSCRW32: liveins: $vgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $sgpr4 = S_ADD_I32 $sgpr32, 128, implicit-def $scc
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $sgpr4, $vgpr8, 1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr32, $vgpr8, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 128, killed $vgpr0, 1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 %stack.1, $vgpr8, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -643,8 +639,8 @@ body: |
; FLATSCRW64-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp__kernel
; FLATSCRW64: liveins: $vgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $sgpr4 = S_MOV_B32 128
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $sgpr4, $vgpr8, 1, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 128, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, $vgpr8, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__fi_literal_offset__vgpr__clamp__kernel
@@ -674,8 +670,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__inline_imm__fi_offset0__kernel
@@ -683,18 +678,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__inline_imm__fi_offset0__kernel
- ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__inline_imm__fi_offset0__kernel
- ; FLATSCRW32: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 12, %stack.0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -718,7 +710,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 0, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0__kernel
@@ -726,15 +718,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 0, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0__kernel
- ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 12, 0, 0, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__inline_imm__fi_offset0__kernel
- ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 12, 0, 0, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_MOV_B32_e32 12, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 12, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -761,8 +753,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__inline_imm__fi_literal__kernel
@@ -770,18 +761,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__inline_imm__fi_literal__kernel
- ; FLATSCRW64: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__inline_imm__fi_literal__kernel
- ; FLATSCRW32: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 12, killed $vgpr1, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 12, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -806,7 +794,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel
@@ -814,15 +802,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel
- ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 0, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel
- ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 0, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 12, %stack.1, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -847,7 +835,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 32, 12, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__fi_literal__inline_imm__kernel
@@ -855,15 +843,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 32, 12, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__fi_literal__inline_imm__kernel
- ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 32, 12, 0, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__fi_literal__inline_imm__kernel
- ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 32, 12, 0, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_MOV_B32_e32 44, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 %stack.1, 12, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -888,7 +876,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 44, 0, 1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel__clamp
@@ -896,15 +884,15 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 44, 0, 1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel__clamp
- ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 1, implicit $exec
+ ; FLATSCRW64: renamable $vgpr0 = V_ADD_U32_e64 44, 0, 1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__inline_imm__fi_literal__kernel__clamp
- ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 12, 32, 1, implicit $exec
+ ; FLATSCRW32: renamable $vgpr0 = V_ADD_U32_e64 44, 0, 1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 12, %stack.1, 1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -932,7 +920,7 @@ body: |
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr1 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr1 = V_MOV_B32_e32 $vgpr0, implicit $exec
; MUBUF-NEXT: renamable $vgpr2 = V_MOV_B32_e32 15, implicit $exec
; MUBUF-NEXT: SCRATCH_STORE_DWORD killed renamable $vgpr2, killed renamable $vgpr1, 0, 0, implicit $exec, implicit $flat_scr :: (volatile store (s32), addrspace 5)
; MUBUF-NEXT: renamable $vgpr0 = V_SUB_U32_e32 0, killed $vgpr0, implicit $exec
@@ -945,7 +933,7 @@ body: |
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr1 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr1 = V_MOV_B32_e32 $vgpr0, implicit $exec
; MUBUFW32-NEXT: renamable $vgpr2 = V_MOV_B32_e32 15, implicit $exec
; MUBUFW32-NEXT: SCRATCH_STORE_DWORD killed renamable $vgpr2, killed renamable $vgpr1, 0, 0, implicit $exec, implicit $flat_scr :: (volatile store (s32), addrspace 5)
; MUBUFW32-NEXT: renamable $vgpr0 = V_SUB_U32_e32 0, killed $vgpr0, implicit $exec
@@ -956,7 +944,7 @@ body: |
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
; FLATSCRW64-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr1 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr1 = V_MOV_B32_e32 $vgpr0, implicit $exec
; FLATSCRW64-NEXT: renamable $vgpr2 = V_MOV_B32_e32 15, implicit $exec
; FLATSCRW64-NEXT: SCRATCH_STORE_DWORD killed renamable $vgpr2, killed renamable $vgpr1, 0, 0, implicit $exec, implicit $flat_scr :: (volatile store (s32), addrspace 5)
; FLATSCRW64-NEXT: renamable $vgpr0 = V_SUB_U32_e32 0, killed $vgpr0, implicit $exec
@@ -967,7 +955,7 @@ body: |
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
; FLATSCRW32-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr1 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr1 = V_MOV_B32_e32 $vgpr0, implicit $exec
; FLATSCRW32-NEXT: renamable $vgpr2 = V_MOV_B32_e32 15, implicit $exec
; FLATSCRW32-NEXT: SCRATCH_STORE_DWORD killed renamable $vgpr2, killed renamable $vgpr1, 0, 0, implicit $exec, implicit $flat_scr :: (volatile store (s32), addrspace 5)
; FLATSCRW32-NEXT: renamable $vgpr0 = V_SUB_U32_e32 0, killed $vgpr0, implicit $exec
@@ -1001,8 +989,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; MUBUFW32-LABEL: name: v_add_u32_e32__kernel_fi_offset0__other_vgpr_live_after
@@ -1010,22 +997,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__kernel_fi_offset0__other_vgpr_live_after
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__kernel_fi_offset0__other_vgpr_live_after
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr2 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr1, killed $vgpr2, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
renamable $vgpr0 = V_ADD_U32_e32 renamable $vgpr1, %stack.0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vgpr1
@@ -1050,7 +1034,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; MUBUFW32-LABEL: name: v_add_u32_e32__kernel_other_vgpr_live_after__fi_offset0
@@ -1058,19 +1042,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__kernel_other_vgpr_live_after__fi_offset0
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__kernel_other_vgpr_live_after__fi_offset0
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
renamable $vgpr0 = V_ADD_U32_e32 %stack.0, renamable $vgpr1, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vgpr1
@@ -1095,8 +1079,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $sgpr8, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; MUBUFW32-LABEL: name: v_add_u32_e32__kernel_fi_offset0__sgpr_live_after
@@ -1104,22 +1087,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $sgpr8, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__kernel_fi_offset0__sgpr_live_after
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $sgpr8, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__kernel_fi_offset0__sgpr_live_after
; FLATSCRW32: liveins: $sgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $sgpr8, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
renamable $vgpr0 = V_ADD_U32_e32 renamable $sgpr8, %stack.0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $sgpr8
@@ -1144,7 +1124,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr1, 0, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; MUBUFW32-LABEL: name: v_add_u32_e64__kernel_fi_offset0__other_vgpr_live_after
@@ -1152,19 +1132,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr1, 0, 0, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__kernel_fi_offset0__other_vgpr_live_after
; FLATSCRW64: liveins: $vgpr1
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr1, 0, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__kernel_fi_offset0__other_vgpr_live_after
; FLATSCRW32: liveins: $vgpr1
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr1, 0, 0, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_MOV_B32_e32 $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0, implicit $vgpr1
renamable $vgpr0 = V_ADD_U32_e64 renamable $vgpr1, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0, implicit $vgpr1
@@ -1190,8 +1170,8 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; MUBUFW32-LABEL: name: v_add_u32_e32__kernel_fi_offset72__sgpr_live_after
@@ -1199,22 +1179,22 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__kernel_fi_offset72__sgpr_live_after
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, killed $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__kernel_fi_offset72__sgpr_live_after
; FLATSCRW32: liveins: $sgpr8
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $sgpr8, killed $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, killed $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
renamable $vgpr0 = V_ADD_U32_e32 renamable $sgpr8, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0, implicit $sgpr8
@@ -1240,8 +1220,9 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
+ ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr1, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; MUBUFW32-LABEL: name: v_add_u32_e64__kernel_fi_offset72__sgpr_live_after
@@ -1255,8 +1236,9 @@ body: |
; FLATSCRW64-LABEL: name: v_add_u32_e64__kernel_fi_offset72__sgpr_live_after
; FLATSCRW64: liveins: $sgpr8
; FLATSCRW64-NEXT: {{ $}}
+ ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 $sgpr8, implicit $exec
; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $sgpr8, killed $vgpr1, 0, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 killed $vgpr1, killed $vgpr1, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0, implicit $sgpr8
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__kernel_fi_offset72__sgpr_live_after
@@ -1288,8 +1270,6 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset0__kernel
@@ -1297,22 +1277,16 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset0__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset0__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, %stack.0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1337,7 +1311,6 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel
@@ -1345,19 +1318,16 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 %stack.0, $vgpr0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1383,7 +1353,6 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr0, 0, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__identity_vgpr__fi_offset0__kernel
@@ -1391,19 +1360,16 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr0, 0, 0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__identity_vgpr__fi_offset0__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr0, 0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__identity_vgpr__fi_offset0__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 $vgpr0, 0, 0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 $vgpr0, %stack.0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1429,7 +1395,6 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e64 0, $vgpr0, 0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e64__fi_offset0__identity_vgpr__kernel
@@ -1437,19 +1402,16 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 0, $vgpr0, 0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e64__fi_offset0__identity_vgpr__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e64 0, $vgpr0, 0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e64__fi_offset0__identity_vgpr__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e64 0, $vgpr0, 0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e64 %stack.0, $vgpr0, 0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1474,7 +1436,6 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, killed $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel_kill
@@ -1482,19 +1443,16 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, killed $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel_kill
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, killed $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__fi_offset0__identity_vgpr__kernel_kill
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 0, killed $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 %stack.0, killed $vgpr0, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1521,8 +1479,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 32, $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset32__kernel
@@ -1530,22 +1487,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 32, $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset32__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 32, $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset32__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 32, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 32, $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
@@ -1573,8 +1527,7 @@ body: |
; MUBUF-NEXT: {{ $}}
; MUBUF-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUF-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUF-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; MUBUF-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, $vgpr0, implicit $exec
; MUBUF-NEXT: SI_RETURN implicit $vgpr0
;
; MUBUFW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset72__kernel
@@ -1582,22 +1535,19 @@ body: |
; MUBUFW32-NEXT: {{ $}}
; MUBUFW32-NEXT: $sgpr0 = S_ADD_U32 $sgpr0, $noreg, implicit-def $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
; MUBUFW32-NEXT: $sgpr1 = S_ADDC_U32 $sgpr1, 0, implicit-def dead $scc, implicit $scc, implicit-def $sgpr0_sgpr1_sgpr2_sgpr3
- ; MUBUFW32-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; MUBUFW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, $vgpr0, implicit $exec
; MUBUFW32-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW64-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset72__kernel
; FLATSCRW64: liveins: $vgpr0
; FLATSCRW64-NEXT: {{ $}}
- ; FLATSCRW64-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; FLATSCRW64-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, $vgpr0, implicit $exec
; FLATSCRW64-NEXT: SI_RETURN implicit $vgpr0
;
; FLATSCRW32-LABEL: name: v_add_u32_e32__identity_vgpr__fi_offset72__kernel
; FLATSCRW32: liveins: $vgpr0
; FLATSCRW32-NEXT: {{ $}}
- ; FLATSCRW32-NEXT: $vgpr1 = V_MOV_B32_e32 72, implicit $exec
- ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, killed $vgpr1, implicit $exec
+ ; FLATSCRW32-NEXT: renamable $vgpr0 = V_ADD_U32_e32 72, $vgpr0, implicit $exec
; FLATSCRW32-NEXT: SI_RETURN implicit $vgpr0
renamable $vgpr0 = V_ADD_U32_e32 $vgpr0, %stack.1, implicit $exec
SI_RETURN implicit $vgpr0
diff --git a/llvm/test/CodeGen/AMDGPU/flat-scratch.ll b/llvm/test/CodeGen/AMDGPU/flat-scratch.ll
index 9a9fd289e2d0c4..d92740db5ea224 100644
--- a/llvm/test/CodeGen/AMDGPU/flat-scratch.ll
+++ b/llvm/test/CodeGen/AMDGPU/flat-scratch.ll
@@ -674,7 +674,7 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX9-NEXT: s_add_u32 flat_scratch_lo, s6, s11
; GFX9-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX9-NEXT: s_addc_u32 flat_scratch_hi, s7, 0
-; GFX9-NEXT: v_add_u32_e32 v1, 0, v0
+; GFX9-NEXT: v_mov_b32_e32 v1, v0
; GFX9-NEXT: v_mov_b32_e32 v2, 15
; GFX9-NEXT: scratch_store_dword v1, v2, off
; GFX9-NEXT: s_waitcnt vmcnt(0)
@@ -691,7 +691,7 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX10-NEXT: s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s7
; GFX10-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX10-NEXT: v_mov_b32_e32 v2, 15
-; GFX10-NEXT: v_add_nc_u32_e32 v1, 0, v0
+; GFX10-NEXT: v_mov_b32_e32 v1, v0
; GFX10-NEXT: v_sub_nc_u32_e32 v0, 0, v0
; GFX10-NEXT: scratch_store_dword v1, v2, off
; GFX10-NEXT: s_waitcnt_vscnt null, 0x0
@@ -729,7 +729,7 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX9-PAL-NEXT: s_mov_b32 s10, s0
; GFX9-PAL-NEXT: s_load_dwordx2 s[10:11], s[10:11], 0x0
; GFX9-PAL-NEXT: v_lshlrev_b32_e32 v0, 2, v0
-; GFX9-PAL-NEXT: v_add_u32_e32 v1, 0, v0
+; GFX9-PAL-NEXT: v_mov_b32_e32 v1, v0
; GFX9-PAL-NEXT: v_mov_b32_e32 v2, 15
; GFX9-PAL-NEXT: v_sub_u32_e32 v0, 0, v0
; GFX9-PAL-NEXT: s_waitcnt lgkmcnt(0)
@@ -767,7 +767,7 @@ define amdgpu_kernel void @store_load_vindex_kernel() {
; GFX10-PAL-NEXT: s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s11
; GFX10-PAL-NEXT: v_lshlrev_b32_e32 v0, 2, v0
; GFX10-PAL-NEXT: v_mov_b32_e32 v2, 15
-; GFX10-PAL-NEXT: v_add_nc_u32_e32 v1, 0, v0
+; GFX10-PAL-NEXT: v_mov_b32_e32 v1, v0
; GFX10-PAL-NEXT: v_sub_nc_u32_e32 v0, 0, v0
; GFX10-PAL-NEXT: scratch_store_dword v1, v2, off
; GFX10-PAL-NEXT: s_waitcnt_vscnt null, 0x0
diff --git a/llvm/test/CodeGen/AMDGPU/frame-index.mir b/llvm/test/CodeGen/AMDGPU/frame-index.mir
index f388aeb0470291..2618bb3d7daaf8 100644
--- a/llvm/test/CodeGen/AMDGPU/frame-index.mir
+++ b/llvm/test/CodeGen/AMDGPU/frame-index.mir
@@ -23,8 +23,8 @@ body: |
; GCN-NEXT: {{ $}}
; GCN-NEXT: renamable $vgpr0 = V_AND_B32_e32 1023, killed $vgpr31, implicit $exec
; GCN-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; GCN-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GCN-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr1, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; GCN-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GCN-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
; GCN-NEXT: renamable $vgpr0, dead renamable $vcc = nuw V_ADD_CO_U32_e64 4, killed $vgpr0, 0, implicit $exec
; GCN-NEXT: $m0 = S_MOV_B32 -1
; GCN-NEXT: DS_WRITE_B32 undef renamable $vgpr0, killed renamable $vgpr0, 0, 0, implicit $m0, implicit $exec
@@ -206,8 +206,8 @@ body: |
; GCN-NEXT: {{ $}}
; GCN-NEXT: renamable $vgpr0 = V_AND_B32_e32 1023, killed $vgpr31, implicit $exec
; GCN-NEXT: renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
- ; GCN-NEXT: $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
- ; GCN-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr1, killed $vgpr0, implicit-def dead $vcc, implicit $exec
+ ; GCN-NEXT: renamable $vgpr1 = V_LSHRREV_B32_e64 6, $sgpr32, implicit $exec
+ ; GCN-NEXT: renamable $vgpr0 = V_ADD_CO_U32_e32 killed $vgpr0, killed $vgpr1, implicit-def dead $vcc, implicit $exec
renamable $vgpr0 = V_AND_B32_e32 1023, killed $vgpr31, implicit $exec
renamable $vgpr0 = V_LSHLREV_B32_e32 2, killed $vgpr0, implicit $exec
renamable $vgpr0 = V_ADD_CO_U32_e32 %stack.0, killed $vgpr0, implicit-def dead $vcc, implicit $exec
diff --git a/llvm/test/CodeGen/AMDGPU/materialize-frame-index-sgpr.gfx10.ll b/llvm/test/CodeGen/AMDGPU/materialize-frame-index-sgpr.gfx10.ll
index 94d1eca05ed0ec..1993eec51be448 100644
--- a/llvm/test/CodeGen/AMDGPU/materialize-frame-index-sgpr.gfx10.ll
+++ b/llvm/test/CodeGen/AMDGPU/materialize-frame-index-sgpr.gfx10.ll
@@ -1621,13 +1621,13 @@ define void @scalar_mov_materializes_frame_index_unavailable_scc__gep_sgpr_offse
; GFX10_1-NEXT: s_lshl_b32 s4, s6, 2
; GFX10_1-NEXT: v_writelane_b32 v2, s59, 0
; GFX10_1-NEXT: v_lshrrev_b32_e64 v0, 5, s32
-; GFX10_1-NEXT: v_add_nc_u32_e32 v3, 0x4040, v3
+; GFX10_1-NEXT: v_add_nc_u32_e32 v1, s4, v3
+; GFX10_1-NEXT: s_and_b32 s4, 0, exec_lo
; GFX10_1-NEXT: v_add_nc_u32_e32 v0, 64, v0
; GFX10_1-NEXT: ;;#ASMSTART
; GFX10_1-NEXT: ; use alloca0 v0
; GFX10_1-NEXT: ;;#ASMEND
-; GFX10_1-NEXT: v_add_nc_u32_e32 v1, s4, v3
-; GFX10_1-NEXT: s_and_b32 s4, 0, exec_lo
+; GFX10_1-NEXT: v_add_nc_u32_e32 v1, 0x4040, v1
; GFX10_1-NEXT: v_readfirstlane_b32 s59, v1
; GFX10_1-NEXT: ;;#ASMSTART
; GFX10_1-NEXT: ; use s59, scc
@@ -1652,13 +1652,13 @@ define void @scalar_mov_materializes_frame_index_unavailable_scc__gep_sgpr_offse
; GFX10_3-NEXT: s_lshl_b32 s4, s6, 2
; GFX10_3-NEXT: v_writelane_b32 v2, s59, 0
; GFX10_3-NEXT: v_lshrrev_b32_e64 v0, 5, s32
-; GFX10_3-NEXT: v_add_nc_u32_e32 v3, 0x4040, v3
+; GFX10_3-NEXT: v_add_nc_u32_e32 v1, s4, v3
+; GFX10_3-NEXT: s_and_b32 s4, 0, exec_lo
; GFX10_3-NEXT: v_add_nc_u32_e32 v0, 64, v0
; GFX10_3-NEXT: ;;#ASMSTART
; GFX10_3-NEXT: ; use alloca0 v0
; GFX10_3-NEXT: ;;#ASMEND
-; GFX10_3-NEXT: v_add_nc_u32_e32 v1, s4, v3
-; GFX10_3-NEXT: s_and_b32 s4, 0, exec_lo
+; GFX10_3-NEXT: v_add_nc_u32_e32 v1, 0x4040, v1
; GFX10_3-NEXT: v_readfirstlane_b32 s59, v1
; GFX10_3-NEXT: ;;#ASMSTART
; GFX10_3-NEXT: ; use s59, scc
@@ -1680,15 +1680,15 @@ define void @scalar_mov_materializes_frame_index_unavailable_scc__gep_sgpr_offse
; GFX11-NEXT: s_mov_b32 exec_lo, s1
; GFX11-NEXT: s_lshl_b32 s0, s0, 2
; GFX11-NEXT: s_add_i32 s1, s32, 64
-; GFX11-NEXT: v_writelane_b32 v2, s59, 0
+; GFX11-NEXT: v_add_nc_u32_e64 v1, s0, s32
; GFX11-NEXT: v_mov_b32_e32 v0, s1
-; GFX11-NEXT: s_add_i32 s1, s32, 0x4040
+; GFX11-NEXT: v_writelane_b32 v2, s59, 0
+; GFX11-NEXT: s_and_b32 s0, 0, exec_lo
+; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_3) | instskip(SKIP_1) | instid1(VALU_DEP_1)
+; GFX11-NEXT: v_add_nc_u32_e32 v1, 0x4040, v1
; GFX11-NEXT: ;;#ASMSTART
; GFX11-NEXT: ; use alloca0 v0
; GFX11-NEXT: ;;#ASMEND
-; GFX11-NEXT: v_add_nc_u32_e64 v1, s0, s1
-; GFX11-NEXT: s_and_b32 s0, 0, exec_lo
-; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1)
; GFX11-NEXT: v_readfirstlane_b32 s59, v1
; GFX11-NEXT: ;;#ASMSTART
; GFX11-NEXT: ; use s59, scc
@@ -1712,15 +1712,15 @@ define void @scalar_mov_materializes_frame_index_unavailable_scc__gep_sgpr_offse
; GFX12-NEXT: scratch_store_b32 off, v2, s32 offset:32768 ; 4-byte Folded Spill
; GFX12-NEXT: s_mov_b32 exec_lo, s1
; GFX12-NEXT: s_lshl_b32 s0, s0, 2
-; GFX12-NEXT: s_add_co_i32 s1, s32, 0x4000
; GFX12-NEXT: v_writelane_b32 v2, s59, 0
-; GFX12-NEXT: v_add_nc_u32_e64 v1, s0, s1
+; GFX12-NEXT: v_add_nc_u32_e64 v1, s0, s32
; GFX12-NEXT: v_mov_b32_e32 v0, s32
; GFX12-NEXT: s_and_b32 s0, 0, exec_lo
+; GFX12-NEXT: s_delay_alu instid0(VALU_DEP_2) | instskip(SKIP_1) | instid1(VALU_DEP_1)
+; GFX12-NEXT: v_add_nc_u32_e32 v1, 0x4000, v1
; GFX12-NEXT: ;;#ASMSTART
; GFX12-NEXT: ; use alloca0 v0
; GFX12-NEXT: ;;#ASMEND
-; GFX12-NEXT: s_delay_alu instid0(VALU_DEP_2)
; GFX12-NEXT: v_readfirstlane_b32 s59, v1
; GFX12-NEXT: ;;#ASMSTART
; GFX12-NEXT: ; use s59, scc
diff --git a/llvm/test/CodeGen/AMDGPU/mubuf-offset-private.ll b/llvm/test/CodeGen/AMDGPU/mubuf-offset-private.ll
index 4aed9dc2fca6ca..268ac4db000bdd 100644
--- a/llvm/test/CodeGen/AMDGPU/mubuf-offset-private.ll
+++ b/llvm/test/CodeGen/AMDGPU/mubuf-offset-private.ll
@@ -137,11 +137,15 @@ define amdgpu_kernel void @store_private_offset_i8_max_offset_plus2() #0 {
; so a possibly negative base index can't be used for the vgpr offset.
; GCN-LABEL: {{^}}store_private_unknown_bits_vaddr:
-; SICIVI: v_add_{{i|u}}32_e32 [[ADDR0:v[0-9]+]], vcc, 0
-; SICIVI: v_add_{{i|u}}32_e32 [[ADDR1:v[0-9]+]], vcc, 32, [[ADDR0]]
+; SICIVI: {{buffer|flat}}_load_dword [[VADDR:v[0-9]+]],
+; SICIVI: v_lshlrev_b32_e32 [[ADDR:v[0-9]+]], 2, [[VADDR]]
+; SICIVI-NOT: [[ADDR]]
+; SICIVI: v_add_{{i|u}}32_e32 [[ADDR1:v[0-9]+]], vcc, 32, [[ADDR]]
; SICIVI: buffer_store_dword v{{[0-9]+}}, [[ADDR1]], s{{\[[0-9]+:[0-9]+\]}}, 0 offen{{$}}
-; GFX9: v_add_u32_e32 [[ADDR:v[0-9]+]], 0,
+; GFX9: global_load_dword [[VADDR:v[0-9]+]],
+; GFX9: v_lshlrev_b32_e32 [[ADDR:v[0-9]+]], 2, [[VADDR]]
+; GFX9-NOT: [[ADDR]]
; GFX9: buffer_store_dword v{{[0-9]+}}, [[ADDR]], s{{\[[0-9]+:[0-9]+\]}}, 0 offen offset:32
define amdgpu_kernel void @store_private_unknown_bits_vaddr() #0 {
%alloca = alloca [16 x i32], align 4, addrspace(5)
diff --git a/llvm/test/CodeGen/AMDGPU/scratch-simple.ll b/llvm/test/CodeGen/AMDGPU/scratch-simple.ll
index 4cc469b9b49a06..224b7d4b53e686 100644
--- a/llvm/test/CodeGen/AMDGPU/scratch-simple.ll
+++ b/llvm/test/CodeGen/AMDGPU/scratch-simple.ll
@@ -74,7 +74,7 @@
; GFX11-FLATSCR: v_and_b32_e32 [[CLAMP_IDX:v[0-9]+]], 0x1fc, v0
; MUBUF-DAG: v_add{{_|_nc_}}{{i|u}}32_e32 [[HI_OFF:v[0-9]+]],{{.*}} 0x200, [[CLAMP_IDX]]
-; MUBUF-DAG: v_add{{_|_nc_}}{{i|u}}32_e32 [[LO_OFF:v[0-9]+]],{{.*}} {{v2|0}}, [[CLAMP_IDX]]
+
; FLATSCR: v_add{{_|_nc_}}{{u32|b32}}_e32 [[LO_OFF:v[0-9]+]],{{.*}} {{v2|0}}, [[CLAMP_IDX]]
; MUBUF: buffer_load_dword {{v[0-9]+}}, [[LO_OFF]], {{s\[[0-9]+:[0-9]+\]}}, 0 offen
diff --git a/llvm/test/CodeGen/AMDGPU/stack-pointer-offset-relative-frameindex.ll b/llvm/test/CodeGen/AMDGPU/stack-pointer-offset-relative-frameindex.ll
index 132775d81ca1ad..ebc82248cc80c1 100644
--- a/llvm/test/CodeGen/AMDGPU/stack-pointer-offset-relative-frameindex.ll
+++ b/llvm/test/CodeGen/AMDGPU/stack-pointer-offset-relative-frameindex.ll
@@ -34,7 +34,7 @@ define amdgpu_kernel void @kernel_background_evaluate(ptr addrspace(5) %kg, ptr
; MUBUF-NEXT: s_and_saveexec_b32 s0, vcc_lo
; MUBUF-NEXT: s_cbranch_execz .LBB0_2
; MUBUF-NEXT: ; %bb.1: ; %if.then4.i
-; MUBUF-NEXT: v_add_nc_u32_e64 v0, 4, 0x4000
+; MUBUF-NEXT: v_mov_b32_e32 v0, 0x4004
; MUBUF-NEXT: s_mov_b32 s0, 0x41c64e6d
; MUBUF-NEXT: s_clause 0x1
; MUBUF-NEXT: buffer_load_dword v1, v0, s[36:39], 0 offen
diff --git a/llvm/test/CodeGen/AMDGPU/stack-realign.ll b/llvm/test/CodeGen/AMDGPU/stack-realign.ll
index be3c0d741ac5f3..70bd63d31d5d77 100644
--- a/llvm/test/CodeGen/AMDGPU/stack-realign.ll
+++ b/llvm/test/CodeGen/AMDGPU/stack-realign.ll
@@ -11,7 +11,7 @@
; GCN-LABEL: {{^}}needs_align16_default_stack_align:
; GCN-DAG: v_lshlrev_b32_e32 [[SCALED_IDX:v[0-9]+]], 4, v0
; GCN-DAG: v_lshrrev_b32_e64 [[FRAMEDIFF:v[0-9]+]], 6, s32
-; GCN: v_add_u32_e32 [[FI:v[0-9]+]], vcc, [[FRAMEDIFF]], [[SCALED_IDX]]
+; GCN: v_add_u32_e32 [[FI:v[0-9]+]], vcc, [[SCALED_IDX]], [[FRAMEDIFF]]
; GCN-NOT: s32